Dataset columns:

column                 | dtype   | range / values
-----------------------|---------|-------------------
code                   | string  | lengths 0 – 29.6k
language               | string  | 9 classes
AST_depth              | int64   | 3 – 30
alphanumeric_fraction  | float64 | 0.2 – 0.86
max_line_length        | int64   | 13 – 399
avg_line_length        | float64 | 5.02 – 139
num_lines              | int64   | 7 – 299
source                 | string  | 4 classes
#include <bits/stdc++.h> #define rep(x,to) for(int (x)=0;(x)<(to);(x)++) using namespace std; vector<int> ans, usd(22), g[22]; void dfs(int s){ if(usd[s]) return; usd[s]=1; for(auto v: g[s]) if(!usd[v]) dfs(v); ans.push_back(s); } int main() { int n,m; cin >> m >> n; rep(i,n){ int x,y; cin >>x >>y; g[x-1].push_back(y-1); } rep(i,m) dfs(i); for(int i=ans.size()-1;i>=0;i--) printf("%d\n",ans[i]+1); return 0; }
language: c++ | AST_depth: 11 | alphanumeric_fraction: 0.525229 | max_line_length: 58 | avg_line_length: 13.096774 | num_lines: 31 | source: codenet
package helpers import ( "github.com/SpectraLogic/ds3_go_sdk/ds3" ds3Models "github.com/SpectraLogic/ds3_go_sdk/ds3/models" helperModels "github.com/SpectraLogic/ds3_go_sdk/helpers/models" "sync" ) type getTransceiver struct { BucketName string ReadObjects *[]helperModels.GetObject Strategy *ReadTransferStrategy Client *ds3.Client } func newGetTransceiver(bucketName string, readObjects *[]helperModels.GetObject, strategy *ReadTransferStrategy, client *ds3.Client) *getTransceiver { return &getTransceiver{ BucketName:bucketName, ReadObjects:readObjects, Strategy:strategy, Client:client, } } // Creates the bulk get request from the list of write objects and get bulk job options func newBulkGetRequest(bucketName string, readObjects *[]helperModels.GetObject, options ReadBulkJobOptions) *ds3Models.GetBulkJobSpectraS3Request { var getObjects []ds3Models.Ds3GetObject for _, obj := range *readObjects { getObjects = append(getObjects, createPartialGetObjects(obj)...) } bulkGet := ds3Models.NewGetBulkJobSpectraS3RequestWithPartialObjects(bucketName, getObjects) if options.Aggregating != nil { bulkGet.WithAggregating(*options.Aggregating) } if options.ChunkClientProcessingOrderGuarantee != ds3Models.UNDEFINED { bulkGet.WithChunkClientProcessingOrderGuarantee(options.ChunkClientProcessingOrderGuarantee) } if options.ImplicitJobIdResolution != nil { bulkGet.WithImplicitJobIdResolution(*options.ImplicitJobIdResolution) } if options.priority != ds3Models.UNDEFINED { bulkGet.WithPriority(options.priority) } return bulkGet } // Converts a GetObject into its corresponding Ds3GetObjects for use in bulk get request building. func createPartialGetObjects(getObject helperModels.GetObject) []ds3Models.Ds3GetObject { // handle getting the entire object if len(getObject.Ranges) == 0 { return []ds3Models.Ds3GetObject { { Name:getObject.Name }, } } // handle partial object retrieval var partialObjects []ds3Models.Ds3GetObject for _, r := range getObject.Ranges { offset := r.Start length := r.End - r.Start + 1 partialObjects = append(partialObjects, ds3Models.Ds3GetObject{Name:getObject.Name, Offset:&offset, Length:&length}) } return partialObjects } func (transceiver *getTransceiver) transfer() (string, error) { // create bulk get job bulkGet := newBulkGetRequest(transceiver.BucketName, transceiver.ReadObjects, transceiver.Strategy.Options) bulkGetResponse, err := transceiver.Client.GetBulkJobSpectraS3(bulkGet) if err != nil { return "", err } // init queue, producer and consumer var waitGroup sync.WaitGroup queue := newOperationQueue(transceiver.Strategy.BlobStrategy.maxWaitingTransfers(), transceiver.Client.Logger) producer := newGetProducer(&bulkGetResponse.MasterObjectList, transceiver.ReadObjects, &queue, transceiver.Strategy, transceiver.Client, &waitGroup) consumer := newConsumer(&queue, &waitGroup, transceiver.Strategy.BlobStrategy.maxConcurrentTransfers()) // Wait for completion of producer-consumer goroutines var aggErr ds3Models.AggregateError waitGroup.Add(2) // adding producer and consumer goroutines to wait group go producer.run(&aggErr) // producer will add to waitGroup for every blob retrieval added to queue, and each transfer performed will decrement from waitGroup go consumer.run() waitGroup.Wait() return bulkGetResponse.MasterObjectList.JobId, aggErr.GetErrors() }
language: go | AST_depth: 14 | alphanumeric_fraction: 0.742708 | max_line_length: 161 | avg_line_length: 39.388889 | num_lines: 90 | source: starcoderdata
#import "PlayerBase.h" #import @interface PlayerFFmpeg : NSObject @property (atomic, retain) id player; @property FFOptions* ffOptions; @property bool playInBackground; @property NSString* videoPath; @property bool hasVideoTrack; @property bool isBuffering; @property NSInteger cachedBuffering; @property float cachedPlaybackTime; @property unsigned char* frameBuffer; @end
language: c | AST_depth: 4 | alphanumeric_fraction: 0.81106 | max_line_length: 52 | avg_line_length: 26.125 | num_lines: 16 | source: starcoderdata
/* * Copyright 2020 (github.com/mP1) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package walkingkooka.tree.expression.function.datetime; import org.junit.jupiter.api.Test; import walkingkooka.Cast; import walkingkooka.collect.list.Lists; import walkingkooka.tree.expression.ExpressionEvaluationContext; import java.time.LocalTime; public final class NumberExpressionFunctionLocalTimeTest extends NumberExpressionFunctionTestCase { private final static LocalTime TIME = LocalTime.of(0, 58, 59); @Test public void testHour005859() { this.applyAndCheck3( NumberExpressionFunctionLocalTime.hour(), LocalTime.of(0, 0, 1), 0 ); } @Test public void testHour015859() { this.applyAndCheck3( NumberExpressionFunctionLocalTime.hour(), LocalTime.of(1, 0, 1), 1 ); } @Test public void testHour235859() { this.applyAndCheck3( NumberExpressionFunctionLocalTime.hour(), LocalTime.of(23, 0, 1), 23 ); } @Test public void testMinute() { this.applyAndCheck3( NumberExpressionFunctionLocalTime.minute(), 58 ); } @Test public void testSecond() { this.applyAndCheck3( NumberExpressionFunctionLocalTime.second(), 59 ); } private void applyAndCheck3(final NumberExpressionFunctionLocalTime function, final int expected) { this.applyAndCheck3( function, TIME, expected ); } private void applyAndCheck3(final NumberExpressionFunctionLocalTime function, final LocalTime time, final int expected) { this.applyAndCheck2( function, Lists.of(time), KIND.create(expected) ); } @Test public void testToStringHour() { this.toStringAndCheck( NumberExpressionFunctionLocalTime.hour(), "hour" ); } @Test public void testToStringMinute() { this.toStringAndCheck( NumberExpressionFunctionLocalTime.minute(), "minute" ); } @Test public void testToStringSecond() { this.toStringAndCheck( NumberExpressionFunctionLocalTime.second(), "second" ); } @Override public NumberExpressionFunctionLocalTime createBiFunction() { return NumberExpressionFunctionLocalTime.hour(); } @Override public Class type() { return Cast.to(NumberExpressionFunctionLocalTime.class); } }
language: java | AST_depth: 10 | alphanumeric_fraction: 0.624724 | max_line_length: 163 | avg_line_length: 27.535433 | num_lines: 127 | source: starcoderdata
package utils; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Map; import java.util.TreeMap; import java.util.stream.Stream; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.Result; import javax.xml.transform.Source; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import org.w3c.dom.Document; import org.w3c.dom.Element; public class LocalNameSpaceManager implements Serializable { /** * */ private static final long serialVersionUID = 6719694067564124068L; private String directory; private String minion_id; private Document doc; private FileTree tree; private Map<String, Integer> filesMap; public LocalNameSpaceManager(String directory, String id) { this.directory = directory; this.minion_id = id; tree = new FileTree(Integer.parseInt(this.minion_id)); filesMap = new TreeMap<String, Integer>(); this.buildTreeFromDir(); } public void buildTreeFromDir() { DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder; try { docBuilder = docFactory.newDocumentBuilder(); this.doc = docBuilder.newDocument(); Element rootElement = this.doc.createElement("root"); rootElement.setAttribute("id", "/tmp"); rootElement.setIdAttribute("id", true); this.doc.appendChild(rootElement); this.doc = this.walkDirectoryToDoc(this.doc, this.directory); Source source = new DOMSource(this.doc); ByteArrayOutputStream out = new ByteArrayOutputStream(); Result result = new StreamResult(out); TransformerFactory factory = TransformerFactory.newInstance(); Transformer transformer = factory.newTransformer(); transformer.transform(source, result); this.tree.setData(out.toByteArray()); } catch (ParserConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (TransformerConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (TransformerException e) { // TODO Auto-generated catch block e.printStackTrace(); } }; public FileTree getTreeData() { return this.tree; } public boolean hasFile(String filePath) { return this.filesMap.containsKey(filePath); } public Document walkDirectoryToDoc(Document doc, String directory) { try (Stream filePathStream = Files.walk(Paths.get(directory))) { filePathStream.forEach(filePath -> { Element parentElement = doc.getElementById(filePath.getParent().toString()); this.filesMap.put(filePath.toString(), 1); if (Files.isRegularFile(filePath)) { Element fileElement = doc.createElement("file"); fileElement.setAttribute("id", filePath.toString()); fileElement.setIdAttribute("id", true); parentElement.appendChild(fileElement); } else if (Files.isDirectory(filePath)) { Element folderElement = doc.createElement("folder"); folderElement.setAttribute("id", filePath.toString()); folderElement.setIdAttribute("id", true); parentElement.appendChild(folderElement); } }); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } return doc; } public Document getDoc() { return this.doc; } }
language: java | AST_depth: 20 | alphanumeric_fraction: 0.752366 | max_line_length: 80 | avg_line_length: 29.570248 | num_lines: 121 | source: starcoderdata
using System.Runtime.InteropServices; namespace app.console { [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)] public class DOCINFOA { [MarshalAs(UnmanagedType.LPStr)] public string pDocName; [MarshalAs(UnmanagedType.LPStr)] public string pOutputFile; [MarshalAs(UnmanagedType.LPStr)] public string pDataType; } }
language: c# | AST_depth: 11 | alphanumeric_fraction: 0.675127 | max_line_length: 65 | avg_line_length: 22.235294 | num_lines: 17 | source: starcoderdata
// Author: btjanaka ( // Problem: (Kattis) oddgnome // Title: Odd Gnome // Link: https://open.kattis.com/problems/oddgnome // Idea: Iterate through and find the gnome out of order. // Difficulty: easy // Tags: greedy #include #define FOR(i, a, b) for (int i = a; i < b; ++i) #define FORe(i, a, b) for (int i = a; i <= b; ++i) #define GET(x) scanf("%d", &x) #define PLN putchar('\n') typedef long long ll; using namespace std; int main() { int n; GET(n); FOR(i, 0, n) { int g; GET(g); int cur, prev; bool found = false; FOR(i, 0, g) { GET(cur); if (g == 3 && i == 1) { cout << i + 1 << endl; found = true; } if (!found && i >= 1 && i != g - 1) { if (cur != prev + 1) { cout << i + 1 << endl; found = true; } } prev = cur; } } return 0; }
language: c++ | AST_depth: 13 | alphanumeric_fraction: 0.490374 | max_line_length: 57 | avg_line_length: 21.075 | num_lines: 40 | source: starcoderdata
public static List<Long> getDependChildrenScheduleTimes(long dataTime, DependencyInfo dependencyInfo, ScheduleInfo childInfo) throws NaviException { Period childPeriod = childInfo.getPeriod(); if (childPeriod == null) { LOGGER.info("Child:" + childInfo.getScheduleId() + " is not a period schedule"); return null; } String childScheduleId = childInfo.getScheduleId(); long windowOffsetMills = parseWindowOffset(dataTime, dependencyInfo.getWindowOffset(), childPeriod.getTimezone(), true); final long millsInAnHour = 3600 * 1000; //the first schedule time of child which depends on dataTime Long nextScheduleTime = ScheduleManager.getSchedule() .calcNextScheduleTime(childScheduleId, dataTime + windowOffsetMills + millsInAnHour, false); if (nextScheduleTime == null) { LOGGER.info("Child:" + childInfo.getScheduleId() + " is not a period schedule"); return null; } if ((dependencyInfo.getRule() == DependencyRule.self_finished || dependencyInfo.getRule() == DependencyRule.self_no_failed) && StringUtils.isEmpty(dependencyInfo.getWindowOffset())) { dependencyInfo = dependencyInfo.clone(); dependencyInfo.setWindowOffset( dependencyInfo.getValue()); //self dependence has default window offset of 1 cycle } long startTime = getDependencyParam(nextScheduleTime, dependencyInfo, childPeriod.getTimezone()) .getDependenceStartTime(); if (startTime > dataTime) { return null; } List<Long> childrenScheduleTimes = new ArrayList<>(); while (startTime <= dataTime) { childrenScheduleTimes.add(nextScheduleTime); nextScheduleTime = ScheduleManager.getSchedule() .calcNextScheduleTime(childScheduleId, nextScheduleTime + 1, false); startTime = getDependencyParam(nextScheduleTime, dependencyInfo, childPeriod.getTimezone()) .getDependenceStartTime(); } return childrenScheduleTimes; }
language: java | AST_depth: 12 | alphanumeric_fraction: 0.609966 | max_line_length: 108 | avg_line_length: 54.452381 | num_lines: 42 | source: inline
import { mount } from '@vue/test-utils' import Run from '../../header/Run.vue' import { test } from 'uvu' import * as assert from 'uvu/assert' test('run code button', async () => { const wrapper = mount(Run) const status = wrapper.get('[data-test="runStatus"]') assert.is(status.text(), 'Run', 'Button should include Run') await wrapper.find('button').trigger('click') assert.is(status.text(), 'Running', 'Button should include Running') }) test.run()
language: javascript | AST_depth: 12 | alphanumeric_fraction: 0.678643 | max_line_length: 70 | avg_line_length: 26.833333 | num_lines: 18 | source: starcoderdata
#pragma once #include #include namespace n0 { class CompIdentity; } namespace ur { class Device; } namespace ns { class NodeFactory { public: static n0::SceneNodePtr Create(const ur::Device& dev, const std::string& filepath); static n0::SceneNodePtr Create2D(); static n0::SceneNodePtr Create3D(); private: static n0::SceneNodePtr CreateFromCommon(const ur::Device& dev, const std::string& filepath); static n0::SceneNodePtr CreateFromImage(const ur::Device& dev, const std::string& filepath); static n0::SceneNodePtr CreateFromImage3D(const ur::Device& dev, const std::string& filepath); static n0::SceneNodePtr CreateFromModel(const ur::Device& dev, const std::string& filepath); static n0::SceneNodePtr CreateFromScript(const std::string& filepath); static void InitCompId(n0::CompIdentity& cid, const std::string& filepath); }; // NodeFactory }
language: c | AST_depth: 10 | alphanumeric_fraction: 0.75983 | max_line_length: 95 | avg_line_length: 28.4375 | num_lines: 32 | source: starcoderdata
/* * Copyright (C) 2011 at g dot and dot y at dot * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jongo.use_native; import com.mongodb.MongoWriteException; import com.mongodb.client.MongoCollection; import junit.framework.Assert; import org.bson.types.ObjectId; import org.jongo.model.Coordinate; import org.jongo.model.ExposableFriend; import org.jongo.model.ExternalFriend; import org.jongo.model.Friend; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.Date; import static com.google.common.collect.Lists.newArrayList; import static org.assertj.core.api.Assertions.assertThat; public class InsertNativeTest extends NativeTestBase { private MongoCollection collection; @Before public void setUp() throws Exception { collection = jongo.wrap(database.getCollection("friends", Friend.class)); } @After public void tearDown() throws Exception { collection.drop(); } @Test public void canInsert() throws Exception { Friend friend = new Friend("John", "22 Wall Street Avenue"); collection.insertOne(friend); long afterSave = new Date().getTime(); Friend john = collection.find(q("{name:'John'}")).first(); assertThat(john).isNotNull(); assertThat(john.getId()).isNotNull(); assertThat(john.getId().getDate().getTime()).isLessThan(afterSave); } @Test public void canInsertWithObjectId() throws Exception { ObjectId oid = ObjectId.get(); Friend john = new Friend(oid, "John"); collection.insertOne(john); long afterSave = new Date().getTime(); Friend result = collection.find(id(oid)).first(); assertThat(result.getId()).isEqualTo(oid); assertThat(john.getId().getDate().getTime()).isLessThan(afterSave); //insert } @Test public void canInsertWithACustomTypeId() throws Exception { MongoCollection friends = collection.withDocumentClass(ExternalFriend.class); ExternalFriend john = new ExternalFriend("999", "Robert"); friends.insertOne(john); ExternalFriend result = friends.find().first(); assertThat(result.getId()).isEqualTo("999"); } @Test public void canInsertWithObjectIdAsString() throws Exception { MongoCollection friends = collection.withDocumentClass(ExposableFriend.class); String id = ObjectId.get().toString(); ExposableFriend john = new ExposableFriend(id, "Robert"); friends.insertOne(john); ExposableFriend result = friends.find().first(); assertThat(result).isNotNull(); assertThat(result.getId()).isEqualTo(id); } @Test public void canInsertAPojoWithAnEmptyObjectIdAsString() throws Exception { MongoCollection friends = collection.withDocumentClass(ExposableFriend.class); ExposableFriend john = new ExposableFriend("Robert"); friends.insertOne(john); ExposableFriend result = friends.find().first(); assertThat(result).isNotNull(); assertThat(result.getId()).isNotNull(); } @Test public void canInsertAnObjectWithoutIdAnnotation() throws Exception { MongoCollection coordinates = collection.withDocumentClass(Coordinate.class); Coordinate noId = new Coordinate(123, 1); coordinates.insertOne(noId); Coordinate result = coordinates.find().first(); assertThat(result).isNotNull(); 
assertThat(result.lat).isEqualTo(123); } @Test public void canOnlyInsertOnceAPojoWithObjectId() throws Exception { ObjectId id = ObjectId.get(); collection.insertOne(new Friend(id, "John")); try { collection.insertOne(new Friend(id, "John")); Assert.fail(); } catch (MongoWriteException e) { assertThat(e).hasMessageContaining("E11000"); } } @Test public void canOnlyInsertOnceAPojoWithACustomId() throws Exception { MongoCollection friends = jongo.wrap(database.getCollection("friends", ExternalFriend.class)); friends.insertOne(new ExternalFriend("122", "value")); try { friends.insertOne(new ExternalFriend("122", "other value")); Assert.fail(); } catch (MongoWriteException e) { assertThat(e).hasMessageContaining("E11000"); } } @Test public void canInsertAListOfDocuments() throws Exception { collection.insertMany(newArrayList(new Friend("John"), new Friend("Robert"))); assertThat(collection.count()).isEqualTo(2); Iterable friends = collection.find(); assertThat(friends).extracting("name").containsExactly("John", "Robert"); } }
language: java | AST_depth: 13 | alphanumeric_fraction: 0.678408 | max_line_length: 119 | avg_line_length: 31.064706 | num_lines: 170 | source: starcoderdata
<?php /** * Zend Framework * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://framework.zend.com/license/new-bsd * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to so we can send you a copy immediately. * * @category Zend * @package Zend_Queue * @subpackage UnitTests * @copyright Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @version $Id: StompIOTest.php 23522 2010-12-16 20:33:22Z andries $ */ /* * The adapter test class provides a universal test class for all of the * abstract methods. * * All methods marked not supported are explictly checked for for throwing * an exception. */ /** Zend/Queue/Adapter/Stomp/IO.php */ require_once 'Zend/Queue/Adapter/Stomp/IO.php'; /** * @category Zend * @package Zend_Queue * @subpackage UnitTests * @copyright Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @group Zend_Queue */ class Zend_Queue_Adapter_StompIOTest extends PHPUnit_Framework_TestCase { protected $config = array( 'scheme' => 'tcp', 'host' => '127.0.0.1', 'port' => 61613, ); protected $io = false; protected $body = 'hello world'; // 11 characters public function setUp() { if ( $this->io === false ) { $this->io = new Zend_Queue_Adapter_Stomp_IO($this->config); } } public function test_constructFrame() { $frame = $this->io->constructFrame('SEND', array(), $this->body); $correct = 'SEND' . Zend_Queue_Adapter_Stomp_IO::EOL; $correct .= 'content-length: 11' . 
Zend_Queue_Adapter_Stomp_IO::EOL; $correct .= Zend_Queue_Adapter_Stomp_IO::EOL; $correct .= $this->body; $correct .= Zend_Queue_Adapter_Stomp_IO::END_OF_FRAME; $this->assertEquals($frame, $correct); // validate parameters try { $frame = $this->io->constructFrame(array()); $this->fail('constructFrame() should have failed $action as an array'); } catch (Exception $e) { $this->assertTrue(true); } try { // this won't test, I think because phpunit suppresses the error $frame = $this->io->constructFrame('SEND', 'string'); $this->fail('constructFrame() should have failed $headers as a string'); } catch (Exception $e) { $this->assertTrue(true); } try { // this won't test, I think because phpunit suppresses the error $frame = $this->io->constructFrame('SEND', array(), array()); $this->fail('constructFrame() should have failed $body as a array'); } catch (Exception $e) { $this->assertTrue(true); } } public function test_deconstructFrame() { $correct = array( 'headers' => array(), 'body' => $this->body, 'command' => 'SEND' ); $frame = $this->io->constructFrame($correct['command'], array(), $correct['body']); $frame = $this->io->deconstructFrame($frame); $this->assertEquals($correct['command'], $frame['command']); $this->assertEquals($correct['body'], $frame['body']); // validate parameters try { $frame = $this->io->deconstructFrame(array()); $this->fail('deconstructFrame() should have failed with an array'); } catch (Exception $e) { $this->assertTrue(true); } } public function test_write_read() { $frame = $this->io->constructFrame('CONNECT'); $frame = $this->io->writeAndRead($frame); $headers = array( 'destination' => '/queue/testing', 'ack' => 'auto' ); $frame = $this->io->constructFrame('SEND', $headers, $this->body); $this->io->write($frame); $frame = $this->io->constructFrame('SUBSCRIBE', $headers); $this->io->write($frame); $frame = $this->io->read(); $frame = $this->io->deconstructFrame($frame); $this->assertEquals($this->body, $frame['body']); // validate parameters try { $frame = $this->io->write(array()); $this->fail('write() should have failed with an array'); } catch (Exception $e) { $this->assertTrue(true); } } public function test_open_close() { try { $obj = new Zend_Queue_Adapter_Stomp_IO($this->config); } catch (Exception $e) { $this->fail('failed to create Zend_Queue_Adapter_Stomp_IO object:' . $e->getMessage()); } try { $obj->close(); } catch (Exception $e) { $this->fail('failed to close Zend_Queue_Adapter_Stomp_IO object:' . $e->getMessage()); } // validate parameters $config = array( 'scheme' => 'tcp', 'host' => 'blahblahb asfd', 'port' => '0' ); try { $frame = $this->io->open($config); $this->fail('open() should have failed with an invalid configuration'); } catch (Exception $e) { $this->assertTrue(true); } } public function test_constant() { $this->assertTrue(is_string(Zend_Queue_Adapter_Stomp_IO::END_OF_FRAME)); $this->assertTrue(is_string(Zend_Queue_Adapter_Stomp_IO::CONTENT_LENGTH)); $this->assertTrue(is_string(Zend_Queue_Adapter_Stomp_IO::EOL)); } public function test_checkSocket() { $this->assertTrue($this->io->checkSocket()); $this->io->close(); try { $this->io->checkSocket(); $this->fail('checkSocket() should have failed on a fclose($socket)'); } catch (Exception $e) { $this->assertTrue(true); } } }
language: php | AST_depth: 15 | alphanumeric_fraction: 0.566146 | max_line_length: 99 | avg_line_length: 30.847716 | num_lines: 197 | source: starcoderdata
// // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. // namespace Microsoft.Azure.Management.DataMigration.Models { using Microsoft.Rest; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; /// /// Describes a database within a MongoDB data source /// public partial class MongoDbDatabaseInfo : MongoDbObjectInfo { /// /// Initializes a new instance of the MongoDbDatabaseInfo class. /// public MongoDbDatabaseInfo() { CustomInit(); } /// /// Initializes a new instance of the MongoDbDatabaseInfo class. /// /// <param name="averageDocumentSize">The average document size, or -1 /// if the average size is unknown /// <param name="dataSize">The estimated total data size, in bytes, or /// -1 if the size is unknown. /// <param name="documentCount">The estimated total number of /// documents, or -1 if the document count is unknown /// <param name="name">The unqualified name of the database or /// collection /// <param name="qualifiedName">The qualified name of the database or /// collection. For a collection, this is the database-qualified /// name. /// <param name="collections">A list of supported collections in a /// MongoDB database /// <param name="supportsSharding">Whether the database has sharding /// enabled. Note that the migration task will enable sharding on the /// target if necessary. public MongoDbDatabaseInfo(long averageDocumentSize, long dataSize, long documentCount, string name, string qualifiedName, IList collections, bool supportsSharding) : base(averageDocumentSize, dataSize, documentCount, name, qualifiedName) { Collections = collections; SupportsSharding = supportsSharding; CustomInit(); } /// /// An initialization method that performs custom operations like setting defaults /// partial void CustomInit(); /// /// Gets or sets a list of supported collections in a MongoDB database /// [JsonProperty(PropertyName = "collections")] public IList Collections { get; set; } /// /// Gets or sets whether the database has sharding enabled. Note that /// the migration task will enable sharding on the target if necessary. /// [JsonProperty(PropertyName = "supportsSharding")] public bool SupportsSharding { get; set; } /// /// Validate the object. /// /// <exception cref="ValidationException"> /// Thrown if validation fails /// public override void Validate() { base.Validate(); if (Collections == null) { throw new ValidationException(ValidationRules.CannotBeNull, "Collections"); } if (Collections != null) { foreach (var element in Collections) { if (element != null) { element.Validate(); } } } } } }
language: c# | AST_depth: 16 | alphanumeric_fraction: 0.600717 | max_line_length: 195 | avg_line_length: 37.303922 | num_lines: 102 | source: starcoderdata
void PWSfileV3::StretchKey(const unsigned char *salt, unsigned long saltLen, const StringX &passkey, unsigned int N, unsigned char *Ptag) { /* * P' is the "stretched key" of the user's passphrase and the SALT, as defined * by the hash-function-based key stretching algorithm in * http://www.schneier.com/paper-low-entropy.pdf (Section 4.1), with SHA-256 * as the hash function, and N iterations. */ size_t passLen = 0; unsigned char *pstr = NULL; ConvertString(passkey, pstr, passLen); unsigned char *X = Ptag; SHA256 H0; H0.Update(pstr, passLen); H0.Update(salt, saltLen); H0.Final(X); trashMemory(pstr, passLen); delete[] pstr; ASSERT(N >= MIN_HASH_ITERATIONS); // minimal value we're willing to use for (unsigned int i = 0; i < N; i++) { SHA256 H; // The 2nd param in next line was sizeof(X) in Beta-1 // (bug #1451422). This change broke the ability to read beta-1 // generated databases. If this is really needed, we should // hack the read functionality to try both variants (ugh). H.Update(X, SHA256::HASHLEN); H.Final(X); } }
language: c++ | AST_depth: 9 | alphanumeric_fraction: 0.645329 | max_line_length: 79 | avg_line_length: 33.029412 | num_lines: 34 | source: inline
def child(self, childName): """Return a fake child""" if childName in self.children: return self.children[childName] ret = DummyFile(self.name + "/" + childName) self.children[childName] = ret return ret
language: python | AST_depth: 10 | alphanumeric_fraction: 0.592157 | max_line_length: 52 | avg_line_length: 35.571429 | num_lines: 7 | source: inline
void Flags<Flags1, Flags2, Flags3, Flags4, Flags5>::add( Option<T>* option, const std::string& name, const std::string& help) { Flag flag; flag.name = name; flag.help = help; flag.boolean = typeid(T) == typeid(bool); flag.loader = std::tr1::bind( &OptionLoader<T>::load, option, std::tr1::function<Try<T>(const std::string&)>( std::tr1::bind(&parse<T>, std::tr1::placeholders::_1)), name, std::tr1::placeholders::_2); // Use _2 because ignore FlagsBase*. FlagsBase::add(flag); }
language: c++ | AST_depth: 17 | alphanumeric_fraction: 0.600368 | max_line_length: 71 | avg_line_length: 27.631579 | num_lines: 19 | source: inline
package com.rcircle.service.resource.service; import com.rcircle.service.resource.mapper.QuotationMapper; import com.rcircle.service.resource.model.Quotation; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.util.Random; @Service public class QuotationService { public static final int TYPE_MINI_ID = 0; public static final int TYPE_MAX_ID = 1; public static final int TYPE_RAND_ID = 2; @Autowired private QuotationMapper quotationMapper; public int getId(int type) { int ret = 1; int max = quotationMapper.getMaxId(); int min = quotationMapper.getMiniId(); switch (type) { case TYPE_MAX_ID: ret = max; break; case TYPE_MINI_ID: ret = min; break; case TYPE_RAND_ID: Random random = new Random(); ret = min + random.nextInt(max - min); break; } return ret; } public Quotation getQuotationById(int id) { return quotationMapper.getQuotation(id); } }
language: java | AST_depth: 14 | alphanumeric_fraction: 0.613208 | max_line_length: 62 | avg_line_length: 27.439024 | num_lines: 41 | source: starcoderdata
def select_frequent_k(input_names: List[str], weighted_actual_names: List[List[Tuple[str, float, int]]], candidate_names: np.array, k, input_names_only=False, all_actuals=False, ) -> Tuple[List[str], List[List[Tuple[str, float, int]]], np.array]: """ Filter dataset to have only the most-frequent k names """ selected_names = set(frequent_k_names(input_names, weighted_actual_names, k, input_names_only)) return filter_dataset(input_names, weighted_actual_names, selected_names, all_actuals=all_actuals)
language: python | AST_depth: 15 | alphanumeric_fraction: 0.573171 | max_line_length: 102 | avg_line_length: 49.538462 | num_lines: 13 | source: inline
class segment_tree: def __init__(self, N, operator_M, e_M): self.op_M = operator_M self.e_M = e_M self.N0 = 1<<(N-1).bit_length() self.dat = [self.e_M]*(2*self.N0) # 長さNの配列 initial で初期化 def build(self, initial): self.dat[self.N0:self.N0+len(initial)] = initial[:] for k in range(self.N0-1,0,-1): self.dat[k] = self.op_M(self.dat[2*k], self.dat[2*k+1]) # a_k の値を x に更新 def update(self,k,x): k += self.N0 self.dat[k] = x k //= 2 while k: self.dat[k] = self.op_M(self.dat[2*k], self.dat[2*k+1]) k //= 2 # 区間[L,R]をopでまとめる def query(self,L,R): L += self.N0; R += self.N0 + 1 sl = sr = self.e_M while L < R: if R & 1: R -= 1 sr = self.op_M(self.dat[R],sr) if L & 1: sl = self.op_M(sl,self.dat[L]) L += 1 L >>= 1; R >>= 1 return self.op_M(sl,sr) def get(self, k): #k番目の値を取得。query[k,k]と同じ return self.dat[k+self.N0] # coding: utf-8 # Your code here! import sys read = sys.stdin.read readline = sys.stdin.readline sys.setrecursionlimit(10**5) n,*p = map(int,read().split()) INF = 1<<30 p += [INF,INF] def argmin(i,j): if p[i] < p[j]: return i else: return j even = segment_tree(n//2+1,argmin,n) odd = segment_tree(n//2+1,argmin,n+1) even.build(range(0,n+2,2)) odd.build(range(1,n+2,2)) ans = [0]*n def get(i,j): return odd.query(i//2,(j-1)//2) if i%2 else even.query(i//2,(j-1)//2) from heapq import * k = get(0,n-1) q = [(p[k],k,0,n-1)] for I in range(n//2): v,k,i,j = heappop(q) l = even.query((k+1)//2,j//2) if i%2 else odd.query((k+1)//2,j//2) #print(v,p[l]) ans[2*I] = v ans[2*I+1] = p[l] if i < k+1: kk = get(i,k-1) heappush(q,(p[kk],kk,i,k-1)) if k+1<l-1: kk = get(k+1,l-1) heappush(q,(p[kk],kk,k+1,l-1)) if l+1 < j: kk = get(l+1,j) heappush(q,(p[kk],kk,l+1,j)) print(*ans) #x = merge3([[1,0],[4,0]],[[2,0]],[[3,0],[6,0]]) #print(x)
language: python | AST_depth: 15 | alphanumeric_fraction: 0.472081 | max_line_length: 74 | avg_line_length: 21.572917 | num_lines: 96 | source: codenet
<?php $lang = array( 'all_pages' => 'Tutte le pagine', 'channel_entry' => 'Voce canale', 'choose_entry' => 'Scegli voce', 'choose_template' => 'Scegli il modello per la visualizzazione della pagina', 'configuration_updated' => 'Configurazione aggiornata', 'create_entry' => 'Crea voce', 'create_new' => 'Crea nuova', 'create_page' => 'Crea nuova pagina', 'create_template' => 'Crea template', 'default_for_page_creation' => 'Canale predefinito per la scheda "Crea nuova pagina"', 'default_template' => 'Template predefinito', 'duplicate_page_uri' => 'URI pagina duplicato', 'edit_page' => 'Modifica pagina', 'example_uri' => '/example/pages/uri/', 'filter_channels' => 'filtra canali', 'invalid_page_name' => 'È stato inviato un nome di pagina non valido', 'invalid_page_num_segs' => 'Hai superato il numero di segmenti URI consentiti nell\'URL delle pagine', 'invalid_page_uri' => 'URI pagina non valido', 'invalid_template' => 'È necessario selezionare un template valido per visualizzare questa pagina.', 'nested' => 'Nidificato', 'no_default' => 'Nessun valore predefinito', 'no_pages' => 'Nessuna pagina attualmente esistente', 'not_nested' => 'Non nidificato', 'page' => 'Pagina', 'page_created' => 'Pagina creata', 'page_name' => 'Nome pagina', 'page_settings' => 'Impostazioni pagina', 'page_updated' => 'Pagina aggiornata', 'page_url' => 'URL pagina', 'pages_channel' => 'Canale', 'pages_channel_desc' => 'Canale predefinito per la creazione di nuove pagine.', 'pages_configuration' => 'Configurazione modulo', 'pages_deleted_desc' => 'I seguenti URL di pagina sono stati eliminati', 'pages_display_on_homepage' => 'Visualizzazione degli URI sulla home page del modulo', 'pages_display_urls' => 'Visualizzazione degli <abbr title="Unified Resource Locator">URL 'pages_display_urls_desc' => 'Se impostato su gli <abbr title="Unified Resource Locator">URL verranno visualizzati in una gerarchia nidificata.', 'pages_homepage' => 'Home page delle pagine', 'pages_manager' => 'Gestore pagine', 'pages_module_description' => 'Utilizza le voci del canale per creare delle pagine statiche', 'pages_module_name' => 'Pagine', 'pages_no_default' => 'Nessun valore predefinito', 'pages_settings' => 'Impostazioni pagina', 'pages_templates' => 'Template', 'pages_templates_desc' => 'Template predefiniti per ciascun canale.', 'pages_uri' => 'URI pagine', 'pages_url_nested' => 'Nidificato', 'pages_url_not_nested' => 'Non nidificato', 'parent_page' => 'Pagina padre', 'preference' => 'Preferenza', 'preference_name' => 'Nome preferenza', 'preference_value' => 'Valore preferenza', 'setting' => 'Impostazione', 'template' => 'Template', 'view_page' => 'Visualizza pagina', ''=>'' ); // End of File
language: php | AST_depth: 6 | alphanumeric_fraction: 0.678246 | max_line_length: 152 | avg_line_length: 15.770588 | num_lines: 170 | source: starcoderdata
def test_continuous(self): """ Check the sum of s,i,r=1 across t """ b=1 k=1/3 time=150 ii=0.01 sol1=SIR_continuous(b,k,time,ii) sumy=sol1.y[0]+sol1.y[1]+sol1.y[2] self.assertAlmostEqual(np.linalg.norm(sumy),np.sqrt(time))
language: python | AST_depth: 9 | alphanumeric_fraction: 0.513158 | max_line_length: 66 | avg_line_length: 22.461538 | num_lines: 13 | source: inline
import $ from 'jquery' import loadingImg from './loadingImg' import * as u from './util' import * as Bacon from 'baconjs' const SCREENSHOT_LIMIT_ERROR = () => new Bacon.Error('Screenshot limit reached!') const fileTypes = ['image/png', 'image/jpeg'] export function onPaste(e, saver, onValueChanged, limit) { const clipboardData = e.originalEvent.clipboardData const file = clipboardData.items && clipboardData.items.length > 0 && clipboardData.items[0].getAsFile() if (file) { onPasteBlob(e, file, saver, $(e.currentTarget), onValueChanged, limit) } else { const clipboardDataAsHtml = clipboardData.getData('text/html') if (clipboardDataAsHtml) onPasteHtml(e, $(e.currentTarget), clipboardDataAsHtml, limit, saver, onValueChanged) else onLegacyPasteImage($(e.currentTarget), saver, limit, onValueChanged) } } function onPasteBlob(event, file, saver, $answer, onValueChanged, limit) { event.preventDefault() if (fileTypes.indexOf(file.type) >= 0) { if (u.existingScreenshotCount($answer) + 1 <= limit) { saver({ data: file, type: file.type, id: String(new Date().getTime()) }).then(screenshotUrl => { const img = `<img src="${screenshotUrl}"/>` window.document.execCommand('insertHTML', false, img) }) } else { onValueChanged(SCREENSHOT_LIMIT_ERROR()) } } } function onPasteHtml(event, $answer, clipboardDataAsHtml, limit, saver, onValueChanged) { event.preventDefault() if (totalImageCount($answer, clipboardDataAsHtml) <= limit) { window.document.execCommand('insertHTML', false, u.sanitize(clipboardDataAsHtml)) persistInlineImages($answer, saver, limit, onValueChanged) } else { onValueChanged(SCREENSHOT_LIMIT_ERROR()) } } function onLegacyPasteImage($editor, saver, limit, onValueChanged) { persistInlineImages($editor, saver, limit, onValueChanged) } function checkForImageLimit($editor, imageData, limit) { return Bacon.once(u.existingScreenshotCount($editor) > limit ? new Bacon.Error() : imageData) } function persistInlineImages($editor, screenshotSaver, screenshotCountLimit, onValueChanged) { setTimeout( () => Bacon.combineAsArray( markAndGetInlineImages($editor).map(data => checkForImageLimit($editor, data, screenshotCountLimit) .doError(() => onValueChanged(SCREENSHOT_LIMIT_ERROR())) .flatMapLatest(() => Bacon.fromPromise(screenshotSaver(data))) .doAction(screenShotUrl => data.$el.attr('src', screenShotUrl)) .doError(() => data.$el.remove()) ) ).onValue(() => $editor.trigger('input')), 0 ) } function totalImageCount($answer, clipboardDataAsHtml) { return u.existingScreenshotCount($answer) + u.existingScreenshotCount($(` } function markAndGetInlineImages($editor) { const images = $editor .find('img[src^="data"]') .toArray() .map(el => Object.assign(decodeBase64Image(el.getAttribute('src')), { $el: $(el) }) ) images .filter(({ type }) => fileTypes.indexOf(type) === -1 && type !== 'image/svg+xml') .forEach(({ $el }) => $el.remove()) const pngImages = images.filter(({ type }) => fileTypes.indexOf(type) >= 0) pngImages.forEach(({ $el }) => $el.attr('src', loadingImg)) return pngImages } function decodeBase64Image(dataString) { if (!dataString) return null const matches = dataString.match(/^data:([A-Za-z-+/]+);base64,(.+)$/) if (matches.length !== 3) { return null } return { type: matches[1], data: new Buffer(matches[2], 'base64') } }
language: javascript | AST_depth: 27 | alphanumeric_fraction: 0.629122 | max_line_length: 118 | avg_line_length: 38.029703 | num_lines: 101 | source: starcoderdata
// BUCKET.HPP #include #include template <typename T, typename RandomAccessIterator> void bucket_sort(RandomAccessIterator begin, RandomAccessIterator end, int rad) { // 1) Создаем корзину std::vector bucket[rad]; // 2) Распределяем массив в различные корзины // (по n разряду) 256 -> [2]56 for (RandomAccessIterator iter = begin; iter < end; iter++) { int bi = (*iter) / rad; bucket[bi].push_back(*iter); } // 3) Отдельно сортируем каждую корзину for (int i = 0; i < rad; i++) std::sort(bucket[i].begin(), bucket[i].end()); // 4) Сливаем отсортированные части в исходный массив // int index = 0; for (int i = 0; i < rad; i++) { for (int j = 0; j < bucket[i].size(); j++) { *begin = bucket[i][j]; begin++; } } }
language: c++ | AST_depth: 13 | alphanumeric_fraction: 0.571429 | max_line_length: 81 | avg_line_length: 26.548387 | num_lines: 31 | source: starcoderdata
package types import ( "fmt" "github.com/lianxiangcloud/linkchain/libs/common" "github.com/lianxiangcloud/linkchain/libs/crypto" "github.com/lianxiangcloud/linkchain/libs/hexutil" ) type OrderType int //{"txType":"", "tokenGet":"", "amountGet":"", "tokenGive":"", "amountGive":"", "expires":"", "nonce":"", "blockHash":""} type Order struct { TokenGet common.Address `json:"tokenGet"` AmountGet *hexutil.Big `json:"amountGet"` TokenGive common.Address `json:"tokenGive"` AmountGive *hexutil.Big `json:"amountGive"` Expires hexutil.Uint64 `json:"expires"` Nonce hexutil.Uint64 `json:"nonce"` Maker common.Address `json:"maker"` } //Adaptation contract to generate hash func (r *Order) OrderToHash() common.Hash { b := fmt.Sprintf(`{"amountGet":"%s","amountGive":"%s","expires":%d,"maker":"%s","nonce":%d,"tokenGet":"%s","tokenGive":"%s"}`, r.AmountGet.ToInt().String(), r.AmountGive.ToInt().String(), uint64(r.Expires), r.Maker.Hex(), uint64(r.Nonce), r.TokenGet.Hex(), r.TokenGive.Hex(), ) return common.BytesToHash(crypto.Keccak256([]byte(b))) } type SignOrder struct { Order `json:"order"` V *hexutil.Big `json:"v"` S *hexutil.Big `json:"s"` R *hexutil.Big `json:"r"` }
language: go | AST_depth: 12 | alphanumeric_fraction: 0.661303 | max_line_length: 127 | avg_line_length: 27.906977 | num_lines: 43 | source: starcoderdata
package com.van.products.webservice.client.impl; import java.util.Date; import javax.jws.WebService; import com.van.products.webservice.client.ClientWebService; /**  * @className: ClientWebServiceImpl.java * @classDescription: Description here * @createTime: 2015年5月2日 * @author Van */ @WebService(endpointInterface = "com.van.products.webservice.client.ClientWebService" ,serviceName = "clientService") public class ClientWebServiceImpl implements ClientWebService { @Override public String service(String param) { System.out.println("+++++++++++" + new Date()); return "访问时间:" + new Date() + "携带参数:" + param; } }
language: java | AST_depth: 11 | alphanumeric_fraction: 0.728395 | max_line_length: 85 | avg_line_length: 23 | num_lines: 27 | source: starcoderdata
base_url = "https://uno-cpi-cat.herokuapp.com/" sta_url = "https://uno-cpi-sta.herokuapp.com/" test_url = "http://127.0.0.1:8000/login" # STA credentials campus_partner_user = " campus_partner_pwd = " admin_user = " admin_pwd = " community_partner_user = " community_partner_pwd = " url = test_url
language: python | AST_depth: 5 | alphanumeric_fraction: 0.682039 | max_line_length: 51 | avg_line_length: 23.235294 | num_lines: 17 | source: starcoderdata
import templateUrl from 'components/post.html'; class controller { constructor ($sce) { var location = null; if (this.post.acf.venue) { location = this.post.acf.venue; } else if (this.post.acf.name) { location = this.post.acf.name; } else if (this.post.acf.address) { location = this.post.acf.address; } this.mapUrl = location ? $sce.trustAsResourceUrl("https://www.google.com/maps/embed/v1/place?key=AIzaSyBgT9eRPtCKgZy93j9hdX_Fdiszu6K2Vcg&q=" + location) : null; } } export default { controller, templateUrl, bindings: { post: '<post', category: '<category' } };
language: javascript | AST_depth: 9 | alphanumeric_fraction: 0.66322 | max_line_length: 164 | avg_line_length: 24.111111 | num_lines: 27 | source: starcoderdata
#pragma once // LaminaFS is Copyright (c) 2016 // See LICENSE for license information. #include #include #include #include #ifdef _WIN32 #include #else #include #endif #include "shared_types.h" namespace laminaFS { namespace util { //! Simple thread-safe pool allocator template <typename T> class PoolAllocator { public: PoolAllocator() { } PoolAllocator(lfs_allocator_t &alloc, uint64_t capacity) { _alloc = alloc; _storage = reinterpret_cast sizeof(T) * capacity, alignof(T))); _capacity = capacity; // create bitmask and initialize all bits to available lldiv_t d = lldiv(capacity, 32); _bitmaskCount = d.quot + (d.rem != 0 ? 1 : 0); _bitmask = reinterpret_cast sizeof(uint32_t) * _bitmaskCount, alignof(uint32_t))); for (uint64_t i = 0 ; i < static_cast ++i) { _bitmask[i] = 0xFFFFFFFF; } if (d.rem != 0) { uint32_t newBitmask = 0; for (uint32_t i = 0; i < d.rem; ++i) { newBitmask |= 1 << i; } _bitmask[d.quot] = newBitmask; } } ~PoolAllocator() { _alloc.free(_alloc.allocator, _storage); _alloc.free(_alloc.allocator, _bitmask); } uint64_t getCapacity() const { return _capacity; } T *alloc() { T *result = nullptr; { std::lock_guard lock(_mutex); for (uint64_t i = 0; i < _bitmaskCount; ++i) { if (_bitmask[i] != 0) { #ifdef _WIN32 unsigned long bit; _BitScanForward(&bit, _bitmask[i]); #else uint64_t bit = static_cast - 1); #endif uint64_t index = i * 32 + bit; result = &_storage[index]; _bitmask[i] = _bitmask[i] & ~(1 << bit); break; } } } if (result) { result = new(result) T(); } return result; } void free(T *v) { if (!v || v < _storage || v >= _storage + _capacity) { return; } v->~T(); std::lock_guard lock(_mutex); uintptr_t index = (reinterpret_cast - reinterpret_cast / sizeof(T); lldiv_t d = lldiv(index, 32); _bitmask[d.quot] |= 1 << d.rem; } private: std::mutex _mutex; T *_storage = nullptr; uint32_t *_bitmask = nullptr; lfs_allocator_t _alloc; uint64_t _capacity = 0; uint64_t _bitmaskCount = 0; }; } }
language: c | AST_depth: 23 | alphanumeric_fraction: 0.617907 | max_line_length: 126 | avg_line_length: 20.825688 | num_lines: 109 | source: starcoderdata
@Test public void checkAllRefineFilters(){ driver.navigate().to("https://stopandshop.com/coupons-weekly-circular/digital-coupons/#/available"); WebElement refineButton = driver.findElement(By.cssSelector(".c-sort-filter.c-sort-filter--min-300")); refineButton.click(); JavascriptExecutor js = (JavascriptExecutor) driver; js.executeScript("window.scrollBy(0, 900)"); mainPage.javaWaitSec(1); List<WebElement> checkBoxFiltersList = driver.findElements(By.cssSelector(".c-settings-form__filter-checkbox.c-checkbox__input.js-update-filter")); System.out.println("Number of unchecked check boxes: " + checkBoxFiltersList.size()); for (int i = 0; i <checkBoxFiltersList.size() ; i++) { Actions action = new Actions(driver); action.moveToElement(checkBoxFiltersList.get(i)).click().perform(); mainPage.javaWaitSec(1); js.executeScript("window.scrollBy(0, 500)"); mainPage.javaWaitSec(1); checkBoxFiltersList = driver.findElements(By.cssSelector(".c-settings-form__filter-checkbox.c-checkbox__input.js-update-filter")); } // Assert if all checkboxes are checked List<WebElement> checkBoxFiltersListChecked = driver.findElements(By.xpath("//div[@class='c-checkbox']//input[@checked='true']")); System.out.println("Number of checked check boxes: " + checkBoxFiltersList.size()); Assert.assertEquals(checkBoxFiltersListChecked.size(), checkBoxFiltersList.size()); }
language: java | AST_depth: 12 | alphanumeric_fraction: 0.654014 | max_line_length: 128 | avg_line_length: 50.870968 | num_lines: 31 | source: inline
def load_img(self, data_file, img_folder, img_url_idx, img_id_idx): """ load_img download images from the url, save images into the given image folder and use image id as the image name :param data_file: input file which include information such as img_url, img_id :param img_folder: image folder where downloaded image are saved :param img_url_idx: column idx of img url in the data_file :param img_id_idx: column idx of img id in the data_file """ f = open(os.path.join(self.data_folder, data_file), "r", encoding="utf-8") next(f) print("start loading images") for line in f: image_url = line.split("\t")[img_url_idx] image_id = line.split("\t")[img_id_idx] img_path = os.path.join(self.img_rt_folder, img_folder) isExist = os.path.exists(img_path) if not isExist: # Create a new directory because it does not exist os.makedirs(img_path) print("The image directory is created!") filename = os.path.join(img_path, image_id + ".jpg") r = requests.get(image_url, stream=True, headers={'User-agent': 'Mozilla/5.0'}) if r.status_code == 200: with open(filename, 'wb') as f: r.raw.decode_content = True shutil.copyfileobj(r.raw, f) else: print("img can't be loaded")
language: python | AST_depth: 14 | alphanumeric_fraction: 0.562918 | max_line_length: 91 | avg_line_length: 48.833333 | num_lines: 30 | source: inline
void dump_pcb(ofdpaPktCb_t *pcb) { char *feild_name[] = { "FEILD_DMAC", "FEILD_SMAC", "FEILD_VLAN_0", "FEILD_VLAN_1", "FEILD_L3_TYPE", "FEILD_MPLS_2", "FEILD_MPLS_1", "FEILD_MPLS_0", "FEILD_CW", "FEILD_L3_HDR", /* layer 3 protocol header */ "FEILD_L4_HDR", /* layer 4 protocol header */ "FEILD_DATA", "FEILD_MAX", }; char *format = "\r\n %-15s = %5d, len = %5d"; char *pBuf = NULL; int i,j; int base = 0; base = pcb->pool_tail; pBuf = calloc(1,2048); printf("\r\n %-15s = %016llx","port",pcb->port); printf("\r\n %-15s = %5d","Base",base); for(i = 0 , j = 0; i < FEILD_MAX; i++){ if(pcb->feilds[i].len){ printf(format,feild_name[i], \ pcb->feilds[i].offset - base, \ pcb->feilds[i].len); memcpy(&pBuf[j],DP_GET_FEILD_ADDR(pcb, i),pcb->feilds[i].len); j += pcb->feilds[i].len; } else { printf("\r\n %-15s = %5s, len = %5s",feild_name[i],"--","--"); } } printf("\r\n"); dump_pkt(pBuf, pcb->pkt_len); free(pBuf); }
language: c | AST_depth: 14 | alphanumeric_fraction: 0.528487 | max_line_length: 66 | avg_line_length: 20.229167 | num_lines: 48 | source: inline
int main(void) { char oldname[80], newname[80]; /* prompt for file to rename and new name */ printf("Enter a directory to rename: "); gets(oldname); printf("Enter New name: "); gets(newname); /* Rename the file */ if (rename(oldname, newname) == 0) printf("\nThe directory %s is renamed to %s", oldname, newname); else perror("ERROR in renaming"); return 0; }
language: c | AST_depth: 9 | alphanumeric_fraction: 0.596618 | max_line_length: 69 | avg_line_length: 22.055556 | num_lines: 18 | source: inline
N=int(input()) chk = (2*N)**(1/2) k = chk//1 if N != k*(k+1)//2: print("No") else: print("Yes") k = int(k) print(k+1) ans_list = [[0 for i in range(k)] for j in range(k+1)] W1 = 0 H1 = 0 W2 = 0 H2 = 1 for i in range(N): ans_list[H1][W1] = i+1 ans_list[H2][W2] = i+1 if H1 == W1 : H1 = 0 W1 =W1+1 H2 = W1+1 W2 = 0 else: H1 += 1 W2 += 1 for i in range(k+1): print(str(k)+" "+" ".join(str(n) for n in ans_list[i]))
language: python | AST_depth: 14 | alphanumeric_fraction: 0.383803 | max_line_length: 63 | avg_line_length: 20.074074 | num_lines: 27 | source: codenet
// Copyright 2013-Present Couchbase, Inc. // // Use of this software is governed by the Business Source License included in // the file licenses/BSL-Couchbase.txt. As of the Change Date specified in that // file, in accordance with the Business Source License, use of this software // will be governed by the Apache License, Version 2.0, included in the file // licenses/APL2.txt. package service_def import ( "github.com/couchbase/goxdcr/base" "github.com/couchbase/goxdcr/metadata" ) // Returns: // 1. bucketInfo // 2. shouldUseAlternateAddressing // 3. connectionString // 4. err type BucketInfoGetter func() (map[string]interface{}, bool, string, error) type RemoteClusterSvc interface { RemoteClusterByRefId(refId string, refresh bool) (*metadata.RemoteClusterReference, error) RemoteClusterByRefName(refName string, refresh bool) (*metadata.RemoteClusterReference, error) RemoteClusterByUuid(uuid string, refresh bool) (*metadata.RemoteClusterReference, error) ValidateAddRemoteCluster(ref *metadata.RemoteClusterReference) error // skipConnectivityValidation is true when called from migration service AddRemoteCluster(ref *metadata.RemoteClusterReference, skipConnectivityValidation bool) error ValidateSetRemoteCluster(refName string, ref *metadata.RemoteClusterReference) error SetRemoteCluster(refName string, ref *metadata.RemoteClusterReference) error ValidateRemoteCluster(ref *metadata.RemoteClusterReference) error DelRemoteCluster(refName string) (*metadata.RemoteClusterReference, error) // Gets a map of cloned remote cluster references, with unique ID being the key RemoteClusters() (map[string]*metadata.RemoteClusterReference, error) // Remote Cluster Service may need to monitor target bucket info RequestRemoteMonitoring(spec *metadata.ReplicationSpecification) error UnRequestRemoteMonitoring(spec *metadata.ReplicationSpecification) error // get connection string for specified remote cluster // when isCapiReplication is false, return ref.activeHostName, which is rotated among target nodes for load balancing // when isCapiReplication is true, return the lexicographically smallest hostname in hostname list of ref, // so as to ensure that the same hostname is returned consistently // this is critical when the connection string returned will be used to retrieve target server vb map // otherwise different server vb maps may be returned by target due to an issue in elastic search plugin GetConnectionStringForRemoteCluster(ref *metadata.RemoteClusterReference, isCapiReplication bool) (string, error) // used by auditing and ui logging GetRemoteClusterNameFromClusterUuid(uuid string) string // Remote cluster service could return two different types of errors: // 1. unexpected internal server error // 2. validation error indicating the remote cluster involved is not valid or does not exist // Distinction between the different types of errors is needed by adminport to decide what status code it should return to client // To enable the distinction, remote cluster service wraps validation errors with additional info. // This method checks which type the passed in error is, and unwraps the underlying error for validation errors, // so as to hide the wrapping implementation from callers. // This method returns // 1. false and the original error for internal server errors. // 2. true and unwrapped error for validation errors. 
CheckAndUnwrapRemoteClusterError(err error) (bool, error) // Service call back function for remote cluster changed event RemoteClusterServiceCallback(path string, value []byte, rev interface{}) error // set the metadata change call back method // when the remote cluster service makes changes to remote cluster references, it needs to call the call back // explicitly, so that the actions can be taken immediately SetMetadataChangeHandlerCallback(callBack base.MetadataChangeHandlerCallback) // Checks to see if XDCR should use alternate addressing to contact the remote cluster ShouldUseAlternateAddress(ref *metadata.RemoteClusterReference) (bool, error) // Retrieves the last-updated capability matrix GetCapability(ref *metadata.RemoteClusterReference) (metadata.Capability, error) // Called by PipelineMgr to check to see if any pipelines should restart due to remoteClusterRef changes GetRefListForRestartAndClearState() ([]*metadata.RemoteClusterReference, error) // Gets the last pulled manifest GetManifestByUuid(uuid, bucketName string, forceRefresh, restAPIQuery bool) (manifest *metadata.CollectionsManifest, err error) // Get the last-known connectivity status GetConnectivityStatus(ref *metadata.RemoteClusterReference) (metadata.ConnectivityStatus, error) // Gets a list of references that have experienced auth errors and have not been queried before GetRefListForFirstTimeBadAuths() ([]*metadata.RemoteClusterReference, error) // Gives an API that returns the ability to retrieve target bucket info - note that this call may be heavy on ns_server GetBucketInfoGetter(ref *metadata.RemoteClusterReference, bucketName string) (BucketInfoGetter, error) }
language: go | AST_depth: 10 | alphanumeric_fraction: 0.80788 | max_line_length: 130 | avg_line_length: 54.129032 | num_lines: 93 | source: starcoderdata
package cn.shopee.beetl.tags; import cn.shopee.beetl.tags.exception.BeetlTagException; import cn.shopee.common.utils.ServletUtils; import com.sun.istack.Nullable; import org.springframework.util.Assert; import cn.shopee.beetl.tags.support.RequestContext; public abstract class RequestContextAwareTag extends TagSupport { public static final String REQUEST_CONTEXT_PAGE_ATTRIBUTE = "org.springframework.web.servlet.tags.REQUEST_CONTEXT"; @Nullable private RequestContext requestContext; protected final RequestContext getRequestContext() { Assert.state(this.requestContext != null, "No current RequestContext"); return this.requestContext; } protected abstract int doStartTagInternal() throws BeetlTagException; protected int doStartTag() throws BeetlTagException{ this.requestContext = (RequestContext) this.ctx.globalVar.get(REQUEST_CONTEXT_PAGE_ATTRIBUTE); if (this.requestContext == null) { this.requestContext = new RequestContext(ServletUtils.getRequest(),this.ctx.globalVar); this.ctx.globalVar.put(REQUEST_CONTEXT_PAGE_ATTRIBUTE, this.requestContext); } return doStartTagInternal(); } protected int doEndTag() throws BeetlTagException{ return 1; } public void doFinally() { this.requestContext = null; } }
language: java | AST_depth: 13 | alphanumeric_fraction: 0.792438 | max_line_length: 116 | avg_line_length: 30.609756 | num_lines: 41 | source: starcoderdata
package dojo.form_template_method; public class ResidentalSite extends Site { public ResidentalSite(int units, double rate) { super(units, rate); } public double getBillableAmount() { double base = getUnits() * getRate(); double tax = base * TAX_RATE; return base + tax; } }
language: java | AST_depth: 10 | alphanumeric_fraction: 0.69863 | max_line_length: 48 | avg_line_length: 18.466667 | num_lines: 15 | source: starcoderdata
func intersectionStableSorted(a0, a1, b0, b1 Point) (Point, bool) { var pt Point // Compute the normal of the plane through (a0, a1) in a stable way. aNorm := a0.Sub(a1.Vector).Cross(a0.Add(a1.Vector)) aNormLen := aNorm.Norm() bLen := b1.Sub(b0.Vector).Norm() // Compute the projection (i.e., signed distance) of b0 and b1 onto the // plane through (a0, a1). Distances are scaled by the length of aNorm. b0Dist, b0Error := projection(b0.Vector, aNorm, aNormLen, a0, a1) b1Dist, b1Error := projection(b1.Vector, aNorm, aNormLen, a0, a1) // The total distance from b0 to b1 measured perpendicularly to (a0,a1) is // |b0Dist - b1Dist|. Note that b0Dist and b1Dist generally have // opposite signs because b0 and b1 are on opposite sides of (a0, a1). The // code below finds the intersection point by interpolating along the edge // (b0, b1) to a fractional distance of b0Dist / (b0Dist - b1Dist). // // It can be shown that the maximum error in the interpolation fraction is // // (b0Dist * b1Error - b1Dist * b0Error) / (distSum * (distSum - errorSum)) // // We save ourselves some work by scaling the result and the error bound by // "distSum", since the result is normalized to be unit length anyway. distSum := math.Abs(b0Dist - b1Dist) errorSum := b0Error + b1Error if distSum <= errorSum { return pt, false // Error is unbounded in this case. } x := b1.Mul(b0Dist).Sub(b0.Mul(b1Dist)) err := bLen*math.Abs(b0Dist*b1Error-b1Dist*b0Error)/ (distSum-errorSum) + 2*distSum*epsilon // Finally we normalize the result, compute the corresponding error, and // check whether the total error is acceptable. xLen := x.Norm() maxError := intersectionError if err > (float64(maxError)-epsilon)*xLen { return pt, false } return Point{x.Mul(1 / xLen)}, true }
go
12
0.702613
78
39
45
inline
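The Go function above finds the crossing point by interpolating along (b0, b1) at the fraction b0Dist / (b0Dist - b1Dist), where the two signed distances have opposite signs. A tiny 2-D numeric sketch of that same interpolation idea (not the S2 implementation itself) may help:

# Toy 2-D illustration of the interpolation used above (not the S2 code itself).
# The segment endpoints sit on opposite sides of a line, at signed distances
# b0_dist and b1_dist; the crossing point divides the segment at the fraction
# f = b0_dist / (b0_dist - b1_dist).
b0, b1 = (0.0, 2.0), (4.0, -1.0)      # endpoints
b0_dist, b1_dist = 2.0, -1.0          # signed distances to the line y = 0

f = b0_dist / (b0_dist - b1_dist)     # 2 / 3
x = (b0[0] + f * (b1[0] - b0[0]),     # (2.666..., 0.0) lies exactly on y = 0
     b0[1] + f * (b1[1] - b0[1]))
print(f, x)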
import java.io.File; import java.io.FileNotFoundException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Scanner; public class TEST { public static void main(String[] args)throws FileNotFoundException { try (Scanner in=new Scanner(new File("input.txt"))) { PrintWriter out = new PrintWriter(new File("output.txt")); boolean flag=false; int n=in.nextInt(); if(n==1) { out.print("Yes"); out.close(); return; } int[] mas=new int[n+1]; for(int i=1;i<=n;i++) mas[i]=in.nextInt(); for(int i=1;i<=n;i++) { if(2*i<n) { if(mas[i]<=mas[2*i] && mas[i]<=mas[2*i+1]) { flag=true; continue; } else { flag=false; break; } } if(2*i==n) { if(mas[i]<=mas[2*i]) { flag=true; continue; } else { flag=false; break; } } if(2*i>n) break; } if(flag==true) out.print("Yes"); else out.print("No"); out.close(); } } }
java
18
0.488225
69
14.984615
65
starcoderdata
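The Java program above prints "Yes" exactly when the 1-indexed array satisfies the min-heap property mas[i] <= mas[2i] (and mas[2i+1] when that child exists). A hedged Python sketch of the same check, written more directly:

# Hedged Python equivalent of the check performed by the Java program above:
# a 1-indexed array is a min-heap iff every parent is <= each existing child.
def is_min_heap(values):
    n = len(values)
    a = [None] + list(values)          # shift to 1-based indexing
    for i in range(1, n + 1):
        for child in (2 * i, 2 * i + 1):
            if child <= n and a[i] > a[child]:
                return False
    return True

print("Yes" if is_min_heap([1, 3, 2, 5, 4]) else "No")  # Yes
print("Yes" if is_min_heap([2, 1, 3]) else "No")        # No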
import {
    controller,
    get,
    post,
    put,
    del,
    use,
    all
} from '../lib/decorator'
import {
    getAll,
    getByUsername
} from '../service/administrators'

@controller('/api/v0/administrators')
export class administratorsController {
    @get("/")
    async getAdministrators(ctx,next) {
        const result =await getAll()
        ctx.body = {
            result
        }
    }

    @get("/:username")
    async getAdministratorsByUsername(ctx,next) {
        const { username } = ctx.params
        const result =await getByUsername(username)
        ctx.body = {
            result
        }
    }
}
javascript
11
0.570048
51
17.294118
34
starcoderdata
func buildExposure(e *federation.ExposureKey, config *Config) (*publishmodel.Exposure, error) { upperRegions := make([]string, len(e.Regions)) for i, r := range e.Regions { upperRegions[i] = strings.ToUpper(project.TrimSpaceAndNonPrintable(r)) } sort.Strings(upperRegions) exposure := publishmodel.Exposure{ ExposureKey: e.ExposureKey, TransmissionRisk: int(e.TransmissionRisk), Regions: upperRegions, Traveler: e.Traveler, IntervalNumber: e.IntervalNumber, IntervalCount: e.IntervalCount, LocalProvenance: false, } switch e.ReportType { case federation.ExposureKey_CONFIRMED_TEST: exposure.ReportType = verifyapi.ReportTypeConfirmed case federation.ExposureKey_CONFIRMED_CLINICAL_DIAGNOSIS: exposure.ReportType = verifyapi.ReportTypeClinical case federation.ExposureKey_REVOKED: exposure.ReportType = verifyapi.ReportTypeNegative case federation.ExposureKey_SELF_REPORT: if config.AcceptSelfReport { exposure.ReportType = verifyapi.ReportTypeClinical } else { return nil, ErrInvalidReportType } case federation.ExposureKey_RECURSIVE: if config.AcceptRecursive { exposure.ReportType = verifyapi.ReportTypeClinical } else { return nil, ErrInvalidReportType } default: return nil, ErrInvalidReportType } // Maybe backfill transmission risk exposure.TransmissionRisk = publishmodel.ReportTypeTransmissionRisk(exposure.ReportType, exposure.TransmissionRisk) if e.HasSymptomOnset { if ds := e.DaysSinceOnsetOfSymptoms; ds >= -1*int32(config.MaxMagnitudeSymptomOnsetDays) && ds <= int32(config.MaxMagnitudeSymptomOnsetDays) { exposure.SetDaysSinceSymptomOnset(ds) } } return &exposure, nil }
go
12
0.771598
144
33.510204
49
inline
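The switch in buildExposure above maps federation report types onto verify-API report types and rejects SELF_REPORT and RECURSIVE keys unless the config explicitly opts in. A compact Python sketch of that decision table; the constant names and flags below are stand-ins, not the real package identifiers:

# Illustrative sketch of the report-type mapping above (names are hypothetical).
CONFIRMED_TEST, CLINICAL, REVOKED, SELF_REPORT, RECURSIVE = range(5)

def map_report_type(report_type, accept_self_report=False, accept_recursive=False):
    if report_type == CONFIRMED_TEST:
        return "confirmed"
    if report_type == CLINICAL:
        return "clinical"
    if report_type == REVOKED:
        return "negative"
    if report_type == SELF_REPORT and accept_self_report:
        return "clinical"
    if report_type == RECURSIVE and accept_recursive:
        return "clinical"
    raise ValueError("invalid report type")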
package eu.arrowhead.core.plantdescriptionengine.providedservices.pde_monitor.routehandlers; import eu.arrowhead.core.plantdescriptionengine.alarms.AlarmManager; import eu.arrowhead.core.plantdescriptionengine.alarms.AlarmSeverity; import eu.arrowhead.core.plantdescriptionengine.providedservices.dto.ErrorMessage; import eu.arrowhead.core.plantdescriptionengine.providedservices.pde_monitor.dto.PdeAlarm; import eu.arrowhead.core.plantdescriptionengine.providedservices.pde_monitor.dto.PdeAlarmDto; import eu.arrowhead.core.plantdescriptionengine.providedservices.pde_monitor.dto.PdeAlarmListDto; import eu.arrowhead.core.plantdescriptionengine.providedservices.requestvalidation.BooleanParameter; import eu.arrowhead.core.plantdescriptionengine.providedservices.requestvalidation.IntParameter; import eu.arrowhead.core.plantdescriptionengine.providedservices.requestvalidation.ParseError; import eu.arrowhead.core.plantdescriptionengine.providedservices.requestvalidation.QueryParamParser; import eu.arrowhead.core.plantdescriptionengine.providedservices.requestvalidation.QueryParameter; import eu.arrowhead.core.plantdescriptionengine.providedservices.requestvalidation.StringParameter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import se.arkalix.codec.CodecType; import se.arkalix.net.http.HttpStatus; import se.arkalix.net.http.service.HttpRouteHandler; import se.arkalix.net.http.service.HttpServiceRequest; import se.arkalix.net.http.service.HttpServiceResponse; import se.arkalix.util.concurrent.Future; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.Optional; /** * Handles HTTP requests to retrieve PDE alarms. */ public class GetAllPdeAlarms implements HttpRouteHandler { private static final Logger logger = LoggerFactory.getLogger(GetAllPdeAlarms.class); private final AlarmManager alarmManager; /** * Constructor. * * @param alarmManager Object used for managing PDE alarms. */ public GetAllPdeAlarms(final AlarmManager alarmManager) { Objects.requireNonNull(alarmManager, "Expected Alarm Manager."); this.alarmManager = alarmManager; } /** * Handles an HTTP call to acquire a list of PDE alarms raised by the PDE. * * @param request HTTP request object. * @param response HTTP response containing an alarm list. 
*/ @Override public Future<HttpServiceResponse> handle(final HttpServiceRequest request, final HttpServiceResponse response) { Objects.requireNonNull(request, "Expected request."); Objects.requireNonNull(response, "Expected response."); final List<String> severityValues = new ArrayList<>(); for (final AlarmSeverity severity : AlarmSeverity.values()) { severityValues.add(severity.toString().toLowerCase()); } // Add value corresponding to all severities that are not CLEARED: severityValues.add(PdeAlarm.NOT_CLEARED); final IntParameter itemPerPageParam = new IntParameter.Builder() .name(QueryParameter.ITEM_PER_PAGE) .min(0) .build(); final IntParameter pageParam = new IntParameter.Builder() .name(QueryParameter.PAGE) .min(0) .requires(itemPerPageParam) .build(); final StringParameter sortFieldParam = new StringParameter.Builder() .name(QueryParameter.SORT_FIELD) .legalValues( QueryParameter.ID, QueryParameter.RAISED_AT, QueryParameter.UPDATED_AT, QueryParameter.CLEARED_AT ) .build(); final StringParameter directionParam = new StringParameter.Builder() .name(QueryParameter.DIRECTION) .legalValues(QueryParameter.ASC, QueryParameter.DESC) .defaultValue(QueryParameter.ASC) .build(); final StringParameter systemNameParam = new StringParameter.Builder() .name(QueryParameter.SYSTEM_NAME) .build(); final StringParameter severityParam = new StringParameter.Builder() .name(QueryParameter.SEVERITY) .legalValues(severityValues) .build(); final BooleanParameter acknowledgedParam = new BooleanParameter.Builder() .name(QueryParameter.ACKNOWLEDGED) .build(); final List<QueryParameter> acceptedParameters = List.of(pageParam, sortFieldParam, directionParam, systemNameParam, systemNameParam, severityParam, acknowledgedParam); final QueryParamParser parser; try { parser = new QueryParamParser(null, acceptedParameters, request); } catch (final ParseError error) { logger.error("Encountered the following error(s) while parsing an HTTP request: " + error.getMessage()); response .status(HttpStatus.BAD_REQUEST) .body(ErrorMessage.of(error.getMessage()), CodecType.JSON); return Future.success(response); } List<PdeAlarmDto> alarms = alarmManager.getAlarms(); final Optional<String> sortField = parser.getValue(sortFieldParam); if (sortField.isPresent()) { final String sortDirection = parser.getRequiredValue(directionParam); final boolean ascending = QueryParameter.ASC.equals(sortDirection); switch (sortField.get()) { case QueryParameter.ID: PdeAlarm.sortById(alarms, ascending); break; case QueryParameter.RAISED_AT: PdeAlarm.sortByRaisedAt(alarms, ascending); break; case QueryParameter.UPDATED_AT: PdeAlarm.sortByUpdatedAt(alarms, ascending); break; case QueryParameter.CLEARED_AT: PdeAlarm.sortByClearedAt(alarms, ascending); break; default: // We should never reach this case, since the sortField // param has been validated by the parser. 
throw new AssertionError("Encountered the invalid sort field '" + sortField + "'."); } } final Optional<String> systemName = parser.getValue(systemNameParam); if (systemName.isPresent()) { PdeAlarm.filterBySystemName(alarms, systemName.get()); } final Optional<String> severityValue = parser.getValue(severityParam); if (severityValue.isPresent()) { PdeAlarm.filterBySeverity(alarms, severityValue.get()); } final Optional<Boolean> acknowledged = parser.getValue(acknowledgedParam); if (acknowledged.isPresent()) { PdeAlarm.filterAcknowledged(alarms, acknowledged.get()); } final int count = alarms.size(); final Optional<Integer> page = parser.getValue(pageParam); if (page.isPresent()) { final int itemsPerPage = parser.getRequiredValue(itemPerPageParam); final int from = Math.min(page.get() * itemsPerPage, alarms.size()); final int to = Math.min(from + itemsPerPage, alarms.size()); alarms = alarms.subList(from, to); } PdeAlarmListDto result = new PdeAlarmListDto.Builder() .data(alarms) .count(count) .build(); response .status(HttpStatus.OK) .body(result, CodecType.JSON); return Future.success(response); } }
java
16
0.670863
117
40.677596
183
research_code
import { InputValidator } from '../../../services'; import { RowFixture } from '../../fixtures'; describe('InputValidator', () => { describe('stream validation', () => { it('should return false when salary < 0', () => { let data = RowFixture.new({ annual_salary: -1 }); return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when missing salary', () => { let data = RowFixture.new(); delete data.annual_salary; return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when string salary', () => { let data = RowFixture.new({ annual_salary: 'invalid'}); return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when firstname null', () => { let data = RowFixture.new({ first_name: null}); return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when firstname empty', () => { let data = RowFixture.new({ first_name: ''}); return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when firstname missing', () => { let data = RowFixture.new(); delete data.first_name; return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when lastname null', () => { let data = RowFixture.new({ last_name: null}); return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when lastname missing', () => { let data = RowFixture.new(); delete data.last_name; return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when lastname empty', () => { let data = RowFixture.new({ last_name: ''}); return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when super rate null', () => { let data = RowFixture.new({ super_rate: null}); return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when negative super rate', () => { let data = RowFixture.new({ super_rate: -1}); return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when super rate > 50%', () => { let data = RowFixture.new({ super_rate: .6}); return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when payment date empty', () => { let data = RowFixture.new({ payment_start_date: ''}); return expect(InputValidator.validate(data)).to.be.false; }); it('should return false when payment date missing', () => { let data = RowFixture.new(); delete data.payment_start_date; return expect(InputValidator.validate(data)).to.be.false; }); }); });
javascript
24
0.615707
63
33.518072
83
starcoderdata
package com.frank;

public class ConcreteMediator implements Mediator {

    private ConcreteColleagueA colleagueA;

    private ConcreteColleagueB colleagueB;

    public void setConcreteColleagueA(ConcreteColleagueA colleague) {
        colleagueA = colleague;
    }

    public void setConcreteColleagueB(ConcreteColleagueB colleague) {
        colleagueB = colleague;
    }

    public void changed(Colleague colleague) {
        if (colleague == colleagueA){
            colleagueB.setHappy(!colleague.isHappy());
        }else {
            colleagueA.setHappy(!colleague.isHappy());
        }
    }

}
java
13
0.740506
66
21.444444
27
starcoderdata
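The mediator record above routes every state change through changed(), which flips the "happy" state of the opposite colleague. A small Python sketch of the same interaction, with hypothetical class names:

# Minimal mediator sketch mirroring the Java record above (names are hypothetical).
class Colleague:
    def __init__(self, mediator):
        self.mediator = mediator
        self.happy = False

    def set_happy(self, happy):
        self.happy = happy

    def change(self):                 # notify the mediator of a state change
        self.mediator.changed(self)


class Mediator:
    def __init__(self):
        self.a = Colleague(self)
        self.b = Colleague(self)

    def changed(self, colleague):     # the *other* colleague gets the opposite mood
        other = self.b if colleague is self.a else self.a
        other.set_happy(not colleague.happy)


m = Mediator()
m.a.set_happy(True)
m.a.change()
print(m.b.happy)  # False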
import os import sys import redis import tornado.ioloop from tornado import web import yaml from app import blog, piki from app.rss import generate_rss from app.handlers import (BlogFileHandler, BlogHandler, BlogIndexHandler, MainHandler, PikiFileHandler, PikiHandler) def make_app(config): srv_conf = config['server'] redis_client = redis.Redis() piki_root = srv_conf['piki_root'] blog_root = srv_conf['blog_root'] piki_reader = piki.PikiReader( redis_client=redis_client, piki_root=piki_root) blog_reader = blog.BlogReader( redis_client=redis_client, blog_root=blog_root) args = { 'redis_client': redis_client, 'piki_reader': piki_reader, 'blog_reader': blog_reader, 'github_comment_source': srv_conf['github_comment_source'], } file_dir = os.path.dirname(__file__) setting = { 'debug': srv_conf.get('debug', False), 'cookie_secret': srv_conf['cookie_secret'], 'static_path': os.path.join(file_dir, 'static'), 'template_path': os.path.join(file_dir, 'templates') } rss_file_path = os.path.join(file_dir, 'static', 'rss', 'rss.xml') generate_rss(srv_conf['site_domain'], rss_file_path, blog_reader) return tornado.web.Application([ (r'/', MainHandler, args), (r'/blog/', BlogIndexHandler, args), (r'/blog/(\d{4})-(\d{2})-(\d{2})/(.*)/', BlogHandler, args), (r'/blog/(\d{4})-(\d{2})-(\d{2})/(.*)', BlogFileHandler, {'path': blog_root}), (r'/piki/', web.RedirectHandler, {'url': '/piki/index/'}), (r'/piki/(.*)/', PikiHandler, args), (r'/piki/(.*)', PikiFileHandler, {'path': piki_root}), ], **setting) if __name__ == '__main__': with open(sys.argv[1]) as fp: config = yaml.safe_load(fp) app = make_app(config) port = config['server']['port'] app.listen(port) print('Start listening port', port) tornado.ioloop.IOLoop.current().start()
python
11
0.59217
73
27.027778
72
starcoderdata
package me.aggellos2001.survivaleuplugin.playerdata; import co.aikar.commands.annotation.*; import co.aikar.commands.bukkit.contexts.OnlinePlayer; import me.aggellos2001.survivaleuplugin.languages.Language; import me.aggellos2001.survivaleuplugin.modules.DonationBenefitCommand; import me.aggellos2001.survivaleuplugin.utils.PluginActivity; import me.aggellos2001.survivaleuplugin.utils.Utilities; import me.mattstudios.mfgui.gui.components.ItemBuilder; import me.mattstudios.mfgui.gui.guis.Gui; import me.mattstudios.mfgui.gui.guis.GuiItem; import org.bukkit.ChatColor; import org.bukkit.Material; import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import static me.aggellos2001.survivaleuplugin.utils.Utilities.createRenamedItemStack; @CommandAlias("settings") public final class PlayerDataCommand extends PluginActivity { private static final ItemStack ENABLED_BUTTON = createRenamedItemStack(Material.GREEN_WOOL, "&a&lON"); private static final ItemStack DISABLED_BUTTON = createRenamedItemStack(Material.RED_WOOL, "&c&lOFF"); private static GuiItem keepInventoryButton(final boolean keepingInventory) { return ItemBuilder .from(keepingInventory ? ENABLED_BUTTON.getType() : DISABLED_BUTTON.getType()) .setName(Utilities.colorize("&e&lKeep Inventory")).glow(true) .setLore(keepingInventory ? Utilities.colorize("&a&lON") : Utilities.colorize("&c&lOFF"), Utilities.colorize(" item dropping on death.")) .asGuiItem(); } private static GuiItem sitOnStairsButton(final boolean sittingOnStairs) { return ItemBuilder .from(sittingOnStairs ? ENABLED_BUTTON.getType() : DISABLED_BUTTON.getType()) .setLore(sittingOnStairs ? Utilities.colorize("&a&lON") : Utilities.colorize("&c&lOFF"), Utilities.colorize(" \"sitting\" on a stair block when right clicking it.")) .setName(Utilities.colorize("&e&lSit on stairs")).glow(true) .asGuiItem(); } private static GuiItem pvpButton(final boolean pvp) { return ItemBuilder .from(pvp ? ENABLED_BUTTON.getType() : DISABLED_BUTTON.getType()) .setLore(pvp ? 
Utilities.colorize("&a&lON") : Utilities.colorize("&c&lOFF"), Utilities.colorize(" if you want to fight with other players!")) .setName(Utilities.colorize("&e&lPvP")).glow(true) .asGuiItem(); } @Default protected static void settingsUI(final Player player) { final var playerData = PlayerDataEvent.getPlayerData(player); final var settingsMenu = new Gui(1, Utilities.colorize(" Menu")); settingsMenu.setDefaultClickAction(event -> { event.setCancelled(true); } ); //add slot actions outside itemset to update them on click settingsMenu.addSlotAction(2, event -> { playerData.keepingInventory = !playerData.keepingInventory; PlayerDataEvent.setPlayerData(player, playerData); settingsMenu.updateItem(2, keepInventoryButton(playerData.keepingInventory)); }); settingsMenu.addSlotAction(3, event -> { playerData.sittingOnStairs = !playerData.sittingOnStairs; PlayerDataEvent.setPlayerData(player, playerData); settingsMenu.updateItem(3, sitOnStairsButton(playerData.sittingOnStairs)); }); settingsMenu.addSlotAction(4, event -> { if (DonationBenefitCommand.hasDonationPotions(player)) { Utilities.sendMsg(player, Language.PVP_DONATION_POTIONS_DENIED.getTranslation(player)); return; } PlayerDataEvent.setPlayerData(player, playerData); playerData.pvp = !playerData.pvp; PlayerDataEvent.setPlayerData(player, playerData); settingsMenu.updateItem(4, pvpButton(playerData.pvp)); }); //add items to menu settingsMenu.setItem(2, keepInventoryButton(playerData.keepingInventory)); settingsMenu.setItem(3, sitOnStairsButton(playerData.sittingOnStairs)); settingsMenu.setItem(4, pvpButton(playerData.pvp)); settingsMenu.setItem(8, ItemBuilder.from(Material.BARRIER) .setName(Utilities.colorize("&4&lExit")).glow(true) .asGuiItem(event -> settingsMenu.close(event.getWhoClicked())) ); settingsMenu.setItem(5, ItemBuilder.from(Material.PAPER) .setName(Utilities.colorize(" chat color")) .setLore(Utilities.colorize(" color: &" + playerData.chatColor + Utilities.readableEnumName(ChatColor.getByChar(playerData.chatColor).name()))) .glow(true) .asGuiItem(event -> { ChatColorMenu.colorUI(((Player) event.getWhoClicked()), settingsMenu); }) ); settingsMenu.open(player); } @Subcommand("setsupportpin") private void setSupportPin(final Player player, final int supportPin) { if (supportPin < 1000 || supportPin > 9999) { Utilities.sendMsg(player, "&cSupport PIN must be a 4 digit number! (1000-9999)"); return; } final var playerDat = PlayerDataEvent.getPlayerData(player); playerDat.supportPIN = supportPin; PlayerDataEvent.setPlayerData(player, playerDat); Utilities.sendMsg(player, "&aSupport PIN set to &e" + supportPin + "&a!"); } @Subcommand("getsupportpIN") @Conditions("ConsoleOrOp") //only OP or console can check someones PIN @CommandCompletion("@players @nothing") private void getSupportPIN(final CommandSender sender, final OnlinePlayer player) { final var dat = PlayerDataEvent.getPlayerData(player.getPlayer()); if (dat == null || dat.supportPIN == 0) { Utilities.sendMsg(sender, "&cPlayer " + player.getPlayer().getName() + " has no PIN set!"); return; } Utilities.sendMsg(sender, "&aPlayer " + player.getPlayer().getName() + " has support PIN: &e" + dat.supportPIN + "!"); } }
java
21
0.751516
120
39.919708
137
starcoderdata
<?php /** * Created by PhpStorm. * User: zhangbobell * Date: 14-10-27 * Time: 下午5:49 */ class MY_Model extends CI_Model { function __construct() { parent::__construct(); } public function select_DB($databaseName) { // $db_config['hostname'] = '192.168.1.110'; // $db_config['username'] = 'kettle'; // $db_config['password'] = ' // $db_config['port'] = 33060; $db_config['hostname'] = 'tools.e-corp.cn'; $db_config['username'] = 'zczx_data'; $db_config['password'] = ' // $db_config['hostname'] = '127.0.0.1'; // $db_config['username'] = 'data'; // $db_config['password'] = ' $db_config['database'] = $databaseName; $db_config['dbdriver'] = 'mysqli'; $db_config['dbprefix'] = ''; $db_config['pconnect'] = TRUE; $db_config['db_debug'] = TRUE; $db_config['cache_on'] = FALSE; $db_config['cachedir'] = ''; $db_config['char_set'] = 'utf8'; $db_config['dbcollat'] = 'utf8_bin'; $db_config['swap_pre'] = ''; $db_config['autoinit'] = TRUE; $db_config['stricton'] = FALSE; return $db_config; } /* get_result_array: 根据数据库和 SQL 语句获取到结果数组 * @param : $db -- 选择的数据库 * $sql -- 要查询的语句 * $valArr[可选] -- 可以带入的参数 * $retArr[可选] -- 返回的数组中的字段 * @return : SQL 查询结果数组 * */ public function get_result_array($db, $sql, $valArr = null) { $config= $this->select_DB($db); $this->load->database($config); $query = $this->db->query($sql, $valArr); return $query->result_array(); } public function set_record($db, $sql, $valArr) { $config= $this->select_DB($db); $this->load->database($config); $this->db->query($sql, $valArr); if (mysql_errno() == 1062) { return -1; } return 0; } /* * my_query: 根据数据库和 SQL 语句获取到结果 * @param: $db -- 选择的数据库 * $sql -- SQL 语句 * $valArr[可选] -- SQL 语句中的参数 * return: object -- 查询的返回对象 * */ public function my_query($db, $sql, $valArr = null) { $config= $this->select_DB($db); $this->load->database($config); return $this->db->query($sql, $valArr); } /* * batch_insert: 批量插入数据库(也可用于更新,使用 on duplicate key update) * @param: $db -- 选择的数据库 * $sql -- SQL 语句(只带一个问号) * $valArr -- 包含要更新的值的二维数组,可以是关联数组 * $n[可选] -- 一次批量操作的数量, 默认是500条 * return: true/false * */ public function batch_insert($db, $sql, $valArr, $n = 500) { $i = 0; $sqlStr = ''; $res = true; // 最后返回的结果 foreach($valArr as $row) { foreach($row as $k => $v) { $row[$k] = str_replace("'", "\'", $v); } $rowStr = implode("', '", array_values($row)); $sqlStr = $sqlStr."('$rowStr'),"; $sqlStr = str_replace("''", 'NULL', $sqlStr); $i++; if ($i === $n) { $res = $res && $this->_exec_batch_insert($db, $sql, $sqlStr); $sqlStr = ''; $i = 0; } } // 最后剩余不足500条的,一起插入 if ($i != 0) { $res = $res && $this->_exec_batch_insert($db, $sql, $sqlStr); } return $res; } /* * _exec_batch_insert: 执行批量插入数据库 * @param: $db -- 选择的数据库 * $sql -- SQL 语句(只带一个问号) * $sqlStr -- 拼凑好的 sql 中的值语句,带结尾的“,”的 * return: true/false * */ private function _exec_batch_insert($db, $sql, $sqlStr) { $sqlStr = rtrim($sqlStr, ','); $sql = str_replace('?', $sqlStr, $sql); return $this->my_query($db, $sql); } }
php
16
0.466493
77
26.992701
137
starcoderdata
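batch_insert in the PHP record above accumulates escaped value tuples and flushes them to the database every n rows (default 500), with a final flush for whatever remains. A hedged Python sketch of the same chunk-and-flush pattern; execute below is a stand-in for whatever actually runs the SQL:

# Sketch of the chunk-and-flush pattern used by batch_insert above.
def batch_insert(execute, sql_template, rows, n=500):
    ok, values_sql = True, []
    for row in rows:
        escaped = [str(v).replace("'", "\\'") for v in row.values()]
        values_sql.append("('" + "', '".join(escaped) + "')")
        if len(values_sql) == n:                       # flush a full chunk
            ok = execute(sql_template.replace("?", ", ".join(values_sql))) and ok
            values_sql = []
    if values_sql:                                     # flush the remainder (< n rows)
        ok = execute(sql_template.replace("?", ", ".join(values_sql))) and ok
    return ok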
// Copyright © Contributors. Licensed under the MIT License (MIT). See License.md in the repository root for more information. // Ported from um/verrsrc.h in the Windows SDK for Windows 10.0.22000.0 // Original source is Copyright © Microsoft. All rights reserved. namespace TerraFX.Interop.Windows; public static unsafe partial class Windows { [NativeTypeName("#define VS_FILE_INFO RT_VERSION")] public static ushort* VS_FILE_INFO => ((ushort*)((nuint)((ushort)(16)))); [NativeTypeName("#define VS_VERSION_INFO 1")] public const int VS_VERSION_INFO = 1; [NativeTypeName("#define VS_USER_DEFINED 100")] public const int VS_USER_DEFINED = 100; [NativeTypeName("#define VS_FFI_SIGNATURE 0xFEEF04BDL")] public const uint VS_FFI_SIGNATURE = 0xFEEF04BD; [NativeTypeName("#define VS_FFI_STRUCVERSION 0x00010000L")] public const int VS_FFI_STRUCVERSION = 0x00010000; [NativeTypeName("#define VS_FFI_FILEFLAGSMASK 0x0000003FL")] public const int VS_FFI_FILEFLAGSMASK = 0x0000003F; [NativeTypeName("#define VS_FF_DEBUG 0x00000001L")] public const int VS_FF_DEBUG = 0x00000001; [NativeTypeName("#define VS_FF_PRERELEASE 0x00000002L")] public const int VS_FF_PRERELEASE = 0x00000002; [NativeTypeName("#define VS_FF_PATCHED 0x00000004L")] public const int VS_FF_PATCHED = 0x00000004; [NativeTypeName("#define VS_FF_PRIVATEBUILD 0x00000008L")] public const int VS_FF_PRIVATEBUILD = 0x00000008; [NativeTypeName("#define VS_FF_INFOINFERRED 0x00000010L")] public const int VS_FF_INFOINFERRED = 0x00000010; [NativeTypeName("#define VS_FF_SPECIALBUILD 0x00000020L")] public const int VS_FF_SPECIALBUILD = 0x00000020; [NativeTypeName("#define VOS_UNKNOWN 0x00000000L")] public const int VOS_UNKNOWN = 0x00000000; [NativeTypeName("#define VOS_DOS 0x00010000L")] public const int VOS_DOS = 0x00010000; [NativeTypeName("#define VOS_OS216 0x00020000L")] public const int VOS_OS216 = 0x00020000; [NativeTypeName("#define VOS_OS232 0x00030000L")] public const int VOS_OS232 = 0x00030000; [NativeTypeName("#define VOS_NT 0x00040000L")] public const int VOS_NT = 0x00040000; [NativeTypeName("#define VOS_WINCE 0x00050000L")] public const int VOS_WINCE = 0x00050000; [NativeTypeName("#define VOS__BASE 0x00000000L")] public const int VOS__BASE = 0x00000000; [NativeTypeName("#define VOS__WINDOWS16 0x00000001L")] public const int VOS__WINDOWS16 = 0x00000001; [NativeTypeName("#define VOS__PM16 0x00000002L")] public const int VOS__PM16 = 0x00000002; [NativeTypeName("#define VOS__PM32 0x00000003L")] public const int VOS__PM32 = 0x00000003; [NativeTypeName("#define VOS__WINDOWS32 0x00000004L")] public const int VOS__WINDOWS32 = 0x00000004; [NativeTypeName("#define VOS_DOS_WINDOWS16 0x00010001L")] public const int VOS_DOS_WINDOWS16 = 0x00010001; [NativeTypeName("#define VOS_DOS_WINDOWS32 0x00010004L")] public const int VOS_DOS_WINDOWS32 = 0x00010004; [NativeTypeName("#define VOS_OS216_PM16 0x00020002L")] public const int VOS_OS216_PM16 = 0x00020002; [NativeTypeName("#define VOS_OS232_PM32 0x00030003L")] public const int VOS_OS232_PM32 = 0x00030003; [NativeTypeName("#define VOS_NT_WINDOWS32 0x00040004L")] public const int VOS_NT_WINDOWS32 = 0x00040004; [NativeTypeName("#define VFT_UNKNOWN 0x00000000L")] public const int VFT_UNKNOWN = 0x00000000; [NativeTypeName("#define VFT_APP 0x00000001L")] public const int VFT_APP = 0x00000001; [NativeTypeName("#define VFT_DLL 0x00000002L")] public const int VFT_DLL = 0x00000002; [NativeTypeName("#define VFT_DRV 0x00000003L")] public const int VFT_DRV = 0x00000003; [NativeTypeName("#define VFT_FONT 0x00000004L")] public const 
int VFT_FONT = 0x00000004; [NativeTypeName("#define VFT_VXD 0x00000005L")] public const int VFT_VXD = 0x00000005; [NativeTypeName("#define VFT_STATIC_LIB 0x00000007L")] public const int VFT_STATIC_LIB = 0x00000007; [NativeTypeName("#define VFT2_UNKNOWN 0x00000000L")] public const int VFT2_UNKNOWN = 0x00000000; [NativeTypeName("#define VFT2_DRV_PRINTER 0x00000001L")] public const int VFT2_DRV_PRINTER = 0x00000001; [NativeTypeName("#define VFT2_DRV_KEYBOARD 0x00000002L")] public const int VFT2_DRV_KEYBOARD = 0x00000002; [NativeTypeName("#define VFT2_DRV_LANGUAGE 0x00000003L")] public const int VFT2_DRV_LANGUAGE = 0x00000003; [NativeTypeName("#define VFT2_DRV_DISPLAY 0x00000004L")] public const int VFT2_DRV_DISPLAY = 0x00000004; [NativeTypeName("#define VFT2_DRV_MOUSE 0x00000005L")] public const int VFT2_DRV_MOUSE = 0x00000005; [NativeTypeName("#define VFT2_DRV_NETWORK 0x00000006L")] public const int VFT2_DRV_NETWORK = 0x00000006; [NativeTypeName("#define VFT2_DRV_SYSTEM 0x00000007L")] public const int VFT2_DRV_SYSTEM = 0x00000007; [NativeTypeName("#define VFT2_DRV_INSTALLABLE 0x00000008L")] public const int VFT2_DRV_INSTALLABLE = 0x00000008; [NativeTypeName("#define VFT2_DRV_SOUND 0x00000009L")] public const int VFT2_DRV_SOUND = 0x00000009; [NativeTypeName("#define VFT2_DRV_COMM 0x0000000AL")] public const int VFT2_DRV_COMM = 0x0000000A; [NativeTypeName("#define VFT2_DRV_INPUTMETHOD 0x0000000BL")] public const int VFT2_DRV_INPUTMETHOD = 0x0000000B; [NativeTypeName("#define VFT2_DRV_VERSIONED_PRINTER 0x0000000CL")] public const int VFT2_DRV_VERSIONED_PRINTER = 0x0000000C; [NativeTypeName("#define VFT2_FONT_RASTER 0x00000001L")] public const int VFT2_FONT_RASTER = 0x00000001; [NativeTypeName("#define VFT2_FONT_VECTOR 0x00000002L")] public const int VFT2_FONT_VECTOR = 0x00000002; [NativeTypeName("#define VFT2_FONT_TRUETYPE 0x00000003L")] public const int VFT2_FONT_TRUETYPE = 0x00000003; [NativeTypeName("#define VFFF_ISSHAREDFILE 0x0001")] public const int VFFF_ISSHAREDFILE = 0x0001; [NativeTypeName("#define VFF_CURNEDEST 0x0001")] public const int VFF_CURNEDEST = 0x0001; [NativeTypeName("#define VFF_FILEINUSE 0x0002")] public const int VFF_FILEINUSE = 0x0002; [NativeTypeName("#define VFF_BUFFTOOSMALL 0x0004")] public const int VFF_BUFFTOOSMALL = 0x0004; [NativeTypeName("#define VIFF_FORCEINSTALL 0x0001")] public const int VIFF_FORCEINSTALL = 0x0001; [NativeTypeName("#define VIFF_DONTDELETEOLD 0x0002")] public const int VIFF_DONTDELETEOLD = 0x0002; [NativeTypeName("#define VIF_TEMPFILE 0x00000001L")] public const int VIF_TEMPFILE = 0x00000001; [NativeTypeName("#define VIF_MISMATCH 0x00000002L")] public const int VIF_MISMATCH = 0x00000002; [NativeTypeName("#define VIF_SRCOLD 0x00000004L")] public const int VIF_SRCOLD = 0x00000004; [NativeTypeName("#define VIF_DIFFLANG 0x00000008L")] public const int VIF_DIFFLANG = 0x00000008; [NativeTypeName("#define VIF_DIFFCODEPG 0x00000010L")] public const int VIF_DIFFCODEPG = 0x00000010; [NativeTypeName("#define VIF_DIFFTYPE 0x00000020L")] public const int VIF_DIFFTYPE = 0x00000020; [NativeTypeName("#define VIF_WRITEPROT 0x00000040L")] public const int VIF_WRITEPROT = 0x00000040; [NativeTypeName("#define VIF_FILEINUSE 0x00000080L")] public const int VIF_FILEINUSE = 0x00000080; [NativeTypeName("#define VIF_OUTOFSPACE 0x00000100L")] public const int VIF_OUTOFSPACE = 0x00000100; [NativeTypeName("#define VIF_ACCESSVIOLATION 0x00000200L")] public const int VIF_ACCESSVIOLATION = 0x00000200; [NativeTypeName("#define VIF_SHARINGVIOLATION 0x00000400L")] public const 
int VIF_SHARINGVIOLATION = 0x00000400; [NativeTypeName("#define VIF_CANNOTCREATE 0x00000800L")] public const int VIF_CANNOTCREATE = 0x00000800; [NativeTypeName("#define VIF_CANNOTDELETE 0x00001000L")] public const int VIF_CANNOTDELETE = 0x00001000; [NativeTypeName("#define VIF_CANNOTRENAME 0x00002000L")] public const int VIF_CANNOTRENAME = 0x00002000; [NativeTypeName("#define VIF_CANNOTDELETECUR 0x00004000L")] public const int VIF_CANNOTDELETECUR = 0x00004000; [NativeTypeName("#define VIF_OUTOFMEMORY 0x00008000L")] public const int VIF_OUTOFMEMORY = 0x00008000; [NativeTypeName("#define VIF_CANNOTREADSRC 0x00010000L")] public const int VIF_CANNOTREADSRC = 0x00010000; [NativeTypeName("#define VIF_CANNOTREADDST 0x00020000L")] public const int VIF_CANNOTREADDST = 0x00020000; [NativeTypeName("#define VIF_BUFFTOOSMALL 0x00040000L")] public const int VIF_BUFFTOOSMALL = 0x00040000; [NativeTypeName("#define VIF_CANNOTLOADLZ32 0x00080000L")] public const int VIF_CANNOTLOADLZ32 = 0x00080000; [NativeTypeName("#define VIF_CANNOTLOADCABINET 0x00100000L")] public const int VIF_CANNOTLOADCABINET = 0x00100000; }
c#
12
0.726795
133
35.647541
244
starcoderdata
#!/usr/bin/python import sys, os, io import copy FROM_START = 0 FROM_CURRENT = 1 FROM_END = 2 class Input: def __init__(self, filename): """ Initialize a input file and put the cursor on specified offset """ self.__input__ = open(format(filename, '08X')+'.gen','rb') self.__input__.seek(0, FROM_END); self.__sizeof__ = self.__input__.tell() self.__input__.seek(0, FROM_START); def read(self, bytes): """ Read N bytes from input file, return as int """ return int.from_bytes(self.__input__.read(bytes), byteorder='big') def seek(self, offset): self.__input__.seek(offset, FROM_START); def sizeof(self): return self.__sizeof__ def close(self): """ Close input file """ self.__input__.close() def get_offset(self): return self.__input__.tell() class Output: def __init__(self, filename): """ Open output file and set specified offset as filename """ self.__output__ = open(format(filename, '08X')+'_compressed.gen', 'wb') def write(self, value): """ Write a byte value to output file """ self.__output__.write(value) def sizeof(self): return self.__output__.tell() def close(self): """ Close output file """ self.__output__.close() class LZWindow: def __init__(self): self.__max__ = 0xFFF self.__current__ = 0xFEE self.__window__ = [] for x in range(0, self.__max__+1): self.__window__.append(0x0) def max(self): return self.__max__ def get_current(self): return self.__current__ def append(self, value): self.__window__[self.__current__] = value self.__current__ += 1 self.__current__ = self.__current__&self.__max__ def window(self): return self.__window__ def get(self, key): return self.__window__[key] class LZOptimizer: def __init__(self, _input, lzwindow, match): """ Initialize the LZ data optimizer """ self._input = _input self.lzwindow = lzwindow self.match = match self.current_input_offset = self._input.get_offset() self.current_lzwindow_offset = self.lzwindow.get_current() self.possible_chains = (self.lzwindow.window()).count(self.match) self.probed_chains = 0 self.matches = [] def run(self): """ Probe possible chains and choose the best match """ while self.probed_chains < self.possible_chains: index = 0 self._input.seek(self.current_input_offset) current = self.current_lzwindow_offset&self.lzwindow.max() end = (self.current_lzwindow_offset-self.lzwindow.max()-0x1) while current >= end: if (self.lzwindow.get(current&self.lzwindow.max()) == self.match): length = self.probe_match_length(current) if (length > 0x2) and (index < 0): self.matches.append((index, length-0x1)) self.probed_chains += 1 if not (self.probed_chains < self.possible_chains): break current-=1 index-=1 if len(self.matches) > 0: return self.best_match() return (0,0) def probe_match_length(self, current): """ Probe matched chain length """ self._input.seek(self.current_input_offset) temp_window = copy.copy(self.lzwindow) temp_window.append(temp_window.get((current)&self.lzwindow.max())) next_match = self._input.read(1) length = 1 while next_match == temp_window.get((current+length)&self.lzwindow.max()): if length > 0x11: break temp_window.append(temp_window.get((current+length)&self.lzwindow.max())) next_match = self._input.read(1) length += 1 return length def best_match(self): """ Choose best match in possible matches """ lst = [] lst_by_index = sorted(self.matches, key=lambda x: x[0], reverse=True) lst_by_length = sorted(self.matches, key=lambda x: x[1]) for match in enumerate(lst_by_length): lst.append((match[0]+lst_by_length.index(match[1]), match[0])) return self.matches[sorted(lst, key=lambda x: x[0])[0][1]] class LZEncoder: def 
__init__(self, _input, _output): """ Start LZ Encoder """ self._input = _input self._output = _output self._chain = (0,0) self._buffer = [] self._bitmask = [] self._cycle = 0 self.maxlen = self._input.sizeof() self.curlen = self.maxlen self.lzwindow = LZWindow() self.print_starting() # Write length of decompressed data _output.write(((self.maxlen >> 8)&0xFF).to_bytes(1, byteorder='big')) _output.write((self.maxlen&0xFF).to_bytes(1, byteorder='big')) def run(self): """ Run LZ Encoder """ while self.curlen > 0: # Display task percentage completed self.print_progress() # Write Bitmask and Data to output if (len(self._bitmask) == 8): self._cycle += 1 self.write_bitmask_to_output() self.write_buffer_to_output() # Check for possible chains and choose the best match current_input_offset = self._input.get_offset() current_match = self._input.read(1) if self.has_output_data(): possible_chains = (self.lzwindow.window()).count(current_match) # If possible chains in buffer run Optimizer to get best chain if possible_chains > 0: optimizer = LZOptimizer(self._input, self.lzwindow, current_match) self._chain = optimizer.run() # If has a LZ Chain process LZ Pair if self.has_chain(): self._input.seek(current_input_offset+self._chain[1]+0x1) length = self._chain[1] offset = (self.lzwindow.get_current()-abs(self._chain[0]))&self.lzwindow.max() pair = (offset&0xFF)<<8 | ((offset>>4)&0xF0 | ((length-0x2)&0xF)) # Append Pair to Buffer self._buffer.append(((pair>>8)&0xFF).to_bytes(1, byteorder='big')) self._buffer.append((pair&0xFF).to_bytes(1, byteorder='big')) # Append Chain to Window for count in range(0, length+1): self.lzwindow.append(self.lzwindow.get((offset+count)&self.lzwindow.max())) self.curlen -= 1 self._chain = (0,0) # Set Bitmask 0 => encoded chain self._bitmask.append(0) else: # If not a encoded chain append readed byte to Buffer and Window self._input.seek(current_input_offset) byte_read = self._input.read(1) self.lzwindow.append(byte_read) self._buffer.append(byte_read.to_bytes(1, byteorder='big')) # Set Bitmask 1 => decoded byte self._bitmask.append(1) self.curlen-=1 def print_starting(self): """ Print encoder start message """ print('[*] Starting data encoding with LZ...') def print_progress(self): """ Print encoder progress """ print('{0:.2f}%'.format(100-(self.curlen*100/self.maxlen))) def print_ratio(self): """ Print encoder ratio """ input_size = ((self._input.sizeof()*100)/self._input.sizeof()) output_size = ((self._output.sizeof()*100)/self._input.sizeof()) print('[+] Success encoded with a ratio '+str(round(input_size/output_size))+':1') def has_chain(self): """ Check if has a valid chain to compress """ return self._chain[1] >= 2 def has_output_data(self): """ Check if has data in output file """ return self._cycle >= 1 def write_bitmask_to_output(self): """ Write LZ bitmask control byte to output """ self._bitmask.reverse() self._output.write(int('0b'+''.join(map(str, self._bitmask)),2).to_bytes(1, byteorder='big')) self._bitmask = [] def write_buffer_to_output(self): """ Write encoded buffer to output """ for value in self._buffer: self._output.write(value) self._buffer = [] if __name__=='__main__': offset = int(sys.argv[1],16) encoder = LZEncoder(Input(offset), Output(offset)) encoder.run() encoder.print_ratio()
python
20
0.550869
101
36.384615
234
starcoderdata
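LZOptimizer in the record above scans a 0xFFF-byte ring-buffer window for the longest chain matching the upcoming input, capping matches at 0x11 bytes. A stripped-down Python sketch of longest-match search over a ring buffer, illustrative only and not the exact 12-bit offset / 4-bit length pair format used by that encoder:

# Simplified longest-match search over a sliding window, in the spirit of
# LZOptimizer above (illustrative only).
def longest_match(window, data, pos, max_len=18):
    best_off, best_len = 0, 0
    for off in range(len(window)):
        length = 0
        while (length < max_len and pos + length < len(data)
               and window[(off + length) % len(window)] == data[pos + length]):
            length += 1
        if length > best_len:
            best_off, best_len = off, length
    return best_off, best_len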
def test_mail_merge(body_template, context_dict):
    """
    Weak test to verify template fields are all in context_dict.
    Used in cases where we don't want a mail merge failing silently.
    """
    template_content = open('templates/emails/'+body_template, 'r').read()
    variables = re.findall(r'{{(.*?)}}', template_content)

    # Trim whitespace and only take entries to the left of the first period (if applicable):
    variables = set([x.strip().split('.')[0] for x in variables])

    # Remove variable in all templates:
    variables.remove('BASE_URL')

    for variable in variables:
        if variable not in context_dict:
            raise Exception('Missing variable `%s` in `%s`' % (variable, body_template))
python
13
0.664835
92
47.6
15
inline
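The helper above pulls every {{ ... }} placeholder out of a template, keeps only the part before the first dot, and drops the BASE_URL variable that appears in every template. A tiny usage sketch of that extraction on a made-up template string:

import re

# What the variable extraction above does to a small template string:
template = "Hello {{ user.first_name }}, visit {{ BASE_URL }}/orders/{{ order.id }}"
variables = set(x.strip().split('.')[0] for x in re.findall(r'{{(.*?)}}', template))
variables.remove('BASE_URL')      # present in every template, so ignored
print(variables)                  # {'user', 'order'}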
def invalidTransactions_wrong(self, transactions):  #
    tlist = list(map(lambda x: x.split(','), transactions))
    tlist.sort(key=lambda x: (x[0], int(x[1])))  # KENG: sort int string '137'<'52'
    ans = set()
    for i, v in enumerate(tlist):
        if int(v[2]) > 1000:
            ans.add(i)
        if i:
            a, b = tlist[i-1], tlist[i]  # this doesn't work by only checking the prev ONE item
            if a[0] == b[0] and int(b[1])-int(a[1])<=60 and a[3]!=b[3]:
                ans.add(i-1)
                ans.add(i)
    return [','.join(tlist[i]) for i in ans]
python
15
0.480064
98
47.307692
13
inline
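The inline comment in the record above already flags the bug: after sorting, comparing only the immediately preceding transaction misses conflicts with earlier transactions that are still within the 60-minute window. A small counterexample (made-up data) showing a pair the pairwise-previous check never compares:

# Counterexample for the "previous item only" check flagged in the record above.
# After sorting by (name, time) the rows for alice are:
#   ("alice", 10, "x"), ("alice", 40, "x"), ("alice", 50, "y")
# Rows 10 and 50 are in different cities and only 40 minutes apart, so both are
# invalid, but the check below only ever compares adjacent rows.
rows = [("alice", 10, "x"), ("alice", 40, "x"), ("alice", 50, "y")]
for prev, cur in zip(rows, rows[1:]):
    conflict = prev[0] == cur[0] and cur[1] - prev[1] <= 60 and prev[2] != cur[2]
    print(prev, cur, conflict)   # only the (40, "x") vs (50, "y") pair is flagged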
// Copyright 1996-2019 Cyberbotics Ltd. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "Communication.hpp" #include "Packet.hpp" #include "Time.hpp" #include #include #include #include #include #ifdef _WIN32 #include #else #include #include #include #include #include #include #endif using namespace std; Communication::Communication() : mSocket(-1) { #ifdef _WIN32 // initialize the socket API WSADATA info; int rc = WSAStartup(MAKEWORD(1, 1), &info); // Winsock 1.1 if (rc != 0) cerr << "Cannot initialize Winsock" << endl; #endif } Communication::~Communication() { close(); #ifdef _WIN32 WSACleanup(); #endif } bool Communication::initialize(const char *ip, int port) { close(); mSocket = socket(AF_INET, SOCK_STREAM, 0); if (mSocket == -1) { cerr << "Cannot create socket" << endl; return false; } struct sockaddr_in address; memset(&address, 0, sizeof(struct sockaddr_in)); // fill in the socket address address.sin_family = AF_INET; address.sin_port = htons(port); struct hostent *server = gethostbyname(ip); if (server) memcpy((char *)&address.sin_addr.s_addr, (char *)server->h_addr, server->h_length); else { cerr << "Cannot resolve server name: " << ip << endl; return false; } // connect to the server int rc = connect(mSocket, (struct sockaddr *)&address, sizeof(struct sockaddr)); if (rc == -1) { close(); return false; } return true; } void Communication::close() { if (mSocket == -1) return; #ifdef _WIN32 closesocket(mSocket); #else ::close(mSocket); #endif mSocket = -1; } bool Communication::sendPacket(const Packet *packet) { if (mSocket == -1) { cerr << "Socket not initialized" << endl; return false; } int n = 0; int size = packet->size(); do { int s = send(mSocket, (const char *)(packet->getBufferFromPos(n)), size - n, 0); if (s == -1) { cerr << "Error sending data to socket" << endl; return false; } n += s; } while (n < size); return true; } bool Communication::receivePacket(Packet *packet) { unsigned char buffer[5]; int n = 0; do { // read until the initial 'W' message n = recv(mSocket, (char *)buffer, 1, 0); if (n == -1) { cerr << "Error received packet" << endl; return false; } } while (buffer[0] != 'W'); do { // read the message size (int) int r = recv(mSocket, (char *)&buffer[n], 5 - n, 0); if (r == -1) { cerr << "Error received packet" << endl; return false; } n += r; } while (n < 5); packet->clear(); packet->append(buffer, 5); int packet_size = packet->readIntAt(1); if (packet_size > packet->maxSize()) { cerr << "Too big packet about to be received" << endl; return false; } if (packet_size > 5) packet->readFromSocket(mSocket, packet_size - 5); return true; }
c++
13
0.637419
87
24.056738
141
starcoderdata
public override bool TryToMerge(IAction followingAction)
{
    SetPropertyAction next = followingAction as SetPropertyAction;

    // Comparing the Property does not allow for proper merging. - D.H.
    // if (next != null
    //     && next.Property == this.Property)

    // Using new comparison.
    if (next != null
        && next.Property.Name == this.Property.Name
        && next.Property.Parent == this.Property.Parent
        && next.Property.CanSetValue == this.Property.CanSetValue)
    {
        this.NewValue = next.NewValue;
        Property.SetValue(NewValue);
        return true;
    }
    return false;
}
c#
12
0.518135
79
38.736842
19
inline
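TryToMerge above collapses consecutive set-property actions on the same property into a single undo entry that keeps only the latest value. A minimal Python sketch of that merge rule, using a hypothetical action type:

# Hypothetical sketch of merging consecutive set-property actions, as in
# TryToMerge above: same property -> keep one action holding the newest value.
class SetPropertyAction:
    def __init__(self, prop, new_value):
        self.prop, self.new_value = prop, new_value

    def try_to_merge(self, following):
        if isinstance(following, SetPropertyAction) and following.prop == self.prop:
            self.new_value = following.new_value   # absorb the later change
            return True
        return False


a = SetPropertyAction("Width", 10)
print(a.try_to_merge(SetPropertyAction("Width", 20)), a.new_value)   # True 20
print(a.try_to_merge(SetPropertyAction("Height", 5)), a.new_value)   # False 20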
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.painless.antlr; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.LexerNoViableAltException; import org.antlr.v4.runtime.Token; import org.elasticsearch.painless.lookup.PainlessLookup; /** * A lexer that is customized for painless suggestions with the following modifications: * * the default error behavior to only fail if we don't recognize a token in default mode * the last token in case we need to do lookbehind for regex vs division detection * the regex vs division detection * the error message when a string contains invalid escape sequences to include a list of valid escape sequences * */ public final class EnhancedSuggestLexer extends SuggestLexer { private Token current = null; private final PainlessLookup painlessLookup; public EnhancedSuggestLexer(CharStream charStream, PainlessLookup painlessLookup) { super(charStream); this.painlessLookup = painlessLookup; } @Override public Token nextToken() { current = super.nextToken(); return current; } @Override public void recover(final LexerNoViableAltException lnvae) { if (this._mode != PainlessLexer.DEFAULT_MODE) { this._mode = DEFAULT_MODE; } else { throw new IllegalStateException("unexpected token [" + lnvae.getOffendingToken().getText() + "]", lnvae); } } @Override protected boolean isSlashRegex() { Token lastToken = current; if (lastToken == null) { return true; } switch (lastToken.getType()) { case PainlessLexer.RBRACE: case PainlessLexer.RP: case PainlessLexer.OCTAL: case PainlessLexer.HEX: case PainlessLexer.INTEGER: case PainlessLexer.DECIMAL: case PainlessLexer.ID: case PainlessLexer.DOTINTEGER: case PainlessLexer.DOTID: return false; default: return true; } } @Override protected boolean isType(String text) { return painlessLookup.isValidCanonicalClassName(text); } }
java
15
0.673737
125
33.118421
76
starcoderdata
func IsHTTPRouteReady(r *gatewayv1alpha1.HTTPRoute) (bool, error) {
	if r.Status.Gateways == nil {
		return false, nil
	}
	for _, gw := range r.Status.Gateways {
		if !isGatewayAdmitted(gw) {
			// Return false if _any_ of the gateways isn't admitted yet.
			return false, nil
		}
	}
	return true, nil
}
go
9
0.676568
67
24.333333
12
inline
def __init__(self, L, K, Fout=None, initializer=None, activation=None, use_bias=False, use_bn=0, **kwargs): """ Initializes the graph convolutional layer, assuming the input has dimension (B, M, F) :param L: The graph Laplacian (MxM), as numpy array :param K: Order of the polynomial to use :param Fout: Number of features (channels) of the output, default to number of input channels :param initializer: initializer to use for weight initialisation :param activation: the activation function to use after the layer, defaults to linear :param use_bias: Use learnable bias weights :param use_bn: Apply batch norm (1) or instance norm (2) before adding the bias (0 otherwise) :param kwargs: additional keyword arguments passed on to add_weight """ # This is necessary for every Layer super(Monomial, self).__init__(name='') # save necessary params self.L = L self.K = K self.Fout = Fout self.use_bias = use_bias self.use_bn = use_bn if self.use_bn == 1: self.bn = tf.keras.layers.BatchNormalization(axis=-1, momentum=0.9, epsilon=1e-5, center=False, scale=False) elif self.use_bn == 2: self.inst_norm = tf.keras.layers.Lambda(instance_normalization, name="instance_norm") self.initializer = initializer if activation is None or callable(activation): self.activation = activation elif hasattr(tf.keras.activations, activation): self.activation = getattr(tf.keras.activations, activation) else: raise ValueError(f"Could not find activation <{activation}> in tf.keras.activations...") self.kwargs = kwargs # Rescale Laplacian and store as a TF sparse tensor. Copy to not modify the shared L. L = sparse.csr_matrix(L) lmax = 1.02 * eigsh(L, k=1, which='LM', return_eigenvectors=False)[0] L = utils.rescale_L(L, lmax=lmax) L = L.tocoo() indices = np.column_stack((L.row, L.col)) L = tf.SparseTensor(indices, L.data, L.shape) self.sparse_L = tf.sparse.reorder(L)
python
12
0.628286
120
49.159091
44
inline
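The constructor above rescales the graph Laplacian by a slightly inflated largest eigenvalue before storing it as a sparse tensor; the usual convention behind a helper like utils.rescale_L is L <- 2L/lmax - I, which maps the spectrum from [0, lmax] into roughly [-1, 1]. A hedged NumPy/SciPy sketch of that rescaling step, assuming that convention:

import numpy as np
from scipy import sparse
from scipy.sparse.linalg import eigsh

# Hedged sketch of the Laplacian rescaling done in the constructor above,
# assuming rescale_L maps the spectrum from [0, lmax] into [-1, 1].
def rescale_laplacian(L):
    L = sparse.csr_matrix(L).astype(np.float64)
    lmax = 1.02 * eigsh(L, k=1, which='LM', return_eigenvectors=False)[0]
    I = sparse.identity(L.shape[0], format='csr', dtype=L.dtype)
    return (2.0 / lmax) * L - I

# Path graph on 3 nodes: the Laplacian eigenvalues are {0, 1, 3}.
A = np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]], dtype=float)
L = np.diag(A.sum(1)) - A
print(rescale_laplacian(L).toarray())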
using ObserverPattern;

var radio = new RadioStation();
var cbsNews = new CBSNews();
var brodcaster = new AssociatedPress();

brodcaster.RegisterObserver(radio);
brodcaster.RegisterObserver(cbsNews);

brodcaster.NotifyObservers();

Console.ReadLine();
c#
8
0.780303
39
21
12
starcoderdata
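The snippet above wires two observers to a broadcaster and fires a single notification; the underlying contract is just register/notify, as in this small Python sketch with hypothetical names:

# Minimal observer sketch matching the usage above (names are hypothetical).
class AssociatedPress:
    def __init__(self):
        self._observers = []

    def register_observer(self, observer):
        self._observers.append(observer)

    def notify_observers(self, headline="breaking news"):
        for observer in self._observers:
            observer.update(headline)


class RadioStation:
    def update(self, headline):
        print("radio:", headline)


broadcaster = AssociatedPress()
broadcaster.register_observer(RadioStation())
broadcaster.notify_observers()   # radio: breaking news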
package lt.tokenmill.crawling.adminui.view.sourcetest; import com.vaadin.data.util.BeanItemContainer; import com.vaadin.data.util.GeneratedPropertyContainer; import com.vaadin.ui.*; import com.vaadin.ui.renderers.HtmlRenderer; import lt.tokenmill.crawling.adminui.utils.GridUtils.UrlToLinkConverter; import lt.tokenmill.crawling.adminui.view.BaseView; import lt.tokenmill.crawling.commonui.ElasticSearch; import lt.tokenmill.crawling.data.HttpSourceTest; import lt.tokenmill.crawling.data.PageableList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static com.vaadin.server.Sizeable.Unit.PERCENTAGE; import static com.vaadin.server.Sizeable.Unit.PIXELS; public class HttpSourceTestsView extends BaseView { private static final Logger LOG = LoggerFactory.getLogger(HttpSourceTestsView.class); private Grid itemsGrid = new Grid(new GeneratedPropertyContainer(new BeanItemContainer<>(HttpSourceTest.class))); private Label totalCountLabel = new Label(); private TextField filterField = new TextField(); private HorizontalLayout pagingRow = new HorizontalLayout(); private long totalCount = 0; private int currentPage = 1; public HttpSourceTestsView() { super("HTTP Source Tests"); HorizontalLayout mainLayout = new HorizontalLayout(); mainLayout.setWidth(100, PERCENTAGE); mainLayout.setHeight(100, PERCENTAGE); mainLayout.setSpacing(true); VerticalLayout gridLayout = new VerticalLayout(); gridLayout.setSpacing(true); gridLayout.setWidth(100, PERCENTAGE); // Search field and create new button filterField.setInputPrompt("Search..."); filterField.addTextChangeListener(event -> refreshGrid(event.getText())); Button testAllButton = new Button("Run All Tests"); testAllButton.addClickListener(event -> testAll()); Button addNewButton = new Button("Add New Test"); addNewButton.addClickListener(event -> showHttpSourceTestForm(new HttpSourceTest())); HorizontalLayout actionHeader = new HorizontalLayout(filterField, testAllButton, addNewButton); actionHeader.setSpacing(true); actionHeader.setWidth(100, PERCENTAGE); filterField.setWidth(100, PERCENTAGE); actionHeader.setExpandRatio(filterField, 1.0f); gridLayout.addComponent(actionHeader); // Grid itemsGrid.setWidth(100, PERCENTAGE); itemsGrid.setHeight(700, PIXELS); itemsGrid.setSelectionMode(Grid.SelectionMode.SINGLE); itemsGrid.addSelectionListener( e -> { HttpSourceTest hst = (HttpSourceTest) itemsGrid.getSelectedRow(); hst = ElasticSearch.getHttpSourceTestOperations().get(hst.getUrl()); showHttpSourceTestForm(hst); }); itemsGrid.getColumn("source").setRenderer(new HtmlRenderer(), new UrlToLinkConverter()); itemsGrid.getColumn("url").setRenderer(new HtmlRenderer(), new UrlToLinkConverter()); itemsGrid.setColumns("source", "url"); gridLayout.addComponent(itemsGrid); gridLayout.addComponent(totalCountLabel); refreshGrid(filterField.getValue()); mainLayout.addComponent(gridLayout); mainLayout.setExpandRatio(gridLayout, 1f); addComponent(mainLayout); gridLayout.addComponent(pagingRow); } private void refreshPagingRow() { pagingRow.removeAllComponents(); pagingRow.addComponent(new Label("Pages: ")); long amountOfPages = this.totalCount / 100; amountOfPages = amountOfPages + (this.totalCount % 100 != 0 ? 
1 : 0); for (int i = 1; i <= amountOfPages; i++) { String buttonLabel = String.valueOf(i); Button button = new Button(); if (i == currentPage) { buttonLabel = ">>" + buttonLabel + "<<"; } button.setCaption(buttonLabel); button.setIconAlternateText(String.valueOf(i)); button.addClickListener(clickEvent -> { this.currentPage = (Integer.parseInt(clickEvent.getButton().getIconAlternateText())); refreshGrid(filterField.getValue()); }); pagingRow.addComponent(button); } } private int getOffset() { return (this.currentPage - 1) * 100; } private void refreshGrid(String text) { PageableList data = ElasticSearch.getHttpSourceTestOperations() .filter(text, getOffset()); itemsGrid.getContainerDataSource().removeAllItems(); for (HttpSourceTest hst : data.getItems()) { itemsGrid.getContainerDataSource().addItem(hst); } this.totalCount = data.getTotalCount(); totalCountLabel.setValue(String.format("Total count: %d", this.totalCount)); LOG.info("Refreshed grid using filter '{}'. Total items: {}", text, this.totalCount); refreshPagingRow(); } private void showHttpSourceTestForm(HttpSourceTest hst) { HttpSourceTestFormWindow formWindow = new HttpSourceTestFormWindow(hst); formWindow.addAfterUpdateListener(() -> refreshGrid(filterField.getValue())); UI.getCurrent().addWindow(formWindow); } private void testAll() { HttpSourceAllTestsWindow window = new HttpSourceAllTestsWindow(); UI.getCurrent().addWindow(window); } }
java
18
0.681122
117
41.215385
130
starcoderdata
// Decompiled by Jad v1.5.8e. Copyright 2001 // Jad home page: http://www.geocities.com/kpdus/jad.html // Decompiler options: braces fieldsfirst space lnc package de.greenrobot.dao.query; import de.greenrobot.dao.AbstractDao; // Referenced classes of package de.greenrobot.dao.query: // AbstractQueryData, Query, AbstractQuery final class offsetPosition extends AbstractQueryData { private final int limitPosition; private final int offsetPosition; protected volatile AbstractQuery createQuery() { return createQuery(); } protected Query createQuery() { return new Query(this, dao, sql, (String[])initialValues.clone(), limitPosition, offsetPosition, null); } a(AbstractDao abstractdao, String s, String as[], int i, int j) { super(abstractdao, s, as); limitPosition = i; offsetPosition = j; } }
java
11
0.693446
111
26.028571
35
starcoderdata
// // Created by moqi on 2018/4/25. // #define LOG_TAG "Representation" #include #include "Representation.h" #include "utils/AFMediaType.h" namespace Cicada { // void Representation::addSegment(segment *seg) { // mSegList.push_back(seg); // // } Representation::~Representation() { delete mPSegList; } void Representation::setPlaylistUrl(const std::string &url) { mPlaylistUrl = url; } void Representation::setBaseUrl(const std::string &url) { mBaseUrl = url; } void Representation::setBandwidth(uint64_t bandwidth) { mBandWidth = bandwidth; } void Representation::setWidth(int width) { mWidth = width; } void Representation::setHeight(int height) { mHeight = height; } void Representation::print() { // AF_LOGD("Representation","url is %s\n" // "mBandWidth is %llu\n" // "res is %d x %d",mPlaylistUrl.c_str(),mBandWidth,mWidth,mHeight); if (mStreamType == STREAM_TYPE_AUDIO || mStreamType == STREAM_TYPE_SUB) { AF_LOGD("%s language is %s\n", mStreamType == STREAM_TYPE_AUDIO ? "Audio" : "Subtitle", mLang.c_str()); } else if (mStreamType == STREAM_TYPE_VIDEO) { AF_LOGD("video size is %d x %d with bandWidth %llu\n", mHeight, mWidth, mBandWidth); } if (mPSegList) { mPSegList->print(); } } void Representation::SetSegmentList(SegmentList *segList) { mPSegList = segList; } const std::string &Representation::getBaseUrl() { return mBaseUrl; } playList *Representation::getPlaylist() { return mAdapt->getPeriod()->getPlayList(); } SegmentList *Representation::GetSegmentList() { return mPSegList; } const std::string &Representation::getPlaylistUrl() { return mPlaylistUrl; } int Representation::getStreamInfo(int *width, int *height, uint64_t *bandwidth, std::string &language) { if (width) { *width = mWidth; } if (height) { *height = mHeight; } if (bandwidth) { *bandwidth = mBandWidth; } language = mLang; return 0; } AdaptationSet *Representation::getAdaptationSet() { return mAdapt; } }
c++
13
0.561721
106
21.017699
113
starcoderdata
package org.raml.jaxrs.test.model; import javax.annotation.Generated; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @Generated(value = "org.aml.raml2java", date = "Mon Mar 20 21:21:10 NOVT 2017") @XmlAccessorType(XmlAccessType.PROPERTY) public class Label extends Base { protected Project parent; protected String color; public Project getParent() { return parent; } public void setParent(Project value) { this.parent=value; } public Label withParent(Project value) { this.parent=value; return this; } public String getColor() { return color; } public void setColor(String value) { this.color=value; } public Label withColor(String value) { this.color=value; return this; } }
java
8
0.659328
79
19.069767
43
starcoderdata
# -*- coding: utf-8 -*-
import sys
import os
import unittest

import trytond.tests.test_tryton
from trytond.tests.test_tryton import ModuleTestCase

DIR = os.path.abspath(os.path.normpath(os.path.join(
    __file__, '..', '..', '..', '..', '..', 'trytond'
)))
if os.path.isdir(DIR):
    sys.path.insert(0, os.path.dirname(DIR))


class TestViewsDepends(ModuleTestCase):
    '''
    Test views and depends
    '''
    module = 'customs_value'


def suite():
    """
    Define suite
    """
    test_suite = trytond.tests.test_tryton.suite()
    test_suite.addTests(
        unittest.TestLoader().loadTestsFromTestCase(TestViewsDepends)
    )
    return test_suite


if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=2).run(suite())
python
10
0.637216
69
20.342857
35
starcoderdata
import argparse import os from typing import List import numpy as np import torch import yaml from torch.utils import data from tqdm import tqdm import pickle import matplotlib.pyplot as plt from data.dataset import TwoStateActionPairIndex, get_dataloader from data.utils import load_single_image, load_frame_from_video, load_seg_masks_from_video, seg_mask_transform, normalize from evaluation.IOU import calc_mean_iou from model.encoder import Encoder from utils import load_from_checkpoint def calc_nearest_neighbours(query_features: torch.FloatTensor, external_dataset_mat: torch.FloatTensor, k_neighbours: int = 5, similarity_func: str = "dot_product_similarity") -> [np.ndarray, np.ndarray]: """ for each features vector, calculate k nearest neighbours (NN) in an external dataset :param query_features: a matrix where each row is a latent representation to calculate NN for :param external_dataset_mat: the external dataset to look NN in as latent representations :param k_neighbours: number of NN to look :param similarity_func: similarity function between neighbours features_vec :return: top_scores - array of k similarity measures for each vector in query_features top_scores_ind - the indexes in the external dataset of the k NN """ if similarity_func == "mse_similarity": vec = query_features.repeat(external_dataset_mat.size()[0], 1, 1).transpose(0, 1).squeeze() if len(vec.shape) == 3: external_dataset_mat = external_dataset_mat.repeat(vec.shape[0], 1, 1) similarity = -1 * torch.nn.MSELoss(reduction="none")(vec, external_dataset_mat).mean(-1).squeeze() elif similarity_func == "dot_product_similarity": similarity = torch.mm(query_features, external_dataset_mat.T).squeeze() elif similarity_func == "cosine_similarity": similarity = torch.mm(query_features.div(query_features.norm(p=2, dim=-1, keepdim=True)), external_dataset_mat.div(external_dataset_mat.norm(p=2, dim=-1, keepdim=True)).T).squeeze() else: assert False, f"Similarity function is not recognized: {similarity_func}" top_scores, top_scores_ind = similarity.topk(k=k_neighbours, largest=True, sorted=True) return top_scores.cpu().numpy(), top_scores_ind.cpu().numpy() def generate_dataset_features_matrix(encoder: Encoder, loader: data.DataLoader, device: str, project_features: bool = True) -> [torch.Tensor, tuple]: """ run all dataset through the encoder and generates latent representations for each image :param encoder: Encoder module :param loader: DataLoader :param device: device :param project_features: bool, if True use projected features, else use full features :return: features_mat - a matrix of size n_samples X n_features info - a tuple of (video_path, frame_ind, seg_masks, states) corresponding to each image in the features_mat """ # init encoder.eval() encoder.to(device) features_mat: torch.FloatTensor = None video_path = [] frame_ind = [] seg_masks: torch.boolTensor = None states = None label: TwoStateActionPairIndex for (batch, seg, label) in tqdm(loader, desc="Generating features"): with torch.no_grad(): # load to device seg_mask, _ = seg observations, _, _ = [b.float().to(device) for b in batch] first_path = label.first_path observation_index = label.observation_index state = label.state # forward pass if project_features: z = encoder(observations) else: z = encoder.extract_features(observations) features_mat = z if features_mat is None else torch.cat((features_mat, z), dim=0) video_path.extend(first_path) frame_ind.extend(observation_index) if len(seg_mask): seg_masks = seg_mask if seg_masks is None else torch.cat((seg_masks, 
seg_mask), dim=0) if state is not None: states = state if states is None else torch.cat((states, state), dim=0) return features_mat, (video_path, frame_ind, seg_masks, states) def visualize_nearest_neighbours(results: List[tuple], save_path: str = None, title: str = None) -> None: """ plots nearest neighbours images results should be a list of tuples (ref_image, top_scores, top_imgs, mean_iou, iou, top_ind) :param results: list of tuples the holds the results :param save_path: path to save NN results as image :param title: plot title :return: None """ n_rows = len(results) num_nn = len(results[0][1]) n_cols = num_nn + 1 fig = plt.figure(figsize=(20, int(20*n_rows/5.))) if title is not None: fig.suptitle(title, fontsize=60) for row, info in enumerate(results): (ref_image, top_scores, top_imgs, mean_iou, iou, top_ind) = info fig.add_subplot(n_rows, n_cols, row * n_cols + 1).set_title(f"Ref: IOU: {mean_iou:.4f}", fontsize=20) plt.imshow(ref_image) plt.axis('off') for i in range(num_nn): fig.add_subplot(n_rows, n_cols, row * n_cols + i + 2).set_title(f"{i+1}: {top_scores[i]:.4f}, {iou[i]:.4f}", fontsize=20) plt.imshow(top_imgs[i]) plt.axis('off') if save_path is not None: plt.savefig(save_path + ".png") # plt.show() def find_nearest_neighbours(images_path_list: List[str], encoder: Encoder, loader: data.DataLoader, device: str, save_path: str, project_features: bool = True, similarity_func: str = "dot_product_similarity", compare_segmentation_mask: bool = True, dump_raw_results_path: str = None) -> None: """ find nearest neighbours (NN) for each image in images_path_list in an external dataset defined by a DataLoader :param images_path_list: list of images to find NN for them :param encoder: Encoder :param loader: DataLoader :param device: device :param save_path: path to save NN results as image :param project_features: bool, if True use projected features, else use full features :param similarity_func: similarity measurement :param compare_segmentation_mask: whether to calculate IOU for NN :param dump_raw_results_path: path to dump the external data as latent features :return: None """ with torch.no_grad(): # generate external_dataset_mat from data if os.path.exists(os.path.join(dump_raw_results_path, "external_dataset_mat.pkl")): with open(os.path.join(dump_raw_results_path, "external_dataset_mat.pkl"), "rb") as f: (external_dataset_mat, info) = pickle.load(f) else: external_dataset_mat, info = generate_dataset_features_matrix(encoder, loader, device=device, project_features=project_features) if dump_raw_results_path is not None: with open(os.path.join(dump_raw_results_path, "external_dataset_mat.pkl"), "wb") as f: pickle.dump((external_dataset_mat, info), f) (video_path, frame_ind, seg_masks, geom_location) = info # for each observation find nearest neighbours results = [] iou_scores = [] ref_features_vec = None for _, img_path in enumerate(images_path_list): ref_image = load_single_image(img_path) obs = normalize(loader.dataset.image_to_tensor(ref_image)) if compare_segmentation_mask: ref_seg_mask = load_seg_masks_from_video(img_path[:-4] + config["seg_masks_files_sfx"]) ref_seg_mask = seg_mask_transform(ref_seg_mask) features_vec = encoder(obs.float().to(device).unsqueeze(0)) ref_features_vec = features_vec if ref_features_vec is None else torch.cat((ref_features_vec, features_vec), dim=0) top_scores, top_scores_ind = calc_nearest_neighbours(features_vec, external_dataset_mat, k_neighbours=5, similarity_func=similarity_func) top_img = [] for top_ind in top_scores_ind: 
top_img.append(load_frame_from_video(video_path[top_ind], frame_ind[top_ind])) if compare_segmentation_mask: mean_iou, iou = calc_mean_iou(ref_seg_mask, [seg_masks[top_ind] for top_ind in top_scores_ind]) iou_scores.append(mean_iou) else: mean_iou = 0. iou = np.zeros(len(top_scores)) results.append((ref_image, top_scores, top_img, mean_iou, iou, top_scores_ind)) if dump_raw_results_path is not None: os.remove(os.path.join(dump_raw_results_path, "external_dataset_mat.pkl")) with open(os.path.join(dump_raw_results_path, "nn_results.pkl"), "wb") as f: pickle.dump((results, external_dataset_mat, ref_features_vec, geom_location), f) if compare_segmentation_mask: print(f"Mean IOU score: {np.mean(iou_scores)}") visualize_nearest_neighbours(results, save_path=save_path) def find_nearest_neighbours_from_list(images_path_list, encoder: Encoder, loader: data.DataLoader, device, save_path, project_features=True, dist_func="dot_product_similarity", compare_segmentation_mask=True, dump_raw_results_path: str = None): with torch.no_grad(): # generate_feature_mat seg_masks = None external_dataset_mat = None images = [] ref_features_vec = None for _, img_path in enumerate(images_path_list): ref_image = load_single_image(img_path) obs = normalize(loader.dataset.image_to_tensor(ref_image)).float().to(device).unsqueeze(0) if compare_segmentation_mask: seg_mask = load_seg_masks_from_video(img_path[:-4] + config["seg_masks_files_sfx"]) seg_mask = seg_mask_transform(seg_mask) seg_masks = seg_mask if seg_masks is None else torch.cat((seg_masks, seg_mask), dim=0) features_vec = encoder(obs) if project_features else encoder.extract_features(obs) ref_features_vec = features_vec if ref_features_vec is None else torch.cat((ref_features_vec, features_vec), dim=0) external_dataset_mat = features_vec if external_dataset_mat is None else torch.cat((external_dataset_mat, features_vec), dim=0) images.append(ref_image) # for each observation find nearest neighbours results = [] iou_scores = [] for i, img_path in enumerate(images_path_list): features_vec = external_dataset_mat[i] ref_image = images[i] top_scores, top_scores_ind = calc_nearest_neighbours(features_vec, external_dataset_mat, k_neighbours=5, similarity_func=dist_func) top_img = [] for top_ind in top_scores_ind: top_img.append(images[top_ind]) if compare_segmentation_mask: mean_iou, iou = calc_mean_iou(seg_masks[i], [seg_masks[top_ind] for top_ind in top_scores_ind]) iou_scores.append(mean_iou) else: mean_iou = 0. 
iou = np.zeros(len(top_scores)) results.append((ref_image, top_scores, top_img, mean_iou, iou, top_scores_ind)) if dump_raw_results_path is not None: with open(os.path.join(dump_raw_results_path, "nn_results.pkl"), "wb") as f: pickle.dump((results, external_dataset_mat, ref_features_vec, None), f) if compare_segmentation_mask: print(f"Mean IOU score: {np.mean(iou_scores)}") visualize_nearest_neighbours(results, save_path=save_path) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('-m', '--model_path', type=str, required=True, help='path to trained model') parser.add_argument('-i', '--input', type=str, required=True, help='path to reference image directory') parser.add_argument('-s', '--save_name', type=str, default=None, help='model results save name, if None saves as model name') parser.add_argument('-c', '--cfg', type=str, default=None, help='path to config file, if None will automatically take it from the model directory') parser.add_argument('--dataset', type=str, default='../datasets/textured_rope_val', help='path to dataset directory') parser.add_argument('-o', '--output', type=str, default='../control_results', help='path to output directory') parser.add_argument('-w', '--workers', type=int, default=6, help='num workers for DataLoader') parser.add_argument('--dist_func', type=str, default="mse_similarity", help='distance function type') parser.add_argument('--calc_iou', default=False, action="store_true", help='if True will calculate IOU') args = parser.parse_args() torch.set_default_dtype(torch.float32) device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # reload config and model assert os.path.exists(args.model_path), f"can't find model file in: {args.model_path}" base_path, checkpoint = os.path.split(args.model_path) if args.cfg is None: args.cfg = os.path.join(base_path, "config.yml") with open(args.cfg) as f: config = yaml.load(f, Loader=yaml.FullLoader) # init model and optimizer config["batch_size"] = 1024 model, _, _ = load_from_checkpoint(path=args.model_path, config=config, device=device) loader = get_dataloader(args.dataset, -1, -1, config, args.workers, load_seg_masks=args.calc_iou) images_path_list = [os.path.join(args.input, f) for f in os.listdir(args.input) if f.endswith('png') or f.endswith("jpeg") or f.endswith("jpg")] assert len(images_path_list), "Error, no images where loaded" os.makedirs(args.output, exist_ok=True) if args.save_name is None: _, model_name = os.path.split(base_path) save_path = os.path.join(args.output, f"{model_name}_epoch_{checkpoint.split('_')[-1]}_{args.dist_func}") else: save_path = os.path.join(args.output, args.save_name + f"_{args.dist_func}") os.makedirs(save_path + "_results", exist_ok=True) find_nearest_neighbours_from_list(images_path_list, encoder=model.encoder, loader=loader, device=device, project_features=True, dist_func=args.dist_func, save_path=save_path + "_real", dump_raw_results_path=None, compare_segmentation_mask=args.calc_iou) find_nearest_neighbours(images_path_list, encoder=model.encoder, loader=loader, device=device, project_features=True, similarity_func=args.dist_func, save_path=save_path, dump_raw_results_path=save_path + "_results", compare_segmentation_mask=args.calc_iou)
python
20
0.621658
149
50.895623
297
starcoderdata
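For reference, the nearest-neighbour lookup in the sample above reduces to a similarity matrix plus a top-k selection. A minimal NumPy sketch of the same idea (array names and shapes are illustrative assumptions, not taken from the sample):

import numpy as np

def nearest_neighbours(query, dataset, k=5, metric="cosine"):
    # query: (n_query, d), dataset: (n_items, d)
    if metric == "cosine":
        q = query / np.linalg.norm(query, axis=1, keepdims=True)
        d = dataset / np.linalg.norm(dataset, axis=1, keepdims=True)
        sim = q @ d.T
    elif metric == "dot":
        sim = query @ dataset.T
    else:  # negative mean squared error: larger means closer
        sim = -((query[:, None, :] - dataset[None, :, :]) ** 2).mean(-1)
    idx = np.argsort(-sim, axis=1)[:, :k]            # indices of the k most similar items, best first
    scores = np.take_along_axis(sim, idx, axis=1)
    return scores, idx

# toy usage
rng = np.random.default_rng(0)
scores, idx = nearest_neighbours(rng.normal(size=(2, 8)), rng.normal(size=(100, 8)))
print(idx.shape)  # (2, 5)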
<?php namespace app\admin\model; use app\common\model\ModelBs; use app\admin\model\ViewpointCate as MViewpointCate; use app\admin\model\ColumnViewpoint as MColumnViewpoint; class Viewpoint extends ModelBs { protected $levelText = [ 0 => '免费', 1 => '付费', ]; protected $typeText = [ 0=>'普通观点', // 1=>'专题', 2=>'精选' ]; protected $statusText = [ 0=>'草稿', 1=>'发布', 2=>'删除' ]; /** * 获取观点类型的文案 * * @return array * @author liujuneng */ public function getLevelText($level) { return !is_null($level) ? getDataByKey($this->levelText, $level, 0) : $this->levelText; } /** * 获取类型的文案 * @param unknown $type * @return string[]|mixed|array|\ArrayAccess */ public function getTypeText($type) { return !is_null($type) ? getDataByKey($this->typeText, $type, 0) : $this->typeText; } /** * 获取状态的文案 * @param unknown $status * @return string[]|mixed|array|\ArrayAccess */ public function getStatusText($status) { return !is_null($status) ? getDataByKey($this->statusText, $status, 0) : $this->statusText; } /** * 根据观点id获取观点明细 * @param unknown $viewpointId * @return \think\Collection|\think\db\false|PDOStatement|string * @author liujuneng */ public function getViewpointByIdList($viewpointIdList) { $data = $this->where('id', 'in', $viewpointIdList)->select(); return $data; } /** * 根据观点id修改状态,支持批量 * @param unknown $ids * @param unknown $status * @return number|string * @author liujuneng */ public function changeStatusByViewPointIds($ids, $status) { $result = $this->where('id', 'in', $ids)->update(['status' => $status, 'update_time'=>date('Y-m-d H:i:s')]); return $result; } /** * 报错观点编辑数据 * 1.更新观点 * 2.处理观点标签关联 * 2.1 统计标签关联:已存在的观点标签关联,不再创建;统计需要删除的观点标签关联 * 2.2 删除、创建观点标签关联 * @param unknown $viewpointId * @param unknown $updateViewpointData * @param unknown $cateList * @return boolean * @author liujuneng */ public function updateViewpointForEdit($viewpointId, $updateViewpointData, $cateList, $selectColumnList) { $this->db()->startTrans(); $isSuccess = true; try { $result = $this->db()->where('id', $viewpointId)->update($updateViewpointData); //处理观点标签关联 $model = new MViewpointCate(); $model = $model->db(); $deleteViewpointCate = []; $viewpointCateList = $model->where('viewpoint_id', $viewpointId)->where('grade', 2)->select(); foreach ($viewpointCateList as $viewpointCate){ $cateId = $viewpointCate['cate_id']; $grade = $viewpointCate['grade']; if (key_exists($cateId, $cateList)) { //已存在的观点标签关联,不再创建 unset($cateList[$cateId]); }else { //统计需要删除的观点标签关联 $deleteViewpointCate[] = $cateId; } } //删除观点标签关联 if (!empty($deleteViewpointCate)){ $model->where('viewpoint_id', $viewpointId)->where('cate_id', 'in', $deleteViewpointCate)->delete(); } //创建观点标签关联 if (!empty($cateList)) { $viewpointCateListForCreate = []; foreach ($cateList as $cateId=>$grade){ $viewpointCateListForCreate[] = [ 'viewpoint_id'=>$viewpointId, 'cate_id'=>$cateId, 'grade'=>$grade ]; } $model->insertAll($viewpointCateListForCreate); } //处理栏目与观点关联 $model = new MColumnViewpoint(); $deleteColumnIdList = []; $columnViewpointList = $model->where('viewpoint_id', $viewpointId)->column('column_id'); $selectColumnListFlip = array_flip($selectColumnList); foreach ($columnViewpointList as $columnId) { if (in_array($columnId, $selectColumnList)) { //已存在的栏目管理,不再创建 unset($selectColumnList[$selectColumnListFlip[$columnId]]); }else { $deleteColumnIdList[] = $columnId; } } //删除栏目与观点关联 if (!empty($deleteColumnIdList)) { $model->where('viewpoint_id', $viewpointId)->where('column_id', 'in', $deleteColumnIdList)->delete(); } //创建栏目与观点关联 if (!empty($selectColumnList)) { $columnViewpointListForCreate = 
[]; foreach ($selectColumnList as $columnId){ $columnViewpointListForCreate[] = [ 'column_id'=>$columnId, 'viewpoint_id'=>$viewpointId ]; } $model->insertAll($columnViewpointListForCreate); } $this->db()->commit(); } catch (\Exception $e) { $this->db()->rollback(); $isSuccess = false; } return $isSuccess; } /** * 创建观点 * @param unknown $insertData 创建的数据 * @param unknown $cateList 标签信息 * @return number|string */ public function createViewpoint($insertData, $cateList, $selectColumnList) { $this->db()->startTrans(); $viewpointId = 0; try { $viewpointId = $this->db()->insertGetId($insertData); //处理观点标签关联 $model = new MViewpointCate(); $model = $model->db(); //创建观点标签 if (!empty($cateList)) { $viewpointCateListForCreate = []; foreach ($cateList as $cateId=>$grade){ $viewpointCateListForCreate[] = [ 'viewpoint_id'=>$viewpointId, 'cate_id'=>$cateId, 'grade'=>$grade ]; } $model->insertAll($viewpointCateListForCreate); } //创建栏目与观点关联 $model = new MColumnViewpoint(); if (!empty($selectColumnList)) { $columnViewpointListForCreate = []; foreach ($selectColumnList as $columnId){ $columnViewpointListForCreate[] = [ 'column_id'=>$columnId, 'viewpoint_id'=>$viewpointId ]; } $model->insertAll($columnViewpointListForCreate); } $this->db()->commit(); } catch (\Exception $e) { $this->db()->rollback(); $viewpointId = 0; } return $viewpointId; } /** * 获取观点前台详情页url * * @param $courseId * @return mixed * @author liujuneng */ public function getViewpointUrl($viewpointId) { return url('/#/personalCenter/viewpointDetail/' . $viewpointId, '', false, \helper\UrlSys::getWxHost()); } /** * 获取观点信息 * @name getViewPointInfo * @param $viewpointId * @param string $field * @return array|false|\PDOStatement|string|\think\Model * @author Lizhijian */ public function getViewPointInfo($viewpointId, $field = '*'){ return $this->where('id', $viewpointId)->field($field)->find(); } }
php
16
0.622436
110
22.898833
257
starcoderdata
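The updateViewpointForEdit method above reconciles two association tables (viewpoint-category and column-viewpoint) by keeping links that already exist, deleting stale ones, and inserting new ones. The same bookkeeping can be expressed with set differences; a small illustrative Python sketch (not a translation of the PHP model):

def reconcile_links(existing_ids, wanted_ids):
    """Return (to_delete, to_create) so that the stored links become the wanted links."""
    existing, wanted = set(existing_ids), set(wanted_ids)
    to_delete = existing - wanted   # links present in the DB but no longer selected
    to_create = wanted - existing   # links selected now but missing from the DB
    return sorted(to_delete), sorted(to_create)

# toy usage: categories 1 and 2 are stored, the edit form selected 2 and 3
print(reconcile_links([1, 2], [2, 3]))  # ([1], [3])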
export default { header: 'Workflows', addAtmWorkflowSchemaButton: 'Add new workflow', noWorkflowsHeader: 'No workflows', noWorkflowsText: 'This automation inventory does not have any workflows yet. To create one, click the button below.', };
javascript
6
0.780415
119
47.142857
7
starcoderdata
""" Simple subclass of adafruit_display_text's Label; overrides the _update_text method to provide minimally viable bidirectional layout support. Treats x as the origin, so LTR text will start at x and make its way to the right, and RTL text will start at x and make its way to the left. Ideally this should have a width property so it can reset to a right margin for RTL text runs, and a left margin for LTR text runs. For now, set x to 0 for displaying LTR text and set it to a nonzero value (like your display's width or width / 2) for RTL. Also note: instead of allowing inline direction changes, direction changes cause a newline, i.e. a RTL text run inside of a LTR text run will trigger line breaks so each run can appear on its own line. """ import displayio from adafruit_display_text import label class BidiLabel(label.Label): def _update_text(self, new_text): # pylint: disable=too-many-locals x = 0 y = 0 i = 0 direction = 1 old_c = 0 y_offset = int((self.font.get_glyph(ord('M')).height - new_text.count('\n') * self.height * self.line_spacing) / 2) #print("y offset from baseline", y_offset) top = bottom = left = right = 0 for character in new_text: if character == '\n': y += int(self.height * self._line_spacing) x = 0 continue glyph = self.font.get_glyph(ord(character)) if not glyph: continue if direction == -1 and glyph.mirrored: glyph = self.font.get_glyph(glyph.mirrored) if glyph.rtl and direction == 1: if x != 0: y += int(self.height * self._line_spacing) x = 0 direction = -1 elif glyph.ltr and direction == -1: if x != 0: y += int(self.height * self._line_spacing) x = 0 direction = 1 right = max(right, x+glyph.width*direction) if y == 0: # first line, find the Ascender height top = min(top, -glyph.height+y_offset) bottom = max(bottom, y-glyph.dy+y_offset) position_y = y - glyph.height - glyph.dy + y_offset position_x = x + glyph.dx * direction if not self._text or old_c >= len(self._text) or character != self._text[old_c]: try: face = displayio.TileGrid(glyph.bitmap, pixel_shader=self.palette, default_tile=glyph.tile_index, tile_width=glyph.width, tile_height=glyph.height, position=(position_x, position_y)) except TypeError: face = displayio.TileGrid(glyph.bitmap, pixel_shader=self.palette, default_tile=glyph.tile_index, tile_width=glyph.width, tile_height=glyph.height, x=position_x, y=position_y) if i < len(self): self[i] = face else: self.append(face) elif self._text and character == self._text[old_c]: try: self[i].position = (position_x, position_y) except AttributeError: self[i].x = position_x self[i].y = position_y x += glyph.shift_x * direction # TODO skip this for control sequences or non-printables. i += 1 old_c += 1 # skip all non-prinables in the old string while (self._text and old_c < len(self._text) and (self._text[old_c] == '\n' or not self.font.get_glyph(ord(self._text[old_c])))): old_c += 1 # Remove the rest while len(self) > i: self.pop() self._text = new_text self._boundingbox = (left, top, left+right, bottom-top)
python
19
0.521729
99
46.314607
89
starcoderdata
#include #include #include #include #include #include #include #include #include model::Net::Net(){ srand(time(NULL)); }; double model::activationDerivative(double x){ double sech = 1.0 / std::cosh(x); return sech * sech; } double model::activation(double x){ return tanh(x); } model::array softMaxArr(model::array& arr){ model::array sft(arr.size(), 0); double m = *(std::max_element(arr.begin(), arr.end())); double sum = std::accumulate(arr.begin(), arr.end(), 0); const double scale = m + log(sum); for (int i = 0; i < arr.size(); i++) { arr[i] = expf(arr[i] - scale); } return sft; } int maxIdx(model::array& arr){ auto maxel = std::max_element(arr.begin(), arr.end()); return int(maxel - arr.begin()); } void model::Net::add_layer(int size){ int prevSize = 0; if(layers.size() != 0){ prevSize = layers.back()->getSize(); } outLayer = new Layer(size, prevSize); if(layers.size() == 0){ inLayer = outLayer; } layers.push_back(outLayer); } void model::Net::forward(model::array& inVals){ inLayer->set(inVals); for(int i = 1; i < layers.size(); i++){ layers[i]->feed(layers[i-1]); } } /* @params: float trainRate (0 - 1.0), int batchSize */ void model::Net::compile(float trainRate, int batchSize){ this->trainRate = trainRate; this->batchSize = batchSize; } void model::Net::Layer::set(model::array& inVals){ if(inVals.size() != this->getSize()){ throw std::invalid_argument("Layer and data don't fit"); } for(int i = 0; i < this->getSize(); i++){ neurons[i]->setOut(inVals[i]); } } void model::Net::fit(model::Dataset input_data, model::Dataset output_data, int num_epochs){ if(input_data.size() != output_data.size()){ throw std::invalid_argument("Input and output data size must be equal!"); } for(int epoch = 1; epoch <= num_epochs; epoch++){ std::cout << "Epoch: " << epoch << std::string(5 - std::to_string(epoch).length(), ' ' ); for(int i = 0; i < input_data.size(); i++){ forward(input_data[i]); model::array result = getResult(); model::array target = output_data[i]; double error = 0.0; for(int i = 0; i < outLayer->getSize(); i++){ double dlt = pow(target[i] - outLayer->neurons[i]->getVal(), 2); error += dlt; } error /= outLayer->getSize(); //error = sqrt(error); if(i%batchSize == 0){ backpropagate(target); } recent_error = (recent_error * error_smooth + error) / (error_smooth + 1.0); } std::cout << " -- loss " << recent_error << std::endl; } std::cout << std::endl; } void model::Net::evaluate(model::Dataset input_data, model::Dataset output_data){ if(input_data.size() != output_data.size()){ throw std::invalid_argument("Input and output data size must be equal!"); } for(int i = 0; i < input_data.size(); i++){ forward(input_data[i]); model::array result = getResult(); model::array target = output_data[i]; double error = 0.0; for(int i = 0; i < outLayer->getSize(); i++){ double dlt = pow(target[i] - outLayer->neurons[i]->getVal(), 2); error += dlt; } error /= outLayer->getSize(); //error = sqrt(error); recent_error = (recent_error * error_smooth + error) / (error_smooth + 1.0); } std::cout << "Train evaluation error: " << recent_error << std::endl; } void model::Net::backpropagate(model::array target){ outLayer->calcGradientTarget(target); for(int i = layers.size() - 2; i > 0; i--){ Layer* currLayer = layers[i]; Layer* nextLayer = layers[i+1]; currLayer->calcGradient(nextLayer); } for(int i = layers.size() - 1; i > 0; i--){ Layer* currLayer = layers[i]; Layer* prevLayer = layers[i-1]; currLayer->updateWeights(prevLayer, trainRate); } } model::array model::Net::predict(model::array& in){ model::array v; 
forward(in); return getResult(); } model::array model::Net::getResult(){ return outLayer->toArray(); } model::Net::Layer::Layer(int size, int prevSize){ this->size = size; neurons.resize(size); for(int i = 0; i < size; i++){ neurons[i] = new Neuron(0.0, prevSize); } } int model::Net::Layer::getSize(){ return this->size; } void model::Net::Layer::feed(Layer* prev){ for(Neuron* neuron : neurons){ double out = 0; for(int i = 0; i < prev->neurons.size(); i++){ out += prev->neurons[i]->getVal() * neuron->getWeight(i); } out = model::activation(out); neuron->setOut(out); } } void model::Net::Layer::calcGradient(Layer* next){ for(int i = 0; i < neurons.size(); i++){ double grad = next->sumContrib(i) * model::activationDerivative(neurons[i]->getVal()); neurons[i]->setGradient(grad); } } void model::Net::Layer::calcGradientTarget(model::array& target){ for(int i = 0; i < neurons.size(); i++){ double grad = (target[i] - neurons[i]->getVal()) * model::activationDerivative(neurons[i]->getVal()); neurons[i]->setGradient(grad); } } void model::Net::Layer::updateWeights(Layer* prevLayer, float trainRate){ for(Neuron* neuron : neurons){ neuron->updateWeights(prevLayer, trainRate); } } model::array model::Net::Layer::toArray(){ model::array result(neurons.size()); for(int i = 0; i < result.size(); i++){ result[i] = neurons[i]->getVal(); } return result; } model::Net::Layer::Neuron::Neuron(double v, int inSize){ value = v; inputWeights.resize(inSize); for(double& weight : inputWeights){ weight = rand()/double(RAND_MAX); } inputDeltas.resize(inSize, 0); } double model::Net::Layer::Neuron::getVal(){ return value; } double model::Net::Layer::Neuron::getWeight(int i){ return inputWeights[i]; } void model::Net::Layer::Neuron::setOut(double v){ value = v; } void model::Net::Layer::Neuron::setGradient(double g){ gradient = g; } double model::Net::Layer::Neuron::getGradient(){ return gradient; } double model::Net::Layer::sumContrib(int neuronIndex){ double sum = 0; for(int i = 0; i < neurons.size(); i++){ sum += neurons[i]->inputWeights[neuronIndex] * neurons[i]->getGradient(); } return sum; } void model::Net::Layer::Neuron::updateWeights(Layer* prev, float trainRate){ for(int i = 0; i < inputWeights.size(); i++){ double oldDelta = inputDeltas[i]; inputDeltas[i] = trainRate * prev->neurons[i]->getVal() * gradient + momentum * oldDelta; inputWeights[i] += inputDeltas[i]; } } /* Splits input and output data into a trainIn, trainOut, testIn, testOut Returns: a vector of datasets (of size 4), indices 0, 1, 2, 3 are trainIn, trainOut, testIn and testOut respectively */ std::vector model::split(model::Dataset& inData, model::Dataset& outData, float ratio){ if(ratio < 0 || ratio > 1){ throw std::invalid_argument("Ratio must be between 0 and 1"); } int n = int(inData.size() * ratio); std::vector sub_v; sub_v.emplace_back(inData.begin(), inData.begin() + n); sub_v.emplace_back(outData.begin(), outData.begin() + n); sub_v.emplace_back(inData.begin() + n, inData.end()); sub_v.emplace_back(outData.begin() + n, outData.end()); return sub_v; }
c++
18
0.581389
109
28.163569
269
starcoderdata
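The C++ Net above trains with tanh activations, so the derivative used during backpropagation is sech^2(x) = 1 - tanh^2(x), and the output-layer gradient is (target - out) * f'(out). A few lines of NumPy showing that update rule for a single neuron, mirroring calcGradientTarget and updateWeights (learning rate is made up, momentum term omitted):

import numpy as np

def tanh_deriv(x):
    return 1.0 / np.cosh(x) ** 2           # same as 1 - np.tanh(x) ** 2

prev_out = np.array([0.2, -0.5, 0.9])       # outputs of the previous layer
weights = np.array([0.1, 0.4, -0.3])
out = np.tanh(prev_out @ weights)           # neuron output
target = 0.5
grad = (target - out) * tanh_deriv(out)     # output-layer gradient, as in calcGradientTarget
weights += 0.15 * prev_out * grad           # weight update with train rate 0.15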
// import { types as t } from "@babel/core"; const setAttribute = (t, attributes, name, value) => { let isAttributeSet = false; if (!attributes) { attributes = []; } for (let i in attributes) { if (!attributes.hasOwnProperty(i)) { continue; } // Overriding the value if attribute exists if (attributes[i].name.name === name) { attributes[i] = t.JSXAttribute(t.JSXIdentifier(name), t.StringLiteral(value) ); isAttributeSet = true; } } // add new attribute if existing attribute wasn't overriden if (!isAttributeSet) { attributes.unshift( t.JSXAttribute(t.JSXIdentifier(name), t.StringLiteral(value) ) ); } }; const setImageSourceAttribute = (t, attributes) => { if (!attributes) { attributes = []; } for (let i in attributes) { if (!attributes.hasOwnProperty(i)) { continue; } // Overriding the value if attribute exists if (attributes[i].name.name === 'src') { attributes[i] = t.JSXAttribute(t.JSXIdentifier('source'), t.StringLiteral(attributes[i].value.value) ); } } }; const getAttributeValue = (t, attributes, propertyName) => { let propertyValue = null; if (!attributes) { return propertyValue; } for (let i in attributes) { if (!attributes.hasOwnProperty(i)) { continue; } if (attributes[i].name.name === propertyName) { propertyValue = attributes[i].value.value; } } return propertyValue; }; const removeAttribute = (t, attributes, propertyName) => { if (!attributes) { return; } for (let i in attributes) { if (!attributes.hasOwnProperty(i)) { continue; } if (attributes[i].name.name === propertyName) { delete attributes[i]; } } }; const TextTags = [ 'a', 'abbr', 'address', 'b', 'bdi', 'bdo', 'big', 'blockquote', 'br', 'cite', 'code', 'data', 'del', 'dfn', 'em', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'i', 'ins', 'kbd', 'label', 'legend', 'mark', 'marquee', 'meter', 'output', 'p', 'pre', 'q', 'rp', 'rt', 'ruby', 's', 'samp', 'small', 'span', 'strong', 'sub', 'summary', 'sup', 'time', 'u', 'var', 'wbr' ]; const ImageTags = [ 'img' ]; const ViewTags = [ 'area', 'article', 'aside', 'audio', 'base', 'body', 'button', 'canvas', 'caption', 'col', 'colgroup', 'datalist', 'dd', 'details', 'dialog', 'div', 'dl', 'dt', 'embed', 'fieldset', 'figcaption', 'figure', 'footer', 'form', 'head', 'header', 'hgroup', 'hr', 'html', 'iframe', 'input', 'keygen', 'li', 'link', 'main', 'map', 'menu', 'menuitem', 'meta', 'nav', 'noscript', 'object', 'ol', 'optgroup', 'option', 'param', 'picture', 'progress', 'script', 'section', 'select', 'source', 'style', 'table', 'tbody', 'td', 'textarea', 'tfoot', 'th', 'thead', 'title', 'tr', 'track', 'ul', 'video' ]; const SVGTags = [ 'circle', 'clipPath', 'defs', 'ellipse', 'g', 'image', 'line', 'linearGradient', 'mask', 'path', 'pattern', 'polygon', 'polyline', 'radialGradient', 'rect', 'stop', 'svg', 'text', 'tspan' ]; module.exports = function ({types: t }) { const DefaultPrimitives = {}; for (const tag of TextTags) { DefaultPrimitives[tag] = 'Text'; } for (const tag of ImageTags) { DefaultPrimitives[tag] = 'Image'; } for (const tag of ViewTags) { DefaultPrimitives[tag] = 'View'; } return { visitor: { TaggedTemplateExpression(path, state) { const objectName = path.node.tag && path.node.tag.object && path.node.tag.object.name || false; const tagName = path.node.tag && path.node.tag.property && path.node.tag.property.name || false; const Primitives = Object.assign(DefaultPrimitives, state.opts.primitives); const isPrimitiveTag = tagName && Object.keys(Primitives).includes(tagName); const isStyledComponents = objectName && objectName === 'styled'; if (isStyledComponents && isPrimitiveTag) { 
path.node.tag = t.CallExpression(t.Identifier('styled'), [t.Identifier(Primitives[tagName])]); // path.node.tag = t.MemberExpression(t.Identifier('styled'), t.Identifier(Primitives[tagName])); } }, JSXIdentifier(path, state) { const tagNameProp = state.opts.tagNameProp; const primitiveProp = state.opts.primitiveProp; const Primitives = Object.assign(DefaultPrimitives, state.opts.primitives); let tagName = path.node.name, isPrimitiveTag = Object.keys(Primitives).includes(tagName), primitive = null; if (primitiveProp) { primitive = getAttributeValue(t, path.parent.attributes, primitiveProp); removeAttribute(t, path.parent.attributes, primitiveProp); } else if (isPrimitiveTag) { primitive = Primitives[tagName]; } if (primitive !== null) { path.node.name = primitive; if (tagNameProp) { setAttribute(t, path.parent.attributes, tagNameProp, tagName); } if (path.node.name === 'Image') { setImageSourceAttribute(t, path.parent.attributes); } } // Ignore attributes path.stop(); }, JSXText(path, state) { return; if(path.parent.openingElement.name.name !== 'Text') { let text = path.node.value.replace(/^[\s]+/, '').replace(/[\s]+$/, ''); if (text.length > 0) { let prefixWhitespace = path.node.value.replace(/[\s]+$/, '').replace(text, ''); let suffixWhitespace = path.node.value.replace(/^[\s]+/, '').replace(text, ''); path.replaceWithMultiple([ t.JSXText(prefixWhitespace), t.JSXElement( t.JSXOpeningElement( t.JSXIdentifier('Text'), [] ), t.JSXClosingElement( t.JSXIdentifier('Text') ), [t.JSXText(text)] ), t.JSXText(suffixWhitespace) ]); } } } } }; };
javascript
26
0.532057
108
26.655462
238
starcoderdata
#include <iostream>
using namespace std;

int gcd(int a, int b) {
	if (b == 0) return a;
	else return gcd(b, a % b);
}

int main(void) {
	int n, m;
	cin >> n >> m;
	cout << m - gcd(n, m) << endl;
}
c++
9
0.61039
49
14.4
15
starcoderdata
//go:build integration package database import ( "io" "os" "testing" "github.com/zitryss/aye-and-nay/internal/dockertest" "github.com/zitryss/aye-and-nay/pkg/env" "github.com/zitryss/aye-and-nay/pkg/log" ) func TestMain(m *testing.M) { _, err := env.Lookup("CONTINUOUS_INTEGRATION") if err != nil { log.SetOutput(os.Stderr) log.SetLevel(log.Lcritical) docker := dockertest.New() docker.RunMongo() log.SetOutput(io.Discard) code := m.Run() docker.Purge() os.Exit(code) } code := m.Run() os.Exit(code) }
go
9
0.683837
53
17.766667
30
starcoderdata
<?php namespace app\admin\controller; use app\common\controller\Adminbase; use think\Db; class BranchManager extends Adminbase { public function index() { $login = $this->_userinfo; $admin=Db::table('fdz_admin')->where(['companyid'=>$login['companyid']])->where('roleid','neq',1)->select(); foreach ($admin as $k=>$v){ $admin[$k]['auth']=Db::table('fdz_auth_group')->where('id', $v['roleid'])->value('title'); } $this->assign('admin',$admin); return $this->fetch(); } public function log() { $log= Db::view('zlogs') ->view('admin', 'userid,username', 'admin.userid=zlogs.operator')->order('operate_time','desc') ->select(); foreach ($log as $k=>$v){ $log[$k]['cname']=Db::table('fdz_admin')->where('userid', $v['cname'])->value('username'); } $this->assign('log',$log); return $this->fetch(); } public function view() { $login = $this->_userinfo; $user=Db::table('fdz_userlist')->where('frameid',$login['companyid'])-> field(['id','address'])->select(); foreach ($user as $k=>$v){ $user[$k]['url']='http://'.$_SERVER['HTTP_HOST'].'/admin/qrcodes/add?id='.$v['id']; } require'../extend/phpqrcode/phpqrcode.php'; foreach ($user as $k1=>$v1){ $name= parse_url($v1['url'],PHP_URL_QUERY) ; $name= md5(substr_replace($name,'',0,3)); header('Content-Type: image/png'); $qrcode = new \QRcode(); $level = 'L';// 容错级别:L、M、Q、H $size = 4; $data=$v1['url']; $name='./uploads/qrcode/'.$name.'.png'; Db::table('fdz_userlist')->where('id',$v1['id']) ->update(['qrcode' => $name]); $qrcode->png($data, $name, $level, $size); } return redirect('/admin/artificial/gcfx_first'); } //刷新二维码 //签到信息 public function sign() { $userinfo = $this->_userinfo; $where=[]; if(input('name')){ $where[] = ['customer_name','LIKE','%'.input('name').'%']; } if(input('address')){ $where[] = ['address','LIKE','%'.input('address').'%']; } $data=Db::table('fdz_userlist')->where($where)->where('frameid',$userinfo['companyid'])->select(); foreach ($data as $k=>$v) { $data[$k]['user']=Db::table('fdz_register')->where('uid',$v['id'])->select(); } foreach ($data as $k1=>$v1) { if(count($v1['user'])==0){ unset($data[$k1]); }else{ $data[$k1]['count']=count($v1['user']); } } $this->assign('data',$data); return $this->fetch(); } // public function man() { $data=input(); if(!empty($_GET['date'])){ $data=Db::table('fdz_register')->where('uid',$data['id'])->whereBetweenTime('create_time',$data['date'])->select(); }else{ $data=Db::table('fdz_register')->where('uid',$data['id'])->select(); } foreach ($data as $k=>$v) { $data[$k]['type']=Db::name('basis_type_work')->where('id',$v['type'])->value('name'); } $this->assign('data',$data); return $this->fetch(); } // public function signtwo() { $where=[]; if(input('name')){ $where[] = ['name','LIKE','%'.input('name').'%']; } $data=Db::table('fdz_register')->where($where)->select(); foreach ($data as $k=>$v) { $data[$k]['user']=Db::table('fdz_userlist')->where('id',$v['uid'])->find(); $data[$k]['type']=Db::name('basis_type_work')->where('id',$v['type'])->value('name'); } //本公司 $userinfo = $this->_userinfo; foreach ($data as $k1=>$v1) { if($v1['user']['frameid']!=$userinfo['companyid']) { unset($data[$k1]); } } // dump($data); $this->assign('data',$data); return $this->fetch(); } }
php
17
0.463362
127
29.933333
135
starcoderdata
QString Properties::property(const QString &key) const { // Null string indicates the property does not contain the key. if (contains(key)) { QString value = this->value(key); if (value.isNull()) return QString(QLatin1String("")); return value; } if (mpDefaultProperties) return mpDefaultProperties->property(key); return QString(); }
c++
12
0.623762
67
24.3125
16
inline
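Properties::property above looks the key up locally, maps a stored null value to an empty string, and otherwise falls back to a chained default-properties object. A compact Python sketch of that lookup chain (class and attribute names are illustrative):

class Properties:
    def __init__(self, values=None, defaults=None):
        self.values = values or {}        # may map a key to None
        self.defaults = defaults          # optional fallback Properties

    def property(self, key):
        if key in self.values:
            value = self.values[key]
            return "" if value is None else value   # stored null value -> empty string
        if self.defaults is not None:
            return self.defaults.property(key)      # fall back to the chained defaults
        return None                                 # key not present anywhere

# usage: local values win, missing keys fall through to the defaults
defaults = Properties({"color": "red"})
props = Properties({"size": None}, defaults)
print(props.property("size"), props.property("color"), props.property("weight"))  # '' red None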
def __init__(self, host, port, database): self.hall = None self.ambient = None self.line = None self.database = database # Create IP Connection self.connection = IPConnection() # Register IP Connection callbacks self.connection.register_callback(IPConnection.CALLBACK_ENUMERATE, self.cb_enumerate) self.connection.register_callback(IPConnection.CALLBACK_CONNECTED, self.cb_connected) # Connect to brick, will trigger cb_connected self.connection.connect(host, port) self.connection.enumerate()
python
7
0.66723
93
36.0625
16
inline
#!/usr/bin/env python from __future__ import division from myplot.xsection import XSection from myplot.xsection import VectorXSection from myplot.axes3d import Axes3D from psgi.parameterized import state_parser from matplotlib.widgets import Slider from traits.api import HasTraits, Range, Instance, on_trait_change, Array from traitsui.api import View, Item, Group from mayavi.core.api import PipelineBase from mayavi.core.ui.api import MayaviScene, SceneEditor,MlabSceneModel import myplot.topo import mayavi.mlab import modest import numpy as np import pickle import misc import transform as trans import sys sys.path.append('.') import basis Nl = 50 Nw = 50 def slip_vec(x,coeff,strike,dip,seg): s1 = basis.slip(x,coeff[:,0],segment=seg) s2 = basis.slip(x,coeff[:,1],segment=seg) vec = np.array([s1,s2,0*s2]).transpose() argz = np.pi/2.0 - np.pi*strike/180 argx = np.pi/180.0*dip T = trans.point_rotation_x(argx) T += trans.point_rotation_z(argz) vec = T(vec) return vec def slip_mag(x,coeff,seg): rightlateral = basis.slip(x,coeff[:,0],segment=seg) thrust = basis.slip(x,coeff[:,1],segment=seg) return np.sqrt(rightlateral**2 + thrust**2) def view(state,param): param = {i:np.array(v) for i,v in param.iteritems()} #covert lat lon to xyz f = open('basemap.pkl','r') bm = pickle.load(f) f.close() fluidity_transforms = [] x,y = bm(*basis.FLUIDITY_ANCHOR[:2]) length = basis.FLUIDITY_LENGTH width = basis.FLUIDITY_WIDTH thickness = basis.FLUIDITY_THICKNESS t = trans.point_stretch([basis.FLUIDITY_LENGTH, basis.FLUIDITY_THICKNESS, 1.0]) t += trans.point_rotation_x(np.pi/2.0) t += trans.point_translation([0.0,-width/2.0,0.0]) t += trans.point_rotation_z(np.pi/2.0 - basis.FLUIDITY_STRIKE*np.pi/180) t += trans.point_translation([x,y,0.0]) fluidity_transforms += [t] t = trans.point_stretch([basis.FLUIDITY_WIDTH, basis.FLUIDITY_THICKNESS, 1.0]) t += trans.point_rotation_x(np.pi/2.0) t += trans.point_rotation_z(-np.pi/2.0) t += trans.point_translation([basis.FLUIDITY_LENGTH/2.0, 0.0, 0.0]) t += trans.point_rotation_z(np.pi/2.0 - basis.FLUIDITY_STRIKE*np.pi/180) t += trans.point_translation([x,y,0.0]) fluidity_transforms += [t] fault_transforms = basis.FAULT_TRANSFORMS xs1 = XSection(basis.fluidity, f_args=(state['fluidity'][-1],), base_square_y=(-1,0), transforms = fluidity_transforms, clim = param['fluidity_clim']) xs2 = XSection(basis.fluidity, f_args=(state['fluidity'][-1],), base_square_y=(-1,0), transforms = fault_transforms) class InteractiveSlip(HasTraits): #time_index = Range(0,len(state['slip']),0.5) #print(state) time = Range(round(min(state['time']),2),round(max(state['time']),2)) scene = Instance(MlabSceneModel,()) view = View(Item('scene',editor=SceneEditor(scene_class=MayaviScene), height=250,width=300,show_label=False), Group('time'),resizable=True) def __init__(self): #myplot.topo.draw_topography(bm,opacity=0.2) time_index = np.argmin(abs(state['time'][...] - self.time)) slip = np.array(state[str(param['slip_type'])][time_index]) self.xs = () self.vxs = () for i,t in enumerate(fault_transforms): self.xs += XSection(slip_mag, f_args=(slip,i), base_square_y=(-1,0), transforms = [t],clim=param['slip_clim']), self.vxs += VectorXSection(slip_vec, f_args=(slip,basis.FAULT_STRIKE[i],basis.FAULT_DIP[i],i), base_square_y=(-1,0), transforms = [t]), HasTraits.__init__(self) @on_trait_change('time,scene.activated') def update_plot(self): time_index = np.argmin(abs(state['time'][...] 
- self.time)) slip = np.array(state[str(param['slip_type'])][time_index]) for i,t in enumerate(fault_transforms): self.xs[i].set_f_args((slip,i)) self.vxs[i].set_f_args((slip,basis.FAULT_STRIKE[i],basis.FAULT_DIP[i],i)) if self.xs[i]._plots is None: self.xs[i].draw() else: self.xs[i].redraw() if self.vxs[i]._plots is None: self.vxs[i].draw() else: self.vxs[i].redraw() #myplot.topo.draw_topography(bm,opacity=0.2) mayavi.mlab.figure(1) xs1.draw() xs2.draw(color=(0.2,0.2,0.2),opacity=0.5) myplot.topo.draw_topography(bm,opacity=0.2) #mayavi.mlab.figure(2) xs2 = InteractiveSlip() xs2.configure_traits()
python
18
0.595487
92
33.330986
142
starcoderdata
package com.ilad.pageobjecttopq; import java.util.List; import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.testng.Assert; public class TasksPage extends AbstaractTeamWorkPage{ @FindBy(xpath = "//*[@id='liBFOATL']//button") WebElement addTasklistButton; @FindBy(id = "newTaskListName") WebElement newTaslListName; @FindBy(id = "btnCreateTaskList") WebElement btnCreateTasklist; public TasksPage(WebDriver driver) { super(driver); String url = driver.getCurrentUrl(); Assert.assertEquals(url, "https://topq.teamwork.com/projects/145467-webdriver-training/tasks"); } public AddNewTaskListModule clickOnAddTaskListBtnAndAddNewTaskList() throws InterruptedException { Thread.sleep(1000); addTasklistButton.click(); Thread.sleep(1000); return new AddNewTaskListModule(driver); } public TaskListPage clickOnTaskListWithNameAndGoToTaskListPage(String taskListName) throws InterruptedException { Thread.sleep(3000); WebElement myListTag = driver.findElement(By.linkText(taskListName)); myListTag.click(); return new TaskListPage(driver); } public int getNumberOfTasks(){ List taskVerify = driver.findElements(By.xpath("//a[@class='cb']/img")); return taskVerify.size(); } }
java
12
0.774554
114
26.428571
49
starcoderdata
package frc.robot.commands.Feeder; import edu.wpi.first.wpilibj2.command.CommandBase; import frc.robot.subsystems.FeederSubsystem; import frc.robot.subsystems.ShooterSubsystem; public class FeederTurnAuto extends CommandBase { FeederSubsystem m_feeder; ShooterSubsystem m_shooter; public FeederTurnAuto(FeederSubsystem m_feeder, ShooterSubsystem m_shooter) { this.m_feeder = m_feeder; this.m_shooter = m_shooter; } @Override public void initialize() {} @Override public void execute() { if (m_shooter.required_rpm - 10 < m_shooter.getShooterEncoderRPM() && m_shooter.required_rpm + 10 > m_shooter.getShooterEncoderRPM()) { m_feeder.runFeeder(1); } else { m_feeder.runFeeder(0); } } @Override public void end(boolean interrupted) {} @Override public boolean isFinished() { return false; } }
java
11
0.708861
79
23.138889
36
starcoderdata
namespace HtmlGenerator { public class HtmlSelectElement : HtmlElement { public HtmlSelectElement() : base("select") { } public HtmlSelectElement WithAutoFocus() => this.WithAttribute(Attribute.AutoFocus); public HtmlSelectElement WithDisabled() => this.WithAttribute(Attribute.Disabled); public HtmlSelectElement WithForm(string value) => this.WithAttribute(Attribute.Form(value)); public HtmlSelectElement WithMultiple() => this.WithAttribute(Attribute.Multiple); public HtmlSelectElement WithName(string value) => this.WithAttribute(Attribute.Name(value)); public HtmlSelectElement WithRequired() => this.WithAttribute(Attribute.Required); public HtmlSelectElement WithSize(string value) => this.WithAttribute(Attribute.Size(value)); } }
c#
11
0.735981
101
37.909091
22
starcoderdata
# $Filename$ # $Authors$ # Last Changed: $Date$ $Committer$ $Revision-Id$ # Copyright (c) 2003-2011, German Aerospace Center (DLR) # All rights reserved. # #Redistribution and use in source and binary forms, with or without # #modification, are permitted provided that the following conditions are #met: # # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the # distribution. # # * Neither the name of the German Aerospace Center nor the names of # its contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # #THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR #A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT #OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, #SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT #LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, #DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY #THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT #(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE #OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ Connect dialog for entering the url, username, password of the WebDAV Server. """ from PyQt4 import QtCore from PyQt4.QtGui import QDialog from datafinder.gui.gen.user.authentification_connect_dialog_ui import Ui_AuthConnectDialog from datafinder.gui.user.dialogs.authentification_dialog.auth_pref_dialog import AuthPrefDialogView from datafinder.gui.user.dialogs.authentification_dialog.auth_edit_dialog import AuthEditDialogView __version__ = "$Revision-Id:$" class AuthConnectDialogView(QDialog, Ui_AuthConnectDialog): """ The connection dialog is displayed when the datafinder has to establish a connection to a webdav server or any other server needing authentification information. This dialog contains a field for entering a url and authentification credentials such as username and password. """ def __init__(self, preferences=None, parent=None,): """ Constructor. @param parent: Parent window of this L{QtGui.QDialog} @type parent: C{QtGui.QWidget} @param preferences: The preferences object. @type preferences: L{PreferencesHandler """ QDialog.__init__(self, parent) Ui_AuthConnectDialog.__init__(self) self.setupUi(self) self._preferences = preferences self.connect(self.cancelButton, QtCore.SIGNAL("clicked()"), self.reject) self.connect(self.connectButton, QtCore.SIGNAL("clicked()"), self.accept) self.connect(self.urlComboBox, QtCore.SIGNAL("currentIndexChanged(const QString)"), self._urlChangedSlot) self.connect(self.preferencesButton, QtCore.SIGNAL("clicked()"), self._preferencesActionSlot) self.connect(self.editButton, QtCore.SIGNAL("clicked()"), self._editLocationActionSlot) self.uri = preferences.connectionUris def _urlChangedSlot(self, newUri): """ Implementing changing of connection URI. 
""" uri = unicode(newUri) connection = self._preferences.getConnection(uri) if not connection is None: self.username = connection.username self.password = connection.password self.savePasswordFlag = not connection.password is None def _getUrl(self): """ Returns the entered url. @return: The url that was entered in the combobox. @rtype: C{string} """ return unicode(self.urlComboBox.lineEdit().text()) def _setUrl(self, urls): """ Appends urls to the L{QtGui.QComboBox} widget. @param urls: A list of urls that has to be added. @type urls: C{list} """ for url in urls: self.urlComboBox.addItem(url) def _getUsername(self): """ Returns the username that was entered by the user. @return: The username that was entered. @rtype: C{string} """ return unicode(self.usernameLineEdit.text()) def _setUsername(self, username): """ Set a string that in the username field. @param username: The username that has to be in the username field. @type username: C{string} """ self.usernameLineEdit.setText(username or "") def _getPassword(self): """ Returns the password from the password field. @return: Returns the password in the password field. @rtype: C{string} """ return unicode(self.passwordLineEdit.text()) def _setPassword(self, password): """ Sets the password in the password field. @param password: The password that has to be in the password field. @type password: C{string} """ self.passwordLineEdit.setText(password or "") def _getSavePassword(self): """ Returns true when the save password L{QtGui.QCheckBox} is checked else false. @return: True when the L{QtGui.QCheckBox} is checked else False. @rtype: C{boolean} """ return self.savePasswordCheckBox.isChecked() def _setSavePassword(self, checked): """ Set the state of the save password L{QtGui.QCheckBox}. @param checked: True when the L{QtGui.QCheckBox} has to be checked else False. @type checked: C{boolean} """ self.savePasswordCheckBox.setChecked(checked) def _setShowUrl(self, show): """ Show or hide the server groupbox by the given show parameter. @param show: True when the server groupbox has to be shown else False. @type show: C{boolean} """ self.serverGroupBox.setHidden(not show) uri = property(_getUrl, _setUrl) username = property(_getUsername, _setUsername) password = property(_getPassword, _setPassword) savePasswordFlag = property(_getSavePassword, _setSavePassword) showUrl = property(fset=_setShowUrl) def _preferencesActionSlot(self): """ Shows the preferences dialog for connection settings. """ preferencesDialog = AuthPrefDialogView(None, self._preferences) #preferencesDialog.useLdap = self._preferences.useLdap #preferencesDialog.ldapBaseDn = self._preferences.ldapBaseDn #preferencesDialog.ldapServerUri = self._preferences.ldapServerUri preferencesDialog.fillingTable(self._preferences.connectionUris) if preferencesDialog.exec_() == QDialog.Accepted: self._preferences.useLdap = preferencesDialog.useLdap self._preferences.ldapBaseDn = preferencesDialog.ldapBaseDn self._preferences.ldapServerUri = preferencesDialog.ldapServerUri def _editLocationActionSlot(self): """ Shows the edit Loaction dialog for more information on the location settings""" editDialog = AuthEditDialogView (None, self._preferences, self.uri) if editDialog.exec_() == QDialog.Accepted: print "good job"
python
13
0.660003
115
34.324201
219
starcoderdata
package config

import (
	"github.com/micro/go-micro/v3/config"
	"github.com/micro/go-micro/v3/config/reader"
)

// DefaultConfig implementation. Setup in the cmd package, this will
// be refactored following the updated config interface.
var DefaultConfig config.Config

// Bytes representation of config
func Bytes() []byte {
	return DefaultConfig.Bytes()
}

// Get a value at the path
func Get(path ...string) reader.Value {
	return DefaultConfig.Get(path...)
}

// Set the value at a path
func Set(val interface{}, path ...string) {
	DefaultConfig.Set(val, path...)
}

// Delete the value at a path
func Delete(path ...string) {
	DefaultConfig.Del(path...)
}

// Map representation of config
func Map() map[string]interface{} {
	return DefaultConfig.Map()
}

// Scan config into the value provided
func Scan(v interface{}) error {
	return DefaultConfig.Scan(v)
}
go
8
0.727907
68
20.5
40
starcoderdata
/* * This file is part of the Glaziery. * Copyright * * READ README.TXT BEFORE USE!! */ #ifndef __GLAZIERY_INPUTFIELD_H #define __GLAZIERY_INPUTFIELD_H namespace glaziery { class InputField : public Field { private: /** * Whether the content of this input field (text etc.) can be edited. */ bool editable; protected: /** * Creates a new input field. */ InputField(); /** * Destroys the input field. */ virtual ~InputField(); // Runtime class macros require their own public section. public: #if defined(_DEBUG) && (defined(_AFX) || defined(_AFXDLL)) DECLARE_DYNAMIC(InputField); #endif public: /** * Returns whether the ENTER key is entirely consumed by this input field. * If not, it may press the field area's default button. * Defaults to false. * @return Whether the ENTER key is entirely consumed by this input field. */ virtual bool consumingEnter(); /** * Returns whether the content of this input field (text etc.) can be edited. * @return Whether the content of this input field can be edited. */ bool isEditable(); /** * Handles the event that the event target is context-clicked * (usually using the right mouse button). * @param position The position where the context click occurred * relative to the upper-left corner of the event target's origin. * For components, this is their absolute position, * for widgets, this is the absolute position of their component. * @param option1 Whether the option button 1 is currently pressed, * e.g. on Windows systems, this is the SHIFT key. * @param option2 Whether the option button 2 is currently pressed, * e.g. on Windows systems, this is the CTRL key. */ virtual void onContextClick(Vector position, bool option1, bool option2); /** * Handles the event that the ENTER key is pressed on the event target. * @param option1 Whether the option button 1 is currently pressed, * e.g. on Windows systems, this is the SHIFT key. * @param option2 Whether the option button 2 is currently pressed, * e.g. on Windows systems, this is the CTRL key. * @return Whether the event target has consumed the event. * If false, the event is first passed to the desktop * and then to the input manager to increase event quantities. */ virtual bool onEnter(bool option1, bool option2); /** * Handles the event that the input field has been activated by its hot key. * The hot key dispatching has already been performed. * @param direct Whether the pressed hot key is the one of this field. * If false, a preceeding non-input field's hot key (e.g. a label) matched. */ virtual void onMatchedHotKey(bool direct); /** * Sets whether this input field can get focus and therefore * can be edited or selected. Not active fields are usually grayed out. * @param active Whether this input field is active. */ virtual void setActive(bool active); /** * Sets whether the content of this input field (text etc.) can be edited. * @param editable Whether the content of this input field can be edited. */ void setEditable(bool editable); /** * Sets that this input field gets focus. * @note This method does not check whether the field is visible and active. */ void setFocus(); }; } #endif
c
11
0.676856
80
28.869565
115
starcoderdata
package mil.dds.anet.beans.userActivity;

import io.leangen.graphql.annotations.GraphQLQuery;
import java.time.Instant;
import java.util.Comparator;
import java.util.Objects;

public class Activity implements Comparable<Activity> {

  private static final Comparator<Activity> COMPARATOR =
      Comparator.comparing(Activity::getTime).reversed();

  @GraphQLQuery
  private String ip;
  @GraphQLQuery
  private String request;
  @GraphQLQuery
  private Instant time;

  public Activity() {}

  public Activity(String ip, String request, Instant time) {
    this.ip = ip;
    this.request = request;
    this.time = time;
  }

  public String getIp() {
    return ip;
  }

  public void setIp(String ip) {
    this.ip = ip;
  }

  public String getRequest() {
    return request;
  }

  public void setRequest(String request) {
    this.request = request;
  }

  public Instant getTime() {
    return time;
  }

  public void setTime(Instant time) {
    this.time = time;
  }

  @Override
  public int compareTo(Activity o) {
    // Used by Collections.sort() in AdminResource::userActivities
    return COMPARATOR.compare(this, o);
  }

  @Override
  public boolean equals(Object o) {
    if (!(o instanceof Activity)) {
      return false;
    }
    final Activity other = (Activity) o;
    return Objects.equals(ip, other.getIp()) && Objects.equals(request, other.getRequest())
        && Objects.equals(time, other.getTime());
  }

  @Override
  public int hashCode() {
    return Objects.hash(ip, request, time);
  }
}
java
12
0.678988
91
19.837838
74
starcoderdata
def test_shared(self, m_registry, m_delete): """Test that no action is taken on a shared leaf node.""" m_registry.is_shared.return_value = True m_delete.return_value = 0 path = ['Environment', 'Host', 'AptPackage'] stats = {} prune('session', FakeEnvironment(), path, stats) m_delete.assert_not_called() self.assertTrue(stats is not None)
python
8
0.606965
65
43.777778
9
inline
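The test above follows the usual patch-and-assert pattern: collaborators (m_registry, m_delete) are replaced with mocks, their return values are fixed, the code under test runs, and the test asserts which calls did or did not happen. A self-contained sketch of that pattern with unittest.mock (the prune function here is a stand-in, not the one from the sample):

from unittest import mock

def prune(session, environment, path, stats, registry, delete):
    # stand-in for the real prune(): take no action on a shared leaf node
    if registry.is_shared(path[-1]):
        return
    delete(path)

registry = mock.Mock()
registry.is_shared.return_value = True    # pretend the leaf node is shared
delete = mock.Mock(return_value=0)

prune("session", object(), ["Environment", "Host", "AptPackage"], {}, registry, delete)

delete.assert_not_called()                # no deletion happened on a shared node
registry.is_shared.assert_called_once_with("AptPackage")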
# Vicfred # https://atcoder.jp/contests/abc071/tasks/arc081_a # greedy, sorting class Counter(dict): def __missing__(self, i): return 0 n = int(input()) a = list(map(int, input().split())) a.sort() a.reverse() counter = Counter() for i in a: counter[i] += 1 first = -1 second = -1 for item in a: if second != -1: break; if counter[item] >= 2: if first == -1: first = item counter[item] -= 2 elif second == -1: second = item if first != -1 and second != -1: print(first*second) else: print(0)
python
11
0.536503
51
16.848485
33
codenet
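The greedy above scans the stick lengths in descending order and takes the largest value that still has at least two copies as one side, then the next such value as the other side (a value with four or more copies can serve as both sides). A shorter equivalent using collections.Counter (input values are made up):

from collections import Counter

def max_rectangle_area(sticks):
    counts = Counter(sticks)
    sides = []
    for length in sorted(counts, reverse=True):
        pairs = counts[length] // 2           # how many equal pairs this length provides
        sides.extend([length] * pairs)
        if len(sides) >= 2:
            return sides[0] * sides[1]
    return 0

print(max_rectangle_area([5, 5, 3, 3, 2]))    # 15
print(max_rectangle_area([4, 4, 4, 4]))       # 16 (same length used for both sides)
print(max_rectangle_area([1, 2, 3]))          # 0  (no pair at all)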
private boolean isAuthorized(String entityName, Operation operation) { List<Permission> permissions = getPermissions(); if (Permission.hasPermitAll(permissions)) { return true; } if (isGeneralPermissionPresent(operation, permissions)) { // if we are here it means that we have GENERAL ALLOW permission for a given operation (read/write) // since granular permissions have priority over general permissions now we have to look // if there is a GRANULAR DENY permission which discards GENERAL ALLOW AccessMode accessMode = AccessMode.fromRestrictionAndOperation(Restriction.DENY, operation); List<String> deniedNSListsNames = getNSListsNamesByAccessMode(permissions, accessMode); if (!deniedNSListsNames.contains(entityName)) { return true; } } else { AccessMode accessMode = AccessMode.fromRestrictionAndOperation(Restriction.ALLOW, operation); List<String> allowedNSListsNames = getNSListsNamesByAccessMode(permissions, accessMode); if (allowedNSListsNames.contains(entityName)) { return true; } } return false; }
java
10
0.660016
111
45.444444
27
inline
void TestPiiTracking::testLinkedList() { typedef PiiCoordinateTrackerNode<int,2> NodeType; NodeType *n1 = new NodeType(static_cast<NodeType*>(0)), *n2 = new NodeType(static_cast<NodeType*>(n1)), *n3 = new NodeType(static_cast<NodeType*>(n1)), *n4 = new NodeType(static_cast<NodeType*>(n2)), *n5 = new NodeType(static_cast<NodeType*>(n2)), *n6 = new NodeType(static_cast<NodeType*>(n5)); /* Initial situation * * n1--> n2--> n4 * | | * | +--> n5--> n6 * +--> n3 */ QCOMPARE(n1->branches(), 2); QCOMPARE(n2->branches(), 2); QCOMPARE(n3->branches(), 0); QCOMPARE(n4->branches(), 0); QCOMPARE(n5->branches(), 1); QCOMPARE(n6->branches(), 0); delete n4; // deletes only n4 /* Current situation * * n1--> n2 * | | * | +--> n5--> n6 * +--> n3 */ QCOMPARE(n2->branches(), 1); delete n6; // deletes n6, n5, and n2 /* Current situation * * n1 * | * +--> n3 */ QCOMPARE(n1->branches(), 1); delete n3; // deletes n3 and n1 NodeType n7(PiiVector<int,2>(1,2)); NodeType n8(n7); QCOMPARE(n8.measurement()[0],1); QCOMPARE(n8.measurement()[1],2); }
c++
12
0.559149
51
22.058824
51
inline
void handle_write(uint16_t address, uint8_t byte) override { if (address < 0x2000) //Enable external RAM { _ram_enabled = byte == 0x0A; } else if (address < 0x4000) //ROM bank (low 5 bits) { update_byte_with_mask<uint8_t>(_rom_bank_temp, byte, 0b00011111); select_rom_bank(_rom_bank_temp); } else if (address < 0x6000) //ROM bank (high 2 bits) set OR RAM bank { if (_rom_mode) { update_byte_with_mask<uint8_t>(_rom_bank_temp, byte << 5, 0b01100000); select_rom_bank(_rom_bank_temp); } else { select_ram_bank(byte); } } else { _rom_mode = byte == 0; if (_rom_mode) select_ram_bank(0); } }
c++
16
0.593373
74
20.451613
31
inline
func (suite *v1LifecycleTestSuite) TestLaunch() { // generate 25 test tasks numTasks := 25 var launchablePods []*pbhostmgr.LaunchablePod taskInfos := make(map[string]*LaunchableTaskInfo) expectedPodSpecs := make(map[string]*pbpod.PodSpec) for i := 0; i < numTasks; i++ { tmp := createTestTask(i) launchablePod := pbhostmgr.LaunchablePod{ PodId: &peloton.PodID{ Value: tmp.GetRuntime().GetMesosTaskId().GetValue()}, Spec: tmp.Spec, } launchablePods = append(launchablePods, &launchablePod) taskID := tmp.JobId.Value + "-" + fmt.Sprint(tmp.InstanceId) taskInfos[taskID] = tmp expectedPodSpecs[tmp.GetRuntime().GetMesosTaskId().GetValue()] = tmp.Spec } expectedHostname := "host-1" expectedLeaseID := uuid.New() // Capture LaunchPods calls launchedPodSpecMap := make(map[string]*pbpod.PodSpec) gomock.InOrder( // Mock LaunchPods call. suite.mockHostMgr.EXPECT(). LaunchPods( gomock.Any(), gomock.Any()). Do(func(_ context.Context, reqBody interface{}) { req := reqBody.(*v1_hostsvc.LaunchPodsRequest) suite.Equal(req.GetHostname(), expectedHostname) suite.Equal(req.GetLeaseId().GetValue(), expectedLeaseID) for _, lp := range req.GetPods() { launchedPodSpecMap[lp.PodId.GetValue()] = lp.Spec } }). Return(&v1_hostsvc.LaunchPodsResponse{}, nil). Times(1), ) err := suite.lm.Launch( context.Background(), expectedLeaseID, expectedHostname, expectedHostname, taskInfos, nil, ) suite.NoError(err) suite.Equal(launchedPodSpecMap, expectedPodSpecs) }
go
22
0.698137
66
25.862069
58
inline
package hr.fer.zemris.ml.model.data;

import java.util.Arrays;
import java.util.Objects;
import java.util.StringJoiner;

/**
 * Training sample for supervised learning methods. Contains a
 * {@code double array} of features and a target value.
 *
 * @author Dan
 * @param <T> Type of the target value, usually {@code String} for
 *        classification and {@code Double} for function approximation tasks.
 */
public class Sample<T> {

    private double[] features;
    private T target;

    public Sample(double[] features, T target) {
        this.features = Objects.requireNonNull(features);
        this.target = target;
    }

    public int getNumOfFeatures() {
        return features.length;
    }

    public double getFeature(int index) {
        return features[index];
    }

    public double[] getFeatures() {
        return features;
    }

    public T getTarget() {
        return target;
    }

    public void setTarget(T target) {
        this.target = target;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + Arrays.hashCode(features);
        result = prime * result + ((target == null) ? 0 : target.hashCode());
        return result;
    }

    @SuppressWarnings("rawtypes")
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Sample other = (Sample) obj;
        if (!Arrays.equals(features, other.features))
            return false;
        if (target == null) {
            if (other.target != null)
                return false;
        } else if (!target.equals(other.target))
            return false;
        return true;
    }

    @Override
    public String toString() {
        StringJoiner sj = new StringJoiner(",");
        for (double d : features) {
            sj.add(Double.toString(d));
        }
        sj.add(target.toString());
        return sj.toString();
    }
}
java
12
0.646162
77
20.654762
84
starcoderdata
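A short usage sketch for the generic Sample above, assuming only that class; the feature values and targets are made up. It shows a String target for classification, a Double target for function approximation, and the CSV-style row produced by toString:

package hr.fer.zemris.ml.model.data;

// Hedged usage sketch with invented data; relies only on the Sample class above.
public class SampleDemo {
    public static void main(String[] args) {
        Sample<String> labeled = new Sample<>(new double[] {5.1, 3.5, 1.4, 0.2}, "setosa");
        Sample<Double> regression = new Sample<>(new double[] {0.5, 1.5}, 2.25);
        System.out.println(labeled);                        // 5.1,3.5,1.4,0.2,setosa
        System.out.println(regression.getNumOfFeatures());  // 2
    }
}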
TV_HOST_DEVICE_INLINE static uint32_t get_third_bits(uint32_t a) { uint32_t x = a & 0x9249249; // we only look at the first 10 bits x = (x ^ (x >> 2)) & 0x30c30c3; x = (x ^ (x >> 4)) & 0x0300f00f; x = (x ^ (x >> 8)) & 0x30000ff; x = (x ^ (x >> 16)) & 0x000003ff; return x; }
c
10
0.523333
68
36.625
8
inline
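get_third_bits above gathers every third bit of a 3-D Morton (Z-order) code back into a contiguous 10-bit value. The following self-contained Java sketch uses the same masks plus the usual inverse spreading step so the round trip can be checked; it is my own re-implementation for illustration, not the original library's API:

// Hedged sketch: spread 10 bits to every third position, then gather them back.
public class MortonBitsDemo {
    // Spread the low 10 bits so they sit at bit positions 0, 3, 6, ..., 27.
    static int spreadThirdBits(int x) {
        x &= 0x000003ff;
        x = (x | (x << 16)) & 0xff0000ff;
        x = (x | (x << 8))  & 0x0300f00f;
        x = (x | (x << 4))  & 0x030c30c3;
        x = (x | (x << 2))  & 0x09249249;
        return x;
    }

    // Gather every third bit back into the low 10 bits (same masks as the record above).
    static int getThirdBits(int x) {
        x &= 0x09249249;
        x = (x ^ (x >>> 2))  & 0x030c30c3;
        x = (x ^ (x >>> 4))  & 0x0300f00f;
        x = (x ^ (x >>> 8))  & 0x030000ff;
        x = (x ^ (x >>> 16)) & 0x000003ff;
        return x;
    }

    public static void main(String[] args) {
        for (int i = 0; i < 1024; i++) {
            if (getThirdBits(spreadThirdBits(i)) != i) {
                throw new AssertionError("round trip failed at " + i);
            }
        }
        System.out.println("round trip ok for all 10-bit values");
    }
}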
import { connect } from 'react-redux'; import NotesList from '../components/notes-list'; import { getVisibleNotes } from '../redux/selectors'; import { deleteNote, updateNote } from '../redux/actions'; const mSTP = state => ({ notes: getVisibleNotes(state), }); const mDPT = dispatch => ({ onDeleteNote: id => dispatch(deleteNote(id)), onUpdateNote: note => dispatch(updateNote(note)), }); export default connect(mSTP, mDPT)(NotesList);
javascript
10
0.705285
58
29.75
16
starcoderdata
void karmul2_poly_upper(_MIPD_ int n,big *t,big *x,big *y,big *z) { /* n is large and even, and upper half of z is known already */ int m,nd2,nd; nd2=n/2; nd=n; for (m=0;m<nd2;m++) { add2(x[m],x[nd2+m],z[m]); add2(y[m],y[nd2+m],z[nd2+m]); } karmul2_poly(_MIPP_ nd2,&t[nd],z,&z[nd2],t); karmul2_poly(_MIPP_ nd2,&t[nd],x,y,z); /* only 2 karmuls needed! */ for (m=0;m<nd;m++) add2(t[m],z[m],t[m]); for (m=0;m<nd2;m++) { add2(z[nd+m],z[nd+nd2+m],z[nd+m]); add2(z[nd+m],t[nd2+m],z[nd+m]); } for (m=0;m<nd;m++) { add2(t[m],z[nd+m],t[m]); add2(z[nd2+m],t[m],z[nd2+m]); } }
c
11
0.462555
73
22.517241
29
inline
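karmul2_poly_upper applies Karatsuba to polynomials over GF(2), where add2 plays the role of both addition and subtraction, and its comment notes that only two recursive multiplications are needed because the upper half of z is already known. The underlying three-multiplication identity is easier to see over the integers; here is a minimal sketch of that identity (my own illustration, unrelated to the MIRACL code):

import java.math.BigInteger;
import java.util.Random;

public class KaratsubaDemo {
    // One Karatsuba step: three half-size products instead of four.
    static BigInteger karatsuba(BigInteger x, BigInteger y) {
        int n = Math.max(x.bitLength(), y.bitLength());
        if (n <= 64) {
            return x.multiply(y);                 // small enough: multiply directly
        }
        int half = n / 2;
        BigInteger x1 = x.shiftRight(half);
        BigInteger x0 = x.subtract(x1.shiftLeft(half));
        BigInteger y1 = y.shiftRight(half);
        BigInteger y0 = y.subtract(y1.shiftLeft(half));
        BigInteger z2 = karatsuba(x1, y1);
        BigInteger z0 = karatsuba(x0, y0);
        // (x1 + x0)(y1 + y0) - z2 - z0 == x1*y0 + x0*y1, the middle term
        BigInteger z1 = karatsuba(x1.add(x0), y1.add(y0)).subtract(z2).subtract(z0);
        return z2.shiftLeft(2 * half).add(z1.shiftLeft(half)).add(z0);
    }

    public static void main(String[] args) {
        Random rnd = new Random(42);
        BigInteger a = new BigInteger(300, rnd);
        BigInteger b = new BigInteger(300, rnd);
        System.out.println(karatsuba(a, b).equals(a.multiply(b))); // true
    }
}

Over GF(2)[x] the subtractions become XORs, which is why the routine above can express the whole step with add2 alone.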
package com.liminghuang.proxy;

/**
 * ProjectName: data_structure
 * PackageName: com.liminghuang.proxy
 * Description: Static proxy for Subject. The static proxy pattern itself has a big drawback:
 * as the number of methods on the class keeps growing, the amount of code in the proxy class
 * becomes very large.
 *
 * CreateTime: 2017/10/25 19:38
 * Modifier: Adaministrator
 * ModifyTime: 2017/10/25 19:38
 * Comment:
 *
 * @author Adaministrator
 */
public class SubjectProxy implements Subject {
    Subject subImpl = new RealSubject();

    @Override
    public void doSomething(String args) {
        subImpl.doSomething(args);
    }

    @Override
    public void undoSomething(String args) {
        subImpl.undoSomething(args);
    }

    public static void main(String args[]) {
        Subject sub = new SubjectProxy();
        sub.doSomething("my name is huangliming");
    }
}
java
9
0.667526
72
22.515152
33
starcoderdata
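The header comment above points out the weakness of a static proxy: every new method on Subject needs another hand-written forwarding method. A JDK dynamic proxy removes that duplication. The sketch below assumes the Subject and RealSubject types from the record; the logging strings and class name are my own:

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;

public class DynamicSubjectProxy {
    public static void main(String[] args) {
        Subject real = new RealSubject();
        // One handler covers every Subject method, so the proxy no longer grows
        // with the interface the way the hand-written SubjectProxy does.
        InvocationHandler handler = (proxy, method, methodArgs) -> {
            System.out.println("before " + method.getName());
            Object result = method.invoke(real, methodArgs);
            System.out.println("after " + method.getName());
            return result;
        };
        Subject sub = (Subject) Proxy.newProxyInstance(
                Subject.class.getClassLoader(),
                new Class<?>[] {Subject.class},
                handler);
        sub.doSomething("my name is huangliming");
    }
}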
package org.inspector4j.api.configuration;

import org.apache.commons.collections4.IterableUtils;
import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.ConfigProvider;
import org.inspector4j.SecretVisibility;

import java.util.HashMap;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

public class MicroprofileConfigurationProvider implements ConfigurationProvider {

    private final Predicate<String> predicate;

    public MicroprofileConfigurationProvider() {
        Predicate<String> scopePattern = Pattern.compile("org.inspect4j.configuration.*." + InspectorConfiguration.VISIBILITY_FIELD).asPredicate();
        Predicate<String> overridePattern = Pattern.compile("org.inspect4j.configuration.*.allow-runtime-configuration").asPredicate();

        this.predicate = (value) -> {
            if (value == null) {
                return Boolean.FALSE;
            }
            return scopePattern.test(value) || overridePattern.test(value);
        };
    }

    @Override
    public Inspector4JConfiguration toProperties() {
        Config config = ConfigProvider.getConfig();

        Set<String> keys = IterableUtils.toList(config.getPropertyNames()).stream().filter(this.predicate).map(each -> each.substring(each.lastIndexOf(".") + 1)).collect(Collectors.toSet());

        Inspector4JConfiguration configuration = new Inspector4JConfiguration();

        configuration.setRoot(new InspectorConfiguration());
        configuration.getRoot().setVisibility(config.getOptionalValue("org.inspect4j." + InspectorConfiguration.VISIBILITY_FIELD, SecretVisibility.class).orElse(null));
        configuration.getRoot().setAllowRuntimeConfiguration(config.getOptionalValue("org.inspect4j." + InspectorConfiguration.ALLOW_RUNTIME_CONFIGURATION_FIELD, Boolean.class).orElse(null));

        configuration.setChildren(new HashMap<>());

        for (String key : keys) {
            InspectorConfiguration instance = new InspectorConfiguration();

            instance.setVisibility(config.getOptionalValue("org.inspect4j." + key + "." + InspectorConfiguration.VISIBILITY_FIELD, SecretVisibility.class).orElse(null));
            instance.setAllowRuntimeConfiguration(config.getOptionalValue("org.inspect4j." + key + "." + InspectorConfiguration.ALLOW_RUNTIME_CONFIGURATION_FIELD, Boolean.class).orElse(null));

            configuration.getChildren().put(key, instance);
        }

        return configuration;
    }
}
java
17
0.733412
192
45.036364
55
starcoderdata
/*
 * Copyright (C) 2017 FIX94
 *
 * This software may be modified and distributed under the terms
 * of the MIT license. See the LICENSE file for details.
 */
#ifndef _apu_h_
#define _apu_h_

#include <stdint.h>
#include <stdbool.h>

#define NUM_BUFFERS 10

void apuInitBufs();
void apuDeinitBufs();
void apuInit();
bool apuCycle();
void apuClockTimers();
uint8_t *apuGetBuf();
uint32_t apuGetBufSize();
uint32_t apuGetFrequency();
void apuSetReg8(uint16_t addr, uint8_t val);
uint8_t apuGetReg8(uint16_t addr);

typedef struct _envelope_t {
	bool modeadd;
	uint8_t vol;
	uint8_t curVol;
	uint8_t period;
	uint8_t divider;
} envelope_t;

void doEnvelopeLogic(envelope_t *env);

#endif
c
7
0.71988
64
17.444444
36
starcoderdata
import pandas as pd import numpy as np import sys import root_numpy import root_pandas df = root_pandas.read_root("../data/raw/cosmic.0008.03420_03427_v4.root",columns=["Channel", "ADC", "Board", "Nhit", "Asic"]) df.to_csv('../data/raw/cosmic.csv.gz', index=False) del df dfm = root_pandas.read_root("../data/raw/mcmc.root",columns=["Channel", "ADC", "Board", "Nhit", "Asic","Track"]) # no cross talk dfm.to_csv('../data/raw/mcmc_noxtalk.csv.gz', index=False) del dfm dfm2 = root_pandas.read_root("../data/raw/mcmc_xtalk.root",columns=["Channel", "ADC", "Board", "Nhit", "Asic","Track"]) dfm2.to_csv('../data/raw/mcmc_xtalk.csv.gz', index=False) del dfm2
python
8
0.676248
128
33.789474
19
starcoderdata
/* * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 which accompanies this distribution, * and is available at http://www.eclipse.org/legal/epl-v10.html */ package org.opendaylight.controller.netconf.util; import com.google.common.base.Preconditions; import org.opendaylight.controller.netconf.api.NetconfDocumentedException; import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants; import org.opendaylight.controller.netconf.util.xml.XmlElement; import org.opendaylight.controller.netconf.util.xml.XmlUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; public final class NetconfUtil { private static final Logger logger = LoggerFactory.getLogger(NetconfUtil.class); private NetconfUtil() {} public static Document checkIsMessageOk(Document response) throws NetconfDocumentedException { XmlElement element = XmlElement.fromDomDocument(response); Preconditions.checkState(element.getName().equals(XmlNetconfConstants.RPC_REPLY_KEY)); element = element.getOnlyChildElement(); if (element.getName().equals(XmlNetconfConstants.OK)) { return response; } logger.warn("Can not load last configuration. Operation failed."); throw new IllegalStateException("Can not load last configuration. Operation failed: " + XmlUtil.toString(response)); } }
java
11
0.756931
98
40.918919
37
starcoderdata
<?php

namespace App\Http\Controllers;

use Illuminate\Http\Request;

use App\Http\Requests;
use Validator;
use Auth;
use Illuminate\Support\MessageBag;
use Illuminate\Support\Facades\DB;

class LoginController extends Controller
{
    //
    public function getLogin(){
        return view('dangnhap');
    }
    public function postLogin(Request $request){
        $rules=[
            'name' => 'required|min:8',
            'password' => 'required|min:8'
        ];
        $message=[
            'name.required' => 'Đây là trường bắt buộc',
            'name.min' => 'Tên phải chứa ít nhất 8 kí tự',
            'password.required' => 'Đây là trường bắt buộc',
            'password.min' => 'Mật khẩu phải chứa ít nhất 8 kí tự'
        ];
        $validator=Validator::make($request->all(),$rules,$message);
        if($validator->fails())
        {
            return redirect()->back()->withErrors($validator);
        }else{
            $name=$request->name;
            $password=$request->password;
            $arr=[
                'tendangnhap' => $name,
                'matkhau'=> md5($password)
            ];
            if(DB::table('taikhoanlogin')->where($arr)->count()==1){
                return redirect()->intended('/');
            }
            else{
                $errors=new MessageBag(['errorsLogin'=>'Tên đăng nhập hoặc mật khẩu không đúng']);
                return redirect()->back()->withInput()->withErrors($errors);
            }
        }
    }
}
php
19
0.599242
86
22.589286
56
starcoderdata
package top.easyblog.titan.backend.feign.internal;

import okhttp3.OkHttpClient;

import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

/**
 * @author: frank.huang
 * @date: 2021-11-14 20:49
 */
public final class OkHttpClientFactory {

    private static final AtomicReference<OkHttpClient> OK_HTTP_CLIENT = new AtomicReference<>();

    public static OkHttpClient getInstance(long connectTimeout, long writeTimeout, long readTimeout) {
        for (;;) {
            OkHttpClient client = OK_HTTP_CLIENT.get();
            if (Objects.nonNull(client)) {
                return client;
            }
            client = newOkHttpClient(connectTimeout, writeTimeout, readTimeout);
            if (OK_HTTP_CLIENT.compareAndSet(null, client)) {
                return client;
            }
        }
    }

    private static OkHttpClient newOkHttpClient(long connectTimeout, long writeTimeout, long readTimeout) {
        return new OkHttpClient().newBuilder()
                .connectTimeout(connectTimeout, TimeUnit.MILLISECONDS)
                .writeTimeout(writeTimeout, TimeUnit.MILLISECONDS)
                .readTimeout(readTimeout, TimeUnit.MILLISECONDS)
                .build();
    }
}
java
13
0.692537
116
33.358974
39
starcoderdata
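The factory above publishes a single client through an AtomicReference: racing threads may each build a client, but only the first compareAndSet wins and every caller returns that instance. The same lock-free lazy-initialization pattern, generalized as a small sketch (my own helper, not part of the project):

import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

// Hedged sketch: losers of the compareAndSet race retry and return the winner's value.
final class LazyRef<T> {
    private final AtomicReference<T> ref = new AtomicReference<>();
    private final Supplier<T> supplier;

    LazyRef(Supplier<T> supplier) {
        this.supplier = supplier;
    }

    T get() {
        for (;;) {
            T value = ref.get();
            if (value != null) {
                return value;
            }
            value = supplier.get();                  // may be computed by several racers
            if (ref.compareAndSet(null, value)) {
                return value;                        // this thread's instance won
            }
            // else: another thread published first; loop and return its instance
        }
    }
}

As in the factory, a losing thread's freshly built value is simply discarded, which is acceptable when construction is cheap and the object needs no explicit cleanup.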
var express = require("express"); var router = express.Router(); const User = require("../../../models/User"); const Profile = require("../../../models/Profile"); const Authentication = require("../../../middlewares/Authentication"); router.get("/all", Authentication.ADMIN, (req, res, next) => { Profile.find() .populate("user") .then(profile => { return res.render("admin/listUser", { profiles: profile, total: profile.length }); }) .catch(err => res.status(404).json({ noFindFounds: "No find posts found." }) ); }); //@route GET admin/manager //@desc Get all finds //@access Public router.get("/:id", Authentication.ADMIN, (req, res, next) => { User.findById(req.params.id) .then(find => { if (find.length === 0) { res.status(404).json({ noFindPost: "No find post found." }); } res.json(find); }) .catch(err => res.status(404).json({ noFindFounds: "No find posts found." }) ); }); const ROLE = require("../../../constants/roleConstrants"); //@route GET admin/manager //@desc Get all finds //@access Public router.post("/up/:id", Authentication.ADMIN, (req, res, next) => { //Update User.findOneAndUpdate( { _id: req.params.id }, { $set: { role: ROLE.REQUIRE_ADMIN } }, { new: true } ).then(() => res.redirect("/admin/manager/all")); }); router.post("/down/:id", Authentication.ADMIN, (req, res, next) => { //Update User.findOneAndUpdate( { _id: req.params.id }, { $set: { role: ROLE.REQUIRE_MEMBER } }, { new: true } ).then(() => res.redirect("/admin/manager/all")); }); //@route DELETE admin/manager/:id //@desc DELETE find by id //@access Public router.post("/delete/:id", Authentication.ADMIN, (req, res, next) => { Profile.findOneAndRemove({ user: req.params.id }).then(() => { User.findOneAndRemove({ _id: req.params.id }).then(() => { res.redirect("/admin/manager/all"); }); }); }); module.exports = router;
javascript
21
0.592052
70
29.121212
66
starcoderdata
/*********************************************************************************************************************** * DISCLAIMER * This software is supplied by Renesas Electronics Corporation and is only * intended for use with Renesas products. No other uses are authorized. This * software is owned by Renesas Electronics Corporation and is protected under * all applicable laws, including copyright laws. * THIS SOFTWARE IS PROVIDED "AS IS" AND RENESAS MAKES NO WARRANTIES REGARDING * THIS SOFTWARE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING BUT NOT * LIMITED TO WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE * AND NON-INFRINGEMENT. ALL SUCH WARRANTIES ARE EXPRESSLY DISCLAIMED. * TO THE MAXIMUM EXTENT PERMITTED NOT PROHIBITED BY LAW, NEITHER RENESAS * ELECTRONICS CORPORATION NOR ANY OF ITS AFFILIATED COMPANIES SHALL BE LIABLE * FOR ANY DIRECT, INDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES FOR * ANY REASON RELATED TO THIS SOFTWARE, EVEN IF RENESAS OR ITS AFFILIATES HAVE * BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. * Renesas reserves the right, without notice, to make changes to this software * and to discontinue the availability of this software. By using this software, * you agree to the additional terms and conditions found by accessing the * following link: * http://www.renesas.com/disclaimer * * Copyright (C) 2010, 2011 Renesas Electronics Corporation. All rights reserved. ***********************************************************************************************************************/ /*********************************************************************************************************************** * File Name : r_cg_intc.c * Version : Applilet3 for RL78/G13 V1.03.01 [11 Oct 2011] * Device(s) : R5F100EA * Tool-Chain : CA78K0R * Description : This file implements device driver for INTC module. * Creation Date: 2/20/2013 ***********************************************************************************************************************/ /*********************************************************************************************************************** Pragma directive ***********************************************************************************************************************/ /* Start user code for pragma. Do not edit comment generated here */ /* End user code. Do not edit comment generated here */ /*********************************************************************************************************************** Includes ***********************************************************************************************************************/ #include "r_cg_macrodriver.h" #include "r_cg_intc.h" /* Start user code for include. Do not edit comment generated here */ /* End user code. Do not edit comment generated here */ #include "r_cg_userdefine.h" /*********************************************************************************************************************** Global variables and functions ***********************************************************************************************************************/ /* Start user code for global. Do not edit comment generated here */ /* End user code. Do not edit comment generated here */ /*********************************************************************************************************************** * Function Name: R_INTC_Create * Description : This function initializes INTP module. 
* Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_INTC_Create(void) { PMK0 = 1U; /* disable INTP0 operation */ PIF0 = 0U; /* clear INTP0 interrupt flag */ PMK1 = 1U; /* disable INTP1 operation */ PIF1 = 0U; /* clear INTP1 interrupt flag */ PMK2 = 1U; /* disable INTP2 operation */ PIF2 = 0U; /* clear INTP2 interrupt flag */ PMK3 = 1U; /* disable INTP3 operation */ PIF3 = 0U; /* clear INTP3 interrupt flag */ PMK4 = 1U; /* disable INTP4 operation */ PIF4 = 0U; /* clear INTP4 interrupt flag */ PMK5 = 1U; /* disable INTP5 operation */ PIF5 = 0U; /* clear INTP5 interrupt flag */ /* Set INTP0 low priority */ PPR10 = 1U; PPR00 = 1U; /* Set INTP1 low priority */ PPR11 = 1U; PPR01 = 1U; /* Set INTP2 low priority */ PPR12 = 1U; PPR02 = 1U; /* Set INTP3 low priority */ PPR13 = 1U; PPR03 = 1U; EGP0 = _01_INTP0_EDGE_RISING_SEL | _02_INTP1_EDGE_RISING_SEL | _04_INTP2_EDGE_RISING_SEL | _08_INTP3_EDGE_RISING_SEL; /* Set INTP1 pin */ PM5 |= 0x01U; /* Set INTP2 pin */ PM5 |= 0x02U; /* Set INTP3 pin */ PM3 |= 0x01U; } /*********************************************************************************************************************** * Function Name: R_INTC0_Start * Description : This function clears INTP0 interrupt flag and enables interrupt. * Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_INTC0_Start(void) { PIF0 = 0U; /* clear INTP0 interrupt flag */ PMK0 = 0U; /* enable INTP0 interrupt */ } /*********************************************************************************************************************** * Function Name: R_INTC0_Stop * Description : This function disables INTP0 interrupt and clears interrupt flag. * Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_INTC0_Stop(void) { PMK0 = 1U; /* disable INTP0 interrupt */ PIF0 = 0U; /* clear INTP0 interrupt flag */ } /*********************************************************************************************************************** * Function Name: R_INTC1_Start * Description : This function clears INTP1 interrupt flag and enables interrupt. * Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_INTC1_Start(void) { PIF1 = 0U; /* clear INTP1 interrupt flag */ PMK1 = 0U; /* enable INTP1 interrupt */ } /*********************************************************************************************************************** * Function Name: R_INTC1_Stop * Description : This function disables INTP1 interrupt and clears interrupt flag. * Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_INTC1_Stop(void) { PMK1 = 1U; /* disable INTP1 interrupt */ PIF1 = 0U; /* clear INTP1 interrupt flag */ } /*********************************************************************************************************************** * Function Name: R_INTC2_Start * Description : This function clears INTP2 interrupt flag and enables interrupt. 
* Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_INTC2_Start(void) { PIF2 = 0U; /* clear INTP2 interrupt flag */ PMK2 = 0U; /* enable INTP2 interrupt */ } /*********************************************************************************************************************** * Function Name: R_INTC2_Stop * Description : This function disables INTP2 interrupt and clears interrupt flag. * Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_INTC2_Stop(void) { PMK2 = 1U; /* disable INTP2 interrupt */ PIF2 = 0U; /* clear INTP2 interrupt flag */ } /*********************************************************************************************************************** * Function Name: R_INTC3_Start * Description : This function clears INTP3 interrupt flag and enables interrupt. * Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_INTC3_Start(void) { PIF3 = 0U; /* clear INTP3 interrupt flag */ PMK3 = 0U; /* enable INTP3 interrupt */ } /*********************************************************************************************************************** * Function Name: R_INTC3_Stop * Description : This function disables INTP3 interrupt and clears interrupt flag. * Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_INTC3_Stop(void) { PMK3 = 1U; /* disable INTP3 interrupt */ PIF3 = 0U; /* clear INTP3 interrupt flag */ } /*********************************************************************************************************************** * Function Name: R_KEY_Create * Description : This function initializes the key return module. * Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_KEY_Create(void) { volatile uint8_t w_count; KRMK = 1U; /* disable INTKR operation */ KRIF = 0U; /* clear INTKR interrupt flag */ /* Set INTKR low priority */ KRPR1 = 1U; KRPR0 = 1U; KRM = _01_KR0_SIGNAL_DETECT_ON | _02_KR1_SIGNAL_DETECT_ON | _04_KR2_SIGNAL_DETECT_ON | _08_KR3_SIGNAL_DETECT_ON; /* Set KR0 pin */ PM7 |= 0x01U; /* Set KR1 pin */ PM7 |= 0x02U; /* Set KR2 pin */ PM7 |= 0x04U; /* Set KR3 pin */ PM7 |= 0x08U; /* Wait 250ns */ for (w_count = 0U; w_count < KEY_WAITTIME; w_count++) { NOP(); } } /*********************************************************************************************************************** * Function Name: R_KEY_Start * Description : This function clears INTKR interrupt flag and enables interrupt. * Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_KEY_Start(void) { KRIF = 0U; /* clear INTKR interrupt flag */ KRMK = 0U; /* enable INTKR operation */ } /*********************************************************************************************************************** * Function Name: R_KEY_Stop * Description : This function disables INTKR interrupt and clears interrupt flag. 
* Arguments : None * Return Value : None ***********************************************************************************************************************/ void R_KEY_Stop(void) { KRMK = 1U; /* disable INTKR operation */ KRIF = 0U; /* clear INTKR interrupt flag */ } /* Start user code for adding. Do not edit comment generated here */ /* End user code. Do not edit comment generated here */
c
8
0.423617
120
43.748
250
starcoderdata
'use strict'; let util = require('util'); console.log(`require.cache: ${util.inspect(require.cache, {depth: null, showHidden: true})}`); console.log('__filename in ./app.js', __filename); console.log('__dirname in ./app.js', __dirname); console.log('require.resolve("./console.js")', require.resolve('./console.js')); // console.log('process.argv', process.argv); // console.log('process.env', process.env);
javascript
7
0.702174
94
50.222222
9
starcoderdata