max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
598
<reponame>yangboz/maro # Copyright (c) Microsoft Corporation. # Licensed under the MIT license. from enum import Enum class DriverType(Enum): """Communication driver categories. - ZMQ: The communication driver mode based on ``ZMQ``. """ ZMQ = "zmq_driver"
93
2,151
<reponame>X018/CCTOOL /**************************************************************************** **************************************************************************** *** *** This header was automatically generated from a Linux kernel header *** of the same name, to make information necessary for userspace to *** call into the kernel available to libc. It contains only constants, *** structures, and macros generated from the original header, and thus, *** contains no copyrightable information. *** *** To edit the content of this header, modify the corresponding *** source file (e.g. under external/kernel-headers/original/) then *** run bionic/libc/kernel/tools/update_all.py *** *** Any manual change here will be lost the next time this script will *** be run. You've been warned! *** **************************************************************************** ****************************************************************************/ #ifndef SCSI_NETLINK_FC_H #define SCSI_NETLINK_FC_H #include <scsi/scsi_netlink.h> #define FC_NL_ASYNC_EVENT 0x0100 #define FC_NL_MSGALIGN(len) (((len) + 7) & ~7) struct fc_nl_event { struct scsi_nl_hdr snlh; uint64_t seconds; uint64_t vendor_id; uint16_t host_no; uint16_t event_datalen; uint32_t event_num; uint32_t event_code; uint32_t event_data; } __attribute__((aligned(sizeof(uint64_t)))); #endif
396
1,743
<reponame>fordlaturnas/aws-serverless-airline-booking { "query": "query getLoyalty($customer: String) { getLoyalty(customer: $customer) { points level remainingPoints }}", "variables": {} }
83
662
package com.alibaba.rsocket.reactive; import com.alibaba.rsocket.MutableContext; import org.jetbrains.annotations.Nullable; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import java.util.concurrent.CompletableFuture; /** * Reactive Adapter for CompletableFuture * * @author leijuan */ @SuppressWarnings({"unchecked", "rawtypes"}) public class ReactiveAdapterFuture implements ReactiveAdapter { private static ReactiveAdapterFuture instance = new ReactiveAdapterFuture(); public static ReactiveAdapterFuture getInstance() { return instance; } @Override public <T> Mono<T> toMono(@Nullable Object source) { if (source == null) { return Mono.empty(); } else { return Mono.fromFuture((CompletableFuture) source); } } @Override public <T> Flux<T> toFlux(@Nullable Object source) { return (Flux<T>) source; } @Override public Object fromPublisher(Mono<?> mono, Class<?> returnType, MutableContext mutableContext) { return mono.toFuture(); } @Override public Object fromPublisher(Flux<?> flux, Class<?> returnType, MutableContext mutableContext) { return flux.subscriberContext(mutableContext::putAll); } @Override public Object fromPublisher(Flux<?> flux, Class<?> returnType) { return flux; } }
508
470
<gh_stars>100-1000 DISABLE_ALL_COLORS = False ASCII_ONLY = False # See the safe_string function in wgwidget. At the moment the encoding is not safe
55
4,492
# This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. import binascii import pytest from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.kdf.hkdf import HKDF from .utils import wycheproof_tests _HASH_ALGORITHMS = { "HKDF-SHA-1": hashes.SHA1(), "HKDF-SHA-256": hashes.SHA256(), "HKDF-SHA-384": hashes.SHA384(), "HKDF-SHA-512": hashes.SHA512(), } @wycheproof_tests( "hkdf_sha1_test.json", "hkdf_sha256_test.json", "hkdf_sha384_test.json", "hkdf_sha512_test.json", ) def test_hkdf(backend, wycheproof): hash_algo = _HASH_ALGORITHMS[wycheproof.testfiledata["algorithm"]] if wycheproof.invalid: with pytest.raises(ValueError): HKDF( algorithm=hash_algo, length=wycheproof.testcase["size"], salt=binascii.unhexlify(wycheproof.testcase["salt"]), info=binascii.unhexlify(wycheproof.testcase["info"]), backend=backend, ) return h = HKDF( algorithm=hash_algo, length=wycheproof.testcase["size"], salt=binascii.unhexlify(wycheproof.testcase["salt"]), info=binascii.unhexlify(wycheproof.testcase["info"]), backend=backend, ) result = h.derive(binascii.unhexlify(wycheproof.testcase["ikm"])) assert result == binascii.unhexlify(wycheproof.testcase["okm"])
706
319
package com.shapps.mintubeapp; /** * Created by shyam on 19/2/16. */ public class JavaScript { public static String loadVideoScript(String vId){ return "javascript:player.loadVideoById(\"" + vId + "\");"; } public static String playVideoScript() { return "javascript:player.playVideo();"; } public static String pauseVideoScript() { return "javascript:player.pauseVideo();"; } public static String onPlayerStateChangeListener() { return "javascript:" + "player.addEventListener(\"onStateChange\", \"onPlayerStateChange\");"+ "function onPlayerStateChange(event) {\n" + " window.Interface.showPlayerState(player.getPlayerState());\n" + " }"; } public static String loadPlaylistScript(String pId) { return "javascript:player.loadPlaylist({list:\"" + pId + "\"});"; } public static String nextVideo() { return "javascript:player.nextVideo()"; } public static String prevVideo() { return "javascript:player.previousVideo()"; } public static String getVidUpdateNotiContent() { return "javascript:window.Interface.showVID(player.getVideoData()['video_id']);"; } public static String seekToZero() { return "javascript:player.seekTo(0)"; } public static String setLoopPlaylist() { return "javascript:player.setLoop(true)"; } public static String unsetLoopPlaylist() { return "javascript:player.setLoop(false)"; } public static String replayPlaylistScript() { return "javascript:player.playVideoAt(0)"; } public static String isPlaylistEnded() { return "javascript:window.Interface.playlistItems(player.getPlaylist());" + "window.Interface.currVidIndex(player.getPlaylistIndex());"; } public static String resetPlaybackQuality(String quality) { return "javascript:player.setPlaybackQuality(\"" + quality + "\");"; } public static String getVideosInPlaylist() { return "javascript:window.Interface.videosInPlaylist(player.getPlaylist());"; } }
817
5,250
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.job.service.impl; import java.util.Collection; import java.util.List; import org.flowable.common.engine.api.FlowableIllegalArgumentException; import org.flowable.common.engine.api.delegate.event.FlowableEngineEventType; import org.flowable.job.api.DeadLetterJobQuery; import org.flowable.job.api.HistoryJobQuery; import org.flowable.job.api.JobQuery; import org.flowable.job.api.SuspendedJobQuery; import org.flowable.job.api.TimerJobQuery; import org.flowable.job.service.JobService; import org.flowable.job.service.JobServiceConfiguration; import org.flowable.job.service.event.impl.FlowableJobEventBuilder; import org.flowable.job.service.impl.persistence.entity.AbstractRuntimeJobEntity; import org.flowable.job.service.impl.persistence.entity.DeadLetterJobEntity; import org.flowable.job.service.impl.persistence.entity.DeadLetterJobEntityManager; import org.flowable.job.service.impl.persistence.entity.ExternalWorkerJobEntity; import org.flowable.job.service.impl.persistence.entity.JobEntity; import org.flowable.job.service.impl.persistence.entity.JobEntityManager; import org.flowable.job.service.impl.persistence.entity.SuspendedJobEntity; import org.flowable.job.service.impl.persistence.entity.SuspendedJobEntityManager; /** * @author <NAME> */ public class JobServiceImpl extends ServiceImpl implements JobService { public JobServiceImpl(JobServiceConfiguration jobServiceConfiguration) { super(jobServiceConfiguration); 
} @Override public JobQuery createJobQuery() { return new JobQueryImpl(getCommandExecutor(), configuration); } @Override public TimerJobQuery createTimerJobQuery() { return new TimerJobQueryImpl(getCommandExecutor(), configuration); } @Override public SuspendedJobQuery createSuspendedJobQuery() { return new SuspendedJobQueryImpl(getCommandExecutor(), configuration); } @Override public DeadLetterJobQuery createDeadLetterJobQuery() { return new DeadLetterJobQueryImpl(getCommandExecutor(), configuration); } @Override public HistoryJobQuery createHistoryJobQuery() { return new HistoryJobQueryImpl(getCommandExecutor(), configuration); } @Override public void scheduleAsyncJob(JobEntity job) { getJobManager().scheduleAsyncJob(job); } @Override public JobEntity findJobById(String jobId) { return getJobEntityManager().findById(jobId); } @Override public List<JobEntity> findJobsByExecutionId(String executionId) { return getJobEntityManager().findJobsByExecutionId(executionId); } @Override public List<SuspendedJobEntity> findSuspendedJobsByExecutionId(String executionId) { return getSuspendedJobEntityManager().findJobsByExecutionId(executionId); } @Override public List<DeadLetterJobEntity> findDeadLetterJobsByExecutionId(String executionId) { return getDeadLetterJobEntityManager().findJobsByExecutionId(executionId); } @Override public List<ExternalWorkerJobEntity> findExternalWorkerJobsByExecutionId(String executionId) { return getExternalWorkerJobEntityManager().findJobsByExecutionId(executionId); } @Override public List<JobEntity> findJobsByProcessInstanceId(String processInstanceId) { return getJobEntityManager().findJobsByProcessInstanceId(processInstanceId); } @Override public List<SuspendedJobEntity> findSuspendedJobsByProcessInstanceId(String processInstanceId) { return getSuspendedJobEntityManager().findJobsByProcessInstanceId(processInstanceId); } @Override public List<DeadLetterJobEntity> findDeadLetterJobsByProcessInstanceId(String processInstanceId) { return 
getDeadLetterJobEntityManager().findJobsByProcessInstanceId(processInstanceId); } @Override public void updateAllJobTypesTenantIdForDeployment(String deploymentId, String newTenantId) { getJobEntityManager().updateJobTenantIdForDeployment(deploymentId, newTenantId); getTimerJobEntityManager().updateJobTenantIdForDeployment(deploymentId, newTenantId); getSuspendedJobEntityManager().updateJobTenantIdForDeployment(deploymentId, newTenantId); getDeadLetterJobEntityManager().updateJobTenantIdForDeployment(deploymentId, newTenantId); } @Override public AbstractRuntimeJobEntity activateSuspendedJob(SuspendedJobEntity job) { if (configuration.getJobParentStateResolver().isSuspended(job)) { throw new FlowableIllegalArgumentException("Can not activate job "+ job.getId() +". Parent is suspended."); } return getJobManager().activateSuspendedJob(job); } @Override public SuspendedJobEntity moveJobToSuspendedJob(AbstractRuntimeJobEntity job) { return getJobManager().moveJobToSuspendedJob(job); } @Override public AbstractRuntimeJobEntity moveJobToDeadLetterJob(AbstractRuntimeJobEntity job) { return getJobManager().moveJobToDeadLetterJob(job); } @Override public JobEntity createJob() { return getJobEntityManager().create(); } @Override public void createAsyncJob(JobEntity job, boolean isExclusive) { getJobManager().createAsyncJob(job, isExclusive); } @Override public void createAsyncJobNoTriggerAsyncExecutor(JobEntity job, boolean isExclusive) { getJobManager().createAsyncJob(job, isExclusive); job.setLockExpirationTime(null); job.setLockOwner(null); } @Override public void insertJob(JobEntity job) { getJobEntityManager().insert(job); } @Override public ExternalWorkerJobEntity createExternalWorkerJob() { return getExternalWorkerJobEntityManager().create(); } @Override public void insertExternalWorkerJob(ExternalWorkerJobEntity job) { getExternalWorkerJobEntityManager().insert(job); } @Override public DeadLetterJobEntity createDeadLetterJob() { return 
getDeadLetterJobEntityManager().create(); } @Override public void insertDeadLetterJob(DeadLetterJobEntity deadLetterJob) { getDeadLetterJobEntityManager().insert(deadLetterJob); } @Override public void updateJob(JobEntity job) { getJobEntityManager().update(job); } @Override public void deleteJob(String jobId) { getJobEntityManager().delete(jobId); } @Override public void deleteJob(JobEntity job) { getJobEntityManager().delete(job); } @Override public void deleteJobsByExecutionId(String executionId) { JobEntityManager jobEntityManager = getJobEntityManager(); Collection<JobEntity> jobsForExecution = jobEntityManager.findJobsByExecutionId(executionId); for (JobEntity job : jobsForExecution) { getJobEntityManager().delete(job); if (getEventDispatcher() != null && getEventDispatcher().isEnabled()) { getEventDispatcher().dispatchEvent(FlowableJobEventBuilder.createEntityEvent( FlowableEngineEventType.JOB_CANCELED, job), configuration.getEngineName()); } } } @Override public void deleteSuspendedJobsByExecutionId(String executionId) { SuspendedJobEntityManager suspendedJobEntityManager = getSuspendedJobEntityManager(); Collection<SuspendedJobEntity> suspendedJobsForExecution = suspendedJobEntityManager.findJobsByExecutionId(executionId); for (SuspendedJobEntity job : suspendedJobsForExecution) { suspendedJobEntityManager.delete(job); if (getEventDispatcher() != null && getEventDispatcher().isEnabled()) { getEventDispatcher().dispatchEvent(FlowableJobEventBuilder.createEntityEvent( FlowableEngineEventType.JOB_CANCELED, job), configuration.getEngineName()); } } } @Override public void deleteDeadLetterJobsByExecutionId(String executionId) { DeadLetterJobEntityManager deadLetterJobEntityManager = getDeadLetterJobEntityManager(); Collection<DeadLetterJobEntity> deadLetterJobsForExecution = deadLetterJobEntityManager.findJobsByExecutionId(executionId); for (DeadLetterJobEntity job : deadLetterJobsForExecution) { deadLetterJobEntityManager.delete(job); if (getEventDispatcher() 
!= null && getEventDispatcher().isEnabled()) { getEventDispatcher().dispatchEvent(FlowableJobEventBuilder.createEntityEvent( FlowableEngineEventType.JOB_CANCELED, job), configuration.getEngineName()); } } } }
3,234
563
package com.gentics.mesh.madl.frame; import com.gentics.mesh.madl.traversal.TraversalResult; public interface EdgeFrame extends ElementFrame, com.syncleus.ferma.EdgeFrame { /** * Return the label of the edge. * * @return */ default String label() { //TODO Move this to edge frame instead return getLabel(); } <T extends VertexFrame> TraversalResult<? extends T> outV(Class<T> clazz); <T extends VertexFrame> TraversalResult<? extends T> inV(Class<T> clazz); }
172
520
/* * Copyright (C) 2012 Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.lifecycle; /** * allows for objects to register shutdown hooks used instead of direct shutdown hooks in order to * manage the order they are called * * <p>The idea here is that on startup, we register the Thread this provides as a hook in case the * JVM is killed. If the fb303.shutdown() is called, this will execute and remove itself from the * jvm shutdown hooks */ public interface ShutdownManager<T extends Comparable> { public boolean tryAddShutdownHook(Runnable hook); /** * attempt to add a shutdown hook to be run at shutdown * * @param stage - stage to add the hook to * @param hook - hook to run * @return true if the hook was added, false if we are already shutting down and the hook was not * added */ public boolean tryAddShutdownHook(T stage, Runnable hook); public void addShutdownHook(Runnable hook); /** * add a hook to a given stage * * @param stage - stage to add the hook to * @param hook - hook to run * @throws IllegalArgumentException if shutdown has started and we have passed the specified * stage. */ public void addShutdownHook(T stage, Runnable hook); public void shutdown(); public Thread getAsThread(); }
512
371
package com.rnglmodelview.exceptions; public class PrimitiveTypeNotSupportedException extends ModelObjectNotSupportedException { public PrimitiveTypeNotSupportedException(String message) { super(message); } }
57
1,179
<reponame>solider245/OpenData # encoding: utf-8 from opendatatools.common import RestAgent import pandas as pd import json class EastMoneyAgent(RestAgent): def __init__(self): RestAgent.__init__(self) # direct: north, south def _get_realtime_moneyflow(self, direct): url = 'http://ff.eastmoney.com/EM_CapitalFlowInterface/api/js?id=%s&type=EFR&rtntype=2&acces_token=1942f5da9b46b069953c873404aad4b5&js={"data":[(x)]}' % direct response = self.do_request(url) if response is None: return None jsonobj = json.loads(response) result = [] for data in jsonobj['data']: items = data.split(',') if (items[1] == ''): break result.append({ 'time' : items[0], 'hgtzj': items[1], 'sgtzj': items[2], 'hgtye': items[3], 'sgtye': items[4], }) return pd.DataFrame(result) def get_realtime_moneyflow(self): directs = ['north', 'south'] df_data = [] for direct in directs: df = self._get_realtime_moneyflow(direct) df['direct'] = direct if df is not None: df_data.append(df) if len(df_data) > 0: return pd.concat(df_data), '' return None, '获取数据失败' def get_hist_moneyflow(self): url = 'http://dcfm.eastmoney.com/EM_MutiSvcExpandInterface/api/js/get?type=HSGTZJZS&token=70f12f2f4f091e459a279469fe49eca5&js={"data":(x)}' response = self.do_request(url) if response is None: return None, '获取数据失败' jsonobj = json.loads(response) df = pd.DataFrame(jsonobj['data']) return df, '' def _get_his_tradestat_onepage(self, markettype, page_no): url = 'http://dcfm.eastmoney.com/EM_MutiSvcExpandInterface/api/js/get?type=HSGTHIS&token=7<PASSWORD>&filter=(MarketType=%d)&js={"data":(x)}&ps=500&p=%d' % (markettype, page_no) response = self.do_request(url) if response is None: return None jsonobj = json.loads(response) return pd.DataFrame(jsonobj['data']) def get_his_tradestat(self, markettype): page_no = 1 df_data = [] while True: df = self._get_his_tradestat_onepage(markettype, page_no) if df is None: return None, '获取数据失败' if len(df) > 0: df_data.append(df) if len(df) < 500: break page_no = page_no + 1 if len(df_data) == 
0: return None, '获取数据失败' return pd.concat(df_data), '' def get_ah_compare(self): url = 'http://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx?type=CT&token=4f1862fc3b5e77c150a2b985b12db0fd&p=1&ps=1000&sty=FCABHL&cmd=C._AHH&js={"data":[(x)]}' response = self.do_request(url) if response is None: return None, '获取数据失败' jsonobj = json.loads(response) result = [] for data in jsonobj['data']: items = data.split(',') result.append({ "hshare_code" : items[1], 'hshare_name' : items[2], 'hshare_last' : items[3], 'hshare_chg' : items[4], "ashare_code" : items[5], 'ashare_name' : items[7], 'ashare_last' : items[8], 'ashare_chg' : items[9], 'ah_ratio' : items[13], }) df = pd.DataFrame(result) return df, ''
1,997
454
package io.vertx.up.uca.rs; import io.vertx.up.exception.UpException; import java.lang.annotation.Annotation; /** * Verification for epsilon */ public interface Rambler { void verify(Class<? extends Annotation> clazz, Class<?> type) throws UpException; }
105
585
######################################################### # # DO NOT EDIT THIS FILE. IT IS GENERATED AUTOMATICALLY. # # PLEASE LOOK INTO THE README FOR MORE INFORMATION. # # ######################################################### # coding: utf-8 # # Control Ops Tutorial # # In this tutorial we show how to use control flow operators in Caffe2 and give some details about their underlying implementations. # ### Conditional Execution Using NetBuilder # # Let's start with conditional operator. We will demonstrate how to use it in two Caffe2 APIs used for building nets: `NetBuilder` and `brew`. # In[1]: from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from caffe2.python import workspace from caffe2.python.core import Plan, to_execution_step, Net from caffe2.python.net_builder import ops, NetBuilder # In the first example, we define several blobs and then use the 'If' operator to set the value of one of them conditionally depending on values of other blobs. # # The pseudocode for the conditional examples we will implement is as follows: # # if (x > 0): # y = 1 # else: # y = 0 # In[2]: with NetBuilder() as nb: # Define our constants ops.Const(0.0, blob_out="zero") ops.Const(1.0, blob_out="one") ops.Const(0.5, blob_out="x") ops.Const(0.0, blob_out="y") # Define our conditional sequence with ops.IfNet(ops.GT(["x", "zero"])): ops.Copy("one", "y") with ops.Else(): ops.Copy("zero", "y") # Note the usage of `NetBuilder`'s `ops.IfNet` and `ops.Else` calls: `ops.IfNet` accepts a blob reference or blob name as an input, it expects an input blob to have a scalar value convertible to bool. Note that the optional `ops.Else` is at the same level as `ops.IfNet` and immediately follows the corresponding `ops.IfNet`. Let's execute the resulting net (execution step) and check the values of the blobs. 
# # Note that since x = 0.5, which is indeed greater than 0, we should expect y = 1 after execution. # In[3]: # Initialize a Plan plan = Plan('if_net_test') # Add the NetBuilder definition above to the Plan plan.AddStep(to_execution_step(nb)) # Initialize workspace for blobs ws = workspace.C.Workspace() # Run the Plan ws.run(plan) # Fetch some blobs and print print('x = ', ws.blobs["x"].fetch()) print('y = ', ws.blobs["y"].fetch()) # Before going further, it's important to understand the semantics of execution blocks ('then' and 'else' branches in the example above), i.e. handling of reads and writes into global (defined outside of the block) and local (defined inside the block) blobs. # # `NetBuilder` uses the following set of rules: # # - In `NetBuilder`'s syntax, a blob's declaration and definition occur at the same time - we define an operator which writes its output into a blob with a given name. # # - `NetBuilder` keeps track of all operators seen before the current execution point in the same block and up the stack in parent blocks. # # - If an operator writes into a previously unseen blob, it creates a **local** blob that is visible only within the current block and the subsequent children blocks. Local blobs created in a given block are effectively deleted when we exit the block. Any write into previously defined (in the same block or in the parent blocks) blob updates an originally created blob and does not result in the redefinition of a blob. # # - An operator's input blobs have to be defined earlier in the same block or in the stack of parent blocks. # # # As a result, in order to see the values computed by a block after its execution, the blobs of interest have to be defined outside of the block. This rule effectively forces visible blobs to always be correctly initialized. 
# # To illustrate concepts of block semantics and provide a more sophisticated example, let's consider the following net: # In[4]: with NetBuilder() as nb: # Define our constants ops.Const(0.0, blob_out="zero") ops.Const(1.0, blob_out="one") ops.Const(2.0, blob_out="two") ops.Const(1.5, blob_out="x") ops.Const(0.0, blob_out="y") # Define our conditional sequence with ops.IfNet(ops.GT(["x", "zero"])): ops.Copy("x", "local_blob") # create local_blob using Copy -- this is not visible outside of this block with ops.IfNet(ops.LE(["local_blob", "one"])): ops.Copy("one", "y") with ops.Else(): ops.Copy("two", "y") with ops.Else(): ops.Copy("zero", "y") # Note that using local_blob would fail here because it is outside of the block in # which it was created # When we execute this, we expect that y == 2.0, and that `local_blob` will not exist in the workspace. # In[5]: # Initialize a Plan plan = Plan('if_net_test_2') # Add the NetBuilder definition above to the Plan plan.AddStep(to_execution_step(nb)) # Initialize workspace for blobs ws = workspace.C.Workspace() # Run the Plan ws.run(plan) # Fetch some blobs and print print('x = ', ws.blobs["x"].fetch()) print('y = ', ws.blobs["y"].fetch()) # Assert that the local_blob does not exist in the workspace # It should have been destroyed because of its locality assert "local_blob" not in ws.blobs # ### Conditional Execution Using Brew Module # # Brew is another Caffe2 interface used to construct nets. Unlike `NetBuilder`, `brew` does not track the hierarchy of blocks and, as a result, we need to specify which blobs are considered local and which blobs are considered global when passing 'then' and 'else' models to an API call. # # Let's start by importing the necessary items for the `brew` API. 
# In[6]: from caffe2.python import brew from caffe2.python.workspace import FeedBlob, RunNetOnce, FetchBlob from caffe2.python.model_helper import ModelHelper # We will use the Caffe2's `ModelHelper` class to define and represent our models, as well as contain the parameter information about the models. Note that a `ModelHelper` object has two underlying nets: # # (1) param_init_net: Responsible for parameter initialization # (2) net: Contains the main network definition, i.e. the graph of operators that the data flows through # # Note that `ModelHelper` is similar to `NetBuilder` in that we define the operator graph first, and actually run later. With that said, let's define some models to act as conditional elements, and use the `brew` module to form the conditional statement that we want to run. We will construct the same statement used in the first example above. # In[7]: # Initialize model, which will represent our main conditional model for this test model = ModelHelper(name="test_if_model") # Add variables and constants to our conditional model; notice how we add them to the param_init_net model.param_init_net.ConstantFill([], ["zero"], shape=[1], value=0.0) model.param_init_net.ConstantFill([], ["one"], shape=[1], value=1.0) model.param_init_net.ConstantFill([], ["x"], shape=[1], value=0.5) model.param_init_net.ConstantFill([], ["y"], shape=[1], value=0.0) # Add Greater Than (GT) conditional operator to our model # which checks if "x" > "zero", and outputs the result in the "cond" blob model.param_init_net.GT(["x", "zero"], "cond") # Initialize a then_model, and add an operator which we will set to be # executed if the conditional model returns True then_model = ModelHelper(name="then_test_model") then_model.net.Copy("one", "y") # Initialize an else_model, and add an operator which we will set to be # executed if the conditional model returns False else_model = ModelHelper(name="else_test_model") else_model.net.Copy("zero", "y") # Use the brew module's 
handy cond operator to facilitate the construction of the operator graph brew.cond( model=model, # main conditional model cond_blob="cond", # blob with condition value external_blobs=["x", "y", "zero", "one"], # data blobs used in execution of conditional then_model=then_model, # pass then_model else_model=else_model) # pass else_model # Before we run the model, let's use Caffe2's graph visualization tool `net_drawer` to check if the operator graph makes sense. # In[8]: from caffe2.python import net_drawer from IPython import display graph = net_drawer.GetPydotGraph(model.net, rankdir="LR") display.Image(graph.create_png(), width=800) # Now let's run the net! When using `ModelHelper`, we must first run the `param_init_net` to initialize paramaters, then we execute the main `net`. # In[9]: # Run param_init_net once RunNetOnce(model.param_init_net) # Run main net (once in this case) RunNetOnce(model.net) # Fetch and examine some blobs print("x = ", FetchBlob("x")) print("y = ", FetchBlob("y")) # ### Loops Using NetBuilder # # Another important control flow operator is 'While', which allows repeated execution of a fragment of net. Let's consider `NetBuilder`'s version first. # # The pseudocode for this example is: # # i = 0 # y = 0 # while (i <= 7): # y = i + y # i += 1 # In[10]: with NetBuilder() as nb: # Define our variables ops.Const(0, blob_out="i") ops.Const(0, blob_out="y") # Define loop code and conditions with ops.WhileNet(): with ops.Condition(): ops.Add(["i", ops.Const(1)], ["i"]) ops.LE(["i", ops.Const(7)]) ops.Add(["i", "y"], ["y"]) # As with the 'If' operator, standard block semantic rules apply. Note the usage of `ops.Condition` clause that should immediately follow `ops.WhileNet` and contains code that is executed before each iteration. The last operator in the condition clause is expected to have a single boolean output that determines whether the other iteration is executed. 
# # In the example above we increment the counter ("i") before each iteration and accumulate its values in "y" blob, the loop's body is executed 7 times, the resulting blob values: # In[11]: # Initialize a Plan plan = Plan('while_net_test') # Add the NetBuilder definition above to the Plan plan.AddStep(to_execution_step(nb)) # Initialize workspace for blobs ws = workspace.C.Workspace() # Run the Plan ws.run(plan) # Fetch blobs and print print("i = ", ws.blobs["i"].fetch()) print("y = ", ws.blobs["y"].fetch()) # ### Loops Using Brew Module # # Now let's take a look at how to replicate the loop above using the `ModelHelper`+`brew` interface. # In[12]: # Initialize model, which will represent our main conditional model for this test model = ModelHelper(name="test_while_model") # Add variables and constants to our model model.param_init_net.ConstantFill([], ["i"], shape=[1], value=0) model.param_init_net.ConstantFill([], ["one"], shape=[1], value=1) model.param_init_net.ConstantFill([], ["seven"], shape=[1], value=7) model.param_init_net.ConstantFill([], ["y"], shape=[1], value=0) # Initialize a loop_model that represents the code to run inside of loop loop_model = ModelHelper(name="loop_test_model") loop_model.net.Add(["i", "y"], ["y"]) # Initialize cond_model that represents the conditional test that the loop # abides by, as well as the incrementation step cond_model = ModelHelper(name="cond_test_model") cond_model.net.Add(["i", "one"], "i") cond_model.net.LE(["i", "seven"], "cond") # Use brew's loop operator to facilitate the creation of the loop's operator graph brew.loop( model=model, # main model that contains data cond_blob="cond", # explicitly specifying condition blob external_blobs=["cond", "i", "one", "seven", "y"], # data blobs used in execution of the loop loop_model=loop_model, # pass loop_model cond_model=cond_model # pass condition model (optional) ) # Once again, let's visualize the net using the `net_drawer`. 
# In[13]: graph = net_drawer.GetPydotGraph(model.net, rankdir="LR") display.Image(graph.create_png(), width=800) # Finally, we'll run the `param_init_net` and `net` and print our final blob values. # In[14]: RunNetOnce(model.param_init_net) RunNetOnce(model.net) print("i = ", FetchBlob("i")) print("y = ", FetchBlob("y")) # ### Backpropagation # # Both 'If' and 'While' operators support backpropagation. To illustrate how backpropagation with control ops work, let's consider the following examples in which we construct the operator graph using `NetBuilder` and obtain calculate gradients using the `AddGradientOperators` function. The first example shows the following conditional statement: # # x = 1-D numpy float array # y = 4 # z = 0 # if (x > 0): # z = y^2 # else: # z = y^3 # In[15]: import numpy as np # Feed blob called x, which is simply a 1-D numpy array [0.5] FeedBlob("x", np.array(0.5, dtype='float32')) # _use_control_ops=True forces NetBuilder to output single net as a result # x is external for NetBuilder, so we let nb know about it through initial_scope param with NetBuilder(_use_control_ops=True, initial_scope=["x"]) as nb: ops.Const(0.0, blob_out="zero") ops.Const(1.0, blob_out="one") ops.Const(4.0, blob_out="y") ops.Const(0.0, blob_out="z") with ops.IfNet(ops.GT(["x", "zero"])): ops.Pow("y", "z", exponent=2.0) with ops.Else(): ops.Pow("y", "z", exponent=3.0) # we should get a single net as output assert len(nb.get()) == 1, "Expected a single net produced" net = nb.get()[0] # add gradient operators for 'z' blob grad_map = net.AddGradientOperators(["z"]) # In this case # # $$x = 0.5$$ # # $$z = y^2 = 4^2 = 16$$ # # We will fetch the blob `y_grad`, which was generated by the `AddGradientOperators` call above. This blob contains the gradient of blob z with respect to y. 
# According to basic calculus:
#
# $$y\_grad = \frac{\partial{z}}{\partial{y}} = \frac{\partial{(y^2)}}{\partial{y}} = 2y = 2(4) = 8$$

# In[16]:


# Run the net
RunNetOnce(net)
# Fetch blobs and print
print("x = ", FetchBlob("x"))
print("y = ", FetchBlob("y"))
print("z = ", FetchBlob("z"))
print("y_grad = ", FetchBlob("y_grad"))


# Now, let's change value of blob "x" to -0.5 and rerun net:

# In[17]:


# To re-run net with different input, simply feed new blob
FeedBlob("x", np.array(-0.5, dtype='float32'))
RunNetOnce(net)
print("x = ", FetchBlob("x"))
print("y = ", FetchBlob("y"))
print("z = ", FetchBlob("z"))
print("y_grad = ", FetchBlob("y_grad"))


# The next and final example illustrates backpropagation on the following loop:
#
#     x = 2
#     y = 3
#     z = 2
#     i = 0
#     while (i <= 2):
#         x = x^2
#         if (i < 2):
#             y = y^2
#         else:
#             z = z^3
#         i += 1
#     s = x + y + z
#
# Note that this code essentially computes the sum of x^4 (by squaring x twice), y^2, and z^3.

# In[18]:


with NetBuilder(_use_control_ops=True) as nb:
    # Define variables and constants
    ops.Copy(ops.Const(0), "i")
    ops.Copy(ops.Const(1), "one")
    ops.Copy(ops.Const(2), "two")
    ops.Copy(ops.Const(2.0), "x")
    ops.Copy(ops.Const(3.0), "y")
    ops.Copy(ops.Const(2.0), "z")
    # Define loop statement
    # Computes x^4, y^2, z^3
    # (the Condition net increments "i" first, so the body runs for i = 1, 2)
    with ops.WhileNet():
        with ops.Condition():
            ops.Add(["i", "one"], "i")
            ops.LE(["i", "two"])
        ops.Pow("x", "x", exponent=2.0)
        with ops.IfNet(ops.LT(["i", "two"])):
            ops.Pow("y", "y", exponent=2.0)
        with ops.Else():
            ops.Pow("z", "z", exponent=3.0)

    # Sum s = x + y + z
    ops.Add(["x", "y"], "x_plus_y")
    ops.Add(["x_plus_y", "z"], "s")

assert len(nb.get()) == 1, "Expected a single net produced"
net = nb.get()[0]

# Add gradient operators to output blob 's'
grad_map = net.AddGradientOperators(["s"])


# In[19]:


workspace.RunNetOnce(net)
print("x = ", FetchBlob("x"))
print("x_grad = ", FetchBlob("x_grad"))  # derivative: 4x^3
print("y = ", FetchBlob("y"))
print("y_grad = ", FetchBlob("y_grad"))  # derivative: 2y
print("z = ", FetchBlob("z"))
print("z_grad = ", FetchBlob("z_grad"))  # derivative: 3z^2


# ### Implementation Notes
#
# On the low level, Caffe2 uses the following set of operators to implement forward and backward branching and loops:
# - If - accepts *then_net* and *else_net* nets as arguments and executes one of them, depending on input condition blob value, nets are executed **in the same** workspace;
# - While - repeats execution of *loop_net* net passed as argument, net is executed in the same workspace;
# - Do - special operator that creates a separate inner workspace, sets up blob mappings between outer and inner workspaces, and runs a net in an inner workspace;
# - CreateScope/HasScope - special operators that create and keep track of workspaces used by Do operator.
#
# Higher level libraries that implement branching and looping (e.g. `NetBuilder`, `brew`) use these operators to build control flow, e.g. for 'If':
# - do necessary sanity checks (e.g. determine which blobs are initialized and check that subnet does not read undefined blobs)
# - wrap 'then' and 'else' branches into Do
# - setup correct blob mappings by specifying which local names are mapped to outer blobs
# - prepare scope structure, used by Do operator
#
# While 'If' and 'While' Caffe2 ops can be used directly without creating local block workspaces, we encourage users to use higher level Caffe2 interfaces that provide necessary correctness guarantees.
#
# Backpropagation for 'While' in general is expensive memory-wise - we have to save the local workspace for every iteration of a block, including global blobs visible to the block. It is recommended that users use the `RecurrentNetwork` operator instead in production environments.
6,005
975
<gh_stars>100-1000
package com.dianping.shield.node.useritem;

import com.dianping.shield.node.itemcallbacks.HotZoneStateChangeCallBack;

import java.util.ArrayList;

/**
 * Plain data holder describing a "hot zone": a {@code [start, end]} range
 * together with the callbacks to be notified when the zone's state changes.
 *
 * <p>All fields are public and mutable; no defensive copies are made, so
 * callers share the same {@code callBackList} instance.
 *
 * <p>Created by runqi.wei at 2018/9/11
 */
public class HotZoneInfo {

    // Sentinel boundary values - presumably meaning "anchored to the parent's
    // top/bottom edge"; TODO(review): confirm against the consumers of this class.
    public static final int PARENT_TOP = Integer.MIN_VALUE;
    public static final int PARENT_BOTTOM = Integer.MAX_VALUE;

    // Zone boundaries; the unit (pixels vs. item position) is not visible from
    // this file - confirm against callers.
    public int start;
    public int end;
    // Listeners invoked on hot-zone state transitions; may be null until assigned.
    public ArrayList<HotZoneStateChangeCallBack> callBackList;
}
162
590
<reponame>wingenedu/uflo
/*******************************************************************************
 * Copyright 2017 Bstek
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 ******************************************************************************/
package com.bstek.uflo.model.variable;

import javax.persistence.Column;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;

/**
 * JPA entity for a process variable holding a single {@link Character} value.
 *
 * <p>Part of the {@code Variable} single-table inheritance hierarchy; rows of
 * this subtype are identified by the discriminator value {@code "Character"}
 * and persist their payload in the {@code CHARACTER_VALUE_} column.
 */
@Entity
@DiscriminatorValue("Character")
public class CharacterVariable extends Variable {

    @Column(name="CHARACTER_VALUE_")
    private Character characterValue;

    /** No-arg constructor required by JPA. */
    public CharacterVariable(){}

    /** Creates a variable wrapping the given value (may be {@code null}). */
    public CharacterVariable(Character value){
        this.characterValue=value;
    }

    /** {@inheritDoc} Returns the stored {@link Character}, possibly {@code null}. */
    @Override
    public Object getValue() {
        return characterValue;
    }

    public Character getCharacterValue() {
        return characterValue;
    }

    public void setCharacterValue(Character characterValue) {
        this.characterValue = characterValue;
    }

    /** {@inheritDoc} Always {@link VariableType#Character} for this subtype. */
    @Override
    public VariableType getType() {
        return VariableType.Character;
    }
}
396
348
<filename>docs/data/leg-t2/029/02904184.json {"nom":"Plouénan","circ":"4ème circonscription","dpt":"Finistère","inscrits":2015,"abs":940,"votants":1075,"blancs":54,"nuls":13,"exp":1008,"res":[{"nuance":"LR","nom":"<NAME>","voix":648},{"nuance":"REM","nom":"<NAME>","voix":360}]}
112
1,089
// Copyright (C) 2016-2020 <NAME> <<EMAIL>> // This file is subject to the license terms in the LICENSE file // found in the top-level directory of this distribution. #ifndef TYPE_SAFE_VARIANT_HPP_INCLUDED #define TYPE_SAFE_VARIANT_HPP_INCLUDED #include <type_safe/detail/variant_impl.hpp> #include <type_safe/optional_ref.hpp> namespace type_safe { /// Convenience alias for [ts::union_type](). /// \module variant template <typename T> using variant_type = union_type<T>; /// Convenience alias for [ts::union_types](). /// \module variant template <typename... Ts> using variant_types = union_types<Ts...>; /// Tag type to mark a [ts::basic_variant]() without a value. /// \module variant struct nullvar_t { constexpr nullvar_t() {} }; /// Tag object of type [ts::nullvar_t](). /// \module variant constexpr nullvar_t nullvar; /// An improved `union` storing at most one of the given types at a time (or possibly none). /// /// It is an improved version of [std::variant](). /// A big problem with variant is implementing the operation that changes the type. /// It has to destroy the old value and then create the new one. /// But how to handle an exception when creating the new type? /// There are multiple ways of handling this, so it is outsourced in a policy. /// The variant policy is a class that must have the following members: /// * `allow_empty` - either [std::true_type]() or [std::false_type](). /// If it is "true", the variant can be put in the empty state explicitly. /// * `void change_value(variant_type<T>, tagged_union<Types...>&, Args&&... args)` - changes the /// value and type. It will be called when the variant already contains an object of a different /// type. It must destroy the old type and create a new one with the given type and arguments. /// \module variant template <class VariantPolicy, typename HeadT, typename... 
TailT> class basic_variant : detail::variant_copy<HeadT, TailT...>, detail::variant_move<HeadT, TailT...> { using union_t = tagged_union<HeadT, TailT...>; using traits = detail::traits<HeadT, TailT...>; public: using types = typename union_t::types; using type_id = typename union_t::type_id; using allow_empty = typename VariantPolicy::allow_empty; static constexpr type_id invalid_type = union_t::invalid_type; //=== constructors/destructors/assignment/swap ===// /// \effects Initializes the variant to the empty state. /// \notes This constructor only participates in overload resolution, /// if the policy allows an empty variant. /// \group default /// \param Dummy /// \exclude /// \param 1 /// \exclude template <typename Dummy = void, typename = typename std::enable_if<VariantPolicy::allow_empty::value, Dummy>::type> basic_variant() noexcept {} /// \group default /// \param Dummy /// \exclude /// \param 1 /// \exclude template <typename Dummy = void, typename = typename std::enable_if<VariantPolicy::allow_empty::value, Dummy>::type> basic_variant(nullvar_t) noexcept : basic_variant() {} /// Copy (1)/move (2) constructs a variant. /// \effects If the other variant is not empty, it will call /// [ts::copy](standardese://ts::copy_union/) (1) or [ts::move](standardese://ts::move_union/) /// (2). \throws Anything thrown by the copy (1)/move (2) constructor. \notes This constructor /// only participates in overload resolution, if all types are copy (1)/move (2) constructible./ /// \notes The move constructor only moves the stored value, /// and does not make the other variant empty. /// \group copy_move_ctor basic_variant(const basic_variant&) = default; /// \group copy_move_ctor basic_variant(basic_variant&&) TYPE_SAFE_NOEXCEPT_DEFAULT(traits::nothrow_move_constructible::value) = default; /// Initializes it containing a new object of the given type. /// \effects Creates it by calling `T`s constructor with the perfectly forwarded arguments. 
/// \throws Anything thrown by `T`s constructor. /// \notes This constructor does not participate in overload resolution, /// unless `T` is a valid type for the variant and constructible from the arguments. /// \param 2 /// \exclude template <typename T, typename... Args, typename = detail::enable_variant_type<union_t, T, Args&&...>> explicit basic_variant(variant_type<T> type, Args&&... args) { storage_.get_union().emplace(type, std::forward<Args>(args)...); } /// Initializes it with a copy of the given object. /// \effects Same as the type + argument constructor called with the decayed type of the /// argument and the object perfectly forwarded. \throws Anything thrown by `T`s copy/move /// constructor. \notes This constructor does not participate in overload resolution, unless `T` /// is a valid type for the variant and copy/move constructible. \param 1 \exclude template <typename T, typename = detail::enable_variant_type<union_t, T, T&&>> basic_variant(T&& obj) : basic_variant(variant_type<typename std::decay<T>::type>{}, std::forward<T>(obj)) {} /// Initializes it from a [ts::tagged_union](). /// \effects Copies the currently stored type of the union /// into the variant by calling the copy (1)/move (2) constructor of the stored type. /// \throws Anything thrown by the selected copy (1)/move (2) constructor. /// \requires If the variant policy does not allow the empty state, /// the union must not be empty. /// \group ctor_union explicit basic_variant(const tagged_union<HeadT, TailT...>& u) { DEBUG_ASSERT(allow_empty::value || u.has_value(), detail::precondition_error_handler{}); copy(storage_.get_union(), u); } /// \group ctor_union explicit basic_variant(tagged_union<HeadT, TailT...>&& u) { DEBUG_ASSERT(allow_empty::value || u.has_value(), detail::precondition_error_handler{}); move(storage_.get_union(), std::move(u)); } /// \effects Destroys the currently stored value, /// if there is any. 
~basic_variant() noexcept = default; /// Copy (1)/move (2) assigns a variant. /// \effects If the other variant is empty, /// makes this one empty as well. /// Otherwise let the other variant contains an object of type `T`. /// If this variant contains the same type and there is a copy (1)/move (2) assignment operator /// available, assigns the object to this object. Else forwards to the variant policy's /// `change_value()` function. \throws Anything thrown by either the copy (1)/move (2) /// assignment operator or copy (1)/move (2) constructor. If the assignment operator throws, the /// variant will contain the partially assigned object. If the constructor throws, the state /// depends on the variant policy. \notes This function does not participate in overload /// resolution, unless all types are copy (1)/move (2) constructible. \group copy_move_assign basic_variant& operator=(const basic_variant&) = default; /// \group copy_move_assign basic_variant& operator=(basic_variant&&) TYPE_SAFE_NOEXCEPT_DEFAULT(traits::nothrow_move_assignable::value) = default; /// Alias for [*reset()](). /// \param Dummy /// \exclude /// \param 1 /// \exclude template <typename Dummy = void, typename = typename std::enable_if<VariantPolicy::allow_empty::value, Dummy>::type> basic_variant& operator=(nullvar_t) noexcept { reset(); return *this; } /// Same as the single argument `emplace()`. /// \effects Changes the value to a copy of `obj`. /// \throws Anything thrown by `T`s copy/move constructor. /// \notes This function does not participate in overload resolution, /// unless `T` is a valid type for the variant and copy/move constructible. /// \param 1 /// \exclude template <typename T, typename = detail::enable_variant_type<union_t, T, T&&>> basic_variant& operator=(T&& obj) { emplace(variant_type<typename std::decay<T>::type>{}, std::forward<T>(obj)); return *this; } /// Swaps two variants. /// \effects There are four cases: /// * Both variants are empty. 
Then the function has no effect. /// * Both variants contain the same type, `T`. Then it calls swap on the stored type. /// * Both variants contain a type, but different types. /// Then it swaps the variant by move constructing the objects from one type to the other, /// using the variant policy. /// * Only one variant contains an object. Then it moves the value to the empty variant, /// and destroys it in the non-empty variant. /// /// \effects In either case, it will only call the swap() function or the move constructor. /// \throws Anything thrown by the swap function, /// in which case both variants contain the partially swapped values, /// or the mvoe constructor, in which case the exact behavior depends on the variant policy. friend void swap(basic_variant& a, basic_variant& b) noexcept(traits::nothrow_swappable::value) { auto& a_union = a.storage_.get_union(); auto& b_union = b.storage_.get_union(); if (a_union.has_value() && b_union.has_value()) detail::swap_union<VariantPolicy, union_t>::swap(a_union, b_union); else if (a_union.has_value() && !b_union.has_value()) { b = std::move(a); a.reset(); } else if (!a_union.has_value() && b_union.has_value()) { a = std::move(b); b.reset(); } } //=== modifiers ===// /// \effects Destroys the stored value in the variant, if any. /// \notes This function only participate in overload resolution, /// if the variant policy allows the empty state. /// \param Dummy /// \exclude /// \param 1 /// \exclude template <typename Dummy = void, typename = typename std::enable_if<VariantPolicy::allow_empty::value, Dummy>::type> void reset() noexcept { destroy(storage_.get_union()); } /// Changes the value to a new object of the given type. /// \effects If the variant contains an object of the same type, /// assigns the argument to it. /// Otherwise behaves as the other emplace version. /// \throws Anything thrown by the chosen assignment operator /// or the other `emplace()`. 
/// If the assignment operator throws, /// the variant contains a partially assigned object. /// Otherwise it depends on the variant policy. /// \notes This function does not participate in overload resolution, /// unless `T` is a valid type for the variant and assignable from the argument /// without creating an additional temporary. /// \param 2 /// \exclude /// \param 3 /// \exclude template <typename T, typename Arg, typename = typename std::enable_if<detail::is_direct_assignable<T, Arg&&>::value>::type, typename = detail::enable_variant_type<union_t, T, Arg&&>> void emplace(variant_type<T> type, Arg&& arg) { if (storage_.get_union().type() == typename union_t::type_id(type)) storage_.get_union().value(type) = std::forward<Arg>(arg); else emplace_impl(type, std::forward<Arg>(arg)); } /// Changes the value to a new object of given type. /// \effects If variant is empty, creates the object directly in place /// by perfectly forwarding the arguments. /// Otherwise it forwards to the variant policy's `change_value()` function. /// \throws Anything thrown by `T`s constructor or possibly move constructor. /// If the variant was empty before, it is still empty afterwards. /// Otherwise the state depends on the policy. /// \notes This function does not participate in overload resolution, /// unless `T` is a valid type for the variant and constructible from the arguments. /// \param 2 /// \exclude template <typename T, typename... Args, typename = detail::enable_variant_type<union_t, T, Args&&...>> void emplace(variant_type<T> type, Args&&... args) { emplace_impl(type, std::forward<Args>(args)...); } private: template <typename T, typename... Args> void emplace_impl(variant_type<T> type, Args&&... 
args) { if (storage_.get_union().has_value()) VariantPolicy::change_value(type, storage_.get_union(), std::forward<Args>(args)...); else storage_.get_union().emplace(type, std::forward<Args>(args)...); } template <typename T> using enable_valid = typename std::enable_if<(type_id::template is_valid<T>)()>::type; public: //=== observers ===// /// \returns The type id representing the type of the value currently stored in the variant. /// \notes If it does not have a value stored, returns [*invalid_type](). type_id type() const noexcept { return storage_.get_union().type(); } /// \returns `true` if the variant currently contains a value, /// `false` otherwise. /// \notes Depending on the variant policy, /// it can be guaranteed to return `true` all the time. /// \group has_value bool has_value() const noexcept { return storage_.get_union().has_value(); } /// \group has_value explicit operator bool() const noexcept { return has_value(); } /// \group has_value bool has_value(variant_type<nullvar_t>) const noexcept { return !has_value(); } /// \returns `true` if the variant currently stores an object of type `T`, /// `false` otherwise. /// \notes `T` must not necessarily be a type that can be stored in the variant. template <typename T> bool has_value(variant_type<T> type) const noexcept { return this->type() == type_id(type); } /// \returns A copy of [ts::nullvar](). /// \requires The variant must be empty. nullvar_t value(variant_type<nullvar_t>) const noexcept { DEBUG_ASSERT(!has_value(), detail::precondition_error_handler{}); return nullvar; } /// \returns A (`const`) lvalue (1, 2)/rvalue (3, 4) reference to the stored object of the given /// type. \requires The variant must currently store an object of the given type, i.e. /// `has_value(type)` must return `true`. 
\group value \param 1 \exclude template <typename T, typename = enable_valid<T>> T& value(variant_type<T> type) TYPE_SAFE_LVALUE_REF noexcept { return storage_.get_union().value(type); } /// \group value /// \param 1 /// \exclude template <typename T, typename = enable_valid<T>> const T& value(variant_type<T> type) const TYPE_SAFE_LVALUE_REF noexcept { return storage_.get_union().value(type); } #if TYPE_SAFE_USE_REF_QUALIFIERS /// \group value /// \param 1 /// \exclude template <typename T, typename = enable_valid<T>> T&& value(variant_type<T> type) && noexcept { return std::move(storage_.get_union()).value(type); } /// \group value /// \param 1 /// \exclude template <typename T, typename = enable_valid<T>> const T&& value(variant_type<T> type) const&& noexcept { return std::move(storage_.get_union()).value(type); } #endif /// \returns A [ts::optional_ref]() to [ts::nullvar](). /// If the variant is not empty, returns a null reference. optional_ref<const nullvar_t> optional_value(variant_type<nullvar_t>) const noexcept { return has_value() ? nullptr : type_safe::opt_ref(&nullvar); } /// \returns A (`const`) [ts::optional_ref]() (1, 2)/[ts::optional_xvalue_ref]() to the stored /// value of given type. If it stores a different type, returns a null reference. \group /// optional_value template <typename T> optional_ref<T> optional_value(variant_type<T> type) TYPE_SAFE_LVALUE_REF noexcept { return has_value(type) ? type_safe::opt_ref(&storage_.get_union().value(type)) : nullptr; } /// \group optional_value template <typename T> optional_ref<const T> optional_value(variant_type<T> type) const TYPE_SAFE_LVALUE_REF noexcept { return has_value(type) ? type_safe::opt_ref(&storage_.get_union().value(type)) : nullptr; } #if TYPE_SAFE_USE_REF_QUALIFIERS /// \group optional_value template <typename T> optional_xvalue_ref<T> optional_value(variant_type<T> type) && noexcept { return has_value(type) ? 
type_safe::opt_xref(&storage_.get_union().value(type)) : nullptr; } /// \group optional_value template <typename T> optional_xvalue_ref<const T> optional_value(variant_type<T> type) const&& noexcept { return has_value(type) ? type_safe::opt_xref(&storage_.get_union().value(type)) : nullptr; } #endif /// \returns If the variant currently stores an object of type `T`, /// returns a copy of that by copy (1)/move (2) constructing. /// Otherwise returns `other` converted to `T`. /// \throws Anything thrown by `T`s copy (1)/move (2) constructor or the converting constructor. /// \notes `T` must not necessarily be a type that can be stored in the variant./ /// \notes This function does not participate in overload resolution, /// unless `T` is copy (1)/move (2) constructible and the fallback convertible to `T`. /// \group value_or /// \param 2 /// \exclude template <typename T, typename U> T value_or( variant_type<T> type, U&& other, typename std::enable_if< std::is_copy_constructible<T>::value && std::is_convertible<U&&, T>::value, int>::type = 0) const TYPE_SAFE_LVALUE_REF { return has_value(type) ? value(type) : static_cast<T>(std::forward<U>(other)); } #if TYPE_SAFE_USE_REF_QUALIFIERS /// \group value_or /// \param 2 /// \exclude template <typename T, typename U> T value_or( variant_type<T> type, U&& other, typename std::enable_if< std::is_move_constructible<T>::value && std::is_convertible<U&&, T>::value, int>::type = 0) && { return has_value(type) ? std::move(value(type)) : static_cast<T>(std::forward<U>(other)); } #endif /// Maps a variant with a function. /// \effects If the variant is not empty, /// calls the function using either `std::forward<Functor>(f)(current-value, /// std::forward<Args>(args)...)` or member call syntax /// `(current-value.*std::forward<Functor>(f))(std::forward<Args>(args)...)`. If those two /// expressions are both ill-formed, does nothing. \returns A new variant of the same type. It /// contains nothing, if `*this` contains nothing. 
Otherwise, if the function was called, it /// contains the result of the function. Otherwise, it is a copy of the current variant. \throws /// Anything thrown by the function or copy/move constructor, in which case the variant will be /// left unchanged, unless the object was already moved into the function and modified there. /// \requires The result of the function - if it is called - can be stored in the variant. /// \notes (1) will use the copy constructor, (2) will use the move constructor. /// The function does not participate in overload resolution, /// if copy (1)/move (2) constructors are not available for all types. /// \group map /// \param 1 /// \exclude /// \param 2 /// \exclude template <typename Functor, typename... Args, typename Dummy = void, typename = typename std::enable_if<traits::copy_constructible::value, Dummy>::type> basic_variant map(Functor&& f, Args&&... args) const TYPE_SAFE_LVALUE_REF { basic_variant result(force_empty{}); if (!has_value()) return result; detail::map_union<Functor&&, union_t>::map(result.storage_.get_union(), storage_.get_union(), std::forward<Functor>(f), std::forward<Args>(args)...); DEBUG_ASSERT(result.has_value(), detail::assert_handler{}); return result; } #if TYPE_SAFE_USE_REF_QUALIFIERS /// \group map /// \param 1 /// \exclude /// \param 2 /// \exclude template <typename Functor, typename... Args, typename Dummy = void, typename = typename std::enable_if<traits::move_constructible::value, Dummy>::type> basic_variant map(Functor&& f, Args&&... 
args) && { basic_variant result(force_empty{}); if (!has_value()) return result; detail::map_union<Functor&&, union_t>::map(result.storage_.get_union(), std::move(storage_.get_union()), std::forward<Functor>(f), std::forward<Args>(args)...); DEBUG_ASSERT(result.has_value(), detail::assert_handler{}); return result; } #endif private: struct force_empty {}; basic_variant(force_empty) noexcept {} detail::variant_storage<VariantPolicy, HeadT, TailT...> storage_; friend detail::storage_access; }; /// \exclude template <class VariantPolicy, typename Head, typename... Types> constexpr typename basic_variant<VariantPolicy, Head, Types...>::type_id basic_variant<VariantPolicy, Head, Types...>::invalid_type; //=== comparison ===// /// \exclude #define TYPE_SAFE_DETAIL_MAKE_OP(Op, Expr, Expr2) \ template <class VariantPolicy, typename Head, typename... Types> \ bool operator Op(const basic_variant<VariantPolicy, Head, Types...>& lhs, nullvar_t) \ { \ return (void)lhs, Expr; \ } \ /** \group variant_comp_null */ \ template <class VariantPolicy, typename Head, typename... Types> \ bool operator Op(nullvar_t, const basic_variant<VariantPolicy, Head, Types...>& rhs) \ { \ return (void)rhs, Expr2; \ } /// Compares a [ts::basic_variant]() with [ts::nullvar](). /// /// A variant compares equal to `nullvar`, when it does not have a value. /// A variant compares never less to `nullvar`, `nullvar` compares less only if the variant has a /// value. The other comparisons behave accordingly. 
\group variant_comp_null \module variant TYPE_SAFE_DETAIL_MAKE_OP(==, !lhs.has_value(), !rhs.has_value()) /// \group variant_comp_null TYPE_SAFE_DETAIL_MAKE_OP(!=, lhs.has_value(), rhs.has_value()) /// \group variant_comp_null TYPE_SAFE_DETAIL_MAKE_OP(<, false, rhs.has_value()) /// \group variant_comp_null TYPE_SAFE_DETAIL_MAKE_OP(<=, !lhs.has_value(), true) /// \group variant_comp_null TYPE_SAFE_DETAIL_MAKE_OP(>, lhs.has_value(), false) /// \group variant_comp_null TYPE_SAFE_DETAIL_MAKE_OP(>=, true, !rhs.has_value()) #undef TYPE_SAFE_DETAIL_MAKE_OP /// Compares a [ts::basic_variant]() with a value. /// /// A variant compares equal to a value, if it contains an object of the same type and the object /// compares equal. A variant compares less to a value, if - when it has a different type - the type /// id compares less than the type id of the value, or - when it has the same type - the object /// compares less to the value. The other comparisons behave accordingly. \notes The value must not /// necessarily have a type that can be stored in the variant. \group variant_comp_t \module variant template <class VariantPolicy, typename Head, typename... Types, typename T> bool operator==(const basic_variant<VariantPolicy, Head, Types...>& lhs, const T& rhs) { return lhs.has_value(variant_type<T>{}) && lhs.value(variant_type<T>{}) == rhs; } /// \group variant_comp_t template <class VariantPolicy, typename Head, typename... Types, typename T> bool operator==(const T& lhs, const basic_variant<VariantPolicy, Head, Types...>& rhs) { return rhs == lhs; } /// \group variant_comp_t template <class VariantPolicy, typename Head, typename... Types, typename T> bool operator!=(const basic_variant<VariantPolicy, Head, Types...>& lhs, const T& rhs) { return !(lhs == rhs); } /// \group variant_comp_t template <class VariantPolicy, typename Head, typename... 
Types, typename T> bool operator!=(const T& lhs, const basic_variant<VariantPolicy, Head, Types...>& rhs) { return !(rhs == lhs); } /// \group variant_comp_t template <class VariantPolicy, typename Head, typename... Types, typename T> bool operator<(const basic_variant<VariantPolicy, Head, Types...>& lhs, const T& rhs) { constexpr auto id = typename basic_variant<VariantPolicy, Head, Types...>::type_id(variant_type<T>{}); if (lhs.type() != id) return lhs.type() < id; return lhs.value(variant_type<T>{}) < rhs; } /// \group variant_comp_t template <class VariantPolicy, typename Head, typename... Types, typename T> bool operator<(const T& lhs, const basic_variant<VariantPolicy, Head, Types...>& rhs) { constexpr auto id = typename basic_variant<VariantPolicy, Head, Types...>::type_id(variant_type<T>{}); if (id != rhs.type()) return id < rhs.type(); return lhs < rhs.value(variant_type<T>{}); } /// \group variant_comp_t template <class VariantPolicy, typename Head, typename... Types, typename T> bool operator<=(const basic_variant<VariantPolicy, Head, Types...>& lhs, const T& rhs) { return !(rhs < lhs); } /// \group variant_comp_t template <class VariantPolicy, typename Head, typename... Types, typename T> bool operator<=(const T& lhs, const basic_variant<VariantPolicy, Head, Types...>& rhs) { return !(rhs < lhs); } /// \group variant_comp_t template <class VariantPolicy, typename Head, typename... Types, typename T> bool operator>(const basic_variant<VariantPolicy, Head, Types...>& lhs, const T& rhs) { return rhs < lhs; } /// \group variant_comp_t template <class VariantPolicy, typename Head, typename... Types, typename T> bool operator>(const T& lhs, const basic_variant<VariantPolicy, Head, Types...>& rhs) { return rhs < lhs; } /// \group variant_comp_t template <class VariantPolicy, typename Head, typename... 
Types, typename T> bool operator>=(const basic_variant<VariantPolicy, Head, Types...>& lhs, const T& rhs) { return !(lhs < rhs); } /// \group variant_comp_t template <class VariantPolicy, typename Head, typename... Types, typename T> bool operator>=(const T& lhs, const basic_variant<VariantPolicy, Head, Types...>& rhs) { return !(lhs < rhs); } /// Compares two [ts::basic_variant]()s. /// /// They compare equal if both store the same type (or none) and the stored object compares equal. /// A variant is less than another if they store mismatched types and the type id of the first is /// less than the other, or if they store the same type and the stored object compares less. The /// other comparisons behave accordingly. \module variant \group variant_comp template <class VariantPolicy, typename Head, typename... Types> bool operator==(const basic_variant<VariantPolicy, Head, Types...>& lhs, const basic_variant<VariantPolicy, Head, Types...>& rhs) { return detail::compare_variant< basic_variant<VariantPolicy, Head, Types...>>::compare_equal(lhs, rhs); } /// \group variant_comp template <class VariantPolicy, typename Head, typename... Types> bool operator!=(const basic_variant<VariantPolicy, Head, Types...>& lhs, const basic_variant<VariantPolicy, Head, Types...>& rhs) { return !(lhs == rhs); } /// \group variant_comp template <class VariantPolicy, typename Head, typename... Types> bool operator<(const basic_variant<VariantPolicy, Head, Types...>& lhs, const basic_variant<VariantPolicy, Head, Types...>& rhs) { return detail::compare_variant<basic_variant<VariantPolicy, Head, Types...>>::compare_less(lhs, rhs); } /// \group variant_comp template <class VariantPolicy, typename Head, typename... Types> bool operator<=(const basic_variant<VariantPolicy, Head, Types...>& lhs, const basic_variant<VariantPolicy, Head, Types...>& rhs) { return !(rhs < lhs); } /// \group variant_comp template <class VariantPolicy, typename Head, typename... 
Types>
bool operator>(const basic_variant<VariantPolicy, Head, Types...>& lhs,
               const basic_variant<VariantPolicy, Head, Types...>& rhs)
{
    // implemented via operator< with the operands swapped
    return rhs < lhs;
}

/// \group variant_comp
template <class VariantPolicy, typename Head, typename... Types>
bool operator>=(const basic_variant<VariantPolicy, Head, Types...>& lhs,
                const basic_variant<VariantPolicy, Head, Types...>& rhs)
{
    // lhs >= rhs is the negation of lhs < rhs
    return !(lhs < rhs);
}

/// \effects If the variant is empty, does nothing.
/// Otherwise let the variant contain an object of type `T`.
/// If the functor is callable for the `T`, calls its `operator()` passing it the stored object.
/// Else does nothing.
/// \module variant
/// \group variant_with
template <class VariantPolicy, typename Head, typename... Types, typename Func, typename... Args>
void with(basic_variant<VariantPolicy, Head, Types...>& variant, Func&& func,
          Args&&... additional_args)
{
    // delegate to the tagged_union overload of with() on the variant's storage
    with(detail::storage_access::get(variant).get_union(), std::forward<Func>(func),
         std::forward<Args>(additional_args)...);
}

/// \group variant_with
template <class VariantPolicy, typename Head, typename... Types, typename Func, typename... Args>
void with(const basic_variant<VariantPolicy, Head, Types...>& variant, Func&& func,
          Args&&... additional_args)
{
    with(detail::storage_access::get(variant).get_union(), std::forward<Func>(func),
         std::forward<Args>(additional_args)...);
}

/// \group variant_with
template <class VariantPolicy, typename Head, typename... Types, typename Func, typename... Args>
void with(basic_variant<VariantPolicy, Head, Types...>&& variant, Func&& func,
          Args&&... additional_args)
{
    // rvalue overload: moves the stored union so the visitor may take ownership
    with(std::move(detail::storage_access::get(variant).get_union()), std::forward<Func>(func),
         std::forward<Args>(additional_args)...);
}

/// \group variant_with
template <class VariantPolicy, typename Head, typename... Types, typename Func, typename... Args>
void with(const basic_variant<VariantPolicy, Head, Types...>&& variant, Func&& func,
          Args&&... additional_args)
{
    with(std::move(detail::storage_access::get(variant).get_union()), std::forward<Func>(func),
         std::forward<Args>(additional_args)...);
}

/// A variant policy for [ts::basic_variant]() that uses a fallback type.
///
/// When changing the type of the variant throws an exception,
/// the variant will create an object of the fallback type instead.
/// The variant will never be empty.
/// \requires `Fallback` must be nothrow default constructible
/// and a type that can be stored in the variant.
/// \module variant
template <typename Fallback>
class fallback_variant_policy
{
    static_assert(std::is_nothrow_default_constructible<Fallback>::value,
                  "fallback must be nothrow default constructible");

public:
    using allow_empty = std::false_type;

    template <typename T, typename... Types, typename... Args>
    static void change_value(union_type<T> type, tagged_union<Types...>& storage, Args&&... args)
    {
        change_value_impl(type, storage, std::forward<Args>(args)...);
    }

private:
    // overload selected when T is nothrow constructible from args: no fallback needed
    template <typename T, typename... Types, typename... Args>
    static auto change_value_impl(union_type<T> type, tagged_union<Types...>& storage,
                                  Args&&... args) noexcept ->
        typename std::enable_if<std::is_nothrow_constructible<T, Args&&...>::value>::type
    {
        destroy(storage);
        // won't throw
        storage.emplace(type, std::forward<Args>(args)...);
    }

    // overload selected when construction may throw: on exception, default-construct
    // the Fallback type into the storage and rethrow
    template <typename T, typename... Types, typename... Args>
    static auto change_value_impl(union_type<T> type, tagged_union<Types...>& storage,
                                  Args&&... args) ->
        typename std::enable_if<!std::is_nothrow_constructible<T, Args&&...>::value>::type
    {
        destroy(storage);
        TYPE_SAFE_TRY
        {
            // might throw
            storage.emplace(type, std::forward<Args>(args)...);
        }
        TYPE_SAFE_CATCH_ALL
        {
            // won't throw
            storage.emplace(union_type<Fallback>{});
            TYPE_SAFE_RETHROW;
        }
    }
};

/// A [ts::basic_variant]() using the [ts::fallback_variant_policy]().
///
/// This is a variant that is never empty, where exceptions on changing the type
/// leaves it with a default-constructed object of the `Fallback` type.
/// \requires `Fallback` must be nothrow default constructible.
/// \module variant
template <typename Fallback, typename... OtherTypes>
using fallback_variant = basic_variant<fallback_variant_policy<Fallback>, Fallback, OtherTypes...>;

/// A variant policy for [ts::basic_variant]() that creates a variant with explicit empty state.
///
/// It allows an empty variant explicitly.
/// When changing the type of the variant throws an exception,
/// the variant will be left in that empty state.
/// \module variant
class optional_variant_policy
{
public:
    using allow_empty = std::true_type;

    template <typename T, typename... Types, typename... Args>
    static void change_value(union_type<T> type, tagged_union<Types...>& storage, Args&&... args)
    {
        // destroy first; if emplace throws the variant simply stays empty
        destroy(storage);
        storage.emplace(type, std::forward<Args>(args)...);
    }
};

/// \exclude
namespace detail
{
    template <bool ForceNonEmpty>
    class non_empty_variant_policy
    {
    public:
        using allow_empty = std::false_type;

        template <typename T, typename... Types, typename... Args>
        static void change_value(union_type<T> type, tagged_union<Types...>& storage,
                                 Args&&... args)
        {
            change_value_impl(type, storage, std::forward<Args>(args)...);
        }

    private:
        // noexcept(ForceNonEmpty): when non-emptiness is forced, a throwing move
        // terminates instead of leaving the variant empty
        template <typename T, typename... Types>
        static void move_emplace(union_type<T> type, tagged_union<Types...>& storage,
                                 T&& obj) noexcept(ForceNonEmpty)
        {
            // if this throws, there's nothing we can do
            storage.emplace(type, std::move(obj));
        }

        template <typename T, typename... Types>
        static void change_value_impl(union_type<T> type, tagged_union<Types...>& storage, T&& obj)
        {
            destroy(storage);
            move_emplace(type, storage, std::move(obj)); // throw handled
        }

        // nothrow-constructible case: emplace directly into the storage
        template <typename T, typename... Types, typename... Args>
        static auto change_value_impl(union_type<T> type, tagged_union<Types...>& storage,
                                      Args&&... args) ->
            typename std::enable_if<std::is_nothrow_constructible<T, Args&&...>::value>::type
        {
            destroy(storage);
            // won't throw
            storage.emplace(type, std::forward<Args>(args)...);
        }

        // may-throw case: build a temporary first, then move it into the storage
        template <typename T, typename... Types, typename... Args>
        static auto change_value_impl(union_type<T> type, tagged_union<Types...>& storage,
                                      Args&&... args) ->
            typename std::enable_if<!std::is_nothrow_constructible<T, Args&&...>::value>::type
        {
            T tmp(std::forward<Args>(args)...); // might throw
            destroy(storage);
            move_emplace(type, storage, std::move(tmp)); // throw handled
        }
    };
} // namespace detail

/// A variant policy for [ts::basic_variant]() that creates a variant which is rarely empty.
///
/// When changing the type of the variant, it will use a the move constructor with a temporary.
/// If the move constructor throws, the variant will be left in the empty state.
/// Putting it into the empty state explicitly is not allowed.
/// \module variant
using rarely_empty_variant_policy = detail::non_empty_variant_policy<false>;

/// A variant policy for [ts::basic_variant]() that creates a variant which is never empty.
///
/// Similar to [ts::rarely_empty_variant_policy]() but when the move constructor throws,
/// it calls [std::terminate()]().
/// \module variant
using never_empty_variant_policy = detail::non_empty_variant_policy<true>;

/// \exclude
namespace detail
{
    // primary template: default to the rarely-empty policy
    template <typename... Types>
    struct select_variant_policy
    {
        using type = basic_variant<rarely_empty_variant_policy, Types...>;
    };

    // nullvar_t as the first "type" is a tag selecting the empty-allowed policy
    template <typename... Types>
    struct select_variant_policy<nullvar_t, Types...>
    {
        using type = basic_variant<optional_variant_policy, Types...>;
    };
} // namespace detail

/// A [ts::basic_variant]() with the recommended default semantics.
///
/// If the first type is [ts::nullvar_t]() it will use the [ts::optional_variant_policy](),
/// which explicitly allows the empty state.
/// Otherwise it will use the [ts::rarely_empty_variant_policy]()
/// where it tries to avoid the empty state as good as possible.
/// \notes If you pass [ts::nullvar_t]() as the first type,
/// it is not actually one of the types that can be stored in the variant,
/// but a tag to enable the empty state.
/// \module variant
template <typename... Types>
using variant = typename detail::select_variant_policy<Types...>::type;
} // namespace type_safe

#endif // TYPE_SAFE_VARIANT_HPP_INCLUDED
14,439
1,428
# Minimal "hello" script: prints a greeting to standard output.
# (Fixed: removed dataset-contamination tags that made the line invalid Python.)
print('hello earth!')
33
648
{"coverage":[{"coverage":{"reference":"Coverage/9876B1"},"focal":true,"relationship":{"code":"self"},"sequence":1}],"created":"2014-08-16","diagnosis":[{"diagnosis":{"code":"654456"},"sequence":1}],"id":"960150","identifier":[{"system":"http://happyhospital.com/claim","value":"9612345"}],"item":[{"net":{"code":"USD","system":"urn:iso:std:iso:4217","value":125.00},"provider":{"reference":"Practitioner/example"},"sequence":1,"service":{"code":"exam","system":"http://hl7.org/fhir/ex-serviceproduct"},"serviceDate":"2014-08-16","type":{"code":"service"},"unitPrice":{"code":"USD","system":"urn:iso:std:iso:4217","value":125.00}}],"organization":{"reference":"Organization/1"},"patient":{"reference":"Patient/1"},"payee":{"type":{"code":"provider"}},"priority":{"code":"normal"},"resourceType":"Claim","target":{"reference":"Organization/2"},"text":{"div":"<div>A human-readable rendering of the Claim</div>","status":"generated"},"type":"institutional","use":"complete"}
275
563
from .pacs import PACS from .vlcs import VLCS from .cifar_c import CIFAR10C, CIFAR100C from .digits_dg import DigitsDG from .digit_single import DigitSingle from .office_home_dg import OfficeHomeDG
73
322
#!/usr/bin/env python
# coding=utf-8
"""
    author: b5mali4
    Copyright (c) 2018
"""
import unittest


class EmailUtilTestCase(unittest.TestCase):
    """Manual integration tests for the Hunter email utilities.

    NOTE(review): these tests send real mail via ``EmailUtils`` and query the
    project database; they contain no assertions and are intended to be run
    by hand against a configured environment.
    """

    def testSendBaseEmail(self):
        """
        Test sending a plain notification email.
        :return:
        """
        from common.email_util import EmailUtils
        # NOTE(review): the "{}" placeholder in the body is sent literally --
        # presumably a .format() call was intended; confirm before relying on it.
        EmailUtils().send_mail_with_ssl("<EMAIL>", "Hunter扫描完成提醒",
                                        "Hi,0:\n你好,欢迎使用Hunter,本次扫描结束,扫描到你有{}个漏洞。详情请查看附件", )

    def testSendHunterTaskEmail(self):
        """
        Test sending the Hunter scan-result report email.
        :return:
        """
        from common.email_util import EmailUtils

        def generate_report(task_id):
            """
            Build the HTML report body for the given scan task.
            :param task_id: id of the scan task whose vulnerabilities are reported
            :return: HTML string summarising the task's vulnerabilities
            """
            from common.config_util import get_system_config
            from model.vulnerability import Vulnerability, VulnerabilityService
            vulns_info = VulnerabilityService.get_fields_by_where(where=(Vulnerability.task_id == task_id))
            if len(vulns_info) <= 0:
                # no findings: short message linking to the front-end index page
                content = """<br>你好,欢迎使用Hunter,本次扫描结束,扫描到你有0个漏洞。详情请可登录{}查看<br>""".format(
                    get_system_config()['front_end']['index'])
            else:
                # findings present: summary line followed by an HTML preview table
                content = """<br>你好,欢迎使用Hunter,本次扫描结束,扫描到你有{}个漏洞。任务预览如下,详情请登录{}查看<br>""".format(
                    len(vulns_info), get_system_config()['front_end']['index'])
                content += """
                <table frame='border' cellpadding='15' cellspacing='0' align='center' style='border: 1px solid #d6d3d3;'>
                    <tr style='background: #e6e6e6;'>
                        <th style="border-right: 1px solid #bfbfbf;">序号</th>
                        <th style="border-right: 1px solid #bfbfbf;">漏洞等级</th>
                        <th style="border-right: 1px solid #bfbfbf;">漏洞类型</th>
                        <th style="border-right: 1px solid #bfbfbf;">漏洞详情</th>
                    </tr>
                """
                index = 0
                for vuln_info in vulns_info:
                    index += 1
                    # link to the vulnerability-detail page of this task in the front end
                    vuln_detail_url = '<a href="{}">{}</a>'.format(
                        get_system_config()['front_end']['vuln_route'] + str(task_id), vuln_info.info)
                    content += """
                    <tr>
                        <td style="border-right: 1px solid #bfbfbf;">{}</td>
                        <td style="border-right: 1px solid #bfbfbf;">{}</td>
                        <td style="border-right: 1px solid #bfbfbf;">{}</td>
                        <td style="border-right: 1px solid #bfbfbf;">{}</td>
                    </tr>
                    """.format(index, vuln_info.level, vuln_info.chinese_type, vuln_detail_url)
                content += """</table>"""
            return content

        email_content = generate_report(task_id=308)
        print(email_content)
        EmailUtils().send_mail_with_ssl("<EMAIL>", "Hunter扫描完成提醒", email_content)


if __name__ == "__main__":
    unittest.main()
2,284
610
#pragma once

#include "bit_vector.hpp"
#include "mappable/mappable_vector.hpp"
#include "mappable/mapper.hpp"

#include "block_posting_list.hpp"
#include "codec/compact_elias_fano.hpp"
#include "mappable/mapper.hpp"
#include "memory_source.hpp"
#include "temporary_directory.hpp"

namespace pisa {

struct BlockIndexTag;

// Block-compressed inverted index. Posting lists are encoded block-wise with
// BlockCodec; the byte offset ("endpoint") of each list inside the flat byte
// buffer is stored with compact Elias-Fano so lists can be located by id.
template <typename BlockCodec, bool Profile = false>
class block_freq_index {
  public:
    using index_layout_tag = BlockIndexTag;

    block_freq_index() = default;

    // Maps an already-built index from the given memory source (e.g. a mapped file).
    explicit block_freq_index(MemorySource source) : m_source(std::move(source))
    {
        mapper::map(*this, m_source.data(), mapper::map_flags::warmup);
    }

    // In-memory builder: accumulates encoded posting lists, then produces the index.
    class builder {
      public:
        builder(uint64_t num_docs, global_parameters const& params) : m_params(params)
        {
            m_num_docs = num_docs;
            m_endpoints.push_back(0);  // endpoint of the empty prefix
        }

        // Encodes and appends one posting list given doc-id and frequency iterators.
        template <typename DocsIterator, typename FreqsIterator>
        void add_posting_list(
            uint64_t n, DocsIterator docs_begin, FreqsIterator freqs_begin, uint64_t /* occurrences */)
        {
            if (!n) {
                throw std::invalid_argument("List must be nonempty");
            }
            block_posting_list<BlockCodec, Profile>::write(m_lists, n, docs_begin, freqs_begin);
            m_endpoints.push_back(m_lists.size());
        }

        // Appends one posting list given as pre-encoded blocks.
        template <typename BlockDataRange>
        void add_posting_list(uint64_t n, BlockDataRange const& blocks)
        {
            if (!n) {
                throw std::invalid_argument("List must be nonempty");
            }
            block_posting_list<BlockCodec>::write_blocks(m_lists, n, blocks);
            m_endpoints.push_back(m_lists.size());
        }

        // Appends one fully serialized posting list verbatim.
        template <typename BytesRange>
        void add_posting_list(BytesRange const& data)
        {
            m_lists.insert(m_lists.end(), std::begin(data), std::end(data));
            m_endpoints.push_back(m_lists.size());
        }

        // Moves the accumulated lists into `sq` and freezes the endpoint sequence.
        void build(block_freq_index& sq)
        {
            sq.m_params = m_params;
            sq.m_size = m_endpoints.size() - 1;
            sq.m_num_docs = m_num_docs;
            sq.m_lists.steal(m_lists);

            bit_vector_builder bvb;
            compact_elias_fano::write(
                bvb, m_endpoints.begin(), sq.m_lists.size(), sq.m_size, m_params);  // XXX
            bit_vector(&bvb).swap(sq.m_endpoints);
        }

      private:
        global_parameters m_params;
        size_t m_num_docs;
        std::vector<uint64_t> m_endpoints;
        std::vector<uint8_t> m_lists;
    };

    // Streaming builder: spills encoded postings to a temporary file instead of
    // keeping them in memory, then writes the final index directly to disk.
    class stream_builder {
      public:
        stream_builder(uint64_t num_docs, global_parameters const& params)
            : m_params(params), m_postings_output((tmp.path() / "buffer").c_str())
        {
            m_num_docs = num_docs;
            m_endpoints.push_back(0);
        }

        // Encodes one posting list and streams its bytes to the buffer file.
        template <typename DocsIterator, typename FreqsIterator>
        void add_posting_list(
            uint64_t n, DocsIterator docs_begin, FreqsIterator freqs_begin, uint64_t /* occurrences */)
        {
            if (!n) {
                throw std::invalid_argument("List must be nonempty");
            }
            std::vector<std::uint8_t> buf;
            block_posting_list<BlockCodec, Profile>::write(buf, n, docs_begin, freqs_begin);
            m_postings_bytes_written += buf.size();
            m_postings_output.write(reinterpret_cast<char const*>(buf.data()), buf.size());
            m_endpoints.push_back(m_postings_bytes_written);
        }

        // Streams one posting list given as pre-encoded blocks.
        template <typename BlockDataRange>
        void add_posting_list(uint64_t n, BlockDataRange const& blocks)
        {
            if (!n) {
                throw std::invalid_argument("List must be nonempty");
            }
            std::vector<std::uint8_t> buf;
            block_posting_list<BlockCodec>::write_blocks(buf, n, blocks);
            m_postings_bytes_written += buf.size();
            m_postings_output.write(reinterpret_cast<char const*>(buf.data()), buf.size());
            m_endpoints.push_back(m_postings_bytes_written);
        }

        // Streams one fully serialized posting list verbatim.
        template <typename BytesRange>
        void add_posting_list(BytesRange const& data)
        {
            m_postings_bytes_written += data.size();
            m_postings_output.write(reinterpret_cast<char const*>(data.data()), data.size());
            m_endpoints.push_back(m_postings_bytes_written);
        }

        // Serializes header fields and endpoints, then appends the buffered postings.
        void build(std::string const& index_path)
        {
            std::ofstream os(index_path.c_str());
            mapper::detail::freeze_visitor freezer(os, 0);
            freezer(m_params, "m_params");
            std::size_t size = m_endpoints.size() - 1;
            freezer(size, "size");
            freezer(m_num_docs, "m_num_docs");

            bit_vector_builder bvb;
            compact_elias_fano::write(
                bvb, m_endpoints.begin(), m_postings_bytes_written, size, m_params);
            bit_vector endpoints(&bvb);
            freezer(endpoints, "endpoints");

            std::ifstream buf((tmp.path() / "buffer").c_str());
            m_postings_output.close();
            // write the byte count first so the reader knows how much to map
            os.write(
                reinterpret_cast<char const*>(&m_postings_bytes_written),
                sizeof(m_postings_bytes_written));
            os << buf.rdbuf();
        }

      private:
        global_parameters m_params{};
        size_t m_num_docs = 0;
        size_t m_size = 0;
        std::vector<uint64_t> m_endpoints{};
        Temporary_Directory tmp{};
        std::ofstream m_postings_output;
        std::size_t m_postings_bytes_written{0};
    };

    // Number of posting lists in the index.
    size_t size() const { return m_size; }

    // Number of documents in the collection.
    uint64_t num_docs() const { return m_num_docs; }

    using document_enumerator = typename block_posting_list<BlockCodec, Profile>::document_enumerator;

    // Returns an enumerator over the i-th posting list.
    document_enumerator operator[](size_t i) const
    {
        assert(i < size());
        compact_elias_fano::enumerator endpoints(m_endpoints, 0, m_lists.size(), m_size, m_params);
        auto endpoint = endpoints.move(i).second;
        return document_enumerator(m_lists.data() + endpoint, num_docs(), i);
    }

    // Touches every byte of the i-th list (volatile read defeats optimization)
    // so subsequent accesses hit warm memory.
    void warmup(size_t i) const
    {
        assert(i < size());
        compact_elias_fano::enumerator endpoints(m_endpoints, 0, m_lists.size(), m_size, m_params);
        auto begin = endpoints.move(i).second;
        auto end = m_lists.size();
        if (i + 1 != size()) {
            end = endpoints.move(i + 1).second;
        }
        volatile uint32_t tmp;
        for (size_t i = begin; i != end; ++i) {
            tmp = m_lists[i];
        }
        (void)tmp;
    }

    void swap(block_freq_index& other)
    {
        std::swap(m_params, other.m_params);
        std::swap(m_size, other.m_size);
        m_endpoints.swap(other.m_endpoints);
        m_lists.swap(other.m_lists);
    }

    // Visitor hook used by the mapper for (de)serialization.
    template <typename Visitor>
    void map(Visitor& visit)
    {
        visit(m_params, "m_params")(m_size, "m_size")(m_num_docs, "m_num_docs")(
            m_endpoints, "m_endpoints")(m_lists, "m_lists");
    }

  private:
    global_parameters m_params;
    size_t m_size{0};
    size_t m_num_docs{0};
    bit_vector m_endpoints;
    mapper::mappable_vector<uint8_t> m_lists;
    MemorySource m_source;
};

}  // namespace pisa
3,677
309
#include <vtkImageGridSource.h>
#include <vtkLookupTable.h>
#include <vtkImageMapToColors.h>
#include <vtkBMPReader.h>
#include <vtkImageBlend.h>
#include <vtkPoints.h>
#include <vtkThinPlateSplineTransform.h>
#include <vtkImageReslice.h>
#include <vtkSmartPointer.h>
#include <vtkDataObject.h>
#include <vtkRenderer.h>
#include <vtkRenderWindow.h>
#include <vtkRenderWindowInteractor.h>
#include <vtkInteractorStyleTerrain.h>
#include <vtkImageMapper.h>
#include <vtkActor2D.h>

// Warp an image with a thin plate spline
// Usage: program Filename.bmp -- the BMP is blended with a grid overlay and
// warped by a thin-plate-spline defined by two landmark point sets.
int main(int argc, char *argv[])
{
  if ( argc < 2 )
  {
    std::cerr << "Usage: " << argv[0] << " Filename.bmp" << std::endl;
    return EXIT_FAILURE;
  }

  // First, create an image to warp
  // (a 256x256 grid pattern that makes the deformation visible)
  vtkSmartPointer<vtkImageGridSource> imageGrid =
    vtkSmartPointer<vtkImageGridSource>::New();
  imageGrid->SetGridSpacing( 16, 16, 0 );
  imageGrid->SetGridOrigin( 0, 0, 0 );
  imageGrid->SetDataExtent( 0, 255, 0, 255, 0, 0 );
  imageGrid->SetDataScalarTypeToUnsignedChar();

  // Lookup table: maps the grid's scalars to a semi-transparent yellow overlay
  vtkSmartPointer<vtkLookupTable> table =
    vtkSmartPointer<vtkLookupTable>::New();
  table->SetTableRange( 0, 1 );
  table->SetAlphaRange( 0.0, 1.0 );
  table->SetHueRange( 0.15, 0.15 );
  table->SetSaturationRange( 1, 1 );
  table->SetValueRange( 0, 1 );
  table->Build();

  vtkSmartPointer<vtkImageMapToColors> alpha =
    vtkSmartPointer<vtkImageMapToColors>::New();
  alpha->SetInputConnection( imageGrid->GetOutputPort() );
  alpha->SetLookupTable( table );

  vtkSmartPointer<vtkBMPReader> reader =
    vtkSmartPointer<vtkBMPReader>::New();
  reader->SetFileName(argv[1]);
  reader->Update();

  // Blend the colorized grid on top of the loaded BMP
  vtkSmartPointer<vtkImageBlend> blend =
    vtkSmartPointer<vtkImageBlend>::New();
  blend->AddInputConnection( 0, reader->GetOutputPort() );
  blend->AddInputConnection( 0, alpha->GetOutputPort() );

  // Next, create a ThinPlateSpline transform
  // p1: target landmarks (corners fixed, center square in original position)
  vtkSmartPointer< vtkPoints > p1 =
    vtkSmartPointer< vtkPoints >::New();
  p1->SetNumberOfPoints( 8 );
  p1->SetPoint( 0, 0, 0, 0 );
  p1->SetPoint( 1, 0, 255, 0);
  p1->SetPoint( 2, 255, 0, 0 );
  p1->SetPoint( 3, 255, 255, 0 );
  p1->SetPoint( 4, 96, 96, 0 );
  p1->SetPoint( 5, 96, 159, 0 );
  p1->SetPoint( 6, 159, 159, 0 );
  p1->SetPoint( 7, 159, 96, 0 );

  // p2: source landmarks -- the four inner points are rotated one position,
  // which twists the center of the image
  vtkSmartPointer< vtkPoints > p2 =
    vtkSmartPointer< vtkPoints >::New();
  p2->SetNumberOfPoints( 8 );
  p2->SetPoint( 0, 0, 0, 0 );
  p2->SetPoint( 1, 0, 255, 0 );
  p2->SetPoint( 2, 255, 0, 0 );
  p2->SetPoint( 3, 255, 255, 0);
  p2->SetPoint( 4, 96, 159, 0 );
  p2->SetPoint( 5, 159, 159, 0 );
  p2->SetPoint( 6, 159, 96, 0 );
  p2->SetPoint( 7, 96, 96, 0 );

  vtkSmartPointer<vtkThinPlateSplineTransform> transform =
    vtkSmartPointer< vtkThinPlateSplineTransform >::New();
  transform->SetSourceLandmarks( p2 );
  transform->SetTargetLandmarks( p1 );
  transform->SetBasisToR2LogR();
  // You must invert the transform before passing it to vtkImageReslice
  transform->Inverse();

  vtkSmartPointer< vtkImageReslice > reslice =
    vtkSmartPointer<vtkImageReslice>::New();
  reslice->SetInputConnection( blend->GetOutputPort() );
  reslice->SetResliceTransform( transform );
  reslice->SetInterpolationModeToLinear();

  // 2D image mapper: full 8-bit window/level, slice 0
  vtkSmartPointer< vtkImageMapper > map =
    vtkSmartPointer<vtkImageMapper>::New();
  map->SetInputConnection( reslice->GetOutputPort() );
  map->SetColorWindow( 255.0);
  map->SetColorLevel( 127.5 );
  map->SetZSlice( 0 );

  vtkSmartPointer< vtkActor2D > act =
    vtkSmartPointer< vtkActor2D >::New();
  act->SetMapper( map );
  act->SetPosition( 0.0, 0.0 );

  // Standard rendering + interaction setup
  vtkSmartPointer< vtkRenderer > renderer =
    vtkSmartPointer<vtkRenderer>::New();
  renderer->AddActor( act );

  vtkSmartPointer< vtkRenderWindow > window =
    vtkSmartPointer<vtkRenderWindow>::New();
  window->SetSize( 640, 480 );
  window->AddRenderer(renderer);

  vtkSmartPointer< vtkRenderWindowInteractor > interactor =
    vtkSmartPointer< vtkRenderWindowInteractor >::New();
  interactor->SetRenderWindow(window);

  vtkSmartPointer< vtkInteractorStyleTerrain > style =
    vtkSmartPointer< vtkInteractorStyleTerrain >::New();
  interactor->SetInteractorStyle( style );

  window->Render();
  interactor->Initialize();
  interactor->Start();

  return EXIT_SUCCESS;
}
1,627
1,273
<gh_stars>1000+ package org.broadinstitute.hellbender.engine; import htsjdk.tribble.Feature; import htsjdk.tribble.FeatureCodec; import htsjdk.variant.variantcontext.VariantContext; import htsjdk.variant.vcf.VCFCodec; import org.broadinstitute.barclay.argparser.Argument; import org.broadinstitute.barclay.argparser.CommandLineArgumentParser; import org.broadinstitute.barclay.argparser.CommandLineException; import org.broadinstitute.barclay.argparser.CommandLineParser; import org.broadinstitute.hellbender.GATKBaseTest; import org.broadinstitute.hellbender.testutils.SparkTestUtils; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.*; import java.util.Collections; public final class FeatureInputUnitTest extends GATKBaseTest { private static final String FEATURE_INPUT_TEST_DIRECTORY = publicTestDir + "org/broadinstitute/hellbender/engine/"; static class ArgumentContainer { @Argument(shortName="argName") public FeatureInput<Feature> fi; } private FeatureInput<Feature> runCommandLineWithTaggedFeatureInput(final String taggedFeatureArgument, final String argumentValue) { ArgumentContainer ac = new ArgumentContainer(); CommandLineArgumentParser clp = new CommandLineArgumentParser(ac); final String[] args = {"--" + taggedFeatureArgument, argumentValue}; clp.parseArguments(System.out, args); return ac.fi; } @DataProvider(name = "InvalidFeatureTagsDataProvider") public Object[][] getInvalidFeatureTags() { return new Object[][] { //{ "name:file:file2" }, //this is legal (argument has tag name "file:file2") { "name:" }, { ":file" }, { ",:file" }, { "name,key=value=fred:file" }, { "name,:file"}, { ",key=value:file"}, {"name,key:file"}, {"name,key=:file"}, {"name,=value:file"}, {"name,=:file"}, { ":" }, { ",:" }, { "::" }, { "" }, { "name,key=value1,key=value2:file" }, //duplicate key { "name,key=value,key=value:file" }, //duplicate key //{ "name:name:gendb://mydb" } //this is legal (argument has tag name 
"name:gendb://mydb") }; } @Test(dataProvider = "InvalidFeatureTagsDataProvider", expectedExceptions = CommandLineException.class) public void testInvalidFeatureTags( final String invalidFeatureArgument ) { runCommandLineWithTaggedFeatureInput(invalidFeatureArgument, "value"); } @DataProvider(name = "ValidFileOnlyFeatureArgumentValuesDataProvider") public Object[][] getValidFileOnlyFeatureArgumentValues() { return new Object[][] { {"myFile"}, {"myName,key1=value,myFile"}, //allowed - all of this is treated as a file name {"=myFile"}, //allowed - all of this is treated as a file name {",myFile"}, //allowed - all of this is treated as a file name {"=,myFile"}, //allowed - all of this is treated as a file name {"key1=value,myFile"} //allowed - all of this is treated as a file name }; } @Test(dataProvider = "ValidFileOnlyFeatureArgumentValuesDataProvider") public void testNoFeatureNameSpecified(final String validFileOnlyFeatureArgumentValue) { FeatureInput<Feature> featureInput = runCommandLineWithTaggedFeatureInput("argName", validFileOnlyFeatureArgumentValue); Assert.assertEquals(featureInput.getFeaturePath(), validFileOnlyFeatureArgumentValue, "Wrong File in FeatureInput"); // Name should default to the absolute path of the File when no name is specified Assert.assertEquals(featureInput.getName(), new File(validFileOnlyFeatureArgumentValue).getAbsolutePath(), "Wrong default name in FeatureInput"); } @DataProvider(name = "GenDbPathAndNameData") public Object[][] genDbPathAndNameData() { return new Object[][] { // input arg name, input value, expected Feature path, expected logical name {"argName", "gendb://myJsons", "gendb://myJsons", "gendb://" + new File("myJsons").getAbsolutePath()}, {"argName:myname", "gendb://myJsons", "gendb://myJsons", "myname"}, {"argName:myname,key1=value1", "gendb://myJsons", "gendb://myJsons", "myname"}, {"argName:myname//", "gendb://myJsons", "gendb://myJsons", "myname//"}, {"argName:myname", "gendb://", "gendb://", "myname"}, {"argName", 
"gendb.gs://myBucket/myJsons", "gendb.gs://myBucket/myJsons", "gendb.gs://myBucket/myJsons"}, {"argName:myname", "gendb.gs://myJsons", "gendb.gs://myJsons", "myname"}, {"argName:myname,key1=value1", "gendb.gs://myJsons", "gendb.gs://myJsons", "myname"}, {"argName:myname//", "gendb.gs://myJsons", "gendb.gs://myJsons", "myname//"}, {"argName:myname", "gendb.gs://", "gendb.gs://", "myname"}, {"argName", "gendb.hdfs://localhost/myJsons", "gendb.hdfs://localhost/myJsons", "gendb.hdfs://localhost/myJsons"}, {"argName:myname", "gendb.hdfs://myJsons", "gendb.hdfs://myJsons", "myname"}, {"argName:myname,key1=value1", "gendb.hdfs://myJsons", "gendb.hdfs://myJsons", "myname"}, {"argName:myname//", "gendb.hdfs://myJsons", "gendb.hdfs://myJsons", "myname//"}, {"argName:myname", "gendb.hdfs://", "gendb.hdfs://", "myname"} }; } @DataProvider(name = "GcsPathAndNameData") public Object[][] gcsPathAndNameData() { return new Object[][] { // input arg name, input value, expected Feature path, expected logical name {"argName", "gs://bucket/user/my.vcf", "gs://bucket/user/my.vcf", "gs://bucket/user/my.vcf"}, {"argName:myname", "gs://bucket/user/my.vcf", "gs://bucket/user/my.vcf", "myname"}, {"argName:myname,key1=value1", "gs://bucket/user/my.vcf", "gs://bucket/user/my.vcf", "myname"}, {"argName:myname//", "gs://bucket/user/my.vcf", "gs://bucket/user/my.vcf", "myname//"} }; } @DataProvider(name = "HdfsPathAndNameData") public Object[][] hdfsPathAndNameData() { return new Object[][] { // input arg name, input value, expected Feature path, expected logical name {"argName", "hdfs://localhost/user/my.vcf", "hdfs://localhost/user/my.vcf", "hdfs://localhost:8020/user/my.vcf"}, {"argName:myname", "hdfs://localhost/user/my.vcf", "hdfs://localhost/user/my.vcf", "myname"}, {"argName:myname,key1=value1", "hdfs://localhost/user/my.vcf", "hdfs://localhost/user/my.vcf", "myname"}, {"argName:myname//", "hdfs://localhost/user/my.vcf", "hdfs://localhost/user/my.vcf", "myname//"} }; } @Test(dataProvider = 
"GenDbPathAndNameData") public void testGenDbPathAndName( final String argWithTags, final String inputValue, final String expectedFeaturePath, final String expectedLogicalName ) { FeatureInput<Feature> gendbInput = runCommandLineWithTaggedFeatureInput(argWithTags, inputValue); Assert.assertEquals(gendbInput.getFeaturePath(), expectedFeaturePath, "wrong featurePath"); Assert.assertEquals(gendbInput.getName(), expectedLogicalName, "wrong logical name"); } @Test(dataProvider = "GcsPathAndNameData", groups={"bucket"}) public void testGcsPathAndName( final String argWithTags, final String inputValue, final String expectedFeaturePath, final String expectedLogicalName ) { final FeatureInput<Feature> gcsInput = runCommandLineWithTaggedFeatureInput(argWithTags, inputValue); Assert.assertEquals(gcsInput.getFeaturePath(), expectedFeaturePath, "wrong featurePath"); Assert.assertEquals(gcsInput.getName(), expectedLogicalName, "wrong logical name"); } @Test(dataProvider = "HdfsPathAndNameData") public void testHdfsPathAndName( final String argWithTags, final String inputValue, final String expectedFeaturePath, final String expectedLogicalName ) { final FeatureInput<Feature> hdfsInput = runCommandLineWithTaggedFeatureInput(argWithTags, inputValue); Assert.assertEquals(hdfsInput.getFeaturePath(), expectedFeaturePath, "wrong featurePath"); Assert.assertEquals(hdfsInput.getName(), expectedLogicalName, "wrong logical name"); } @Test public void testFeatureNameSpecified() { final FeatureInput<Feature> featureInput = runCommandLineWithTaggedFeatureInput("argName:myName", "myFile"); Assert.assertEquals(featureInput.getFeaturePath(), "myFile", "Wrong File in FeatureInput"); Assert.assertEquals(featureInput.getName(), "myName", "Wrong name in FeatureInput"); } @Test(expectedExceptions = IllegalArgumentException.class) public void testRejectNullFile() { new FeatureInput<>((GATKPath) null, "sourceName1"); } @Test public void testNullOKAsFeatureName() { final FeatureInput<Feature> 
        // (tail of a test method whose opening lines precede this chunk)
        featureInput = runCommandLineWithTaggedFeatureInput("argName:null", "myFile");
        Assert.assertEquals(featureInput.getFeaturePath(), "myFile", "Wrong File in FeatureInput");
        Assert.assertEquals(featureInput.getName(), "null", "Wrong name in FeatureInput");
    }

    // The literal string "null" is a legal *file name*; it must be kept verbatim
    // as the feature path, not interpreted as a null reference.
    @Test
    public void testNullOKAsFileName() {
        final FeatureInput<Feature> featureInput = runCommandLineWithTaggedFeatureInput("argName:myName", "null");
        Assert.assertEquals(featureInput.getFeaturePath(), "null", "Wrong File in FeatureInput");
        Assert.assertEquals(featureInput.getName(), "myName", "Wrong name in FeatureInput");
    }

    // Multiple key=value attribute pairs after the logical name are parsed into
    // the FeatureInput's attribute map; absent keys look up as Java null.
    @Test
    public void testFeatureKeyValuePairsSpecified() {
        final FeatureInput<Feature> featureInput = runCommandLineWithTaggedFeatureInput("argName:myName,key1=value1,key2=value2,null=null", "myFile");
        Assert.assertEquals(featureInput.getAttribute("key1"), "value1", "wrong attribute value for key1");
        Assert.assertEquals(featureInput.getAttribute("key2"), "value2", "wrong attribute value for key2");
        Assert.assertEquals(featureInput.getAttribute("null"), "null", "wrong attribute value for key \"null\"");
        Assert.assertEquals(featureInput.getAttribute("key3"), null, "wrong attribute value for key3 (not present)");
        Assert.assertEquals(featureInput.getName(), "myName");
        Assert.assertEquals(featureInput.getFeaturePath(), "myFile");
    }

    // A single key=value attribute pair is parsed; other keys are absent.
    @Test
    public void testFeatureKeyValuePairSpecified() {
        final FeatureInput<Feature> featureInput = runCommandLineWithTaggedFeatureInput("argName:myName,key1=value1", "myFile");
        Assert.assertEquals(featureInput.getAttribute("key1"), "value1", "wrong attribute value for key1");
        Assert.assertEquals(featureInput.getAttribute("key2"), null, "wrong attribute value for key2 (not present)");
        Assert.assertEquals(featureInput.getName(), "myName");
        Assert.assertEquals(featureInput.getFeaturePath(), "myFile");
    }

    // Distinct keys may map to the same value; both entries are retained.
    @Test
    public void testFeatureKeyValuePairsSpecifiedSameValue() {
        final FeatureInput<Feature> featureInput =
                runCommandLineWithTaggedFeatureInput("argName:myName,key1=value,key2=value", "myFile");
        Assert.assertEquals(featureInput.getAttribute("key1"), "value", "wrong attribute value for key1");
        Assert.assertEquals(featureInput.getAttribute("key2"), "value", "wrong attribute value for key2");
        Assert.assertEquals(featureInput.getName(), "myName");
        Assert.assertEquals(featureInput.getFeaturePath(), "myFile");
    }

    // Assorted tag/file combinations used to verify getAttribute(null) rejection.
    @DataProvider(name = "KeyValuesDataProviderForTestingNull")
    public Object[][] getKeyValuesDataProviderForTestingNull() {
        return new Object[][] {
                { "argName:myName,key1=value1,key2=value2", "myFile" },
                { "argName:myName,null=value", "myFile"},
                { "argName:myName", "myFile" },
                { "argName", "myFile" },
                //{ "argName", "null" }, // "null" has special meaning to the CLP
                { "argName:null", "myFile" },
                { "argName:null", "null" },
        };
    }

    // getAttribute(null) must throw IllegalArgumentException regardless of how
    // the FeatureInput was constructed.
    @Test(dataProvider = "KeyValuesDataProviderForTestingNull", expectedExceptions = IllegalArgumentException.class)
    public void testFeatureValuesForNullKey(final String argWithTags, final String inputValue ) {
        final FeatureInput<Feature> featureInput = runCommandLineWithTaggedFeatureInput(argWithTags, inputValue);
        featureInput.getAttribute(null);
    }

    // The codec class cached on a FeatureInput is retrievable after being set.
    @Test
    public void testFeatureCodecCache() {
        Assert.assertEquals(getVariantFeatureInputWithCachedCodec().getFeatureCodecClass(), VCFCodec.class);
    }

    // Java serialization round-trip: the feature path survives, but the cached
    // codec class is intentionally dropped (it is re-discovered on demand).
    @SuppressWarnings("unchecked")
    @Test
    public void testFeatureCodecCacheSerialization() throws IOException, ClassNotFoundException {
        final FeatureInput<VariantContext>featureInput = getVariantFeatureInputWithCachedCodec();
        final FeatureInput<VariantContext> roundTrippedFeatureInput = SparkTestUtils.roundTripThroughJavaSerialization(featureInput);
        Assert.assertNotNull(roundTrippedFeatureInput);
        // we expect to lose the cached feature codec class on serialization, but retain the feature path
        Assert.assertNull(roundTrippedFeatureInput.getFeatureCodecClass());
        Assert.assertEquals(featureInput.getFeaturePath(), roundTrippedFeatureInput.getFeaturePath());
    }

    /**
     * Builds a FeatureInput for a small test VCF and caches its codec class
     * (resolved via FeatureManager) on it; helper for the codec-cache tests.
     */
    @SuppressWarnings("unchecked")
    private FeatureInput<VariantContext> getVariantFeatureInputWithCachedCodec() {
        final File inputVCFFile = new File(FEATURE_INPUT_TEST_DIRECTORY, "minimal_vcf4_file.vcf");
        final FeatureInput<VariantContext> featureInput = new FeatureInput<>(inputVCFFile.getAbsolutePath());
        Assert.assertNull(featureInput.getFeatureCodecClass());

        final FeatureCodec<? extends Feature, ?> codec = FeatureManager.getCodecForFile(featureInput.toPath());
        featureInput.setFeatureCodecClass((Class<FeatureCodec<VariantContext, ?>>)codec.getClass());

        return featureInput;
    }

    // toString(): "name:" prefix only when a name was supplied; the "gendb://"
    // scheme prefix is preserved; relative paths are made absolute.
    @Test
    public void testToString() {
        final FeatureInput<Feature> namelessFeatureInput = runCommandLineWithTaggedFeatureInput("argName","file1");
        final FeatureInput<Feature> namedFeatureInput = runCommandLineWithTaggedFeatureInput("argName:name", "file1");
        final FeatureInput<Feature> namelessGenomicsDB = runCommandLineWithTaggedFeatureInput("argName", "gendb://file1");
        final FeatureInput<Feature> namedGenomicsDB = runCommandLineWithTaggedFeatureInput("argName:name", "gendb://file1");

        Assert.assertEquals(namelessFeatureInput.toString(), new File("file1").getAbsolutePath(), "String representation of nameless FeatureInput incorrect");
        Assert.assertEquals(namedFeatureInput.toString(), "name:" + new File("file1").getAbsolutePath(), "String representation of named FeatureInput incorrect");
        Assert.assertEquals(namelessGenomicsDB.toString(), "gendb://" + new File("file1").getAbsolutePath(), "String representation of nameless FeatureInput with genomicsDB path incorrect");
        Assert.assertEquals(namedGenomicsDB.toString(), "name:gendb://" + new File("file1").getAbsolutePath(), "String representation of named FeatureInput with genomicsDB path incorrect");
    }

    // Tag strings paired with the expected result of hasUserSuppliedName():
    // true exactly when a ":name" suffix was present on the argument tag.
    @DataProvider(name = "HasUserSuppliedNameData")
    public Object[][] hasUserSuppliedNameData() {
        return new Object[][] {
                {"argName", "hdfs://localhost/user/my.vcf", false},
                {"argName:myname", "hdfs://localhost/user/my.vcf", true},
                {"argName:myname,key1=value1", "hdfs://localhost/user/my.vcf", true},
                {"argName:myname//", "hdfs://localhost/user/my.vcf", true},
                {"argName:myname//", "/user/my.vcf", true},
                {"argName", "/user/my.vcf", false},
        };
    }

    @Test(dataProvider = "HasUserSuppliedNameData")
    public void testHasUserSuppliedName(final String argWithTags, final String inputValue, final boolean isUserSupplied) {
        final FeatureInput<Feature> input = runCommandLineWithTaggedFeatureInput(argWithTags, inputValue);
        Assert.assertEquals(input.hasUserSuppliedName(), isUserSupplied);
    }
}
6,507
897
/*
 Code Description:
 A linked list is a linear data structure in which the elements are not
 stored at contiguous memory locations.

 This C program pairwise swaps the elements of a given linked list by
 exchanging the DATA stored in each pair of adjacent nodes; the node links
 themselves are left untouched.
*/
#include <stdio.h>
#include <stdlib.h>

/* A singly linked list node. */
struct Node {
    int data;
    struct Node* next;
};

/* Swap the integers stored at addresses a and b. */
void swap(int* a, int* b);

/* header points to the first node, last points to the last one. */
struct Node *header, *last;

/*
 Pairwise swap the data of adjacent nodes: (1st,2nd), (3rd,4th), ...
 A trailing node in an odd-length list is left unchanged.
*/
void pairWiseSwap()
{
    struct Node* temp = header;

    /* Traverse further only if there are at least two nodes available */
    while (temp != NULL && temp->next != NULL) {
        /* Swap the data of the node with its next node's data */
        swap(&temp->data, &temp->next->data);

        /* Advance by 2 for the next pair */
        temp = temp->next->next;
    }
}

/* Exchange two ints through their pointers. */
void swap(int* a, int* b)
{
    int temp;
    temp = *a;
    *a = *b;
    *b = temp;
}

/* Read one value from stdin and append a new node to the list. */
void create()
{
    struct Node *temp = (struct Node*)malloc(sizeof(struct Node));
    if (temp == NULL) {
        /* Allocation failure: report and abort rather than dereference NULL. */
        printf("Memory allocation failed\n");
        exit(1);
    }
    printf("Enter value of node : ");
    scanf("%d", &temp->data);
    temp->next = NULL;
    if (header == NULL) {
        /* First node: it is both head and tail. */
        header = temp;
        last = temp;
    } else {
        /* Append at the tail. */
        last->next = temp;
        last = temp;
    }
}

/* Print all nodes of the list, front to back. */
void show()
{
    struct Node *temp = header;
    while (temp != NULL) {
        printf("-->%d", temp->data);
        temp = temp->next;
    }
}

/* Driver function: build a list from user input, show it before and after
   the pairwise swap. */
int main(void)
{
    int i, num;
    printf("Enter the number of nodes: ");
    scanf("%d", &num);
    for (i = 1; i <= num; i++)
        create();
    printf("\nLinked List is\n");
    show();
    printf("\nLinked list after calling pairWiseSwap()\n");
    pairWiseSwap();
    show();
    return 0;
}

/*
 COMPLEXITY:
 Time Complexity:      O(n) -- one pass over the list
 Auxiliary Space:      O(1) -- the swap is in place (list storage itself is O(n))

 SAMPLE RUN:
 Enter the number of nodes: 5
 Enter value of node : 2
 Enter value of node : 4
 Enter value of node : 1
 Enter value of node : 3
 Enter value of node : 5

 Linked List is
 -->2-->4-->1-->3-->5
 Linked list after calling pairWiseSwap()
 -->4-->2-->3-->1-->5
*/
844
986
""" Module for scope The motivation of Scope is to cache data for calculated ops. `scope` in Scope class is the main cache. It is a dictionary mapping ibis node instances to concrete data, and the time context associate with it (if any). When there are no time contexts associate with the cached result, getting and setting values in Scope would be as simple as get and set in a normal dictonary. With time contexts, we need the following logic for getting and setting items in scope: Before setting the value op in scope we need to perform the following check first: Test if `op` is in `scope` yet - No, then put `op` in `scope`, set 'timecontext' to be the current `timecontext` (None if `timecontext` is not present), set 'value' to be the actual data. - Yes, then get the time context stored in `scope` for `op` as `old_timecontext`, and compare it with current `timecontext`: If current `timecontext` is a subset of `_timecontext`, that means we already cached a larger range of data. Do nothing and we will trim data in later execution process. If current `timecontext` is a superset of `old_timecontext`, that means we need to update cache. Set 'value' to be the current data and set 'timecontext' to be the current `timecontext` for `op`. If current `timecontext` is neither a subset nor a superset of `old_timcontext`, but they overlap, or not overlap at all (For example when there is a window that looks forward, over a window that looks back), in this case, we should not trust the data stored either because the data stored in scope doesn't cover the current time context. For simplicity, we update cache in this case, instead of merge data of different time contexts. 
""" from collections import namedtuple from typing import Any, Dict, Iterable, Optional from ibis.expr.operations import Node from ibis.expr.timecontext import TimeContextRelation, compare_timecontext from ibis.expr.typing import TimeContext ScopeItem = namedtuple('ScopeItem', ['timecontext', 'value']) class Scope: def __init__( self, param: Dict[Node, Any] = None, timecontext: Optional[TimeContext] = None, ): """Take a dict of `op`, `result`, create a new scope and save those pairs in scope. Associate None as timecontext by default. This is mostly used to init a scope with a set of given params. """ self._items = ( {op: ScopeItem(timecontext, value) for op, value in param.items()} if param else {} ) def __contains__(self, op): """Given an `op`, return if `op` is present in Scope. Note that this `__contain__` method doesn't take `timecontext` as a parameter. This could be used to iterate all keys in current scope, or any case that doesn't care about value, just simply test if `op` is in scope or not. When trying to get value in scope, use `get_value(op, timecontext)` instead. Because the cached data could be trusted only if: 1. `op` is in `scope`, and, 2. The `timecontext` associated with `op` is a time context equal to, or larger than the current time context. """ return op in self._items def __iter__(self): return iter(self._items.keys()) def set_value( self, op: Node, timecontext: Optional[TimeContext], value: Any ) -> None: """Set values in scope. Given an `op`, `timecontext` and `value`, set `op` and `(value, timecontext)` in scope. Parameters ---------- scope : collections.Mapping a dictionary mapping :class:`~ibis.expr.operations.Node` subclass instances to concrete data, and the time context associate with it (if any). op: ibis.expr.operations.Node key in scope. timecontext: Optional[TimeContext] value: Any the cached result to save in scope, an object whose type may differ in different backends. 
""" # Note that this set method doesn't simply override and set, but # takes time context into consideration. # If there is a value associated with the key, but time context is # smaller than the current time context we are going to set, # `get_value` will return None and we will proceed to set the new # value in scope. if self.get_value(op, timecontext) is None: self._items[op] = ScopeItem(timecontext, value) def get_value( self, op: Node, timecontext: Optional[TimeContext] = None ) -> Any: """Given a op and timecontext, get the result from scope Parameters ---------- scope : collections.Mapping a dictionary mapping :class:`~ibis.expr.operations.Node` subclass instances to concrete data, and the time context associate with it (if any). op: ibis.expr.operations.Node key in scope. timecontext: Optional[TimeContext] Returns ------- result: the cached result, an object whose types may differ in different backends. """ if op not in self: return None # for ops without timecontext if timecontext is None: return self._items[op].value else: # For op with timecontext, ther are some ops cannot use cached # result with a different (larger) timecontext to get the # correct result. # For example, a groupby followed by count, if we use a larger or # smaller dataset from cache, we will get an error in result. # Such ops with global aggregation, ops whose result is # depending on other rows in result Dataframe, cannot use cached # result with different time context to optimize calculation. # These are time context sensitive operations. Since these cases # are rare in acutal use case, we just enable optimization for # all nodes for now. 
cached_timecontext = self._items[op].timecontext if cached_timecontext: relation = compare_timecontext(timecontext, cached_timecontext) if relation == TimeContextRelation.SUBSET: return self._items[op].value else: return self._items[op].value return None def merge_scope(self, other_scope: 'Scope', overwrite=False) -> 'Scope': """merge items in other_scope into this scope Parameters ---------- other_scope: Scope Scope to be merged with overwrite: bool if set to be True, force overwrite `value` if `op` already exists. Returns ------- Scope a new Scope instance with items in two scope merged. """ result = Scope() for op in self: result._items[op] = self._items[op] for op in other_scope: # if get_scope returns a not None value, then data is already # cached in scope and it is at least a greater range than # the current timecontext, so we drop the item. Otherwise # add it into scope. v = other_scope._items[op] if overwrite or result.get_value(op, v.timecontext) is None: result._items[op] = v return result def merge_scopes( self, other_scopes: Iterable['Scope'], overwrite=False ) -> 'Scope': """merge items in other_scopes into this scope Parameters ---------- other_scopes: Iterable[Scope] scopes to be merged with overwrite: Bool if set to be True, force overwrite value if op already exists. Returns ------- Scope a new Scope instance with items in two scope merged. """ result = Scope() for op in self: result._items[op] = self._items[op] for s in other_scopes: result = result.merge_scope(s, overwrite) return result
3,183
1,459
# Copyright 2017 The Wallaroo Authors.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
#  implied. See the License for the specific language governing
#  permissions and limitations under the License.


"""
This is an example application that takes transactions and sends an alert
when a user's windowed transaction total rises above or falls below a
threshold.
"""

import wallaroo


def application_setup(args):
    """Wire up the Wallaroo pipeline: generated transactions are keyed by
    user, totaled over 9-second range windows sliding every 3 seconds, and
    the resulting alerts are written to a TCP sink.
    """
    out_host, out_port = wallaroo.tcp_parse_output_addrs(args)[0]

    gen_source = wallaroo.GenSourceConfig("Alerts (windowed)", TransactionsGenerator())

    transactions = wallaroo.source("Alerts (windowed)", gen_source)
    pipeline = (transactions
        .key_by(extract_user)
        .to(wallaroo.range_windows(wallaroo.seconds(9))
              .with_slide(wallaroo.seconds(3))
              .over(TotalAggregation))
        .to_sink(wallaroo.TCPSinkConfig(out_host, out_port, encode_alert)))

    return wallaroo.build_application("Alerts (windowed)", pipeline)


class Transaction(object):
    # A single deposit (positive amount) or withdrawal (negative amount)
    # attributed to one user.
    def __init__(self, user, amount):
        self.user = user
        self.amount = amount


class TransactionTotal(object):
    # Accumulator for TotalAggregation: running sum of amounts in a window.
    def __init__(self):
        self.total = 0


class DepositAlert(object):
    # Emitted when a user's windowed total exceeds the upper threshold.
    def __init__(self, user, amount):
        self.user = user
        self.amount = amount

    def __str__(self):
        return "Deposit Alert for " + self.user + ": " + str(self.amount)

class WithdrawalAlert(object):
    # Emitted when a user's windowed total falls below the lower threshold.
    def __init__(self, user, amount):
        self.user = user
        self.amount = amount

    def __str__(self):
        return "Withdrawal Alert for " + self.user + ": " + str(self.amount)


@wallaroo.key_extractor
def extract_user(transaction):
    """Partition key: route each transaction by its user."""
    return transaction.user


class TotalAggregation(wallaroo.Aggregation):
    """Sums transaction amounts within a window and emits an alert when the
    total crosses +/-2000; emits nothing (None) otherwise."""
    def initial_accumulator(self):
        return TransactionTotal()

    def update(self, transaction, t_total):
        # Fold one transaction into the window's running total.
        t_total.total = t_total.total + transaction.amount

    def combine(self, t1, t2):
        # Merge two partial window totals into a fresh accumulator.
        new_t = TransactionTotal()
        new_t.total = t1.total + t2.total
        return new_t

    def output(self, user, t):
        # Called when the window is triggered; returning None suppresses output.
        print("!@TotalAggregation: Triggering output")
        if t.total > 2000:
            return DepositAlert(user, t.total)
        elif t.total < -2000:
            return WithdrawalAlert(user, t.total)


@wallaroo.encoder
def encode_alert(alert):
    """Serialize an alert for the TCP sink, one alert per line."""
    return (str(alert) + "\n").encode()


############################################
# DEFINE A GENERATOR FOR ALERTS TEST INPUTS
############################################

class TransactionsGenerator(object):
    """Deterministic pseudo-random source of transactions, cycling through a
    fixed set of users and steering totals back toward zero when a user's
    tracked running total drifts past +/-5000."""
    def __init__(self):
        self.user_idx = 0
        self.user_totals = [1, 0, 0, 0, 0]
        self.users = ["Fido", "Rex", "Dr. Whiskers", "Feathers", "Mountaineer"]

    def initial_value(self):
        return Transaction("Fido", 1)

    def apply(self, v):
        # A simplistic way to get some numbers above, below, and within our
        # thresholds.
        amount = ((((v.amount * 2305843009213693951) + 7) % 2500) - 1250)
        self.user_idx = (self.user_idx + 1) % len(self.users)
        user = self.users[self.user_idx]
        total = self.user_totals[self.user_idx]
        if total > 5000:
            amount = -6000
        elif total < -5000:
            amount = 6000
        self.user_totals[self.user_idx] = total + amount
        return Transaction(user, amount)
1,479
560
<filename>cpp_solutions/chapter_05_bit_manipulation/problem_05_07_pairwiseSwap.h #pragma once namespace chapter_05{ int pairwiseSwap(int x); }
60
594
#pragma once #include <UI/Grid.h> namespace Ubpa { class Setting : public Grid { public: using Grid::Grid; protected: virtual ~Setting() = default; public: static Ptr<Setting> GetInstance() { static auto instance = Ubpa::New<Setting>(); return instance; } }; }
107
988
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.netbeans.core.network.proxy.gnome;

import java.io.BufferedReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.netbeans.core.network.proxy.NetworkProxySettings;
import static org.netbeans.core.network.proxy.gnome.GnomeNetworkProxy.executeCommand;

/**
 * Resolves system network proxy settings on GNOME desktops that store their
 * configuration in GConf, by parsing the output of {@code gconftool-2 -R}.
 *
 * @author lfischme
 */
public class GconfNetworkProxy {

    private static final Logger LOGGER = Logger.getLogger(GconfNetworkProxy.class.getName());

    private static final String EQUALS = "="; //NOI18N
    // separator between entries of the ignore_hosts list value (a comma)
    private static final String COLON = ","; //NOI18N
    private static final String SQ_BRACKET_LEFT = "["; //NOI18N
    private static final String SQ_BRACKET_RIGHT = "]"; //NOI18N

    protected static final String GCONF_PATH = "/usr/bin/gconftool-2"; //NOI18N
    private static final String GCONF_ARGUMENT_LIST_RECURSIVELY = " -R "; //NOI18N

    private static final String GCONF_NODE_PROXY = "/system/proxy"; //NOI18N
    private static final String GCONF_NODE_HTTP_PROXY = "/system/http_proxy"; //NOI18N

    private static final String GCONF_KEY_MODE = "mode"; //NOI18N
    private static final String GCONF_KEY_PAC_URL = "autoconfig_url"; //NOI18N
    private static final String GCONF_KEY_HTTP_ALL = "use_http_proxy"; //NOI18N
    private static final String GCONF_KEY_HTTP_HOST = "host"; //NOI18N
    private static final String GCONF_KEY_HTTP_PORT = "port"; //NOI18N
    private static final String GCONF_KEY_HTTPS_HOST = "secure_host"; //NOI18N
    private static final String GCONF_KEY_HTTPS_PORT = "secure_port"; //NOI18N
    private static final String GCONF_KEY_SOCKS_HOST = "socks_host"; //NOI18N
    private static final String GCONF_KEY_SOCKS_PORT = "socks_port"; //NOI18N
    private static final String GCONF_KEY_IGNORE_HOSTS = "ignore_hosts"; //NOI18N

    private static final String GCONF_VALUE_NONE = "none"; //NOI18N
    private static final String GCONF_VALUE_AUTO = "auto"; //NOI18N
    private static final String GCONF_VALUE_MANUAL = "manual"; //NOI18N

    /**
     * Returns network proxy settings - retrieved via gconftool.
     *
     * Maps the GConf "mode" key to direct / PAC / manual proxy settings.
     *
     * @return network proxy settings via GConf.
     */
    protected static NetworkProxySettings getNetworkProxySettings() {
        LOGGER.log(Level.FINE, "GConf system proxy resolver started."); //NOI18N
        Map<String, String> proxyProperties = getGconfMap(GCONF_NODE_PROXY);

        String proxyMode = proxyProperties.get(GCONF_KEY_MODE);
        if (proxyMode == null) {
            LOGGER.log(Level.SEVERE, "GConf proxy mode is null.");
            return new NetworkProxySettings(false);
        }

        if (proxyMode.equals(GCONF_VALUE_NONE)) {
            LOGGER.log(Level.INFO, "GConf system proxy resolver: direct connection"); //NOI18N
            return new NetworkProxySettings();
        }

        if (proxyMode.equals(GCONF_VALUE_AUTO)) {
            String pacUrl = proxyProperties.get(GCONF_KEY_PAC_URL);

            LOGGER.log(Level.INFO, "GConf system proxy resolver: auto - PAC ({0})", pacUrl); //NOI18N

            if (pacUrl != null) {
                return new NetworkProxySettings(pacUrl);
            } else {
                return new NetworkProxySettings("");
            }
        }

        if (proxyMode.equals(GCONF_VALUE_MANUAL)) {
            // manual mode: merge in the separate http_proxy node
            proxyProperties.putAll(getGconfMap(GCONF_NODE_HTTP_PROXY));

            String httpProxyAll = proxyProperties.get(GCONF_KEY_HTTP_ALL);
            String httpProxyHost = proxyProperties.get(GCONF_KEY_HTTP_HOST);
            String httpProxyPort = proxyProperties.get(GCONF_KEY_HTTP_PORT);
            String noProxyHosts = proxyProperties.get(GCONF_KEY_IGNORE_HOSTS);

            LOGGER.log(Level.INFO, "GConf system proxy resolver: manual - http for all ({0})", httpProxyAll); //NOI18N
            LOGGER.log(Level.INFO, "GConf system proxy resolver: manual - http host ({0})", httpProxyHost); //NOI18N
            LOGGER.log(Level.INFO, "GConf system proxy resolver: manual - http port ({0})", httpProxyPort); //NOI18N
            LOGGER.log(Level.INFO, "GConf system proxy resolver: manual - no proxy hosts ({0})", noProxyHosts); //NOI18N

            if (httpProxyAll != null && Boolean.parseBoolean(httpProxyAll)) {
                // one proxy for all protocols
                return new NetworkProxySettings(httpProxyHost, httpProxyPort, getNoProxyHosts(noProxyHosts));
            } else {
                // per-protocol proxies
                String httpsProxyHost = proxyProperties.get(GCONF_KEY_HTTPS_HOST);
                String httpsProxyPort = proxyProperties.get(GCONF_KEY_HTTPS_PORT);
                String socksProxyHost = proxyProperties.get(GCONF_KEY_SOCKS_HOST);
                String socksProxyPort = proxyProperties.get(GCONF_KEY_SOCKS_PORT);

                LOGGER.log(Level.INFO, "GConf system proxy resolver: manual - https host ({0})", httpsProxyHost); //NOI18N
                LOGGER.log(Level.INFO, "GConf system proxy resolver: manual - https port ({0})", httpsProxyPort); //NOI18N
                LOGGER.log(Level.INFO, "GConf system proxy resolver: manual - socks host ({0})", socksProxyHost); //NOI18N
                LOGGER.log(Level.INFO, "GConf system proxy resolver: manual - socks port ({0})", socksProxyPort); //NOI18N

                return new NetworkProxySettings(httpProxyHost, httpProxyPort,
                        httpsProxyHost, httpsProxyPort,
                        socksProxyHost, socksProxyPort, getNoProxyHosts(noProxyHosts));
            }
        }

        return new NetworkProxySettings(false);
    }

    /**
     * Checks if gconftool returns suitable response.
     *
     * @return true if gconftool returns suitable response
     */
    protected static boolean isGconfValid() {
        String command = GCONF_PATH + GCONF_ARGUMENT_LIST_RECURSIVELY + GCONF_NODE_PROXY;

        // try-with-resources: the reader wraps the child process' stdout and
        // must be closed, otherwise the pipe file descriptor leaks.
        try (BufferedReader reader = executeCommand(command)) {
            if (reader != null && reader.ready()) {
                return true;
            }
        } catch (IOException ioe) {
            LOGGER.log(Level.SEVERE, "Cannot read line: " + command, ioe); //NOI18N
        }

        LOGGER.log(Level.WARNING, "GConf return empty list"); //NOI18N
        return false;
    }

    /**
     * Returns map of properties retrieved from gconftool-2.
     *
     * Executes the command "/usr/bin/gconftool-2 -R [node]".
     *
     * @param gconfNode Node for which the properties should be returned.
     * @return Map of properties retrieved from gconftool-2.
     */
    private static Map<String, String> getGconfMap(String gconfNode) {
        Map<String, String> map = new HashMap<String, String>();

        String command = GCONF_PATH + GCONF_ARGUMENT_LIST_RECURSIVELY + gconfNode;

        // try-with-resources closes the reader (and the underlying pipe)
        // on every path; the original code leaked it.
        try (BufferedReader reader = executeCommand(command)) {
            if (reader == null) {
                return map;
            }
            String line = reader.readLine();
            while (line != null) {
                // Skip lines without a key/value separator (e.g. nested node
                // headers) -- getKey() would throw on them otherwise.
                if (line.contains(EQUALS)) {
                    String key = getKey(line).toLowerCase();
                    if (!key.isEmpty()) {
                        String value = getValue(line);
                        map.put(key, value);
                    }
                }
                line = reader.readLine();
            }
        } catch (IOException ioe) {
            LOGGER.log(Level.SEVERE, "Cannot read line: " + command, ioe); //NOI18N
        }

        return map;
    }

    /**
     * Returns the key for one line response from gconftool-2.
     *
     * Caller must ensure the line contains {@code "="}.
     *
     * @param line Line from gconftool-2 response.
     * @return The key for one line response from gconftool-2.
     */
    private static String getKey(String line) {
        return line.substring(0, line.indexOf(EQUALS)).trim();
    }

    /**
     * Returns the value for one line response from gconftool-2.
     *
     * Caller must ensure the line contains {@code "="}.
     *
     * @param line Line from gconftool-2 response.
     * @return The value for one line response from gconftool-2.
     */
    private static String getValue(String line) {
        return line.substring(line.indexOf(EQUALS) + 1).trim();
    }

    /**
     * Returns array of Strings of no proxy hosts.
     *
     * The value responding to "ignore_hosts" key.
     *
     * Parses the value returned from gconftool-2.
     * Usually [host1,host2,host3]
     *
     * @param noProxyHostsString The value returned from gconftool-2.
     * @return Array of Strings of no proxy hosts.
     */
    private static String[] getNoProxyHosts(String noProxyHostsString) {
        if (noProxyHostsString != null && !noProxyHostsString.isEmpty()) {
            // strip the surrounding [ ] of the GConf list literal
            if (noProxyHostsString.startsWith(SQ_BRACKET_LEFT)) {
                noProxyHostsString = noProxyHostsString.substring(1);
            }

            if (noProxyHostsString.endsWith(SQ_BRACKET_RIGHT)) {
                noProxyHostsString = noProxyHostsString.substring(0, noProxyHostsString.length() - 1);
            }

            return noProxyHostsString.split(COLON);
        }

        return new String[0];
    }
}
678
<filename>iOSOpenDev/frameworks/CorePDF.framework/Headers/CPLayoutArea.h /** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/CorePDF.framework/CorePDF */ #import <CorePDF/CorePDF-Structs.h> #import <CorePDF/CPRegion.h> @interface CPLayoutArea : CPRegion { BOOL isFirstLayout; // 68 = 0x44 } @property(assign) BOOL isFirstLayout; // G=0xa94d; S=0xa95d; converted property @property(assign) BOOL isImageRegion; // G=0xa979; S=0xac25; converted property - (id)init; // 0xa981 - (void)accept:(id)accept; // 0xaeb5 - (BOOL)isSimilarTo:(id)to; // 0xa949 // converted property getter: - (BOOL)isFirstLayout; // 0xa94d // converted property setter: - (void)setIsFirstLayout:(BOOL)layout; // 0xa95d - (void)addColumnBreaks; // 0xacad - (BOOL)isBoxRegion; // 0xa96d - (BOOL)isGraphicalRegion; // 0xa971 - (BOOL)isRowRegion; // 0xa975 // converted property setter: - (void)setIsImageRegion:(BOOL)region; // 0xac25 // converted property getter: - (BOOL)isImageRegion; // 0xa979 - (BOOL)isShapeRegion; // 0xa97d - (CGRect)layoutAreaBounds; // 0xab11 - (float)selectionBottom; // 0xaa79 - (id)properties; // 0xaa19 - (id)description; // 0xa9c5 @end
480
679
/**************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *************************************************************/

#ifndef SD_SIDEBAR_PANELS_DCUMENT_HELPER_HXX
#define SD_SIDEBAR_PANELS_DCUMENT_HELPER_HXX

#include <tools/solar.h>
#include <boost/shared_ptr.hpp>
#include <vector>

class SdDrawDocument;
class SdPage;
class String;

namespace sd { namespace sidebar {

/** A collection of static helper methods supporting the handling of
    master pages (copying them between documents, assigning them to
    slides, and providing the styles they depend on).
*/
class DocumentHelper
{
public:
    /** Return a copy of the given master page in the given document.
    */
    static SdPage* CopyMasterPageToLocalDocument (
        SdDrawDocument& rTargetDocument,
        SdPage* pMasterPage);

    /** Return and, when not yet present, create a slide that uses the given
        master page.
    */
    static SdPage* GetSlideForMasterPage (SdPage* pMasterPage);

    /** Copy the styles used by the given page from the source document to
        the target document.
    */
    static void ProvideStyles (
        SdDrawDocument& rSourceDocument,
        SdDrawDocument& rTargetDocument,
        SdPage* pPage);

    /** Assign the given master page to the list of pages.
        @param rTargetDocument
            The document that is the owner of the pages in rPageList.
        @param pMasterPage
            This master page will usually be a member of the list of all
            available master pages as provided by the MasterPageContainer.
        @param rPageList
            The pages to which to assign the master page.  These pages may
            be slides or master pages themselves.
    */
    static void AssignMasterPageToPageList (
        SdDrawDocument& rTargetDocument,
        SdPage* pMasterPage,
        const ::boost::shared_ptr<std::vector<SdPage*> >& rPageList);

private:
    /** Add (a copy of) the given master page to the target document.
    */
    static SdPage* AddMasterPage (
        SdDrawDocument& rTargetDocument,
        SdPage* pMasterPage);
    /** Add (a copy of) the given master page to the target document at the
        given position.
    */
    static SdPage* AddMasterPage (
        SdDrawDocument& rTargetDocument,
        SdPage* pMasterPage,
        sal_uInt16 nInsertionIndex);
    /** Return the target document's equivalent of the given master page,
        creating it when necessary.
    */
    static SdPage* ProvideMasterPage (
        SdDrawDocument& rTargetDocument,
        SdPage* pMasterPage,
        const ::boost::shared_ptr<std::vector<SdPage*> >& rpPageList);

    /** Assign the given master page to the given page.
        @param pMasterPage
            In contrast to AssignMasterPageToPageList() this page is assumed
            to be in the target document, i.e. the same document that pPage
            is in.  The caller will usually call AddMasterPage() to create a
            clone of a master page in another document beforehand.
        @param rsBaseLayoutName
            The layout name of the given master page.  It is given so that
            it does not have to be created on every call.  It could be
            generated from the given master page, though.
        @param pPage
            The page to which to assign the master page.  It can be a slide
            or a master page itself.
    */
    static void AssignMasterPageToPage (
        SdPage* pMasterPage,
        const String& rsBaseLayoutName,
        SdPage* pPage);
};

} } // end of namespace sd::sidebar

#endif
1,752
package cn.myperf4j.base.http.server; import cn.myperf4j.base.http.HttpHeaders; import cn.myperf4j.base.http.HttpRequest; import cn.myperf4j.base.http.HttpRequest.Builder; import cn.myperf4j.base.http.HttpRespStatus; import cn.myperf4j.base.http.HttpResponse; import cn.myperf4j.base.http.client.HttpClient; import cn.myperf4j.base.util.Logger; import cn.myperf4j.base.util.collections.MapUtils; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import java.io.IOException; import java.util.Collections; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import static java.nio.charset.StandardCharsets.UTF_8; /** * Created by LinShunkang on 2020/07/12 */ public class SimpleHttpServerTest { private static final String RESP_STR = "Hello, SimpleHttpServer"; private static final int PORT = 1024; private static final HttpClient httpClient = new HttpClient.Builder().connectTimeout(3000).build(); private static final SimpleHttpServer server = new SimpleHttpServer.Builder() .port(PORT) .minWorkers(8) .maxWorkers(64) .acceptCnt(1024) .dispatcher(new Dispatcher() { @Override public HttpResponse dispatch(HttpRequest request) { Logger.info(" dispatching..."); return new HttpResponse(HttpRespStatus.OK, defaultHeaders(), RESP_STR.getBytes(UTF_8)); } private HttpHeaders defaultHeaders() { HttpHeaders headers = new HttpHeaders(6); headers.set("User-Agent", "MyPerf4J"); headers.set("Connection", "Keep-Alive"); headers.set("Charset", UTF_8.name()); return headers; } }) .build(); @BeforeClass public static void start() { server.startAsync(); } @AfterClass public static void stop() { server.stop(); } @Test public void test() throws InterruptedException { final int TEST_TIMES = 1000; final ExecutorService executor = Executors.newFixedThreadPool(10); final CountDownLatch latch = new CountDownLatch(TEST_TIMES); for (int i = 0; i < TEST_TIMES; i++) { 
executor.execute(new Runnable() { @Override public void run() { try { long startMillis = System.currentTimeMillis(); final HttpResponse response = httpClient.execute(new Builder() .url("127.0.0.1:" + PORT + "/test") .get() .params(MapUtils.of("k1", Collections.singletonList("v1"))) .build()); Logger.info(" Receive response=" + response.getBodyString() + ", cost=" + (System.currentTimeMillis() - startMillis) + "ms"); Assert.assertEquals(RESP_STR, response.getBodyString()); } catch (IOException e) { e.printStackTrace(); } finally { latch.countDown(); } } }); } latch.await(); executor.shutdownNow(); } }
1,721
684
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.explorer.ui; import org.activiti.explorer.I18nManager; import org.activiti.explorer.Messages; import org.activiti.explorer.navigation.NavigationFragmentChangeListener; import org.activiti.explorer.navigation.UriFragment; import org.activiti.explorer.ui.login.LoginPage; import org.activiti.explorer.ui.mainlayout.ExplorerLayout; import org.activiti.explorer.ui.mainlayout.MainLayout; import org.springframework.beans.factory.annotation.Autowired; import com.vaadin.ui.Component; import com.vaadin.ui.UriFragmentUtility; import com.vaadin.ui.Window; /** * @author <NAME> */ public class MainWindow extends Window { private static final long serialVersionUID = 1L; @Autowired protected I18nManager i18nManager; @Autowired protected NavigationFragmentChangeListener navigationFragmentChangeListener; // UI protected MainLayout mainLayout; protected UriFragmentUtility uriFragmentUtility; protected UriFragment currentUriFragment; protected boolean showingLoginPage; public MainWindow() { setTheme(ExplorerLayout.THEME); } @Override public void attach() { super.attach(); setCaption(i18nManager.getMessage(Messages.APP_TITLE)); } public void showLoginPage() { showingLoginPage = true; addStyleName(ExplorerLayout.STYLE_LOGIN_PAGE); setContent(new LoginPage()); } public void showDefaultContent() { showingLoginPage = false; removeStyleName(ExplorerLayout.STYLE_LOGIN_PAGE); addStyleName("Default style"); // Vaadin bug: must set something 
or old style (eg. login page style) is not overwritten // init general look and feel mainLayout = new MainLayout(); setContent(mainLayout); // init hidden components initHiddenComponents(); } // View handling public void switchView(Component component) { mainLayout.setMainContent(component); } public void setMainNavigation(String navigation) { mainLayout.setMainNavigation(navigation); } // URL handling protected void initHiddenComponents() { // Add the URI Fragent utility uriFragmentUtility = new UriFragmentUtility(); mainLayout.addComponent(uriFragmentUtility); // Add listener to control page flow based on URI uriFragmentUtility.addListener(navigationFragmentChangeListener); } public UriFragment getCurrentUriFragment() { return currentUriFragment; } /** * Sets the current {@link UriFragment}. * Won't trigger navigation, just updates the URI fragment in the browser. */ public void setCurrentUriFragment(UriFragment fragment) { this.currentUriFragment = fragment; if(fragmentChanged(fragment)) { if(fragment != null) { uriFragmentUtility.setFragment(fragment.toString(), false); } else { uriFragmentUtility.setFragment("", false); } } } private boolean fragmentChanged(UriFragment fragment) { String fragmentString = fragment.toString(); if(fragmentString == null) { return uriFragmentUtility.getFragment() != null; } else { return !fragmentString.equals(uriFragmentUtility.getFragment()); } } public boolean isShowingLoginPage() { return showingLoginPage; } public void setNavigationFragmentChangeListener(NavigationFragmentChangeListener navigationFragmentChangeListener) { this.navigationFragmentChangeListener = navigationFragmentChangeListener; } public void setI18nManager(I18nManager i18nManager) { this.i18nManager = i18nManager; } }
1,361
3,495
/* * * Copyright (c) 2020 Project CHIP Authors * Copyright (c) 2020 Texas Instruments Incorporated * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "aes_alt.h" #include "mbedtls/aes.h" #if defined(MBEDTLS_AES_ALT) #include <string.h> #include "ti_drivers_config.h" #include <ti/devices/DeviceFamily.h> #include <ti/drivers/AESECB.h> #include <ti/drivers/cryptoutils/cryptokey/CryptoKeyPlaintext.h> /* * number of active contexts, used for power on/off of the crypto core */ static unsigned int ref_num = 0; static AESECB_Handle AESECB_handle = NULL; void mbedtls_aes_init(mbedtls_aes_context * ctx) { AESECB_Params AESECBParams; if (ref_num == 0) { AESECB_Params_init(&AESECBParams); AESECBParams.returnBehavior = AESECB_RETURN_BEHAVIOR_POLLING; AESECB_handle = AESECB_open(CONFIG_AESECB_1, &AESECBParams); // handle will be NULL if open failed, subsequent calls will fail with a generic HW error } ref_num++; } void mbedtls_aes_free(mbedtls_aes_context * ctx) { if (ref_num > 0) { ref_num--; if (ref_num == 0) { AESECB_close(AESECB_handle); AESECB_handle = NULL; } } memset((void *) ctx, 0x00, sizeof(ctx)); } int mbedtls_aes_setkey_enc(mbedtls_aes_context * ctx, const unsigned char * key, unsigned int keybits) { int_fast16_t statusCrypto; size_t keylen = keybits / 8U; // 8 bits in a byte if (keylen > sizeof(ctx->keyMaterial)) { return MBEDTLS_ERR_AES_INVALID_KEY_LENGTH; } /* Initialize AES key */ memcpy(ctx->keyMaterial, key, keylen); statusCrypto = 
CryptoKeyPlaintext_initKey(&ctx->cryptoKey, (uint8_t *) ctx->keyMaterial, keylen); if (CryptoKey_STATUS_SUCCESS != statusCrypto) { return MBEDTLS_ERR_AES_HW_ACCEL_FAILED; } return 0; } int mbedtls_aes_setkey_dec(mbedtls_aes_context * ctx, const unsigned char * key, unsigned int keybits) { int_fast16_t statusCrypto; size_t keylen = keybits / 8U; // 8 bits in a byte if (keylen > sizeof(ctx->keyMaterial)) { return MBEDTLS_ERR_AES_INVALID_KEY_LENGTH; } /* Initialize AES key */ statusCrypto = CryptoKeyPlaintext_initKey(&ctx->cryptoKey, (uint8_t *) key, keylen); if (CryptoKey_STATUS_SUCCESS != statusCrypto) { return MBEDTLS_ERR_AES_HW_ACCEL_FAILED; } return 0; } int mbedtls_aes_crypt_ecb(mbedtls_aes_context * ctx, int mode, const unsigned char input[16], unsigned char output[16]) { int statusCrypto; AESECB_Operation operationOneStepEncrypt; /* run it through the authentication + encryption, pass the ccmLVal = 2 */ AESECB_Operation_init(&operationOneStepEncrypt); operationOneStepEncrypt.key = &ctx->cryptoKey; operationOneStepEncrypt.inputLength = 16; operationOneStepEncrypt.input = (uint8_t *) input; operationOneStepEncrypt.output = (uint8_t *) output; statusCrypto = AESECB_oneStepEncrypt(AESECB_handle, &operationOneStepEncrypt); if (CryptoKey_STATUS_SUCCESS != statusCrypto) { return MBEDTLS_ERR_AES_HW_ACCEL_FAILED; } return 0; } #endif
1,569
2,127
<gh_stars>1000+ package com.lance.hibernate.repository; import org.springframework.data.jpa.repository.JpaRepository; import com.lance.hibernate.entity.CityEntity; public interface CityRepository extends JpaRepository<CityEntity, Long>{ }
84
8,092
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import json from tempfile import NamedTemporaryFile from unittest import TestCase, mock from parameterized import parameterized from airflow.models import DAG, TaskInstance as TI from airflow.providers.google.marketing_platform.operators.campaign_manager import ( GoogleCampaignManagerBatchInsertConversionsOperator, GoogleCampaignManagerBatchUpdateConversionsOperator, GoogleCampaignManagerDeleteReportOperator, GoogleCampaignManagerDownloadReportOperator, GoogleCampaignManagerInsertReportOperator, GoogleCampaignManagerRunReportOperator, ) from airflow.utils import timezone from airflow.utils.session import create_session API_VERSION = "api_version" GCP_CONN_ID = "google_cloud_default" CONVERSION = { "kind": "dfareporting#conversion", "floodlightActivityId": 1234, "floodlightConfigurationId": 1234, "gclid": "971nc2849184c1914019v1c34c14", "ordinal": "0", "customVariables": [ { "kind": "dfareporting#customFloodlightVariable", "type": "U10", "value": "value", } ], } DEFAULT_DATE = timezone.datetime(2021, 1, 1) PROFILE_ID = "profile_id" REPORT_ID = "report_id" FILE_ID = "file_id" BUCKET_NAME = "test_bucket" REPORT_NAME = "test_report.csv" TEMP_FILE_NAME = "test" class 
TestGoogleCampaignManagerDeleteReportOperator(TestCase): @mock.patch( "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook" ) @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator") def test_execute(self, mock_base_op, hook_mock): op = GoogleCampaignManagerDeleteReportOperator( profile_id=PROFILE_ID, report_id=REPORT_ID, api_version=API_VERSION, task_id="test_task", ) op.execute(context=None) hook_mock.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, delegate_to=None, api_version=API_VERSION, impersonation_chain=None, ) hook_mock.return_value.delete_report.assert_called_once_with( profile_id=PROFILE_ID, report_id=REPORT_ID ) class TestGoogleCampaignManagerDownloadReportOperator(TestCase): def setUp(self): with create_session() as session: session.query(TI).delete() def tearDown(self): with create_session() as session: session.query(TI).delete() @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.http") @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.tempfile") @mock.patch( "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook" ) @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.GCSHook") @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator") @mock.patch( "airflow.providers.google.marketing_platform.operators." 
"campaign_manager.GoogleCampaignManagerDownloadReportOperator.xcom_push" ) def test_execute( self, xcom_mock, mock_base_op, gcs_hook_mock, hook_mock, tempfile_mock, http_mock, ): http_mock.MediaIoBaseDownload.return_value.next_chunk.return_value = ( None, True, ) tempfile_mock.NamedTemporaryFile.return_value.__enter__.return_value.name = TEMP_FILE_NAME op = GoogleCampaignManagerDownloadReportOperator( profile_id=PROFILE_ID, report_id=REPORT_ID, file_id=FILE_ID, bucket_name=BUCKET_NAME, report_name=REPORT_NAME, api_version=API_VERSION, task_id="test_task", ) op.execute(context=None) hook_mock.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, delegate_to=None, api_version=API_VERSION, impersonation_chain=None, ) hook_mock.return_value.get_report_file.assert_called_once_with( profile_id=PROFILE_ID, report_id=REPORT_ID, file_id=FILE_ID ) gcs_hook_mock.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, delegate_to=None, impersonation_chain=None, ) gcs_hook_mock.return_value.upload.assert_called_once_with( bucket_name=BUCKET_NAME, object_name=REPORT_NAME + ".gz", gzip=True, filename=TEMP_FILE_NAME, mime_type="text/csv", ) xcom_mock.assert_called_once_with(None, key="report_name", value=REPORT_NAME + ".gz") @parameterized.expand([BUCKET_NAME, f"gs://{BUCKET_NAME}", "XComArg", "{{ ti.xcom_pull(task_ids='f') }}"]) @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.http") @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.tempfile") @mock.patch( "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook" ) @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.GCSHook") def test_set_bucket_name( self, test_bucket_name, gcs_hook_mock, hook_mock, tempfile_mock, http_mock, ): http_mock.MediaIoBaseDownload.return_value.next_chunk.return_value = ( None, True, ) tempfile_mock.NamedTemporaryFile.return_value.__enter__.return_value.name = 
TEMP_FILE_NAME dag = DAG( dag_id="test_set_bucket_name", start_date=DEFAULT_DATE, schedule_interval=None, catchup=False, ) if BUCKET_NAME not in test_bucket_name: @dag.task def f(): return BUCKET_NAME taskflow_op = f() taskflow_op.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) op = GoogleCampaignManagerDownloadReportOperator( profile_id=PROFILE_ID, report_id=REPORT_ID, file_id=FILE_ID, bucket_name=test_bucket_name if test_bucket_name != "XComArg" else taskflow_op, report_name=REPORT_NAME, api_version=API_VERSION, task_id="test_task", dag=dag, ) op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) gcs_hook_mock.return_value.upload.assert_called_once_with( bucket_name=BUCKET_NAME, object_name=REPORT_NAME + ".gz", gzip=True, filename=TEMP_FILE_NAME, mime_type="text/csv", ) class TestGoogleCampaignManagerInsertReportOperator(TestCase): @mock.patch( "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook" ) @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator") @mock.patch( "airflow.providers.google.marketing_platform.operators." 
"campaign_manager.GoogleCampaignManagerInsertReportOperator.xcom_push" ) def test_execute(self, xcom_mock, mock_base_op, hook_mock): report = {"report": "test"} hook_mock.return_value.insert_report.return_value = {"id": REPORT_ID} op = GoogleCampaignManagerInsertReportOperator( profile_id=PROFILE_ID, report=report, api_version=API_VERSION, task_id="test_task", ) op.execute(context=None) hook_mock.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, delegate_to=None, api_version=API_VERSION, impersonation_chain=None, ) hook_mock.return_value.insert_report.assert_called_once_with(profile_id=PROFILE_ID, report=report) xcom_mock.assert_called_once_with(None, key="report_id", value=REPORT_ID) def test_prepare_template(self): report = {"key": "value"} with NamedTemporaryFile("w+", suffix=".json") as f: f.write(json.dumps(report)) f.flush() op = GoogleCampaignManagerInsertReportOperator( profile_id=PROFILE_ID, report=f.name, api_version=API_VERSION, task_id="test_task", ) op.prepare_template() assert isinstance(op.report, dict) assert op.report == report class TestGoogleCampaignManagerRunReportOperator(TestCase): @mock.patch( "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook" ) @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator") @mock.patch( "airflow.providers.google.marketing_platform.operators." 
"campaign_manager.GoogleCampaignManagerRunReportOperator.xcom_push" ) def test_execute(self, xcom_mock, mock_base_op, hook_mock): synchronous = True hook_mock.return_value.run_report.return_value = {"id": FILE_ID} op = GoogleCampaignManagerRunReportOperator( profile_id=PROFILE_ID, report_id=REPORT_ID, synchronous=synchronous, api_version=API_VERSION, task_id="test_task", ) op.execute(context=None) hook_mock.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, delegate_to=None, api_version=API_VERSION, impersonation_chain=None, ) hook_mock.return_value.run_report.assert_called_once_with( profile_id=PROFILE_ID, report_id=REPORT_ID, synchronous=synchronous ) xcom_mock.assert_called_once_with(None, key="file_id", value=FILE_ID) class TestGoogleCampaignManagerBatchInsertConversionsOperator(TestCase): @mock.patch( "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook" ) @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator") def test_execute(self, mock_base_op, hook_mock): op = GoogleCampaignManagerBatchInsertConversionsOperator( task_id="insert_conversion", profile_id=PROFILE_ID, conversions=[CONVERSION], encryption_source="AD_SERVING", encryption_entity_type="DCM_ADVERTISER", encryption_entity_id=123456789, ) op.execute(None) hook_mock.return_value.conversions_batch_insert.assert_called_once_with( profile_id=PROFILE_ID, conversions=[CONVERSION], encryption_source="AD_SERVING", encryption_entity_type="DCM_ADVERTISER", encryption_entity_id=123456789, max_failed_inserts=0, ) class TestGoogleCampaignManagerBatchUpdateConversionOperator(TestCase): @mock.patch( "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook" ) @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator") def test_execute(self, mock_base_op, hook_mock): op = GoogleCampaignManagerBatchUpdateConversionsOperator( task_id="update_conversion", 
profile_id=PROFILE_ID, conversions=[CONVERSION], encryption_source="AD_SERVING", encryption_entity_type="DCM_ADVERTISER", encryption_entity_id=123456789, ) op.execute(None) hook_mock.return_value.conversions_batch_update.assert_called_once_with( profile_id=PROFILE_ID, conversions=[CONVERSION], encryption_source="AD_SERVING", encryption_entity_type="DCM_ADVERTISER", encryption_entity_id=123456789, max_failed_updates=0, )
5,730
2,338
<reponame>medismailben/llvm-project // RUN: %clang_cl_asan -Od %s -Fe%t // RUN: %run %t 2>&1 | FileCheck %s #include <stdio.h> #include <windows.h> int main() { char *buffer; buffer = (char *)HeapAlloc(GetProcessHeap(), 0, 32), buffer[0] = 'a'; HeapFree(GetProcessHeap(), 0, buffer); puts("Okay"); // CHECK: Okay }
143
343
from shutil import copyfileobj import sh from django.utils.translation import ugettext_lazy as _ from mayan.apps.dependencies.exceptions import DependenciesException from mayan.apps.storage.utils import NamedTemporaryFile from ..classes import MIMETypeBackend from .literals import DEFAULT_MIMETYPE_PATH class MIMETypeBackendPerlFileMIMEInfo(MIMETypeBackend): def _init(self, copy_length=None, mimetype_path=None): self.mimetype_path = mimetype_path or DEFAULT_MIMETYPE_PATH self.copy_length = copy_length try: self.command_mimetype = sh.Command(path=self.mimetype_path).bake( '--magic-only' ) except sh.CommandNotFound: raise DependenciesException( _('mimetype not installed or not found.') ) def _get_mime_type(self, file_object, mime_type_only): with NamedTemporaryFile() as temporary_file_object: file_object.seek(0) copyfileobj( fsrc=file_object, fdst=temporary_file_object, length=self.copy_length ) file_object.seek(0) temporary_file_object.seek(0) filename, mime_type = self.command_mimetype( temporary_file_object.name ).split() return (mime_type, 'binary')
629
928
/* * File System Library by Parra Studios * A cross-platform library for managing file system, paths and files. * * Copyright (C) 2016 - 2021 <NAME> <<EMAIL>> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ #ifndef FILESYSTEM_FILE_DESCRIPTOR_H #define FILESYSTEM_FILE_DESCRIPTOR_H 1 /* -- Headers -- */ #include <filesystem/filesystem_api.h> #include <filesystem/filesystem_directory_descriptor.h> #ifdef __cplusplus extern "C" { #endif /* -- Definitions -- */ #define FILE_DESCRIPTOR_NAME_SIZE 0x0100 #define FILE_DESCRIPTOR_EXTENSION_SIZE 0x0012 /* -- Forward Declarations -- */ struct file_descriptor_type; /* -- Type Definitions -- */ typedef struct file_descriptor_type *file_descriptor; /* -- Methods -- */ /** * @brief * Create a file descriptor from specified directory and name * * @param[in] owner * Directory which file belongs to * * @param[in] name * The name of the file * * @return * A pointer to the file descriptor if success, null pointer otherwhise */ FILESYSTEM_API file_descriptor file_descriptor_create(directory_descriptor owner, const char *name); /** * @brief * Get the owner directory where is the file * * @param[in] f * File descriptor pointer * * @return * A pointer to directory descriptor which owns the file */ FILESYSTEM_API directory_descriptor file_descriptor_owner(file_descriptor f); /** * @brief * Get the file name * * @param[in] f * File descriptor pointer * * @return * A constant string pointer to the name of @f */ FILESYSTEM_API const char 
*file_descriptor_name(file_descriptor f); /** * @brief * Get the file extension * * @param[in] f * File descriptor pointer * * @return * A constant string pointer to the extension of @f */ FILESYSTEM_API const char *file_descriptor_extension(file_descriptor f); /** * @brief * Destroy a file from memory * * @param[in] f * File descriptor pointer */ FILESYSTEM_API void file_descriptor_destroy(file_descriptor f); #ifdef __cplusplus } #endif #endif /* FILESYSTEM_FILE_DESCRIPTOR_H */
883
4,242
/* Copyright (c) 2015, <NAME> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #ifndef TORRENT_TRANSFER_SIM_HPP #define TORRENT_TRANSFER_SIM_HPP #include <string> #include <array> #include <iostream> #include "simulator/simulator.hpp" #include "simulator/socks_server.hpp" #include "simulator/utils.hpp" #include "libtorrent/session.hpp" #include "libtorrent/address.hpp" #include "libtorrent/session_stats.hpp" #include "libtorrent/settings_pack.hpp" #include "libtorrent/ip_filter.hpp" #include "libtorrent/alert_types.hpp" #include "libtorrent/aux_/proxy_settings.hpp" #include "libtorrent/settings_pack.hpp" #include "libtorrent/create_torrent.hpp" #include "libtorrent/random.hpp" #include "test.hpp" #include "create_torrent.hpp" #include "settings.hpp" #include "setup_swarm.hpp" #include "utils.hpp" #include "test_utils.hpp" #include "setup_transfer.hpp" // for addr() #include "disk_io.hpp" template <typename Setup, typename HandleAlerts, typename Test> void run_test( Setup setup , HandleAlerts on_alert , Test test , test_transfer_flags_t flags = {} , test_disk const downloader_disk_constructor = test_disk() , test_disk const seed_disk_constructor = test_disk() , lt::seconds const timeout = lt::seconds(60) ) { using lt::settings_pack; using lt::address; using lt::alert_cast; const bool use_ipv6 = bool(flags & tx::ipv6); char const* peer0_ip[2] = { "192.168.127.12", "fc00:db20:35b:7399::5" }; char const* peer1_ip[2] = { "172.16.31.10", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b" }; address peer0 = addr(peer0_ip[use_ipv6]); address peer1 = addr(peer1_ip[use_ipv6]); address proxy = (flags & tx::ipv6) ? 
addr("2001::2") : addr("172.16.31.10"); // setup the simulation sim::default_config network_cfg; sim::simulation sim{network_cfg}; sim::asio::io_context ios0 { sim, peer0 }; sim::asio::io_context ios1 { sim, peer1 }; lt::session_proxy zombie[2]; sim::asio::io_context proxy_ios{sim, proxy }; sim::socks_server socks4(proxy_ios, 4444, 4); sim::socks_server socks5(proxy_ios, 5555, 5); socks5.bind_start_port(3000); lt::session_params params; // setup settings pack to use for the session (customization point) lt::settings_pack& pack = params.settings; pack = settings(); pack.set_bool(settings_pack::disable_hash_checks, false); // disable utp by default pack.set_bool(settings_pack::enable_outgoing_utp, false); pack.set_bool(settings_pack::enable_incoming_utp, false); // disable encryption by default pack.set_bool(settings_pack::prefer_rc4, false); pack.set_int(settings_pack::in_enc_policy, settings_pack::pe_disabled); pack.set_int(settings_pack::out_enc_policy, settings_pack::pe_disabled); pack.set_int(settings_pack::allowed_enc_level, settings_pack::pe_plaintext); pack.set_str(settings_pack::listen_interfaces, make_ep_string(peer0_ip[use_ipv6], use_ipv6, "6881")); // create session std::shared_ptr<lt::session> ses[2]; // session 0 is a downloader, session 1 is a seed params.disk_io_constructor = downloader_disk_constructor; ses[0] = std::make_shared<lt::session>(params, ios0); pack.set_str(settings_pack::listen_interfaces, make_ep_string(peer1_ip[use_ipv6], use_ipv6, "6881")); params.disk_io_constructor = seed_disk_constructor.set_files(existing_files_mode::full_valid); ses[1] = std::make_shared<lt::session>(params, ios1); setup(*ses[0], *ses[1]); // only monitor alerts for session 0 (the downloader) print_alerts(*ses[0], [=](lt::session& ses, lt::alert const* a) { if (auto ta = alert_cast<lt::add_torrent_alert>(a)) { if (flags & tx::connect_proxy) ta->handle.connect_peer(lt::tcp::endpoint(proxy, 3000)); else ta->handle.connect_peer(lt::tcp::endpoint(peer1, 6881)); } 
on_alert(ses, a); }, 0); print_alerts(*ses[1], [](lt::session&, lt::alert const*){}, 1); lt::add_torrent_params atp = ::create_test_torrent(10 , (flags & tx::v2_only) ? lt::create_torrent::v2_only : (flags & tx::v1_only) ? lt::create_torrent::v1_only : lt::create_flags_t{} , (flags & tx::small_pieces) ? 1 : (flags & tx::large_pieces) ? 4 : 2 , (flags & tx::multiple_files) ? 3 : 1 ); atp.flags &= ~lt::torrent_flags::auto_managed; atp.flags &= ~lt::torrent_flags::paused; ses[1]->async_add_torrent(atp); auto torrent = atp.ti; atp.save_path = save_path(0); if (flags & tx::magnet_download) { atp.info_hashes = atp.ti->info_hashes(); atp.ti.reset(); } ses[0]->async_add_torrent(atp); sim::timer t(sim, timeout, [&](boost::system::error_code const&) { auto h = ses[0]->get_torrents(); auto ti = h[0].torrent_file_with_hashes(); // if we're a seed, we should definitely have the torrent info. If we're // note a seed, we may still have the torrent_info in case it's a v1 // torrent if (is_seed(*ses[0])) TEST_CHECK(ti); if (ti) { if (ti->v2()) TEST_EQUAL(ti->v2_piece_hashes_verified(), true); auto downloaded = serialize(*ti); auto added = serialize(*torrent); TEST_CHECK(downloaded == added); } test(ses); // shut down int idx = 0; for (auto& s : ses) { zombie[idx++] = s->abort(); s.reset(); } }); sim.run(); } void no_init(lt::session& ses0, lt::session& ses1); struct record_finished_pieces { record_finished_pieces(std::set<lt::piece_index_t>& p); void operator()(lt::session&, lt::alert const* a) const; std::set<lt::piece_index_t>* m_passed; }; struct expect_seed { expect_seed(bool e); void operator()(std::shared_ptr<lt::session> ses[2]) const; bool m_expect; }; int blocks_per_piece(test_transfer_flags_t const flags); int num_pieces(test_transfer_flags_t const flags); #endif
2,672
388
package com.ustwo.clockwise.companion;

import android.util.Log;

import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.data.FreezableUtils;
import com.google.android.gms.wearable.DataEvent;
import com.google.android.gms.wearable.DataEventBuffer;
import com.google.android.gms.wearable.DataMap;
import com.google.android.gms.wearable.MessageEvent;
import com.google.android.gms.wearable.WearableListenerService;
import com.ustwo.clockwise.common.DataChangedHandler;
import com.ustwo.clockwise.common.MessageReceivedHandler;
import com.ustwo.clockwise.common.WearableAPIHelper;
import com.ustwo.clockwise.companion.permissions.CompanionPermissionRequestMessageHandler;

import java.util.ArrayList;
import java.util.List;

/**
 * Service that responds to all com.google.android.gms.wearable.BIND_LISTENER events from the wearable and routes the
 * incoming messages and data paths to the required handlers.
 * <p/>
 * Note that this is required because each application may only have one Service for the BIND_LISTENER action.
 *
 * @author <EMAIL>
 */
public class CompanionService extends WearableListenerService {
    private static final String TAG = CompanionService.class.getSimpleName();

    // Manages the GoogleApiClient connection used to talk to the Wearable API;
    // created in onCreate() and torn down in onDestroy().
    protected WearableAPIHelper mWearableAPIHelper;

    // Handlers consulted, in registration order, for each incoming data-changed event.
    protected final List<DataChangedHandler> mDataChangedHandlers = new ArrayList<>();

    // Handlers consulted, in registration order, for each incoming message.
    protected final List<MessageReceivedHandler> mMessageReceivedHandlers = new ArrayList<>();

    @Override
    public void onCreate() {
        super.onCreate();

        // Built-in handler for permission request messages sent by the wearable.
        mMessageReceivedHandlers.add(new CompanionPermissionRequestMessageHandler(this));

        // Connect to the Wearable API. The connection callbacks are intentionally
        // empty; the helper is only needed so handlers can send replies.
        mWearableAPIHelper = new WearableAPIHelper(this, new WearableAPIHelper.WearableAPIHelperListener() {
            @Override
            public void onWearableAPIConnected(GoogleApiClient apiClient) {
            }

            @Override
            public void onWearableAPIConnectionSuspended(int cause) {
            }

            @Override
            public void onWearableAPIConnectionFailed(ConnectionResult result) {
            }
        });
    }

    @Override
    public void onDestroy() {
        // Release the Wearable API connection before the service is destroyed.
        mWearableAPIHelper.onDestroy();
        super.onDestroy();
    }

    @Override
    public void onDataChanged(DataEventBuffer dataEvents) {
        // Route the changed data event to the correct handler.
        // Events are frozen first because the buffer is only valid inside this call.
        final List<DataEvent> events = FreezableUtils.freezeIterable(dataEvents);
        for (DataEvent event : events) {
            String path = event.getDataItem().getUri().getPath();
            for (DataChangedHandler handler : mDataChangedHandlers) {
                for (String supportedPath : handler.getSupportedPaths()) {
                    // allow handler to define just the first part of the entire URI.
                    if (path.startsWith(supportedPath)) {
                        handler.onDataChanged(path, event, mWearableAPIHelper);
                        // Stop checking this handler's paths; other handlers may still match.
                        break;
                    }
                }
            }
        }
    }

    @Override
    public void onMessageReceived(MessageEvent messageEvent) {
        String path = messageEvent.getPath();
        Log.v(TAG, "onMessageReceived: " + path);

        // Deliver the message payload (as a DataMap) to every handler that
        // declares support for this exact path.
        for (MessageReceivedHandler handler : mMessageReceivedHandlers) {
            if (null != handler.getSupportedPaths() && handler.getSupportedPaths().contains(path)) {
                byte[] rawData = messageEvent.getData();
                DataMap dataMap = DataMap.fromByteArray(rawData);
                handler.onMessageReceived(path, dataMap, mWearableAPIHelper);
            }
        }
    }
}
1,432
1,199
<filename>net/fetch/files/patch-fetch.c --- fetch.c.orig 2011-11-11 05:20:22.000000000 +0100 +++ fetch.c 2007-09-08 23:13:26.000000000 +0200 @@ -33,6 +33,7 @@ #include <sys/socket.h> #include <sys/stat.h> #include <sys/time.h> +#include <sys/ioctl.h> #include <ctype.h> #include <err.h> @@ -204,12 +204,16 @@ fprintf(stderr, "\r%-46.46s", xs->name); if (xs->size <= 0) { +#ifdef __FreeBSD__ setproctitle("%s [%s]", xs->name, stat_bytes(xs->rcvd)); +#endif fprintf(stderr, " %s", stat_bytes(xs->rcvd)); } else { +#ifdef __FreeBSD__ setproctitle("%s [%d%% of %s]", xs->name, (int)((100.0 * xs->rcvd) / xs->size), stat_bytes(xs->size)); +#endif fprintf(stderr, "%3d%% of %s", (int)((100.0 * xs->rcvd) / xs->size), stat_bytes(xs->size));
402
9,680
<filename>nni/algorithms/hpo/ppo_tuner/util.py<gh_stars>1000+ # Copyright (c) Microsoft Corporation. # Licensed under the MIT license. """ util functions """ import os import random import multiprocessing import numpy as np import tensorflow.compat.v1 as tf tf.disable_v2_behavior() from gym.spaces import Discrete, Box, MultiDiscrete def set_global_seeds(i): """set global seeds""" rank = 0 myseed = i + 1000 * rank if i is not None else None tf.set_random_seed(myseed) np.random.seed(myseed) random.seed(myseed) def batch_to_seq(h, nbatch, nsteps, flat=False): """convert from batch to sequence""" if flat: h = tf.reshape(h, [nbatch, nsteps]) else: h = tf.reshape(h, [nbatch, nsteps, -1]) return [tf.squeeze(v, [1]) for v in tf.split(axis=1, num_or_size_splits=nsteps, value=h)] def seq_to_batch(h, flat=False): """convert from sequence to batch""" shape = h[0].get_shape().as_list() if not flat: assert len(shape) > 1 nh = h[0].get_shape()[-1].value return tf.reshape(tf.concat(axis=1, values=h), [-1, nh]) else: return tf.reshape(tf.stack(values=h, axis=1), [-1]) def lstm(xs, ms, s, scope, nh, init_scale=1.0): """lstm cell""" _, nin = [v.value for v in xs[0].get_shape()] # the first is nbatch with tf.variable_scope(scope): wx = tf.get_variable("wx", [nin, nh*4], initializer=ortho_init(init_scale)) wh = tf.get_variable("wh", [nh, nh*4], initializer=ortho_init(init_scale)) b = tf.get_variable("b", [nh*4], initializer=tf.constant_initializer(0.0)) c, h = tf.split(axis=1, num_or_size_splits=2, value=s) for idx, (x, m) in enumerate(zip(xs, ms)): c = c*(1-m) h = h*(1-m) z = tf.matmul(x, wx) + tf.matmul(h, wh) + b i, f, o, u = tf.split(axis=1, num_or_size_splits=4, value=z) i = tf.nn.sigmoid(i) f = tf.nn.sigmoid(f) o = tf.nn.sigmoid(o) u = tf.tanh(u) c = f*c + i*u h = o*tf.tanh(c) xs[idx] = h s = tf.concat(axis=1, values=[c, h]) return xs, s def lstm_model(nlstm=128, layer_norm=False): """ Builds LSTM (Long-Short Term Memory) network to be used in a policy. 
Note that the resulting function returns not only the output of the LSTM (i.e. hidden state of lstm for each step in the sequence), but also a dictionary with auxiliary tensors to be set as policy attributes. Specifically, S is a placeholder to feed current state (LSTM state has to be managed outside policy) M is a placeholder for the mask (used to mask out observations after the end of the episode, but can be used for other purposes too) initial_state is a numpy array containing initial lstm state (usually zeros) state is the output LSTM state (to be fed into S at the next call) An example of usage of lstm-based policy can be found here: common/tests/test_doc_examples.py/test_lstm_example Parameters ---------- nlstm : int LSTM hidden state size layer_norm : bool if True, layer-normalized version of LSTM is used Returns ------- function that builds LSTM with a given input tensor / placeholder """ def network_fn(X, nenv=1, obs_size=-1): with tf.variable_scope("emb", reuse=tf.AUTO_REUSE): w_emb = tf.get_variable("w_emb", [obs_size+1, 32]) X = tf.nn.embedding_lookup(w_emb, X) nbatch = X.shape[0] nsteps = nbatch // nenv h = tf.layers.flatten(X) M = tf.placeholder(tf.float32, [nbatch]) #mask (done t-1) S = tf.placeholder(tf.float32, [nenv, 2*nlstm]) #states xs = batch_to_seq(h, nenv, nsteps) ms = batch_to_seq(M, nenv, nsteps) assert not layer_norm h5, snew = lstm(xs, ms, S, scope='lstm', nh=nlstm) h = seq_to_batch(h5) initial_state = np.zeros(S.shape.as_list(), dtype=float) return h, {'S':S, 'M':M, 'state':snew, 'initial_state':initial_state} return network_fn def ortho_init(scale=1.0): """init approach""" def _ortho_init(shape, dtype, partition_info=None): #lasagne ortho init for tf shape = tuple(shape) if len(shape) == 2: flat_shape = shape elif len(shape) == 4: # assumes NHWC flat_shape = (np.prod(shape[:-1]), shape[-1]) else: raise NotImplementedError a = np.random.normal(0.0, 1.0, flat_shape) u, _, v = np.linalg.svd(a, full_matrices=False) q = u if u.shape == 
flat_shape else v # pick the one with the correct shape q = q.reshape(shape) return (scale * q[:shape[0], :shape[1]]).astype(np.float32) return _ortho_init def fc(x, scope, nh, *, init_scale=1.0, init_bias=0.0): """fully connected op""" with tf.variable_scope(scope): nin = x.get_shape()[1].value w = tf.get_variable("w", [nin, nh], initializer=ortho_init(init_scale)) b = tf.get_variable("b", [nh], initializer=tf.constant_initializer(init_bias)) return tf.matmul(x, w)+b def _check_shape(placeholder_shape, data_shape): """ check if two shapes are compatible (i.e. differ only by dimensions of size 1, or by the batch dimension) """ return True # ================================================================ # Shape adjustment for feeding into tf placeholders # ================================================================ def adjust_shape(placeholder, data): """ adjust shape of the data to the shape of the placeholder if possible. If shape is incompatible, AssertionError is thrown Parameters ---------- placeholder tensorflow input placeholder data input data to be (potentially) reshaped to be fed into placeholder Returns ------- reshaped data """ if not isinstance(data, np.ndarray) and not isinstance(data, list): return data if isinstance(data, list): data = np.array(data) placeholder_shape = [x or -1 for x in placeholder.shape.as_list()] assert _check_shape(placeholder_shape, data.shape), \ 'Shape of data {} is not compatible with shape of the placeholder {}'.format(data.shape, placeholder_shape) return np.reshape(data, placeholder_shape) # ================================================================ # Global session # ================================================================ def get_session(config=None): """Get default session or create one with a given config""" sess = tf.get_default_session() if sess is None: sess = make_session(config=config, make_default=True) return sess def make_session(config=None, num_cpu=None, make_default=False, graph=None): 
"""Returns a session that will use <num_cpu> CPU's only""" if num_cpu is None: num_cpu = int(os.getenv('RCALL_NUM_CPU', multiprocessing.cpu_count())) if config is None: config = tf.ConfigProto( allow_soft_placement=True, inter_op_parallelism_threads=num_cpu, intra_op_parallelism_threads=num_cpu) config.gpu_options.allow_growth = True if make_default: return tf.InteractiveSession(config=config, graph=graph) else: return tf.Session(config=config, graph=graph) ALREADY_INITIALIZED = set() def initialize(): """Initialize all the uninitialized variables in the global scope.""" new_variables = set(tf.global_variables()) - ALREADY_INITIALIZED get_session().run(tf.variables_initializer(new_variables)) ALREADY_INITIALIZED.update(new_variables) def observation_placeholder(ob_space, batch_size=None, name='Ob'): """ Create placeholder to feed observations into of the size appropriate to the observation space Parameters ---------- ob_space : gym.Space observation space batch_size : int size of the batch to be fed into input. Can be left None in most cases. name : str name of the placeholder Returns ------- tensorflow placeholder tensor """ assert isinstance(ob_space, (Discrete, Box, MultiDiscrete)), \ 'Can only deal with Discrete and Box observation spaces for now' dtype = ob_space.dtype if dtype == np.int8: dtype = np.uint8 return tf.placeholder(shape=(batch_size,) + ob_space.shape, dtype=dtype, name=name) def explained_variance(ypred, y): """ Computes fraction of variance that ypred explains about y. Returns 1 - Var[y-ypred] / Var[y] interpretation: ev=0 => might as well have predicted zero ev=1 => perfect prediction ev<0 => worse than just predicting zero """ assert y.ndim == 1 and ypred.ndim == 1 vary = np.var(y) return np.nan if vary == 0 else 1 - np.var(y-ypred)/vary
3,625
348
{"nom":"Vailly","circ":"5ème circonscription","dpt":"Haute-Savoie","inscrits":670,"abs":431,"votants":239,"blancs":13,"nuls":0,"exp":226,"res":[{"nuance":"DVD","nom":"<NAME>","voix":128},{"nuance":"REM","nom":"<NAME>","voix":98}]}
91
302
{ "sample": { "url": "/Users/jonathan/devwork/alphatools/alphatools/data/factory/sample.csv", "schema": "var*{asof_date: datetime, sid: int64, value: float64}" } }
76
3,418
#include <stdio.h> #include <string.h> #include "pico/stdlib.h" #include "hardware/gpio.h" #include "ssd1306.h" #define YELLOW_BUTTON 9 #define BLUE_BUTTON 10 #define BLACK_BUTTON 11 #define RED_BUTTON 12 #define GREEN_BUTTON 13 int submit_char_flag = 0; char* init_gpio_buttons() { gpio_init(YELLOW_BUTTON); gpio_set_dir(YELLOW_BUTTON, GPIO_IN); gpio_pull_up(YELLOW_BUTTON); gpio_init(BLUE_BUTTON); gpio_set_dir(BLUE_BUTTON, GPIO_IN); gpio_pull_up(BLUE_BUTTON); gpio_init(BLACK_BUTTON); gpio_set_dir(BLACK_BUTTON, GPIO_IN); gpio_pull_up(BLACK_BUTTON); gpio_init(RED_BUTTON); gpio_set_dir(RED_BUTTON, GPIO_IN); gpio_pull_up(RED_BUTTON); gpio_init(GREEN_BUTTON); gpio_set_dir(GREEN_BUTTON, GPIO_IN); gpio_pull_up(GREEN_BUTTON); } void button_input_proc(char* p_c, char* p_button_string, const int* p_BUTTON_STRING_SIZE, int* p_x, int* p_y) { if(!gpio_get(YELLOW_BUTTON)) { if(*p_x == -7) *p_x = 0; if(*p_c < 'z') { *p_c = *p_c + 1;; draw_letter_at(*p_x, *p_y, *p_c); show_scr(); submit_char_flag = 1; } sleep_ms(250); } if(!gpio_get(BLUE_BUTTON)) { if(*p_x == -7) *p_x = 0; if(*p_c > 'a') { *p_c = *p_c - 1;; draw_letter_at(*p_x, *p_y, *p_c); show_scr(); submit_char_flag = 1; } sleep_ms(250); } if(!gpio_get(BLACK_BUTTON)) { if(*p_x < 70 && submit_char_flag) { *p_x = *p_x + 7; strncat(p_button_string, p_c, 1); submit_char_flag = 0; } sleep_ms(250); } if(!gpio_get(RED_BUTTON)) { if(*p_x >= 0) { *p_x = *p_x - 7; if(p_button_string[0] != '\0') { p_button_string[strlen(p_button_string)-1] = '\0'; } draw_letter_at(*p_x+7, *p_y, ' '); draw_letter_at(*p_x, *p_y, ' '); show_scr(); } sleep_ms(250); } if(!gpio_get(GREEN_BUTTON)) { fill_scr(0); *p_x = 0; *p_c = 'a'; draw_letter_at(*p_x, *p_y, *p_c); show_scr(); p_button_string[0] = '\0'; ssd1306_print("\n"); ssd1306_print(p_button_string); show_scr(); sleep_ms(250); } }
1,182
338
package com.nurkiewicz.asyncretry.policy;

/**
 * Signals that the current retry sequence should be aborted immediately,
 * without attempting any further retries.
 *
 * @author <NAME>
 * @since 7/16/13, 10:23 PM
 */
public class AbortRetryException extends RuntimeException {

    private static final long serialVersionUID = 1L;

    /** Creates an exception with no detail message. */
    public AbortRetryException() {
    }

    /**
     * Creates an exception with the given detail message.
     *
     * @param message detail message explaining why retrying was aborted
     */
    public AbortRetryException(String message) {
        super(message);
    }

    /**
     * Creates an exception with the given detail message and cause, so the
     * triggering failure can be chained for diagnostics.
     *
     * @param message detail message explaining why retrying was aborted
     * @param cause underlying failure that caused the abort
     */
    public AbortRetryException(String message, Throwable cause) {
        super(message, cause);
    }
}
70
543
/* * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package javax.accessibility; import java.util.Locale; import java.util.MissingResourceException; import java.util.ResourceBundle; import sun.awt.AWTAccessor; /** * Base class used to maintain a strongly typed enumeration. This is the * superclass of {@link AccessibleState} and {@link AccessibleRole}. * <p> * The {@link #toDisplayString()} method allows you to obtain the localized * string for a locale independent key from a predefined {@code ResourceBundle} * for the keys defined in this class. This localized string is intended to be * readable by humans. 
* * @author <NAME> * @author <NAME> * @author <NAME> * @see AccessibleRole * @see AccessibleState */ public abstract class AccessibleBundle { private final String defaultResourceBundleName = "com.sun.accessibility.internal.resources.accessibility"; static { AWTAccessor.setAccessibleBundleAccessor( new AWTAccessor.AccessibleBundleAccessor() { @Override public String getKey(AccessibleBundle accessibleBundle) { return accessibleBundle.key; } }); } /** * Construct an {@code AccessibleBundle}. */ public AccessibleBundle() { } /** * The locale independent name of the state. This is a programmatic name * that is not intended to be read by humans. * * @see #toDisplayString */ protected String key = null; /** * Obtains the key as a localized string. If a localized string cannot be * found for the key, the locale independent key stored in the role will be * returned. This method is intended to be used only by subclasses so that * they can specify their own resource bundles which contain localized * strings for their keys. * * @param name the name of the resource bundle to use for lookup * @param locale the locale for which to obtain a localized string * @return a localized string for the key */ protected String toDisplayString(final String name, final Locale locale) { try { return ResourceBundle.getBundle(name, locale).getString(key); } catch (ClassCastException | MissingResourceException ignored) { return key; // return the non-localized key } } /** * Obtains the key as a localized string. If a localized string cannot be * found for the key, the locale independent key stored in the role will be * returned. * * @param locale the locale for which to obtain a localized string * @return a localized string for the key */ public String toDisplayString(Locale locale) { return toDisplayString(defaultResourceBundleName, locale); } /** * Gets localized string describing the key using the default locale. 
* * @return a localized string describing the key using the default locale */ public String toDisplayString() { return toDisplayString(Locale.getDefault()); } /** * Gets localized string describing the key using the default locale. * * @return a localized string describing the key using the default locale * @see #toDisplayString */ public String toString() { return toDisplayString(); } }
1,453
675
<gh_stars>100-1000 """ Implements classes and methods to define domains for neural networks. The NNConstraintChecker class and its subclasses are used to check if a neural network architecture satisfies certain constriants. This is mostly needed to constrain the search space in NASBOT. -- <EMAIL> """ # pylint: disable=invalid-name # pylint: disable=too-many-arguments import numpy as np from copy import copy # Local from ..exd.domains import Domain class NNConstraintChecker(object): """ A class for checking if a neural network satisfies constraints. """ def __init__(self, max_num_layers, min_num_layers, max_mass, min_mass, max_in_degree, max_out_degree, max_num_edges, max_num_units_per_layer, min_num_units_per_layer): """ Constructor. """ super(NNConstraintChecker, self).__init__() self.max_num_layers = max_num_layers self.min_num_layers = min_num_layers self.max_mass = max_mass self.min_mass = min_mass self.max_in_degree = max_in_degree self.max_out_degree = max_out_degree self.max_num_edges = max_num_edges self.max_num_units_per_layer = max_num_units_per_layer self.min_num_units_per_layer = min_num_units_per_layer self.constraint_names = ['max_num_layers', 'min_num_layers', 'max_mass', 'min_mass', 'max_in_degree', 'max_out_degree', 'max_num_edges', 'max_num_units_per_layer', 'min_num_units_per_layer'] def __call__(self, nn, *args, **kwargs): """ Checks if the constraints are satisfied for the given nn. """ return self.constraints_are_satisfied(nn, *args, **kwargs) def constraints_are_satisfied(self, nn, return_violation=False): """ Checks if the neural network nn satisfies the constraints. If return_violation is True, it returns a string representing the violation. 
""" violation = '' if not self._check_leq_constraint(len(nn.layer_labels), self.max_num_layers): violation = 'too_many_layers' elif not self._check_geq_constraint(len(nn.layer_labels), self.min_num_layers): violation = 'too_few_layers' elif not self._check_leq_constraint(nn.get_total_mass(), self.max_mass): violation = 'too_much_mass' elif not self._check_geq_constraint(nn.get_total_mass(), self.min_mass): violation = 'too_little_mass' elif not self._check_leq_constraint(nn.get_out_degrees().max(), self.max_out_degree): violation = 'large_max_out_degree' elif not self._check_leq_constraint(nn.get_in_degrees().max(), self.max_in_degree): violation = 'large_max_in_degree' elif not self._check_leq_constraint(nn.conn_mat.sum(), self.max_num_edges): violation = 'too_many_edges' elif not self._check_leq_constraint( self._finite_max_or_min(nn.num_units_in_each_layer, 1), self.max_num_units_per_layer): violation = 'max_units_per_layer_exceeded' elif not self._check_geq_constraint( self._finite_max_or_min(nn.num_units_in_each_layer, 0), self.min_num_units_per_layer): violation = 'min_units_per_layer_not_exceeded' else: violation = self._child_constraints_are_satisfied(nn) return violation if return_violation else (violation == '') @classmethod def _check_leq_constraint(cls, value, bound): """ Returns true if bound is None or if value is less than or equal to bound. """ return bound is None or (value <= bound) @classmethod def _check_geq_constraint(cls, value, bound): """ Returns true if bound is None or if value is greater than or equal to bound. """ return bound is None or (value >= bound) def _child_constraints_are_satisfied(self, nn): """ Checks if the constraints of the child class are satisfied. """ raise NotImplementedError('Implement in a child class.') @classmethod def _finite_max_or_min(cls, iterable, is_max): """ Returns the max ignorning Nones, nans and infs. 
""" finite_vals = [x for x in iterable if x is not None and np.isfinite(x)] return max(finite_vals) if is_max else min(finite_vals) class CNNConstraintChecker(NNConstraintChecker): """ A class for checking if a CNN satisfies constraints. """ def __init__(self, max_num_layers, min_num_layers, max_mass, min_mass, max_in_degree, max_out_degree, max_num_edges, max_num_units_per_layer, min_num_units_per_layer, max_num_2strides=None): """ Constructor. max_num_2strides is the maximum number of 2-strides (either via pooling or conv operations) that the image can go through in the network. """ super(CNNConstraintChecker, self).__init__( max_num_layers, min_num_layers, max_mass, min_mass, max_in_degree, max_out_degree, max_num_edges, max_num_units_per_layer, min_num_units_per_layer) self.max_num_2strides = max_num_2strides self.constraint_names.append('max_num_2strides') def _child_constraints_are_satisfied(self, nn): """ Checks if the constraints of the child class are satisfied. """ img_inv_sizes = [piis for piis in nn.post_img_inv_sizes if piis != 'x'] max_post_img_inv_sizes = None if self.max_num_2strides is None \ else 2**self.max_num_2strides violation = '' if not self._check_leq_constraint(max(img_inv_sizes), max_post_img_inv_sizes): violation = 'too_many_2strides' return violation class MLPConstraintChecker(NNConstraintChecker): """ A class for checking if a MLP satisfies constraints. """ def __init__(self, *args, **kwargs): """ Constructor. """ super(MLPConstraintChecker, self).__init__(*args, **kwargs) def _child_constraints_are_satisfied(self, nn): """ Checks if the constraints of the child class are satisfied. """ return '' # An NN Domain class ----------------------------------------------------------------- class NNDomain(Domain): """ Domain for Neural Network architectures. """ def __init__(self, nn_type, constraint_checker=None): """ Constructor. 
""" self.nn_type = nn_type self.constraint_checker = constraint_checker super(NNDomain, self).__init__() def get_type(self): """ Returns type of the domain. """ return "neural_network" def get_dim(self): """ Return dimension. """ return 1 def is_a_member(self, point): """ Returns true if point is in the domain. """ if not self.nn_type == point.nn_class: return False else: return self.constraint_checker(point) @classmethod def members_are_equal(cls, point_1, point_2): """ Returns true if they are equal. """ return neural_nets_are_equal(point_1, point_2) def __str__(self): """ Returns a string representation. """ cc_attrs = {key:getattr(self.constraint_checker, key) for key in self.constraint_checker.constraint_names} return 'NN(%s):%s'%(self.nn_type, cc_attrs) def neural_nets_are_equal(net1, net2): """ Returns true if both net1 and net2 are equal. """ is_true = True for key in net1.__dict__.keys(): val1 = net1.__dict__[key] val2 = net2.__dict__[key] is_true = True if isinstance(val1, dict): for val_key in val1.keys(): is_true = is_true and np.all(val1[val_key] == val2[val_key]) elif hasattr(val1, '__iter__'): is_true = is_true and np.all(val1 == val2) else: is_true = is_true and val1 == val2 if not is_true: # break here if necessary return is_true return is_true # An API to return an NN Domain using the constraints -------------------------------- def get_nn_domain_from_constraints(nn_type, constraint_dict): """ nn_type is the type of the network. See CNNConstraintChecker, MLPConstraintChecker, NNConstraintChecker constructors for args and kwargs. 
""" constraint_dict = copy(constraint_dict) # Specify the mandatory and optional key values mandatory_keys = ['max_num_layers', 'max_mass'] optional_key_vals = [('min_num_layers', 5), ('min_mass', 0), ('max_out_degree', np.inf), ('max_in_degree', np.inf), ('max_num_edges', np.inf), ('max_num_units_per_layer', 10001), ('min_num_units_per_layer', 5), ] if nn_type.startswith('cnn'): optional_key_vals += [('max_num_2strides', np.inf)] # Check if the mandatory keys exist in constraint_dict for mkey in mandatory_keys: if mkey not in constraint_dict.keys(): raise ValueError('Must specify keys %s in constraint_dict.'%( ', '.join(mandatory_keys))) # If an optional key does not exist, then add it for okey, oval in optional_key_vals: if okey not in constraint_dict.keys(): constraint_dict[okey] = oval # Specify the constructor if nn_type.startswith('cnn'): cc_constructor = CNNConstraintChecker elif nn_type.startswith('mlp'): cc_constructor = MLPConstraintChecker else: raise ValueError('Unknown nn_type: %s.'%(nn_type)) # Now create constraint checker object cc_attributes = mandatory_keys + [okv[0] for okv in optional_key_vals] constraint_dict_to_pass = {key: constraint_dict[key] for key in cc_attributes} constraint_checker = cc_constructor(**constraint_dict_to_pass) # Create Domain and return return NNDomain(nn_type, constraint_checker)
3,799
1,375
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0

#ifndef OPENTITAN_SW_DEVICE_SILICON_CREATOR_LIB_DRIVERS_UART_H_
#define OPENTITAN_SW_DEVICE_SILICON_CREATOR_LIB_DRIVERS_UART_H_

#include <stddef.h>
#include <stdint.h>

#include "sw/device/lib/base/mmio.h"
#include "sw/device/silicon_creator/lib/error.h"

#ifdef __cplusplus
extern "C" {
#endif

/**
 * Initialize the UART with the requested parameters.
 *
 * @param precalculated_nco NCO value used to set the speed of the UART.
 * @return kErrorOk if successful, else an error code.
 */
rom_error_t uart_init(uint32_t precalculated_nco);

/**
 * Write a single byte to the UART.
 *
 * @param byte Byte to send.
 */
void uart_putchar(uint8_t byte);

/**
 * Write a buffer to the UART.
 *
 * Writes the complete buffer to the UART and waits for transmission to
 * complete.
 *
 * @param data Pointer to buffer to write.
 * @param len Length of the buffer to write.
 * @return Number of bytes written.
 */
size_t uart_write(const uint8_t *data, size_t len);

/**
 * Sink a buffer to the UART.
 *
 * This is a wrapper for uart_write which conforms to the type signature
 * required by the print library.
 *
 * @param uart Pointer to a target to satisfy the shape of the sink API.
 * @param data Pointer to buffer to write.
 * @param len Length of the buffer to write.
 * @return Number of bytes written.
 */
size_t uart_sink(void *uart, const char *data, size_t len);

#ifdef __cplusplus
}
#endif

#endif  // OPENTITAN_SW_DEVICE_SILICON_CREATOR_LIB_DRIVERS_UART_H_
574
11,396
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations

from awx.conf.migrations import _rename_setting

# (old name, new name) pairs for the session-related settings renamed here.
_SETTING_RENAMES = [
    ('AUTH_TOKEN_PER_USER', 'SESSIONS_PER_USER'),
    ('AUTH_TOKEN_EXPIRATION', 'SESSION_COOKIE_AGE'),
]


def copy_session_settings(apps, schema_editor):
    """Rename the legacy auth-token settings to their session equivalents."""
    for old_key, new_key in _SETTING_RENAMES:
        _rename_setting.rename_setting(apps, schema_editor, old_key=old_key, new_key=new_key)


def reverse_copy_session_settings(apps, schema_editor):
    """Undo copy_session_settings by restoring the legacy setting names."""
    for old_key, new_key in reversed(_SETTING_RENAMES):
        _rename_setting.rename_setting(apps, schema_editor, old_key=new_key, new_key=old_key)


class Migration(migrations.Migration):

    dependencies = [('conf', '0004_v320_reencrypt')]

    operations = [migrations.RunPython(copy_session_settings, reverse_copy_session_settings)]
333
1,914
"""Sanity checks that funcy's public API stays consistent between its
py2 and py3 flavour modules (naming, renames, and docstrings)."""
import pkgutil
import pytest
from funcy.compat import PY2, PY3
import funcy
from funcy import py2, py3
from funcy.py3 import cat, lcat, count_reps, is_iter, is_list


# Introspect all modules, excluding internal/compat helpers that do not
# contribute to the public API surface.
exclude = ('compat', 'cross', '_inspect', 'py2', 'py3', 'simple_funcs', 'funcmakers')
module_names = list(name for _, name, _ in pkgutil.iter_modules(funcy.__path__) if name not in exclude)
modules = [getattr(funcy, name) for name in module_names]


def test_match():
    # funcy's top-level __all__ must mirror whichever flavour module is
    # active for the running interpreter.
    assert funcy.__all__ == (py2 if PY2 else py3).__all__


@pytest.mark.skipif(PY2, reason="modules use python 3 internally")
def test_full_py3():
    # Every exported name must come from one of the submodules
    # (plus 'lzip', which is defined at the top level).
    assert sorted(funcy.__all__) == sorted(lcat(m.__all__ for m in modules) + ['lzip'])


def test_full():
    # Both flavours must export the same number of names.
    assert len(py2.__all__) == len(py3.__all__)


def test_name_clashes():
    # No name may be exported by more than one submodule.
    counts = count_reps(cat(m.__all__ for m in modules))
    clashes = [name for name, c in counts.items() if c > 1]
    assert not clashes, 'names clash for ' + ', '.join(clashes)


def test_renames():
    # py2 'i'-prefixed iterator names must map onto py3 names
    # (either kept as-is or with the prefix stripped).
    inames = [n for n in py2.__all__ if n.startswith('i')]
    ipairs = [n[1:] for n in inames if n[1:] in py2.__all__]
    for name in inames:
        if name != 'izip':
            assert name in py3.__all__ or name[1:] in py3.__all__
    for name in ipairs:
        assert name in py3.__all__
        assert 'l' + name in py3.__all__

    # Symmetrically, py3 'l'-prefixed list names must map onto py2 names.
    lnames = [n for n in py3.__all__ if n.startswith('l')]
    lpairs = [n[1:] for n in lnames if n[1:] in py3.__all__]
    for name in lnames:
        if name != 'lzip':
            assert name in py2.__all__ or name[1:] in py2.__all__
    for name in lpairs:
        assert name in py2.__all__
        assert 'i' + name in py2.__all__

    # Only inames are renamed
    assert set(py2.__all__) - set(py3.__all__) <= set(inames)
    # Only lnames are new, plus zip_values/zip_dicts
    assert set(py3.__all__) - set(py2.__all__) <= set(lnames) | set(['zip_values', 'zip_dicts'])


def test_docs():
    # Every public callable (outside the listed exceptions and the
    # types/primitives modules) must have a real name and a docstring.
    exports = [(name, getattr(funcy, name)) for name in funcy.__all__
               if name not in ('print_errors', 'print_durations', 'ErrorRateExceeded')
               and getattr(funcy, name).__module__ not in ('funcy.types', 'funcy.primitives')]
    # NOTE: we are testing this way and not with all() to immediately get a list of offenders
    assert [name for name, f in exports if f.__name__ in ('<lambda>', '_decorator')] == []
    assert [name for name, f in exports if f.__doc__ is None] == []


def test_list_iter():
    # map() must be eager (list) in the py2 flavour and lazy (iterator)
    # in the py3 flavour; the top-level funcy.map follows the interpreter.
    assert is_list(py2.map(None, []))
    assert is_iter(py3.map(None, []))
    assert is_list(funcy.map(None, [])) == PY2
    assert is_iter(funcy.map(None, [])) == PY3
1,121
2,350
package org.roaringbitmap.arraycontainer;

import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;

import org.roaringbitmap.ArrayContainer;
import org.roaringbitmap.ZipRealDataRangeRetriever;
import org.roaringbitmap.buffer.MappeableArrayContainer;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Lists;

/**
 * JMH benchmark comparing in-place range addition ({@code iadd}) on the
 * heap-backed {@link ArrayContainer} versus the buffer-backed
 * {@link MappeableArrayContainer}, using a real-world range dataset.
 */
@State(Scope.Benchmark)
public class AddBenchmark {

  /** Container exercised by the plain ArrayContainer benchmark. */
  private ArrayContainer ac1;
  /** Container exercised by the buffer-backed benchmark. */
  private MappeableArrayContainer mac1;
  /** Single-entry cache so the zipped dataset is parsed at most once. */
  private Cache<String, List<int[][]>> datasetCache;
  private final String dataset = "random_range";
  /** Each element is an array of [start, end) ranges to add. */
  private List<int[][]> ints;

  /**
   * Loads the benchmark dataset (through the cache) and creates fresh,
   * empty containers before the benchmark runs.
   *
   * @throws ExecutionException if the dataset cannot be loaded.
   */
  @Setup
  public void setup() throws ExecutionException {
    ac1 = new ArrayContainer();
    mac1 = new MappeableArrayContainer();
    datasetCache = CacheBuilder.newBuilder().maximumSize(1).build();
    ints = datasetCache.get(dataset, new Callable<List<int[][]>>() {
      @Override
      public List<int[][]> call() throws Exception {
        // Fixed: the original message was missing the separating space
        // ("Loadingrandom_range").
        System.out.println("Loading " + dataset);
        ZipRealDataRangeRetriever<int[][]> dataRetriever =
            new ZipRealDataRangeRetriever<>(dataset, "/random-generated-data/");
        return Lists.newArrayList(dataRetriever.fetchNextRange());
      }
    });
  }

  /**
   * Adds every range in the dataset to a heap-backed {@link ArrayContainer}.
   *
   * @return the mutated container (returned so JMH cannot dead-code
   *         eliminate the work).
   */
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public ArrayContainer add() throws ExecutionException {
    for (int[][] i : ints) {
      for (int[] j : i) {
        ac1.iadd(j[0], j[1]);
      }
    }
    return ac1;
  }

  /**
   * Adds every range in the dataset to a buffer-backed
   * {@link MappeableArrayContainer}.
   *
   * @return the mutated container (returned so JMH cannot dead-code
   *         eliminate the work).
   */
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public MappeableArrayContainer addBuffer() throws ExecutionException {
    for (int[][] i : ints) {
      for (int[] j : i) {
        mac1.iadd(j[0], j[1]);
      }
    }
    return mac1;
  }
}
862
9,425
<gh_stars>1000+ """ tasks.docstrings ~~~~~~~~~~~~~~~~ Docstrings related tasks """ import ast import pathlib import re from invoke import task # pylint: disable=3rd-party-module-not-gated from salt.loader import SALT_INTERNAL_LOADERS_PATHS from salt.version import SaltStackVersion from tasks import utils CODE_DIR = pathlib.Path(__file__).resolve().parent.parent SALT_CODE_DIR = CODE_DIR / "salt" SALT_MODULES_PATH = SALT_CODE_DIR / "modules" THIS_FILE = pathlib.Path(__file__).relative_to(CODE_DIR) MISSING_DOCSTRINGS = { "salt/auth/django.py": ["is_connection_usable"], "salt/auth/rest.py": ["rest_auth_setup"], "salt/auth/yubico.py": ["groups"], "salt/beacons/inotify.py": ["close"], "salt/beacons/junos_rre_keys.py": ["beacon"], "salt/beacons/salt_monitor.py": ["validate", "beacon"], "salt/beacons/watchdog.py": ["close", "to_salt_event"], "salt/cache/localfs.py": ["get_storage_id", "init_kwargs"], "salt/cache/redis_cache.py": ["init_kwargs"], "salt/cloud/clouds/clc.py": [ "get_creds", "get_configured_provider", "get_queue_data", ], "salt/cloud/clouds/ec2.py": ["query", "sign"], "salt/cloud/clouds/hetzner.py": [ "show_instance", "avail_sizes", "avail_images", "avail_locations", "list_ssh_keys", "list_nodes", "list_nodes_full", ], "salt/cloud/clouds/libvirt.py": [ "get_domain_ips", "destroy_domain", "create_volume_with_backing_store_xml", "generate_new_name", "find_pool_and_volume", "to_ip_addr_type", "get_domain_ip", "get_domain_volumes", "create_volume_xml", ], "salt/cloud/clouds/lxc.py": [ "avail_images", "list_nodes_full", "list_nodes", "get_provider", ], "salt/cloud/clouds/packet.py": ["get_devices_by_token", "is_profile_configured"], "salt/cloud/clouds/profitbricks.py": ["signal_event"], "salt/cloud/clouds/pyrax.py": [ "queues_exists", "queues_show", "queues_delete", "queues_create", ], "salt/engines/junos_syslog.py": ["start"], "salt/engines/logentries.py": ["event_bus_context"], "salt/engines/logstash_engine.py": ["event_bus_context"], "salt/engines/reactor.py": 
["start"], "salt/engines/redis_sentinel.py": ["start"], "salt/engines/test.py": ["event_bus_context"], "salt/grains/chronos.py": ["os_family", "os_data", "kernel", "os"], "salt/grains/cimc.py": ["cimc"], "salt/grains/esxi.py": ["os_family", "kernel", "os", "esxi"], "salt/grains/fx2.py": ["kernel", "os_family", "fx2", "location", "os_data"], "salt/grains/junos.py": ["facts", "os_family", "defaults"], "salt/grains/marathon.py": ["kernel", "os", "os_family", "os_data", "marathon"], "salt/grains/metadata.py": ["metadata"], "salt/grains/nxos.py": ["system_information"], "salt/grains/panos.py": ["panos"], "salt/grains/philips_hue.py": ["vendor", "kernel", "os", "os_family", "product"], "salt/grains/rest_sample.py": ["kernel", "os", "os_family", "location", "os_data"], "salt/grains/ssh_sample.py": ["location", "os_data", "kernel"], "salt/log/handlers/fluent_mod.py": ["setup", "setup_handlers", "get_global_sender"], "salt/log/handlers/log4mongo_mod.py": ["setup_handlers"], "salt/log/handlers/logstash_mod.py": ["setup_handlers"], "salt/metaproxy/proxy.py": [ "post_master_init", "target", "handle_payload", "target_load", ], "salt/modules/chassis.py": ["chassis_credentials", "cmd"], "salt/modules/csf.py": [ "disable_testing_mode", "skip_nic", "get_testing_status", "build_directions", "get_option", "enable_testing_mode", "remove_temp_rule", "get_skipped_nics", "set_option", "remove_rule", "split_option", "skip_nics", ], "salt/modules/dracr.py": [ "get_general", "set_dns_dracname", "set_nicvlan", "get_dns_dracname", "bare_rac_cmd", "set_general", "inventory", "set_niccfg", ], "salt/modules/dummyproxy_pkg.py": [ "remove", "installed", "install", "list_pkgs", "upgrade", ], "salt/modules/esxcluster.py": ["get_details"], "salt/modules/esxdatacenter.py": ["get_details"], "salt/modules/esxi.py": ["cmd", "get_details"], "salt/modules/esxvm.py": ["get_details"], "salt/modules/powerpath.py": ["has_powerpath"], "salt/modules/rest_pkg.py": [ "remove", "installed", "install", "list_pkgs", 
"upgrade", ], "salt/modules/ssh_pkg.py": ["install", "list_pkgs", "remove"], "salt/modules/swift.py": ["head"], "salt/modules/sysbench.py": ["ping"], "salt/modules/test.py": ["missing_func"], "salt/modules/test_virtual.py": ["ping"], "salt/modules/vcenter.py": ["get_details"], "salt/netapi/rest_tornado/__init__.py": ["get_application"], "salt/output/__init__.py": ["progress_end"], "salt/pillar/extra_minion_data_in_pillar.py": ["ext_pillar"], "salt/pillar/gpg.py": ["ext_pillar"], "salt/pillar/makostack.py": ["ext_pillar"], "salt/pillar/nacl.py": ["ext_pillar"], "salt/pillar/stack.py": ["ext_pillar"], "salt/proxy/cisconso.py": ["init"], "salt/proxy/esxi.py": ["is_connected_via_vcenter"], "salt/proxy/fx2.py": ["host"], "salt/proxy/junos.py": [ "reboot_active", "conn", "get_reboot_active", "initialized", "reboot_clear", "get_serialized_facts", ], "salt/proxy/netmiko_px.py": ["connection", "make_con"], "salt/proxy/rest_sample.py": ["init", "alive", "fns", "fix_outage"], "salt/queues/pgjsonb_queue.py": ["handle_queue_creation"], "salt/renderers/pydsl.py": ["render"], "salt/renderers/pyobjects.py": ["render"], "salt/renderers/stateconf.py": [ "add_goal_state", "rewrite_sls_includes_excludes", "add_implicit_requires", "add_start_state", "extract_state_confs", "rename_state_ids", "has_names_decls", "statelist", "render", ], "salt/returners/zabbix_return.py": ["returner", "zbx", "zabbix_send"], "salt/roster/range.py": ["target_glob", "target_range"], "salt/sdb/consul.py": ["set_", "get"], "salt/states/apache.py": ["configfile"], "salt/states/boto_elasticache.py": [ "replication_group_present", "cache_cluster_present", "replication_group_absent", "subnet_group_absent", "cache_cluster_absent", ], "salt/states/boto_rds.py": ["subnet_group_absent"], "salt/states/boto_route53.py": ["rr_absent", "rr_present"], "salt/states/boto_vpc.py": ["vpc_peering_connection_absent"], "salt/states/cmd.py": ["wait_call"], "salt/states/esxdatacenter.py": ["mod_init"], "salt/states/junos.py": 
["resultdecorator"], "salt/states/keystone_role_grant.py": ["present", "absent"], "salt/states/libcloud_dns.py": ["state_result"], "salt/states/libcloud_loadbalancer.py": ["state_result"], "salt/states/libcloud_storage.py": ["state_result"], "salt/states/pkgng.py": ["update_packaging_site"], "salt/utils/aws.py": ["assumed_creds"], "salt/utils/boto3mod.py": ["exactly_one", "get_error", "ordered"], "salt/utils/boto_elb_tag.py": ["get_tag_descriptions"], "salt/utils/botomod.py": ["exactly_one", "get_error"], "salt/utils/dictdiffer.py": ["diff", "deep_diff"], "salt/utils/dictupdate.py": [ "merge", "merge_recurse", "merge_overwrite", "merge_list", "merge_aggregate", ], "salt/utils/dockermod/__init__.py": ["get_client_args"], "salt/utils/dockermod/translate/container.py": [ "ipc_mode", "volumes_from", "cpu_period", "network_mode", "domainname", "stop_signal", "cpuset_mems", "command", "memswap_limit", "pid_mode", "pids_limit", "security_opt", "network_disabled", "labels", "sysctls", "log_driver", "userns_mode", "cpuset_cpus", "lxc_conf", "environment", "read_only", "oom_score_adj", "device_write_iops", "mem_swappiness", "isolation", "blkio_weight", "entrypoint", "hostname", "dns_opt", "mac_address", "cpu_shares", "privileged", "stdin_open", "dns", "publish_all_ports", "mem_limit", "log_opt", "devices", "auto_remove", "cap_add", "group_add", "stop_timeout", "oom_kill_disable", "tty", "detach", "storage_opt", "shm_size", "name", "host_config", "device_read_bps", "cpu_group", "device_read_iops", "dns_search", "links", "volume_driver", "extra_hosts", "tmpfs", "ulimits", "cap_drop", "device_write_bps", ], "salt/utils/dockermod/translate/helpers.py": [ "validate_subnet", "translate_str", "validate_ip", "translate_bool", "split", "translate_int", ], "salt/utils/dockermod/translate/network.py": [ "attachable", "subnet", "driver", "ipam_opts", "aux_addresses", "ingress", "ipam_driver", "ipam_pools", "iprange", "gateway", "enable_ipv6", "internal", "check_duplicate", "options", 
"ipam", "labels", ], "salt/utils/entrypoints.py": [ "name_and_version_from_entry_point", "iter_entry_points", ], "salt/utils/error.py": ["pack_exception"], "salt/utils/etcd_util.py": ["get_conn", "tree"], "salt/utils/find.py": ["path_depth"], "salt/utils/gzip_util.py": ["open_fileobj", "uncompress", "open"], "salt/utils/icinga2.py": ["get_certs_path"], "salt/utils/jinja.py": [ "jinja_raise", "method_call", "show_full_context", "regex_escape", ], "salt/utils/listdiffer.py": ["list_diff"], "salt/utils/master.py": ["get_master_key", "ping_all_connected_minions"], "salt/utils/namecheap.py": [ "atts_to_dict", "get_opts", "post_request", "string_to_value", "xml_to_dict", "get_request", ], "salt/utils/nxos.py": ["version_info"], "salt/utils/openstack/neutron.py": [ "check_keystone", "check_neutron", "sanitize_neutronclient", ], "salt/utils/openstack/nova.py": [ "sanatize_novaclient", "get_entry", "get_endpoint_url_v3", "get_entry_multi", "check_nova", ], "salt/utils/openstack/swift.py": ["mkdirs", "check_swift"], "salt/utils/pkg/__init__.py": ["split_comparison"], "salt/utils/process.py": ["systemd_notify_call", "default_signals"], "salt/utils/profile.py": ["activate_profile", "output_profile"], "salt/utils/pyobjects.py": ["need_salt"], "salt/utils/reclass.py": [ "set_inventory_base_uri_default", "filter_out_source_path_option", "prepend_reclass_source_path", ], "salt/utils/roster_matcher.py": ["targets"], "salt/utils/saltclass.py": [ "find_and_process_re", "get_tops", "render_yaml", "get_env_from_dict", "get_pillars", "expand_variables", "render_jinja", "expand_classes_in_order", "dict_search_and_replace", "expanded_dict_from_minion", "find_value_to_expand", "dict_merge", "get_class", ], "salt/utils/smb.py": ["mkdirs", "delete_file", "delete_directory"], "salt/utils/ssh.py": ["key_is_encrypted"], "salt/utils/stringio.py": ["is_writable", "is_stringio", "is_readable"], "salt/utils/stringutils.py": ["random"], "salt/utils/templates.py": [ "wrap_tmpl_func", 
"render_mako_tmpl", "render_jinja_tmpl", "render_wempy_tmpl", ], "salt/utils/verify.py": ["verify_logs_filter"], "salt/utils/virtualbox.py": [ "machine_get_machinestate_str", "machine_get_machinestate_tuple", ], "salt/utils/win_osinfo.py": ["get_os_version_info"], "salt/utils/win_runas.py": ["split_username"], "salt/utils/yamldumper.py": [ "represent_undefined", "represent_ordereddict", "get_dumper", ], "salt/utils/yamlloader.py": ["load"], "salt/utils/yamlloader_old.py": ["load"], "salt/utils/zeromq.py": ["check_ipc_path_max_len"], } MISSING_EXAMPLES = { "salt/modules/acme.py": ["has", "renew_by", "needs_renewal"], "salt/modules/ansiblegate.py": ["help", "list_"], "salt/modules/apkpkg.py": ["purge"], "salt/modules/aptpkg.py": ["expand_repo_def"], "salt/modules/arista_pyeapi.py": ["get_connection"], "salt/modules/artifactory.py": [ "get_latest_release", "get_latest_snapshot", "get_release", "get_snapshot", ], "salt/modules/bigip.py": ["delete_pool"], "salt/modules/boto3_elasticache.py": [ "delete_cache_cluster", "describe_cache_subnet_groups", "create_cache_parameter_group", "describe_replication_groups", "cache_security_group_exists", "add_tags_to_resource", "authorize_cache_security_group_ingress", "modify_cache_cluster", "cache_subnet_group_exists", "describe_cache_clusters", "cache_cluster_exists", "delete_cache_security_group", "describe_cache_parameter_groups", "copy_snapshot", "delete_cache_parameter_group", "delete_cache_subnet_group", "describe_cache_security_groups", "create_cache_cluster", "list_tags_for_resource", "revoke_cache_security_group_ingress", "modify_cache_subnet_group", "replication_group_exists", "remove_tags_from_resource", "create_replication_group", "modify_replication_group", "create_cache_subnet_group", "create_cache_security_group", "delete_replication_group", "list_cache_subnet_groups", ], "salt/modules/boto3_elasticsearch.py": [ "delete_elasticsearch_domain", "describe_elasticsearch_domain", 
"describe_reserved_elasticsearch_instances", "wait_for_upgrade", "start_elasticsearch_service_software_update", "cancel_elasticsearch_service_software_update", "list_domain_names", "exists", "describe_reserved_elasticsearch_instance_offerings", "list_elasticsearch_instance_types", "list_tags", "delete_elasticsearch_service_role", "get_upgrade_status", "get_upgrade_history", "get_compatible_elasticsearch_versions", "purchase_reserved_elasticsearch_instance_offering", "describe_elasticsearch_domain_config", "list_elasticsearch_versions", ], "salt/modules/boto3_route53.py": ["aws_encode"], "salt/modules/boto_cloudwatch.py": ["delete_alarm"], "salt/modules/boto_ec2.py": ["set_volumes_tags"], "salt/modules/boto_elasticache.py": ["create_subnet_group"], "salt/modules/boto_elb.py": [ "set_health_check", "set_attributes", "create", "delete", ], "salt/modules/boto_rds.py": [ "create_read_replica", "describe_parameters", "create_parameter_group", "modify_db_instance", "create_subnet_group", "create_option_group", "create", "describe_parameter_group", ], "salt/modules/boto_sns.py": [ "get_all_subscriptions_by_topic", "create", "subscribe", "delete", ], "salt/modules/boto_ssm.py": ["get_parameter", "delete_parameter", "put_parameter"], "salt/modules/capirca_acl.py": ["get_filter_pillar", "get_term_pillar"], "salt/modules/ceph.py": ["zap"], "salt/modules/chroot.py": ["exist"], "salt/modules/ciscoconfparse_mod.py": [ "find_objects", "find_objects_wo_child", "find_objects_w_child", ], "salt/modules/cisconso.py": [ "get_data", "get_rollbacks", "get_rollback", "info", "set_data_value", "apply_rollback", ], "salt/modules/cp.py": ["envs", "recv", "recv_chunked"], "salt/modules/cryptdev.py": ["active"], "salt/modules/datadog_api.py": ["post_event"], "salt/modules/defaults.py": ["deepcopy", "update"], "salt/modules/dracr.py": ["update_firmware", "update_firmware_nfs_or_cifs"], "salt/modules/dummyproxy_service.py": ["enabled", "running"], "salt/modules/ebuildpkg.py": 
["porttree_matches"], "salt/modules/eselect.py": ["exec_action", "set_target", "get_current_target"], "salt/modules/freebsd_update.py": [ "update", "ids", "rollback", "install", "fetch", "upgrade", ], "salt/modules/glanceng.py": [ "setup_clouds", "get_openstack_cloud", "get_operator_cloud", "compare_changes", ], "salt/modules/glassfish.py": [ "delete_jdbc_connection_pool", "create_connector_c_pool", "delete_jdbc_resource", "create_connector_resource", "enum_jdbc_connection_pool", "enum_connector_c_pool", "enum_jdbc_resource", "enum_admin_object_resource", "get_system_properties", "get_jdbc_connection_pool", "delete_system_properties", "delete_connector_c_pool", "create_jdbc_connection_pool", "update_jdbc_connection_pool", "update_system_properties", "update_connector_c_pool", "create_admin_object_resource", "enum_connector_resource", "create_jdbc_resource", "update_connector_resource", "delete_connector_resource", "delete_admin_object_resource", "update_admin_object_resource", "update_jdbc_resource", "get_admin_object_resource", "get_connector_resource", "get_connector_c_pool", "get_jdbc_resource", ], "salt/modules/google_chat.py": ["send_message"], "salt/modules/hadoop.py": ["namenode_format"], "salt/modules/highstate_doc.py": [ "processor_markdown", "read_file", "process_lowstates", "markdown_full_jinja_template", "markdown_default_jinja_template", "render", "markdown_basic_jinja_template", ], "salt/modules/ifttt.py": ["trigger_event"], "salt/modules/influxdbmod.py": ["query", "revoke_privilege", "grant_privilege"], "salt/modules/infoblox.py": ["diff_objects"], "salt/modules/kapacitor.py": ["version"], "salt/modules/keystoneng.py": [ "get_openstack_cloud", "compare_changes", "get_operator_cloud", "setup_clouds", "get_entity", ], "salt/modules/kubernetesmod.py": [ "replace_service", "create_deployment", "create_service", "create_pod", "replace_deployment", ], "salt/modules/logmod.py": [ "warning", "critical", "info", "exception", "error", "debug", ], 
"salt/modules/lxc.py": ["create"], "salt/modules/lxd.py": [ "pylxd_save_object", "container_start", "container_delete", "container_freeze", "container_unfreeze", "container_device_delete", "pylxd_client_get", "container_config_get", "container_restart", "container_rename", "container_device_get", "container_device_add", "container_config_delete", "container_stop", "container_get", "sync_config_devices", "container_file_get", "container_state", "container_config_set", ], "salt/modules/nacl.py": ["enc", "dec"], "salt/modules/nagios.py": ["run_all", "retcode"], "salt/modules/napalm_formula.py": ["dictupdate"], "salt/modules/napalm_mod.py": ["netmiko_conn", "pyeapi_conn"], "salt/modules/napalm_netacl.py": ["get_filter_pillar", "get_term_pillar"], "salt/modules/napalm_probes.py": ["delete_probes", "schedule_probes", "set_probes"], "salt/modules/netbox.py": ["get_", "filter_", "slugify"], "salt/modules/netmiko_mod.py": ["call", "multi_call", "get_connection"], "salt/modules/network.py": ["fqdns"], "salt/modules/neutronng.py": [ "get_openstack_cloud", "compare_changes", "get_operator_cloud", "subnet_update", "setup_clouds", ], "salt/modules/nexus.py": [ "get_latest_release", "get_latest_snapshot", "get_release", "get_snapshot_version_string", "get_snapshot", ], "salt/modules/nix.py": [ "collect_garbage", "uninstall", "list_pkgs", "install", "upgrade", ], "salt/modules/nspawn.py": ["stop", "restart"], "salt/modules/nxos.py": [ "set_role", "get_user", "find", "cmd", "get_roles", "show_run", "grains", "save_running_config", "remove_user", "check_role", "grains_refresh", "add_config", "system_info", "sendline", "delete_config", "show_ver", "check_password", "set_password", "ping", "unset_role", "replace", ], "salt/modules/nxos_upgrade.py": ["upgrade", "check_upgrade_impact"], "salt/modules/openbsdpkg.py": ["install"], "salt/modules/opkg.py": ["version_clean", "check_extra_requirements"], "salt/modules/pagerduty_util.py": [ "create_or_update_resource", "resource_absent", 
"resource_present", "get_resource", "delete_resource", ], "salt/modules/parallels.py": [ "delete", "exists", "list_vms", "status", "revert_snapshot", "stop", "prlsrvctl", "clone", "snapshot", "reset", "restart", "delete_snapshot", "exec_", "prlctl", "list_snapshots", "start", ], "salt/modules/pcs.py": ["item_create", "item_show"], "salt/modules/pkg_resource.py": ["format_pkg_list", "format_version"], "salt/modules/pkgin.py": ["normalize_name"], "salt/modules/portage_config.py": [ "get_all_cpv_use", "is_changed_uses", "get_installed_use", "filter_flags", "get_iuse", "get_cleared_flags", ], "salt/modules/poudriere.py": ["create_ports_tree"], "salt/modules/powerpath.py": ["add_license", "list_licenses", "remove_license"], "salt/modules/ps.py": ["pkill", "kill_pid", "pgrep"], "salt/modules/rest_service.py": ["enabled", "running"], "salt/modules/runit.py": [ "get_svc_alias", "get_svc_avail_path", "add_svc_avail_path", ], "salt/modules/s3.py": ["put", "get", "delete"], "salt/modules/saltcheck.py": ["parallel_scheck"], "salt/modules/selinux.py": ["filetype_id_to_string"], "salt/modules/sensehat.py": [ "get_pressure", "get_temperature_from_humidity", "get_pixels", "get_temperature", "set_pixels", "get_pixel", "get_humidity", "get_temperature_from_pressure", ], "salt/modules/sensors.py": ["sense"], "salt/modules/slsutil.py": ["banner", "boolstr"], "salt/modules/smartos_imgadm.py": ["docker_to_uuid"], "salt/modules/ssh_service.py": ["enabled", "running"], "salt/modules/state.py": ["test", "get_pauses", "apply_"], "salt/modules/swift.py": ["put", "get", "delete"], "salt/modules/system.py": ["set_reboot_required_witnessed"], "salt/modules/test.py": ["rand_str", "try_"], "salt/modules/tls.py": ["validate"], "salt/modules/tomcat.py": ["extract_war_version"], "salt/modules/trafficserver.py": [ "refresh", "set_config", "startup", "read_metric", "shutdown", "bounce_local", "alarms", "clear_node", "bounce_cluster", "status", "match_config", "offline", "zero_cluster", "zero_node", 
"restart_local", "read_config", "clear_alarms", "clear_cluster", "restart_cluster", "match_metric", ], "salt/modules/vagrant.py": ["get_machine_id", "get_vm_info"], "salt/modules/vault.py": ["read_secret"], "salt/modules/virt.py": [ "nesthash", "pool_update", "init", "node_devices", "network_update", ], "salt/modules/virtualenv_mod.py": ["virtualenv_ver"], "salt/modules/vsphere.py": [ "create_storage_policy", "power_off_vm", "list_diskgroups", "list_default_vsan_policy", "create_diskgroup", "list_dvportgroups", "assign_license", "update_vm", "list_cluster", "update_dvportgroup", "update_cluster", "list_dvss", "set_advanced_configs", "add_license", "list_capability_definitions", "register_vm", "list_disk_partitions", "remove_datastore", "update_storage_policy", "erase_disk_partitions", "get_vm_config", "remove_dvportgroup", "delete_vm", "create_datacenter", "add_capacity_to_diskgroup", "get_vm_config_file", "list_datacenters_via_proxy", "power_on_vm", "rename_datastore", "list_licenses", "configure_host_cache", "compare_vm_configs", "get_host_cache", "unregister_vm", "create_cluster", "update_dvs", "list_uplink_dvportgroup", "get_advanced_configs", "delete_advanced_configs", "create_dvportgroup", "list_disks", "assign_default_storage_policy_to_datastore", "create_dvs", "list_datastores_via_proxy", "list_default_storage_policy_of_datastore", "create_vmfs_datastore", "list_assigned_licenses", "get_vm", "remove_diskgroup", "remove_capacity_from_diskgroup", "list_storage_policies", ], "salt/modules/win_pkg.py": ["get_package_info"], "salt/modules/win_timezone.py": ["zone_compare"], "salt/modules/zabbix.py": [ "substitute_params", "get_zabbix_id_mapper", "get_object_id_by_params", "compare_params", ], "salt/modules/zk_concurrency.py": [ "lock", "party_members", "unlock", "lock_holders", ], } @task(iterable=["files"], positional=["files"]) def check(ctx, files, check_proper_formatting=False, error_on_known_failures=False): """ Check salt's docstrings """ # CD into Salt's 
repo root directory ctx.cd(CODE_DIR) # Unfortunately invoke does not support nargs. # We migth have been passed --files="foo.py bar.py" # Turn that into a list of paths _files = [] for path in files: if not path: continue _files.extend(path.split()) if not _files: _files = SALT_CODE_DIR.rglob("*.py") else: _files = [pathlib.Path(fname) for fname in _files] _files = [path.resolve() for path in _files] errors = 0 exitcode = 0 warnings = 0 for path in _files: contents = path.read_text() try: module = ast.parse(path.read_text(), filename=str(path)) module_docstring = ast.get_docstring(module, clean=False) if module_docstring: error = _check_valid_versions_on_docstrings(module_docstring) if error: errors += 1 exitcode = 1 utils.error( "The module '{}' does not provide a proper `{}` version: {!r} is not valid.", path.relative_to(CODE_DIR), *error, ) for funcdef in [ node for node in module.body if isinstance(node, ast.FunctionDef) ]: docstring = ast.get_docstring(funcdef, clean=False) if docstring: error = _check_valid_versions_on_docstrings(docstring) if error: errors += 1 exitcode = 1 utils.error( "The module '{}' does not provide a proper `{}` version: {!r} is not valid.", path.relative_to(CODE_DIR), *error, ) if not str(path).startswith(SALT_INTERNAL_LOADERS_PATHS): # No further docstrings checks are needed continue funcname = funcdef.name relpath = str(path.relative_to(CODE_DIR)) # We're dealing with a salt loader module if funcname.startswith("_"): # We're not interested in internal functions continue if not docstring: if ( funcname in MISSING_DOCSTRINGS.get(relpath, ()) and error_on_known_failures is False ): warnings += 1 utils.warn( "The function '{}' on '{}' does not have a docstring", funcname, relpath, ) continue errors += 1 exitcode = 1 utils.error( "The function '{}' on '{}' does not have a docstring", funcname, relpath, ) continue elif funcname in MISSING_DOCSTRINGS.get(relpath, ()): # This was previously a know function with a missing docstring. 
# Warn about it so that it get's removed from this list warnings += 1 utils.warn( "The function '{}' on '{}' was previously known to not have a docstring, " "which is no longer the case. Please remove it from 'MISSING_DOCSTRINGS' ." "in '{}'", funcname, relpath, THIS_FILE, ) try: salt_modules_relpath = path.relative_to(SALT_MODULES_PATH) if str(salt_modules_relpath.parent) != ".": # We don't want to check nested packages continue # But this is a module under salt/modules, let's check # the CLI examples except ValueError: # We're not checking CLI examples in any other salt loader modules continue if _check_cli_example_present(docstring) is False: if ( funcname in MISSING_EXAMPLES.get(relpath, ()) and error_on_known_failures is False ): warnings += 1 utils.warn( "The function '{}' on '{}' does not have a 'CLI Example:' in it's docstring", funcname, relpath, ) continue errors += 1 exitcode = 1 utils.error( "The function '{}' on '{}' does not have a 'CLI Example:' in it's docstring", funcname, relpath, ) continue elif funcname in MISSING_EXAMPLES.get(relpath, ()): # This was previously a know function with a missing CLI example # Warn about it so that it get's removed from this list warnings += 1 utils.warn( "The function '{}' on '{}' was previously known to not have a CLI Example, " "which is no longer the case. Please remove it from 'MISSING_EXAMPLES'. " "in '{}'", funcname, relpath, THIS_FILE, ) if check_proper_formatting is False: continue # By now we now this function has a docstring and it has a CLI Example section # Let's now check if it's properly formatted if _check_cli_example_proper_formatting(docstring) is False: errors += 1 exitcode = 1 utils.error( "The function {!r} on '{}' does not have a proper 'CLI Example:' section in " "it's docstring. The proper format is:\n" "CLI Example:\n" "\n" ".. 
code-block:: bash\n" "\n" " salt '*' <insert example here>\n", funcdef.name, path.relative_to(CODE_DIR), ) continue finally: if contents != path.read_text(): path.write_text(contents) if warnings: utils.warn("Found {} warnings", warnings) if exitcode: utils.error("Found {} errors", errors) utils.exit_invoke(exitcode) CHECK_VALID_VERSION_RE = re.compile( "(?P<vtype>(versionadded|versionchanged|deprecated))::(?P<version>.*)" ) def _check_valid_versions_on_docstrings(docstring): for match in CHECK_VALID_VERSION_RE.finditer(docstring): vtype = match.group("vtype") version = match.group("version") versions = [vs.strip() for vs in version.split(",")] bad_versions = [] for vs in versions: try: SaltStackVersion.parse(vs) except ValueError: bad_versions.append(vs) if bad_versions: return vtype, ", ".join(bad_versions) return False CLI_EXAMPLE_PRESENT_RE = re.compile(r"CLI Example(?:s)?:") def _check_cli_example_present(docstring): return CLI_EXAMPLE_PRESENT_RE.search(docstring) is not None CLI_EXAMPLE_PROPER_FORMATTING_RE = re.compile( r"CLI Example(?:s)?:\n\n.. code-block:: bash\n\n salt (.*) '*'", re.MULTILINE ) def _check_cli_example_proper_formatting(docstring): return CLI_EXAMPLE_PROPER_FORMATTING_RE.search(docstring) is not None
18,238
1,232
/*- * << * DBus * == * Copyright (C) 2016 - 2019 Bridata * == * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * >> */ /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.creditease.dbus.notopen.mongo; import com.creditease.dbus.common.format.InputSplit; import org.bson.Document; /** * A InputSplit of mongo DB. */ public class MongoInputSplit extends InputSplit { private String pullTargetMongoUrl = null; private Document dataQueryObject = null; /** * Default Constructor. 
*/ public MongoInputSplit() { } public MongoInputSplit(String pullTargetMongoUrl, Document dataQueryObject) { this.pullTargetMongoUrl = pullTargetMongoUrl; this.dataQueryObject = dataQueryObject; } public String getPullTargetMongoUrl() { return pullTargetMongoUrl; } public void setPullTargetMongoUrl(String pullTargetMongoUrl) { this.pullTargetMongoUrl = pullTargetMongoUrl; } public Document getDataQueryObject() { return dataQueryObject; } public void setDataQueryObject(Document dataQueryObject) { this.dataQueryObject = dataQueryObject; } public long getLength() { return -1; } @Override public String toString() { return "MongoInputSplit{" + "pullTargetMongoUrl='" + pullTargetMongoUrl + '\'' + ", dataQueryObject=" + dataQueryObject + '}'; } }
872
788
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.usergrid.utils;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

/**
 * Small reflection helpers layered on top of commons-lang
 * {@code ClassUtils}: unchecked casting and primitive/wrapper/basic-type
 * classification.
 */
public class ClassUtils extends org.apache.commons.lang.ClassUtils {

    /**
     * Casts {@code a} to an arbitrary target type inferred at the call site.
     * The cast is unchecked; a wrong target type surfaces later as a
     * {@link ClassCastException} at the point of use.
     */
    @SuppressWarnings("unchecked")
    public static <A, B> B cast( A a ) {
        return ( B ) a;
    }

    /**
     * Wrapper classes of the Java primitive types (plus {@link Void}).
     * Wrapped unmodifiable so the shared static set cannot be mutated;
     * the explicit type witness on {@code asList} avoids the unchecked
     * warning the original suppressed.
     */
    private static final Set<Class<?>> WRAPPER_TYPES = Collections.unmodifiableSet(
            new HashSet<Class<?>>( Arrays.<Class<?>>asList( Boolean.class, Byte.class, Character.class,
                    Double.class, Float.class, Integer.class, Long.class, Short.class, Void.class ) ) );

    /** @return true if {@code clazz} is one of the primitive wrapper classes. */
    public static boolean isWrapperType( Class<?> clazz ) {
        return WRAPPER_TYPES.contains( clazz );
    }

    /**
     * @return true if {@code clazz} is a primitive type or one of its
     *         wrapper classes; false for {@code null}.
     */
    public static boolean isPrimitiveType( Class<?> clazz ) {
        if ( clazz == null ) {
            return false;
        }
        return clazz.isPrimitive() || isWrapperType( clazz );
    }

    /**
     * @return true if {@code clazz} is {@link String} (or a subtype) or a
     *         primitive/wrapper type; false for {@code null}.
     */
    public static boolean isBasicType( Class<?> clazz ) {
        if ( clazz == null ) {
            return false;
        }
        return ( String.class.isAssignableFrom( clazz ) ) || isPrimitiveType( clazz );
    }
}
667
513
// Swaps two characters in place (local helper; note this may shadow
// std::swap if <utility> names are in scope -- TODO confirm intent).
void swap(char &a, char &b){
    char temp = a;
    a = b;
    b = temp;
}

// Reverses the characters of A in the half-open range [i, j), i.e. the word
// starting at index i whose one-past-the-end index is j. k counts how far
// we have advanced from the right edge; the loop stops at the midpoint.
void swapBetween(int i, int j, string &A){
    int k = 1;
    for(int z = i; z < (i+j)/2; z++){
        swap(A[z], A[j-k]);
        k++;
    }
}

// Reverses the order of the words in A in place, collapsing runs of spaces
// to a single separator and dropping leading/trailing whitespace from the
// result. Strategy: (1) reverse each word's characters in place, then
// (2) copy the whole string backwards into B -- reading the reversed words
// backwards restores their letters while emitting the words in reverse order.
// NOTE(review): `i < A.size()` compares signed int against size_t; fine for
// normal inputs but worth confirming against the platform's warning level.
void Solution::reverseWords(string &A) {
    // Do not write main() function.
    // Do not read input, instead use the arguments to the function.
    // Do not print the output, instead return values as specified
    // Still have a doubt. Checkout www.interviewbit.com/pages/sample_codes/ for more details
    int i;
    // Pass 1: reverse each word in place. j is advanced to the first
    // whitespace (or end of string) after the word that starts at i.
    for(i = 0; i < A.size(); i++){
        if(!isspace(A[i])){
            int j = i+1;
            if(j >= A.size()){
                break;
            }
            while(!isspace(A[j])){
                j++;
                if(j == A.size()){
                    break;
                }
            }
            swapBetween(i, j, A);
            i = j;  // skip past the word just reversed
        }
    }
    // Pass 2: build B by walking A from the end; emit one space between
    // words (count != 0 guards against a leading separator).
    string B = "";
    i = A.size()-1;
    int count = 0;  // number of non-space characters copied so far
    while(i >= 0){
        if(isspace(A[i])){
            if(i == 0){
                break;
            }
            if(isspace(A[i-1])){
                // middle of a whitespace run: emit nothing (collapses spaces)
            }
            else{
                if(count != 0){
                    B = B + " ";
                }
            }
        }
        else{
            count ++;
            B = B + A[i];
        }
        i--;
    }
    A = B;
}
933
301
<filename>package.json { "name": "serverless-prune-plugin", "version": "1.6.1", "description": "Serverless plugin to delete old versions of deployed functions from AWS", "author": "<NAME> <<EMAIL>> (https://claygregory.com/)", "keywords": [ "serverless", "serverless-plugin", "aws", "aws-lambda", "lambda" ], "repository": "claygregory/serverless-prune-plugin", "homepage": "https://github.com/claygregory/serverless-prune-plugin", "license": "MIT", "main": "index.js", "scripts": { "lint": "eslint .", "test": "mocha", "cover": "istanbul cover _mocha", "coveralls": "npm run cover -- --report lcovonly && cat ./coverage/lcov.info | coveralls" }, "dependencies": { "bluebird": "^3.4.7" }, "devDependencies": { "coveralls": "^3.0.0", "eslint": "^4.8.0", "istanbul": "^0.4.5", "mocha": "^4.0.0", "mocha-lcov-reporter": "^1.2.0", "sinon": "^4.0.1" }, "peerDependencies": { "serverless": "1 || 2" } }
448
1,045
/***************************************************************************************************
 Tencent is pleased to support the open source community by making RapidView available.

 Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.

 Licensed under the MITLicense (the "License"); you may not use this file except in compliance
 withthe License. You mayobtain a copy of the License at

 http://opensource.org/licenses/MIT

 Unless required by applicable law or agreed to in writing, software distributed under the License is
 distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 implied. See the License for the specific language governing permissions and limitations under the
 License.
 ***************************************************************************************************/
package com.tencent.rapidview.framework;

import android.content.Context;

import com.tencent.rapidview.deobfuscated.IRapidActionListener;
import com.tencent.rapidview.animation.RapidAnimationCenter;
import com.tencent.rapidview.data.DataExpressionsParser;
import com.tencent.rapidview.data.RapidDataBinder;
import com.tencent.rapidview.data.Var;
import com.tencent.rapidview.deobfuscated.IRapidView;
import com.tencent.rapidview.lua.RapidLuaEnvironment;
import com.tencent.rapidview.param.ParamsObject;
import com.tencent.rapidview.parser.ViewStubParser;
import com.tencent.rapidview.task.RapidTaskCenter;
import com.tencent.rapidview.utils.RapidStringUtils;
import com.tencent.rapidview.utils.RapidXmlLoader;
import com.tencent.rapidview.utils.XLog;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * @Class RapidObjectImpl
 * @Desc Because the public-facing interface mixes externally-called entry points with
 *       internal recursion, the two should not live in the same class; the shared
 *       implementation logic is extracted here to improve the readability and safety
 *       of subclasses. (Translated from the original Chinese.)
 *
 *       Inflates a RapidView view hierarchy from an XML document: each XML element is
 *       mapped to an IRapidView, with special handling for merge / include / viewstub
 *       tags and for non-view tags (task, script, precompile, animation).
 *
 * @author arlozhang
 * @date 2016.04.12 */
public abstract class RapidObjectImpl {

    /** Mutable flags shared between concurrent load/initialize phases of one view. */
    public class CONCURRENT_LOAD_STATE {
        public volatile boolean mCalledLoad = false;
        public volatile boolean mCalledInitialize = false;
        public volatile boolean mInitialized = false;
        public volatile boolean mWaited = false;
        public List<IRapidView> mPreloadList = new ArrayList<IRapidView>();
    }

    /**
     * Loads an already-initialized view into the given context.
     * Returns the same view on success, or null if any argument is null.
     */
    protected IRapidView loadView(Context parent, IRapidView rapidView, ParamsObject paramsObject, IRapidActionListener listener){
        if( parent == null || rapidView == null || paramsObject == null ){
            return null;
        }

        rapidView.load(parent, paramsObject, listener);

        return rapidView;
    }

    /**
     * Loads the XML document named xmlName and inflates its root element into a view.
     * Returns null when context/xmlName are missing, the document cannot be loaded,
     * the root is missing, or the root is a (forbidden) merge tag.
     * NOTE(review): if initElement throws, arrayView stays null and the final
     * "return arrayView[0]" throws NullPointerException — confirm whether intended.
     */
    protected IRapidView initXml(Context context,
                                 String  rapidID,
                                 boolean limitLevel,
                                 String  xmlName,
                                 Map<String, String> envMap,
                                 RapidLuaEnvironment luaEnv,
                                 RapidTaskCenter taskCenter,
                                 RapidAnimationCenter animationCenter,
                                 RapidDataBinder binder,
                                 CONCURRENT_LOAD_STATE concState){
        Document doc = null;
        IRapidView[] arrayView = null;

        if( context == null || RapidStringUtils.isEmpty(xmlName) ){
            if( context == null ){
                XLog.d(RapidConfig.RAPID_ERROR_TAG, "初始化失败(context为空)");
            }

            if( RapidStringUtils.isEmpty(xmlName) ){
                XLog.d(RapidConfig.RAPID_ERROR_TAG, "初始化失败(xmlName为空)");
            }

            return null;
        }

        doc = RapidXmlLoader.self().getDocument(context, xmlName, rapidID, limitLevel);

        if( doc == null ){
            XLog.d(RapidConfig.RAPID_ERROR_TAG, "初始化失败(XMLDOC为空):" + xmlName);
            return null;
        }

        try{
            Element root = doc.getDocumentElement();
            if( root == null ){
                XLog.d(RapidConfig.RAPID_ERROR_TAG, "初始化失败(XML没有根节点):" + xmlName);
                return null;
            }

            // A merge tag is only meaningful inside a parent; it cannot be the root.
            if( isMergeTag(root) ){
                XLog.d(RapidConfig.RAPID_ERROR_TAG, "根节点禁止使用merge标签:" + xmlName);
                return null;
            }

            arrayView = initElement(context, rapidID, limitLevel, root, envMap, luaEnv, null,
                                    taskCenter, animationCenter, binder, concState);

            if( arrayView[0] == null ){
                XLog.d(RapidConfig.RAPID_ERROR_TAG, "初始化的对象为空:" + xmlName);
                return null;
            }

            // Wire the root view back into its task and lua centers.
            arrayView[0].getParser().getTaskCenter().setRapidView(arrayView[0]);
            arrayView[0].getParser().getXmlLuaCenter().setRapidView(arrayView[0]);
        }catch (Exception e){
            e.printStackTrace();
        }

        return arrayView[0];
    }

    /** @return true if the element is one of the structural tags: merge, include, viewstub. */
    protected boolean isSpecialTag(Element element){
        String tagName;

        if( element == null ){
            return false;
        }

        tagName = element.getTagName();

        if( tagName.compareToIgnoreCase("merge") != 0 &&
            tagName.compareToIgnoreCase("include") != 0 &&
            tagName.compareToIgnoreCase("viewstub") != 0 ){
            return false;
        }

        return true;
    }

    /** @return true if the element is a merge tag (case-insensitive). */
    protected boolean isMergeTag(Element element){
        String tagName;

        if( element == null ){
            return false;
        }

        tagName = element.getTagName();

        if( tagName.compareToIgnoreCase("merge") != 0 ){
            return false;
        }

        return true;
    }

    /**
     * Dispatches one XML element: special tags may expand to several views,
     * a normal tag always yields a single-element array.
     */
    protected IRapidView[] initElement(Context context,
                                       String  rapidID,
                                       boolean limitLevel,
                                       Element element,
                                       Map<String, String> envMap,
                                       RapidLuaEnvironment luaEnv,
                                       Map<String, IRapidView> brotherMap,
                                       RapidTaskCenter taskCenter,
                                       RapidAnimationCenter animationCenter,
                                       RapidDataBinder binder,
                                       CONCURRENT_LOAD_STATE concState){
        IRapidView[] arrayObj;

        if( isSpecialTag(element) ){
            arrayObj = initSpecialTag(context, rapidID, limitLevel, element, envMap, luaEnv,
                                      brotherMap, taskCenter, animationCenter, binder, concState);
            return arrayObj;
        }

        arrayObj = new IRapidView[1];
        arrayObj[0] = initNormalTag(context, rapidID, limitLevel, element, envMap, luaEnv,
                                    brotherMap, taskCenter, animationCenter, binder, concState);
        return arrayObj;
    }

    /**
     * Routes merge/include/viewstub elements to their dedicated handlers.
     * Note: include intentionally does not receive brotherMap.
     */
    protected IRapidView[] initSpecialTag(Context context,
                                          String  rapidID,
                                          boolean limitLevel,
                                          Element element,
                                          Map<String, String> envMap,
                                          RapidLuaEnvironment luaEnv,
                                          Map<String, IRapidView> brotherMap,
                                          RapidTaskCenter taskCenter,
                                          RapidAnimationCenter animationCenter,
                                          RapidDataBinder binder,
                                          CONCURRENT_LOAD_STATE concState){
        String tagName = element.getTagName();

        if( tagName.compareToIgnoreCase("merge") == 0 ){
            return initMerge(context, rapidID, limitLevel, element, envMap, luaEnv, brotherMap,
                             taskCenter, animationCenter, binder, concState);
        }

        if( tagName.compareToIgnoreCase("include") == 0 ){
            return initInclude(context, rapidID, limitLevel, element, envMap, luaEnv,
                               taskCenter, animationCenter, binder, concState);
        }

        if( tagName.compareToIgnoreCase("viewstub") == 0 ){
            return initViewStub(context, rapidID, limitLevel, element, envMap, luaEnv, brotherMap,
                                taskCenter, animationCenter, binder, concState);
        }

        return null;
    }

    /**
     * Handles a regular element. Non-view tags (task / script / precompile / animation)
     * are registered with their centers and yield null. Otherwise the concrete view class
     * chosen by RapidChooser is instantiated, initialized, and returned.
     * NOTE(review): Class.newInstance() is deprecated; behavior kept as-is here.
     */
    protected IRapidView initNormalTag(Context context,
                                       String  rapidID,
                                       boolean limitLevel,
                                       Element element,
                                       Map<String, String> envMap,
                                       RapidLuaEnvironment luaEnv,
                                       Map<String, IRapidView> brotherMap,
                                       RapidTaskCenter taskCenter,
                                       RapidAnimationCenter animationCenter,
                                       RapidDataBinder binder,
                                       CONCURRENT_LOAD_STATE concState){
        Object obj;
        Class clazz;

        try{
            if( taskCenter != null && addTask(element, taskCenter, envMap) ) {
                return null;
            }

            if( addScript(element, luaEnv, envMap) ){
                return null;
            }

            if( addPreCompile(element, luaEnv) ){
                return null;
            }

            if( addAnimation(element, animationCenter) ){
                return null;
            }

            clazz = RapidChooser.getInstance().getDisposalClass(element, limitLevel);
            if( clazz == null ){
                return null;
            }

            obj = clazz.newInstance();
            if( !( obj instanceof IRapidView) ){
                return null;
            }

            initControl(context, rapidID, limitLevel, (IRapidView)obj, element, envMap, luaEnv,
                        brotherMap, taskCenter, animationCenter, binder, concState);

            // Remember the originating tag name on the parser (lowercased).
            ((IRapidView) obj).getParser().mControlName = element.getTagName().toLowerCase();
        }catch (Exception e){
            e.printStackTrace();
            obj = null;
        }

        return (IRapidView)obj;
    }

    /** Delegates to the view's own initialize after null-checking the required inputs. */
    protected boolean initControl( Context context,
                                   String  rapidID,
                                   boolean limitLevel,
                                   IRapidView view,
                                   Element element,
                                   Map<String, String> envMap,
                                   RapidLuaEnvironment luaEnv,
                                   Map<String, IRapidView> brotherMap,
                                   RapidTaskCenter taskCenter,
                                   RapidAnimationCenter animationCenter,
                                   RapidDataBinder binder,
                                   CONCURRENT_LOAD_STATE concState){
        if( view == null || element == null || envMap == null || taskCenter == null ){
            return false;
        }

        return view.initialize(context, rapidID, limitLevel, element, envMap, luaEnv, brotherMap,
                               taskCenter, animationCenter, binder, concState);
    }

    /**
     * Expands a merge tag: each child element is inflated (recursively handling nested
     * special tags) and all resulting views are returned as a flat array.
     */
    protected IRapidView[] initMerge(Context context,
                                     String  rapidID,
                                     boolean limitLevel,
                                     Element element,
                                     Map<String, String> envMap,
                                     RapidLuaEnvironment luaEnv,
                                     Map<String, IRapidView> brotherMap,
                                     RapidTaskCenter taskCenter,
                                     RapidAnimationCenter animationCenter,
                                     RapidDataBinder binder,
                                     CONCURRENT_LOAD_STATE concState){
        IRapidView[] arrayView;
        NodeList listChild;
        List<IRapidView> listView = new ArrayList<IRapidView>();

        if( context == null ||element == null || envMap == null){
            return null;
        }

        listChild = element.getChildNodes();

        for( int i = 0; i < listChild.getLength(); i++ ){
            Element childElement;
            IRapidView rapidView;
            Node node = listChild.item(i);

            // Skip text/comment nodes; only element children are inflated.
            if( node.getNodeType() != Node.ELEMENT_NODE ){
                continue;
            }

            childElement = (Element)node;

            if( isSpecialTag(childElement) ){
                IRapidView[] arrayChildView = initSpecialTag(context, rapidID, limitLevel,
                        childElement, envMap, luaEnv, brotherMap, taskCenter, animationCenter,
                        binder, concState);

                for( int j = 0 ; j < arrayChildView.length; j++ ){
                    listView.add(arrayChildView[j]);
                }

                continue;
            }

            rapidView = initNormalTag(context, rapidID, limitLevel, childElement, envMap, luaEnv,
                                      brotherMap, taskCenter, animationCenter, binder, concState);
            if( rapidView != null ){
                listView.add(rapidView);
            }
        }

        arrayView = new IRapidView[listView.size()];
        for( int i = 0; i < listView.size(); i++ ){
            arrayView[i] = listView.get(i);
        }

        return arrayView;
    }

    /**
     * Expands an include tag: loads one or more external XML layouts (attribute "layout",
     * falling back to "xml"), each with its own optional environment map. The attributes
     * binder="new" / luaenvironment="new" request fresh data-binder / lua environments
     * instead of inheriting the caller's. Attribute values may be data expressions,
     * resolved via DataExpressionsParser against the binder and envMap.
     */
    protected IRapidView[] initInclude(Context context,
                                       String  rapidID,
                                       boolean limitLevel,
                                       Element element,
                                       Map<String, String> envMap,
                                       RapidLuaEnvironment luaEnv,
                                       RapidTaskCenter taskCenter,
                                       RapidAnimationCenter animationCenter,
                                       RapidDataBinder binder,
                                       CONCURRENT_LOAD_STATE concState){
        IRapidView[] arrayView;
        List<IRapidView> listView = new ArrayList<IRapidView>();
        String strXmlList;
        String strEnvList;
        String strBinder;
        String strLuaEnv;
        NamedNodeMap mapAttrs;
        List<String> listXml;
        List<Map<String, String>> listMapEnv;
        Map<String, String> mapAttributes = new ConcurrentHashMap<String, String>();
        DataExpressionsParser parser = new DataExpressionsParser();
        RapidLuaEnvironment luaEnvironment = luaEnv;

        if( context == null ||element == null ){
            return null;
        }

        // Copy attributes into a case-normalized (lowercase-key) map.
        mapAttrs = element.getAttributes();
        for( int i = 0; i < mapAttrs.getLength(); i++){
            mapAttributes.put(mapAttrs.item(i).getNodeName().toLowerCase(), mapAttrs.item(i).getNodeValue());
        }

        strXmlList = mapAttributes.get("layout");
        if( strXmlList == null ){
            strXmlList = mapAttributes.get("xml");

            if( strXmlList == null ){
                strXmlList = "";
            }
        }

        if( parser.isDataExpression(strXmlList) ){
            strXmlList = parser.get(binder, envMap, null, null, strXmlList).getString();
        }

        strEnvList = mapAttributes.get("environment");
        if( strEnvList == null ){
            strEnvList = "";
        }

        if( parser.isDataExpression(strEnvList) ){
            Var list = parser.get(binder, envMap, null, null, strEnvList);
            if( list != null ){
                strEnvList = list.getString();
            }
        }

        strBinder = mapAttributes.get("binder");
        if( strBinder != null && strBinder.compareToIgnoreCase("new") == 0 ){
            binder = new RapidDataBinder(new ConcurrentHashMap<String, Var>());
        }

        strLuaEnv = mapAttributes.get("luaenvironment");
        if( strLuaEnv != null && strLuaEnv.compareToIgnoreCase("new") == 0 ){
            luaEnvironment = new RapidLuaEnvironment(null, rapidID, limitLevel);
        }

        listXml = RapidStringUtils.stringToList(strXmlList);
        listMapEnv = RapidStringUtils.stringToListMap(strEnvList);

        // Inflate each referenced layout; when fewer env maps than layouts were
        // given, the extras get an empty environment.
        for( int i = 0; i < listXml.size(); i++ ){
            IRapidView xmlView = initXml( context, rapidID, limitLevel, listXml.get(i),
                    listMapEnv.size() > i ? listMapEnv.get(i) : new ConcurrentHashMap<String, String>(),
                    luaEnvironment, taskCenter, animationCenter, binder, concState);

            if( xmlView == null ){
                continue;
            }

            listView.add(xmlView);
        }

        arrayView = new IRapidView[listView.size()];
        for( int i = 0; i < listView.size(); i++ ){
            arrayView[i] = listView.get(i);
        }

        return arrayView;
    }

    /**
     * Handles a viewstub tag: the stub element itself is inflated as a normal tag
     * (its parser must be a ViewStubParser) and the external layout referenced by the
     * "layout" attribute is preloaded and attached as the stub's replacement view.
     */
    protected IRapidView[] initViewStub(Context context,
                                        String  rapidID,
                                        boolean limitLevel,
                                        Element element,
                                        Map<String, String> envMap,
                                        RapidLuaEnvironment luaEnv,
                                        Map<String, IRapidView> brotherMap,
                                        RapidTaskCenter taskCenter,
                                        RapidAnimationCenter animationCenter,
                                        RapidDataBinder binder,
                                        CONCURRENT_LOAD_STATE concState){
        IRapidView[] arrayView = new IRapidView[1];
        String strLayout;
        NamedNodeMap mapAttrs;
        IRapidView xmlView;
        IRapidView stubView;
        Map<String, String> mapAttributes = new ConcurrentHashMap<String, String>();
        DataExpressionsParser parser = new DataExpressionsParser();

        if( context == null ||element == null ){
            return null;
        }

        mapAttrs = element.getAttributes();
        for( int i = 0; i < mapAttrs.getLength(); i++){
            mapAttributes.put(mapAttrs.item(i).getNodeName().toLowerCase(), mapAttrs.item(i).getNodeValue());
        }

        strLayout = mapAttributes.get("layout");
        if( parser.isDataExpression(strLayout) ){
            Var layout = parser.get(binder, envMap, null, null, strLayout);
            if( layout != null ){
                strLayout = layout.getString();
            }
        }

        xmlView = initXml( context, rapidID, limitLevel, strLayout,
                new ConcurrentHashMap<String, String>(), luaEnv, taskCenter,
                animationCenter, binder, concState);

        stubView = initNormalTag(context, rapidID, limitLevel, element, envMap, luaEnv,
                                 brotherMap, taskCenter, animationCenter, binder, concState);
        if( stubView == null ){
            return null;
        }

        if( !(stubView.getParser() instanceof ViewStubParser) ){
            return null;
        }

        ((ViewStubParser) stubView.getParser()).setReplaceView(xmlView);

        arrayView[0] = stubView;

        return arrayView;
    }

    /** Registers a &lt;task&gt; element with the task center. @return true if consumed. */
    protected boolean addTask(Element element, RapidTaskCenter taskCenter, Map<String, String> envMap){
        if( element.getTagName().compareToIgnoreCase("task") != 0 || taskCenter == null ){
            return false;
        }

        taskCenter.setEnvironment(envMap);
        taskCenter.add(element);

        return true;
    }

    /** Registers a &lt;script&gt; element with the lua center. @return true if consumed. */
    protected boolean addScript(Element element, RapidLuaEnvironment luaEnv, Map<String, String> envMap){
        if( element.getTagName().compareToIgnoreCase("script") != 0 ){
            return false;
        }

        luaEnv.getXmlLuaCenter().add(element, envMap);

        return true;
    }

    /** Registers an animation element with the animation center. @return true if consumed. */
    protected boolean addAnimation(Element element, RapidAnimationCenter animationCenter){
        if( animationCenter == null || !animationCenter.isAnimation(element) ){
            return false;
        }

        animationCenter.add(element);

        return true;
    }

    /** Handles a &lt;precompile&gt; element: eagerly compiles the lua file it names. @return true if consumed. */
    protected boolean addPreCompile(Element element, RapidLuaEnvironment luaEnv){
        String file = null;

        if( element.getTagName().compareToIgnoreCase("precompile") != 0 ){
            return false;
        }

        file = element.getAttribute("file");
        if( file != null ){
            luaEnv.initClosure(file);
        }

        return true;
    }
}
11,010
563
/*
 * Copyright 2020 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.yahoo.athenz.common.server.ssh;

import org.testng.annotations.Test;

import static org.testng.Assert.assertEquals;

/**
 * Unit tests for {@link SSHCertRecord}: verifies that every setter stores
 * its value and the matching getter returns it unchanged.
 * (Removed a redundant same-package import of SSHCertRecord and the unused
 * static import of assertTrue.)
 */
public class SSHCertRecordTest {

    @Test
    public void testSSHCertRecord() {

        // Populate every field, then read each one back.
        SSHCertRecord certRecord = new SSHCertRecord();
        certRecord.setService("cn");
        certRecord.setInstanceId("instance-id");
        certRecord.setPrincipals("host1,host2");
        certRecord.setClientIP("10.1.1.1");
        certRecord.setPrivateIP("10.1.1.2");

        assertEquals(certRecord.getService(), "cn");
        assertEquals(certRecord.getInstanceId(), "instance-id");
        assertEquals(certRecord.getPrincipals(), "host1,host2");
        assertEquals(certRecord.getClientIP(), "10.1.1.1");
        assertEquals(certRecord.getPrivateIP(), "10.1.1.2");
    }
}
530
6,989
#include "linear_system.h"

#include <catboost/libs/helpers/exception.h>

#include <util/generic/ymath.h>
#include <util/generic/vector.h>
#include <util/stream/output.h>

#include <contrib/libs/clapack/clapack.h>

// Solves A*x = target for a symmetric positive-definite matrix A supplied in
// LAPACK *packed* lower-triangular storage ('L'): matrix holds n*(n+1)/2
// doubles. The solution is written in place over `target`; `matrix` is
// overwritten by LAPACK with its Cholesky factor.
// Throws (via CB_ENSURE) if the packed size is inconsistent or if dppsv_
// reports that the system is not positive definite (info != 0).
void SolveLinearSystem(TArrayRef<double> matrix, TArrayRef<double> target) {
    const auto expectedMatrixSize = target.size() * (target.size() + 1) / 2;
    CB_ENSURE_INTERNAL(
        matrix.size() == expectedMatrixSize,
        "Packed matrix size for right hand side size " << target.size() << " should be "
        << expectedMatrixSize << ", not " << matrix.size()
    );
    // 1x1 system: solve directly, no LAPACK call needed.
    if (target.size() == 1) {
        target[0] /= matrix[0];
        return;
    }

    char matrixStorageType[] = {'L', '\0'};
    int systemSize = target.ysize();
    int numberOfRightHandSides = 1;
    int info = 0;

    // LAPACK dppsv: Cholesky solve for packed symmetric positive-definite A.
    dppsv_(matrixStorageType, &systemSize, &numberOfRightHandSides, matrix.data(), target.data(), &systemSize, &info);
    CB_ENSURE(info == 0, "System of linear equations is not positive definite");
}

// Same as above but for a *full-storage* symmetric positive-definite matrix
// (n*n doubles, upper triangle 'U' referenced), using LAPACK dposv.
// Solution overwrites *target; *matrix is overwritten with the Cholesky
// factor. Only aborts on invalid arguments (info < 0); a positive info
// (matrix not positive definite) is NOT treated as fatal here — note the
// asymmetry with SolveLinearSystem above.
void SolveLinearSystemCholesky(TVector<double>* matrix, TVector<double>* target) {
    if (target->size() == 1) {
        (*target)[0] /= (*matrix)[0];
        return;
    }

    char matrixStorageType[] = {'U', '\0'};
    int systemSize = target->ysize();
    int numberOfRightHandSides = 1;
    int info = 0;

    dposv_(matrixStorageType, &systemSize, &numberOfRightHandSides, matrix->data(), &systemSize, target->data(), &systemSize, &info);

    Y_VERIFY(info >= 0);
}
628
575
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Tests cursor hit-testing over shaped text: mapping an x-position to a
// character offset (Font::OffsetForPosition) and a character range to a
// selection width (Font::SelectionRectForText), for LTR/RTL text and for
// fonts with ligatures (expected offsets fall on fractional glyph widths).

#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/file_path_conversion.h"
#include "third_party/blink/public/platform/web_string.h"
#include "third_party/blink/renderer/platform/fonts/font.h"
#include "third_party/blink/renderer/platform/fonts/font_description.h"
#include "third_party/blink/renderer/platform/testing/font_test_helpers.h"
#include "third_party/blink/renderer/platform/testing/unit_test_helpers.h"

using blink::test::CreateTestFont;

namespace blink {

class CursorPositionTest : public ::testing::Test {
 public:
  // Test fonts; each maps to a file in the platform test-data directory.
  enum FontName {
    ahem,
    amiri,
    megalopolis,
    roboto,
  };

  // Width of the selection rect for text[start, end) at font size 100,
  // with ligatures enabled. end == -1 means "to the end of the run".
  float GetWidth(FontName font_name,
                 const String& text,
                 bool ltr,
                 int start = 0,
                 int end = -1) {
    FontDescription::VariantLigatures ligatures(
        FontDescription::kEnabledLigaturesState);
    Font font = CreateTestFont(
        "TestFont",
        test::PlatformTestDataPath(font_path.find(font_name)->value), 100,
        &ligatures);
    TextRun text_run(
        text, /* xpos */ 0, /* expansion */ 0,
        TextRun::kAllowTrailingExpansion | TextRun::kForbidLeadingExpansion,
        ltr ? TextDirection::kLtr : TextDirection::kRtl, false);
    if (end == -1)
      end = text_run.length();
    DCHECK_GE(start, 0);
    DCHECK_LE(start, static_cast<int>(text_run.length()));
    DCHECK_GE(end, -1);
    DCHECK_LE(end, static_cast<int>(text_run.length()));
    FloatRect rect =
        font.SelectionRectForText(text_run, FloatPoint(), 12, start, end);
    return rect.Width();
  }

  // Character offset hit by x-position `position`. `partial` selects
  // IncludePartialGlyphs (round to nearest boundary) vs OnlyFullGlyphs.
  int GetCharacter(FontName font_name,
                   const String& text,
                   bool ltr,
                   float position,
                   bool partial) {
    FontDescription::VariantLigatures ligatures(
        FontDescription::kEnabledLigaturesState);
    Font font = CreateTestFont(
        "TestFont",
        test::PlatformTestDataPath(font_path.find(font_name)->value), 100,
        &ligatures);
    TextRun text_run(
        text, /* xpos */ 0, /* expansion */ 0,
        TextRun::kAllowTrailingExpansion | TextRun::kForbidLeadingExpansion,
        ltr ? TextDirection::kLtr : TextDirection::kRtl, false);
    return font.OffsetForPosition(
        text_run, position, partial ? IncludePartialGlyphs : OnlyFullGlyphs,
        BreakGlyphs);
  }

 private:
  HashMap<FontName, String, WTF::IntHash<FontName>> font_path = {
      {ahem, "Ahem.woff"},
      {amiri, "third_party/Amiri/amiri_arabic.woff2"},
      {megalopolis, "third_party/MEgalopolis/MEgalopolisExtra.woff"},
      {roboto, "third_party/Roboto/roboto-regular.woff2"},
  };
};

// Hit-testing in LTR Ahem text (every glyph is exactly 100 units wide).
TEST_F(CursorPositionTest, LTRMouse) {
  EXPECT_EQ(GetCharacter(ahem, "X", true, 0, false), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", true, 0, true), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", true, 10, false), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", true, 10, true), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", true, 60, false), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", true, 60, true), 1);
  EXPECT_EQ(GetCharacter(ahem, "X", true, 100, false), 1);
  EXPECT_EQ(GetCharacter(ahem, "X", true, 100, true), 1);
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 10, false), 0);
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 10, true), 0);
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 60, false), 0);
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 60, true), 1);
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 100, true), 1);
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 100, false), 1);
  // NOTE(review): the next two assertions are identical ("125, true" twice);
  // one was presumably meant to be "125, false" — confirm upstream.
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 125, true), 1);
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 125, true), 1);
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 151, false), 1);
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 151, true), 2);
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 175, false), 1);
  EXPECT_EQ(GetCharacter(ahem, "XXX", true, 175, true), 2);
}

// Hit-testing inside LTR ligatures ("FU" and "RA" each form one ligature);
// expected boundaries sit at fractions of the ligature's width.
TEST_F(CursorPositionTest, LTRLigatureMouse) {
  const float kFUWidth = GetWidth(megalopolis, "FU", true);
  const float kRAWidth = GetWidth(megalopolis, "RA", true);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth / 4 - 1, false),
            0);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth / 4 - 1, true), 0);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth / 4 + 1, false),
            0);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth / 4 + 1, true), 1);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth / 2 - 1, false),
            0);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth / 2 - 1, true), 1);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth / 2 + 1, false),
            1);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth / 2 + 1, true), 1);

  EXPECT_EQ(
      GetCharacter(megalopolis, "FURA", true, kFUWidth * 3 / 4 - 1, false), 1);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth * 3 / 4 - 1, true),
            1);

  EXPECT_EQ(
      GetCharacter(megalopolis, "FURA", true, kFUWidth * 3 / 4 + 1, false), 1);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth * 3 / 4 + 1, true),
            2);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth - 1, false), 1);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth - 1, true), 2);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth + 1, false), 2);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true, kFUWidth + 1, true), 2);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth / 4 - 1, false),
            2);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth / 4 - 1, true),
            2);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth / 4 + 1, false),
            2);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth / 4 + 1, true),
            3);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth / 2 - 1, false),
            2);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth / 2 - 1, true),
            3);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth / 2 + 1, false),
            3);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth / 2 + 1, true),
            3);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth * 3 / 4 - 1, false),
            3);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth * 3 / 4 - 1, true),
            3);

  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth * 3 / 4 + 1, false),
            3);
  EXPECT_EQ(GetCharacter(megalopolis, "FURA", true,
                         kFUWidth + kRAWidth * 3 / 4 + 1, true),
            4);

  EXPECT_EQ(
      GetCharacter(megalopolis, "FURA", true, kFUWidth + kRAWidth - 1, false),
      3);
  EXPECT_EQ(
      GetCharacter(megalopolis, "FURA", true, kFUWidth + kRAWidth - 1, true),
      4);

  EXPECT_EQ(
      GetCharacter(megalopolis, "FURA", true, kFUWidth + kRAWidth + 1, false),
      4);
  EXPECT_EQ(
      GetCharacter(megalopolis, "FURA", true, kFUWidth + kRAWidth + 1, true),
      4);
}

// Hit-testing in RTL text: offsets count logical characters, so x == 0 maps
// to the *last* logical character. Arabic cases use shaped (joined) forms.
TEST_F(CursorPositionTest, RTLMouse) {
  // The widths below are from the final shaped version, not from the single
  // characters. They were extracted with "hb-shape --font-size=100"

  EXPECT_EQ(GetCharacter(ahem, "X", false, 0, false), 1);
  EXPECT_EQ(GetCharacter(ahem, "X", false, 0, true), 1);
  EXPECT_EQ(GetCharacter(ahem, "X", false, 10, false), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", false, 10, true), 1);
  EXPECT_EQ(GetCharacter(ahem, "X", false, 49, false), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", false, 49, true), 1);
  EXPECT_EQ(GetCharacter(ahem, "X", false, 51, false), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", false, 51, true), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", false, 60, false), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", false, 60, true), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", false, 100, false), 0);
  EXPECT_EQ(GetCharacter(ahem, "X", false, 100, true), 0);

  const float kAloneTaWidth = GetWidth(amiri, u"ت", false);
  EXPECT_EQ(GetCharacter(amiri, u"ت", false, 0, false), 1);
  EXPECT_EQ(GetCharacter(amiri, u"ت", false, 0, true), 1);
  EXPECT_EQ(GetCharacter(amiri, u"ت", false, kAloneTaWidth / 4, false), 0);
  EXPECT_EQ(GetCharacter(amiri, u"ت", false, kAloneTaWidth / 4, true), 1);
  EXPECT_EQ(GetCharacter(amiri, u"ت", false, kAloneTaWidth * 2 / 3, false), 0);
  EXPECT_EQ(GetCharacter(amiri, u"ت", false, kAloneTaWidth * 2 / 3, true), 0);
  EXPECT_EQ(GetCharacter(amiri, u"ت", false, 2 * kAloneTaWidth, false), 0);
  EXPECT_EQ(GetCharacter(amiri, u"ت", false, 2 * kAloneTaWidth, true), 0);

  const float kAboveTaWidth = 10;
  const float kAboveKhaWidth = 55;
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false, 0, false), 2);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false, 0, true), 2);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false, kAboveTaWidth / 4, false), 1);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false, kAboveTaWidth / 4, true), 2);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false, kAboveTaWidth * 2 / 3, false), 1);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false, kAboveTaWidth * 2 / 3, true), 1);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false, kAboveTaWidth + 1, false), 0);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false, kAboveTaWidth + 1, true), 1);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false,
                         kAboveTaWidth + kAboveKhaWidth / 4, false),
            0);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false,
                         kAboveTaWidth + kAboveKhaWidth / 4, true),
            1);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false,
                         kAboveTaWidth + kAboveKhaWidth * 2 / 3, false),
            0);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false,
                         kAboveTaWidth + kAboveKhaWidth * 2 / 3, true),
            0);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false,
                         kAboveTaWidth + kAboveKhaWidth + 1, false),
            0);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false,
                         kAboveTaWidth + kAboveKhaWidth + 1, true),
            0);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false,
                         2 * (kAboveTaWidth + kAboveKhaWidth), false),
            0);
  EXPECT_EQ(GetCharacter(amiri, u"تخ", false,
                         2 * (kAboveTaWidth + kAboveKhaWidth), true),
            0);
}

// Hit-testing inside RTL ligatures ("ARUF" shapes as RA+FU right-to-left).
TEST_F(CursorPositionTest, RTLLigatureMouse) {
  const float kFUWidth = GetWidth(megalopolis, "FU", true);
  const float kRAWidth = GetWidth(megalopolis, "RA", true);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth / 4 - 1, false),
            3);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth / 4 - 1, true),
            4);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth / 4 + 1, false),
            3);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth / 4 + 1, true),
            3);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth / 2 - 1, false),
            3);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth / 2 - 1, true),
            3);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth / 2 + 1, false),
            2);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth / 2 + 1, true),
            3);

  EXPECT_EQ(
      GetCharacter(megalopolis, "ARUF", false, kFUWidth * 3 / 4 - 1, false), 2);
  EXPECT_EQ(
      GetCharacter(megalopolis, "ARUF", false, kFUWidth * 3 / 4 - 1, true), 3);

  EXPECT_EQ(
      GetCharacter(megalopolis, "ARUF", false, kFUWidth * 3 / 4 + 1, false), 2);
  EXPECT_EQ(
      GetCharacter(megalopolis, "ARUF", false, kFUWidth * 3 / 4 + 1, true), 2);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth - 1, false), 2);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth - 1, true), 2);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth + 1, false), 1);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false, kFUWidth + 1, true), 2);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth / 4 - 1, false),
            1);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth / 4 - 1, true),
            2);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth / 4 + 1, false),
            1);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth / 4 + 1, true),
            1);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth / 2 - 1, false),
            1);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth / 2 - 1, true),
            1);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth / 2 + 1, false),
            0);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth / 2 + 1, true),
            1);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth * 3 / 4 - 1, false),
            0);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth * 3 / 4 - 1, true),
            1);

  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth * 3 / 4 + 1, false),
            0);
  EXPECT_EQ(GetCharacter(megalopolis, "ARUF", false,
                         kFUWidth + kRAWidth * 3 / 4 + 1, true),
            0);

  EXPECT_EQ(
      GetCharacter(megalopolis, "ARUF", false, kFUWidth + kRAWidth - 1, false),
      0);
  EXPECT_EQ(
      GetCharacter(megalopolis, "ARUF", false, kFUWidth + kRAWidth - 1, true),
      0);

  EXPECT_EQ(
      GetCharacter(megalopolis, "ARUF", false, kFUWidth + kRAWidth + 1, false),
      0);
  EXPECT_EQ(
      GetCharacter(megalopolis, "ARUF", false, kFUWidth + kRAWidth + 1, true),
      0);
}

// Selection widths in LTR Ahem text: each glyph contributes exactly 100.
TEST_F(CursorPositionTest, LTRText) {
  EXPECT_EQ(GetWidth(ahem, "X", true, 0, 1), 100);

  EXPECT_EQ(GetWidth(ahem, "XXX", true, 0, 1), 100);
  EXPECT_EQ(GetWidth(ahem, "XXX", true, 0, 2), 200);
  EXPECT_EQ(GetWidth(ahem, "XXX", true, 0, 3), 300);
  EXPECT_EQ(GetWidth(ahem, "XXX", true, 1, 2), 100);
  EXPECT_EQ(GetWidth(ahem, "XXX", true, 1, 3), 200);
  EXPECT_EQ(GetWidth(ahem, "XXX", true, 2, 3), 100);
}

// Selection widths across LTR ligatures: a boundary inside an n-character
// ligature is expected at the proportional (1/n) fraction of its width.
TEST_F(CursorPositionTest, LTRLigature) {
  const float kFUWidth = GetWidth(megalopolis, "FU", true);
  const float kRAWidth = GetWidth(megalopolis, "RA", true);

  EXPECT_NEAR(GetWidth(megalopolis, "FURA", true, 0, 1), kFUWidth / 2, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "FURA", true, 0, 2), kFUWidth, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "FURA", true, 0, 3),
              kFUWidth + kRAWidth / 2, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "FURA", true, 0, 4), kFUWidth + kRAWidth,
              1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "FURA", true, 1, 2), kFUWidth / 2, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "FURA", true, 1, 3),
              kFUWidth / 2 + kRAWidth / 2, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "FURA", true, 1, 4),
              kFUWidth / 2 + kRAWidth, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "FURA", true, 2, 3), kRAWidth / 2, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "FURA", true, 2, 4), kRAWidth, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "FURA", true, 3, 4), kRAWidth / 2, 1.0);

  const float kFFIWidth = GetWidth(roboto, "ffi", true);
  const float kFFWidth = GetWidth(roboto, "ff", true);
  const float kIWidth = GetWidth(roboto, u"î", true);

  EXPECT_NEAR(GetWidth(roboto, "ffi", true, 0, 1), kFFIWidth / 3.0, 1.0);
  EXPECT_NEAR(GetWidth(roboto, "ffi", true, 0, 2), kFFIWidth * 2.0 / 3.0, 1.0);
  EXPECT_NEAR(GetWidth(roboto, "ffi", true, 0, 3), kFFIWidth, 1.0);
  EXPECT_NEAR(GetWidth(roboto, "ffi", true, 1, 2), kFFIWidth / 3.0, 1.0);
  EXPECT_NEAR(GetWidth(roboto, "ffi", true, 1, 3), kFFIWidth * 2.0 / 3.0, 1.0);
  EXPECT_NEAR(GetWidth(roboto, "ffi", true, 2, 3), kFFIWidth / 3.0, 1.0);

  EXPECT_NEAR(GetWidth(roboto, u"ffî", true, 0, 1), kFFWidth / 2.0, 1.0);
  EXPECT_NEAR(GetWidth(roboto, u"ffî", true, 0, 2), kFFWidth, 1.0);
  EXPECT_NEAR(GetWidth(roboto, u"ffî", true, 0, 3), kFFWidth + kIWidth, 1.0);
  EXPECT_NEAR(GetWidth(roboto, u"ffî", true, 1, 2), kFFWidth / 2.0, 1.0);
  EXPECT_NEAR(GetWidth(roboto, u"ffî", true, 1, 3), kFFWidth / 2.0 + kIWidth,
              1.0);
  EXPECT_NEAR(GetWidth(roboto, u"ffî", true, 2, 3), kIWidth, 1.0);
}

// Selection widths in shaped RTL Arabic text; expected glyph advances were
// measured externally (see comment below).
TEST_F(CursorPositionTest, RTLText) {
  // The widths below are from the final shaped version, not from the single
  // characters. They were extracted with "hb-shape --font-size=100"
  EXPECT_EQ(GetWidth(amiri, u"ت", false, 0, 1), 93);

  const float kAboveKhaWidth = 55;
  const float kAboveTaWidth = 10;
  EXPECT_NEAR(GetWidth(amiri, u"تخ", false, 0, 1), kAboveKhaWidth, 1.0);
  EXPECT_NEAR(GetWidth(amiri, u"تخ", false, 0, 2),
              kAboveKhaWidth + kAboveTaWidth, 1.0);
  EXPECT_NEAR(GetWidth(amiri, u"تخ", false, 1, 2), kAboveTaWidth, 1.0);

  const float kTaWidth = 75;
  const float kKhaWidth = 7;
  const float kLamWidth = 56;
  const float kAlifWidth = 22;
  EXPECT_NEAR(GetWidth(amiri, u"الخط", false, 0, 1), kAlifWidth, 1.0);
  EXPECT_NEAR(GetWidth(amiri, u"الخط", false, 0, 2), kAlifWidth + kLamWidth,
              1.0);
  EXPECT_NEAR(GetWidth(amiri, u"الخط", false, 0, 3),
              kAlifWidth + kLamWidth + kKhaWidth, 1.0);
  EXPECT_NEAR(GetWidth(amiri, u"الخط", false, 0, 4),
              kAlifWidth + kLamWidth + kKhaWidth + kTaWidth, 1.0);
  EXPECT_NEAR(GetWidth(amiri, u"الخط", false, 1, 2), kLamWidth, 1.0);
  EXPECT_NEAR(GetWidth(amiri, u"الخط", false, 1, 3), kLamWidth + kKhaWidth,
              1.0);
  EXPECT_NEAR(GetWidth(amiri, u"الخط", false, 1, 4),
              kLamWidth + kKhaWidth + kTaWidth, 1.0);
  EXPECT_NEAR(GetWidth(amiri, u"الخط", false, 2, 3), kKhaWidth, 1.0);
  EXPECT_NEAR(GetWidth(amiri, u"الخط", false, 2, 4), kKhaWidth + kTaWidth,
              1.0);
  EXPECT_NEAR(GetWidth(amiri, u"الخط", false, 3, 4), kTaWidth, 1.0);

  // A zero-width combining mark (fatha) must not change the selection width.
  const float kMeemWidth = GetWidth(amiri, u"م", false);
  EXPECT_EQ(GetWidth(amiri, u"مَ", false, 0, 1), kMeemWidth);
  EXPECT_EQ(GetWidth(amiri, u"مَ", false, 0, 2), kMeemWidth);
  EXPECT_EQ(GetWidth(amiri, u"مَ", false, 1, 2), kMeemWidth);
}

// Selection widths across RTL ligatures (mirror of LTRLigature).
TEST_F(CursorPositionTest, RTLLigature) {
  const float kFUWidth = GetWidth(megalopolis, "FU", true);
  const float kRAWidth = GetWidth(megalopolis, "RA", true);

  EXPECT_NEAR(GetWidth(megalopolis, "ARUF", false, 0, 1), kRAWidth / 2, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "ARUF", false, 0, 2), kRAWidth, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "ARUF", false, 0, 3),
              kRAWidth + kFUWidth / 2, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "ARUF", false, 0, 4), kRAWidth + kFUWidth,
              1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "ARUF", false, 1, 2), kRAWidth / 2, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "ARUF", false, 1, 3),
              kRAWidth / 2 + kFUWidth / 2, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "ARUF", false, 1, 4),
              kRAWidth / 2 + kFUWidth, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "ARUF", false, 2, 3), kFUWidth / 2, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "ARUF", false, 2, 4), kFUWidth, 1.0);
  EXPECT_NEAR(GetWidth(megalopolis, "ARUF", false, 3, 4), kFUWidth / 2, 1.0);
}

}  // namespace blink
9,342
778
<gh_stars>100-1000
// Copyright (C) 2018-2021 <NAME> <<EMAIL>>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at https://mozilla.org/MPL/2.0/.

#ifndef SPECTRA_TYPE_TRAITS_H
#define SPECTRA_TYPE_TRAITS_H

#include <Eigen/Core>
#include <limits>

/// \cond

// Clang-Format will have unintended effects:
//     static constexpr Scalar(min)()
// So we turn it off here
//
// clang-format off

namespace Spectra {

// For a real value type "Scalar", we want to know its smallest
// positive value, i.e., std::numeric_limits<Scalar>::min().
// However, we must take non-standard value types into account,
// so we rely on Eigen::NumTraits.
//
// Eigen::NumTraits has defined epsilon() and lowest(), but
// lowest() means negative highest(), which is a very small
// negative value.
//
// Therefore, we manually define this limit, and use epsilon()^3
// to mimic it for non-standard types.

// Generic definition.
// Fallback for user-defined scalar types (e.g. multi-precision numbers):
// epsilon() comes from Eigen's numext shim, and min() is approximated as
// epsilon()^3, since such types may not provide a true denormal-min.
// NOTE: min is written as "(min)()" throughout this header so that the
// identifier is not expanded if a min() function-like macro is in scope
// (e.g. from <windows.h> without NOMINMAX).
template <typename Scalar>
struct TypeTraits
{
    static constexpr Scalar epsilon()
    {
        return Eigen::numext::numeric_limits<Scalar>::epsilon();
    }
    static constexpr Scalar (min)()
    {
        return epsilon() * epsilon() * epsilon();
    }
};

// Full specialization
// For the built-in floating-point types, forward directly to
// std::numeric_limits, which gives the exact machine epsilon and the
// smallest positive normalized value.
template <>
struct TypeTraits<float>
{
    static constexpr float epsilon()
    {
        return std::numeric_limits<float>::epsilon();
    }
    static constexpr float (min)()
    {
        return (std::numeric_limits<float>::min)();
    }
};

template <>
struct TypeTraits<double>
{
    static constexpr double epsilon()
    {
        return std::numeric_limits<double>::epsilon();
    }
    static constexpr double (min)()
    {
        return (std::numeric_limits<double>::min)();
    }
};

template <>
struct TypeTraits<long double>
{
    static constexpr long double epsilon()
    {
        return std::numeric_limits<long double>::epsilon();
    }
    static constexpr long double (min)()
    {
        return (std::numeric_limits<long double>::min)();
    }
};

// Get the element type of a "scalar"
// ElemType<double>               => double
// ElemType<std::complex<double>> => double
template <typename T>
using ElemType = typename Eigen::NumTraits<T>::Real;

}  // namespace Spectra

/// \endcond

#endif  // SPECTRA_TYPE_TRAITS_H
895
10,028
{ "settings_ruffle_enable": { "message": "Flash içeriğini Ruffle ile oynat" }, "settings_page_ignore_optout": { "message": "Ruffle'a engel olan sitelerde dahi Flash içeriğini oynatmaya çalış" }, "settings_ignore_optout": { "message": "Sitelerin uyumsuzluk uyarılarını yoksay" }, "status_init": { "message": "Mevcut sekme okunuyor…" }, "status_no_tabs": { "message": "Aktif sekme yok." }, "status_tabs_error": { "message": "Mevcut sekmeyi okurken bir hata oluştu" }, "status_message_init": { "message": "Mevcut sekmede Ruffle'ın durumu inceleniyor…" }, "status_result_running": { "message": "Ruffle yüklendi ve mevcut sekmedeki Flash içeriğini oynatmakta." }, "status_result_optout": { "message": "Ruffle yüklenmedi çünkü mevcut sayfa kendini uyumsuz olarak işaretledi." }, "status_result_disabled": { "message": "Ruffle yüklenmedi çünkü kullanıcısınca kapatılmış." }, "status_result_error": { "message": "Mevcut sekmedeki Ruffle instanceı sorgulanırken bir hata oluştu." }, "status_result_protected": { "message": "Korumalı tarayıcı sayfalarında Ruffle yüklenemez." }, "action_reload": { "message": "Değişiklikleri uygulamak için sekmeyi yenileyin" }, "open_settings_page": { "message": "Ayarlar Sayfasını Aç" }, "settings_page": { "message": "Ayarlar Sayfası" }, "description": { "message": "Flash'ı ait olduğu Web'de yerini geri verir." }, "save_settings": { "message": "Ayarları Kaydet" }, "settings_saved": { "message": "Kaydedildi" } }
868
575
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

/* C++ implementation of a .size file parser.
 * The .size file spec is found in libsupersize/file_format.py
 */

#include "tools/binary_size/libsupersize/caspian/file_format.h"

#include <assert.h>
#include <stdint.h>
#include <stdlib.h>

#include <cstring>
#include <iostream>
#include <memory>
#include <numeric>
#include <set>
#include <sstream>
#include <string>
#include <vector>

#include "third_party/jsoncpp/source/include/json/json.h"
#include "third_party/zlib/google/compression_utils_portable.h"
#include "tools/binary_size/libsupersize/caspian/model.h"

namespace {

// Header prepended to .sizediff files; regular .size files start with the
// same "# Created by" comment line, followed by the serialization version.
const char kDiffHeader[] = "# Created by //tools/binary_size\nDIFF\n";
const char kSerializationVersionSingleContainer[] = "Size File Format v1";
const char kSerializationVersionMultiContainer[] = "Size File Format v1.1";

// Consumes one line from |*rest| and parses it as a base-10 integer.
// Like all helpers below, this advances |*rest| destructively: strsep()
// overwrites the delimiter with '\0' and moves the pointer past it.
int ReadLoneInt(char** rest) {
  char* token = strsep(rest, "\n");
  return std::strtol(token, nullptr, 10);
}

// Gzip-decompresses |gzipped| (|len| bytes) into |uncompressed|, appending a
// terminating NUL so the buffer can be walked with C string functions.
// Exits the process on any malformed input (caspian's error strategy is
// abort-with-message throughout this file).
void Decompress(const char* gzipped,
                unsigned long len,
                std::vector<char>* uncompressed) {
  // gzip format stores the uncompressed size in the last four bytes.
  if (len < sizeof(uint32_t)) {
    std::cerr << "Input too short to be gzipped" << std::endl;
    exit(1);
  }
  uint32_t uncompressed_size = *reinterpret_cast<const uint32_t*>(
      &gzipped[len - sizeof(uncompressed_size)]);

  // Should be little-endian.
  // Runtime endianness probe: if this host is big-endian, byte-swap the
  // little-endian size field read from the gzip footer.
  int num = 1;
  if (*reinterpret_cast<char*>(&num) != 1) {
    uncompressed_size = __builtin_bswap32(uncompressed_size);
  }

  uncompressed->resize(uncompressed_size + 1);
  // Add terminating null for safety.
  (*uncompressed)[uncompressed_size] = '\0';

  unsigned long long_uncompressed_size(uncompressed_size);
  auto ret = zlib_internal::GzipUncompressHelper(
      reinterpret_cast<Bytef*>(&(*uncompressed)[0]), &long_uncompressed_size,
      reinterpret_cast<const Bytef*>(gzipped), len);
  if (Z_OK == ret) {
    return;
  }
  std::cerr << "Failed to decompress. Zlib code: " << ret << std::endl;
  exit(1);
}

// Consumes one line from |*rest| and splits it on |delimiter|, returning
// pointers into the (now NUL-punctuated) buffer. An empty line yields an
// empty vector rather than a single "" entry.
std::vector<const char*> ReadValuesFromLine(char** rest,
                                            const char* delimiter) {
  std::vector<const char*> ret;
  char* rest_of_line = strsep(rest, "\n");
  // Check for empty line (otherwise "" is added).
  if (!*rest_of_line)
    return ret;
  while (true) {
    char* token = strsep(&rest_of_line, delimiter);
    if (!token)
      break;
    ret.push_back(token);
  }
  return ret;
}

// Consumes one line and parses exactly |n| base-10 integers separated by
// |delim|. If |stored_as_delta| is set, the on-disk values are successive
// differences, so a running prefix sum reconstructs the absolute values.
template <typename T>
std::vector<T> ReadIntList(char** rest,
                           const char* delim,
                           int n,
                           bool stored_as_delta) {
  char* rest_of_line = strsep(rest, "\n");
  std::vector<T> result;
  result.resize(n);
  for (int i = 0; i < n; i++) {
    char* token = strsep(&rest_of_line, delim);
    result[i] = std::strtol(token, nullptr, 10);
  }
  if (stored_as_delta)
    std::partial_sum(result.begin(), result.end(), result.begin());
  return result;
}

// Reads one space-delimited int list per section, sized by |symbol_counts|.
// Each section's list occupies its own line in the input.
template <typename T>
std::vector<std::vector<T>> ReadIntListForEachSection(
    char** rest,
    const std::vector<int>& symbol_counts,
    bool stored_as_delta) {
  std::vector<std::vector<T>> ret;
  ret.reserve(symbol_counts.size());
  for (int nsymbols : symbol_counts) {
    ret.push_back(ReadIntList<T>(rest, " ", nsymbols, stored_as_delta));
  }
  return ret;
}

// Parses a length-prefixed JSON blob from |*rest| into |fields| and advances
// |*rest| past the blob (plus its trailing newline). Exits on parse failure.
void ReadJsonBlob(char** rest, Json::Value* fields) {
  // Metadata begins with its length in bytes, followed by a json blob.
  int fields_len = ReadLoneInt(rest);
  if (fields_len < 0) {
    std::cerr << "Unexpected negative fields length: " << fields_len
              << std::endl;
    exit(1);
  }
  char* json_start = *rest;
  // +1 skips the newline that follows the JSON blob.
  *rest += fields_len + 1;

  std::unique_ptr<Json::CharReader> reader;
  reader.reset(Json::CharReaderBuilder().newCharReader());
  std::string json_errors;
  if (!reader->parse(json_start, json_start + fields_len, fields,
                     &json_errors)) {
    std::cerr << "Failed to parse JSON fields:" << *rest << std::endl;
    std::cerr << json_errors << std::endl;
    exit(1);
  }
}

// Sanity check run after parsing: aborts if |rest| still contains non-empty
// lines (prints at most 50 of them first). Trailing blank lines are fine.
void CheckNoNonEmptyLinesRemain(char* rest) {
  if (rest) {
    int lines_remaining = 50;
    bool newlines_only = true;
    char* line = nullptr;
    while (lines_remaining > 0 && (line = strsep(&rest, "\n"))) {
      if (strcmp("", line)) {
        std::cerr << "Unparsed line: " << line << std::endl;
        newlines_only = false;
        lines_remaining--;
      }
    }
    if (!newlines_only) {
      exit(1);
    }
  }
}

} // namespace

namespace caspian {

// Derives per-symbol padding from the gap between consecutive symbol
// addresses within a section, folding the padding into each symbol's size.
// Expects |raw_symbols| sorted by section, then address; aliased symbols
// (same address, shared alias group) inherit the first alias's padding/size.
void CalculatePadding(std::vector<Symbol>* raw_symbols) {
  std::set<const char*> seen_sections;
  for (size_t i = 1; i < raw_symbols->size(); i++) {
    const Symbol& prev_symbol = (*raw_symbols)[i - 1];
    Symbol& symbol = (*raw_symbols)[i];

    if (symbol.IsOverhead()) {
      // Overhead symbols are considered all padding.
      symbol.padding_ = symbol.size_;
    }
    if (prev_symbol.SectionName() != symbol.SectionName()) {
      // Section boundary: a section must not reappear once left.
      if (seen_sections.count(symbol.section_name_)) {
        std::cerr << "Input symbols must be sorted by section, then address: "
                  << prev_symbol << ", " << symbol << std::endl;
        exit(1);
      }
      seen_sections.insert(symbol.SectionName());
      continue;
    }
    // Padding only makes sense for native symbols with real addresses.
    if (symbol.Address() <= 0 || prev_symbol.Address() <= 0 ||
        !symbol.IsNative() || !prev_symbol.IsNative()) {
      continue;
    }

    if (symbol.Address() == prev_symbol.Address()) {
      if (symbol.aliases_ && symbol.aliases_ == prev_symbol.aliases_) {
        // Aliases of the same symbol share padding and size.
        symbol.padding_ = prev_symbol.padding_;
        symbol.size_ = prev_symbol.size_;
        continue;
      }
      if (prev_symbol.SizeWithoutPadding() != 0) {
        // Padding-only symbols happen for ** symbol gaps.
        std::cerr << "Found duplicate symbols: " << prev_symbol << ", "
                  << symbol << std::endl;
        exit(1);
      }
    }

    int32_t padding = symbol.Address() - prev_symbol.EndAddress();
    symbol.padding_ = padding;
    symbol.size_ += padding;
    if (symbol.size_ < 0) {
      std::cerr << "Symbol has negative size (likely not sorted properly):"
                << symbol << std::endl;
      std::cerr << "prev symbol: " << prev_symbol << std::endl;
      exit(1);
    }
  }
}

// Parses a gzipped .size payload into |info|. The decompressed buffer is
// owned by |info->raw_decompressed|; all char* stored in symbols/paths point
// into that buffer (strsep punctuates it with NULs), so |info| must outlive
// every string it hands out. Exits the process on malformed input.
void ParseSizeInfo(const char* gzipped, unsigned long len, SizeInfo* info) {
  // To avoid memory allocations, all the char* in our final Symbol set will
  // be pointers into the region originally pointed to by |decompressed_start|.
  // Calls to strsep() replace delimiter characters with null terminators.
  Decompress(gzipped, len, &info->raw_decompressed);
  char* rest = &info->raw_decompressed[0];

  // Ignore generated header.
  char* line = strsep(&rest, "\n");

  // Serialization version.
  line = strsep(&rest, "\n");
  bool has_multi_containers = false;
  if (!std::strcmp(line, kSerializationVersionSingleContainer)) {
    has_multi_containers = false;
  } else if (!std::strcmp(line, kSerializationVersionMultiContainer)) {
    has_multi_containers = true;
  } else {
    std::cerr << "Serialization version: '" << line << "' not recognized."
              << std::endl;
    exit(1);
  }

  ReadJsonBlob(&rest, &info->fields);

  if (has_multi_containers) {
    // v1.1: container names come from the JSON metadata.
    const Json::Value& container_values = info->fields["containers"];
    for (const auto& container_value : container_values) {
      const std::string name = container_value["name"].asString();
      info->containers.push_back(Container(name));
    }
  } else {
    // v1: a single anonymous container.
    info->containers.push_back(Container(""));
  }

  const bool has_components = info->fields["has_components"].asBool();
  const bool has_padding = info->fields["has_padding"].asBool();

  // List of paths: (object_path, [source_path]).
  int n_paths = ReadLoneInt(&rest);
  if (n_paths < 0) {
    std::cerr << "Unexpected negative path list length: " << n_paths
              << std::endl;
    exit(1);
  }
  std::cout << "Reading " << n_paths << " paths" << std::endl;

  info->object_paths.reserve(n_paths);
  info->source_paths.reserve(n_paths);
  for (int i = 0; i < n_paths; i++) {
    char* line = strsep(&rest, "\n");
    char* first = strsep(&line, "\t");
    char* second = strsep(&line, "\t");
    if (second) {
      // Both object and source path present.
      info->object_paths.push_back(first);
      info->source_paths.push_back(second);
    } else if (first) {
      // Only an object path on this row.
      info->object_paths.push_back(first);
      info->source_paths.push_back("");
    } else if (line) {
      std::cerr << "Too many tokens on path row: " << i << std::endl;
      exit(1);
    } else {
      // Blank row: both paths empty.
      info->object_paths.push_back("");
      info->source_paths.push_back("");
    }
  }

  if (has_components) {
    // List of component names.
    int n_components = ReadLoneInt(&rest);
    if (n_components < 0) {
      std::cerr << "Unexpected negative components list length: "
                << n_components << std::endl;
      exit(1);
    }
    std::cout << "Reading " << n_components << " components" << std::endl;

    info->components.reserve(n_components);
    for (int i = 0; i < n_components; i++) {
      info->components.push_back(strsep(&rest, "\n"));
    }
  }

  // Segments = List of (Container, section name).
  std::vector<const char*> segment_names;
  segment_names = ReadValuesFromLine(&rest, "\t");
  int n_segments = segment_names.size();

  // Parse segment name into Container pointers and section names.
  std::vector<Container*> segment_containers(n_segments);
  std::vector<const char*> segment_section_names(n_segments);
  for (int segment_idx = 0; segment_idx < n_segments; segment_idx++) {
    const char* segment_name = segment_names[segment_idx];
    if (has_multi_containers) {
      // |segment_name| is formatted as "<container_idx>section_name".
      std::string t = segment_name;
      assert(t.length() > 0 && t[0] == '<');
      size_t sep_pos = t.find('>');
      assert(sep_pos != std::string::npos);
      std::string container_idx_str = t.substr(1, sep_pos - 1);
      int container_idx = std::atoi(container_idx_str.c_str());
      assert(container_idx >= 0 &&
             container_idx < static_cast<int>(info->containers.size()));
      segment_containers[segment_idx] = &info->containers[container_idx];
      segment_section_names[segment_idx] = segment_name + (sep_pos + 1);
    } else {
      // Segments are already container names.
      segment_containers[segment_idx] = &info->containers[0];
      segment_section_names[segment_idx] = segment_name;
    }
  }

  // Symbol counts for each section.
  std::vector<int> symbol_counts =
      ReadIntList<int>(&rest, "\t", n_segments, false);
  std::cout << "Symbol counts:" << std::endl;
  int total_symbols =
      std::accumulate(symbol_counts.begin(), symbol_counts.end(), 0);
  for (int segment_idx = 0; segment_idx < n_segments; segment_idx++) {
    std::cout << "  ";
    if (has_multi_containers) {
      std::cout << "<" << segment_containers[segment_idx]->name << ">";
    }
    std::cout << segment_section_names[segment_idx];
    std::cout << '\t' << symbol_counts[segment_idx] << std::endl;
  }

  // Per-section numeric columns. Addresses and path/component indices are
  // delta-encoded on disk (hence the |stored_as_delta| = true).
  std::vector<std::vector<int64_t>> addresses =
      ReadIntListForEachSection<int64_t>(&rest, symbol_counts, true);
  std::vector<std::vector<int32_t>> sizes =
      ReadIntListForEachSection<int32_t>(&rest, symbol_counts, false);
  std::vector<std::vector<int32_t>> paddings;
  if (has_padding) {
    paddings = ReadIntListForEachSection<int32_t>(&rest, symbol_counts, false);
  } else {
    paddings.resize(addresses.size());
  }
  std::vector<std::vector<int32_t>> path_indices =
      ReadIntListForEachSection<int32_t>(&rest, symbol_counts, true);
  std::vector<std::vector<int32_t>> component_indices;
  if (has_components) {
    component_indices =
        ReadIntListForEachSection<int32_t>(&rest, symbol_counts, true);
  } else {
    component_indices.resize(addresses.size());
  }

  info->raw_symbols.reserve(total_symbols);

  // Construct raw symbols.
  for (int segment_idx = 0; segment_idx < n_segments; segment_idx++) {
    const Container* cur_container = segment_containers[segment_idx];
    const char* cur_section_name = segment_section_names[segment_idx];
    caspian::SectionId cur_section_id =
        info->ShortSectionName(cur_section_name);
    const int cur_section_count = symbol_counts[segment_idx];
    const std::vector<int64_t>& cur_addresses = addresses[segment_idx];
    const std::vector<int32_t>& cur_sizes = sizes[segment_idx];
    const std::vector<int32_t>& cur_paddings = paddings[segment_idx];
    const std::vector<int32_t>& cur_path_indices = path_indices[segment_idx];
    const std::vector<int32_t>& cur_component_indices =
        component_indices[segment_idx];
    int32_t alias_counter = 0;
    for (int i = 0; i < cur_section_count; i++) {
      info->raw_symbols.emplace_back();
      caspian::Symbol& new_sym = info->raw_symbols.back();

      int32_t flags = 0;
      int32_t num_aliases = 0;
      char* line = strsep(&rest, "\n");
      if (*line) {
        // Row format: full_name [\t aliases_hex] [\t flags_hex].
        new_sym.full_name_ = strsep(&line, "\t");
        char* first = nullptr;
        char* second = nullptr;
        if (line) {
          first = strsep(&line, "\t");
        }
        if (line) {
          second = strsep(&line, "\t");
        }
        if (second) {
          num_aliases = std::strtol(first, nullptr, 16);
          flags = std::strtol(second, nullptr, 16);
        } else if (first) {
          // A lone field is disambiguated by its leading character:
          // alias counts are written with a leading '0'.
          if (first[0] == '0') {
            // full_name aliases_part
            num_aliases = std::strtol(first, nullptr, 16);
          } else {
            // full_name flags_part
            flags = std::strtol(first, nullptr, 16);
          }
        }
      }

      new_sym.section_id_ = cur_section_id;
      new_sym.address_ = cur_addresses[i];
      new_sym.size_ = cur_sizes[i];
      if (has_padding) {
        new_sym.padding_ = cur_paddings[i];
        if (!new_sym.IsOverhead()) {
          new_sym.size_ += new_sym.padding_;
        }
      }
      new_sym.section_name_ = cur_section_name;
      new_sym.object_path_ = info->object_paths[cur_path_indices[i]];
      new_sym.source_path_ = info->source_paths[cur_path_indices[i]];
      if (has_components) {
        new_sym.component_ = info->components[cur_component_indices[i]];
      }
      new_sym.flags_ = flags;
      new_sym.size_info_ = info;

      // When we encounter a symbol with an alias count, the next N symbols we
      // encounter should be placed in the same symbol group.
      if (num_aliases) {
        assert(alias_counter == 0);
        info->alias_groups.emplace_back();
        alias_counter = num_aliases;
      }
      if (alias_counter > 0) {
        new_sym.aliases_ = &info->alias_groups.back();
        new_sym.aliases_->push_back(&new_sym);
        alias_counter--;
      }

      new_sym.container_ = cur_container;
    }
  }

  info->is_sparse = has_padding;
  if (!has_padding) {
    CalculatePadding(&info->raw_symbols);
  }

  // If there are unparsed non-empty lines, something's gone wrong.
  CheckNoNonEmptyLinesRemain(rest);

  std::cout << "Parsed " << info->raw_symbols.size() << " symbols" << std::endl;
}

// Returns whether |file| looks like a .sizediff payload.
// NOTE(review): this compares only the first 4 bytes ("# Cr") of
// kDiffHeader, which regular .size files also start with — confirm whether
// the full header (including the "DIFF\n" line) was meant to be checked.
bool IsDiffSizeInfo(const char* file, unsigned long len) {
  return !strncmp(file, kDiffHeader, 4);
}

// Splits a .sizediff payload (header + JSON metadata + two concatenated
// gzipped .size blobs) into |before| and |after| SizeInfos.
void ParseDiffSizeInfo(char* file,
                       unsigned long len,
                       SizeInfo* before,
                       SizeInfo* after) {
  // Skip "DIFF" header.
  char* rest = file;
  rest += strlen(kDiffHeader);
  Json::Value fields;
  ReadJsonBlob(&rest, &fields);

  if (fields["version"].asInt() != 1) {
    std::cerr << ".sizediff version mismatch, write some upgrade code. version="
              << fields["version"] << std::endl;
    exit(1);
  }

  // The "before" blob's byte length is recorded in the metadata; the "after"
  // blob is whatever remains.
  unsigned long header_len = rest - file;
  unsigned long before_len = fields["before_length"].asUInt();
  unsigned long after_len = len - header_len - before_len;

  ParseSizeInfo(rest, before_len, before);
  ParseSizeInfo(rest + before_len, after_len, after);
}

} // namespace caspian
6,679
2,092
<reponame>lwhsu/ck /* * Copyright 2018 <NAME>, Google LLC. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. */ /* * Overview * ======== * * ck_ec implements 32- and 64- bit event counts. Event counts let us * easily integrate OS-level blocking (e.g., futexes) in lock-free * protocols. Waiters block conditionally, if the event count's value * is still equal to some old value. * * Event counts come in four variants: 32 and 64 bit (with one bit * stolen for internal signaling, so 31 and 63 bit counters), and * single or multiple producers (wakers). Waiters are always multiple * consumers. The 32 bit variants are smaller, and more efficient, * especially in single producer mode. The 64 bit variants are larger, * but practically invulnerable to ABA. 
* * The 32 bit variant is always available. The 64 bit variant is only * available if CK supports 64-bit atomic operations. Currently, * specialization for single producer is only implemented for x86 and * x86-64, on compilers that support GCC extended inline assembly; * other platforms fall back to the multiple producer code path. * * A typical usage pattern is: * * 1. On the producer side: * * - Make changes to some shared data structure, without involving * the event count at all. * - After each change, call ck_ec_inc on the event count. The call * acts as a write-write barrier, and wakes up any consumer blocked * on the event count (waiting for new changes). * * 2. On the consumer side: * * - Snapshot ck_ec_value of the event count. The call acts as a * read barrier. * - Read and process the shared data structure. * - Wait for new changes by calling ck_ec_wait with the snapshot value. * * Some data structures may opt for tighter integration with their * event count. For example, an SPMC ring buffer or disruptor might * use the event count's value as the write pointer. If the buffer is * regularly full, it might also make sense to store the read pointer * in an MP event count. * * This event count implementation supports tighter integration in two * ways. * * Producers may opt to increment by an arbitrary value (less than * INT32_MAX / INT64_MAX), in order to encode, e.g., byte * offsets. Larger increment values make wraparound more likely, so * the increments should still be relatively small. * * Consumers may pass a predicate to ck_ec_wait_pred. This predicate * can make `ck_ec_wait_pred` return early, before the event count's * value changes, and can override the deadline passed to futex_wait. * This lets consumer block on one eventcount, while optimistically * looking at other waking conditions. 
* * API Reference * ============= * * When compiled as C11 or later, this header defines type-generic * macros for ck_ec32 and ck_ec64; the reference describes this * type-generic API. * * ck_ec needs additional OS primitives to determine the current time, * to wait on an address, and to wake all threads waiting on a given * address. These are defined with fields in a struct ck_ec_ops. Each * ck_ec_ops may additionally define the number of spin loop * iterations in the slow path, as well as the initial wait time in * the internal exponential backoff, the exponential scale factor, and * the right shift count (< 32). * * The ops, in addition to the single/multiple producer flag, are * encapsulated in a struct ck_ec_mode, passed to most ck_ec * operations. * * ec is a struct ck_ec32 *, or a struct ck_ec64 *. * * value is an uint32_t for ck_ec32, and an uint64_t for ck_ec64. It * never exceeds INT32_MAX and INT64_MAX respectively. * * mode is a struct ck_ec_mode *. * * deadline is either NULL, or a `const struct timespec *` that will * be treated as an absolute deadline. * * `void ck_ec_init(ec, value)`: initializes the event count to value. * * `value ck_ec_value(ec)`: returns the current value of the event * counter. This read acts as a read (acquire) barrier. * * `bool ck_ec_has_waiters(ec)`: returns whether some thread has * marked the event count as requiring an OS wakeup. * * `void ck_ec_inc(ec, mode)`: increments the value of the event * counter by one. This writes acts as a write barrier. Wakes up * any waiting thread. * * `value ck_ec_add(ec, mode, value)`: increments the event counter by * `value`, and returns the event counter's previous value. This * write acts as a write barrier. Wakes up any waiting thread. * * `int ck_ec_deadline(struct timespec *new_deadline, * mode, * const struct timespec *timeout)`: * computes a deadline `timeout` away from the current time. If * timeout is NULL, computes a deadline in the infinite future. 
The * resulting deadline is written to `new_deadline`. Returns 0 on * success, and -1 if ops->gettime failed (without touching errno). * * `int ck_ec_wait(ec, mode, value, deadline)`: waits until the event * counter's value differs from `value`, or, if `deadline` is * provided and non-NULL, until the current time is after that * deadline. Use a deadline with tv_sec = 0 for a non-blocking * execution. Returns 0 if the event counter has changed, and -1 on * timeout. This function acts as a read (acquire) barrier. * * `int ck_ec_wait_pred(ec, mode, value, pred, data, deadline)`: waits * until the event counter's value differs from `value`, or until * `pred` returns non-zero, or, if `deadline` is provided and * non-NULL, until the current time is after that deadline. Use a * deadline with tv_sec = 0 for a non-blocking execution. Returns 0 if * the event counter has changed, `pred`'s return value if non-zero, * and -1 on timeout. This function acts as a read (acquire) barrier. * * `pred` is always called as `pred(data, iteration_deadline, now)`, * where `iteration_deadline` is a timespec of the deadline for this * exponential backoff iteration, and `now` is the current time. If * `pred` returns a non-zero value, that value is immediately returned * to the waiter. Otherwise, `pred` is free to modify * `iteration_deadline` (moving it further in the future is a bad * idea). * * Implementation notes * ==================== * * The multiple producer implementation is a regular locked event * count, with a single flag bit to denote the need to wake up waiting * threads. * * The single producer specialization is heavily tied to * [x86-TSO](https://www.cl.cam.ac.uk/~pes20/weakmemory/cacm.pdf), and * to non-atomic read-modify-write instructions (e.g., `inc mem`); * these non-atomic RMW let us write to the same memory locations with * atomic and non-atomic instructions, without suffering from process * scheduling stalls. 
* * The reason we can mix atomic and non-atomic writes to the `counter` * word is that every non-atomic write obviates the need for the * atomically flipped flag bit: we only use non-atomic writes to * update the event count, and the atomic flag only informs the * producer that we would like a futex_wake, because of the update. * We only require the non-atomic RMW counter update to prevent * preemption from introducing arbitrarily long worst case delays. * * Correctness does not rely on the usual ordering argument: in the * absence of fences, there is no strict ordering between atomic and * non-atomic writes. The key is instead x86-TSO's guarantee that a * read is satisfied from the most recent buffered write in the local * store queue if there is one, or from memory if there is no write to * that address in the store queue. * * x86-TSO's constraint on reads suffices to guarantee that the * producer will never forget about a counter update. If the last * update is still queued, the new update will be based on the queued * value. Otherwise, the new update will be based on the value in * memory, which may or may not have had its flag flipped. In either * case, the value of the counter (modulo flag) is correct. * * When the producer forwards the counter's value from its store * queue, the new update might not preserve a flag flip. Any waiter * thus has to check from time to time to determine if it wasn't * woken up because the flag bit was silently cleared. * * In reality, the store queue in x86-TSO stands for in-flight * instructions in the chip's out-of-order backend. In the vast * majority of cases, instructions will only remain in flight for a * few hundred or thousand of cycles. That's why ck_ec_wait spins on * the `counter` word for ~100 iterations after flipping its flag bit: * if the counter hasn't changed after that many iterations, it is * very likely that the producer's next counter update will observe * the flag flip. 
* * That's still not a hard guarantee of correctness. Conservatively, * we can expect that no instruction will remain in flight for more * than 1 second... if only because some interrupt will have forced * the chip to store its architectural state in memory, at which point * an instruction is either fully retired or rolled back. Interrupts, * particularly the pre-emption timer, are why single-producer updates * must happen in a single non-atomic read-modify-write instruction. * Having a single instruction as the critical section means we only * have to consider the worst-case execution time for that * instruction. That's easier than doing the same for a pair of * instructions, which an unlucky pre-emption could delay for * arbitrarily long. * * Thus, after a short spin loop, ck_ec_wait enters an exponential * backoff loop, where each "sleep" is instead a futex_wait. The * backoff is only necessary to handle rare cases where the flag flip * was overwritten after the spin loop. Eventually, more than one * second will have elapsed since the flag flip, and the sleep timeout * becomes infinite: since the flag bit has been set for much longer * than the time for which an instruction may remain in flight, the * flag will definitely be observed at the next counter update. * * The 64 bit ck_ec_wait pulls another trick: futexes only handle 32 * bit ints, so we must treat the 64 bit counter's low 32 bits as an * int in futex_wait. That's a bit dodgy, but fine in practice, given * that the OS's futex code will always read whatever value is * currently in memory: even if the producer thread were to wait on * its own event count, the syscall and ring transition would empty * the store queue (the out-of-order execution backend). * * Finally, what happens when the producer is migrated to another core * or otherwise pre-empted? Migration must already incur a barrier, so * that thread always sees its own writes, so that's safe. 
 * As for pre-emption, that requires storing the architectural state,
 * which means every instruction must either be executed fully or not
 * at all when pre-emption happens.
 */

#ifndef CK_EC_H
#define CK_EC_H
#include <ck_cc.h>
#include <ck_pr.h>
#include <ck_stdbool.h>
#include <ck_stdint.h>
#include <ck_stddef.h>
#include <sys/time.h>

/*
 * If we have ck_pr_faa_64 (and, presumably, ck_pr_load_64), we
 * support 63 bit counters.
 */
#ifdef CK_F_PR_FAA_64
#define CK_F_EC64
#endif /* CK_F_PR_FAA_64 */

/*
 * GCC inline assembly lets us exploit non-atomic read-modify-write
 * instructions on x86/x86_64 for a fast single-producer mode.
 *
 * If we CK_F_EC_SP is not defined, CK_EC always uses the slower
 * multiple producer code.
 */
#if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
#define CK_F_EC_SP
#endif /* GNUC && (__i386__ || __x86_64__) */

struct ck_ec_ops;

struct ck_ec_wait_state {
    struct timespec start;  /* Time when we entered ck_ec_wait. */
    struct timespec now;    /* Time now. */
    const struct ck_ec_ops *ops;
    void *data;  /* Opaque pointer for the predicate's internal state. */
};

/*
 * ck_ec_ops define system-specific functions to get the current time,
 * atomically wait on an address if it still has some expected value,
 * and to wake all threads waiting on an address.
 *
 * Each platform is expected to have few (one) opaque pointer to a
 * const ops struct, and reuse it for all ck_ec_mode structs.
 */
struct ck_ec_ops {
    /* Populates out with the current time. Returns non-zero on failure. */
    int (*gettime)(const struct ck_ec_ops *, struct timespec *out);

    /*
     * Waits on address if its value is still `expected`.  If
     * deadline is non-NULL, stops waiting once that deadline is
     * reached. May return early for any reason.
     */
    void (*wait32)(const struct ck_ec_wait_state *, const uint32_t *,
                   uint32_t expected, const struct timespec *deadline);

    /*
     * Same as wait32, but for a 64 bit counter. Only used if
     * CK_F_EC64 is defined.
     *
     * If underlying blocking primitive only supports 32 bit
     * control words, it should be safe to block on the least
     * significant half of the 64 bit address.
     */
    void (*wait64)(const struct ck_ec_wait_state *, const uint64_t *,
                   uint64_t expected, const struct timespec *deadline);

    /* Wakes up all threads waiting on address. */
    void (*wake32)(const struct ck_ec_ops *, const uint32_t *address);

    /*
     * Same as wake32, but for a 64 bit counter. Only used if
     * CK_F_EC64 is defined.
     *
     * When wait64 truncates the control word at address to `only`
     * consider its least significant half, wake64 should perform
     * any necessary fixup (e.g., on big endian platforms).
     */
    void (*wake64)(const struct ck_ec_ops *, const uint64_t *address);

    /*
     * Number of iterations for the initial busy wait. 0 defaults
     * to 100 (not ABI stable).
     */
    uint32_t busy_loop_iter;

    /*
     * Delay in nanoseconds for the first iteration of the
     * exponential backoff. 0 defaults to 2 ms (not ABI stable).
     */
    uint32_t initial_wait_ns;

    /*
     * Scale factor for the exponential backoff. 0 defaults to 8x
     * (not ABI stable).
     */
    uint32_t wait_scale_factor;

    /*
     * Right shift count for the exponential backoff. The update
     * after each iteration is
     *     wait_ns = (wait_ns * wait_scale_factor) >> wait_shift_count,
     * until one second has elapsed. After that, the deadline goes
     * to infinity.
     */
    uint32_t wait_shift_count;
};

/*
 * ck_ec_mode wraps the ops table, and informs the fast path whether
 * it should attempt to specialize for single producer mode.
 *
 * mode structs are expected to be exposed by value, e.g.,
 *
 *    extern const struct ck_ec_ops system_ec_ops;
 *
 *    static const struct ck_ec_mode ec_sp = {
 *        .ops = &system_ec_ops,
 *        .single_producer = true
 *    };
 *
 *    static const struct ck_ec_mode ec_mp = {
 *        .ops = &system_ec_ops,
 *        .single_producer = false
 *    };
 *
 * ck_ec_mode structs are only passed to inline functions defined in
 * this header, and never escape to their slow paths, so they should
 * not result in any object file size increase.
 */
struct ck_ec_mode {
    const struct ck_ec_ops *ops;
    /*
     * If single_producer is true, the event count has a unique
     * incrementer. The implementation will specialize ck_ec_inc
     * and ck_ec_add if possible (if CK_F_EC_SP is defined).
     */
    bool single_producer;
};

struct ck_ec32 {
    /* Flag is "sign" bit, value in bits 0:30. */
    uint32_t counter;
};

typedef struct ck_ec32 ck_ec32_t;

#ifdef CK_F_EC64
struct ck_ec64 {
    /*
     * Flag is bottom bit, value in bits 1:63. Eventcount only
     * works on x86-64 (i.e., little endian), so the futex int
     * lies in the first 4 (bottom) bytes.
     */
    uint64_t counter;
};

typedef struct ck_ec64 ck_ec64_t;
#endif /* CK_F_EC64 */

#define CK_EC_INITIALIZER { .counter = 0 }

/*
 * Initializes the event count to `value`. The value must not
 * exceed INT32_MAX.
 */
static void ck_ec32_init(struct ck_ec32 *ec, uint32_t value);

#ifndef CK_F_EC64
#define ck_ec_init ck_ec32_init
#else
/*
 * Initializes the event count to `value`. The value must not
 * exceed INT64_MAX.
 */
static void ck_ec64_init(struct ck_ec64 *ec, uint64_t value);

#if __STDC_VERSION__ >= 201112L
/* C11 _Generic dispatch: ck_ec_init works on both widths. */
#define ck_ec_init(EC, VALUE)                           \
    (_Generic(*(EC),                                    \
        struct ck_ec32 : ck_ec32_init,                  \
        struct ck_ec64 : ck_ec64_init)((EC), (VALUE)))
#endif /* __STDC_VERSION__ */
#endif /* CK_F_EC64 */

/*
 * Returns the counter value in the event count. The value is at most
 * INT32_MAX.
 */
static uint32_t ck_ec32_value(const struct ck_ec32* ec);

#ifndef CK_F_EC64
#define ck_ec_value ck_ec32_value
#else
/*
 * Returns the counter value in the event count. The value is at most
 * INT64_MAX.
 */
static uint64_t ck_ec64_value(const struct ck_ec64* ec);

#if __STDC_VERSION__ >= 201112L
#define ck_ec_value(EC)                                 \
    (_Generic(*(EC),                                    \
        struct ck_ec32 : ck_ec32_value,                 \
        struct ck_ec64 : ck_ec64_value)((EC)))
#endif /* __STDC_VERSION__ */
#endif /* CK_F_EC64 */

/*
 * Returns whether there may be slow pathed waiters that need an
 * explicit OS wakeup for this event count.
 */
static bool ck_ec32_has_waiters(const struct ck_ec32 *ec);

#ifndef CK_F_EC64
#define ck_ec_has_waiters ck_ec32_has_waiters
#else
static bool ck_ec64_has_waiters(const struct ck_ec64 *ec);

#if __STDC_VERSION__ >= 201112L
#define ck_ec_has_waiters(EC)                           \
    (_Generic(*(EC),                                    \
        struct ck_ec32 : ck_ec32_has_waiters,           \
        struct ck_ec64 : ck_ec64_has_waiters)((EC)))
#endif /* __STDC_VERSION__ */
#endif /* CK_F_EC64 */

/*
 * Increments the counter value in the event count by one, and wakes
 * up any waiter.
 */
static void ck_ec32_inc(struct ck_ec32 *ec, const struct ck_ec_mode *mode);

#ifndef CK_F_EC64
#define ck_ec_inc ck_ec32_inc
#else
static void ck_ec64_inc(struct ck_ec64 *ec, const struct ck_ec_mode *mode);

#if __STDC_VERSION__ >= 201112L
#define ck_ec_inc(EC, MODE)                             \
    (_Generic(*(EC),                                    \
        struct ck_ec32 : ck_ec32_inc,                   \
        struct ck_ec64 : ck_ec64_inc)((EC), (MODE)))
#endif /* __STDC_VERSION__ */
#endif /* CK_F_EC64 */

/*
 * Increments the counter value in the event count by delta, wakes
 * up any waiter, and returns the previous counter value.
 */
static uint32_t ck_ec32_add(struct ck_ec32 *ec,
                            const struct ck_ec_mode *mode,
                            uint32_t delta);

#ifndef CK_F_EC64
#define ck_ec_add ck_ec32_add
#else
static uint64_t ck_ec64_add(struct ck_ec64 *ec,
                            const struct ck_ec_mode *mode,
                            uint64_t delta);

#if __STDC_VERSION__ >= 201112L
#define ck_ec_add(EC, MODE, DELTA)                      \
    (_Generic(*(EC),                                    \
        struct ck_ec32 : ck_ec32_add,                   \
        struct ck_ec64 : ck_ec64_add)((EC), (MODE), (DELTA)))
#endif /* __STDC_VERSION__ */
#endif /* CK_F_EC64 */

/*
 * Populates `new_deadline` with a deadline `timeout` in the future.
 * Returns 0 on success, and -1 if clock_gettime failed, in which
 * case errno is left as is.
 */
static int ck_ec_deadline(struct timespec *new_deadline,
                          const struct ck_ec_mode *mode,
                          const struct timespec *timeout);

/*
 * Waits until the counter value in the event count differs from
 * old_value, or, if deadline is non-NULL, until CLOCK_MONOTONIC is
 * past the deadline.
 *
 * Returns 0 on success, and -1 on timeout.
 */
static int ck_ec32_wait(struct ck_ec32 *ec,
                        const struct ck_ec_mode *mode,
                        uint32_t old_value,
                        const struct timespec *deadline);

#ifndef CK_F_EC64
#define ck_ec_wait ck_ec32_wait
#else
static int ck_ec64_wait(struct ck_ec64 *ec,
                        const struct ck_ec_mode *mode,
                        uint64_t old_value,
                        const struct timespec *deadline);

#if __STDC_VERSION__ >= 201112L
#define ck_ec_wait(EC, MODE, OLD_VALUE, DEADLINE)       \
    (_Generic(*(EC),                                    \
        struct ck_ec32 : ck_ec32_wait,                  \
        struct ck_ec64 : ck_ec64_wait)((EC), (MODE),    \
            (OLD_VALUE), (DEADLINE)))
#endif /* __STDC_VERSION__ */
#endif /* CK_F_EC64 */

/*
 * Waits until the counter value in the event count differs from
 * old_value, pred returns non-zero, or, if deadline is non-NULL,
 * until CLOCK_MONOTONIC is past the deadline.
 *
 * Returns 0 on success, -1 on timeout, and the return value of pred
 * if it returns non-zero.
 *
 * A NULL pred represents a function that always returns 0.
 */
static int ck_ec32_wait_pred(struct ck_ec32 *ec,
                             const struct ck_ec_mode *mode,
                             uint32_t old_value,
                             int (*pred)(const struct ck_ec_wait_state *,
                                         struct timespec *deadline),
                             void *data,
                             const struct timespec *deadline);

#ifndef CK_F_EC64
#define ck_ec_wait_pred ck_ec32_wait_pred
#else
static int ck_ec64_wait_pred(struct ck_ec64 *ec,
                             const struct ck_ec_mode *mode,
                             uint64_t old_value,
                             int (*pred)(const struct ck_ec_wait_state *,
                                         struct timespec *deadline),
                             void *data,
                             const struct timespec *deadline);

#if __STDC_VERSION__ >= 201112L
#define ck_ec_wait_pred(EC, MODE, OLD_VALUE, PRED, DATA, DEADLINE) \
    (_Generic(*(EC),                                    \
        struct ck_ec32 : ck_ec32_wait_pred,             \
        struct ck_ec64 : ck_ec64_wait_pred)             \
        ((EC), (MODE), (OLD_VALUE), (PRED), (DATA), (DEADLINE)))
#endif /* __STDC_VERSION__ */
#endif /* CK_F_EC64 */

/*
 * Inline implementation details. 32 bit first, then 64 bit
 * conditionally.
 */
CK_CC_FORCE_INLINE void ck_ec32_init(struct ck_ec32 *ec, uint32_t value)
{
    /* Mask off the flag ("sign") bit; only bits 0:30 carry the count. */
    ec->counter = value & ~(1UL << 31);
    return;
}

CK_CC_FORCE_INLINE uint32_t ck_ec32_value(const struct ck_ec32 *ec)
{
    uint32_t ret = ck_pr_load_32(&ec->counter) & ~(1UL << 31);

    ck_pr_fence_acquire();
    return ret;
}

CK_CC_FORCE_INLINE bool ck_ec32_has_waiters(const struct ck_ec32 *ec)
{
    return ck_pr_load_32(&ec->counter) & (1UL << 31);
}

/* Slow path for ck_ec{32,64}_{inc,add} */
void ck_ec32_wake(struct ck_ec32 *ec, const struct ck_ec_ops *ops);

CK_CC_FORCE_INLINE void ck_ec32_inc(struct ck_ec32 *ec,
                                    const struct ck_ec_mode *mode)
{
#if !defined(CK_F_EC_SP)
    /* Nothing to specialize if we don't have EC_SP. */
    ck_ec32_add(ec, mode, 1);
    return;
#else
    char flagged;

#if __GNUC__ >= 6
    /*
     * We don't want to wake if the sign bit is 0. We do want to
     * wake if the sign bit just flipped from 1 to 0. We don't
     * care what happens when our increment caused the sign bit to
     * flip from 0 to 1 (that's once per 2^31 increment).
     *
     * This leaves us with four cases:
     *
     *  old sign bit | new sign bit | SF | OF | ZF
     *  -------------------------------------------
     *       0       |      0       |  0 |  0 | ?
     *       0       |      1       |  1 |  0 | ?
     *       1       |      1       |  1 |  0 | ?
     *       1       |      0       |  0 |  0 | 1
     *
     * In the first case, we don't want to hit ck_ec32_wake. In
     * the last two cases, we do want to call ck_ec32_wake. In the
     * second case, we don't care, so we arbitrarily choose to
     * call ck_ec32_wake.
     *
     * The "le" condition checks if SF != OF, or ZF == 1, which
     * meets our requirements.
     */
#define CK_EC32_INC_ASM(PREFIX)                                 \
    __asm__ volatile(PREFIX " incl %0"                          \
                     : "+m"(ec->counter), "=@ccle"(flagged)     \
                     :: "cc", "memory")
#else
#define CK_EC32_INC_ASM(PREFIX)                                 \
    __asm__ volatile(PREFIX " incl %0; setle %1"                \
                     : "+m"(ec->counter), "=r"(flagged)         \
                     :: "cc", "memory")
#endif /* __GNUC__ */

    if (mode->single_producer == true) {
        ck_pr_fence_store();
        CK_EC32_INC_ASM("");
    } else {
        ck_pr_fence_store_atomic();
        CK_EC32_INC_ASM("lock");
    }
#undef CK_EC32_INC_ASM

    if (CK_CC_UNLIKELY(flagged)) {
        ck_ec32_wake(ec, mode->ops);
    }

    return;
#endif /* CK_F_EC_SP */
}

CK_CC_FORCE_INLINE uint32_t ck_ec32_add_epilogue(struct ck_ec32 *ec,
                                                 const struct ck_ec_mode *mode,
                                                 uint32_t old)
{
    const uint32_t flag_mask = 1U << 31;
    uint32_t ret;

    ret = old & ~flag_mask;
    /* These two only differ if the flag bit is set. */
    if (CK_CC_UNLIKELY(old != ret)) {
        ck_ec32_wake(ec, mode->ops);
    }

    return ret;
}

static CK_CC_INLINE uint32_t ck_ec32_add_mp(struct ck_ec32 *ec,
                                            const struct ck_ec_mode *mode,
                                            uint32_t delta)
{
    uint32_t old;

    ck_pr_fence_store_atomic();
    old = ck_pr_faa_32(&ec->counter, delta);
    return ck_ec32_add_epilogue(ec, mode, old);
}

#ifdef CK_F_EC_SP
static CK_CC_INLINE uint32_t ck_ec32_add_sp(struct ck_ec32 *ec,
                                            const struct ck_ec_mode *mode,
                                            uint32_t delta)
{
    uint32_t old;

    /*
     * Correctness of this racy write depends on actually
     * having an update to write. Exit here if the update
     * is a no-op.
     */
    if (CK_CC_UNLIKELY(delta == 0)) {
        return ck_ec32_value(ec);
    }

    ck_pr_fence_store();
    old = delta;
    /* Non-atomic xadd: safe only in single-producer mode (see header). */
    __asm__ volatile("xaddl %1, %0"
                     : "+m"(ec->counter), "+r"(old)
                     :: "cc", "memory");
    return ck_ec32_add_epilogue(ec, mode, old);
}
#endif /* CK_F_EC_SP */

CK_CC_FORCE_INLINE uint32_t ck_ec32_add(struct ck_ec32 *ec,
                                        const struct ck_ec_mode *mode,
                                        uint32_t delta)
{
#ifdef CK_F_EC_SP
    if (mode->single_producer == true) {
        return ck_ec32_add_sp(ec, mode, delta);
    }
#endif

    return ck_ec32_add_mp(ec, mode, delta);
}

int ck_ec_deadline_impl(struct timespec *new_deadline,
                        const struct ck_ec_ops *ops,
                        const struct timespec *timeout);

CK_CC_FORCE_INLINE int ck_ec_deadline(struct timespec *new_deadline,
                                      const struct ck_ec_mode *mode,
                                      const struct timespec *timeout)
{
    return ck_ec_deadline_impl(new_deadline, mode->ops, timeout);
}

int ck_ec32_wait_slow(struct ck_ec32 *ec,
                      const struct ck_ec_ops *ops,
                      uint32_t old_value,
                      const struct timespec *deadline);

CK_CC_FORCE_INLINE int ck_ec32_wait(struct ck_ec32 *ec,
                                    const struct ck_ec_mode *mode,
                                    uint32_t old_value,
                                    const struct timespec *deadline)
{
    /* Fast path: the counter already moved, no need to block. */
    if (ck_ec32_value(ec) != old_value) {
        return 0;
    }

    return ck_ec32_wait_slow(ec, mode->ops, old_value, deadline);
}

int ck_ec32_wait_pred_slow(struct ck_ec32 *ec,
                           const struct ck_ec_ops *ops,
                           uint32_t old_value,
                           int (*pred)(const struct ck_ec_wait_state *state,
                                       struct timespec *deadline),
                           void *data,
                           const struct timespec *deadline);

CK_CC_FORCE_INLINE int
ck_ec32_wait_pred(struct ck_ec32 *ec,
                  const struct ck_ec_mode *mode,
                  uint32_t old_value,
                  int (*pred)(const struct ck_ec_wait_state *state,
                              struct timespec *deadline),
                  void *data,
                  const struct timespec *deadline)
{
    if (ck_ec32_value(ec) != old_value) {
        return 0;
    }

    return ck_ec32_wait_pred_slow(ec, mode->ops, old_value,
                                  pred, data, deadline);
}

#ifdef CK_F_EC64
CK_CC_FORCE_INLINE void ck_ec64_init(struct ck_ec64 *ec, uint64_t value)
{
    /* Shift left: bit 0 is the waiters flag, bits 1:63 the count. */
    ec->counter = value << 1;
    return;
}

CK_CC_FORCE_INLINE uint64_t ck_ec64_value(const struct ck_ec64 *ec)
{
    uint64_t ret = ck_pr_load_64(&ec->counter) >> 1;

    ck_pr_fence_acquire();
    return ret;
}

CK_CC_FORCE_INLINE bool ck_ec64_has_waiters(const struct ck_ec64 *ec)
{
    return ck_pr_load_64(&ec->counter) & 1;
}

void ck_ec64_wake(struct ck_ec64 *ec, const struct ck_ec_ops *ops);

CK_CC_FORCE_INLINE void ck_ec64_inc(struct ck_ec64 *ec,
                                    const struct ck_ec_mode *mode)
{
    /* We always xadd, so there's no special optimization here. */
    (void)ck_ec64_add(ec, mode, 1);
    return;
}

CK_CC_FORCE_INLINE uint64_t ck_ec_add64_epilogue(struct ck_ec64 *ec,
                                                 const struct ck_ec_mode *mode,
                                                 uint64_t old)
{
    uint64_t ret = old >> 1;

    if (CK_CC_UNLIKELY(old & 1)) {
        ck_ec64_wake(ec, mode->ops);
    }

    return ret;
}

static CK_CC_INLINE uint64_t ck_ec64_add_mp(struct ck_ec64 *ec,
                                            const struct ck_ec_mode *mode,
                                            uint64_t delta)
{
    uint64_t inc = 2 * delta;  /* The low bit is the flag bit. */

    ck_pr_fence_store_atomic();
    return ck_ec_add64_epilogue(ec, mode, ck_pr_faa_64(&ec->counter, inc));
}

#ifdef CK_F_EC_SP
/* Single-producer specialisation. */
static CK_CC_INLINE uint64_t ck_ec64_add_sp(struct ck_ec64 *ec,
                                            const struct ck_ec_mode *mode,
                                            uint64_t delta)
{
    uint64_t old;

    /*
     * Correctness of this racy write depends on actually
     * having an update to write. Exit here if the update
     * is a no-op.
     */
    if (CK_CC_UNLIKELY(delta == 0)) {
        return ck_ec64_value(ec);
    }

    ck_pr_fence_store();
    old = 2 * delta;  /* The low bit is the flag bit. */
    __asm__ volatile("xaddq %1, %0"
                     : "+m"(ec->counter), "+r"(old)
                     :: "cc", "memory");
    return ck_ec_add64_epilogue(ec, mode, old);
}
#endif /* CK_F_EC_SP */

/*
 * Dispatch on mode->single_producer in this FORCE_INLINE function:
 * the end result is always small, but not all compilers have enough
 * foresight to inline and get the reduction.
 */
CK_CC_FORCE_INLINE uint64_t ck_ec64_add(struct ck_ec64 *ec,
                                        const struct ck_ec_mode *mode,
                                        uint64_t delta)
{
#ifdef CK_F_EC_SP
    if (mode->single_producer == true) {
        return ck_ec64_add_sp(ec, mode, delta);
    }
#endif

    return ck_ec64_add_mp(ec, mode, delta);
}

int ck_ec64_wait_slow(struct ck_ec64 *ec,
                      const struct ck_ec_ops *ops,
                      uint64_t old_value,
                      const struct timespec *deadline);

CK_CC_FORCE_INLINE int ck_ec64_wait(struct ck_ec64 *ec,
                                    const struct ck_ec_mode *mode,
                                    uint64_t old_value,
                                    const struct timespec *deadline)
{
    if (ck_ec64_value(ec) != old_value) {
        return 0;
    }

    return ck_ec64_wait_slow(ec, mode->ops, old_value, deadline);
}

int ck_ec64_wait_pred_slow(struct ck_ec64 *ec,
                           const struct ck_ec_ops *ops,
                           uint64_t old_value,
                           int (*pred)(const struct ck_ec_wait_state *state,
                                       struct timespec *deadline),
                           void *data,
                           const struct timespec *deadline);

CK_CC_FORCE_INLINE int
ck_ec64_wait_pred(struct ck_ec64 *ec,
                  const struct ck_ec_mode *mode,
                  uint64_t old_value,
                  int (*pred)(const struct ck_ec_wait_state *state,
                              struct timespec *deadline),
                  void *data,
                  const struct timespec *deadline)
{
    if (ck_ec64_value(ec) != old_value) {
        return 0;
    }

    return ck_ec64_wait_pred_slow(ec, mode->ops, old_value,
                                  pred, data, deadline);
}
#endif /* CK_F_EC64 */
#endif /* !CK_EC_H */
11,199
577
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
=============
TAP plus
=============

@author: <NAME>
@contact: <EMAIL>

European Space Astronomy Centre (ESAC)
European Space Agency (ESA)

Created on 30 jun. 2016
"""

# tkinter may be unavailable in minimal installs; fall back to None markers
# so show_login() can report the problem instead of crashing at import time.
try:
    from tkinter import Tk as TKTk
except ImportError:
    TKTk = None

try:
    from tkinter import Toplevel as TKToplevel
except ImportError:
    TKToplevel = None

try:
    from tkinter import Button as TKButton
except ImportError:
    TKButton = None

try:
    from tkinter import Label as TKLabel
except ImportError:
    TKLabel = None

try:
    from tkinter import Entry as TKEntry
except ImportError:
    TKEntry = None


class LoginDialog:
    """Modal tkinter user/password dialog for logging in to a TAP host.

    After ``show_login()`` returns, use ``is_accepted()``, ``get_user()``
    and ``get_password()`` to retrieve the outcome.
    """

    def __init__(self, host):
        """Build the dialog for ``host`` (shown as a label in the window)."""
        self.__interna_init()
        self.__host = host
        self.__initialized = False
        if TKTk is not None:
            self.__create_content()
            self.__initialized = True

    def __interna_init(self):
        # Reset widget handles and result fields to a known empty state.
        self.__rootFrame = None
        self.__top = None
        self.__usrEntry = None
        self.__pwdEntry = None
        self.__accepted = False
        self.__host = None
        self.__usr = None
        self.__pwd = None

    def __cancel_action(self):
        # Cancel button: discard input and close the dialog.
        self.__accepted = False
        self.__rootFrame.destroy()

    def __login_action(self):
        # Login button: capture the entries, mark accepted, close the dialog.
        self.__accepted = True
        self.__usr = self.__usrEntry.get()
        self.__pwd = self.__pwdEntry.get()
        self.__rootFrame.destroy()

    def __enter_action(self, event):
        # Bound to <Return>: pressing Enter is equivalent to clicking Login.
        self.__login_action()

    def __create_content(self):
        # Hidden root window with a Toplevel holding the actual form.
        self.__rootFrame = TKTk()
        self.__rootFrame.withdraw()
        self.__top = TKToplevel(self.__rootFrame)
        self.__top.title("Login")
        self.__top.protocol("WM_DELETE_WINDOW", self.__rootFrame.destroy)
        self.__top.bind('<Return>', self.__enter_action)
        self.__top.update_idletasks()
        # Center the dialog on the screen.
        width = self.__top.winfo_width()
        height = self.__top.winfo_height()
        x = (self.__top.winfo_screenwidth() // 2) - (width // 2)
        y = (self.__top.winfo_screenheight() // 2) - (height // 2)
        self.__top.geometry(f"+{x}+{y}")
        row = 0
        expLabel = TKLabel(self.__top, text='Login to host:')
        expLabel.grid(row=row, column=0, columnspan=4, padx=5, pady=2)
        row = row + 1
        urlLabel = TKLabel(self.__top, text=self.__host)
        urlLabel.grid(row=row, column=0, columnspan=4, padx=5, pady=2)
        row = row + 1
        usrLabel = TKLabel(self.__top, text='User')
        usrLabel.grid(row=row, column=0, columnspan=2, padx=20, pady=5)
        self.__usrEntry = TKEntry(self.__top, width=20)
        self.__usrEntry.grid(row=row, column=2, columnspan=2, padx=5, pady=5)
        row = row + 1
        pwdLabel = TKLabel(self.__top, text='Password')
        pwdLabel.grid(row=row, column=0, columnspan=2, padx=20, pady=5)
        self.__pwdEntry = TKEntry(self.__top, width=20, show="*")
        self.__pwdEntry.grid(row=row, column=2, columnspan=2, padx=5, pady=5)
        row = row + 1
        cancelButton = TKButton(self.__top, text='Cancel',
                                command=self.__cancel_action)
        cancelButton.grid(row=row, column=1, padx=5, pady=5)
        loginButton = TKButton(self.__top, text='Login',
                               command=self.__login_action)
        loginButton.grid(row=row, column=2, padx=5, pady=5)

    def show_login(self):
        """Run the dialog main loop, or print a hint if tkinter is missing."""
        if self.__initialized:
            self.__usrEntry.focus_set()
            self.__rootFrame.mainloop()
        else:
            print("tkinter python module is not available.\n\
Please, install tkinter module or use command line login utility.")

    def is_accepted(self):
        """Return True if the user clicked Login (or pressed Enter)."""
        return self.__accepted

    def get_user(self):
        """Return the user name entered, or None if not accepted."""
        return self.__usr

    def get_password(self):
        """Return the password entered, or None if not accepted."""
        # Fixed: this previously returned a corrupted attribute name
        # (`self.__<PASSWORD>`), which is a SyntaxError; the password is
        # stored in `self.__pwd` by __login_action().
        return self.__pwd
454
package io.vertx.up.uca.micro.ipc.tower;

/**
 * Package-internal message-template constants for the tower flow components.
 */
interface Info {

    /**
     * Log template; {0} is the current flow, {1} is its return type.
     */
    String MSG_FLOW = "Current flow is {0}, return type = {1}";
}
49
1,307
"""Minimal aiopg.sa example: create a table, insert a row, read it back.

Fixed: a stray ``<gh_stars>1000+`` artifact prefixed the first import and
made the file a SyntaxError; it has been removed.
"""
import asyncio

import sqlalchemy as sa
from aiopg.sa import create_engine

metadata = sa.MetaData()

# Example table: auto-incrementing id plus a short string payload.
tbl = sa.Table(
    "tbl",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("val", sa.String(255)),
)


async def create_table(conn):
    """(Re)create the example table, dropping any previous version."""
    await conn.execute("DROP TABLE IF EXISTS tbl")
    await conn.execute(
        """CREATE TABLE tbl (
               id serial PRIMARY KEY,
               val varchar(255))"""
    )


async def go():
    """Connect, rebuild the table, insert one row, and print all rows."""
    async with create_engine(
        user="aiopg", database="aiopg", host="127.0.0.1", password="<PASSWORD>"
    ) as engine:
        async with engine.acquire() as conn:
            await create_table(conn)
        async with engine.acquire() as conn:
            await conn.execute(tbl.insert().values(val="abc"))
            async for row in conn.execute(tbl.select()):
                print(row.id, row.val)


asyncio.run(go())
391
14,668
<filename>extensions/renderer/set_icon_natives.cc // Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "extensions/renderer/set_icon_natives.h" #include <stddef.h> #include <stdint.h> #include <limits> #include <memory> #include "base/bind.h" #include "base/strings/string_number_conversions.h" #include "extensions/renderer/script_context.h" #include "gin/data_object_builder.h" #include "skia/public/mojom/bitmap.mojom.h" #include "third_party/blink/public/web/web_array_buffer_converter.h" #include "third_party/skia/include/core/SkBitmap.h" #include "v8/include/v8-context.h" #include "v8/include/v8-exception.h" #include "v8/include/v8-function-callback.h" #include "v8/include/v8-isolate.h" #include "v8/include/v8-object.h" #include "v8/include/v8-primitive.h" // TODO(devlin): Looks like there are lots of opportunities to use gin helpers // like gin::Dictionary and gin::DataObjectBuilder here. 
namespace {

// Error strings surfaced to extension JS via thrown V8 exceptions.
const char kInvalidDimensions[] = "ImageData has invalid dimensions.";
const char kInvalidData[] = "ImageData data length does not match dimensions.";
const char kNoMemory[] = "Chrome was unable to initialize icon.";

// Throws a JS Error with |error_message| on |isolate|.
void ThrowException(v8::Isolate* isolate, const char* error_message) {
  isolate->ThrowException(v8::Exception::Error(
      v8::String::NewFromUtf8(isolate, error_message,
                              v8::NewStringType::kInternalized)
          .ToLocalChecked()));
}

// Reads |property_name| from |v8_object| as an int32; returns 0 when the
// property is missing, the Get() throws, or the value is not numeric.
int GetIntPropertyFromV8Object(v8::Local<v8::Object> v8_object,
                               v8::Local<v8::Context> v8_context,
                               const char* property_name) {
  v8::Local<v8::Value> v8_property_value;
  if (!v8_object
           ->Get(v8_context,
                 v8::String::NewFromUtf8(v8_context->GetIsolate(),
                                         property_name,
                                         v8::NewStringType::kInternalized)
                     .ToLocalChecked())
           .ToLocal(&v8_property_value)) {
    return 0;
  }
  return v8_property_value->Int32Value(v8_context).FromMaybe(0);
}

// Indexed-element overload of the above (used for ImageData byte arrays).
int GetIntPropertyFromV8Object(v8::Local<v8::Object> v8_object,
                               v8::Local<v8::Context> v8_context,
                               int index) {
  v8::Local<v8::Value> v8_property_value;
  if (!v8_object
           ->Get(v8_context, v8::Integer::New(v8_context->GetIsolate(), index))
           .ToLocal(&v8_property_value)) {
    return 0;
  }
  return v8_property_value->Int32Value(v8_context).FromMaybe(0);
}

}  // namespace

namespace extensions {

SetIconNatives::SetIconNatives(ScriptContext* context)
    : ObjectBackedNativeHandler(context) {}

// Exposes the two native functions to the bindings JS.
void SetIconNatives::AddRoutes() {
  RouteHandlerFunction("IsInServiceWorker",
                       base::BindRepeating(&SetIconNatives::IsInServiceWorker,
                                           base::Unretained(this)));
  RouteHandlerFunction("SetIconCommon",
                       base::BindRepeating(&SetIconNatives::SetIconCommon,
                                           base::Unretained(this)));
}

// Converts a single ImageData object into a serialized SkBitmap wrapped in a
// JS ArrayBuffer.  Validates dimensions and data length, throwing a JS error
// and returning false on any failure.
bool SetIconNatives::ConvertImageDataToBitmapValue(
    const v8::Local<v8::Object> image_data,
    v8::Local<v8::Value>* image_data_bitmap) {
  v8::Local<v8::Context> v8_context = context()->v8_context();
  v8::Isolate* isolate = v8_context->GetIsolate();
  v8::Local<v8::Value> value;
  if (!image_data
           ->Get(v8_context,
                 v8::String::NewFromUtf8(isolate, "data",
                                         v8::NewStringType::kInternalized)
                     .ToLocalChecked())
           .ToLocal(&value)) {
    ThrowException(isolate, kInvalidData);
    return false;
  }

  v8::Local<v8::Object> data;
  if (!value->ToObject(v8_context).ToLocal(&data)) {
    ThrowException(isolate, kInvalidData);
    return false;
  }

  int width = GetIntPropertyFromV8Object(image_data, v8_context, "width");
  int height = GetIntPropertyFromV8Object(image_data, v8_context, "height");
  if (width <= 0 || height <= 0) {
    ThrowException(isolate, kInvalidDimensions);
    return false;
  }

  // We need to be able to safely check |data_length| == 4 * width * height
  // without overflowing below.
  int max_width = (std::numeric_limits<int>::max() / 4) / height;
  if (width > max_width) {
    ThrowException(isolate, kInvalidDimensions);
    return false;
  }

  int data_length = GetIntPropertyFromV8Object(data, v8_context, "length");
  if (data_length != 4 * width * height) {
    ThrowException(isolate, kInvalidData);
    return false;
  }

  SkBitmap bitmap;
  if (!bitmap.tryAllocN32Pixels(width, height)) {
    ThrowException(isolate, kNoMemory);
    return false;
  }
  bitmap.eraseARGB(0, 0, 0, 0);

  uint32_t* pixels = bitmap.getAddr32(0, 0);
  for (int t = 0; t < width * height; t++) {
    // |data| is RGBA, pixels is ARGB.
    pixels[t] = SkPreMultiplyColor(
        ((GetIntPropertyFromV8Object(data, v8_context, 4 * t + 3) & 0xFF)
         << 24) |
        ((GetIntPropertyFromV8Object(data, v8_context, 4 * t + 0) & 0xFF)
         << 16) |
        ((GetIntPropertyFromV8Object(data, v8_context, 4 * t + 1) & 0xFF)
         << 8) |
        ((GetIntPropertyFromV8Object(data, v8_context, 4 * t + 2) & 0xFF)
         << 0));
  }

  // Construct the Value object.
  std::vector<uint8_t> s = skia::mojom::InlineBitmap::Serialize(&bitmap);
  blink::WebArrayBuffer buffer = blink::WebArrayBuffer::Create(s.size(), 1);
  memcpy(buffer.Data(), s.data(), s.size());
  *image_data_bitmap = blink::WebArrayBufferConverter::ToV8Value(
      &buffer, context()->v8_context()->Global(), isolate);

  return true;
}

// Converts the "imageData" dictionary on |details| (integer-size keys ->
// ImageData) into a parallel dictionary of serialized bitmaps in
// |*bitmap_set_value|.  Non-integer keys are skipped.
bool SetIconNatives::ConvertImageDataSetToBitmapValueSet(
    v8::Local<v8::Object>& details,
    v8::Local<v8::Object>* bitmap_set_value) {
  v8::Local<v8::Context> v8_context = context()->v8_context();
  v8::Isolate* isolate = v8_context->GetIsolate();
  v8::Local<v8::Value> v8_value;
  if (!details
           ->Get(v8_context,
                 v8::String::NewFromUtf8(isolate, "imageData",
                                         v8::NewStringType::kInternalized)
                     .ToLocalChecked())
           .ToLocal(&v8_value)) {
    return false;
  }
  v8::Local<v8::Object> image_data_set;
  if (!v8_value->ToObject(v8_context).ToLocal(&image_data_set)) {
    return false;
  }

  DCHECK(bitmap_set_value);

  v8::Local<v8::Array> property_names(
      image_data_set->GetOwnPropertyNames(v8_context)
          .FromMaybe(v8::Local<v8::Array>()));
  for (size_t i = 0; i < property_names->Length(); ++i) {
    v8::Local<v8::Value> key =
        property_names->Get(v8_context, i).ToLocalChecked();
    v8::String::Utf8Value utf8_key(isolate, key);
    int size;
    if (!base::StringToInt(std::string(*utf8_key), &size))
      continue;
    v8::Local<v8::Value> v8_image_value;
    if (!image_data_set->Get(v8_context, key).ToLocal(&v8_image_value)) {
      return false;
    }
    v8::Local<v8::Object> image_data;
    if (!v8_image_value->ToObject(v8_context).ToLocal(&image_data)) {
      return false;
    }
    v8::Local<v8::Value> image_data_bitmap;
    if (!ConvertImageDataToBitmapValue(image_data, &image_data_bitmap))
      return false;
    (*bitmap_set_value)
        ->Set(v8_context, key, image_data_bitmap)
        .FromMaybe(false);
  }
  return true;
}

// Returns whether the calling script context belongs to a service worker.
void SetIconNatives::IsInServiceWorker(
    const v8::FunctionCallbackInfo<v8::Value>& args) {
  CHECK_EQ(0, args.Length());
  const bool is_in_service_worker = context()->IsForServiceWorker();
  args.GetReturnValue().Set(
      v8::Boolean::New(args.GetIsolate(), is_in_service_worker));
}

// Native entry point: converts the setIcon() details object into a dict of
// serialized bitmaps (plus optional tabId) and returns it to JS.
void SetIconNatives::SetIconCommon(
    const v8::FunctionCallbackInfo<v8::Value>& args) {
  CHECK_EQ(1, args.Length());
  CHECK(args[0]->IsObject());
  v8::Local<v8::Context> v8_context = context()->v8_context();
  v8::Isolate* isolate = args.GetIsolate();
  v8::Local<v8::Object> details = args[0].As<v8::Object>();
  v8::Local<v8::Object> bitmap_set_value(v8::Object::New(isolate));

  auto set_null_prototype = [v8_context, isolate](v8::Local<v8::Object> obj) {
    // Avoid any pesky Object.prototype manipulation.
    bool succeeded =
        obj->SetPrototype(v8_context, v8::Null(isolate)).ToChecked();
    CHECK(succeeded);
  };
  set_null_prototype(bitmap_set_value);

  if (!ConvertImageDataSetToBitmapValueSet(details, &bitmap_set_value))
    return;

  gin::DataObjectBuilder dict_builder(isolate);
  dict_builder.Set("imageData", bitmap_set_value);

  v8::Local<v8::String> tab_id_key =
      v8::String::NewFromUtf8(isolate, "tabId",
                              v8::NewStringType::kInternalized)
          .ToLocalChecked();
  bool has_tab_id = false;
  if (!details->HasOwnProperty(v8_context, tab_id_key).To(&has_tab_id))
    return;  // HasOwnProperty() threw - bail.

  if (has_tab_id) {
    v8::Local<v8::Value> tab_id;
    if (!details->Get(v8_context, tab_id_key).ToLocal(&tab_id)) {
      return;  // Get() threw - bail.
    }
    dict_builder.Set("tabId", tab_id);
  }
  v8::Local<v8::Object> dict = dict_builder.Build();
  set_null_prototype(dict);
  args.GetReturnValue().Set(dict);
}

}  // namespace extensions
316
<filename>vigranumpy/src/core/blockwise.cxx<gh_stars>100-1000 /************************************************************************/ /* */ /* Copyright 2011 by <NAME> */ /* */ /* This file is part of the VIGRA computer vision library. */ /* The VIGRA Website is */ /* http://hci.iwr.uni-heidelberg.de/vigra/ */ /* Please direct questions, bug reports, and contributions to */ /* <EMAIL> or */ /* <EMAIL> */ /* */ /* Permission is hereby granted, free of charge, to any person */ /* obtaining a copy of this software and associated documentation */ /* files (the "Software"), to deal in the Software without */ /* restriction, including without limitation the rights to use, */ /* copy, modify, merge, publish, distribute, sublicense, and/or */ /* sell copies of the Software, and to permit persons to whom the */ /* Software is furnished to do so, subject to the following */ /* conditions: */ /* */ /* The above copyright notice and this permission notice shall be */ /* included in all copies or substantial portions of the */ /* Software. */ /* */ /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND */ /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES */ /* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND */ /* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT */ /* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, */ /* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING */ /* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR */ /* OTHER DEALINGS IN THE SOFTWARE. 
 */
/*                                                                      */
/************************************************************************/

#define PY_ARRAY_UNIQUE_SYMBOL vigranumpyblockwise_PyArray_API
//#define NO_IMPORT_ARRAY
#include <vigra/numpy_array.hxx>
#include <vigra/numpy_array_converters.hxx>
#include <vigra/multi_blocking.hxx>
#include <vigra/multi_blockwise.hxx>

namespace python = boost::python;

namespace vigra{

// Blockwise Gaussian smoothing; allocates `dest` to match `source` if empty.
template<unsigned int DIM, class T_IN, class T_OUT>
NumpyAnyArray pyBlockwiseGaussianSmoothMultiArray(
    const NumpyArray<DIM, T_IN> & source,
    const BlockwiseConvolutionOptions<DIM> & opt,
    NumpyArray<DIM, T_OUT> dest
){
    dest.reshapeIfEmpty(source.taggedShape());
    gaussianSmoothMultiArray(source, dest, opt);
    return dest;
}

// Blockwise Gaussian gradient magnitude (scalar output).
template<unsigned int DIM, class T_IN, class T_OUT>
NumpyAnyArray pyBlockwiseGaussianGradientMagnitudeMultiArray(
    const NumpyArray<DIM, T_IN> & source,
    const BlockwiseConvolutionOptions<DIM> & opt,
    NumpyArray<DIM, T_OUT> dest
){
    dest.reshapeIfEmpty(source.taggedShape());
    gaussianGradientMagnitudeMultiArray(source, dest, opt);
    return dest;
}

// Blockwise Gaussian gradient (vector-valued output, one component per axis).
template<unsigned int DIM, class T_IN, class T_OUT>
NumpyAnyArray pyBlockwiseGaussianGradientMultiArray(
    const NumpyArray<DIM, T_IN> & source,
    const BlockwiseConvolutionOptions<DIM> & opt,
    NumpyArray<DIM, T_OUT> dest
){
    dest.reshapeIfEmpty(source.taggedShape());
    gaussianGradientMultiArray(source, dest, opt);
    return dest;
}

// Blockwise eigenvalues of the Hessian of Gaussian (all DIM eigenvalues).
template<unsigned int DIM, class T_IN, class T_OUT>
NumpyAnyArray pyBlockwiseHessianOfGaussianEigenvaluesMultiArray(
    const NumpyArray<DIM, T_IN> & source,
    const BlockwiseConvolutionOptions<DIM> & opt,
    NumpyArray<DIM, T_OUT> dest
){
    dest.reshapeIfEmpty(source.taggedShape());
    hessianOfGaussianEigenvaluesMultiArray(source, dest, opt);
    return dest;
}

// Blockwise first (largest) Hessian-of-Gaussian eigenvalue.
template<unsigned int DIM, class T_IN, class T_OUT>
NumpyAnyArray pyBlockwiseHessianOfGaussianFirstEigenvalueMultiArray(
    const NumpyArray<DIM, T_IN> & source,
    const BlockwiseConvolutionOptions<DIM> & opt,
    NumpyArray<DIM, T_OUT> dest
){
    dest.reshapeIfEmpty(source.taggedShape());
    hessianOfGaussianFirstEigenvalueMultiArray(source, dest, opt);
    return dest;
}

// Blockwise last (smallest) Hessian-of-Gaussian eigenvalue.
template<unsigned int DIM, class T_IN, class T_OUT>
NumpyAnyArray pyBlockwiseHessianOfGaussianLastEigenvalueMultiArray(
    const NumpyArray<DIM, T_IN> & source,
    const BlockwiseConvolutionOptions<DIM> & opt,
    NumpyArray<DIM, T_OUT> dest
){
    dest.reshapeIfEmpty(source.taggedShape());
    hessianOfGaussianLastEigenvalueMultiArray(source, dest, opt);
    return dest;
}

// Registers the blockwise filter wrappers above as Python functions for a
// given dimension / input type combination.
template<unsigned int DIM, class T_IN>
void defineBlockwiseFilters(){

    //typedef BlockwiseConvolutionOptions<DIM> Opt;

    python::def("_gaussianSmooth",registerConverters(&pyBlockwiseGaussianSmoothMultiArray<DIM, T_IN, float>),
        (
            python::arg("source"),
            python::arg("options"),
            python::arg("out") = python::object()
        )
    );

    python::def("_gaussianGradientMagnitude",registerConverters(&pyBlockwiseGaussianGradientMagnitudeMultiArray<DIM, T_IN, float>),
        (
            python::arg("source"),
            python::arg("options"),
            python::arg("out") = python::object()
        )
    );

    python::def("_gaussianGradient",registerConverters(&pyBlockwiseGaussianGradientMultiArray<DIM, T_IN, TinyVector<float, DIM> >),
        (
            python::arg("source"),
            python::arg("options"),
            python::arg("out") = python::object()
        )
    );

    python::def("_hessianOfGaussianEigenvalues",registerConverters(&pyBlockwiseHessianOfGaussianEigenvaluesMultiArray<DIM, T_IN, vigra::TinyVector<float, DIM> >),
        (
            python::arg("source"),
            python::arg("options"),
            python::arg("out") = python::object()
        )
    );

    python::def("_hessianOfGaussianFirstEigenvalue",registerConverters(&pyBlockwiseHessianOfGaussianFirstEigenvalueMultiArray<DIM, T_IN, float>),
        (
            python::arg("source"),
            python::arg("options"),
            python::arg("out") = python::object()
        )
    );

    python::def("_hessianOfGaussianLastEigenvalue",registerConverters(&pyBlockwiseHessianOfGaussianLastEigenvalueMultiArray<DIM, T_IN, float>),
        (
            python::arg("source"),
            python::arg("options"),
            python::arg("out") = python::object()
        )
    );
}

// Returns (as a numpy uint32 array) the indices of blocks intersecting the
// axis-aligned region [begin, end).
template<class MB>
NumpyAnyArray intersectingBlocks(
    const MB & mb,
    const typename MB::Shape begin,
    const typename MB::Shape end,
    NumpyArray<1, UInt32> out
){
    std::vector<UInt32> outVec = mb.intersectingBlocks(begin,end);
    out.reshapeIfEmpty(typename NumpyArray<1,UInt32>::difference_type(outVec.size()));
    std::copy(outVec.begin(),outVec.end(), out.begin());
    return out;
}

// Returns (topLeft, bottomRight) for the block at linear index blockIndex.
template<class MB>
python::tuple getBlock(
    const MB & mb,
    const UInt32 blockIndex
){
    const auto iter = mb.blockBegin();
    const auto & block = iter[blockIndex];
    auto tl = block.begin();
    auto br = block.end();
    return python::make_tuple(tl,br);
}

// Returns (topLeft, bottomRight) for the block at multi-dimensional
// block coordinate `desc`.
template<class MB>
python::tuple getBlock2(
    const MB & mb,
    const typename MB::BlockDesc desc
){
    const auto block = mb.blockDescToBlock(desc);
    auto tl = block.begin();
    auto br = block.end();
    return python::make_tuple(tl,br);
}

// Property accessors for the Block wrapper class below.
template<class BLOCK>
typename BLOCK::Vector blockBegin(const BLOCK & b){
    return b.begin();
}

template<class BLOCK>
typename BLOCK::Vector blockEnd(const BLOCK & b){
    return b.end();
}

template<class BLOCK>
typename BLOCK::Vector blockShape(const BLOCK & b){
    return b.size();
}

// Exposes MultiBlocking<DIM> (and its Block type) to Python under clsName.
template<unsigned int DIM>
void defineMultiBlocking(const std::string & clsName){

    typedef MultiBlocking<DIM> Blocking;
    typedef typename Blocking::Shape Shape;
    typedef typename Blocking::Block Block;

    python::class_<Blocking>(clsName.c_str(), python::init<const Shape &, const Shape &>())
        .def("intersectingBlocks",registerConverters(&intersectingBlocks<Blocking>),
            (
                python::arg("begin"),
                python::arg("end"),
                python::arg("out") = python::object()
            )
        )
        .def("__len__", &Blocking::numBlocks)
        .def("__getitem__", &getBlock<Blocking>)
        .def("__getitem__", &getBlock2<Blocking>)
    ;

    const std::string blockName = clsName + std::string("Block");
    python::class_<Block>(blockName.c_str())
        .add_property("begin",&blockBegin<Block>)
        .add_property("end",  &blockEnd<Block>)
        .add_property("shape",&blockShape<Block>)
    ;
}

// NOTE(review): this definition continues beyond the end of this chunk.
template<unsigned int DIM>
void defineBlockwiseConvolutionOptions(const std::string & clsName){

    typedef BlockwiseConvolutionOptions<DIM> Opt;
python::class_<Opt>(clsName.c_str(), python::init<>()) .add_property("stdDev", &Opt::getStdDev, &Opt::setStdDev) //.add_property("scale", &Opt::getScale, &Opt::setScale) .add_property("innerScale", &Opt::getInnerScale, &Opt::setInnerScale) .add_property("outerScale", &Opt::getOuterScale, &Opt::setOuterScale) .add_property("blockShape", &Opt::readBlockShape, &Opt::setBlockShape) .add_property("numThreads", &Opt::getNumThreads, &Opt::setNumThreads) ; } } using namespace vigra; using namespace boost::python; BOOST_PYTHON_MODULE_INIT(blockwise) { import_vigranumpy(); python::docstring_options doc_options(true, true, false); defineMultiBlocking<2>("Blocking2D"); defineMultiBlocking<3>("Blocking3D"); defineBlockwiseConvolutionOptions<2>("BlockwiseConvolutionOptions2D"); defineBlockwiseConvolutionOptions<3>("BlockwiseConvolutionOptions3D"); defineBlockwiseConvolutionOptions<4>("BlockwiseConvolutionOptions4D"); defineBlockwiseConvolutionOptions<5>("BlockwiseConvolutionOptions4D"); defineBlockwiseFilters<2, float>(); defineBlockwiseFilters<3, float>(); }
5,221
1,248
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.atlas.repository.impexp;

import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.instance.AtlasClassification;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
import org.apache.atlas.model.instance.AtlasObjectId;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;

/**
 * Unit tests for {@link ImportTransforms}: JSON-declared attribute transforms
 * (lowercase, replace, add, clearAttrValue, setDeleted, add/removeClassification)
 * applied to entities during import.
 */
public class ImportTransformsTest {
    private final String ATTR_NAME_QUALIFIED_NAME    = "qualifiedName";
    private final String COLUMN_QUALIFIED_NAME_FORMAT = "col%s.TABLE1.default@cl1";
    private final String lowerCaseCL1 = "@cl1";
    private final String lowerCaseCL2 = "@cl2";

    // Transform specs under test. The JSON maps entity type -> attribute -> transform list;
    // "*" applies the transform to the entity as a whole.
    private final String jsonLowerCaseReplace  = "{ \"hive_table\": { \"qualifiedName\":[ \"lowercase\", \"replace:@cl1:@cl2\" ] } }";
    private final String jsonReplaceLowerCase  = "{ \"Asset\": { \"qualifiedName\":[ \"replace:@cl1:@cl2\" ] }, \"hive_table\": { \"qualifiedName\":[ \"lowercase\", \"replace:@cl1:@cl2\" ] } }";
    private final String jsonReplaceRemoveClassification = "{ \"hive_table\": { \"qualifiedName\":[ \"replace:@%s:@%s\"], \"*\":[ \"removeClassification:%s_to_%s\" ] } }";
    private final String jsonReplaceAndAddAttrValue = "{ \"hive_table\": { \"qualifiedName\":[ \"replace:@%s:@%s\"], \"*\":[ \"add:%s=list:%s\" ] } }";
    private final String jsonSingleClearAttrValue   = "{ \"hive_table\": { \"*\":[ \"clearAttrValue:replicatedToCluster\", \"clearAttrValue:replicatedFromCluster\" ] } }";
    private final String jsonMultipleClearAttrValue = "{ \"hive_table\": { \"*\":[ \"clearAttrValue:replicatedToCluster,replicatedFromCluster\" ] } }";
    private final String jsonSetDeleted             = "{ \"hive_table\": { \"*\":[ \"setDeleted\" ] } }";
    private final String jsonAddClasification       = "{ \"hive_table\": { \"*\":[ \"addClassification:REPLICATED\" ] } }";
    private final String jsonAddClasification2      = "{ \"hive_table\": { \"*\":[ \"addClassification:REPLICATED_2\" ] } }";
    private final String jsonAddClasificationScoped = "{ \"hive_column\": { \"*\":[ \"addClassification:REPLICATED_2:topLevel\" ] } }";

    // Shared fixture: lowercase + @cl1 -> @cl2 on hive_table.qualifiedName.
    private ImportTransforms transform;

    private String HIVE_TABLE_ATTR_SYNC_INFO       = "hive_table.syncInfo";
    private String HIVE_TABLE_ATTR_REPLICATED_FROM = "replicatedFromCluster";
    private String HIVE_TABLE_ATTR_REPLICATED_TO   = "replicatedToCluster";

    @BeforeTest
    public void setup() {
        transform = ImportTransforms.fromJson(jsonLowerCaseReplace);
    }

    @BeforeMethod
    public void setUp() {
    }

    @Test
    public void transformEntityWith2Transforms() throws AtlasBaseException {
        AtlasEntity entity    = getHiveTableAtlasEntity();
        String      attrValue = (String) entity.getAttribute(ATTR_NAME_QUALIFIED_NAME);

        transform.apply(entity);

        // Both transforms (lowercase, then replace) must have been applied in order.
        assertEquals(entity.getAttribute(ATTR_NAME_QUALIFIED_NAME), applyDefaultTransform(attrValue));
    }

    @Test
    public void transformEntityWithExtInfo() throws AtlasBaseException {
        addColumnTransform(transform);

        AtlasEntityWithExtInfo entityWithExtInfo = getAtlasEntityWithExtInfo();
        AtlasEntity            entity            = entityWithExtInfo.getEntity();
        String                 attrValue         = (String) entity.getAttribute(ATTR_NAME_QUALIFIED_NAME);
        String[]               expectedValues    = getExtEntityExpectedValues(entityWithExtInfo);

        transform.apply(entityWithExtInfo);

        // The top-level entity gets the hive_table transforms...
        assertEquals(entityWithExtInfo.getEntity().getAttribute(ATTR_NAME_QUALIFIED_NAME), applyDefaultTransform(attrValue));

        // ...and every referred hive_column gets the replace-only transform added above.
        for (int i = 0; i < expectedValues.length; i++) {
            assertEquals(entityWithExtInfo.getReferredEntities().get(Integer.toString(i)).getAttribute(ATTR_NAME_QUALIFIED_NAME), expectedValues[i]);
        }
    }

    @Test
    public void transformEntityWithExtInfoNullCheck() throws AtlasBaseException {
        addColumnTransform(transform);

        AtlasEntityWithExtInfo entityWithExtInfo = getAtlasEntityWithExtInfo();

        // A null referred-entities map must not cause a failure.
        entityWithExtInfo.setReferredEntities(null);

        AtlasEntityWithExtInfo transformedEntityWithExtInfo = transform.apply(entityWithExtInfo);

        assertNotNull(transformedEntityWithExtInfo);
        assertEquals(entityWithExtInfo.getEntity().getGuid(), transformedEntityWithExtInfo.getEntity().getGuid());
    }

    @Test
    public void transformFromJsonWithMultipleEntries() {
        ImportTransforms t = ImportTransforms.fromJson(jsonReplaceLowerCase);

        // One entry per entity type in the JSON ("Asset" and "hive_table").
        assertNotNull(t);
        assertEquals(t.getTransforms().size(), 2);
    }

    @Test
    public void removeClassificationTransform_RemovesSpecifiedClassification() throws AtlasBaseException {
        List<AtlasClassification> classifications = new ArrayList<>();
        classifications.add(new AtlasClassification("cl2_to_cl1"));

        String           s                      = String.format(jsonReplaceRemoveClassification, "cl1", "cl2", "cl2", "cl1");
        ImportTransforms t                      = ImportTransforms.fromJson(s);
        AtlasEntity      entity                 = getHiveTableAtlasEntity();
        String           expected_qualifiedName = entity.getAttribute(ATTR_NAME_QUALIFIED_NAME).toString().replace("@cl1", "@cl2");

        entity.setClassifications(classifications);
        assertEquals(entity.getClassifications().size(), 1);

        t.apply(entity);

        // The "cl2_to_cl1" classification is removed; qualifiedName is rewritten.
        assertEquals(entity.getClassifications().size(), 0);
        assertNotNull(t);
        assertEquals(entity.getAttribute(ATTR_NAME_QUALIFIED_NAME), expected_qualifiedName);
    }

    @Test
    public void add_setsValueOfAttribute() throws AtlasBaseException {
        final String expected_syncInfo = "cl1:import";

        String           s                      = String.format(jsonReplaceAndAddAttrValue, "cl1", "cl2", HIVE_TABLE_ATTR_SYNC_INFO, expected_syncInfo);
        ImportTransforms t                      = ImportTransforms.fromJson(s);
        AtlasEntity      entity                 = getHiveTableAtlasEntity();
        String           expected_qualifiedName = entity.getAttribute(ATTR_NAME_QUALIFIED_NAME).toString().replace("@cl1", "@cl2");

        t.apply(entity);

        assertNotNull(t);
        assertEquals(entity.getAttribute(ATTR_NAME_QUALIFIED_NAME), expected_qualifiedName);
        // "add:...=list:..." produces a single-element list value.
        assertEquals(entity.getAttribute(HIVE_TABLE_ATTR_SYNC_INFO), new ArrayList<String>() {{ add(expected_syncInfo); }});
    }

    @Test
    public void clearAttrValue_removesValueOfAttribute() throws AtlasBaseException {
        AtlasEntity entity = getHiveTableAtlasEntity();

        assertNotNull(entity.getAttribute(HIVE_TABLE_ATTR_REPLICATED_FROM));
        assertNotNull(entity.getAttribute(HIVE_TABLE_ATTR_REPLICATED_TO));

        ImportTransforms t = ImportTransforms.fromJson(jsonSingleClearAttrValue);
        assertTrue(t.getTransforms().size() > 0);

        t.apply(entity);

        assertNotNull(t);
        assertNull(entity.getAttribute(HIVE_TABLE_ATTR_REPLICATED_FROM));
        assertNull(entity.getAttribute(HIVE_TABLE_ATTR_REPLICATED_TO));
    }

    @Test
    public void clearAttrValueForMultipleAttributes_removesValueOfAttribute() throws AtlasBaseException {
        AtlasEntity entity = getHiveTableAtlasEntity();

        // Same effect as the single-attribute form, but via a comma-separated list.
        ImportTransforms t = ImportTransforms.fromJson(jsonMultipleClearAttrValue);
        assertTrue(t.getTransforms().size() > 0);

        t.apply(entity);

        assertNotNull(t);
        assertNull(entity.getAttribute(HIVE_TABLE_ATTR_REPLICATED_FROM));
        assertNull(entity.getAttribute(HIVE_TABLE_ATTR_REPLICATED_TO));
    }

    @Test
    public void setDeleted_SetsStatusToDeleted() throws AtlasBaseException {
        AtlasEntity entity = getHiveTableAtlasEntity();
        assertEquals(entity.getStatus(), AtlasEntity.Status.ACTIVE);

        ImportTransforms t = ImportTransforms.fromJson(jsonSetDeleted);
        assertTrue(t.getTransforms().size() > 0);

        t.apply(entity);

        assertNotNull(t);
        assertEquals(entity.getStatus(), AtlasEntity.Status.DELETED);
    }

    @Test
    public void addClassification_AddsClassificationToEntitiy() throws AtlasBaseException {
        AtlasEntity entity = getHiveTableAtlasEntity();

        int existingClassificationsCount = entity.getClassifications() != null ? entity.getClassifications().size() : 0;

        ImportTransforms t = ImportTransforms.fromJson(jsonAddClasification);
        assertTrue(t.getTransforms().size() > 0);

        t.apply(entity);

        assertNotNull(t);
        assertEquals(entity.getClassifications().size(), existingClassificationsCount + 1);

        // Follow-up checks reuse the entity mutated above.
        addClassification_ExistingClassificationsAreHandled(entity);
        addClassification_MultipleClassificationsAreAdded(entity);
    }

    @Test
    public void addScopedClassification() throws AtlasBaseException {
        AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = getAtlasEntityWithExtInfo();

        // Referred entity "1" holds col2 (see getAtlasEntityWithExtInfo), which is the
        // column targeted by the filter object id below.
        AtlasEntity entity = entityWithExtInfo.getReferredEntities().get("2");

        // NOTE(review): this ternary tests the TOP-LEVEL entity's classifications but
        // reads the size from the referred 'entity' — both are null here so the count
        // is 0 either way, but the mixed variables look unintentional; confirm.
        int existingClassificationsCount = entityWithExtInfo.getEntity().getClassifications() != null ? entity.getClassifications().size() : 0;

        ImportTransforms t = ImportTransforms.fromJson(jsonAddClasificationScoped);
        assertTrue(t.getTransforms().size() > 0);

        // Restrict the scoped addClassification to the column with qualifiedName col2...
        ImportTransformer.AddClassification classification = (ImportTransformer.AddClassification) t.getTransforms().get("hive_column").get("*").get(0);
        AtlasObjectId objectId = new AtlasObjectId("hive_column", ATTR_NAME_QUALIFIED_NAME, String.format(COLUMN_QUALIFIED_NAME_FORMAT, 2));
        classification.addFilter(objectId);

        t.apply(entityWithExtInfo);

        assertNotNull(t);
        // ...so only referred entity "1" (col2) is classified; everything else is untouched.
        assertNull(entityWithExtInfo.getEntity().getClassifications());
        assertNull(entityWithExtInfo.getReferredEntities().get("0").getClassifications());
        assertEquals(entityWithExtInfo.getReferredEntities().get("1").getClassifications().size(), existingClassificationsCount + 1);
        assertNull(entityWithExtInfo.getReferredEntities().get("2").getClassifications());
    }

    /** Applying the same addClassification twice must not duplicate the classification. */
    private void addClassification_ExistingClassificationsAreHandled(AtlasEntity entity) throws AtlasBaseException {
        int existingClassificationsCount = entity.getClassifications() != null ? entity.getClassifications().size() : 0;

        assertTrue(existingClassificationsCount > 0);

        ImportTransforms.fromJson(jsonAddClasification).apply(entity);

        assertEquals(entity.getClassifications().size(), existingClassificationsCount);
    }

    /** A different classification name must be appended alongside the existing one. */
    private void addClassification_MultipleClassificationsAreAdded(AtlasEntity entity) throws AtlasBaseException {
        int existingClassificationsCount = entity.getClassifications().size();

        ImportTransforms.fromJson(jsonAddClasification2).apply(entity);

        assertEquals(entity.getClassifications().size(), existingClassificationsCount + 1);
    }

    /** Expected qualifiedNames of the referred columns after the replace transform. */
    private String[] getExtEntityExpectedValues(AtlasEntityWithExtInfo entityWithExtInfo) {
        String[] ret = new String[entityWithExtInfo.getReferredEntities().size()];

        for (int i = 0; i < ret.length; i++) {
            String attrValue = (String) entityWithExtInfo.getReferredEntities().get(Integer.toString(i)).getAttribute(ATTR_NAME_QUALIFIED_NAME);

            ret[i] = attrValue.replace(lowerCaseCL1, lowerCaseCL2);
        }

        return ret;
    }

    /** Adds a replace:@cl1:@cl2 transform on hive_column.qualifiedName to 'transform'. */
    private void addColumnTransform(ImportTransforms transform) throws AtlasBaseException {
        Map<String, List<ImportTransformer>> tr     = new HashMap<>();
        List<ImportTransformer>              trList = new ArrayList<>();

        trList.add(ImportTransformer.getTransformer(String.format("replace:%s:%s", lowerCaseCL1, lowerCaseCL2)));
        tr.put(ATTR_NAME_QUALIFIED_NAME, trList);

        transform.getTransforms().put("hive_column", tr);
    }

    /** Mirrors the fixture transform: lowercase, then @cl1 -> @cl2. */
    private String applyDefaultTransform(String attrValue) {
        return attrValue.toLowerCase().replace(lowerCaseCL1, lowerCaseCL2);
    }

    /** Builds an ACTIVE hive_table entity with qualifiedName "TABLE1.default@cl1". */
    private AtlasEntity getHiveTableAtlasEntity() {
        AtlasEntity entity = new AtlasEntity("hive_table");
        entity.setStatus(AtlasEntity.Status.ACTIVE);

        Map<String, Object> attributes = new HashMap<>();
        attributes.put(ATTR_NAME_QUALIFIED_NAME, "TABLE1.default" + lowerCaseCL1);
        attributes.put("dbname", "someDB");
        attributes.put("name", "somename");
        attributes.put(HIVE_TABLE_ATTR_SYNC_INFO, null);
        attributes.put(HIVE_TABLE_ATTR_REPLICATED_FROM, "cl1");
        attributes.put(HIVE_TABLE_ATTR_REPLICATED_TO, "clx");

        entity.setAttributes(attributes);
        return entity;
    }

    /** Builds a hive_column entity with qualifiedName "col<index>.TABLE1.default@cl1". */
    private AtlasEntity getHiveColumnAtlasEntity(int index) {
        AtlasEntity entity = new AtlasEntity("hive_column");

        Map<String, Object> attributes = new HashMap<>();
        attributes.put(ATTR_NAME_QUALIFIED_NAME, String.format(COLUMN_QUALIFIED_NAME_FORMAT, index));
        attributes.put("name", "col" + index);

        entity.setAttributes(attributes);
        return entity;
    }

    /** Table entity plus referred columns; note key "i" maps to column index i+1. */
    private AtlasEntityWithExtInfo getAtlasEntityWithExtInfo() {
        AtlasEntityWithExtInfo ret = new AtlasEntityWithExtInfo(getHiveTableAtlasEntity());

        Map<String, AtlasEntity> referredEntities = new HashMap<>();
        referredEntities.put("0", getHiveColumnAtlasEntity(1));
        referredEntities.put("1", getHiveColumnAtlasEntity(2));
        referredEntities.put("2", getHiveColumnAtlasEntity(3));
        ret.setReferredEntities(referredEntities);

        return ret;
    }
}
5,181
2,326
/** * Copyright (c) 2016-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE_render file in the root directory of this subproject. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ #include <iostream> #include <string> #include <vector> #include "SystemUtil.h" #include <glog/logging.h> using namespace std; using namespace surround360::util; int main(int argc, char** argv) { initSurround360(argc, argv); LOG(INFO) << "something at info level"; LOG(WARNING) << "something at warn level"; LOG(ERROR) << "something at error level"; LOG(FATAL) << "something at fatal level"; return EXIT_SUCCESS; }
233
679
/**************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *************************************************************/

package org.openoffice.xmerge.converter.xml.sxw.pocketword;

import org.openoffice.xmerge.util.EndianConverter;

import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.io.IOException;

import java.util.Vector;

/**
 * This class to represent the data structure stored by a Pocket Word file that
 * describes that file.
 *
 * The data structure is of variable length, beginning at the end of the
 * font declarations and ending 10 bytes before the first instance of 0xFF 0xFF
 * marking a paragraph block.
 *
 * The variable length component arises from an 8 byte structure describing each
 * paragraph in the document.  These paragraph descriptors appear at the end
 * of the Document Descriptor.
 *
 * @author <NAME>
 * @version 1.1
 */
class DocumentDescriptor {
    /** Number of paragraphs added so far. */
    private short numParagraphs = 0;

    /** Running total of the per-paragraph descriptor lengths. */
    private short length = 0;

    /** Running total of on-screen lines across all paragraphs. */
    private short numLines = 0;

    /**
     * Per-paragraph descriptors in document order.
     * Parameterized (was a raw Vector) so no cast is needed on retrieval.
     */
    private Vector<ParagraphDescriptor> paragraphDesc = null;


    DocumentDescriptor() {
        paragraphDesc = new Vector<ParagraphDescriptor>(0, 1);
    }

    /**
     * Updates the <code>DocumentDescriptor</code> to include details of another
     * paragraph in the document.
     *
     * @param   len     The number of characters in the paragraph.
     * @param   lines   The number of lines on screen that the paragraph uses.
     */
    public void addParagraph(short len, short lines) {
        ParagraphDescriptor pd = new ParagraphDescriptor(len, lines);

        paragraphDesc.add(pd);

        numParagraphs++;
        numLines += lines;
        length += pd.length;
    }

    /**
     * Retrieve the <code>DocumentDescriptor's</code> data.  Due to the variable
     * length nature of the descriptor, certain fields can only be
     * calculated/written after the addition of all paragraphs.
     *
     * @return  Byte array containing the Pocket Word representation of this
     *          <code>DocumentDescriptor</code>.
     */
    public byte[] getDescriptor () {
        ByteArrayOutputStream descStream = new ByteArrayOutputStream();

        writeHeader(descStream);

        /*
         * This value seems to increment by 0x02 for each paragraph.
         * For a single paragraph doc, the value is 0x08, 0x0A for two,
         * 0x0C for three ...
         */
        try {
            descStream.write(EndianConverter.writeShort((short)(6 + (numParagraphs * 2))));

            descStream.write(EndianConverter.writeShort(numParagraphs));
            descStream.write(EndianConverter.writeShort((short)0));
            descStream.write(EndianConverter.writeShort(numParagraphs));
            descStream.write(EndianConverter.writeShort((short)0));

            descStream.write(EndianConverter.writeShort((short)length));
            descStream.write(EndianConverter.writeShort((short)0));
            descStream.write(EndianConverter.writeShort(numLines));
            descStream.write(new byte[] { 0x00, 0x00, 0x00, 0x00,
                                          0x00, 0x00, 0x00, 0x00 } );

            // Write each paragraph's 8-byte descriptor in order.
            for (ParagraphDescriptor pd : paragraphDesc) {
                descStream.write(pd.getDescriptor());
            }

            // Byte sequence marking the end of this DocumentDescriptor
            descStream.write(EndianConverter.writeShort((short)0));
            descStream.write(EndianConverter.writeShort((short)0x41));
        }
        catch (IOException ignored) {
            // Should never happen as this is a memory based stream.
        }

        return descStream.toByteArray();
    }

    /*
     * This method loads the initial fixed portion of the descriptor and the
     * mid-section.  The mid-section is variable but Pocket Word doesn't seem
     * to mind default values.
     */
    private void writeHeader(OutputStream descStream) {
        try {
            descStream.write(new byte[] {
                                0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x06, 0x00,
                                0x15, 0x00, 0x10, 0x00, 0x01, 0x00,
                                (byte)0xD0, 0x2F, 0x00, 0x00,
                                (byte)0xE0, 0x3D, 0x00, 0x00,
                                (byte)0xF0, 0x00, 0x00, 0x00,
                                (byte)0xA0, 0x05, 0x00, 0x00,
                                (byte)0xA0, 0x05, 0x00, 0x00,
                                (byte)0xA0, 0x05, 0x00, 0x00,
                                (byte)0xA0, 0x05, 0x00, 0x00,
                                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                                0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                                0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
                                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                                0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
                                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                                0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                                0x08, 0x00, 0x07, 0x00, 0x10, 0x00, 0x01, 0x00,
                                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                                0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00,
                                0x00, 0x00, 0x1F, 0x04, 0x00, 0x00 } );

            /*
             * The next four bytes are variable, but a pattern hasn't yet been
             * established.  Pocket Word seems to accept this constant value.
             *
             * The bytes are repeated after another 12 byte sequence which does
             * not seem to change from one file to the next.
             */
            descStream.write(new byte[] { (byte)0xE2, 0x02, 0x00, 0x00 } );
            descStream.write(new byte[] {
                                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                                0x3D, 0x04, 0x00, 0x00 } );
            descStream.write(new byte[] { (byte)0xE2, 0x02, 0x00, 0x00 } );

            descStream.write(new byte[] {
                                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                                0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x08, 0x00 } );
        }
        catch (IOException ignored) {
            /* Shouldn't happen with a ByteArrayOutputStream */
        }
    }


    /**
     * <code>ParagraphDescriptor</code> represents the data structure used to
     * describe individual paragraphs within a <code>DocumentDescriptor.</code>
     *
     * It is used solely by the <code>DocumentDescriptor</code> class.
     *
     * Declared static (it reads no enclosing-instance state) so instances do
     * not retain a hidden reference to the outer DocumentDescriptor.
     */
    private static class ParagraphDescriptor {
        private short filler  = 0;
        private short lines   = 0;
        private short length  = 0;
        private short unknown = 0x23;

        public ParagraphDescriptor(short len, short numLines) {
            lines  = numLines;
            length = (short)(len + 1);
        }

        /** Serializes the descriptor as four little-endian shorts (8 bytes). */
        public byte[] getDescriptor() {
            ByteArrayOutputStream desc = new ByteArrayOutputStream();

            try {
                desc.write(EndianConverter.writeShort(filler));
                desc.write(EndianConverter.writeShort(lines));
                desc.write(EndianConverter.writeShort(length));
                desc.write(EndianConverter.writeShort(unknown));
            }
            catch (IOException ignored) {
                /* Should never happen */
            }

            return desc.toByteArray();
        }
    }
}
5,199
5,079
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from desktop.lib.django_forms import DependencyAwareForm
from django import forms
from nose.tools import assert_true, assert_false, assert_equal


def test_dependency_aware_form():
  # "if_true" becomes required when "cond" is checked; "if_false" becomes
  # required when it is not.
  class Form(DependencyAwareForm):
    cond = forms.BooleanField(required=False)
    if_true = forms.CharField(required=False)
    if_false = forms.CharField(required=False)

    dependencies = [
        ("cond", True, "if_true"),
        ("cond", False, "if_false"),
    ]

  assert_true(Form({'cond': '', 'if_false': 'hi'}).is_valid())
  assert_true(Form({'cond': 'on', 'if_true': 'hi'}).is_valid())
  assert_false(Form({}).is_valid())

  # An unsubmitted BooleanField renders as False, which makes "if_false"
  # mandatory — an empty value must therefore fail validation.
  bad_form = Form({'if_false': ''})
  assert_false(bad_form.is_valid())
  # Validation failure must populate the per-field error list.
  assert_equal(1, len(bad_form.errors["if_false"]))
  assert_true(Form({'if_false': 'foo'}).is_valid())

  prefixed_form = Form(prefix="prefix")
  assert_equal(
      [('prefix-cond', "True", "prefix-if_true"),
       ('prefix-cond', 'False', 'prefix-if_false')],
      prefixed_form._calculate_data())
  assert_true(" " not in prefixed_form.render_dep_metadata())

  # cleaned_data must still be populated after a successful is_valid().
  good_form = Form({'if_false': 'foo'})
  good_form.is_valid()
  assert_true(good_form.cleaned_data)
687
642
/**
 * Sample bean used to exercise the excel import/export annotations.
 */
package test.excel;

import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;

import com.jeesuite.common2.excel.annotation.TitleCell;
import com.jeesuite.common2.excel.model.TitleMeta;

/**
 * Per-person salary data.
 * Fields annotated with @TitleCell map to spreadsheet columns; entries that
 * share a parentName (e.g. the gross-pay group) render as a two-row header.
 * @description <br>
 * @author <a href="mailto:<EMAIL>">vakin</a>
 * @date Nov 21, 2016
 */
public class SalaryInfo {

	private int id;
	@TitleCell(name="*姓名",column = 1,notNull = true)
	private String name;
	@TitleCell(name="部门",column = 2)
	private String department;
	@TitleCell(name="身份证号",column = 3 )
	private String idCard;
	@TitleCell(name="基本工资",column = 4,row = 2,parentName = "应发工资",type = Float.class)
	private float baseSalary;// base salary
	@TitleCell(name="岗位工资",column = 5,row = 2,parentName = "应发工资",type = Float.class)
	private float postSalary;// position salary
	@TitleCell(name="绩效工资",column = 6,row = 2,parentName = "应发工资",type = Float.class)
	private float performSalary;// performance salary
	@TitleCell(name="福利津贴",column = 7,row = 2,parentName = "应发工资",type = Float.class)
	private float subsidies;// welfare subsidies
	@TitleCell(name="扣除金额",column = 8,row = 2,parentName = "应发工资",type = Float.class)
	private float deductSalary; // deducted amount
	@TitleCell(name="*总计",column = 9,row = 2,parentName = "应发工资",notNull = true,type = Float.class)
	private float total;
	@TitleCell(name="*社保基数",column = 10,notNull = true,type = Float.class)
	private float insuranceBase;// social-insurance base
	@TitleCell(name="*公积金基数",column = 11,notNull = true,type = Float.class)
	private float housefundBase;

	public int getId() {
		return id;
	}
	public void setId(int id) {
		this.id = id;
	}
	public String getName() {
		return name;
	}
	public void setName(String name) {
		this.name = name;
	}
	public String getDepartment() {
		return department;
	}
	public void setDepartment(String department) {
		this.department = department;
	}
	public String getIdCard() {
		return idCard;
	}
	public void setIdCard(String idCard) {
		this.idCard = idCard;
	}
	public float getBaseSalary() {
		return baseSalary;
	}
	public void setBaseSalary(float baseSalary) {
		this.baseSalary = baseSalary;
	}
	public float getPerformSalary() {
		return performSalary;
	}
	public void setPerformSalary(float performSalary) {
		this.performSalary = performSalary;
	}
	public float getPostSalary() {
		return postSalary;
	}
	public void setPostSalary(float postSalary) {
		this.postSalary = postSalary;
	}
	public float getSubsidies() {
		return subsidies;
	}
	public void setSubsidies(float subsidies) {
		this.subsidies = subsidies;
	}
	public float getDeductSalary() {
		return deductSalary;
	}
	public void setDeductSalary(float deductSalary) {
		this.deductSalary = deductSalary;
	}
	public float getTotal() {
		return total;
	}
	public void setTotal(float total) {
		this.total = total;
	}
	public float getHousefundBase() {
		return housefundBase;
	}
	public void setHousefundBase(float housefundBase) {
		this.housefundBase = housefundBase;
	}
	public float getInsuranceBase() {
		return insuranceBase;
	}
	public void setInsuranceBase(float insuranceBase) {
		this.insuranceBase = insuranceBase;
	}

	@Override
	public String toString() {
		return "PersonSalaryInfo [id=" + id + ", name=" + name + ", department=" + department + ", idCard=" + idCard
				+ ", baseSalary=" + baseSalary + ", performSalary=" + performSalary + ", postSalary=" + postSalary
				+ ", subsidies=" + subsidies + ", deductSalary=" + deductSalary + ", total=" + total
				+ ", housefundBase=" + housefundBase + ", insuranceBase=" + insuranceBase + "]";
	}

	// Demo driver: walks the @TitleCell annotations reflectively and builds the
	// two-level header tree (parent columns with child cells), then prints it.
	public static void main(String[] args) {
		List<TitleMeta> titleCellBeans = new ArrayList<>();
		Field[] fields = SalaryInfo.class.getDeclaredFields();

		Map<String, TitleMeta> parentMap = new HashMap<>();
		// index tracks top-level columns; subIndex tracks children under a parent.
		int index = 0,subIndex = 0;
		for (Field field : fields) {
			if(!field.isAnnotationPresent(TitleCell.class))continue;
			TitleCell annotation = field.getAnnotation(TitleCell.class);
			TitleMeta cell = new TitleMeta(annotation.name());
			if(StringUtils.isBlank(annotation.parentName())){
				// Plain single-row column.
				cell.setColumnIndex(++index);
				titleCellBeans.add(cell);
			}else{
				// Grouped column: create the parent header on first sight, then
				// attach this cell as a second-row child.
				TitleMeta cellParent = parentMap.get(annotation.parentName());
				if(cellParent == null){
					subIndex = index;
					cellParent = new TitleMeta(annotation.parentName());
					cellParent.setColumnIndex(++index);
					parentMap.put(annotation.parentName(), cellParent);
					titleCellBeans.add(cellParent);
				}
				cell.setColumnIndex(++subIndex);
				cell.setRowIndex(1);
				cellParent.addChildren(cell);
			}
		}

		for (TitleMeta cell : titleCellBeans) {
			System.out.println(cell);
			if(cell.getChildren().size() > 0){
				for (TitleMeta child : cell.getChildren()) {
					System.out.println("--" + child);
				}
			}
		}
	}
}
1,978
795
<filename>ai_economist/foundation/entities/endogenous.py # Copyright (c) 2020, salesforce.com, inc. # All rights reserved. # SPDX-License-Identifier: BSD-3-Clause # For full license text, see the LICENSE file in the repo root # or https://opensource.org/licenses/BSD-3-Clause from ai_economist.foundation.base.registrar import Registry class Endogenous: """Base class for endogenous entity classes. Endogenous entities are those that, conceptually, describe the internal state of an agent. This provides a convenient way to separate physical entities (which may exist in the world, be exchanged among agents, or are otherwise in principal observable by others) from endogenous entities (such as the amount of labor effort an agent has experienced). Endogenous entities are registered in the "endogenous" portion of an agent's state and should only be observable by the agent itself. """ name = None def __init__(self): assert self.name is not None endogenous_registry = Registry(Endogenous) @endogenous_registry.add class Labor(Endogenous): """Labor accumulated through working. Included in all environments by default.""" name = "Labor"
342
2,151
# -*- coding: utf-8 -*- # Copyright 2017 The Chromium OS Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Helpers for interacting with LUCI Milo service.""" from __future__ import print_function import base64 import collections import json from chromite.lib import prpc from chromite.cbuildbot import topology class MiloClient(prpc.PRPCClient): """Milo client to interact with the LUCI Milo service.""" def _GetHost(self): """Get LUCI Milo Server host from topology.""" return topology.topology.get(topology.LUCI_MILO_HOST_KEY) def GetBuildbotBuildJSON(self, master, builder, build_num, dryrun=False): """Get Buildbot build summary JSON file. Args: master: waterfall master to query. builder: builder to query. build_num: build number to query. dryrun: Whether a dryrun. Returns: Dictionary corresponding to parsed JSON file. """ body = json.dumps({ 'master': master, 'builder': builder, 'build_num': int(build_num), }) resp = self.SendRequest('prpc/milo.Buildbot', 'GetBuildbotBuildJSON', body, dryrun=dryrun) data = base64.b64decode(resp['data']) if not data: return None result = json.loads(data) properties = {p[0] : p[1] for p in result['properties']} result['properties'] = properties steps = {step['name'] : step for step in result['steps']} result['steps'] = steps result['masterName'] = master return result def BuildInfoGetBuildbot(self, master, builder, build_num, project_hint=None, dryrun=False): """Get BuildInfo corresponding to Buildbot build. Args: master: waterfall master to query. builder: builder to query. build_num: build number to query. project_hint: Logdog project hint. dryrun: Whether a dryrun. Returns: Dictionary of response protobuf including extra key 'steps'. 'steps' value is a collections.OrderedDict() keyed by tuple of step name path (and a scalar of level 1 steps) to the steps. 
The protobuf allows duplicates but the steps dictionary only includes the most recent occurrence. """ request = { 'buildbot': { 'masterName': master, 'builderName': builder, 'buildNumber': int(build_num), }, } if project_hint is not None: request['projectHint'] = project_hint result = self.SendRequest('prpc/milo.BuildInfo', 'Get', json.dumps(request), dryrun=dryrun) def AddSteps(steps, step, root): """Recursive helper function to build mapping of step names to steps.""" if step is not None: # Build a tuple of the path to the step. Root is always unnamed # step so start keys at the level 1 substeps. # Level 0: root = (), name = (None,), key = (None,) # Level 1: root = (None,), name = (xyz,), key = (xyz,) # Level 2: root = (xyz,), name = (abc,), key = (xyz,abc) name = (step.get('name'),) key = (root + name) if root != (None,) else name # Add both the full path tuple, and the scalar value for level 1 steps. # This allows handling most lookups as buildinfo['steps']['cidb name']. steps[key] = step if len(key) == 1: steps[key[0]] = step # Recurse. for substep in step.get('substep', []): AddSteps(steps, substep.get('step', None), key) steps = collections.OrderedDict() AddSteps(steps, result.get('step'), ()) result['steps'] = steps return result
1,446
5,411
#include <StdInc.h> #include <sstream> #include <IteratorView.h> #include <boost/algorithm/string.hpp> #include <CoreConsole.h> #include <NetAddress.h> #define FOLLY_NO_CONFIG #ifdef _WIN32 #undef ssize_t #pragma comment(lib, "iphlpapi.lib") #else #include <sys/types.h> #endif #include <folly/IPAddress.h> #include <folly/String.h> class NetworkList { public: NetworkList() { } explicit NetworkList(std::string_view str) { // make_split_iterator currently does not understand string_view auto strRef = std::string{ str }; for (auto item : fx::GetIteratorView( std::make_pair( boost::algorithm::make_split_iterator( strRef, boost::algorithm::token_finder( boost::algorithm::is_space(), boost::algorithm::token_compress_on ) ), boost::algorithm::split_iterator<std::string::iterator>() ) ) ) { auto network = folly::IPAddress::tryCreateNetwork(folly::range(&*item.begin(), &*item.end())); if (network) { networks.push_back(*network); } } } bool ContainsIP(const net::PeerAddress& ip) const { auto sockaddr = ip.GetSocketAddress(); folly::ByteRange ipRange; if (sockaddr->sa_family == AF_INET) { sockaddr_in* inAddr = (sockaddr_in*)sockaddr; ipRange = folly::ByteRange{ (uint8_t*)&inAddr->sin_addr, (uint8_t*)&inAddr->sin_addr + sizeof(inAddr->sin_addr) }; } else if (sockaddr->sa_family == AF_INET6) { sockaddr_in6* inAddr = (sockaddr_in6*)sockaddr; ipRange = folly::ByteRange{ (uint8_t*)&inAddr->sin6_addr, (uint8_t*)&inAddr->sin6_addr + sizeof(inAddr->sin6_addr) }; } if (!ipRange.empty()) { auto ipAddress = folly::IPAddress::tryFromBinary(ipRange); if (ipAddress) { for (const auto& network : networks) { if (ipAddress->inSubnet(network.first, network.second)) { return true; } } } } return false; } bool ContainsIP(std::string_view ip) const { auto ipAddress = folly::IPAddress::tryFromString(ip); if (ipAddress) { for (const auto& network : networks) { if (ipAddress->inSubnet(network.first, network.second)) { return true; } } } return false; } std::string ToString() const { std::stringstream 
ss; for (auto& network : networks) { ss << folly::IPAddress::networkToString(network) << " "; } // strip the final space auto s = ss.str(); if (!s.empty()) { s = s.substr(0, s.length() - 1); } return s; } inline bool operator==(const NetworkList& right) const { return networks == right.networks; } inline bool operator!=(const NetworkList& right) const { return !(*this == right); } private: std::vector<folly::CIDRNetwork> networks; }; template<> struct ConsoleArgumentType<NetworkList> { static std::string Unparse(const NetworkList& input) { return input.ToString(); } static bool Parse(const std::string& input, NetworkList* out) { *out = NetworkList{ input }; return true; } }; template<> struct ConsoleArgumentName<NetworkList> { inline static const char* Get() { return "NetworkList"; } }; ConVar<NetworkList>* g_networkListVar; namespace fx { bool DLL_EXPORT IsProxyAddress(std::string_view ep) { return g_networkListVar->GetValue().ContainsIP(ep); } bool DLL_EXPORT IsProxyAddress(const net::PeerAddress& ep) { return g_networkListVar->GetValue().ContainsIP(ep); } } static InitFunction initFunction([]() { static ConVar<NetworkList> allowedIpCidr("sv_proxyIPRanges", ConVar_None, NetworkList{ "10.0.0.0/8 127.0.0.0/8 192.168.0.0/16 172.16.0.0/12" }); g_networkListVar = &allowedIpCidr; });
1,543
1,590
{ "swagger": "2.0", "info": { "version": "2018-01-01", "title": "Schema of Azure SignalR Service events published to Azure Event Grid", "description": "Describes the schema of the Azure SignalR Service events published to Azure Event Grid. This corresponds to the Data property of an EventGridEvent." }, "paths": {}, "definitions": { "SignalRServiceClientConnectionConnectedEventData": { "description": "Schema of the Data property of an EventGridEvent for a Microsoft.SignalRService.ClientConnectionConnected event.", "type": "object", "properties": { "timestamp": { "description": "The time at which the event occurred.", "format": "date-time", "type": "string" }, "hubName": { "description": "The hub of connected client connection.", "type": "string" }, "connectionId": { "description": "The connection Id of connected client connection.", "type": "string" }, "userId": { "description": "The user Id of connected client connection.", "type": "string" } } }, "SignalRServiceClientConnectionDisconnectedEventData": { "description": "Schema of the Data property of an EventGridEvent for a Microsoft.SignalRService.ClientConnectionDisconnected event.", "type": "object", "properties": { "timestamp": { "description": "The time at which the event occurred.", "format": "date-time", "type": "string" }, "hubName": { "description": "The hub of connected client connection.", "type": "string" }, "connectionId": { "description": "The connection Id of connected client connection.", "type": "string" }, "userId": { "description": "The user Id of connected client connection.", "type": "string" }, "errorMessage": { "description": "The message of error that cause the client connection disconnected.", "type": "string" } } } } }
870
1,467
/** * Copyright Soramitsu Co., Ltd. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0 */ #ifndef IROHA_TRANSACTION_BATCH_FACTORY_HPP #define IROHA_TRANSACTION_BATCH_FACTORY_HPP #include <memory> #include "common/result.hpp" #include "interfaces/common_objects/transaction_sequence_common.hpp" namespace shared_model { namespace interface { class TransactionBatch; /** * Provides methods that create transaction batch from a single transaction, * or a collection of transactions. Field validator is used by default */ class TransactionBatchFactory { public: virtual ~TransactionBatchFactory() = default; template <typename BatchType> using FactoryResult = iroha::expected::Result<BatchType, std::string>; /** * Create transaction batch out of collection of transactions * @param transactions collection of transactions, should be from the same * batch * @return valid batch of transactions or string error */ virtual FactoryResult<std::unique_ptr<TransactionBatch>> createTransactionBatch( const types::SharedTxsCollectionType &transactions) const = 0; /** * Creates transaction batch from single transaction * @param transaction is transaction being validated and used to create * batch * @return batch with single transaction or string error * @note transactions in such batches may not have batch meta information */ virtual FactoryResult<std::unique_ptr<TransactionBatch>> createTransactionBatch( std::shared_ptr<Transaction> transaction) const = 0; }; } // namespace interface } // namespace shared_model #endif // IROHA_TRANSACTION_BATCH_FACTORY_HPP
588
503
<filename>app/models/PostingComment.java /** * Yona, 21st Century Project Hosting SW * <p> * Copyright Yona & Yobi Authors & NAVER Corp. & NAVER LABS Corp. * https://yona.io **/ package models; import models.enumeration.ResourceType; import models.resource.Resource; import javax.persistence.Entity; import javax.persistence.ManyToOne; import javax.persistence.OneToOne; import java.util.ArrayList; import java.util.List; @Entity public class PostingComment extends Comment { private static final long serialVersionUID = 1L; public static final Finder<Long, PostingComment> find = new Finder<>(Long.class, PostingComment.class); @ManyToOne public Posting posting; @OneToOne private PostingComment parentComment; public PostingComment(Posting posting, User author, String contents) { super(author, contents); this.posting = posting; this.projectId = posting.project.id; } /** * @see Comment#getParent() */ public AbstractPosting getParent() { return posting; } @Override public PostingComment getParentComment() { return parentComment; } @Override public void setParentComment(Comment comment) { this.parentComment = (PostingComment)comment; } @Override public List<PostingComment> getSiblingComments() { if (parentComment == null) { return null; } List<PostingComment> comments = find.where() .eq("parentComment.id", parentComment.id) .findList(); return comments; } @Override public List<PostingComment> getChildComments() { List<PostingComment> comments = find.where() .eq("parentComment.id", id) .findList(); return comments; } /** * @see Comment#asResource() */ @Override public Resource asResource() { return new Resource() { @Override public String getId() { return id.toString(); } @Override public Project getProject() { return posting.project; } @Override public ResourceType getType() { return ResourceType.NONISSUE_COMMENT; } @Override public Long getAuthorId() { return authorId; } @Override public Resource getContainer() { return posting.asResource(); } }; } public static 
List<PostingComment> findAllBy(Posting posting) { return find.where() .eq("id", posting.id) .findList(); } public static int countAllCreatedBy(User user) { return find.where().eq("author_id", user.id).findRowCount(); } }
1,238
679
<gh_stars>100-1000 /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #ifndef _SVX_EXTRUSION_CONTROLS_HXX #define _SVX_EXTRUSION_CONTROLS_HXX #include "svx/svxdllapi.h" #include <svtools/valueset.hxx> #include <svtools/svtreebx.hxx> #include <vcl/button.hxx> #include <vcl/dialog.hxx> #include <vcl/field.hxx> #include <vcl/fixed.hxx> #include <svtools/toolbarmenu.hxx> #include <svtools/popupwindowcontroller.hxx> #include <svtools/popupmenucontrollerbase.hxx> class SfxBindings; class SfxStatusForwarder; //======================================================================== namespace svx { class ToolboxButtonColorUpdater; class ExtrusionDirectionWindow : public svtools::ToolbarMenu { public: ExtrusionDirectionWindow( svt::ToolboxController& rController, const ::com::sun::star::uno::Reference< ::com::sun::star::frame::XFrame >& rFrame, Window* pParentWindow ); virtual void SAL_CALL statusChanged( const ::com::sun::star::frame::FeatureStateEvent& Event ) throw ( ::com::sun::star::uno::RuntimeException ); virtual void DataChanged( const DataChangedEvent& rDCEvt ); private: svt::ToolboxController& mrController; ValueSet* 
mpDirectionSet; Image maImgDirection[9]; Image maImgDirectionH[9]; Image maImgPerspective; Image maImgPerspectiveH; Image maImgParallel; Image maImgParallelH; const rtl::OUString msExtrusionDirection; const rtl::OUString msExtrusionProjection; DECL_LINK( SelectHdl, void * ); void FillValueSet(); void implSetDirection( sal_Int32 nSkew, bool bEnabled = true ); void implSetProjection( sal_Int32 nProjection, bool bEnabled = true ); }; //======================================================================== class ExtrusionDirectionControl : public svt::PopupWindowController { public: ExtrusionDirectionControl( const com::sun::star::uno::Reference< com::sun::star::lang::XMultiServiceFactory >& rServiceManager ); virtual ::Window* createPopupWindow( ::Window* pParent ); // XServiceInfo virtual ::rtl::OUString SAL_CALL getImplementationName() throw( ::com::sun::star::uno::RuntimeException ); virtual ::com::sun::star::uno::Sequence< ::rtl::OUString > SAL_CALL getSupportedServiceNames() throw( ::com::sun::star::uno::RuntimeException ); using svt::PopupWindowController::createPopupWindow; }; //======================================================================== class ExtrusionDepthWindow : public svtools::ToolbarMenu { private: svt::ToolboxController& mrController; Image maImgDepth0; Image maImgDepth1; Image maImgDepth2; Image maImgDepth3; Image maImgDepth4; Image maImgDepthInfinity; Image maImgDepth0h; Image maImgDepth1h; Image maImgDepth2h; Image maImgDepth3h; Image maImgDepth4h; Image maImgDepthInfinityh; FieldUnit meUnit; double mfDepth; const rtl::OUString msExtrusionDepth; const rtl::OUString msMetricUnit; DECL_LINK( SelectHdl, void * ); void implFillStrings( FieldUnit eUnit ); void implSetDepth( double fDepth ); public: ExtrusionDepthWindow( svt::ToolboxController& rController, const ::com::sun::star::uno::Reference< ::com::sun::star::frame::XFrame >& rFrame, Window* pParentWindow ); virtual void SAL_CALL statusChanged( const 
::com::sun::star::frame::FeatureStateEvent& Event ) throw ( ::com::sun::star::uno::RuntimeException ); virtual void DataChanged( const DataChangedEvent& rDCEvt ); }; //======================================================================== class ExtrusionDepthController : public svt::PopupWindowController { public: ExtrusionDepthController( const com::sun::star::uno::Reference< com::sun::star::lang::XMultiServiceFactory >& rServiceManager ); virtual ::Window* createPopupWindow( ::Window* pParent ); // XServiceInfo virtual ::rtl::OUString SAL_CALL getImplementationName() throw( ::com::sun::star::uno::RuntimeException ); virtual ::com::sun::star::uno::Sequence< ::rtl::OUString > SAL_CALL getSupportedServiceNames() throw( ::com::sun::star::uno::RuntimeException ); using svt::PopupWindowController::createPopupWindow; }; //======================================================================== class ExtrusionLightingWindow : public svtools::ToolbarMenu { private: svt::ToolboxController& mrController; ValueSet* mpLightingSet; Image maImgLightingOff[9]; Image maImgLightingOn[9]; Image maImgLightingPreview[9]; Image maImgLightingOffh[9]; Image maImgLightingOnh[9]; Image maImgLightingPreviewh[9]; Image maImgBright; Image maImgNormal; Image maImgDim; Image maImgBrighth; Image maImgNormalh; Image maImgDimh; int mnLevel; bool mbLevelEnabled; int mnDirection; bool mbDirectionEnabled; const rtl::OUString msExtrusionLightingDirection; const rtl::OUString msExtrusionLightingIntensity; void implSetIntensity( int nLevel, bool bEnabled ); void implSetDirection( int nDirection, bool bEnabled ); DECL_LINK( SelectHdl, void * ); public: ExtrusionLightingWindow( svt::ToolboxController& rController, const ::com::sun::star::uno::Reference< ::com::sun::star::frame::XFrame >& rFrame, Window* pParentWindow ); virtual void SAL_CALL statusChanged( const ::com::sun::star::frame::FeatureStateEvent& Event ) throw ( ::com::sun::star::uno::RuntimeException ); virtual void DataChanged( const 
DataChangedEvent& rDCEvt ); }; //======================================================================== class ExtrusionLightingControl : public svt::PopupWindowController { public: ExtrusionLightingControl( const com::sun::star::uno::Reference< com::sun::star::lang::XMultiServiceFactory >& rServiceManager ); virtual ::Window* createPopupWindow( ::Window* pParent ); // XServiceInfo virtual ::rtl::OUString SAL_CALL getImplementationName() throw( ::com::sun::star::uno::RuntimeException ); virtual ::com::sun::star::uno::Sequence< ::rtl::OUString > SAL_CALL getSupportedServiceNames() throw( ::com::sun::star::uno::RuntimeException ); using svt::PopupWindowController::createPopupWindow; }; //======================================================================== class ExtrusionSurfaceWindow : public svtools::ToolbarMenu { private: svt::ToolboxController& mrController; Image maImgSurface1; Image maImgSurface2; Image maImgSurface3; Image maImgSurface4; Image maImgSurface1h; Image maImgSurface2h; Image maImgSurface3h; Image maImgSurface4h; const rtl::OUString msExtrusionSurface; DECL_LINK( SelectHdl, void * ); void implSetSurface( int nSurface, bool bEnabled ); public: ExtrusionSurfaceWindow( svt::ToolboxController& rController, const ::com::sun::star::uno::Reference< ::com::sun::star::frame::XFrame >& rFrame, Window* pParentWindow ); virtual void SAL_CALL statusChanged( const ::com::sun::star::frame::FeatureStateEvent& Event ) throw ( ::com::sun::star::uno::RuntimeException ); }; //======================================================================== class ExtrusionSurfaceControl : public svt::PopupWindowController { public: ExtrusionSurfaceControl( const com::sun::star::uno::Reference< com::sun::star::lang::XMultiServiceFactory >& rServiceManager ); virtual ::Window* createPopupWindow( ::Window* pParent ); // XServiceInfo virtual ::rtl::OUString SAL_CALL getImplementationName() throw( ::com::sun::star::uno::RuntimeException ); virtual ::com::sun::star::uno::Sequence< 
::rtl::OUString > SAL_CALL getSupportedServiceNames() throw( ::com::sun::star::uno::RuntimeException ); using svt::PopupWindowController::createPopupWindow; }; //======================================================================== } #endif
2,623
344
<gh_stars>100-1000 /* * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ #ifndef NET_DCSCTP_PACKET_PARAMETER_SSN_TSN_RESET_REQUEST_PARAMETER_H_ #define NET_DCSCTP_PACKET_PARAMETER_SSN_TSN_RESET_REQUEST_PARAMETER_H_ #include <stddef.h> #include <stdint.h> #include <string> #include <vector> #include "absl/strings/string_view.h" #include "api/array_view.h" #include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/packet/parameter/parameter.h" #include "net/dcsctp/packet/tlv_trait.h" namespace dcsctp { // https://tools.ietf.org/html/rfc6525#section-4.3 struct SSNTSNResetRequestParameterConfig : ParameterConfig { static constexpr int kType = 15; static constexpr size_t kHeaderSize = 8; static constexpr size_t kVariableLengthAlignment = 0; }; class SSNTSNResetRequestParameter : public Parameter, public TLVTrait<SSNTSNResetRequestParameterConfig> { public: static constexpr int kType = SSNTSNResetRequestParameterConfig::kType; explicit SSNTSNResetRequestParameter( ReconfigRequestSN request_sequence_number) : request_sequence_number_(request_sequence_number) {} static absl::optional<SSNTSNResetRequestParameter> Parse( rtc::ArrayView<const uint8_t> data); void SerializeTo(std::vector<uint8_t>& out) const override; std::string ToString() const override; ReconfigRequestSN request_sequence_number() const { return request_sequence_number_; } private: ReconfigRequestSN request_sequence_number_; }; } // namespace dcsctp #endif // NET_DCSCTP_PACKET_PARAMETER_SSN_TSN_RESET_REQUEST_PARAMETER_H_
692
1,073
// RUN: %clang_cc1 -fcxx-exceptions -fexceptions -triple x86_64-apple-darwin10 -emit-llvm %s -o - > %t // RUN: FileCheck %s -check-prefix=CHECK-1 < %t // RUN: FileCheck %s -check-prefix=CHECK-2 < %t int f(); namespace { // CHECK-1: @_ZN12_GLOBAL__N_11bE = internal global i32 0 // CHECK-1: @_ZN12_GLOBAL__N_1L1cE = internal global i32 0 // CHECK-1: @_ZN12_GLOBAL__N_11D1dE = internal global i32 0 // CHECK-1: @_ZN12_GLOBAL__N_11aE = internal global i32 0 int a = 0; int b = f(); static int c = f(); class D { static int d; }; int D::d = f(); // Check for generation of a VTT with internal linkage // CHECK-1: @_ZTSN12_GLOBAL__N_11X1EE = internal constant struct X { struct EBase { }; struct E : public virtual EBase { virtual ~E() {} }; }; // CHECK-1-LABEL: define internal i32 @_ZN12_GLOBAL__N_13fooEv() int foo() { return 32; } // CHECK-1-LABEL: define internal i32 @_ZN12_GLOBAL__N_11A3fooEv() namespace A { int foo() { return 45; } } } int concrete() { return a + foo() + A::foo(); } void test_XE() { throw X::E(); } // Miscompile on llvmc plugins. namespace test2 { struct A { template <class T> struct B { static void foo() {} }; }; namespace { struct C; } // CHECK-2-LABEL: define void @_ZN5test24testEv() // CHECK-2: call void @_ZN5test21A1BINS_12_GLOBAL__N_11CEE3fooEv() void test() { A::B<C>::foo(); } // CHECK-2-LABEL: define internal void @_ZN5test21A1BINS_12_GLOBAL__N_11CEE3fooEv() } namespace { int bar() { extern int a; return a; } } // namespace
714
381
<reponame>jsalinaspolo/JGiven package com.tngtech.jgiven.impl; import static org.assertj.core.api.Assertions.assertThat; import com.tngtech.jgiven.annotation.AfterScenario; import com.tngtech.jgiven.annotation.AfterStage; import com.tngtech.jgiven.annotation.BeforeScenario; import com.tngtech.jgiven.annotation.BeforeStage; import com.tngtech.jgiven.impl.intercept.StepInterceptorImpl; import org.junit.Test; public class StageLifecycleManagerTest { private final StepInterceptorImpl mockInterceptor = new StepInterceptorImpl(null, null, null); private LifecycleMethodContainer methodContainer = new LifecycleMethodContainer(); private StageLifecycleManager underTest = new StageLifecycleManager(methodContainer, mockInterceptor); @Test public void exectuesAnnotatedMethodRepeatedly() throws Throwable { executeAllLifecycleMethods(underTest, false); executeAllLifecycleMethods(underTest, false); assertThat(methodContainer.repeatableBeforeMethodInvoked).isEqualTo(2); assertThat(methodContainer.repeatableAfterMethodInvoked).isEqualTo(2); } @Test public void exectuesNonAnnotatedMethodOnlyOnce() throws Throwable { executeAllLifecycleMethods(underTest, false); executeAllLifecycleMethods(underTest, false); assertThat(methodContainer.beforeScenarioMethodInvoked).isEqualTo(1); assertThat(methodContainer.beforeMethodInvoked).isEqualTo(1); assertThat(methodContainer.afterMethodInvoked).isEqualTo(1); assertThat(methodContainer.afterScenarioMethodInvoked).isEqualTo(1); } @Test public void executesAllLifecycleMethods() throws Throwable { executeAllLifecycleMethods(underTest, false); assertAllMethodsHaveBeenExecuted(1); } @Test public void findsLifecycleMethodInSuperclasses() throws Throwable { methodContainer = new LifecycleMethodContainer() { }; underTest = new StageLifecycleManager(methodContainer, mockInterceptor); executeAllLifecycleMethods(underTest, false); assertAllMethodsHaveBeenExecuted(1); } @Test public void noExecutionIfFakeExecutionRequested() throws Throwable { 
executeAllLifecycleMethods(underTest, true); assertAllMethodsHaveBeenExecuted(0); } private void executeAllLifecycleMethods(StageLifecycleManager underTest, boolean dryRun) throws Throwable { underTest.executeBeforeScenarioMethods(dryRun); underTest.executeBeforeStageMethods(dryRun); underTest.executeAfterStageMethods(dryRun); underTest.executeAfterScenarioMethods(dryRun); } private void assertAllMethodsHaveBeenExecuted(int times) { assertThat(methodContainer.beforeScenarioMethodInvoked).isEqualTo(times); assertThat(methodContainer.beforeMethodInvoked).isEqualTo(times); assertThat(methodContainer.repeatableBeforeMethodInvoked).isEqualTo(times); assertThat(methodContainer.repeatableAfterMethodInvoked).isEqualTo(times); assertThat(methodContainer.afterMethodInvoked).isEqualTo(times); assertThat(methodContainer.afterScenarioMethodInvoked).isEqualTo(times); } private static class LifecycleMethodContainer { int beforeScenarioMethodInvoked = 0; int afterScenarioMethodInvoked = 0; int repeatableBeforeMethodInvoked = 0; int repeatableAfterMethodInvoked = 0; int beforeMethodInvoked = 0; int afterMethodInvoked = 0; @BeforeScenario private void beforeScenario() { beforeScenarioMethodInvoked++; } @AfterScenario private void afterScenario() { afterScenarioMethodInvoked++; } @BeforeStage private void beforeMethod() { beforeMethodInvoked++; } @AfterStage private void afterMethod() { afterMethodInvoked++; } @BeforeStage(repeatable = true) private void repeatableBeforeMethod() { repeatableBeforeMethodInvoked++; } @AfterStage(repeatable = true) private void repeatableAfterMethod() { repeatableAfterMethodInvoked++; } } }
1,553
385
<reponame>3096/starlight /** * @file KCPrismHeader.h * @brief Represents the header for the triangle section in a KCL. */ #pragma once #include "types.h" namespace al { class KCPrismHeader { public: u32 mVertsOffset; // _0 u32 mNormalsOffset; // _4 u32 mTrisOffset; // _8 u32 mSpatialsOffset; // _C f32 _10; f32 mSpatialGridX; // _14 f32 mSpatialGridY; // _18 f32 mSpatialGridZ; // _1C u32 mXMask; // _20 u32 mYMask; // _24 u32 mZMask; // _28 u32 mCoordShift; // _2C u32 mYShift; // _30 u32 mZShift; // _34 f32 _38; }; };
353
3,062
#include "testing/testing.hpp" #include "platform/downloader_utils.hpp" #include "platform/local_country_file_utils.hpp" #include "platform/mwm_version.hpp" UNIT_TEST(UrlConversionTest) { { std::string const mwmName = "Luna"; std::string const fileName = platform::GetFileName(mwmName, MapFileType::Map); int64_t const dataVersion = version::FOR_TESTING_MWM1; int64_t const diffVersion = 0; MapFileType const fileType = MapFileType::Map; auto const path = platform::GetFileDownloadPath(dataVersion, platform::CountryFile(mwmName), fileType); auto const url = downloader::GetFileDownloadUrl(fileName, dataVersion, diffVersion); auto const resultPath = downloader::GetFilePathByUrl(url); TEST_EQUAL(path, resultPath, ()); } { std::string const mwmName = "Luna"; std::string const fileName = platform::GetFileName(mwmName, MapFileType::Diff); int64_t const dataVersion = version::FOR_TESTING_MWM2; int64_t const diffVersion = version::FOR_TESTING_MWM1; MapFileType const fileType = MapFileType::Diff; auto const path = platform::GetFileDownloadPath(dataVersion, platform::CountryFile(mwmName), fileType); auto const url = downloader::GetFileDownloadUrl(fileName, dataVersion, diffVersion); auto const resultPath = downloader::GetFilePathByUrl(url); TEST_EQUAL(path, resultPath, ()); } } UNIT_TEST(IsUrlSupportedTest) { std::string const mwmName = "Luna"; std::string fileName = platform::GetFileName(mwmName, MapFileType::Map); int64_t dataVersion = version::FOR_TESTING_MWM1; int64_t diffVersion = 0; auto url = downloader::GetFileDownloadUrl(fileName, dataVersion, diffVersion); TEST(downloader::IsUrlSupported(url), ()); TEST(downloader::IsUrlSupported("maps/991215/Luna.mwm"), ()); TEST(downloader::IsUrlSupported("maps/0/Luna.mwm"), ()); TEST(!downloader::IsUrlSupported("maps/x/Luna.mwm"), ()); TEST(!downloader::IsUrlSupported("macarena/0/Luna.mwm"), ()); TEST(!downloader::IsUrlSupported("/hack/maps/0/Luna.mwm"), ()); TEST(!downloader::IsUrlSupported("0/Luna.mwm"), ()); 
TEST(!downloader::IsUrlSupported("maps/0/Luna"), ()); TEST(!downloader::IsUrlSupported("0/Luna.mwm"), ()); TEST(!downloader::IsUrlSupported("Luna.mwm"), ()); TEST(!downloader::IsUrlSupported("Luna"), ()); fileName = platform::GetFileName(mwmName, MapFileType::Diff); diffVersion = version::FOR_TESTING_MWM1; url = downloader::GetFileDownloadUrl(fileName, dataVersion, diffVersion); TEST(downloader::IsUrlSupported(url), ()); TEST(downloader::IsUrlSupported("diffs/991215/991215/Luna.mwmdiff"), ()); TEST(downloader::IsUrlSupported("diffs/0/0/Luna.mwmdiff"), ()); TEST(!downloader::IsUrlSupported("diffs/x/0/Luna.mwmdiff"), ()); TEST(!downloader::IsUrlSupported("diffs/0/x/Luna.mwmdiff"), ()); TEST(!downloader::IsUrlSupported("diffs/x/x/Luna.mwmdiff"), ()); TEST(!downloader::IsUrlSupported("beefs/0/0/Luna.mwmdiff"), ()); TEST(!downloader::IsUrlSupported("diffs/0/0/Luna.mwmdiff.f"), ()); TEST(!downloader::IsUrlSupported("maps/diffs/0/0/Luna.mwmdiff"), ()); TEST(!downloader::IsUrlSupported("diffs/0/0/Luna"), ()); TEST(!downloader::IsUrlSupported("0/0/Luna.mwmdiff"), ()); TEST(!downloader::IsUrlSupported("diffs/0/Luna.mwmdiff"), ()); TEST(!downloader::IsUrlSupported("diffs/0"), ()); TEST(!downloader::IsUrlSupported("diffs/0/Luna.mwmdiff"), ()); TEST(!downloader::IsUrlSupported("diffs/Luna.mwmdiff"), ()); TEST(!downloader::IsUrlSupported("Luna.mwmdiff"), ()); TEST(!downloader::IsUrlSupported("Luna"), ()); }
1,313
2,151
#include <stdio.h> #include <string.h> #include <stdlib.h> #include "brl_checks.h" /* Note that this test used to fail worse than it does now. The current situation isn't hugely critical, though probably still incorrect. There are two key portions of the string: the "ing" (which gets contracted to one character) and the double space at the end. When translated, you get: "greetings " -> "greet+s " Notice that the translation also contracts the double space into a single space. With regard to cursor position, compbrlAtCursor is set, which means that the word encompassed by the cursor will be uncontracted (computer braille). This means that if the cursor is anywhere within "greetings", the translated output will also be "greetings", so the cursor positions are identical up to the end of the s (position 8). It gets more interesting at position 9 (the first space). Now, greetings gets contracted, so the output cursor position becomes 7. Still correct so far. Position 10 (the second space) is the problem. Because compbrlAtCursor is set, the current word should probably be expanded. In this case, it is just a space. However, the two spaces are still compressed into one, even though the second should have been expanded. The translation has still contracted the second space, even though it should have stopped contracting at the cursor. See also the description in http://code.google.com/p/liblouis/issues/detail?id=4 */ int main (int argc, char **argv) { const char *str2 = "greetings "; const int expected_pos2[]={0,1,2,3,4,5,6,7,8,7,8}; return check_cursor_pos(str2, expected_pos2); }
519
2,338
# Module-level holder for extra arguments.  Defaults to None; presumably
# assigned by the surrounding framework at startup -- the setter is not
# visible in this file (TODO confirm against the caller).
g_extra_args = None
23
879
<reponame>qianfei11/zstack package org.zstack.network.service.virtualrouter; import org.zstack.header.message.Message; import org.zstack.header.network.*; import org.zstack.header.network.l2.L2NetworkInventory; import org.zstack.header.network.service.APIAttachNetworkServiceProviderToL2NetworkMsg; import org.zstack.header.network.service.APIDetachNetworkServiceProviderFromL2NetworkMsg; import org.zstack.header.network.service.NetworkServiceProvider; import org.zstack.header.network.service.NetworkServiceProviderVO; public class VirtualRouterProvider implements NetworkServiceProvider { private NetworkServiceProviderVO self; VirtualRouterProvider(NetworkServiceProviderVO vo) { self = vo; } public void handleMessage(Message msg) { } public void attachToL2Network(L2NetworkInventory l2Network, APIAttachNetworkServiceProviderToL2NetworkMsg msg) { } public void detachFromL2Network(L2NetworkInventory l2Network, APIDetachNetworkServiceProviderFromL2NetworkMsg msg) throws NetworkException { } }
305
343
# Dotted path to this Django app's AppConfig subclass, picked up when the
# package itself (rather than the AppConfig path) is listed in INSTALLED_APPS.
default_app_config = 'mayan.apps.redactions.apps.RedactionsApp'
22
753
# -*- coding: utf-8 -*-
"""
__init__.py

Created on 2017-11-19 by hbldh <<EMAIL>>

"""

import objc

# Turn on verbose output from the PyObjC bridge for every import of this
# package.  NOTE(review): this is a global, process-wide switch -- confirm it
# is intended outside of debugging sessions.
objc.options.verbose = True
68
348
<filename>docs/data/leg-t2/001/00103114.json {"nom":"Confort","circ":"3ème circonscription","dpt":"Ain","inscrits":390,"abs":213,"votants":177,"blancs":25,"nuls":0,"exp":152,"res":[{"nuance":"REM","nom":"Mme <NAME>","voix":91},{"nuance":"LR","nom":"Mme <NAME>","voix":61}]}
111
326
/****************************************************************************
 ****************************************************************************
 ***
 ***   This header was automatically generated from a Linux kernel header
 ***   of the same name, to make information necessary for userspace to
 ***   call into the kernel available to libc. It contains only constants,
 ***   structures, and macros generated from the original header, and thus,
 ***   contains no copyrightable information.
 ***
 ***   To edit the content of this header, modify the corresponding
 ***   source file (e.g. under external/kernel-headers/original/) then
 ***   run bionic/libc/kernel/tools/update_all.py
 ***
 ***   Any manual change here will be lost the next time this script will
 ***   be run. You've been warned!
 ***
 ****************************************************************************
 ****************************************************************************/
#ifndef _UAPI__SOUND_SB16_CSP_H
#define _UAPI__SOUND_SB16_CSP_H

/* Userspace ABI for the Sound Blaster 16 CSP: mode/format/state constants,
 * microcode upload structures and the 'H' ioctl set. DO NOT EDIT BY HAND --
 * auto-generated, see the banner above. */

/* CSP operating modes (bit flags). */
#define SNDRV_SB_CSP_MODE_NONE 0x00
#define SNDRV_SB_CSP_MODE_DSP_READ 0x01
#define SNDRV_SB_CSP_MODE_DSP_WRITE 0x02
#define SNDRV_SB_CSP_MODE_QSOUND 0x04

/* Microcode load flags. */
#define SNDRV_SB_CSP_LOAD_FROMUSER 0x01
#define SNDRV_SB_CSP_LOAD_INITBLOCK 0x02

/* Sample width flags. */
#define SNDRV_SB_CSP_SAMPLE_8BIT 0x01
#define SNDRV_SB_CSP_SAMPLE_16BIT 0x02

/* Channel flags. */
#define SNDRV_SB_CSP_MONO 0x01
#define SNDRV_SB_CSP_STEREO 0x02

/* Supported sample-rate flags. */
#define SNDRV_SB_CSP_RATE_8000 0x01
#define SNDRV_SB_CSP_RATE_11025 0x02
#define SNDRV_SB_CSP_RATE_22050 0x04
#define SNDRV_SB_CSP_RATE_44100 0x08
#define SNDRV_SB_CSP_RATE_ALL 0x0f

/* CSP state flags (as reported in snd_sb_csp_info.state). */
#define SNDRV_SB_CSP_ST_IDLE 0x00
#define SNDRV_SB_CSP_ST_LOADED 0x01
#define SNDRV_SB_CSP_ST_RUNNING 0x02
#define SNDRV_SB_CSP_ST_PAUSED 0x04
#define SNDRV_SB_CSP_ST_AUTO 0x08
#define SNDRV_SB_CSP_ST_QSOUND 0x10

#define SNDRV_SB_CSP_QSOUND_MAX_RIGHT 0x20

/* Upper bound on the microcode image accepted by LOAD_CODE. */
#define SNDRV_SB_CSP_MAX_MICROCODE_FILE_SIZE 0x3000

/* Header identifying a microcode image and the function it implements. */
struct snd_sb_csp_mc_header {
 char codec_name[16];
 unsigned short func_req;
};

/* Microcode image passed to SNDRV_SB_CSP_IOCTL_LOAD_CODE. */
struct snd_sb_csp_microcode {
 struct snd_sb_csp_mc_header info;
 unsigned char data[SNDRV_SB_CSP_MAX_MICROCODE_FILE_SIZE];
};

/* Parameters for SNDRV_SB_CSP_IOCTL_START. */
struct snd_sb_csp_start {
 int sample_width;
 int channels;
};

/* Status record returned by SNDRV_SB_CSP_IOCTL_INFO. */
struct snd_sb_csp_info {
 char codec_name[16];
 unsigned short func_nr;
 unsigned int acc_format;
 unsigned short acc_channels;
 unsigned short acc_width;
 unsigned short acc_rates;
 unsigned short csp_mode;
 unsigned short run_channels;
 unsigned short run_width;
 unsigned short version;
 unsigned short state;
};

/* ioctl set ('H' magic). */
#define SNDRV_SB_CSP_IOCTL_INFO _IOR('H', 0x10, struct snd_sb_csp_info)
#define SNDRV_SB_CSP_IOCTL_LOAD_CODE _IOC(_IOC_WRITE, 'H', 0x11, sizeof(struct snd_sb_csp_microcode))
#define SNDRV_SB_CSP_IOCTL_UNLOAD_CODE _IO('H', 0x12)
#define SNDRV_SB_CSP_IOCTL_START _IOW('H', 0x13, struct snd_sb_csp_start)
#define SNDRV_SB_CSP_IOCTL_STOP _IO('H', 0x14)
#define SNDRV_SB_CSP_IOCTL_PAUSE _IO('H', 0x15)
#define SNDRV_SB_CSP_IOCTL_RESTART _IO('H', 0x16)
#endif
1,011
<reponame>Emiliano978/twarc #!/usr/bin/env python """ Filter out tweets or retweets that Twitter thinks are sensitive (mostly porn). """ from __future__ import print_function import json import fileinput for line in fileinput.input(): tweet = json.loads(line) if "possibly_sensitive" in tweet and tweet["possibly_sensitive"]: pass elif ( "retweeted_status" in tweet and "possibly_sensitive" in tweet["retweeted_status"] and tweet["retweeted_status"]["possibly_sensitive"] ): pass else: print(json.dumps(tweet))
220
7,447
{ "data": { "addPendingDocument": { "id": "content/movies/empire-strikes-back.md" } } }
54
582
/** * This program and the accompanying materials * are made available under the terms of the License * which accompanies this distribution in the file LICENSE.txt */ package com.archimatetool.editor.ui.factory.elements; import org.eclipse.draw2d.geometry.Dimension; import org.eclipse.swt.graphics.Color; import com.archimatetool.editor.ArchiPlugin; import com.archimatetool.editor.preferences.IPreferenceConstants; import com.archimatetool.editor.ui.factory.AbstractGraphicalObjectUIProvider; import com.archimatetool.editor.ui.factory.IArchimateElementUIProvider; import com.archimatetool.model.IDiagramModelArchimateObject; /** * Abstract Archimate Element UI Provider * * @author <NAME> */ public abstract class AbstractArchimateElementUIProvider extends AbstractGraphicalObjectUIProvider implements IArchimateElementUIProvider { protected static Color defaultBusinessColor = new Color(255, 255, 181); protected static Color defaultApplicationColor = new Color(181, 255, 255); protected static Color defaultTechnologyColor = new Color(201, 231, 183); protected static Color defaultMotivationColor = new Color(204, 204, 255); protected static Color defaultStrategyColor = new Color(245, 222, 170); protected static Color defaultImplMigrationColor1 = new Color(255, 224, 224); protected static Color defaultImplMigrationColor2 = new Color(224, 255, 224); protected AbstractArchimateElementUIProvider() { } @Override public Dimension getDefaultSize() { // If we have an instance, get the default size for its figure type, else default user preference default size return instance != null ? 
getDefaultSizeForFigureType(((IDiagramModelArchimateObject)instance).getType()) : getDefaultUserPreferenceSize(); } @Override public boolean hasAlternateFigure() { return true; } @Override public int getDefaultTextAlignment() { return ArchiPlugin.PREFERENCES.getInt(IPreferenceConstants.DEFAULT_ARCHIMATE_FIGURE_TEXT_ALIGNMENT); } @Override public int getDefaultTextPosition() { return ArchiPlugin.PREFERENCES.getInt(IPreferenceConstants.DEFAULT_ARCHIMATE_FIGURE_TEXT_POSITION); } @Override public boolean hasIcon() { return true; } /** * @return the default size for the given figure type */ protected Dimension getDefaultSizeForFigureType(int figureType) { return getDefaultUserPreferenceSize(); } /** * @return The default figure size from Preferences */ protected static Dimension getDefaultUserPreferenceSize() { return new Dimension(ArchiPlugin.PREFERENCES.getInt(IPreferenceConstants.DEFAULT_ARCHIMATE_FIGURE_WIDTH), ArchiPlugin.PREFERENCES.getInt(IPreferenceConstants.DEFAULT_ARCHIMATE_FIGURE_HEIGHT)); } /** * @return a square size based on the smallest default width or height of user preferences default size */ protected static Dimension getDefaultSquareSize() { Dimension d = getDefaultUserPreferenceSize(); int length = Math.min(d.width, d.height); return new Dimension(length, length); } /** * @return a default size with a minimum width */ protected static Dimension getDefaultSizeWithMinumumWidth(int minWidth) { Dimension d = getDefaultSquareSize(); d.width = Math.max(d.width, minWidth); return d; } }
1,185