max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
6,270
[ { "type": "feature", "category": "DocDB", "description": "This release provides support for cluster delete protection and the ability to stop and start clusters." }, { "type": "feature", "category": "EC2", "description": "This release adds support for specifying a maximum hourly price for all On-Demand and Spot instances in both Spot Fleet and EC2 Fleet." }, { "type": "feature", "category": "Organizations", "description": "Specifying the tag key and tag value is required for tagging requests." }, { "type": "feature", "category": "RDS", "description": "This release adds support for RDS DB Cluster major version upgrade " } ]
278
482
package io.cattle.platform.util.exception; import org.slf4j.Logger; public class ServiceInstanceAllocateException extends InstanceException implements LoggableException { private static final long serialVersionUID = -5376205462062705074L; @Override public void log(Logger log) { log.info(this.getMessage()); } public ServiceInstanceAllocateException(String message, Exception ex, Object instance) { super(message, ex, instance); } }
155
711
package com.java110.api.bmo.payFeeDetailDiscount.impl; import com.alibaba.fastjson.JSONObject; import com.java110.api.bmo.ApiBaseBMO; import com.java110.api.bmo.payFeeDetailDiscount.IPayFeeDetailDiscountBMO; import com.java110.core.context.DataFlowContext; import com.java110.intf.fee.IPayFeeDetailDiscountInnerServiceSMO; import com.java110.po.fee.PayFeePo; import com.java110.po.payFeeDetailDiscount.PayFeeDetailDiscountPo; import com.java110.utils.constant.BusinessTypeConstant; import com.java110.utils.constant.CommonConstant; import com.java110.utils.util.BeanConvertUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @Service("payFeeDetailDiscountBMOImpl") public class PayFeeDetailDiscountBMOImpl extends ApiBaseBMO implements IPayFeeDetailDiscountBMO { @Autowired private IPayFeeDetailDiscountInnerServiceSMO payFeeDetailDiscountInnerServiceSMOImpl; /** * 添加小区信息 * * @param paramInJson 接口调用放传入入参 * @param dataFlowContext 数据上下文 * @return 订单服务能够接受的报文 */ public JSONObject addPayFeeDetailDiscount(JSONObject paramInJson, JSONObject discountJson, DataFlowContext dataFlowContext) { JSONObject business = JSONObject.parseObject("{\"datas\":{}}"); business.put(CommonConstant.HTTP_BUSINESS_TYPE_CD, BusinessTypeConstant.BUSINESS_TYPE_SAVE_DETAIL_DISCOUNT_INFO); business.put(CommonConstant.HTTP_SEQ, DEFAULT_SEQ + 1); business.put(CommonConstant.HTTP_INVOKE_MODEL, CommonConstant.HTTP_INVOKE_MODEL_S); JSONObject businessFee = new JSONObject(); businessFee.put("detailDiscountId", "-1"); businessFee.put("discountPrice", discountJson.getString("discountPrice")); businessFee.put("discountId", discountJson.getString("discountId")); businessFee.put("detailId", paramInJson.getString("detailId")); businessFee.put("communityId", paramInJson.getString("communityId")); businessFee.put("feeId", paramInJson.getString("feeId")); //businessFee.putAll(feeMap); 
business.getJSONObject(CommonConstant.HTTP_BUSINESS_DATAS).put(PayFeeDetailDiscountPo.class.getSimpleName(), businessFee); return business; } /** * 添加活动信息 * * @param paramInJson 接口调用放传入入参 * @param dataFlowContext 数据上下文 * @return 订单服务能够接受的报文 */ public void updatePayFeeDetailDiscount(JSONObject paramInJson, DataFlowContext dataFlowContext) { PayFeeDetailDiscountPo payFeeDetailDiscountPo = BeanConvertUtil.covertBean(paramInJson, PayFeeDetailDiscountPo.class); super.update(dataFlowContext, payFeeDetailDiscountPo, BusinessTypeConstant.BUSINESS_TYPE_UPDATE_DETAIL_DISCOUNT_INFO); } /** * 添加小区信息 * * @param paramInJson 接口调用放传入入参 * @param dataFlowContext 数据上下文 * @return 订单服务能够接受的报文 */ public void deletePayFeeDetailDiscount(JSONObject paramInJson, DataFlowContext dataFlowContext) { PayFeeDetailDiscountPo payFeeDetailDiscountPo = BeanConvertUtil.covertBean(paramInJson, PayFeeDetailDiscountPo.class); super.update(dataFlowContext, payFeeDetailDiscountPo, BusinessTypeConstant.BUSINESS_TYPE_DELETE_DETAIL_DISCOUNT_INFO); } }
1,448
775
/* automatically generated by pepy 7.0 #14 (nanook.mcc.com), do not edit! */ #include "psap.h" #define advise PY_advise void advise (); /* Generated from module IMISC */ #include <stdio.h> #include "IMISC-types.h" #ifndef PEPYPARM #define PEPYPARM char * #endif /* PEPYPARM */ extern PEPYPARM NullParm; /* ARGSUSED */ int decode_IMISC_TimeResult (pe, explicit, len, buffer, parm) register PE pe; int explicit; int *len; char **buffer; struct type_IMISC_TimeResult ** parm; { register integer p5; #ifdef DEBUG (void) testdebug (pe, "IMISC.TimeResult"); #endif if (explicit) { if (pe -> pe_class != PE_CLASS_UNIV || pe -> pe_form != PE_FORM_PRIM || pe -> pe_id != PE_PRIM_INT) { advise (NULLCP, "TimeResult bad class/form/id: %s/%d/0x%x", pe_classlist[pe -> pe_class], pe -> pe_form, pe -> pe_id); return NOTOK; } } else if (pe -> pe_form != PE_FORM_PRIM) { advise (NULLCP, "TimeResult bad form: %d", pe -> pe_form); return NOTOK; } { # line 62 "IMISC-types.py" if ((*(parm) = (struct type_IMISC_TimeResult *) calloc (1, sizeof **(parm))) == ((struct type_IMISC_TimeResult *) 0)) { advise (NULLCP, "%s", PEPY_ERR_NOMEM); return NOTOK; } } if ((p5 = prim2num (pe)) == NOTOK && pe -> pe_errno != PE_ERR_NONE) { advise (NULLCP, "TimeResult %s%s", PEPY_ERR_BAD_INTEGER, pe_error (pe -> pe_errno)); return NOTOK; } (*parm) -> parm = p5; if (len) *len = p5; return OK; }
853
2,392
// Copyright (c) 1997-2007 ETH Zurich (Switzerland). // All rights reserved. // // This file is part of CGAL (www.cgal.org). // // $URL: https://github.com/CGAL/cgal/blob/v5.1/QP_solver/include/CGAL/QP_solver/QP_solver_bounds_impl.h $ // $Id: QP_solver_bounds_impl.h 0779373 2020-03-26T13:31:46+01:00 Sébastien Loriot // SPDX-License-Identifier: GPL-3.0-or-later OR LicenseRef-Commercial // // // Author(s) : <NAME> // <NAME> <<EMAIL>> // <NAME> // <NAME> namespace CGAL { template < typename Q, typename ET, typename Tags > bool QP_solver<Q, ET, Tags>::has_finite_lower_bound(int i) const // Given an index of an original or slack variable, returns whether // or not the variable has a finite lower bound. { CGAL_qpe_assertion(i < qp_n + static_cast<int>(slack_A.size())); return i>=qp_n || check_tag(Is_nonnegative()) || *(qp_fl+i); } template < typename Q, typename ET, typename Tags > bool QP_solver<Q, ET, Tags>::has_finite_upper_bound(int i) const // Given an index of an original or slack variable, returns whether // or not the variable has a finite upper bound. { CGAL_qpe_assertion(i < qp_n + static_cast<int>(slack_A.size())); return i<qp_n && !check_tag(Is_nonnegative()) && *(qp_fu+i); } template < typename Q, typename ET, typename Tags > ET QP_solver<Q, ET, Tags>::lower_bound(int i) const // Given an index of an original or slack variable, returns its // lower bound. { CGAL_qpe_assertion(i < qp_n + static_cast<int>(slack_A.size())); if (i < qp_n) // original variable? if (check_tag(Is_nonnegative())) return et0; else { CGAL_qpe_assertion(has_finite_lower_bound(i)); return *(qp_l+i); } else // slack variable? return et0; } template < typename Q, typename ET, typename Tags > ET QP_solver<Q, ET, Tags>::upper_bound(int i) const // Given an index of an original variable, returns its upper bound. { CGAL_qpe_assertion(i < qp_n); // Note: slack variables cannot have // finite upper bounds. 
CGAL_qpe_assertion(has_finite_upper_bound(i)); return *(qp_u+i); } template < typename Q, typename ET, typename Tags > typename QP_solver<Q, ET, Tags>::Bnd QP_solver<Q, ET, Tags>::lower_bnd(int i) const // Given an index of an original, slack, or artificial variable, // return its lower bound. { if (i < qp_n) { // original variable? const bool is_finite = has_finite_lower_bound(i); return Bnd(false, is_finite, is_finite? lower_bound(i) : ET(0)); } else // slacky or art. var.? return Bnd(false, true, ET(0)); } template < typename Q, typename ET, typename Tags > typename QP_solver<Q, ET, Tags>::Bnd QP_solver<Q, ET, Tags>::upper_bnd(int i) const // Given an index of an original, slack, or artificial variable, // return its upper bound. { if (i < qp_n) { // original variable? const bool is_finite = has_finite_upper_bound(i); return Bnd(true, is_finite, is_finite? upper_bound(i) : ET(0)); } else // slacky or art. var.? return Bnd(true, false, ET(0)); } } //namespace CGAL // ===== EOF ==================================================================
1,487
412
/* Copyright 2015 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #include "graphd/graphd.h" #include "graphd/graphd-hash.h" #include <limits.h> #include <string.h> #include <stdio.h> #include <errno.h> void graphd_guid_set_initialize(graphd_guid_set *gs) { gs->gs_next = NULL; gs->gs_guid = gs->gs_buf; gs->gs_n = gs->gs_m = 0; gs->gs_null = false; } bool graphd_guid_set_contains_null(graphd_guid_set const *gs) { return gs->gs_null || gs->gs_n == 0; } void graphd_guid_set_move(graphd_guid_set *dst, graphd_guid_set *src) { *dst = *src; if (src->gs_guid == src->gs_buf) dst->gs_guid = dst->gs_buf; } /* Return whether this guid is in this set. * * Unlike graphd_guid_set_find(), this function interprets a * zero-length set as matching NULL. */ bool graphd_guid_set_match(graphd_guid_set const *gs, graph_guid const *guid) { if (guid == NULL) return graphd_guid_set_contains_null(gs); if (gs->gs_n == 0) return false; return graphd_guid_set_find(gs, guid) < gs->gs_n; } /* Return the index if found, n if not. NULL lives at 0, if any. */ size_t graphd_guid_set_find(graphd_guid_set const *gs, graph_guid const *guid) { size_t i; if (guid == NULL) return graphd_guid_set_contains_null(gs) ? 0 : gs->gs_n; for (i = 0; i < gs->gs_n; i++) if (GRAPH_GUID_EQ(gs->gs_guid[i], *guid)) break; return i; } /* Returns true if found, false if not * * To delete the "null" from a set, use guid=NULL. 
*/ bool graphd_guid_set_delete(graphd_guid_set *gs, graph_guid const *guid) { size_t i; if (guid == NULL) { if (gs->gs_null) { gs->gs_null = false; return true; } return false; } if ((i = graphd_guid_set_find(gs, guid)) >= gs->gs_n) return false; if (i < gs->gs_n - 1) memmove(gs->gs_guid + i, gs->gs_guid + i + 1, (gs->gs_n - (i + 1)) * sizeof(*guid)); gs->gs_n--; return true; } /** * @brief Add a GUID to a guid constraint. * * @param greq request we're working for * @param gs constraint to add to. * @param guid NULL or GUID to add * * @return 0 on success, a nonzero error code on error. */ int graphd_guid_set_add(graphd_request *greq, graphd_guid_set *gs, graph_guid const *guid) { graph_guid *tmp; cm_handle *cm = greq->greq_req.req_cm; cl_handle *cl = graphd_request_cl(greq); size_t i; cl_assert(cl, gs != NULL); if (guid == NULL) { cl_log(cl, CL_LEVEL_VERBOSE, "graphd_guid_set_add: null to %p", (void *)gs); gs->gs_null = true; return 0; } /* Most common case: a single GUID. It lives either * in preallocated storage or in a built-in buffer. * * Note that adding a GUID to an empty set yields just * that GUID, even though the empty set is treated as * containing NULL. Add NULL explicitly if you want * to keep it! */ if (gs->gs_n == 0) { if (gs->gs_m == 0) gs->gs_guid = gs->gs_buf; gs->gs_guid[0] = *guid; gs->gs_n = 1; return 0; } /* If the GUID already exists in the list, don't store it again. */ if ((i = graphd_guid_set_find(gs, guid)) < gs->gs_n) return 0; /* Grow the dynamic array out of the buffer, if needed. 
*/ if (gs->gs_guid == gs->gs_buf) { tmp = cm_malloc(cm, (i + 1) * sizeof(*tmp)); if (tmp == NULL) { cl_log(cl, CL_LEVEL_ERROR, "graphd_guid_set_add: " "failed to allocate %lu bytes", (unsigned long)((i + 1) * sizeof(*tmp))); return ENOMEM; } memcpy(tmp, gs->gs_guid, i * sizeof(*tmp)); gs->gs_m = i + 1; gs->gs_guid = tmp; } else { if (gs->gs_n >= gs->gs_m) { tmp = cm_realloc(cm, gs->gs_guid, (gs->gs_m + 8) * sizeof(*tmp)); if (tmp == NULL) { cl_log(cl, CL_LEVEL_ERROR, "graphd_guid_set_add: " "failed to allocate %zu bytes", (i + 1) * sizeof(*tmp)); return ENOMEM; } gs->gs_m += 8; gs->gs_guid = tmp; } } gs->gs_guid[gs->gs_n++] = *guid; return 0; } /** * @brief Add the generations of a GUID to a list * * The GUIDs aren't sorted or uniq'ed at this point; * that's the caller's job. * It's also the caller's job to make sure that the * gen_i..gen_i+gen_n range is valid. * * @param greq request handle * @param guid guid to expand * @param gen_i first generation to use * @param gen_n number of generations after gen_i * @param gs gs to add them to. * * @return 0 on success, a nonzero error code on (system) error. 
*/ int graphd_guid_set_add_generations(graphd_request *greq, graph_guid const *guid, unsigned long gen_i, unsigned long gen_n, graphd_guid_set *gs) { pdb_handle *pdb = graphd_request_graphd(greq)->g_pdb; cl_handle *cl = graphd_request_cl(greq); cl_assert(cl, gs != NULL); if (guid == NULL) { cl_log(cl, CL_LEVEL_VERBOSE, "graphd_guid_set_add: null to %p", (void *)gs); gs->gs_null = true; return 0; } cl_assert(cl, guid != NULL); for (; gen_n > 0; gen_i++, gen_n--) { graph_guid g; int err = 0; err = pdb_generation_nth(pdb, greq->greq_asof, guid, false /* is-newest */, gen_i, NULL, &g); if (err != 0) { char buf[GRAPH_GUID_SIZE]; cl_log_errno(cl, CL_LEVEL_FAIL, "pdb_generation_nth", err, "GUID=%s, generation=%lu", graph_guid_to_string(guid, buf, sizeof buf), (unsigned long)gen_i); return err; } { char buf[200]; cl_log(cl, CL_LEVEL_VERBOSE, "graphd_guid_set_add: add %s", graph_guid_to_string(&g, buf, sizeof buf)); } err = graphd_guid_set_add(greq, gs, &g); if (err != 0) { cl_log_errno(cl, CL_LEVEL_FAIL, "graphd_guid_set_add", err, "i=%lu", gen_i); return err; } } return 0; } /** * @brief Go from guid ~= GUID / guidset to guid = GUID / guidset, * * if gs == &con->con_guid.*, do this under control of * con's generational constraint. (The other ones are not * affected by con's generational settings.) */ int graphd_guid_set_convert_generations(graphd_request *greq, graphd_constraint *con, bool is_guid, graphd_guid_set *gs) { int err = 0; graphd_handle *graphd = graphd_request_graphd(greq); graph_guid const *r; graph_guid *w; graph_guid *new_g = NULL; size_t new_m = 0, new_n = 0, i; cm_handle *cm = greq->greq_req.req_cm; cl_handle *cl = graphd_request_cl(greq); if (gs->gs_n == 0) return 0; /* Most common case: we want the newest, * or some other single generation. */ if (is_guid && !con->con_oldest.gencon_valid && (!con->con_newest.gencon_valid || (con->con_newest.gencon_min == con->con_newest.gencon_max))) { graph_guid *w; /* Translate the guids to their relevant instances. 
*/ for (i = 0, r = w = gs->gs_guid; i < gs->gs_n; i++, r++) { if (GRAPH_GUID_IS_NULL(*r)) { /* null -> null */ *w++ = *r; continue; } err = pdb_generation_nth(graphd->g_pdb, greq->greq_asof, r, true /* is-newest */, con->con_newest.gencon_min, NULL, w); if (err == 0) w++; else if (err != GRAPHD_ERR_NO) { char buf[GRAPH_GUID_SIZE]; cl_log_errno(cl, CL_LEVEL_FAIL, "pdb_generation_nth", err, "failed to get newest generation of %s", graph_guid_to_string(r, buf, sizeof buf)); return err; } } if (gs->gs_n == 0) { con->con_false = true; con->con_error = "SEMANTICS no GUIDs in the " "request range of versions"; cl_log(cl, CL_LEVEL_DEBUG, "FALSE: [%s:%d] no GUIDs is requested range", __FILE__, __LINE__); } return 0; } /* Partially constrained case. Each GUID may evaluate * to zero or more GUIDs. */ for (i = 0, r = w = gs->gs_guid; i < gs->gs_n; i++, r++) { graph_guid guid; pdb_id n, last; long long gen_i; long long gen_min, gen_max, gen_size; size_t need; err = pdb_generation_last_n(graphd->g_pdb, greq->greq_asof, r, &last, &n); if (err == GRAPHD_ERR_NO) continue; if (err != 0) return err; /* If there's no generation table entry, * there's just one generation - the one * we're holding. */ if (n == 0) n = 1; gen_min = 0; gen_max = ULONG_MAX; if (is_guid) { if (con->con_newest.gencon_valid) { gen_max = (con->con_newest.gencon_min > n - 1 ? -1 : n - (1 + con->con_newest.gencon_min)); gen_min = (con->con_newest.gencon_max > n - 1 ? -1 : n - (1 + con->con_newest.gencon_max)); } if (con->con_oldest.gencon_valid) { if (gen_min < con->con_oldest.gencon_min) gen_min = con->con_oldest.gencon_min; if (gen_max > con->con_oldest.gencon_max) gen_max = con->con_oldest.gencon_max; } } if (gen_min < 0) gen_min = 0; if (gen_max > n - 1) gen_max = n - 1; if (gen_max < gen_min) continue; gen_size = 1 + (gen_max - gen_min); /* How many do we already have? */ new_n = w - (new_g ? new_g : gs->gs_guid); /* How many will we need with the expansions * of this one, plus the rest on the list? 
*/ need = new_n + gen_size + (gs->gs_n - (i + 1)); if (need > (new_g ? new_m : gs->gs_n)) { void *tmp; tmp = cm_realloc(cm, new_g, need * sizeof(*new_g)); if (tmp == NULL) { if (new_g != NULL) cm_free(cm, new_g); cl_log(cl, CL_LEVEL_ERROR, "graphd_guid_set_" "convert_generations: " "failed to allocate %llu " "bytes for %llu generations", (unsigned long long)(need * sizeof(*new_g)), (unsigned long long)need); return ENOMEM; } if (new_g == NULL && new_n > 0) memcpy(tmp, gs->gs_guid, new_n * sizeof(*w)); new_g = tmp; w = new_g + new_n; new_m = need; } /* In case we'll overwrite *r, copy *r to guid. */ guid = *r; for (gen_i = gen_min; gen_i <= gen_max; gen_i++) { /* If there's just one generation, * and we like it, there is no * generation table entry; our input * is simply our output. */ if (gen_i == 0 && n == 1) { *w++ = guid; continue; } err = pdb_generation_nth(graphd->g_pdb, greq->greq_asof, &guid, false /* is-oldest */, gen_i, NULL, w); if (err == 0) w++; else { char buf[GRAPH_GUID_SIZE]; cl_log(cl, CL_LEVEL_FAIL, "graphd_constraint_convert_" "gs_generations: " "failed to get generation " "#%llu of %s: %s", (unsigned long long)gen_i, graph_guid_to_string(&guid, buf, sizeof buf), graphd_strerror(err)); } } } if (new_g == NULL) gs->gs_m = gs->gs_n = w - gs->gs_guid; else { if (gs->gs_guid != gs->gs_buf) cm_free(cm, gs->gs_guid); gs->gs_guid = new_g; gs->gs_m = gs->gs_n = w - new_g; } if (gs->gs_n == 0 && !gs->gs_null) { con->con_false = true; con->con_error = "SEMANTICS no GUIDs in the " "request range of versions"; cl_log(cl, CL_LEVEL_DEBUG, "FALSE: [%s:%d] no GUIDs is requested range", __FILE__, __LINE__); } return 0; } /** * @brief Replace generational identifiers with their root ancestor * * If we do this, we can intersect two match groups by intersecting * their IDs. 
* * For example, if we have a versioning chain 1 <- 2 <- 3, * then * (GUID ~= 2 GUID ~= 3) * normalizes to * (GUID ~= 1 GUID ~= 1) * and then to a single * (GUID ~= 1) * * @param greq the request we're doing this for * @param gs the set whose IDs the request wants normalized * * @return 0 on success, a nonzero error code on error. */ int graphd_guid_set_normalize_match(graphd_request *greq, graphd_guid_set *gs) { graphd_handle *graphd = graphd_request_graphd(greq); cl_handle *cl = graphd_request_cl(greq); int err = 0; size_t i; if (gs->gs_n == 0) return 0; for (i = 0; i < gs->gs_n; i++) { if (GRAPH_GUID_IS_NULL(gs->gs_guid[i])) continue; err = pdb_generation_nth(graphd->g_pdb, /* asof */ NULL, /* in */ gs->gs_guid + i, /* oldest */ false, /* off */ 0, /* id_out */ NULL, /* guid_out */ gs->gs_guid + i); if (err != 0 && err != GRAPHD_ERR_NO) { char buf[GRAPH_GUID_SIZE]; cl_log_errno(cl, CL_LEVEL_FAIL, "pdb_generation_nth", err, "failed to get oldest generation of %s", graph_guid_to_string(gs->gs_guid + i, buf, sizeof buf)); return err; } } return 0; } /** * @brief Are these two guid sets equal? * * False negatives are okay. In particular, * GUID sets with the same GUID in the wrong order * are misclassified as unequal. * * @param cl Log through here * @param a guid set * @param b another guid set * * @return true if they're equal, false otherwise. */ bool graphd_guid_set_equal(cl_handle *cl, graphd_guid_set const *a, graphd_guid_set const *b) { size_t i; cl_assert(cl, a != NULL); cl_assert(cl, b != NULL); /* One contains null, the other one doesn't? 
*/ if ((a->gs_null || a->gs_n == 0) != (b->gs_null || b->gs_n == 0)) return false; do { if (a->gs_n != b->gs_n) return false; for (i = 0; i < a->gs_n; i++) if (!GRAPH_GUID_EQ(a->gs_guid[i], b->gs_guid[i])) return false; if (((a = a->gs_next) == NULL) != ((b = b->gs_next) == NULL)) return false; } while (a != NULL); if (a->gs_null != b->gs_null) return false; return true; } /** * @brief Hash a guid set * * @param cl Log through here * @param gs guid set * @param hash_inout hash accumulator */ void graphd_guid_set_hash(cl_handle *const cl, graphd_guid_set const *gs, unsigned long *const hash_inout) { size_t i; cl_assert(cl, gs != NULL); cl_assert(cl, hash_inout != NULL); do { for (i = 0; i < gs->gs_n; i++) GRAPHD_HASH_GUID(*hash_inout, gs->gs_guid[i]); } while ((gs = gs->gs_next) != NULL); GRAPHD_HASH_BIT(*hash_inout, gs->gs_null); } /** * @brief Intersection of two GUID constraint sets. * * @param greq Request for which all this happens * @param con Containing constraint (for con_false marking) * @param postpone Postpone intersect if needed * @param accu set to merge into * @param in incoming gs to merge * * @return 0 on success, otherwise a nonzero error code. */ int graphd_guid_set_intersect(graphd_request *greq, graphd_constraint *con, bool postpone, graphd_guid_set *accu, graphd_guid_set *in) { cl_handle *cl = graphd_request_cl(greq); size_t i, f; graph_guid *w; graph_guid const *r; int res; cl_assert(cl, accu != NULL); cl_assert(cl, in != NULL); if (in->gs_n == 0) { if (accu->gs_n > 0) { if (accu->gs_null) /* The filter was {null}. The <accu> contains * {null}, so {null} survives as a result. */ accu->gs_n = 0; else { /* The filter was {null}. The <accu> does not * contain {null}, therefore the result set * is empty. (It doesn't even contain null.) 
*/ con->con_false = true; accu->gs_n = 0; cl_log(cl, CL_LEVEL_DEBUG, "FALSE [%s:%d] intersect non-null " "with null", __FILE__, __LINE__); } } return 0; } if (accu->gs_n == 0) { cl_assert(cl, in->gs_n != 0); /* The <accu> set is {null}. If and only if the filter * contains {null}, <accu> stays {null}; otherwise the * result set is empty. */ if (!graphd_guid_set_contains_null(in)) { con->con_false = true; cl_log(cl, CL_LEVEL_DEBUG, "FALSE [%s:%d] intersect null " "with non-null", __FILE__, __LINE__); } return 0; } /* If postpone is set, .* we can't compute the intersection during parse time; * we have to wait until execution time. */ if (postpone) { graphd_guid_set *gs; gs = cm_malloc(greq->greq_req.req_cm, sizeof *gs); if (gs == NULL) return errno ? errno : ENOMEM; graphd_guid_set_initialize(gs); graphd_guid_set_move(gs, in); gs->gs_next = accu->gs_next; accu->gs_next = gs; return 0; } if (in->gs_n > 1) qsort(in->gs_guid, in->gs_n, sizeof(*in->gs_guid), graph_guid_compare); if (accu->gs_n > 1) qsort(accu->gs_guid, accu->gs_n, sizeof(*accu->gs_guid), graph_guid_compare); i = 0, f = 0; r = w = accu->gs_guid; /* Intersect */ while (i < accu->gs_n && f < in->gs_n) { res = graph_guid_compare(r, in->gs_guid + f); while (res > 0) { f++; if (f >= in->gs_n) goto done; res = graph_guid_compare(r, in->gs_guid + f); } if (res == 0) *w++ = *r; r++; i++; } done: accu->gs_n = w - accu->gs_guid; accu->gs_null &= graphd_guid_set_contains_null(in); if (accu->gs_n == 0 && !accu->gs_null) { cl_log(cl, CL_LEVEL_DEBUG, "FALSE [%s:%d] nothing left after proper intersect", __FILE__, __LINE__); con->con_false = true; } return 0; } /** * @brief Filter a GUID set by lineage. * * Compute the result of * guid=(1 2 3) guid~=(4 5) * * All those GUIDs in <accu> whose root ancestors * are in <fil> are allowed to stay. * * @param greq Request for which all this happens * @param con Containing constraint (for con_false marking) * @param accu set to filter * @param fil root ancestors of allowed IDs. 
* * @return 0 on success, a nonzero error code on error. */ int graphd_guid_set_filter_match(graphd_request *greq, graphd_constraint *con, graphd_guid_set *accu, graphd_guid_set *fil) { graphd_handle *graphd = graphd_request_graphd(greq); cl_handle *cl = graphd_request_cl(greq); size_t i; graph_guid *w; cl_assert(cl, accu != NULL); cl_assert(cl, fil != NULL); cl_log(cl, CL_LEVEL_VERBOSE, "gs_intersect: %zd vs %zd", accu->gs_n, fil->gs_n); if (accu->gs_n == 0) { cl_log(cl, CL_LEVEL_VERBOSE, "graphd_guid_set_filter_match: null %p", (void *)accu); accu->gs_null = true; } /* NULL stays if it's allowed in the filter. */ if (accu->gs_null) { if (fil->gs_n > 0 && !fil->gs_null) { cl_log(cl, CL_LEVEL_DEBUG, "FALSE [%s:%d] =/~ against " "a null", __FILE__, __LINE__); con->con_false = true; } return 0; } /* NULL is the only thing that remains in the * accumulator. */ if (fil->gs_n == 0) { if (accu->gs_null) { accu->gs_n = 0; return 0; } /* The filter was {null}. The <accu> does not contain {null}, * therefore the result set is empty. (It doesn't even contain * null.) */ con->con_false = true; cl_log(cl, CL_LEVEL_DEBUG, "FALSE [%s:%d] =/~ null against " "a non-null", __FILE__, __LINE__); return 0; } if (accu->gs_n == 0) { cl_assert(cl, fil->gs_n != 0); /* The <accu> set is {null}. If and only if the filter * contains {null}, <accu> stays {null}; otherwise the * result set is empty. 
*/ return graphd_guid_set_contains_null(fil); } for (i = 0, w = accu->gs_guid; i < accu->gs_n; i++) { graph_guid guid; int err; if (GRAPH_GUID_IS_NULL(accu->gs_guid[i])) { *w++ = accu->gs_guid[i]; continue; } /* Normalize accu->gs[i] */ err = pdb_generation_nth(graphd->g_pdb, /* asof */ NULL, /* in */ accu->gs_guid + i, /* oldest */ false, /* off */ 0, /* id_out */ NULL, /* guid_out */ &guid); if (err == GRAPHD_ERR_NO) { guid = accu->gs_guid[i]; err = 0; } if (err != 0) { char buf[GRAPH_GUID_SIZE]; cl_log_errno(cl, CL_LEVEL_FAIL, "pdb_generation_nth", err, "failed to get oldest generation of %s", graph_guid_to_string(accu->gs_guid + i, buf, sizeof buf)); return err; } if (graphd_guid_set_find(fil, &guid) < fil->gs_n) *w++ = accu->gs_guid[i]; } accu->gs_n = w - accu->gs_guid; if (fil->gs_n == 0) { con->con_false = true; cl_log(cl, CL_LEVEL_DEBUG, "FALSE [%s:%d] =/~ no overlap", __FILE__, __LINE__); } return 0; } /** * @brief Subtract a set from another set * * @param greq Request for which all this happens * @param con Containing constraint (for con_false marking) * @param accu constraint to remove from * @param in incoming gs to remove * * @return false if a previously non-empty list has * been reduced to an empty one, true otherwise. */ bool graphd_guid_set_subtract(graphd_request *greq, graphd_guid_set *accu, graphd_guid_set const *in) { cl_handle *cl = graphd_request_cl(greq); size_t i; cl_assert(cl, accu != NULL); cl_assert(cl, in != NULL); if (accu->gs_n == 0) accu->gs_null = true; if (in->gs_n == 0) { /* <in> is {null}. If <accu> contains null * (or is null), and accu has nothing left * after the subtraction, the result set is empty. */ accu->gs_null = false; return accu->gs_n > 0; } if (accu->gs_n == 0) { /* accu is {null}. If <in> contains null, * the result set is empty (and the call * returns false.) */ return !in->gs_null; } /* Remove guids from <accu> that are in <in>. * Return whether the result set is empty after that. 
*/ if (in->gs_null) accu->gs_null = false; for (i = 0; i < in->gs_n; i++) (void)graphd_guid_set_delete(accu, in->gs_guid + i); return accu->gs_n > 0 || accu->gs_null; } /** * @brief "OR" of two guid sets * * "in" is burned after the call, and will be freed as * part of the request heap. * * @param greq Request for which all this happens * @param con Containing constraint (for con_false marking) * @param accu constraint to merge into * @param in incoming gs to merge * * @return 0 on success, otherwise a nonzero error code. */ int graphd_guid_set_union(graphd_request *greq, graphd_guid_set *accu, graphd_guid_set *in) { int err = 0; cl_handle *cl = graphd_request_cl(greq); size_t i; cl_assert(cl, accu != NULL); cl_assert(cl, in != NULL); accu->gs_null |= (in->gs_null || in->gs_n == 0); if (in->gs_n == 0) return 0; if (accu->gs_n == 0) { cl_assert(cl, in->gs_n != 0); /* <accu> is {null}. Move <in> into <accu> and * add {null} to that. */ graphd_guid_set_move(accu, in); accu->gs_null = true; return 0; } /* Add <in>'s GUIDs to <accu>. (The duplicate detection * is already done - clumsily - in constraint_add). */ for (i = 0; i < in->gs_n; i++) { err = graphd_guid_set_add(greq, accu, in->gs_guid + i); if (err != 0) return err; } return 0; } /** * @brief "OR" of two guid sets * * "in" is burned after the call, and will be freed as * part of the request heap. * * @param greq Request for which all this happens * @param con Containing constraint (for con_false marking) * @param accu constraint to merge into * @param in incoming gs to merge * * @return 0 on success, otherwise a nonzero error code. */ void graphd_guid_set_dump(cl_handle *cl, graphd_guid_set const *gs) { size_t i; char buf[GRAPH_GUID_SIZE]; if (gs->gs_null) cl_log(cl, CL_LEVEL_VERBOSE, " [flag] null"); for (i = 0; i < gs->gs_n; i++) cl_log(cl, CL_LEVEL_VERBOSE, " [%zu] %s", i, graph_guid_to_string(gs->gs_guid + i, buf, sizeof buf)); }
11,976
5,169
{ "name": "ZJSPhotoManager", "version": "0.2.0", "summary": "a convenient photo manager tool for ios project.", "description": "a convenient photo manager tool for ios project, it is compatible with both ALAssetsLibrary and PHPhotoLibrary.", "homepage": "https://github.com/allenzjs/ZJSPhotoManager", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "allenzjs": "<EMAIL>" }, "source": { "git": "https://github.com/allenzjs/ZJSPhotoManager.git", "tag": "0.2.0" }, "platforms": { "ios": "8.0" }, "source_files": "ZJSPhotoManager/Classes/**/*" }
236
14,668
# Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import unittest class AutofillTestResult(unittest.TextTestResult): """A test result class that can print formatted text results to a stream. Used by AutofillTestRunner. """ def startTest(self, test): """Called when a test is started. """ super(unittest.TextTestResult, self).startTest(test) if self.showAll: self.stream.write('Running ') self.stream.write(self.getDescription(test)) self.stream.write('\n') self.stream.flush() def addFailure(self, test, err): """Logs a test failure as part of the specified test. Overloaded to not include the stack trace. Args: err: A tuple of values as returned by sys.exc_info(). """ err = (None, err[1], None) # self.failures.append((test, str(exception))) # self._mirrorOutput = True super(AutofillTestResult, self).addFailure(test, err) class AutofillTestRunner(unittest.TextTestRunner): """An autofill test runner class that displays results in textual form. It prints out the names of tests as they are run, errors as they occur, and a summary of the results at the end of the test run. """ resultclass = AutofillTestResult
438
1,734
<gh_stars>1000+ #pragma once #include "sc2api/sc2_game_settings.h" namespace sc2 { bool TestFeatureLayers(int argc, char** argv); }
58
892
<reponame>westonsteimel/advisory-database-github { "schema_version": "1.2.0", "id": "GHSA-m87h-fqqj-mh8j", "modified": "2022-04-22T00:24:20Z", "published": "2022-04-22T00:24:20Z", "aliases": [ "CVE-2011-1474" ], "details": "A locally locally exploitable DOS vulnerability was found in pax-linux versions 2.6.32.33-test79.patch, 2.6.38-test3.patch, and 2.6.37.4-test14.patch. A bad bounds check in arch_get_unmapped_area_topdown triggered by programs doing an mmap after a MAP_GROWSDOWN mmap will create an infinite loop condition without releasing the VM semaphore eventually leading to a system crash.", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-1474" }, { "type": "WEB", "url": "https://security-tracker.debian.org/tracker/CVE-2011-1474" }, { "type": "WEB", "url": "http://seclists.org/oss-sec/2011/q1/579" } ], "database_specific": { "cwe_ids": [ ], "severity": null, "github_reviewed": false } }
472
5,079
<filename>apps/oozie/src/oozie/migrations/0005_initial.py # -*- coding: utf-8 -*- # Generated by Django 1.11 on 2018-06-06 18:55 from __future__ import unicode_literals from django.conf import settings import django.core.validators from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('oozie', '0004_initial'), ] operations = [ migrations.AddField( model_name='link', name='child', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parent_node', to='oozie.Node', verbose_name=b''), ), migrations.AddField( model_name='link', name='parent', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='child_node', to='oozie.Node'), ), migrations.AddField( model_name='job', name='owner', field=models.ForeignKey(help_text='Person who can modify the job.', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Owner'), ), migrations.AddField( model_name='history', name='job', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oozie.Job'), ), ]
654
369
// Copyright (c) 2017-2021, Mudita <NAME>. All rights reserved. // For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md #include "ALS31300.hpp" namespace drivers::als31300 { conf_reg::conf_reg(whole_reg_t whole_reg) { user_eeprom = whole_reg & 0b11111; int_latch_enable = (whole_reg >> 5) & 0b1; channel_X_en = (whole_reg >> 6) & 0b1; channel_Y_en = (whole_reg >> 7) & 0b1; channel_Z_en = (whole_reg >> 8) & 0b1; I2C_threshold = (whole_reg >> 9) & 0b1; slave_addr = (whole_reg >> 10) & 0b1111111; disable_slave_ADC = (whole_reg >> 17) & 0b1; I2C_CRC_en = (whole_reg >> 18) & 0b1; hall_mode = (whole_reg >> 19) & 0b11; bandwidth = (whole_reg >> 21) & 0b111; RESERVED = (whole_reg >> 24) & 0xFF; } conf_reg::operator whole_reg_t() const { return (user_eeprom & 0b11111) | (int_latch_enable & 0b1) << 5 | (channel_X_en & 0b1) << 6 | (channel_Y_en & 0b1) << 7 | (channel_Z_en & 0b1) << 8 | (I2C_threshold & 0b1) << 9 | (slave_addr & 0b1111111) << 10 | (disable_slave_ADC & 0b1) << 17 | (I2C_CRC_en & 0b1) << 18 | (hall_mode & 0b11) << 19 | (bandwidth & 0b111) << 21 | (RESERVED & 0xFF) << 24; } int_reg::int_reg(whole_reg_t whole_reg) { int_X_threshold = whole_reg & 0b111111; int_Y_threshold = (whole_reg >> 6) & 0b111111; int_Z_threshold = (whole_reg >> 12) & 0b111111; int_X_en = (whole_reg >> 18) & 0b1; int_Y_en = (whole_reg >> 19) & 0b1; int_Z_en = (whole_reg >> 20) & 0b1; int_eeprom_en = (whole_reg >> 21) & 0b1; int_eeprom_status = (whole_reg >> 22) & 0b1; int_mode = (whole_reg >> 23) & 0b1; int_threshold_signed = (whole_reg >> 24) & 0b1; RESERVED = (whole_reg >> 25) & 0b1111111; } int_reg::operator whole_reg_t() const { return (int_X_threshold & 0b111111) | (int_Y_threshold & 0b111111) << 6 | (int_Z_threshold & 0b111111) << 12 | (int_X_en & 0b1) << 18 | (int_Y_en & 0b1) << 19 | (int_Z_en & 0b1) << 20 | (int_eeprom_en & 0b1) << 21 | (int_eeprom_status & 0b1) << 22 | (int_mode & 0b1) << 23 | (int_threshold_signed & 0b1) << 24 | (RESERVED & 0b1111111) << 25; } 
    // Unpack the power-management register from a raw 32-bit word.
    pwr_reg::pwr_reg(whole_reg_t whole_reg)
    {
        sleep             = whole_reg & 0b11;              // bits 0-1
        I2C_loop_mode     = (whole_reg >> 2) & 0b11;       // bits 2-3
        count_max_LP_mode = (whole_reg >> 4) & 0b111;      // bits 4-6
        RESERVED          = (whole_reg >> 7) & 0x1FFFFFF;  // bits 7-31 (kept verbatim)
    }

    // Pack the power-management register back into wire format.
    pwr_reg::operator whole_reg_t() const
    {
        return (sleep & 0b11) |
               (I2C_loop_mode & 0b11) << 2 |
               (count_max_LP_mode & 0b111) << 4 |
               (RESERVED & 0x1FFFFFF) << 7;
    }

    // Unpack the measurement MSB register: upper 8 bits of each axis plus
    // temperature MSBs and the data-ready / interrupt flags.
    measurements_MSB_reg::measurements_MSB_reg(whole_reg_t whole_reg)
    {
        temperature_MSB = whole_reg & 0b111111;       // bits 0-5
        int_flag        = (whole_reg >> 6) & 0b1;     // bit 6
        new_data_flag   = (whole_reg >> 7) & 0b1;     // bit 7
        Z_MSB           = (whole_reg >> 8) & 0xFF;    // bits 8-15
        Y_MSB           = (whole_reg >> 16) & 0xFF;   // bits 16-23
        X_MSB           = (whole_reg >> 24) & 0xFF;   // bits 24-31
    }

    // Pack the measurement MSB register back into wire format.
    measurements_MSB_reg::operator whole_reg_t() const
    {
        return (temperature_MSB & 0b111111) |
               (int_flag & 0b1) << 6 |
               (new_data_flag & 0b1) << 7 |
               (Z_MSB & 0xFF) << 8 |
               (Y_MSB & 0xFF) << 16 |
               (X_MSB & 0xFF) << 24;
    }

    // Unpack the measurement LSB register: lower 4 bits of each axis plus
    // temperature LSBs and status bits.
    measurements_LSB_reg::measurements_LSB_reg(whole_reg_t whole_reg)
    {
        temperature_LSB          = whole_reg & 0b111111;      // bits 0-5
        hall_mode_status         = (whole_reg >> 6) & 0b11;   // bits 6-7
        Z_LSB                    = (whole_reg >> 8) & 0b1111; // bits 8-11
        Y_LSB                    = (whole_reg >> 12) & 0b1111; // bits 12-15
        X_LSB                    = (whole_reg >> 16) & 0b1111; // bits 16-19
        int_eeprom_write_pending = (whole_reg >> 20) & 0b1;   // bit 20
        RESERVED                 = (whole_reg >> 21) & 0x7FF; // bits 21-31 (kept verbatim)
    }

    // Pack the measurement LSB register back into wire format.
    measurements_LSB_reg::operator whole_reg_t() const
    {
        return (temperature_LSB & 0b111111) |
               (hall_mode_status & 0b11) << 6 |
               (Z_LSB & 0b1111) << 8 |
               (Y_LSB & 0b1111) << 12 |
               (X_LSB & 0b1111) << 16 |
               (int_eeprom_write_pending & 0b1) << 20 |
               (RESERVED & 0x7FF) << 21;
    }

    // Convert a raw temperature reading to degrees.
    // The constants 1708 (offset) and 0.0737 (scale) presumably come from
    // the ALS31300 datasheet conversion formula — TODO confirm.
    float temperature_convert(uint16_t raw_temperature)
    {
        const int32_t intermediate = raw_temperature - 1708;
        return intermediate * 0.0737;
    }

    // Sign-extend a `bit_length`-bit two's-complement value stored in a
    // uint16_t into a signed 16-bit integer. Requires bit_length <= 16.
    int16_t measurement_sign_convert(uint16_t raw_measurement, uint8_t bit_length)
    {
        // via: https://stackoverflow.com/questions/16946801/n-bit-2s-binary-to-decimal-in-c
        const auto sign_flag = 1 << (bit_length - 1);
        if (raw_measurement & sign_flag) {
            // Sign bit set: OR in the high bits so the implicit narrowing
            // back to uint16_t yields the correct negative pattern.
            raw_measurement |= -(1 << bit_length);
        }
        return raw_measurement;
    }
} // namespace drivers::als31300
2,761
2,151
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CHROME_BROWSER_SAFE_BROWSING_SETTINGS_RESET_PROMPT_SETTINGS_RESET_PROMPT_PREFS_MANAGER_H_
#define CHROME_BROWSER_SAFE_BROWSING_SETTINGS_RESET_PROMPT_SETTINGS_RESET_PROMPT_PREFS_MANAGER_H_

#include "base/time/time.h"

class Profile;
class PrefService;

namespace user_prefs {
class PrefRegistrySyncable;
}  // namespace user_prefs

namespace safe_browsing {

// Class responsible for reading and updating the preferences related to the
// settings reset prompt.
class SettingsResetPromptPrefsManager {
 public:
  // |prompt_wave| should be set to the prompt wave parameter obtained from the
  // |SettingsResetPromptConfig| class. If a new prompt wave has been started
  // (i.e., if the |prompt_wave| passed in to the constructor is greater than
  // the one stored in preferences), other related settings in preferences will
  // be reset.
  SettingsResetPromptPrefsManager(Profile* profile, int prompt_wave);
  ~SettingsResetPromptPrefsManager();

  // Registers the prompt-related preferences on |registry|.
  static void RegisterProfilePrefs(user_prefs::PrefRegistrySyncable* registry);

  // Accessors for the timestamps recorded by the RecordPromptShownFor*()
  // calls below.
  base::Time LastTriggeredPrompt() const;
  base::Time LastTriggeredPromptForDefaultSearch() const;
  base::Time LastTriggeredPromptForStartupUrls() const;
  base::Time LastTriggeredPromptForHomepage() const;

  // Persist the time at which the prompt was shown for each setting type.
  void RecordPromptShownForDefaultSearch(const base::Time& prompt_time);
  void RecordPromptShownForStartupUrls(const base::Time& prompt_time);
  void RecordPromptShownForHomepage(const base::Time& prompt_time);

 private:
  // Not owned. NOTE(review): both are assumed to outlive this manager —
  // confirm against the owning factory.
  Profile* const profile_;
  PrefService* const prefs_;
};

}  // namespace safe_browsing

#endif  // CHROME_BROWSER_SAFE_BROWSING_SETTINGS_RESET_PROMPT_SETTINGS_RESET_PROMPT_PREFS_MANAGER_H_
603
6,224
/* ST Microelectronics LIS2DS12 3-axis accelerometer driver * * Copyright (c) 2019 STMicroelectronics * * SPDX-License-Identifier: Apache-2.0 * * Datasheet: * https://www.st.com/resource/en/datasheet/lis2ds12.pdf */ #define DT_DRV_COMPAT st_lis2ds12 #include <drivers/sensor.h> #include <kernel.h> #include <device.h> #include <init.h> #include <string.h> #include <sys/byteorder.h> #include <sys/__assert.h> #include <logging/log.h> #include "lis2ds12.h" LOG_MODULE_REGISTER(LIS2DS12, CONFIG_SENSOR_LOG_LEVEL); static int lis2ds12_set_odr(const struct device *dev, uint8_t odr) { const struct lis2ds12_config *cfg = dev->config; stmdev_ctx_t *ctx = (stmdev_ctx_t *)&cfg->ctx; lis2ds12_odr_t val; /* check if power off */ if (odr == 0U) { LOG_DBG("%s: set power-down", dev->name); return lis2ds12_xl_data_rate_set(ctx, LIS2DS12_XL_ODR_OFF); } /* * odr >= 1600Hz are available in HF mode only * 12,5Hz <= odr <= 800Hz are available in LP and HR mode only * odr == 1Hz is available in LP mode only */ if ((odr >= 9 && cfg->pm != 3) || (odr < 9 && cfg->pm == 3) || (odr == 1 && cfg->pm != 1)) { LOG_ERR("%s: bad odr and pm combination", dev->name); return -ENOTSUP; } switch (odr) { case 1: val = LIS2DS12_XL_ODR_1Hz_LP; break; case 2: val = (cfg->pm == 1) ? LIS2DS12_XL_ODR_12Hz5_LP : LIS2DS12_XL_ODR_12Hz5_HR; break; case 3: val = (cfg->pm == 1) ? LIS2DS12_XL_ODR_25Hz_LP : LIS2DS12_XL_ODR_25Hz_HR; break; case 4: val = (cfg->pm == 1) ? LIS2DS12_XL_ODR_50Hz_LP : LIS2DS12_XL_ODR_50Hz_HR; break; case 5: val = (cfg->pm == 1) ? LIS2DS12_XL_ODR_100Hz_LP : LIS2DS12_XL_ODR_100Hz_HR; break; case 6: val = (cfg->pm == 1) ? LIS2DS12_XL_ODR_200Hz_LP : LIS2DS12_XL_ODR_200Hz_HR; break; case 7: val = (cfg->pm == 1) ? LIS2DS12_XL_ODR_400Hz_LP : LIS2DS12_XL_ODR_400Hz_HR; break; case 8: val = (cfg->pm == 1) ? 
LIS2DS12_XL_ODR_800Hz_LP : LIS2DS12_XL_ODR_800Hz_HR; break; case 9: val = LIS2DS12_XL_ODR_1k6Hz_HF; break; case 10: val = LIS2DS12_XL_ODR_3k2Hz_HF; break; case 11: val = LIS2DS12_XL_ODR_6k4Hz_HF; break; default: LOG_ERR("%s: bad odr %d", dev->name, odr); return -ENOTSUP; } return lis2ds12_xl_data_rate_set(ctx, val); } static int lis2ds12_set_range(const struct device *dev, uint8_t range) { int err; struct lis2ds12_data *data = dev->data; const struct lis2ds12_config *cfg = dev->config; stmdev_ctx_t *ctx = (stmdev_ctx_t *)&cfg->ctx; switch (range) { default: case 2U: err = lis2ds12_xl_full_scale_set(ctx, LIS2DS12_2g); data->gain = lis2ds12_from_fs2g_to_mg(1); break; case 4U: err = lis2ds12_xl_full_scale_set(ctx, LIS2DS12_4g); data->gain = lis2ds12_from_fs4g_to_mg(1); break; case 8U: err = lis2ds12_xl_full_scale_set(ctx, LIS2DS12_8g); data->gain = lis2ds12_from_fs8g_to_mg(1); break; case 16U: err = lis2ds12_xl_full_scale_set(ctx, LIS2DS12_16g); data->gain = lis2ds12_from_fs16g_to_mg(1); break; } return err; } static int lis2ds12_accel_config(const struct device *dev, enum sensor_channel chan, enum sensor_attribute attr, const struct sensor_value *val) { switch (attr) { case SENSOR_ATTR_FULL_SCALE: return lis2ds12_set_range(dev, sensor_ms2_to_g(val)); case SENSOR_ATTR_SAMPLING_FREQUENCY: LOG_DBG("%s: set odr to %d Hz", dev->name, val->val1); return lis2ds12_set_odr(dev, LIS2DS12_ODR_TO_REG(val->val1)); default: LOG_DBG("Accel attribute not supported."); return -ENOTSUP; } return 0; } static int lis2ds12_attr_set(const struct device *dev, enum sensor_channel chan, enum sensor_attribute attr, const struct sensor_value *val) { switch (chan) { case SENSOR_CHAN_ACCEL_XYZ: return lis2ds12_accel_config(dev, chan, attr, val); default: LOG_WRN("attr_set() not supported on this channel."); return -ENOTSUP; } return 0; } static int lis2ds12_sample_fetch_accel(const struct device *dev) { struct lis2ds12_data *data = dev->data; const struct lis2ds12_config *cfg = dev->config; 
stmdev_ctx_t *ctx = (stmdev_ctx_t *)&cfg->ctx; int16_t buf[3]; /* fetch raw data sample */ if (lis2ds12_acceleration_raw_get(ctx, buf) < 0) { LOG_ERR("Failed to fetch raw data sample"); return -EIO; } data->sample_x = sys_le16_to_cpu(buf[0]); data->sample_y = sys_le16_to_cpu(buf[1]); data->sample_z = sys_le16_to_cpu(buf[2]); return 0; } static int lis2ds12_sample_fetch(const struct device *dev, enum sensor_channel chan) { switch (chan) { case SENSOR_CHAN_ACCEL_XYZ: lis2ds12_sample_fetch_accel(dev); break; #if defined(CONFIG_LIS2DS12_ENABLE_TEMP) case SENSOR_CHAN_DIE_TEMP: /* ToDo: lis2ds12_sample_fetch_temp(dev) */ break; #endif case SENSOR_CHAN_ALL: lis2ds12_sample_fetch_accel(dev); #if defined(CONFIG_LIS2DS12_ENABLE_TEMP) /* ToDo: lis2ds12_sample_fetch_temp(dev) */ #endif break; default: return -ENOTSUP; } return 0; } static inline void lis2ds12_convert(struct sensor_value *val, int raw_val, float gain) { int64_t dval; /* Gain is in mg/LSB */ /* Convert to m/s^2 */ dval = ((int64_t)raw_val * gain * SENSOR_G) / 1000; val->val1 = dval / 1000000LL; val->val2 = dval % 1000000LL; } static inline int lis2ds12_get_channel(enum sensor_channel chan, struct sensor_value *val, struct lis2ds12_data *data, float gain) { switch (chan) { case SENSOR_CHAN_ACCEL_X: lis2ds12_convert(val, data->sample_x, gain); break; case SENSOR_CHAN_ACCEL_Y: lis2ds12_convert(val, data->sample_y, gain); break; case SENSOR_CHAN_ACCEL_Z: lis2ds12_convert(val, data->sample_z, gain); break; case SENSOR_CHAN_ACCEL_XYZ: lis2ds12_convert(val, data->sample_x, gain); lis2ds12_convert(val + 1, data->sample_y, gain); lis2ds12_convert(val + 2, data->sample_z, gain); break; default: return -ENOTSUP; } return 0; } static int lis2ds12_channel_get(const struct device *dev, enum sensor_channel chan, struct sensor_value *val) { struct lis2ds12_data *data = dev->data; return lis2ds12_get_channel(chan, val, data, data->gain); } static const struct sensor_driver_api lis2ds12_driver_api = { .attr_set = 
lis2ds12_attr_set, #if defined(CONFIG_LIS2DS12_TRIGGER) .trigger_set = lis2ds12_trigger_set, #endif .sample_fetch = lis2ds12_sample_fetch, .channel_get = lis2ds12_channel_get, }; static int lis2ds12_init(const struct device *dev) { const struct lis2ds12_config * const cfg = dev->config; stmdev_ctx_t *ctx = (stmdev_ctx_t *)&cfg->ctx; uint8_t chip_id; int ret; /* check chip ID */ ret = lis2ds12_device_id_get(ctx, &chip_id); if (ret < 0) { LOG_ERR("%s: Not able to read dev id", dev->name); return ret; } if (chip_id != LIS2DS12_ID) { LOG_ERR("%s: Invalid chip ID 0x%02x", dev->name, chip_id); return -EINVAL; } /* reset device */ ret = lis2ds12_reset_set(ctx, PROPERTY_ENABLE); if (ret < 0) { return ret; } k_busy_wait(100); LOG_DBG("%s: chip id 0x%x", dev->name, chip_id); #ifdef CONFIG_LIS2DS12_TRIGGER ret = lis2ds12_trigger_init(dev); if (ret < 0) { LOG_ERR("%s: Failed to initialize triggers", dev->name); return ret; } #endif /* set sensor default pm and odr */ LOG_DBG("%s: pm: %d, odr: %d", dev->name, cfg->pm, cfg->odr); ret = lis2ds12_set_odr(dev, (cfg->pm == 0) ? 0 : cfg->odr); if (ret < 0) { LOG_ERR("%s: odr init error (12.5 Hz)", dev->name); return ret; } /* set sensor default scale */ LOG_DBG("%s: range is %d", dev->name, cfg->range); ret = lis2ds12_set_range(dev, cfg->range); if (ret < 0) { LOG_ERR("%s: range init error %d", dev->name, cfg->range); return ret; } return 0; } #if DT_NUM_INST_STATUS_OKAY(DT_DRV_COMPAT) == 0 #warning "LIS2DS12 driver enabled without any devices" #endif /* * Device creation macro, shared by LIS2DS12_DEFINE_SPI() and * LIS2DS12_DEFINE_I2C(). */ #define LIS2DS12_DEVICE_INIT(inst) \ DEVICE_DT_INST_DEFINE(inst, \ lis2ds12_init, \ NULL, \ &lis2ds12_data_##inst, \ &lis2ds12_config_##inst, \ POST_KERNEL, \ CONFIG_SENSOR_INIT_PRIORITY, \ &lis2ds12_driver_api); /* * Instantiation macros used when a device is on a SPI bus. 
*/ #ifdef CONFIG_LIS2DS12_TRIGGER #define LIS2DS12_CFG_IRQ(inst) \ .gpio_int = GPIO_DT_SPEC_INST_GET(inst, irq_gpios), #else #define LIS2DS12_CFG_IRQ(inst) #endif /* CONFIG_LIS2DS12_TRIGGER */ #define LIS2DS12_SPI_OPERATION (SPI_WORD_SET(8) | \ SPI_OP_MODE_MASTER | \ SPI_MODE_CPOL | \ SPI_MODE_CPHA) \ #define LIS2DS12_CONFIG_SPI(inst) \ { \ .ctx = { \ .read_reg = \ (stmdev_read_ptr) stmemsc_spi_read, \ .write_reg = \ (stmdev_write_ptr) stmemsc_spi_write, \ .handle = \ (void *)&lis2ds12_config_##inst.stmemsc_cfg, \ }, \ .stmemsc_cfg = { \ .spi = SPI_DT_SPEC_INST_GET(inst, \ LIS2DS12_SPI_OPERATION, \ 0), \ }, \ .range = DT_INST_PROP(inst, range), \ .pm = DT_INST_PROP(inst, power_mode), \ .odr = DT_INST_PROP(inst, odr), \ COND_CODE_1(DT_INST_NODE_HAS_PROP(inst, irq_gpios), \ (LIS2DS12_CFG_IRQ(inst)), ()) \ } /* * Instantiation macros used when a device is on an I2C bus. */ #define LIS2DS12_CONFIG_I2C(inst) \ { \ .ctx = { \ .read_reg = \ (stmdev_read_ptr) stmemsc_i2c_read, \ .write_reg = \ (stmdev_write_ptr) stmemsc_i2c_write, \ .handle = \ (void *)&lis2ds12_config_##inst.stmemsc_cfg, \ }, \ .stmemsc_cfg = { \ .i2c = I2C_DT_SPEC_INST_GET(inst), \ }, \ .range = DT_INST_PROP(inst, range), \ .pm = DT_INST_PROP(inst, power_mode), \ .odr = DT_INST_PROP(inst, odr), \ COND_CODE_1(DT_INST_NODE_HAS_PROP(inst, irq_gpios), \ (LIS2DS12_CFG_IRQ(inst)), ()) \ } /* * Main instantiation macro. Use of COND_CODE_1() selects the right * bus-specific macro at preprocessor time. */ #define LIS2DS12_DEFINE(inst) \ static struct lis2ds12_data lis2ds12_data_##inst; \ static const struct lis2ds12_config lis2ds12_config_##inst = \ COND_CODE_1(DT_INST_ON_BUS(inst, spi), \ (LIS2DS12_CONFIG_SPI(inst)), \ (LIS2DS12_CONFIG_I2C(inst))); \ LIS2DS12_DEVICE_INIT(inst) DT_INST_FOREACH_STATUS_OKAY(LIS2DS12_DEFINE)
5,174
911
package org.apache.bcel.util;

/* ====================================================================
 * The Apache Software License, Version 1.1
 *
 * Copyright (c) 2001 The Apache Software Foundation.  All rights
 * reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. The end-user documentation included with the redistribution,
 *    if any, must include the following acknowledgment:
 *       "This product includes software developed by the
 *        Apache Software Foundation (http://www.apache.org/)."
 *    Alternately, this acknowledgment may appear in the software itself,
 *    if and wherever such third-party acknowledgments normally appear.
 *
 * 4. The names "Apache" and "Apache Software Foundation" and
 *    "Apache BCEL" must not be used to endorse or promote products
 *    derived from this software without prior written permission. For
 *    written permission, please contact <EMAIL>.
 *
 * 5. Products derived from this software may not be called "Apache",
 *    "Apache BCEL", nor may "Apache" appear in their name, without
 *    prior written permission of the Apache Software Foundation.
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
 * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 */

import java.util.*;
import java.util.zip.*;
import java.io.*;

/**
 * Responsible for loading (class) files from the CLASSPATH. Inspired by
 * sun.tools.ClassPath.
 *
 * <p>Review fixes applied:
 * <ul>
 *   <li>{@link #getClassPath()} previously appended extension jar names to
 *       the <em>whole</em> {@code java.ext.dirs} property value instead of
 *       the specific extension directory, producing bogus paths whenever
 *       more than one extension directory was configured.</li>
 *   <li>{@link #getBytes(String, String)} previously sized its buffer with
 *       {@code InputStream.available()}, which is not guaranteed to be the
 *       full stream length (notably for zip-entry streams); the file is now
 *       read to end with a loop.</li>
 * </ul>
 *
 * @version $Id: ClassPath.java,v 1.2 2006/08/23 13:48:30 andos Exp $
 * @author <A HREF="mailto:<EMAIL>"><NAME></A>
 */
public class ClassPath implements Serializable {

	/**
	 *
	 */
	private static final long serialVersionUID = 8708518440751729976L;

	public static final ClassPath SYSTEM_CLASS_PATH = new ClassPath();

	/** Search locations, in lookup order. */
	private PathEntry[] paths;

	/** The original path string this instance was built from. */
	private String class_path;

	/**
	 * Search for classes in given path.
	 *
	 * @param class_path path-separator-delimited list of directories and
	 *                   zip/jar files; non-existent components are skipped
	 */
	public ClassPath(String class_path) {
		this.class_path = class_path;

		ArrayList<PathEntry> vec = new ArrayList<PathEntry>();

		for (StringTokenizer tok = new StringTokenizer(class_path, System
				.getProperty("path.separator")); tok.hasMoreTokens();) {
			String path = tok.nextToken();

			if (!path.equals("")) {
				File file = new File(path);

				try {
					if (file.exists()) {
						if (file.isDirectory())
							vec.add(new Dir(path));
						else
							vec.add(new Zip(new ZipFile(file)));
					}
				} catch (IOException e) {
					// Best-effort: an unreadable component is reported but
					// does not abort construction of the remaining entries.
					System.err
							.println("CLASSPATH component " + file + ": " + e);
				}
			}
		}

		paths = new PathEntry[vec.size()];
		vec.toArray(paths);
	}

	/**
	 * Search for classes in CLASSPATH.
	 *
	 * @deprecated Use SYSTEM_CLASS_PATH constant
	 */
	public ClassPath() {
		this(getClassPath());
	}

	/** @return used class path string */
	public String toString() {
		return class_path;
	}

	public int hashCode() {
		return class_path.hashCode();
	}

	public boolean equals(Object o) {
		if (o instanceof ClassPath) {
			return class_path.equals(((ClassPath) o).class_path);
		}

		return false;
	}

	/** Append each existing component of {@code path} to {@code list}. */
	private static final void getPathComponents(String path,
			ArrayList<String> list) {
		if (path != null) {
			StringTokenizer tok = new StringTokenizer(path, File.pathSeparator);

			while (tok.hasMoreTokens()) {
				String name = tok.nextToken();
				File file = new File(name);

				if (file.exists())
					list.add(name);
			}
		}
	}

	/**
	 * Checks for class path components in the following properties:
	 * "java.class.path", "sun.boot.class.path", "java.ext.dirs"
	 *
	 * @return class path as used by default by BCEL
	 */
	public static final String getClassPath() {
		String class_path = System.getProperty("java.class.path");
		String boot_path = System.getProperty("sun.boot.class.path");
		String ext_path = System.getProperty("java.ext.dirs");

		ArrayList<String> list = new ArrayList<String>();

		getPathComponents(class_path, list);
		getPathComponents(boot_path, list);

		ArrayList<String> dirs = new ArrayList<String>();
		getPathComponents(ext_path, dirs);

		for (Iterator<String> e = dirs.iterator(); e.hasNext();) {
			File ext_dir = new File(e.next());
			String[] extensions = ext_dir.list(new FilenameFilter() {
				public boolean accept(File dir, String name) {
					name = name.toLowerCase();
					return name.endsWith(".zip") || name.endsWith(".jar");
				}
			});

			if (extensions != null)
				for (int i = 0; i < extensions.length; i++)
					// FIX: use this extension directory's own path, not the
					// whole (possibly multi-entry) java.ext.dirs value.
					list.add(ext_dir.getPath() + File.separatorChar
							+ extensions[i]);
		}

		StringBuilder buf = new StringBuilder();

		for (Iterator<String> e = list.iterator(); e.hasNext();) {
			buf.append(e.next());

			if (e.hasNext())
				buf.append(File.pathSeparatorChar);
		}

		return buf.toString().intern();
	}

	/**
	 * @param name
	 *            fully qualified class name, e.g. java.lang.String
	 * @return input stream for class
	 */
	public InputStream getInputStream(String name) throws IOException {
		String suffix;
		if (name.endsWith(".class")) {
			suffix = "";
		} else {
			suffix = ".class";
		}
		return getInputStream(name, suffix);
	}

	/**
	 * Return stream for class or resource on CLASSPATH.
	 *
	 * @param name
	 *            fully qualified file name, e.g. java/lang/String
	 * @param suffix
	 *            file name ends with suff, e.g. .java
	 * @return input stream for file on class path
	 */
	public InputStream getInputStream(String name, String suffix)
			throws IOException {
		InputStream is = null;

		try {
			is = getClass().getClassLoader().getResourceAsStream(name + suffix);
		} catch (Exception ignored) {
			// Deliberate best-effort: fall through to the explicit path
			// lookup below when the context class loader cannot help.
		}

		if (is != null)
			return is;

		return getClassFile(name, suffix).getInputStream();
	}

	/**
	 * @param name
	 *            fully qualified file name, e.g. java/lang/String
	 * @param suffix
	 *            file name ends with suff, e.g. .java
	 * @return class file for the java class
	 */
	public ClassFile getClassFile(String name, String suffix)
			throws IOException {
		for (int i = 0; i < paths.length; i++) {
			ClassFile cf;

			if ((cf = paths[i].getClassFile(name, suffix)) != null)
				return cf;
		}

		throw new IOException("Couldn't find: " + name + suffix);
	}

	/**
	 * @param name
	 *            fully qualified class name, e.g. java.lang.String
	 * @return input stream for class
	 */
	public ClassFile getClassFile(String name) throws IOException {
		return getClassFile(name, ".class");
	}

	/**
	 * @param name
	 *            fully qualified file name, e.g. java/lang/String
	 * @param suffix
	 *            file name ends with suffix, e.g. .java
	 * @return byte array for file on class path
	 */
	public byte[] getBytes(String name, String suffix) throws IOException {
		InputStream is = getInputStream(name, suffix);

		if (is == null)
			throw new IOException("Couldn't find: " + name + suffix);

		try {
			// FIX: read to end-of-stream instead of trusting available(),
			// which may under-report the remaining length.
			ByteArrayOutputStream out = new ByteArrayOutputStream();
			byte[] buf = new byte[4096];
			int n;

			while ((n = is.read(buf)) != -1)
				out.write(buf, 0, n);

			return out.toByteArray();
		} finally {
			is.close();
		}
	}

	/**
	 * @return byte array for class
	 */
	public byte[] getBytes(String name) throws IOException {
		return getBytes(name, ".class");
	}

	/**
	 * @param name
	 *            name of file to search for, e.g. java/lang/String.java
	 * @return full (canonical) path for file
	 */
	public String getPath(String name) throws IOException {
		int index = name.lastIndexOf('.');
		String suffix = "";

		if (index > 0) {
			suffix = name.substring(index);
			name = name.substring(0, index);
		}

		return getPath(name, suffix);
	}

	/**
	 * @param name
	 *            name of file to search for, e.g. java/lang/String
	 * @param suffix
	 *            file name suffix, e.g. .java
	 * @return full (canonical) path for file, if it exists
	 */
	public String getPath(String name, String suffix) throws IOException {
		return getClassFile(name, suffix).getPath();
	}

	/** One search location (directory or zip file). */
	private static abstract class PathEntry implements Serializable {
		abstract ClassFile getClassFile(String name, String suffix)
				throws IOException;
	}

	/**
	 * Contains information about file/ZIP entry of the Java class.
	 */
	public interface ClassFile {
		/**
		 * @return input stream for class file.
		 */
		public abstract InputStream getInputStream() throws IOException;

		/**
		 * @return canonical path to class file.
		 */
		public abstract String getPath();

		/**
		 * @return base path of found class, i.e. class is contained relative to
		 *         that path, which may either denote a directory, or zip file
		 */
		public abstract String getBase();

		/**
		 * @return modification time of class file.
		 */
		public abstract long getTime();

		/**
		 * @return size of class file.
		 */
		public abstract long getSize();
	}

	/** PathEntry backed by a plain directory. */
	private static class Dir extends PathEntry {

		/**
		 *
		 */
		private static final long serialVersionUID = -1016179728689833015L;

		private String dir;

		Dir(String d) {
			dir = d;
		}

		ClassFile getClassFile(String name, String suffix) throws IOException {
			final File file = new File(dir + File.separatorChar
					+ name.replace('.', File.separatorChar) + suffix);

			return file.exists() ? new ClassFile() {
				public InputStream getInputStream() throws IOException {
					return new FileInputStream(file);
				}

				public String getPath() {
					try {
						return file.getCanonicalPath();
					} catch (IOException e) {
						return null;
					}
				}

				public long getTime() {
					return file.lastModified();
				}

				public long getSize() {
					return file.length();
				}

				public String getBase() {
					return dir;
				}
			} : null;
		}

		public String toString() {
			return dir;
		}
	}

	/**
	 * PathEntry backed by a zip/jar file.
	 *
	 * NOTE(review): the ZipFile is never closed; it lives as long as this
	 * ClassPath, matching the original behaviour.
	 */
	private static class Zip extends PathEntry {
		private ZipFile zip;

		Zip(ZipFile z) {
			zip = z;
		}

		ClassFile getClassFile(String name, String suffix) throws IOException {
			final ZipEntry entry = zip
					.getEntry(name.replace('.', '/') + suffix);

			return (entry != null) ? new ClassFile() {
				public InputStream getInputStream() throws IOException {
					return zip.getInputStream(entry);
				}

				public String getPath() {
					return entry.toString();
				}

				public long getTime() {
					return entry.getTime();
				}

				public long getSize() {
					return entry.getSize();
				}

				public String getBase() {
					return zip.getName();
				}
			} : null;
		}
	}
}
4,051
6,224
/*
 * Copyright (c) 2018 Intel Corporation
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <ztest.h>

/* Test cases implemented in the sibling source files of this suite. */
extern void test_isr_dynamic(void);
extern void test_nested_isr(void);
extern void test_prevent_interruption(void);
extern void test_isr_regular(void);
extern void test_isr_offload_job_multiple(void);
extern void test_isr_offload_job_identi(void);
extern void test_isr_offload_job(void);
extern void test_direct_interrupt(void);

/* ztest entry point: registers and runs the interrupt_feature suite. */
void test_main(void)
{
	ztest_test_suite(interrupt_feature,
			 ztest_unit_test(test_isr_dynamic),
			 ztest_unit_test(test_nested_isr),
			 ztest_unit_test(test_prevent_interruption),
			 ztest_unit_test(test_isr_regular),
			 ztest_unit_test(test_isr_offload_job_multiple),
			 ztest_unit_test(test_isr_offload_job_identi),
			 ztest_unit_test(test_isr_offload_job),
			 ztest_unit_test(test_direct_interrupt)
			 );
	ztest_run_test_suite(interrupt_feature);
}
373
3,402
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kylin.rest.controller;

import java.util.Collection;
import java.util.List;

import org.apache.kylin.shaded.com.google.common.collect.Lists;
import org.apache.kylin.metadata.model.DataModelDesc;
import org.apache.kylin.rest.request.HybridRequest;
import org.apache.kylin.rest.response.HybridRespone;
import org.apache.kylin.rest.service.HybridService;
import org.apache.kylin.storage.hybrid.HybridInstance;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

/**
 * REST endpoints for managing hybrid cube instances under "/hybrids".
 *
 * NOTE(review): the response type "HybridRespone" is misspelled upstream
 * ("Respone" vs "Response"); renaming it would break the public API, so it
 * is only flagged here.
 */
@Controller
@RequestMapping(value = "/hybrids")
public class HybridController extends BasicController {

    @Autowired
    private HybridService hybridService;

    /**
     * POST /hybrids — create a hybrid instance from the given hybrid name,
     * project, model and cube list. All four fields are required.
     */
    @RequestMapping(value = "", method = RequestMethod.POST, produces = { "application/json" })
    @ResponseBody
    public HybridRespone create(@RequestBody HybridRequest request) {
        checkRequiredArg("hybrid", request.getHybrid());
        checkRequiredArg("project", request.getProject());
        checkRequiredArg("model", request.getModel());
        checkRequiredArg("cubes", request.getCubes());
        HybridInstance hybridInstance = hybridService.createHybridInstance(request.getHybrid(), request.getProject(), request.getModel(), request.getCubes());
        return hybridInstance2response(hybridInstance);
    }

    /**
     * PUT /hybrids — update an existing hybrid instance. Same required
     * fields as {@link #create(HybridRequest)}.
     */
    @RequestMapping(value = "", method = RequestMethod.PUT, produces = { "application/json" })
    @ResponseBody
    public HybridRespone update(@RequestBody HybridRequest request) {
        checkRequiredArg("hybrid", request.getHybrid());
        checkRequiredArg("project", request.getProject());
        checkRequiredArg("model", request.getModel());
        checkRequiredArg("cubes", request.getCubes());
        HybridInstance hybridInstance = hybridService.updateHybridInstance(request.getHybrid(), request.getProject(), request.getModel(), request.getCubes());
        return hybridInstance2response(hybridInstance);
    }

    /** DELETE /hybrids — delete the named hybrid instance in a project. */
    @RequestMapping(value = "", method = RequestMethod.DELETE, produces = { "application/json" })
    @ResponseBody
    public void delete(String hybrid, String project) {
        checkRequiredArg("hybrid", hybrid);
        checkRequiredArg("project", project);
        hybridService.deleteHybridInstance(hybrid, project);
    }

    /**
     * GET /hybrids — list hybrid instances, optionally filtered by project
     * and/or model.
     */
    @RequestMapping(value = "", method = RequestMethod.GET, produces = { "application/json" })
    @ResponseBody
    public Collection<HybridRespone> list(@RequestParam(required = false) String project, @RequestParam(required = false) String model) {
        List<HybridInstance> hybridInstances = hybridService.listHybrids(project, model);
        List<HybridRespone> hybridRespones = Lists.newArrayListWithCapacity(hybridInstances.size());
        for (HybridInstance hybridInstance : hybridInstances) {
            hybridRespones.add(hybridInstance2response(hybridInstance));
        }
        return hybridRespones;
    }

    /**
     * GET /hybrids/{hybrid} — fetch a single hybrid instance by name.
     * NOTE(review): assumes the service returns non-null for a known name —
     * confirm how unknown names are handled upstream.
     */
    @RequestMapping(value = "{hybrid}", method = RequestMethod.GET, produces = { "application/json" })
    @ResponseBody
    public HybridRespone get(@PathVariable String hybrid) {
        HybridInstance hybridInstance = hybridService.getHybridInstance(hybrid);
        return hybridInstance2response(hybridInstance);
    }

    // Wrap a HybridInstance in the REST response shape; substitutes the
    // NO_PROJECT / NO_MODEL placeholders when the instance has no model.
    private HybridRespone hybridInstance2response(HybridInstance hybridInstance){
        DataModelDesc modelDesc = hybridInstance.getModel();
        return new HybridRespone(modelDesc == null ? HybridRespone.NO_PROJECT : modelDesc.getProject(), modelDesc == null ? HybridRespone.NO_MODEL : modelDesc.getName(), hybridInstance);
    }
}
1,478
1,233
/*
 * Copyright 2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.mantisrx.server.worker.jobmaster.control.utils;

import org.junit.Test;
import rx.Observable;
import rx.observers.TestSubscriber;

/**
 * Unit tests for the Integrator Rx operator: it emits the running sum of
 * its input stream, optionally clamped to a [min, max] range and seeded
 * with an initial value (behaviour as pinned by the assertions below).
 */
public class IntegratorTest {

    // Unclamped running sums of this sequence are: 1.0, 0.0, 0.0, -10.0.
    private final Observable<Double> data = Observable.just(1.0, -1.0, 0.0, -10.0);

    /** Integrator(0): plain running sum starting from zero. */
    @Test
    public void shouldIntegrateOverInput() {
        Observable<Double> result = data.lift(new Integrator(0));
        TestSubscriber<Double> testSubscriber = new TestSubscriber<>();
        result.subscribe(testSubscriber);

        testSubscriber.assertCompleted();
        testSubscriber.assertValues(1.0, 0.0, 0.0, -10.0);
    }

    /** Integrator(0, 0.0, 10.0): sums below the 0.0 floor are clamped up. */
    @Test
    public void shouldRespectMinimumValue() {
        Observable<Double> result = data.lift(new Integrator(0, 0.0, 10.0));
        TestSubscriber<Double> testSubscriber = new TestSubscriber<>();
        result.subscribe(testSubscriber);

        testSubscriber.assertCompleted();
        testSubscriber.assertValues(1.0, 0.0, 0.0, 0.0);
    }

    /** Integrator(0, -100.0, 0.0): sums above the 0.0 ceiling are clamped down. */
    @Test
    public void shouldRespectMaximumValue() {
        Observable<Double> result = data.lift(new Integrator(0, -100.0, 0.0));
        TestSubscriber<Double> testSubscriber = new TestSubscriber<>();
        result.subscribe(testSubscriber);

        testSubscriber.assertCompleted();
        testSubscriber.assertValues(0.0, -1.0, -1.0, -11.0);
    }

    /** Integrator(1.0): accumulation starts from the supplied seed value. */
    @Test
    public void shouldBeginFromInitialSuppliedValue() {
        Observable<Double> result = data.lift(new Integrator(1.0));
        TestSubscriber<Double> testSubscriber = new TestSubscriber<>();
        result.subscribe(testSubscriber);

        testSubscriber.assertCompleted();
        testSubscriber.assertValues(2.0, 1.0, 1.0, -9.0);
    }
}
896
360
/*
 * Copyright (c) 2020 Huawei Technologies Co.,Ltd.
 *
 * openGauss is licensed under Mulan PSL v2.
 * You can use this software according to the terms and conditions of the Mulan PSL v2.
 * You may obtain a copy of Mulan PSL v2 at:
 *
 * http://license.coscl.org.cn/MulanPSL2
 *
 * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
 * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
 * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
 * See the Mulan PSL v2 for more details.
 * ---------------------------------------------------------------------------------------
 *
 * ash.h
 *    Declarations for the Active Session History (ASH / ASP) sampling facility:
 *    the per-sample session snapshot record, the in-memory ring of snapshots,
 *    and the collector entry points.
 *
 * IDENTIFICATION
 * src/include/instruments/ash.h
 *
 * ---------------------------------------------------------------------------------------
 */
#ifndef ASH_H
#define ASH_H
#include "gs_thread.h"
#include "pgstat.h"

/* Shorthand for the session-level GUC that toggles active-session sampling. */
#define ENABLE_ASP u_sess->attr.attr_common.enable_asp

/*
 * One sampled snapshot of a single session's state at sample_time.
 * NOTE(review): several field meanings below are inferred from names only and
 * are marked "presumably" — confirm against the collector implementation.
 */
typedef struct SessionHistEntry {
    uint64 changCount; /* presumably a change/modification counter (name kept as-is) — TODO confirm */
    uint64 sample_id;  /* identifier of the sampling round this entry belongs to */
    /* time stamp of every case */
    TimestampTz sample_time;
    /* Whether the sample has been flushed to wdr */
    bool need_flush_sample;
    uint64 session_id;       /* sampled session's id */
    TimestampTz start_time;  /* presumably the session start time — TODO confirm */
    bool is_flushed_sample;  /* set once the sample has actually been flushed */
    uint64 psessionid;       /* presumably parent session id — TODO confirm */
    /* Database OID, owning user's OID, connection client address */
    Oid databaseid;
    Oid userid;
    SockAddr clientaddr;
    char* clienthostname; /* MUST be null-terminated */
    /* application name; MUST be null-terminated */
    char* st_appname;
    uint64 queryid; /* debug query id of current query */
    UniqueSQLKey unique_sql_key; /* get unique sql key */
    ThreadId procpid; /* The entry is valid iff st_procpid > 0, unused if st_procpid == 0 */
    pid_t tid;
    int thread_level; /* thread level, mark with plan node id of Stream node */
    uint32 smpid; /* smp worker id, used for parallel execution */
    WaitState waitstatus; /* backend waiting states */
    uint32 waitevent; /* backend's wait event */
    uint64 xid; /* for transaction id, fit for 64-bit */
    int waitnode_count; /* count of waiting nodes */
    int nodeid; /* maybe for nodeoid/nodeidx */
    int plannodeid; /* identify which consumer is receiving data for SCTP */
    char* relname; /* relation name, for analyze, vacuum, etc. */
    Oid libpq_wait_nodeid; /* for libpq, point to libpq_wait_node */
    int libpq_wait_nodecount; /* for libpq, point to libpq_wait_nodecount */
    WaitStatePhase waitstatus_phase; /* detailed phase for wait status, now only for 'wait node' status */
    int numnodes; /* nodes number when reporting waitstatus in case it changed */
    LOCALLOCKTAG locallocktag; /* locked object */
    uint64 st_block_sessionid; /* block session */
    GlobalSessionId globalSessionId;
} SessionHistEntry;

/* In-memory array of session history samples (struct name typo kept for ABI/source compat). */
typedef struct ActiveSessHistArrary {
    uint32 curr_index; /* the current index of the active session history array */
    uint32 max_size;   /* the max size (capacity) of active_sess_hist_info */
    SessionHistEntry *active_sess_hist_info; /* backing storage for the samples */
} ActiveSessHistArrary;

void InitAsp();
extern ThreadId ash_start(void);
extern void ActiveSessionCollectMain();
extern bool IsJobAspProcess(void);
#endif
1,253
1,043
<reponame>JeffMuchine/micro-server package app.singleton.com.oath.micro.server.copy; import org.springframework.stereotype.Component; import com.oath.micro.server.dist.lock.DistributedLockService; @Component public class DummyLock implements DistributedLockService { @Override public boolean tryLock(String key) { return false; } @Override public boolean tryReleaseLock(String key) { return false; } }
134
348
<filename>docs/data/leg-t2/077/07704032.json {"nom":"Beton-Bazoches","circ":"4ème circonscription","dpt":"Seine-et-Marne","inscrits":519,"abs":307,"votants":212,"blancs":5,"nuls":1,"exp":206,"res":[{"nuance":"LR","nom":"<NAME>","voix":151},{"nuance":"REM","nom":"<NAME>","voix":55}]}
119
1,217
/* [auto_generated] boost/numeric/odeint/stepper/detail/rotating_buffer.hpp [begin_description] Implemetation of a rotating (cyclic) buffer for use in the Adam Bashforth stepper [end_description] Copyright 2009-2011 <NAME> Copyright 2009-2011 <NAME> Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */ #ifndef BOOST_NUMERIC_ODEINT_STEPPER_DETAIL_ROTATING_BUFFER_HPP_INCLUDED #define BOOST_NUMERIC_ODEINT_STEPPER_DETAIL_ROTATING_BUFFER_HPP_INCLUDED #include <boost/array.hpp> namespace boost { namespace numeric { namespace odeint { namespace detail { template< class T , size_t N > class rotating_buffer { public: typedef T value_type; const static size_t dim = N; rotating_buffer( void ) : m_first( 0 ) { } size_t size( void ) const { return dim; } value_type& operator[]( size_t i ) { return m_data[ get_index( i ) ]; } const value_type& operator[]( size_t i ) const { return m_data[ get_index( i ) ]; } void rotate( void ) { if( m_first == 0 ) m_first = dim-1; else --m_first; } protected: value_type m_data[N]; private: size_t get_index( size_t i ) const { return ( ( i + m_first ) % dim ); } size_t m_first; }; } // detail } // odeint } // numeric } // boost #endif // BOOST_NUMERIC_ODEINT_STEPPER_DETAIL_ROTATING_BUFFER_HPP_INCLUDED
650
3,301
<filename>core/src/test/java/com/alibaba/alink/operator/common/linear/unarylossfunc/UnaryLossFuncTest.java package com.alibaba.alink.operator.common.linear.unarylossfunc; import com.alibaba.alink.testutil.AlinkTestBase; import org.junit.Test; import static org.junit.Assert.assertTrue; /** * Test for the unary loss functions. */ public class UnaryLossFuncTest extends AlinkTestBase { @Test public void test() throws Exception { UnaryLossFunc[] lossFuncs; lossFuncs = new UnaryLossFunc[] { new ExponentialLossFunc(), new HingeLossFunc(), new LogisticLossFunc(), new LogLossFunc(), new PerceptronLossFunc(), new ZeroOneLossFunc() }; for (UnaryLossFunc lossFunc : lossFuncs) { assertTrue(lossFunc.loss(-1.0, 1.0) > 1.0 - 1e-10); assertTrue(lossFunc.loss(1.0, 1.0) < 0.5); assertTrue(lossFunc.loss(-0.5, 1.0) - lossFunc.loss(0.5, 1.0) > 0.49); assertTrue(lossFunc.loss(-0.5, 1.0) == lossFunc.loss(0.5, -1.0)); assertTrue(lossFunc.derivative(-0.5, 1.0) <= lossFunc.derivative(0.5, 1.0)); assertTrue(lossFunc.secondDerivative(-0.5, 1.0) >= lossFunc.secondDerivative(0.5, 1.0)); } lossFuncs = new UnaryLossFunc[] { new SquareLossFunc(), new SvrLossFunc(1.0), new HuberLossFunc(1.0) }; for (UnaryLossFunc lossFunc : lossFuncs) { assertTrue(Math.abs(lossFunc.loss(0.0, 0.0)) < 1e-10); assertTrue(Math.abs(lossFunc.derivative(0.0, 0.0)) < 1e-10); assertTrue(lossFunc.derivative(-0.5, 0.0) == -lossFunc.derivative(0.5, 0.0)); assertTrue(lossFunc.secondDerivative(-0.5, 0.0) == lossFunc.secondDerivative(0.5, 0.0)); } } }
734
14,668
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "third_party/blink/renderer/core/typed_arrays/array_buffer/array_buffer_contents.h" #include "testing/gtest/include/gtest/gtest.h" namespace blink { class ArrayBufferContentsTest : public testing::Test {}; #if defined(ADDRESS_SANITIZER) #define DISABLE_ON_ASAN(test_name) DISABLED_##test_name #else #define DISABLE_ON_ASAN(test_name) test_name #endif // defined(ADDRESS_SANITIZER) // Disable on ASAN to avoid crashing on failed allocations, see // https://crbug.com/1038741. TEST_F(ArrayBufferContentsTest, DISABLE_ON_ASAN(AllocationFail)) { // This should be an amount of memory that cannot be allocated. size_t length = sizeof(size_t) == 4 ? 0x4fffffff : 0x8000000000; size_t element_byte_size = 1; ArrayBufferContents content1(length, element_byte_size, blink::ArrayBufferContents::kNotShared, blink::ArrayBufferContents::kDontInitialize); ArrayBufferContents content2(length, element_byte_size, blink::ArrayBufferContents::kNotShared, blink::ArrayBufferContents::kDontInitialize); // Check that no memory got allocated, and that DataLength is set accordingly. ASSERT_EQ(content2.DataLength(), 0u); ASSERT_EQ(content2.Data(), nullptr); } } // namespace blink
562
777
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef OffscreenCanvasRenderingContext2D_h
#define OffscreenCanvasRenderingContext2D_h

#include "core/html/canvas/CanvasContextCreationAttributes.h"
#include "core/html/canvas/CanvasRenderingContext.h"
#include "core/html/canvas/CanvasRenderingContextFactory.h"
#include "modules/canvas2d/BaseRenderingContext2D.h"
#include <memory>

namespace blink {

// 2D rendering context for an OffscreenCanvas: combines the generic
// CanvasRenderingContext lifecycle with the shared 2D drawing implementation
// in BaseRenderingContext2D.
class MODULES_EXPORT OffscreenCanvasRenderingContext2D final
    : public CanvasRenderingContext,
      public BaseRenderingContext2D {
  DEFINE_WRAPPERTYPEINFO();
  USING_GARBAGE_COLLECTED_MIXIN(OffscreenCanvasRenderingContext2D);

 public:
  // Factory registered with the context-creation machinery; produces
  // instances of this class for the "2d" context type.
  class Factory : public CanvasRenderingContextFactory {
   public:
    Factory() {}
    ~Factory() override {}

    CanvasRenderingContext* create(
        ScriptState* scriptState,
        OffscreenCanvas* canvas,
        const CanvasContextCreationAttributes& attrs) override {
      return new OffscreenCanvasRenderingContext2D(scriptState, canvas, attrs);
    }

    CanvasRenderingContext::ContextType getContextType() const override {
      return CanvasRenderingContext::Context2d;
    }
  };

  ScriptPromise commit(ScriptState*, ExceptionState&);

  // CanvasRenderingContext implementation
  ~OffscreenCanvasRenderingContext2D() override;
  ContextType getContextType() const override { return Context2d; }
  bool is2d() const override { return true; }
  void setOffscreenCanvasGetContextResult(OffscreenRenderingContext&) final;
  // Visibility/stop notifications only apply to on-screen canvases, hence
  // the assertions here.
  void setIsHidden(bool) final { ASSERT_NOT_REACHED(); }
  void stop() final { ASSERT_NOT_REACHED(); }
  void setCanvasGetContextResult(RenderingContext&) final {}
  void clearRect(double x, double y, double width, double height) override {
    BaseRenderingContext2D::clearRect(x, y, width, height);
  }
  PassRefPtr<Image> getImage(AccelerationHint, SnapshotReason) const final;
  ImageData* toImageData(SnapshotReason) override;
  void reset() override;

  // BaseRenderingContext2D implementation
  bool originClean() const final;
  void setOriginTainted() final;
  bool wouldTaintOrigin(CanvasImageSource*, ExecutionContext*) final;
  int width() const final;
  int height() const final;
  bool hasImageBuffer() const final;
  ImageBuffer* imageBuffer() const final;
  bool parseColorOrCurrentColor(Color&, const String& colorString) const final;
  SkCanvas* drawingCanvas() const final;
  SkCanvas* existingDrawingCanvas() const final;
  void disableDeferral(DisableDeferralReason) final;
  AffineTransform baseTransform() const final;
  void didDraw(const SkIRect& dirtyRect) final;
  bool stateHasFilter() final;
  sk_sp<SkImageFilter> stateGetFilter() final;
  void snapshotStateForFilter() final {}
  void validateStateStack() const final;
  bool hasAlpha() const final { return creationAttributes().alpha(); }
  bool isContextLost() const override;
  ImageBitmap* transferToImageBitmap(ScriptState*) final;
  ColorBehavior drawImageColorBehavior() const final;

 protected:
  OffscreenCanvasRenderingContext2D(
      ScriptState*,
      OffscreenCanvas*,
      const CanvasContextCreationAttributes& attrs);
  DECLARE_VIRTUAL_TRACE();

 private:
  bool m_needsMatrixClipRestore = false;
  std::unique_ptr<ImageBuffer> m_imageBuffer;

  bool isPaintable() const final;
  RefPtr<StaticBitmapImage> transferToStaticBitmapImage();
};

// Casting helpers: valid when the context is 2d AND attached to an
// OffscreenCanvas (pointer predicate first, reference predicate second).
DEFINE_TYPE_CASTS(OffscreenCanvasRenderingContext2D,
                  CanvasRenderingContext,
                  context,
                  context->is2d() && context->offscreenCanvas(),
                  context.is2d() && context.offscreenCanvas());

}  // namespace blink

#endif  // OffscreenCanvasRenderingContext2D_h
1,241
1,473
/* * Autopsy * * Copyright 2020 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.discovery.ui; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.discovery.search.ResultFile; import org.sleuthkit.autopsy.textsummarizer.TextSummary; /** * Class to wrap all the information necessary for a document summary to be * displayed. */ final class DocumentWrapper { private TextSummary summary; private final ResultFile resultFile; /** * Construct a new DocumentWrapper. * * @param file The ResultFile which represents the document which the * summary is created for. */ @Messages({"DocumentWrapper.previewInitialValue=Preview not generated yet."}) DocumentWrapper(ResultFile file) { this.summary = new TextSummary(Bundle.DocumentWrapper_previewInitialValue(), null, 0); this.resultFile = file; } /** * Set the summary which exists. * * @param textSummary The TextSummary object which contains the text and * image which should be displayed as a summary for this * document. */ void setSummary(TextSummary textSummary) { this.summary = textSummary; } /** * Get the ResultFile which represents the document the summary was created * for. * * @return The ResultFile which represents the document file which the * summary was created for. */ ResultFile getResultFile() { return resultFile; } /** * Get the summary of the document. * * @return The TextSummary which is the summary of the document. 
*/ TextSummary getSummary() { return summary; } }
788
2,305
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

# type: ignore

import logging
import random
import warnings

import torch
import torch.nn as nn

from ..interface import BaseOneShotTrainer
from .utils import AverageMeterGroup, replace_layer_choice, replace_input_choice, to_device

_logger = logging.getLogger(__name__)


def _get_mask(sampled, total):
    # Build a bool multi-hot vector of length `total` with True at the sampled
    # index (or indices, when `sampled` is a list).
    multihot = [i == sampled or (isinstance(sampled, list) and i in sampled) for i in range(total)]
    return torch.tensor(multihot, dtype=torch.bool)  # pylint: disable=not-callable


class PathSamplingLayerChoice(nn.Module):
    """
    Mixed module, in which fprop is decided by exactly one or multiple (sampled) module.
    If multiple module is selected, the result will be sumed and returned.

    Attributes
    ----------
    sampled : int or list of int
        Sampled module indices.
    mask : tensor
        A multi-hot bool 1D-tensor representing the sampled mask.
    """

    def __init__(self, layer_choice):
        super(PathSamplingLayerChoice, self).__init__()
        # Register every candidate op as a child module, keeping their names
        # so forward() can look them up by sampled index.
        self.op_names = []
        for name, module in layer_choice.named_children():
            self.add_module(name, module)
            self.op_names.append(name)
        assert self.op_names, 'There has to be at least one op to choose from.'
        self.sampled = None  # sampled can be either a list of indices or an index

    def forward(self, *args, **kwargs):
        assert self.sampled is not None, 'At least one path needs to be sampled before fprop.'
        if isinstance(self.sampled, list):
            # Multiple paths sampled: sum their outputs.
            return sum([getattr(self, self.op_names[i])(*args, **kwargs) for i in self.sampled])  # pylint: disable=not-an-iterable
        else:
            # Single path sampled: forward through that op only.
            return getattr(self, self.op_names[self.sampled])(*args, **kwargs)  # pylint: disable=invalid-sequence-index

    def __len__(self):
        # Number of candidate ops.
        return len(self.op_names)

    @property
    def mask(self):
        return _get_mask(self.sampled, len(self))


class PathSamplingInputChoice(nn.Module):
    """
    Mixed input. Take a list of tensor as input, select some of them and return the sum.

    Attributes
    ----------
    sampled : int or list of int
        Sampled module indices.
    mask : tensor
        A multi-hot bool 1D-tensor representing the sampled mask.
    """

    def __init__(self, input_choice):
        super(PathSamplingInputChoice, self).__init__()
        self.n_candidates = input_choice.n_candidates
        self.n_chosen = input_choice.n_chosen
        self.sampled = None

    def forward(self, input_tensors):
        if isinstance(self.sampled, list):
            # Sum the selected input tensors.
            return sum([input_tensors[t] for t in self.sampled])  # pylint: disable=not-an-iterable
        else:
            return input_tensors[self.sampled]

    def __len__(self):
        # Number of candidate input tensors.
        return self.n_candidates

    @property
    def mask(self):
        return _get_mask(self.sampled, len(self))


class SinglePathTrainer(BaseOneShotTrainer):
    """
    Single-path trainer. Samples a path every time and backpropagates on that path.

    Parameters
    ----------
    model : nn.Module
        Model with mutables.
    loss : callable
        Called with logits and targets. Returns a loss tensor.
    metrics : callable
        Returns a dict that maps metrics keys to metrics data.
    optimizer : Optimizer
        Optimizer that optimizes the model.
    num_epochs : int
        Number of epochs of training.
    dataset_train : Dataset
        Dataset of training.
    dataset_valid : Dataset
        Dataset of validation.
    batch_size : int
        Batch size.
    workers: int
        Number of threads for data preprocessing. Not used for this trainer. Maybe removed in future.
    device : torch.device
        Device object. Either ``torch.device("cuda")`` or ``torch.device("cpu")``.
        When ``None``, trainer will automatic detects GPU and selects GPU first.
    log_frequency : int
        Number of mini-batches to log metrics.
    """

    def __init__(self, model, loss, metrics,
                 optimizer, num_epochs, dataset_train, dataset_valid,
                 batch_size=64, workers=4, device=None, log_frequency=None):
        warnings.warn('SinglePathTrainer is deprecated. Please use strategy.RandomOneShot instead.', DeprecationWarning)
        self.model = model
        self.loss = loss
        self.metrics = metrics
        self.optimizer = optimizer
        self.num_epochs = num_epochs
        self.dataset_train = dataset_train
        self.dataset_valid = dataset_valid
        self.batch_size = batch_size
        self.workers = workers
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') if device is None else device
        self.log_frequency = log_frequency
        self.model.to(self.device)

        # Replace each LayerChoice/InputChoice in the model with its
        # path-sampling counterpart; nas_modules collects (name, module) pairs.
        self.nas_modules = []
        replace_layer_choice(self.model, PathSamplingLayerChoice, self.nas_modules)
        replace_input_choice(self.model, PathSamplingInputChoice, self.nas_modules)
        for _, module in self.nas_modules:
            module.to(self.device)

        self.train_loader = torch.utils.data.DataLoader(self.dataset_train,
                                                        batch_size=batch_size,
                                                        num_workers=workers,
                                                        shuffle=True)
        self.valid_loader = torch.utils.data.DataLoader(self.dataset_valid,
                                                        batch_size=batch_size,
                                                        num_workers=workers)

    def _resample(self):
        # Sample one architecture: mutables sharing the same name (label)
        # receive the same sampled index. Returns the {name: index} decision.
        result = {}
        for name, module in self.nas_modules:
            if name not in result:
                result[name] = random.randint(0, len(module) - 1)
            module.sampled = result[name]
        return result

    def _train_one_epoch(self, epoch):
        # One optimization pass over train_loader; a fresh architecture is
        # sampled for every mini-batch before the forward pass.
        self.model.train()
        meters = AverageMeterGroup()
        for step, (x, y) in enumerate(self.train_loader):
            x, y = to_device(x, self.device), to_device(y, self.device)
            self.optimizer.zero_grad()
            self._resample()
            logits = self.model(x)
            loss = self.loss(logits, y)
            loss.backward()
            self.optimizer.step()

            metrics = self.metrics(logits, y)
            metrics["loss"] = loss.item()
            meters.update(metrics)
            if self.log_frequency is not None and step % self.log_frequency == 0:
                _logger.info("Epoch [%s/%s] Step [%s/%s] %s", epoch + 1,
                             self.num_epochs, step + 1, len(self.train_loader), meters)

    def _validate_one_epoch(self, epoch):
        # Evaluation pass; architectures are still re-sampled per batch.
        self.model.eval()
        meters = AverageMeterGroup()
        with torch.no_grad():
            for step, (x, y) in enumerate(self.valid_loader):
                x, y = to_device(x, self.device), to_device(y, self.device)
                self._resample()
                logits = self.model(x)
                loss = self.loss(logits, y)
                metrics = self.metrics(logits, y)
                metrics["loss"] = loss.item()
                meters.update(metrics)
                if self.log_frequency is not None and step % self.log_frequency == 0:
                    _logger.info("Epoch [%s/%s] Validation Step [%s/%s] %s", epoch + 1,
                                 self.num_epochs, step + 1, len(self.valid_loader), meters)

    def fit(self):
        # Alternate train/validate for the configured number of epochs.
        for i in range(self.num_epochs):
            self._train_one_epoch(i)
            self._validate_one_epoch(i)

    def export(self):
        # Export = one more random sample of the architecture.
        return self._resample()


# Backwards-compatible alias.
RandomTrainer = SinglePathTrainer
3,422
2,270
<filename>modules/juce_audio_basics/midi/juce_MidiMessageSequence.h /* ============================================================================== This file is part of the JUCE library. Copyright (c) 2020 - Raw Material Software Limited JUCE is an open source library subject to commercial or open-source licensing. The code included in this file is provided under the terms of the ISC license http://www.isc.org/downloads/software-support-policy/isc-license. Permission To use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted provided that the above copyright notice and this permission notice appear in all copies. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE DISCLAIMED. ============================================================================== */ namespace juce { //============================================================================== /** A sequence of timestamped midi messages. This allows the sequence to be manipulated, and also to be read from and written to a standard midi file. @see MidiMessage, MidiFile @tags{Audio} */ class JUCE_API MidiMessageSequence { public: //============================================================================== /** Creates an empty midi sequence object. */ MidiMessageSequence(); /** Creates a copy of another sequence. */ MidiMessageSequence (const MidiMessageSequence&); /** Replaces this sequence with another one. */ MidiMessageSequence& operator= (const MidiMessageSequence&); /** Move constructor */ MidiMessageSequence (MidiMessageSequence&&) noexcept; /** Move assignment operator */ MidiMessageSequence& operator= (MidiMessageSequence&&) noexcept; /** Destructor. */ ~MidiMessageSequence(); //============================================================================== /** Structure used to hold midi events in the sequence. 
These structures act as 'handles' on the events as they are moved about in the list, and make it quick to find the matching note-offs for note-on events. @see MidiMessageSequence::getEventPointer */ class MidiEventHolder { public: //============================================================================== /** Destructor. */ ~MidiEventHolder(); /** The message itself, whose timestamp is used to specify the event's time. */ MidiMessage message; /** The matching note-off event (if this is a note-on event). If this isn't a note-on, this pointer will be nullptr. Use the MidiMessageSequence::updateMatchedPairs() method to keep these note-offs up-to-date after events have been moved around in the sequence or deleted. */ MidiEventHolder* noteOffObject = nullptr; private: //============================================================================== friend class MidiMessageSequence; MidiEventHolder (const MidiMessage&); MidiEventHolder (MidiMessage&&); JUCE_LEAK_DETECTOR (MidiEventHolder) }; //============================================================================== /** Clears the sequence. */ void clear(); /** Returns the number of events in the sequence. */ int getNumEvents() const noexcept; /** Returns a pointer to one of the events. */ MidiEventHolder* getEventPointer (int index) const noexcept; /** Iterator for the list of MidiEventHolders */ MidiEventHolder** begin() noexcept; /** Iterator for the list of MidiEventHolders */ MidiEventHolder* const* begin() const noexcept; /** Iterator for the list of MidiEventHolders */ MidiEventHolder** end() noexcept; /** Iterator for the list of MidiEventHolders */ MidiEventHolder* const* end() const noexcept; /** Returns the time of the note-up that matches the note-on at this index. If the event at this index isn't a note-on, it'll just return 0. 
@see MidiMessageSequence::MidiEventHolder::noteOffObject */ double getTimeOfMatchingKeyUp (int index) const noexcept; /** Returns the index of the note-up that matches the note-on at this index. If the event at this index isn't a note-on, it'll just return -1. @see MidiMessageSequence::MidiEventHolder::noteOffObject */ int getIndexOfMatchingKeyUp (int index) const noexcept; /** Returns the index of an event. */ int getIndexOf (const MidiEventHolder* event) const noexcept; /** Returns the index of the first event on or after the given timestamp. If the time is beyond the end of the sequence, this will return the number of events. */ int getNextIndexAtTime (double timeStamp) const noexcept; //============================================================================== /** Returns the timestamp of the first event in the sequence. @see getEndTime */ double getStartTime() const noexcept; /** Returns the timestamp of the last event in the sequence. @see getStartTime */ double getEndTime() const noexcept; /** Returns the timestamp of the event at a given index. If the index is out-of-range, this will return 0.0 */ double getEventTime (int index) const noexcept; //============================================================================== /** Inserts a midi message into the sequence. The index at which the new message gets inserted will depend on its timestamp, because the sequence is kept sorted. Remember to call updateMatchedPairs() after adding note-on events. @param newMessage the new message to add (an internal copy will be made) @param timeAdjustment an optional value to add to the timestamp of the message that will be inserted @see updateMatchedPairs */ MidiEventHolder* addEvent (const MidiMessage& newMessage, double timeAdjustment = 0); /** Inserts a midi message into the sequence. The index at which the new message gets inserted will depend on its timestamp, because the sequence is kept sorted. 
Remember to call updateMatchedPairs() after adding note-on events. @param newMessage the new message to add (an internal copy will be made) @param timeAdjustment an optional value to add to the timestamp of the message that will be inserted @see updateMatchedPairs */ MidiEventHolder* addEvent (MidiMessage&& newMessage, double timeAdjustment = 0); /** Deletes one of the events in the sequence. Remember to call updateMatchedPairs() after removing events. @param index the index of the event to delete @param deleteMatchingNoteUp whether to also remove the matching note-off if the event you're removing is a note-on */ void deleteEvent (int index, bool deleteMatchingNoteUp); /** Merges another sequence into this one. Remember to call updateMatchedPairs() after using this method. @param other the sequence to add from @param timeAdjustmentDelta an amount to add to the timestamps of the midi events as they are read from the other sequence @param firstAllowableDestTime events will not be added if their time is earlier than this time. (This is after their time has been adjusted by the timeAdjustmentDelta) @param endOfAllowableDestTimes events will not be added if their time is equal to or greater than this time. (This is after their time has been adjusted by the timeAdjustmentDelta) */ void addSequence (const MidiMessageSequence& other, double timeAdjustmentDelta, double firstAllowableDestTime, double endOfAllowableDestTimes); /** Merges another sequence into this one. Remember to call updateMatchedPairs() after using this method. @param other the sequence to add from @param timeAdjustmentDelta an amount to add to the timestamps of the midi events as they are read from the other sequence */ void addSequence (const MidiMessageSequence& other, double timeAdjustmentDelta); //============================================================================== /** Makes sure all the note-on and note-off pairs are up-to-date. 
Call this after re-ordering messages or deleting/adding messages, and it will scan the list and make sure all the note-offs in the MidiEventHolder structures are pointing at the correct ones. */ void updateMatchedPairs() noexcept; /** Forces a sort of the sequence. You may need to call this if you've manually modified the timestamps of some events such that the overall order now needs updating. */ void sort() noexcept; //============================================================================== /** Copies all the messages for a particular midi channel to another sequence. @param channelNumberToExtract the midi channel to look for, in the range 1 to 16 @param destSequence the sequence that the chosen events should be copied to @param alsoIncludeMetaEvents if true, any meta-events (which don't apply to a specific channel) will also be copied across. @see extractSysExMessages */ void extractMidiChannelMessages (int channelNumberToExtract, MidiMessageSequence& destSequence, bool alsoIncludeMetaEvents) const; /** Copies all midi sys-ex messages to another sequence. @param destSequence this is the sequence to which any sys-exes in this sequence will be added @see extractMidiChannelMessages */ void extractSysExMessages (MidiMessageSequence& destSequence) const; /** Removes any messages in this sequence that have a specific midi channel. @param channelNumberToRemove the midi channel to look for, in the range 1 to 16 */ void deleteMidiChannelMessages (int channelNumberToRemove); /** Removes any sys-ex messages from this sequence. */ void deleteSysExMessages(); /** Adds an offset to the timestamps of all events in the sequence. @param deltaTime the amount to add to each timestamp. */ void addTimeToMessages (double deltaTime) noexcept; //============================================================================== /** Scans through the sequence to determine the state of any midi controllers at a given time. 
This will create a sequence of midi controller changes that can be used to set all midi controllers to the state they would be in at the specified time within this sequence. As well as controllers, it will also recreate the midi program number and pitch bend position. @param channelNumber the midi channel to look for, in the range 1 to 16. Controllers for other channels will be ignored. @param time the time at which you want to find out the state - there are no explicit units for this time measurement, it's the same units as used for the timestamps of the messages @param resultMessages an array to which midi controller-change messages will be added. This will be the minimum number of controller changes to recreate the state at the required time. */ void createControllerUpdatesForTime (int channelNumber, double time, Array<MidiMessage>& resultMessages); //============================================================================== /** Swaps this sequence with another one. */ void swapWith (MidiMessageSequence&) noexcept; private: //============================================================================== friend class MidiFile; OwnedArray<MidiEventHolder> list; MidiEventHolder* addEvent (MidiEventHolder*, double); JUCE_LEAK_DETECTOR (MidiMessageSequence) }; } // namespace juce
4,829
979
import os

from torch.nn.parameter import Parameter
from openprompt.utils.logging import logger
from openprompt.data_utils import InputExample, InputFeatures
from typing import *
from transformers import PreTrainedModel
from transformers.tokenization_utils import PreTrainedTokenizer

from openprompt import Template
from openprompt.prompts import ManualTemplate, ManualVerbalizer

import torch
from torch import nn


class SoftTemplate(Template):
    r"""This is the implementation of `The Power of Scale for Parameter-Efficient
    Prompt Tuning <https://arxiv.org/pdf/2104.08691v1.pdf>`_ .

    Similar to :obj:`PrefixTuningTemplate`, this template does not need any
    textual template: additional soft tokens are directly concatenated (as
    embeddings) in front of the input (see :meth:`process_batch`). There are two
    initializations of the new tokens: (1) random initialization; (2)
    initialization from the embeddings of the first ``num_tokens`` vocabulary
    entries of the plm (we simply take the first ``num_tokens`` rows, similar to
    the reference implementation).

    Args:
        model: pre-trained language model whose input-embedding table is reused.
        tokenizer: the tokenizer matching ``model``.
        text: optional textual template; may be ``None`` for a pure soft prompt.
        soft_embeds: optional pre-computed soft-prompt embeddings; when given it
            overrides ``num_tokens`` / ``initialize_from_vocab``.
        num_tokens: number of soft tokens to prepend (``0`` disables them).
        initialize_from_vocab: if True, initialize the soft tokens from the
            first ``num_tokens`` rows of the plm embedding matrix; otherwise
            sample uniformly from ``[-random_range, random_range]``.
        random_range: half-width of the uniform initialization range.
        placeholder_mapping: mapping from template placeholders to the
            corresponding :class:`InputExample` fields.
    """
    registered_inputflag_names = ["loss_ids", "shortenable_ids"]

    def __init__(self,
                 model: PreTrainedModel,
                 tokenizer: PreTrainedTokenizer,
                 text: Optional[str] = None,
                 soft_embeds: Optional[torch.FloatTensor] = None,
                 num_tokens: int=20,
                 initialize_from_vocab: Optional[bool] = True,
                 random_range: Optional[float] = 0.5,
                 placeholder_mapping: dict = {'<text_a>':'text_a','<text_b>':'text_b'},
                ):
        super().__init__(tokenizer=tokenizer,
                         placeholder_mapping=placeholder_mapping)
        self.raw_embedding = model.get_input_embeddings()
        # Only the soft prompt is trained; the plm embedding table stays frozen.
        self.raw_embedding.requires_grad_(False)
        self.model_is_encoder_decoder = model.config.is_encoder_decoder
        self.random_range = random_range
        self.num_tokens = num_tokens
        self.initialize_from_vocab = initialize_from_vocab

        self.text = text

        if soft_embeds is not None:
            # Caller supplied the soft prompt; adopt it and its length.
            self.soft_embeds = soft_embeds
            self.num_tokens = len(soft_embeds)
        else:
            if self.num_tokens > 0:
                self.generate_parameters()

    def on_text_set(self):
        # Re-parse whenever the ``text`` property is (re)assigned.
        self.text = self.parse_text(self.text)

    def wrap_one_example(self, example) -> List[Dict]:
        # TODO: this automatically generated template may not be able to process
        # diverse data formats.
        if self.text is None:
            logger.warning("You didn't provide text template for softprompt. Using default template, is this intended?")
            # The historical ``default_text1`` / ``default_text2`` class
            # attributes were never defined, so this fallback used to crash with
            # an opaque AttributeError.  Fail with a clear, actionable message
            # instead (a subclass may still provide the defaults).
            default_attr = "default_text1" if example.text_b is None else "default_text2"
            default_text = getattr(self, default_attr, None)
            if default_text is None:
                raise ValueError(
                    "SoftTemplate has no default text template; please pass "
                    "`text` explicitly when constructing the template.")
            self.text = default_text
        return super().wrap_one_example(example)

    def generate_parameters(self) -> None:
        r"""Generate the trainable soft-token embedding parameter.

        When ``initialize_from_vocab`` is set, the soft tokens are initialized
        with the embeddings of the first ``num_tokens`` vocabulary entries of
        the plm; otherwise they are drawn uniformly from
        ``[-random_range, random_range]``.
        """
        if self.initialize_from_vocab:
            soft_embeds = self.raw_embedding.weight[:self.num_tokens].clone().detach()
        else:
            soft_embeds = torch.FloatTensor(self.num_tokens, self.raw_embedding.weight.size(1)).uniform_(-self.random_range, self.random_range)
        self.soft_embeds = nn.Parameter(soft_embeds, requires_grad=True)

    def process_batch(self, batch: Union[Dict, InputFeatures]) -> Union[Dict, InputFeatures]:
        r"""Convert ``input_ids`` to ``inputs_embeds`` and prepend the soft prompt.

        Normal tokens are embedded with the plm's own embedding layer; the soft
        tokens come from :attr:`soft_embeds` and are concatenated in front of
        the sequence. The attention mask is extended with ones accordingly.
        """
        inputs_embeds = self.raw_embedding(batch['input_ids'])
        batch_size = inputs_embeds.size(0)
        if self.num_tokens > 0:
            soft_embeds = self.soft_embeds.repeat(batch_size, 1, 1)
            inputs_embeds = torch.cat([soft_embeds, inputs_embeds], 1)

        # Downstream models must receive embeddings, not ids, once concatenated.
        batch['input_ids'] = None
        batch['inputs_embeds'] = inputs_embeds
        if 'attention_mask' in batch and self.num_tokens > 0:
            am = batch['attention_mask']
            batch['attention_mask'] = torch.cat([torch.ones((batch_size, self.num_tokens), dtype=am.dtype, device=am.device), am], dim=-1)
        return batch

    def post_processing_outputs(self, outputs: torch.Tensor):
        r"""Post process the outputs of the language model for this template.

        Most templates don't need post processing, but SoftTemplate appends the
        soft prompt as extra input positions, so for decoder-only models the
        logits at those positions must be stripped to keep ``seq_len`` aligned
        with the original input.
        """
        if not self.model_is_encoder_decoder:
            outputs.logits = outputs.logits[:, self.num_tokens:, :]
        return outputs
2,044
311
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 */
package azkaban.webapp;

import static java.util.Objects.requireNonNull;

import azkaban.Constants;
import azkaban.utils.Props;
import javax.inject.Inject;
import com.google.inject.Provider;
// Legacy Jetty 6 (mortbay) imports kept for reference during the Jetty upgrade.
//import org.mortbay.jetty.Connector;
//import org.mortbay.jetty.Server;
//import org.mortbay.jetty.bio.SocketConnector;
//import org.mortbay.jetty.security.SslSocketConnector;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import org.eclipse.jetty.server.*;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;

import java.util.ArrayList;
import java.util.List;

/**
 * Guice {@link Provider} that builds the embedded Jetty {@link Server} for the
 * Azkaban web app from {@link Props}. Depending on {@code jetty.use.ssl} it
 * attaches either a plain HTTP connector or an HTTPS connector.
 */
public class WebServerProvider implements Provider<Server> {

  private static final Logger logger = LoggerFactory.getLogger(WebServerProvider.class);
  // 10 MiB request-header limit; Azkaban can receive very large headers/cookies.
  private static final int MAX_HEADER_BUFFER_SIZE = 10 * 1024 * 1024;
  // Default: do not advertise the Jetty version in responses.
  private static final boolean JETTY_SEND_SERVER_VERSION = false;

  @Inject
  private Props props;

  /**
   * Builds a Jetty server with a bounded thread pool and exactly one connector
   * (HTTPS when {@code jetty.use.ssl} is true — the default — otherwise HTTP).
   * The server is configured but not started here.
   */
  @Override
  public Server get() {
    requireNonNull(this.props);

    final ServerConnector httpConnector;
    final ServerConnector httpsConnector;

    final int maxThreads = this.props
        .getInt("jetty.maxThreads", Constants.DEFAULT_JETTY_MAX_THREAD_COUNT);

    final QueuedThreadPool httpThreadPool = new QueuedThreadPool(maxThreads);
    final Server server = new Server(httpThreadPool);

    final boolean useSsl = this.props.getBoolean("jetty.use.ssl", true);
    final int port;
    if (useSsl) {
      final int sslPortNumber = this.props
          .getInt("jetty.ssl.port", Constants.DEFAULT_SSL_PORT_NUMBER);
      port = sslPortNumber;
      //server.addConnector(getSslSocketConnector(sslPortNumber));
      // FIXME Use https connector.
      httpsConnector = createHttpsConnector(server);
      server.addConnector(httpsConnector);
    } else {
      port = this.props.getInt("jetty.port", Constants.DEFAULT_PORT_NUMBER);
      // server.addConnector(getSocketConnector(port));
      // FIXME Use http connector.
      httpConnector = createHttpConnector(server);
      server.addConnector(httpConnector);
    }

    logger.info(String.format(
        "Starting %sserver on port: %d # Max threads: %d", useSsl ? "SSL " : "", port, maxThreads));
    return server;
  }

  /**
   * Creates a plain-HTTP connector bound to {@code jetty.hostname}
   * (default 0.0.0.0) on {@code jetty.port}.
   */
  private ServerConnector createHttpConnector(Server server) {
    HttpConfiguration httpConfig = new HttpConfiguration();
    setHeaderBufferSize(httpConfig);
    setSendServerVersion(httpConfig);
    int port = this.props.getInt("jetty.port", Constants.DEFAULT_PORT_NUMBER);
    String bindAddress = this.props.getString("jetty.hostname", "0.0.0.0");
    ServerConnector connector = createServerConnector(server, port, new HttpConnectionFactory(httpConfig));
    connector.setHost(bindAddress);
    return connector;
  }

  /**
   * Common connector wiring: 2 acceptor and 2 selector threads, a 20-minute
   * idle timeout, no stop timeout, and the configured accept-queue size.
   */
  private ServerConnector createServerConnector(Server server,
      int port,
      ConnectionFactory... connectionFactories) {
    int acceptors = 2;
    // Args: server, executor (null = server's), scheduler, byte-buffer pool,
    // acceptor count, selector count, connection factories.
    ServerConnector connector = new ServerConnector(server, null, null, null,
        acceptors, 2, connectionFactories);
    connector.setPort(port);
    connector.setStopTimeout(0);
    connector.getSelectorManager().setStopTimeout(0);
    // 1,200,000 ms = 20 minutes before an idle connection is closed.
    connector.setIdleTimeout(1200000L);
    setJettySettings(connector);
    return connector;
  }

  // Applies tunables shared by all connectors (currently only accept-queue size).
  private void setJettySettings(ServerConnector connector) {
    int acceptQueueSize = this.props.getInt("jetty.acceptQueueSize", 100);
    connector.setAcceptQueueSize(acceptQueueSize);
  }

  private void setHeaderBufferSize(HttpConfiguration configuration) {
    configuration.setRequestHeaderSize(MAX_HEADER_BUFFER_SIZE);
  }

  private void setSendServerVersion(HttpConfiguration configuration) {
    final boolean sendServerVersion =
        props.getBoolean("jetty.send.server.version", JETTY_SEND_SERVER_VERSION);
    configuration.setSendServerVersion(sendServerVersion);
  }

  /**
   * Creates the HTTPS connector: loads key/trust stores from props, applies the
   * optional cipher-suite exclusion list, and enables the
   * {@link SecureRequestCustomizer} so requests report the TLS scheme.
   *
   * NOTE(review): "jetty.password" is fed to setKeyManagerPassword (the key
   * password) and "jetty.keypassword" to setKeyStorePassword (the store
   * password) — the property names suggest the opposite mapping. Confirm
   * against existing deployment configs before changing.
   */
  private ServerConnector createHttpsConnector(Server jettyServer) {
    SslContextFactory sslContextFactory = new SslContextFactory();
    sslContextFactory.setKeyStorePath(this.props.getString("jetty.keystore"));
    sslContextFactory.setKeyManagerPassword(this.props.getString("jetty.password"));
    sslContextFactory.setKeyStorePassword(this.props.getString("jetty.keypassword"));
    sslContextFactory.setTrustStorePath(this.props.getString("jetty.truststore"));
    sslContextFactory.setTrustStorePassword(this.props.getString("jetty.trustpassword"));

    final List<String> cipherSuitesToExclude = this.props
        .getStringList("jetty.excludeCipherSuites", new ArrayList<>());
    logger.info("Excluded Cipher Suites: " + String.valueOf(cipherSuitesToExclude));
    if (cipherSuitesToExclude != null && !cipherSuitesToExclude.isEmpty()) {
      sslContextFactory.setExcludeCipherSuites(cipherSuitesToExclude.toArray(new String[cipherSuitesToExclude.size()]));
    }

    HttpConfiguration httpConfig = new HttpConfiguration();
    setHeaderBufferSize(httpConfig);
    setSendServerVersion(httpConfig);
    httpConfig.addCustomizer(new SecureRequestCustomizer());
    final int port = this.props.getInt("jetty.ssl.port", Constants.DEFAULT_SSL_PORT_NUMBER);
    return createServerConnector(jettyServer, port,
        new SslConnectionFactory(sslContextFactory, "http/1.1"),
        new HttpConnectionFactory(httpConfig));
  }
}
1,928
1,766
// Unit tests for erizo::RtpPaddingManagerHandler: verifies how target padding
// bitrate is distributed to subscriber streams based on the sender bandwidth
// estimation, the current total video bitrate, and the backoff/recovery timing
// after a bandwidth-estimate decrease.
#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include <rtp/RtpPaddingManagerHandler.h>
#include <rtp/RtpHeaders.h>
#include <MediaDefinitions.h>
#include <WebRtcConnection.h>
#include <stats/StatNode.h>
#include <Stats.h>

#include <queue>
#include <string>
#include <vector>

#include "../utils/Mocks.h"
#include "../utils/Tools.h"
#include "../utils/Matchers.h"

using ::testing::_;
using ::testing::IsNull;
using ::testing::Args;
using ::testing::Return;
using ::testing::AtLeast;
using erizo::DataPacket;
using erizo::packetType;
using erizo::AUDIO_PACKET;
using erizo::VIDEO_PACKET;
using erizo::MovingIntervalRateStat;
using erizo::IceConfig;
using erizo::RtpMap;
using erizo::RtpPaddingManagerHandler;
using erizo::WebRtcConnection;
using erizo::Pipeline;
using erizo::InboundHandler;
using erizo::OutboundHandler;
using erizo::CumulativeStat;
using erizo::Worker;
using std::queue;
using erizo::MediaStream;

// Shared fixture logic: builds the handler on a simulated clock and provides
// "when…" helpers to set up streams/stats and "expect…" helpers for assertions.
class RtpPaddingManagerHandlerBaseTest : public erizo::BaseHandlerTest {
 public:
  RtpPaddingManagerHandlerBaseTest() {}

 protected:
  // Installs the handler under test at the back of the pipeline.
  void internalSetHandler() {
    clock = std::make_shared<erizo::SimulatedClock>();
    padding_calculator_handler = std::make_shared<RtpPaddingManagerHandler>(clock);
    pipeline->addBack(padding_calculator_handler);
  }

  // Adds one subscriber stream per entry, with the given target video bitrate.
  void whenSubscribersWithTargetBitrate(std::vector<uint32_t> subscriber_bitrates) {
    int i = 0;
    std::for_each(subscriber_bitrates.begin(), subscriber_bitrates.end(),
      [this, &i](uint32_t bitrate) {
        addMediaStreamToConnection("sub" + std::to_string(i), false, bitrate);
        simulated_worker->executeTasks();
        i++;
      });
  }

  // Adds the given number of publisher streams (publishers never get padding).
  void whenPublishers(uint num_publishers) {
    for (uint i = 0; i < num_publishers; i++) {
      addMediaStreamToConnection("pub" + std::to_string(i), true, 0);
      simulated_worker->executeTasks();
    }
  }

  // Seeds the connection-wide sender bandwidth estimation stat.
  void whenBandwidthEstimationIs(uint32_t bitrate) {
    stats->getNode()["total"].insertStat("senderBitrateEstimation", CumulativeStat{bitrate});
  }

  // Seeds the connection-wide current video bitrate stat.
  void whenCurrentTotalVideoBitrateIs(uint32_t bitrate) {
    stats->getNode()["total"].insertStat("videoBitrate", CumulativeStat{bitrate});
  }

  // Removes every stream added by the helpers above and flushes pending tasks.
  void internalTearDown() {
    std::for_each(subscribers.begin(), subscribers.end(),
      [this](const std::shared_ptr<erizo::MockMediaStream> &stream) {
        connection->removeMediaStream(stream->getId());
      });
    std::for_each(publishers.begin(), publishers.end(),
      [this](const std::shared_ptr<erizo::MockMediaStream> &stream) {
        connection->removeMediaStream(stream->getId());
      });
    simulated_worker->executeTasks();
  }

  // Creates a mock stream, registers it on the connection, stubs its target
  // bitrate, and tracks it in the appropriate publisher/subscriber list.
  std::shared_ptr<erizo::MockMediaStream> addMediaStreamToConnection(std::string id,
      bool is_publisher, uint32_t bitrate) {
    auto media_stream = std::make_shared<erizo::MockMediaStream>(simulated_worker, connection, id, id,
      rtp_maps, is_publisher);
    std::shared_ptr<erizo::MediaStream> stream_ptr = std::dynamic_pointer_cast<erizo::MediaStream>(media_stream);
    connection->addMediaStream(stream_ptr);
    EXPECT_CALL(*media_stream.get(), getTargetVideoBitrate()).WillRepeatedly(Return(bitrate));
    if (is_publisher) {
      publishers.push_back(media_stream);
    } else {
      subscribers.push_back(media_stream);
    }
    return media_stream;
  }

  // Expects every subscriber to receive exactly `bitrate` as padding target
  // (`times` times) and every publisher to receive none.
  void expectPaddingBitrate(uint64_t bitrate, int times = 1) {
    std::for_each(subscribers.begin(), subscribers.end(),
      [bitrate, times](const std::shared_ptr<erizo::MockMediaStream> &stream) {
        EXPECT_CALL(*stream.get(), setTargetPaddingBitrate(testing::Eq(bitrate))).Times(times);
      });
    std::for_each(publishers.begin(), publishers.end(),
      [bitrate](const std::shared_ptr<erizo::MockMediaStream> &stream) {
        EXPECT_CALL(*stream.get(), setTargetPaddingBitrate(_)).Times(0);
      });
  }

  std::vector<std::shared_ptr<erizo::MockMediaStream>> subscribers;
  std::vector<std::shared_ptr<erizo::MockMediaStream>> publishers;
  std::shared_ptr<RtpPaddingManagerHandler> padding_calculator_handler;
  std::shared_ptr<erizo::SimulatedClock> clock;
};

// Plain (non-parameterized) fixture.
class RtpPaddingManagerHandlerTest : public ::testing::Test, public RtpPaddingManagerHandlerBaseTest {
 public:
  RtpPaddingManagerHandlerTest() {}

  void setHandler() override {
    internalSetHandler();
  }

 protected:
  virtual void SetUp() {
    internalSetUp();
  }

  void TearDown() override {
    internalTearDown();
  }
};

// The handler must be transparent to packets flowing through the pipeline.
TEST_F(RtpPaddingManagerHandlerTest, basicBehaviourShouldReadPackets) {
  auto packet = erizo::PacketTools::createDataPacket(erizo::kArbitrarySeqNumber, AUDIO_PACKET);

  EXPECT_CALL(*reader.get(), read(_, _)).
    With(Args<1>(erizo::RtpHasSequenceNumber(erizo::kArbitrarySeqNumber))).Times(1);
  pipeline->read(packet);
}

TEST_F(RtpPaddingManagerHandlerTest, basicBehaviourShouldWritePackets) {
  auto packet = erizo::PacketTools::createDataPacket(erizo::kArbitrarySeqNumber, AUDIO_PACKET);

  EXPECT_CALL(*writer.get(), write(_, _)).
    With(Args<1>(erizo::RtpHasSequenceNumber(erizo::kArbitrarySeqNumber))).Times(1);
  pipeline->write(packet);
}

// Headroom (600 - 100 = 500) is split evenly across 5 subscribers -> 100 each.
TEST_F(RtpPaddingManagerHandlerTest, shouldDistributePaddingEvenlyAmongStreamsWithoutPublishers) {
  auto packet = erizo::PacketTools::createDataPacket(erizo::kArbitrarySeqNumber, AUDIO_PACKET);
  whenSubscribersWithTargetBitrate({200, 200, 200, 200, 200});
  whenPublishers(0);
  whenBandwidthEstimationIs(600);
  whenCurrentTotalVideoBitrateIs(100);

  expectPaddingBitrate(100);

  clock->advanceTime(std::chrono::milliseconds(200));
  pipeline->write(packet);
}

// A drop in the bandwidth estimate must cut padding to zero.
TEST_F(RtpPaddingManagerHandlerTest, shouldStopPaddingIfRembGoesDown) {
  auto packet = erizo::PacketTools::createDataPacket(erizo::kArbitrarySeqNumber, AUDIO_PACKET);
  whenSubscribersWithTargetBitrate({500});
  whenPublishers(0);
  whenBandwidthEstimationIs(300);
  whenCurrentTotalVideoBitrateIs(100);

  expectPaddingBitrate(200);
  clock->advanceTime(std::chrono::milliseconds(200));
  pipeline->write(packet);

  whenBandwidthEstimationIs(200);
  whenCurrentTotalVideoBitrateIs(100);

  expectPaddingBitrate(0);
  clock->advanceTime(std::chrono::milliseconds(200));
  pipeline->write(packet);
}

// During the backoff period after a BWE decrease, padding must stay at zero.
TEST_F(RtpPaddingManagerHandlerTest, shouldNotSendPaddingInTheBackoffPeriod) {
  auto packet = erizo::PacketTools::createDataPacket(erizo::kArbitrarySeqNumber, AUDIO_PACKET);
  whenSubscribersWithTargetBitrate({500});
  whenPublishers(0);
  whenBandwidthEstimationIs(300);
  whenCurrentTotalVideoBitrateIs(100);

  expectPaddingBitrate(200);
  clock->advanceTime(std::chrono::milliseconds(200));
  pipeline->write(packet);

  whenBandwidthEstimationIs(200);
  whenCurrentTotalVideoBitrateIs(100);
  clock->advanceTime(std::chrono::milliseconds(200));
  pipeline->write(packet);

  whenBandwidthEstimationIs(200);
  whenCurrentTotalVideoBitrateIs(100);
  expectPaddingBitrate(0, 2);
  // Advance to just before the backoff expires: still no padding.
  clock->advanceTime(
    RtpPaddingManagerHandler::kMinDurationToSendPaddingAfterBweDecrease - std::chrono::milliseconds(1));
  pipeline->write(packet);
}

// After the backoff, padding ramps back up scaled by time spent in recovery.
TEST_F(RtpPaddingManagerHandlerTest, shouldRampUpAfterBackoff) {
  auto packet = erizo::PacketTools::createDataPacket(erizo::kArbitrarySeqNumber, AUDIO_PACKET);
  whenSubscribersWithTargetBitrate({500});
  whenPublishers(0);
  whenBandwidthEstimationIs(300);
  whenCurrentTotalVideoBitrateIs(100);

  expectPaddingBitrate(200);
  clock->advanceTime(std::chrono::milliseconds(200));
  pipeline->write(packet);

  whenBandwidthEstimationIs(200);
  whenCurrentTotalVideoBitrateIs(100);
  expectPaddingBitrate(0);
  clock->advanceTime(std::chrono::milliseconds(200));
  pipeline->write(packet);

  whenBandwidthEstimationIs(200);
  whenCurrentTotalVideoBitrateIs(100);
  std::chrono::steady_clock::duration kDurationLowerThanMaxDurationInRecovery =
    RtpPaddingManagerHandler::kMaxDurationInRecoveryFromBwe - std::chrono::seconds(1);
  // Expected padding is linearly scaled by elapsed/max recovery duration.
  double correcting_factor = static_cast<double>(kDurationLowerThanMaxDurationInRecovery.count())/
    RtpPaddingManagerHandler::kMaxDurationInRecoveryFromBwe.count();
  expectPaddingBitrate(100 * correcting_factor);
  clock->advanceTime(kDurationLowerThanMaxDurationInRecovery);
  pipeline->write(packet);
}

// Once the recovery window has fully elapsed, full padding is restored.
TEST_F(RtpPaddingManagerHandlerTest, shouldRecoverPaddingBitrateCompletely) {
  auto packet = erizo::PacketTools::createDataPacket(erizo::kArbitrarySeqNumber, AUDIO_PACKET);
  whenSubscribersWithTargetBitrate({500});
  whenPublishers(0);
  whenBandwidthEstimationIs(300);
  whenCurrentTotalVideoBitrateIs(100);

  expectPaddingBitrate(200);
  clock->advanceTime(std::chrono::milliseconds(200));
  pipeline->write(packet);

  whenBandwidthEstimationIs(200);
  whenCurrentTotalVideoBitrateIs(100);
  expectPaddingBitrate(0);
  clock->advanceTime(std::chrono::milliseconds(200));
  pipeline->write(packet);

  whenBandwidthEstimationIs(500);
  whenCurrentTotalVideoBitrateIs(100);
  expectPaddingBitrate(400);
  clock->advanceTime(RtpPaddingManagerHandler::kMaxDurationInRecoveryFromBwe + std::chrono::milliseconds(100));
  pipeline->write(packet);
}

typedef std::vector<uint32_t> SubscriberBitratesList;

// Parameterized fixture: (subscriber target bitrates, bandwidth estimation,
// current video bitrate, expected per-subscriber padding bitrate).
class RtpPaddingManagerHandlerTestWithParam : public RtpPaddingManagerHandlerBaseTest,
  public ::testing::TestWithParam<std::tr1::tuple<SubscriberBitratesList, uint32_t, uint32_t, uint64_t>> {
 public:
  RtpPaddingManagerHandlerTestWithParam() {
    subscribers = std::tr1::get<0>(GetParam());
    bw_estimation = std::tr1::get<1>(GetParam());
    video_bitrate = std::tr1::get<2>(GetParam());
    expected_padding_bitrate = std::tr1::get<3>(GetParam());
  }

 protected:
  void setHandler() override {
    internalSetHandler();
  }

  virtual void SetUp() {
    internalSetUp();
  }

  void TearDown() override {
    internalTearDown();
  }

  SubscriberBitratesList subscribers;
  uint32_t bw_estimation;
  uint32_t video_bitrate;
  uint64_t expected_padding_bitrate;
};

// Publisher streams must not affect the per-subscriber padding distribution.
TEST_P(RtpPaddingManagerHandlerTestWithParam, shouldDistributePaddingWithPublishers) {
  auto packet = erizo::PacketTools::createDataPacket(erizo::kArbitrarySeqNumber, AUDIO_PACKET);
  whenSubscribersWithTargetBitrate(subscribers);
  whenPublishers(10);
  whenBandwidthEstimationIs(bw_estimation);
  whenCurrentTotalVideoBitrateIs(video_bitrate);

  expectPaddingBitrate(expected_padding_bitrate);

  clock->advanceTime(std::chrono::milliseconds(200));
  pipeline->write(packet);
}

TEST_P(RtpPaddingManagerHandlerTestWithParam, shouldDistributePaddingWithNoPublishers) {
  auto packet = erizo::PacketTools::createDataPacket(erizo::kArbitrarySeqNumber, AUDIO_PACKET);
  whenSubscribersWithTargetBitrate(subscribers);
  whenPublishers(0);
  whenBandwidthEstimationIs(bw_estimation);
  whenCurrentTotalVideoBitrateIs(video_bitrate);

  expectPaddingBitrate(expected_padding_bitrate);

  clock->advanceTime(std::chrono::milliseconds(200));
  pipeline->write(packet);
}

INSTANTIATE_TEST_CASE_P(
  Padding_values, RtpPaddingManagerHandlerTestWithParam, testing::Values(
    //              targetBitrates,                         bwe, bitrate, expectedPaddingBitrate
    std::make_tuple(SubscriberBitratesList{200, 200, 200, 200, 200},  600,  100, 100),
    std::make_tuple(SubscriberBitratesList{200, 200, 200, 200, 200}, 1500,  100,   0),
    std::make_tuple(SubscriberBitratesList{200, 200, 200, 200, 200},   99,  100,   0),
    std::make_tuple(SubscriberBitratesList{200, 200, 200, 200, 200},  600,  600,   0),
    std::make_tuple(SubscriberBitratesList{200, 200, 200, 200, 200},    0,  100,   0),
    std::make_tuple(SubscriberBitratesList{200, 200, 200, 200, 200}, 1000,    0, 200)));
4,227
450
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
#include "Exception.h"
#include "ExceptionInternal.h"
#include "Function.h"
#include "SessionConfig.h"

#include <sstream>

// Number of elements in a C array (only valid on true arrays, not pointers).
#define ARRAYSIZE(A) (sizeof(A) / sizeof(A[0]))

namespace Yarn {
namespace Internal {

// Validator: throws YarnConfigInvalid unless value >= target.
template<typename T>
static void CheckRangeGE(const char * key, T const & value, T const & target) {
    if (!(value >= target)) {
        std::stringstream ss;
        ss << "Invalid configure item: \"" << key << "\", value: " << value
           << ", expected value should be larger than " << target;
        THROW(YarnConfigInvalid, "%s", ss.str().c_str());
    }
}

// Validator: throws YarnConfigInvalid unless value is a positive multiple of unit.
template<typename T>
static void CheckMultipleOf(const char * key, const T & value, int unit) {
    if (value <= 0 || value % unit != 0) {
        THROW(YarnConfigInvalid, "%s should be larger than 0 and be the multiple of %d.", key, unit);
    }
}

// Loads RPC/session settings from `conf` in a table-driven way: each table row
// binds a member variable to a config key, a default value, and an optional
// validation callback that is run after the value is read.
SessionConfig::SessionConfig(const Config & conf) {
    // Boolean settings.
    ConfigDefault<bool> boolValues [] = {
        {
            &rpcTcpNoDelay, "rpc.client.connect.tcpnodelay", true
        }
    };
    // Integer settings; defaults are in milliseconds unless the key says otherwise.
    ConfigDefault<int32_t> i32Values[] = {
        {
            &rpcMaxIdleTime, "rpc.client.max.idle", 10 * 1000, bind(CheckRangeGE<int32_t>, _1, _2, 1)
        }, {
            &rpcPingTimeout, "rpc.client.ping.interval", 10 * 1000
        }, {
            &rpcConnectTimeout, "rpc.client.connect.timeout", 600 * 1000
        }, {
            &rpcReadTimeout, "rpc.client.read.timeout", 3600 * 1000
        }, {
            &rpcWriteTimeout, "rpc.client.write.timeout", 3600 * 1000
        }, {
            &rpcSocketLingerTimeout, "rpc.client.socket.linger.timeout", -1
        }, {
            &rpcMaxRetryOnConnect, "rpc.client.connect.retry", 10, bind(CheckRangeGE<int32_t>, _1, _2, 1)
        }, {
            &rpcTimeout, "rpc.client.timeout", 3600 * 1000
        }, {
            &rpcMaxHARetry, "yarn.client.failover.max.attempts", 15, bind(CheckRangeGE<int32_t>, _1, _2, 0)
        }
    };
    // String settings.
    ConfigDefault<std::string> strValues [] = {
        {&rpcAuthMethod, "hadoop.security.authentication", "simple" },
        {&kerberosCachePath, "hadoop.security.kerberos.ticket.cache.path", "" },
        {&logSeverity, "yarn.client.log.severity", "INFO" }
    };

    // Read each table, then run its validator (if any) on the stored value.
    for (size_t i = 0; i < ARRAYSIZE(boolValues); ++i) {
        *boolValues[i].variable = conf.getBool(boolValues[i].key,
                                               boolValues[i].value);

        if (boolValues[i].check) {
            boolValues[i].check(boolValues[i].key, *boolValues[i].variable);
        }
    }

    for (size_t i = 0; i < ARRAYSIZE(i32Values); ++i) {
        *i32Values[i].variable = conf.getInt32(i32Values[i].key,
                                               i32Values[i].value);

        if (i32Values[i].check) {
            i32Values[i].check(i32Values[i].key, *i32Values[i].variable);
        }
    }

    for (size_t i = 0; i < ARRAYSIZE(strValues); ++i) {
        *strValues[i].variable = conf.getString(strValues[i].key,
                                                strValues[i].value);

        if (strValues[i].check) {
            strValues[i].check(strValues[i].key, *strValues[i].variable);
        }
    }
}

}
}
1,725
1,125
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.significant.heuristics;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardException;

import java.io.IOException;

/**
 * Significance heuristic that scores a term by the fraction of all of its
 * occurrences that fall inside the sample (subset): subsetFreq / supersetFreq.
 * The heuristic is stateless, so serialization carries no payload and all
 * instances are equal.
 */
public class PercentageScore extends SignificanceHeuristic {
    public static final String NAME = "percentage";

    public PercentageScore() {
    }

    /** Stream constructor; stateless, so there is nothing to read. */
    public PercentageScore(StreamInput in) {
        // Nothing to read.
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Stateless: nothing to write.
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME).endObject();
        return builder;
    }

    /**
     * Parses the heuristic from xContent. The expected form is an empty object:
     * {@code "percentage": {}}.
     *
     * @throws ElasticsearchParseException if the object is not empty
     */
    public static SignificanceHeuristic parse(XContentParser parser)
            throws IOException, QueryShardException {
        // move to the closing bracket
        // Compare enum constants with != rather than equals(): identical result
        // for enums, and it cannot NPE if nextToken() returns null at EOF.
        if (parser.nextToken() != XContentParser.Token.END_OBJECT) {
            throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken());
        }
        return new PercentageScore();
    }

    /**
     * Indicates the significance of a term in a sample by determining what percentage
     * of all occurrences of a term are found in the sample.
     */
    @Override
    public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize) {
        checkFrequencyValidity(subsetFreq, subsetSize, supersetFreq, supersetSize, "PercentageScore");
        if (supersetFreq == 0) {
            // avoid a divide by zero issue
            return 0;
        }
        return (double) subsetFreq / (double) supersetFreq;
    }

    @Override
    public boolean equals(Object obj) {
        // All instances are interchangeable; equality is by exact class.
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        return getClass().hashCode();
    }

    /** Builder counterpart; emits the same empty-object xContent form. */
    public static class PercentageScoreBuilder implements SignificanceHeuristicBuilder {

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject(NAME).endObject();
            return builder;
        }
    }
}
1,120
614
[ { "home": "West Ham United", "away": "Newcastle United", "homescore": "2", "awayscore": "0" }, { "home": "<NAME>", "away": "Deportivo La Coruña", "homescore": "1", "awayscore": "3" }, { "home": "Sampdoria", "away": "Bologna", "homescore": "2", "awayscore": "0" }, { "home": "Geylang International", "away": "Home United" }, { "home": "West Ham United", "away": "Newcastle United", "homescore": "2", "awayscore": "0" }, { "home": "<NAME>", "away": "Deportivo La Coruña", "homescore": "1", "awayscore": "3" }, { "home": "Sampdoria", "away": "Bologna", "homescore": "2", "awayscore": "0" }, { "home": "Latina", "away": "Trapani", "homescore": "1", "awayscore": "1" }, { "home": "<NAME>", "away": "MSV Duisburg", "homescore": "2", "awayscore": "0" }, { "home": "Brest", "away": "Lens", "homescore": "2", "awayscore": "1" }, { "home": "<NAME>", "away": "SC Stormvogels Telstar", "homescore": "4", "awayscore": "1" }, { "home": "Boavista", "away": "<NAME>", "homescore": "0", "awayscore": "1" }, { "home": "FC Ufa", "away": "Ural Sverdlovsk Oblast", "homescore": "0", "awayscore": "1" }, { "home": "<NAME>", "away": "<NAME>", "homescore": "1", "awayscore": "0" }, { "home": "Kayserispor", "away": "Trabzonspor", "homescore": "0", "awayscore": "1" }, { "home": "Iraklis", "away": "Levadiakos", "homescore": "0", "awayscore": "1" }, { "home": "Esbjerg", "away": "<NAME>", "homescore": "4", "awayscore": "2" }, { "home": "<NAME>", "away": "Halmstad", "homescore": "4", "awayscore": "1" }, { "home": "<NAME>", "away": "<NAME>", "homescore": "0", "awayscore": "1" }, { "home": "<NAME>", "away": "Djurgarden", "homescore": "2", "awayscore": "1" }, { "home": "Aldosivi", "away": "Crucero del Norte", "homescore": "2", "awayscore": "0" }, { "home": "Temperley", "away": "Quilmes", "homescore": "0", "awayscore": "0" }, { "home": "Colegiales", "away": "Estudiantes de Caseros", "homescore": "1", "awayscore": "2" }, { "home": "Comunicaciones (Mercedes)", "away": "Barracas Central", "homescore": "0", 
"awayscore": "0" }, { "home": "Fénix", "away": "<NAME>", "homescore": "2", "awayscore": "0" }, { "home": "Flandria", "away": "Deportivo Riestra", "homescore": "2", "awayscore": "0" }, { "home": "Merlo", "away": "UAI Urquiza", "homescore": "0", "awayscore": "2" }, { "home": "Claypole", "away": "<NAME>", "homescore": "1", "awayscore": "1" }, { "home": "Palestino", "away": "<NAME>", "homescore": "2", "awayscore": "0" }, { "home": "Colo Colo", "away": "Unión La Calera", "homescore": "3", "awayscore": "1" }, { "home": "Arica", "away": "Universidad de Concepción", "homescore": "3", "awayscore": "0" }, { "home": "Deportes Tolima", "away": "Atlético Nacional", "homescore": "0", "awayscore": "0" }, { "home": "America Cali", "away": "Dépor FC", "homescore": "3", "awayscore": "1" }, { "home": "Geylang International", "away": "Home United" }, { "home": "Carmelita", "away": "Belen" }, { "home": "Cavalier SC", "away": "Arnett Gardens FC" } ]
1,265
2,136
package com.ulisesbocchio.jasyptspringboot.encryptor; import com.ulisesbocchio.jasyptspringboot.configuration.StringEncryptorBuilder; import com.ulisesbocchio.jasyptspringboot.properties.JasyptEncryptorConfigurationProperties; import com.ulisesbocchio.jasyptspringboot.util.Singleton; import lombok.extern.slf4j.Slf4j; import org.jasypt.encryption.StringEncryptor; import org.jasypt.encryption.pbe.PooledPBEStringEncryptor; import org.springframework.beans.factory.BeanFactory; import org.springframework.core.env.ConfigurableEnvironment; import java.util.Optional; import static com.ulisesbocchio.jasyptspringboot.util.Functional.tap; /** * Default Lazy Encryptor that delegates to a custom {@link StringEncryptor} bean or creates a default {@link PooledPBEStringEncryptor} or {@link SimpleAsymmetricStringEncryptor} * based on what properties are provided * * @author <NAME> */ @Slf4j public class DefaultLazyEncryptor implements StringEncryptor { private final Singleton<StringEncryptor> singleton; public DefaultLazyEncryptor(final ConfigurableEnvironment e, final String customEncryptorBeanName, boolean isCustom, final BeanFactory bf) { singleton = new Singleton<>(() -> Optional.of(customEncryptorBeanName) .filter(bf::containsBean) .map(name -> (StringEncryptor) bf.getBean(name)) .map(tap(bean -> log.info("Found Custom Encryptor Bean {} with name: {}", bean, customEncryptorBeanName))) .orElseGet(() -> { if (isCustom) { throw new IllegalStateException(String.format("String Encryptor custom Bean not found with name '%s'", customEncryptorBeanName)); } log.info("String Encryptor custom Bean not found with name '{}'. 
Initializing Default String Encryptor", customEncryptorBeanName); return createDefault(e); })); } public DefaultLazyEncryptor(final ConfigurableEnvironment e) { singleton = new Singleton<>(() -> createDefault(e)); } private StringEncryptor createDefault(ConfigurableEnvironment e) { return new StringEncryptorBuilder(JasyptEncryptorConfigurationProperties.bindConfigProps(e), "jasypt.encryptor").build(); } @Override public String encrypt(final String message) { return singleton.get().encrypt(message); } @Override public String decrypt(final String encryptedMessage) { return singleton.get().decrypt(encryptedMessage); } }
1,041
387
<reponame>flygod1159/Arcane-Engine #include "arcpch.h" #include "TextureLoader.h" #include <Arcane/Graphics/Texture/Cubemap.h> #include <Arcane/Graphics/Texture/Texture.h> namespace Arcane { // Static declarations std::unordered_map<std::string, Texture*> TextureLoader::m_TextureCache; Texture *TextureLoader::s_DefaultAlbedo; Texture *TextureLoader::s_DefaultNormal; Texture *TextureLoader::s_WhiteTexture; Texture *TextureLoader::s_BlackTexture; Texture* TextureLoader::Load2DTexture(std::string &path, TextureSettings *settings) { // Check the cache auto iter = m_TextureCache.find(path); if (iter != m_TextureCache.end()) { return iter->second; } // Load the texture int width, height, numComponents; unsigned char *data = stbi_load(path.c_str(), &width, &height, &numComponents, 0); if (!data) { ARC_LOG_ERROR("Failed to load texture path: {0}", path); stbi_image_free(data); return nullptr; } GLenum dataFormat; switch (numComponents) { case 1: dataFormat = GL_RED; break; case 3: dataFormat = GL_RGB; break; case 4: dataFormat = GL_RGBA; break; } Texture *texture = nullptr; if (settings != nullptr) { texture = new Texture(*settings); } else { texture = new Texture(); } texture->Generate2DTexture(width, height, dataFormat, GL_UNSIGNED_BYTE, data); m_TextureCache.insert(std::pair<std::string, Texture*>(path, texture)); stbi_image_free(data); return m_TextureCache[path]; } Cubemap* TextureLoader::LoadCubemapTexture(const std::string &right, const std::string &left, const std::string &top, const std::string &bottom, const std::string &back, const std::string &front, CubemapSettings *settings) { Cubemap *cubemap = new Cubemap(); if (settings != nullptr) cubemap->SetCubemapSettings(*settings); std::vector<std::string> faces = { right, left, top, bottom, back, front }; // Load the textures for the cubemap int width, height, numComponents; for (unsigned int i = 0; i < 6; ++i) { unsigned char *data = stbi_load(faces[i].c_str(), &width, &height, &numComponents, 0); if (data) { 
GLenum dataFormat; switch (numComponents) { case 1: dataFormat = GL_RED; break; case 3: dataFormat = GL_RGB; break; case 4: dataFormat = GL_RGBA; break; } cubemap->GenerateCubemapFace(GL_TEXTURE_CUBE_MAP_POSITIVE_X + i, width, height, dataFormat, data); stbi_image_free(data); } else { ARC_LOG_ERROR("Couldn't load cubemap using 6 filepaths. Filepath error: {0}", faces[i]); stbi_image_free(data); return cubemap; } } return cubemap; } void TextureLoader::InitializeDefaultTextures() { // Setup texture and minimal filtering because they are 1x1 textures so they require none TextureSettings srgbTextureSettings; srgbTextureSettings.IsSRGB = true; s_DefaultAlbedo = Load2DTexture(std::string("res/textures/default/defaultAlbedo.png"), &srgbTextureSettings); s_DefaultAlbedo->Bind(); s_DefaultAlbedo->SetAnisotropicFilteringMode(1.0f); s_DefaultAlbedo->SetTextureMinFilter(GL_NEAREST); s_DefaultAlbedo->SetTextureMagFilter(GL_NEAREST); s_DefaultNormal = Load2DTexture(std::string("res/textures/default/defaultNormal.png")); s_DefaultNormal->Bind(); s_DefaultNormal->SetAnisotropicFilteringMode(1.0f); s_DefaultNormal->SetTextureMinFilter(GL_NEAREST); s_DefaultNormal->SetTextureMagFilter(GL_NEAREST); s_WhiteTexture = Load2DTexture(std::string("res/textures/default/white.png")); s_WhiteTexture->Bind(); s_WhiteTexture->SetAnisotropicFilteringMode(1.0f); s_WhiteTexture->SetTextureMinFilter(GL_NEAREST); s_WhiteTexture->SetTextureMagFilter(GL_NEAREST); s_BlackTexture = Load2DTexture(std::string("res/textures/default/black.png")); s_BlackTexture->Bind(); s_BlackTexture->SetAnisotropicFilteringMode(1.0f); s_BlackTexture->SetTextureMinFilter(GL_NEAREST); s_BlackTexture->SetTextureMagFilter(GL_NEAREST); } }
1,464
432
/*-
 * Copyright (c) 1983-2003, Regents of the University of California.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the University of California, San Francisco nor
 *    the names of its contributors may be used to endorse or promote
 *    products derived from this software without specific prior written
 *    permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
 * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
 * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $OpenBSD: connect.c,v 1.6 2003/06/11 08:45:24 pjanzen Exp $
 * $NetBSD: connect.c,v 1.3 1997/10/11 08:13:40 lukem Exp $
 */

#include <sys/types.h>
#include <arpa/inet.h>
#include <signal.h>
#include <unistd.h>
#include <string.h>

#include "hunt.h"
#include "client.h"

/*
 * do_connect:
 *	Send the opening handshake to the hunt server over the already
 *	connected Socket: uid, player name, team, entry status, tty name
 *	and connection mode, in that fixed order.
 *
 *	NOTE(review): the write order presumably mirrors the server's read
 *	sequence — do not reorder these writes without checking the server.
 */
void
do_connect(char *name, u_int8_t team, u_int32_t enter_status)
{
	u_int32_t uid;
	u_int32_t mode;
	const char * Ttyname;
	char buf[NAMELEN];

	/* Pick the connection mode from global client state. */
	if (Send_message != NULL)
		mode = C_MESSAGE;
	else if (Am_monitor)
		mode = C_MONITOR;
	else
		mode = C_PLAYER;

	/* Report the controlling tty; fall back to a placeholder string. */
	Ttyname = ttyname(STDOUT_FILENO);
	if (Ttyname == NULL)
		Ttyname = "not a tty";
	/* Zero-fill first so the fixed-size field carries no stack garbage. */
	memset(buf, '\0', sizeof buf);
	strlcpy(buf, Ttyname, sizeof buf);

	/* All multi-byte integers travel in network byte order. */
	uid = htonl(getuid());
	enter_status = htonl(enter_status);
	mode = htonl(mode);

	write(Socket, &uid, sizeof uid);
	write(Socket, name, NAMELEN);
	write(Socket, &team, sizeof team);
	write(Socket, &enter_status, sizeof enter_status);
	write(Socket, buf, NAMELEN);
	write(Socket, &mode, sizeof mode);
}
909
13,585
<filename>mybatis-plus/src/test/java/com/baomidou/mybatisplus/test/batch/Entity.java<gh_stars>1000+ package com.baomidou.mybatisplus.test.batch; import lombok.Data; import lombok.NoArgsConstructor; import java.io.Serializable; /** * @author miemie * @since 2020-06-23 */ @Data @NoArgsConstructor public class Entity implements Serializable { private static final long serialVersionUID = 6962439201546719734L; private Long id; private String name; public Entity(String name) { this.name = name; } }
199
348
{"nom":"Salles-Mongiscard","circ":"3ème circonscription","dpt":"Pyrénées-Atlantiques","inscrits":245,"abs":122,"votants":123,"blancs":8,"nuls":8,"exp":107,"res":[{"nuance":"SOC","nom":"<NAME>","voix":61},{"nuance":"REM","nom":"<NAME>","voix":46}]}
101
2,098
<filename>src/aggregate/expr/parser.h #define AND 1 #define OR 2 #define NOT 3 #define EQ 4 #define NE 5 #define LT 6 #define LE 7 #define GT 8 #define GE 9 #define PLUS 10 #define MINUS 11 #define DIVIDE 12 #define TIMES 13 #define MOD 14 #define POW 15 #define LP 16 #define RP 17 #define PROPERTY 18 #define SYMBOL 19 #define STRING 20 #define NUMBER 21 #define ARGLIST 22 #define COMMA 23
776
2,151
<gh_stars>1000+ // Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "media/audio/sounds/sounds_manager.h" #include <vector> #include "base/logging.h" #include "base/memory/ref_counted.h" #include "media/audio/audio_manager.h" #include "media/audio/sounds/audio_stream_handler.h" namespace media { namespace { SoundsManager* g_instance = NULL; bool g_initialized_for_testing = false; // SoundsManagerImpl --------------------------------------------------- class SoundsManagerImpl : public SoundsManager { public: SoundsManagerImpl() = default; ~SoundsManagerImpl() override { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); } // SoundsManager implementation: bool Initialize(SoundKey key, const base::StringPiece& data) override; bool Play(SoundKey key) override; bool Stop(SoundKey key) override; base::TimeDelta GetDuration(SoundKey key) override; private: AudioStreamHandler* GetHandler(SoundKey key); // There's only a handful of sounds, so a vector is sufficient. 
struct StreamEntry { SoundKey key; std::unique_ptr<AudioStreamHandler> handler; }; std::vector<StreamEntry> handlers_; DISALLOW_COPY_AND_ASSIGN(SoundsManagerImpl); }; bool SoundsManagerImpl::Initialize(SoundKey key, const base::StringPiece& data) { if (AudioStreamHandler* handler = GetHandler(key)) { DCHECK(handler->IsInitialized()); return true; } std::unique_ptr<AudioStreamHandler> handler(new AudioStreamHandler(data)); if (!handler->IsInitialized()) { LOG(WARNING) << "Can't initialize AudioStreamHandler for key=" << key; return false; } handlers_.push_back({key, std::move(handler)}); return true; } bool SoundsManagerImpl::Play(SoundKey key) { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); AudioStreamHandler* handler = GetHandler(key); return handler && handler->Play(); } bool SoundsManagerImpl::Stop(SoundKey key) { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); AudioStreamHandler* handler = GetHandler(key); if (!handler) return false; handler->Stop(); return true; } base::TimeDelta SoundsManagerImpl::GetDuration(SoundKey key) { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); AudioStreamHandler* handler = GetHandler(key); return !handler ? 
base::TimeDelta() : handler->duration(); } AudioStreamHandler* SoundsManagerImpl::GetHandler(SoundKey key) { for (auto& entry : handlers_) { if (entry.key == key) return entry.handler.get(); } return nullptr; } } // namespace SoundsManager::SoundsManager() = default; SoundsManager::~SoundsManager() { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); } // static void SoundsManager::Create() { CHECK(!g_instance || g_initialized_for_testing) << "SoundsManager::Create() is called twice"; if (g_initialized_for_testing) return; g_instance = new SoundsManagerImpl(); } // static void SoundsManager::Shutdown() { CHECK(g_instance) << "SoundsManager::Shutdown() is called " << "without previous call to Create()"; delete g_instance; g_instance = NULL; } // static SoundsManager* SoundsManager::Get() { CHECK(g_instance) << "SoundsManager::Get() is called before Create()"; return g_instance; } // static void SoundsManager::InitializeForTesting(SoundsManager* manager) { CHECK(!g_instance) << "SoundsManager is already initialized."; CHECK(manager); g_instance = manager; g_initialized_for_testing = true; } } // namespace media
1,195
3,702
package app.metatron.discovery.domain.favorite;

import org.hibernate.annotations.GenericGenerator;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;

import app.metatron.discovery.common.entity.DomainType;
import app.metatron.discovery.domain.AbstractHistoryEntity;
import app.metatron.discovery.domain.MetatronDomain;

/**
 * JPA entity for a user "favorite": a marker row pointing at another domain
 * object ({@link #targetId}) together with the kind of object it is
 * ({@link #domainType}). Audit fields (created/modified by/time, version)
 * are inherited from {@link AbstractHistoryEntity}.
 */
@Entity
@Table(name="favorite")
public class Favorite extends AbstractHistoryEntity implements MetatronDomain<String> {

  /**
   * ID — UUID generated by Hibernate on insert.
   */
  @Id
  @GeneratedValue(generator = "uuid")
  @GenericGenerator(name = "uuid", strategy = "uuid2")
  @Column(name = "id")
  String id;

  // Identifier of the favorited object (interpreted according to domainType).
  @Column(name = "favorite_target")
  String targetId;

  // Kind of object targetId refers to; stored as the enum's name string.
  @Column(name = "favorite_domain")
  @Enumerated(EnumType.STRING)
  DomainType domainType;

  @Override
  public String getId() {
    return id;
  }

  public void setId(String id) {
    this.id = id;
  }

  public String getTargetId() {
    return targetId;
  }

  public void setTargetId(String targetId) {
    this.targetId = targetId;
  }

  public DomainType getDomainType() {
    return domainType;
  }

  public void setDomainType(DomainType domainType) {
    this.domainType = domainType;
  }

  @Override
  public String toString() {
    return "Favorite{" +
        "id='" + id + '\'' +
        ", targetId='" + targetId + '\'' +
        ", domainType=" + domainType +
        ", version=" + version +
        ", createdBy='" + createdBy + '\'' +
        ", createdTime=" + createdTime +
        ", modifiedBy='" + modifiedBy + '\'' +
        ", modifiedTime=" + modifiedTime +
        '}';
  }
}
625
<gh_stars>100-1000 /** @file util.h * @brief header/implementation file for utility functions used by the node * module * * @author <NAME> * @author <NAME> * * The MIT License (MIT) * * Copyright (c) 2015, 2016, 2017 PayPal * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to * deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. 
*/ #ifndef SEIFNODE_UTIL_H #define SEIFNODE_UTIL_H // ----------------- // standard includes // ----------------- #include <vector> #include <string> // ----------------- // cryptopp includes // ----------------- #include "sha3.h" using CryptoPP::SHA3_256; // ---------- // hashString // ---------- /** * @brief creates a SHA3-256 hash of the given string using CryptoPP * @param digest output vector in which the hash to be stored * @param str input string to be hashed * PreCondition: 'digest' should be of appropriate size * (CryptoPP::SHA3_256::DIGESTSIZE) * @return void */ static void hashString(std::vector<uint8_t>& digest, const std::string& str) { CryptoPP::SHA3_256 hash; hash.Update(reinterpret_cast<const uint8_t*>(str.c_str()), str.size()); hash.Final(digest.data()); } // ---------- // hashBuffer // ---------- /** * @brief creates a SHA3-256 hash of the given buffer using CryptoPP * @param digest output vector in which the hash to be stored * @param input input buffer to be hashed * @param inputLen length of input buffer to be hashed * PreCondition: 'digest' should be of appropriate size * (CryptoPP::SHA3_256::DIGESTSIZE) * @return void */ static void hashBuffer( std::vector<uint8_t>& digest, const uint8_t* input, int inputLen ) { CryptoPP::SHA3_256 hash; hash.Update(input, inputLen); hash.Final(digest.data()); } #endif
842
2,494
/* * chacha20.h - header file for ChaCha20 implementation. * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #ifndef FREEBL_CHACHA20_H_ #define FREEBL_CHACHA20_H_ #include <stdint.h> /* ChaCha20XOR encrypts |inLen| bytes from |in| with the given key and * nonce and writes the result to |out|, which may be equal to |in|. The * initial block counter is specified by |counter|. */ extern void ChaCha20XOR(unsigned char *out, const unsigned char *in, unsigned int inLen, const unsigned char key[32], const unsigned char nonce[8], uint64_t counter); #endif /* FREEBL_CHACHA20_H_ */
263
6,342
<reponame>vikram0207/django-rest from allauth.socialaccount.providers.oauth.urls import default_urlpatterns from .provider import TrelloProvider urlpatterns = default_urlpatterns(TrelloProvider)
64
2,151
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CHROME_BROWSER_CHROMEOS_POLICY_ACTIVE_DIRECTORY_POLICY_MANAGER_H_
#define CHROME_BROWSER_CHROMEOS_POLICY_ACTIVE_DIRECTORY_POLICY_MANAGER_H_

#include <memory>

#include "base/bind.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/time/time.h"
#include "base/timer/timer.h"
#include "chrome/browser/chromeos/policy/component_active_directory_policy_service.h"
#include "components/account_id/account_id.h"
#include "components/policy/core/common/cloud/cloud_policy_store.h"
#include "components/policy/core/common/configuration_policy_provider.h"
#include "components/policy/core/common/policy_scheduler.h"

namespace policy {

class CloudExternalDataManager;

// ConfigurationPolicyProvider for policy from Active Directory.
// Derived classes implement specializations for user and device policy.
// Data flow: Triggered by DoPolicyFetch(), policy is fetched by authpolicyd and
// stored in session manager with completion indicated by OnPolicyFetched().
// From there policy load from session manager is triggered, completion of which
// is notified via OnStoreLoaded()/OnStoreError().
class ActiveDirectoryPolicyManager
    : public ConfigurationPolicyProvider,
      public CloudPolicyStore::Observer,
      public ComponentActiveDirectoryPolicyService::Delegate {
 public:
  ~ActiveDirectoryPolicyManager() override;

  // ConfigurationPolicyProvider:
  void Init(SchemaRegistry* registry) override;
  void Shutdown() override;
  bool IsInitializationComplete(PolicyDomain domain) const override;
  void RefreshPolicies() override;

  // CloudPolicyStore::Observer:
  void OnStoreLoaded(CloudPolicyStore* cloud_policy_store) override;
  void OnStoreError(CloudPolicyStore* cloud_policy_store) override;

  // ComponentActiveDirectoryPolicyService::Delegate
  void OnComponentActiveDirectoryPolicyUpdated() override;

  // Accessors for the collaborating objects owned by this manager.
  CloudPolicyStore* store() const { return store_.get(); }
  CloudExternalDataManager* external_data_manager() {
    return external_data_manager_.get();
  }
  PolicyScheduler* scheduler() { return scheduler_.get(); }
  ComponentActiveDirectoryPolicyService* extension_policy_service() {
    return extension_policy_service_.get();
  }

 protected:
  ActiveDirectoryPolicyManager(
      std::unique_ptr<CloudPolicyStore> store,
      std::unique_ptr<CloudExternalDataManager> external_data_manager,
      PolicyDomain extension_policy_domain);

  // Publish the policy that's currently cached in the store.
  void PublishPolicy();

  // Creates the policy service to load extension policy from Session Manager.
  // |scope| specifies whether the component policy is fetched along with user
  // or device policy. |account_type| specifies which account Session Manager
  // should load policy from (device vs user). |account_id| must be empty for
  // the device account and the user's account id for user accounts.
  // |schema_registry| is the registry that contains the extension schemas.
  void CreateExtensionPolicyService(
      PolicyScope scope,
      login_manager::PolicyAccountType account_type,
      const AccountId& account_id,
      SchemaRegistry* schema_registry);

  // Calls into authpolicyd to fetch policy. Reports success or failure via
  // |callback|.
  virtual void DoPolicyFetch(PolicyScheduler::TaskCallback callback) = 0;

  // Allows derived classes to cancel waiting for the initial policy fetch/load
  // and to flag the ConfigurationPolicyProvider ready (assuming all other
  // initialization tasks have completed) or to exit the session in case the
  // requirements to continue have not been met.
  virtual void CancelWaitForInitialPolicy() {}

  // Whether policy fetch has ever been reported as completed by authpolicyd
  // during lifetime of the object (after Chrome was started).
  bool fetch_ever_completed_ = false;

 private:
  // Called by scheduler with result of policy fetch. This covers policy
  // download, parsing and storing into session manager. (To access and publish
  // the policy, the store needs to be reloaded from session manager.)
  void OnPolicyFetched(bool success);

  // Called right before policy is published. Expands e.g. ${machine_name} for
  // a selected set of policies.
  void ExpandVariables(PolicyMap* policy_map);

  // Store used to serialize policy, usually sends data to Session Manager.
  const std::unique_ptr<CloudPolicyStore> store_;

  // Manages external data referenced by policies.
  const std::unique_ptr<CloudExternalDataManager> external_data_manager_;

  // Manages policy for Chrome extensions.
  std::unique_ptr<ComponentActiveDirectoryPolicyService>
      extension_policy_service_;

  // Type of extension policy to manage. Must be either POLICY_DOMAIN_EXTENSIONS
  // or POLICY_DOMAIN_SIGNIN_EXTENSIONS.
  const PolicyDomain extension_policy_domain_;

  std::unique_ptr<PolicyScheduler> scheduler_;

  // Must be last member.
  base::WeakPtrFactory<ActiveDirectoryPolicyManager> weak_ptr_factory_{this};

  DISALLOW_COPY_AND_ASSIGN(ActiveDirectoryPolicyManager);
};

// Manages user policy for Active Directory managed devices.
class UserActiveDirectoryPolicyManager : public ActiveDirectoryPolicyManager {
 public:
  // If |initial_policy_fetch_timeout| is non-zero, IsInitializationComplete()
  // is forced to false until either there has been a successful policy fetch
  // from the server and a subsequent successful load from session manager or
  // |initial_policy_fetch_timeout| has expired and there has been a successful
  // load from session manager. If |policy_required| is true then the user
  // session is aborted by calling |exit_session| if no policy was loaded from
  // session manager and this is either immediate load in case of Chrome restart
  // or policy fetch failed.
  UserActiveDirectoryPolicyManager(
      const AccountId& account_id,
      bool policy_required,
      base::TimeDelta initial_policy_fetch_timeout,
      base::OnceClosure exit_session,
      std::unique_ptr<CloudPolicyStore> store,
      std::unique_ptr<CloudExternalDataManager> external_data_manager);

  ~UserActiveDirectoryPolicyManager() override;

  // ConfigurationPolicyProvider:
  void Init(SchemaRegistry* registry) override;
  bool IsInitializationComplete(PolicyDomain domain) const override;

  // Helper function to force a policy fetch timeout.
  void ForceTimeoutForTesting();

 protected:
  // ActiveDirectoryPolicyManager:
  void DoPolicyFetch(PolicyScheduler::TaskCallback callback) override;
  void CancelWaitForInitialPolicy() override;

 private:
  // Called when |initial_policy_timeout_| times out, to cancel the blocking
  // wait for the initial policy fetch.
  void OnBlockingFetchTimeout();

  // The user's account id.
  const AccountId account_id_;

  // If policy is required, but cannot be obtained (via fetch or load),
  // |exit_session_| is called.
  const bool policy_required_;

  // Whether we're waiting for a policy fetch to complete before reporting
  // IsInitializationComplete().
  bool waiting_for_initial_policy_fetch_ = false;

  // A timer that puts a hard limit on the maximum time to wait for the initial
  // policy fetch/load.
  base::Timer initial_policy_timeout_{false /* retain_user_task */,
                                      false /* is_repeating */};

  // Callback to exit the session.
  base::OnceClosure exit_session_;

  // Must be last member.
  base::WeakPtrFactory<UserActiveDirectoryPolicyManager> weak_ptr_factory_{
      this};

  DISALLOW_COPY_AND_ASSIGN(UserActiveDirectoryPolicyManager);
};

// Manages device policy for Active Directory managed devices.
class DeviceActiveDirectoryPolicyManager : public ActiveDirectoryPolicyManager {
 public:
  explicit DeviceActiveDirectoryPolicyManager(
      std::unique_ptr<CloudPolicyStore> store);

  ~DeviceActiveDirectoryPolicyManager() override;

  // ConfigurationPolicyProvider:
  void Shutdown() override;

  // Passes the |schema_registry| that corresponds to the signin profile and
  // uses it (wrapped in a ForwardingSchemaRegistry) to create the extension
  // policy service.
  void SetSigninProfileSchemaRegistry(SchemaRegistry* schema_registry);

 protected:
  // ActiveDirectoryPolicyManager:
  void DoPolicyFetch(PolicyScheduler::TaskCallback callback) override;

 private:
  // Wrapper schema registry that tracks the signin profile schema registry once
  // it is passed to this class.
  std::unique_ptr<ForwardingSchemaRegistry>
      signin_profile_forwarding_schema_registry_;

  DISALLOW_COPY_AND_ASSIGN(DeviceActiveDirectoryPolicyManager);
};

}  // namespace policy

#endif  // CHROME_BROWSER_CHROMEOS_POLICY_ACTIVE_DIRECTORY_POLICY_MANAGER_H_
315
package no.nordicsemi.android.mesh;

import java.util.List;

import androidx.annotation.NonNull;

/**
 * Export configuration controlling which {@link Scene}s are included when a
 * mesh network is exported. Choose one of the nested {@link Builder}
 * implementations ({@link ExportAll}, {@link ExportRelated}, {@link ExportSome}).
 */
public class ScenesConfig extends ExportConfig {

    /**
     * Use this class to configure all Scenes. Exported scenes will not contain addresses of excluded nodes.
     */
    public static class ExportAll implements Builder {
        @Override
        public ScenesConfig build() {
            return new ScenesConfig(this);
        }
    }

    /**
     * Use this class to configure when exporting the related Scenes, the scenes will not contain addresses of excluded nodes.
     */
    public static class ExportRelated implements Builder {
        @Override
        public ScenesConfig build() {
            return new ScenesConfig(this);
        }
    }

    /**
     * Use this class to configure when exporting some of the Scenes.
     */
    public static class ExportSome implements Builder {

        // Explicit subset of scenes selected for export.
        private final List<Scene> scenes;

        /**
         * Constructs ExportSome to export only a selected number of Scenes when exporting a mesh network.
         * The scenes will not contain addresses of excluded nodes.
         *
         * @param scenes List of Scenes to export.
         */
        public ExportSome(@NonNull final List<Scene> scenes) {
            this.scenes = scenes;
        }

        /** Returns the scenes selected for export. */
        protected List<Scene> getScenes() {
            return scenes;
        }

        @Override
        public ScenesConfig build() {
            return new ScenesConfig(this);
        }
    }

    /**
     * Creates a ScenesConfig backed by the given builder strategy.
     *
     * @param config builder describing which scenes to export
     */
    ScenesConfig(@NonNull final Builder config) {
        super(config);
    }
}
544
<reponame>reimerp/hvac<gh_stars>100-1000 from hvac.v1 import Client __all__ = ("Client",)
41
1,212
/** @file vl_version.c ** @brief vl_version MEX definition ** @author <NAME> **/ /* Copyright (C) 2007-12 <NAME> and <NAME>. All rights reserved. This file is part of the VLFeat library and is made available under the terms of the BSD license (see the COPYING file). */ #include "mexutils.h" #include <vl/stringop.h> #include <assert.h> /* option codes */ enum { opt_verbose } ; /* options */ vlmxOption options [] = { {"Verbose", 0, opt_verbose }, {0, 0, 0 } } ; void mexFunction(int nout, mxArray *out[], int nin, const mxArray *in[]) { int verbose = 0 ; char buffer [1024] ; int unsigned const bufferSize = sizeof(buffer)/sizeof(buffer[0]) ; int opt ; int next = 0 ; mxArray const *optarg ; VL_USE_MATLAB_ENV ; if (nout > 1) { vlmxError(vlmxErrTooManyOutputArguments, NULL) ; } while ((opt = vlmxNextOption (in, nin, options, &next, &optarg)) >= 0) { switch (opt) { case opt_verbose : ++ verbose ; break ; default: abort() ; } } if (verbose) { int offset = 0 ; char * string = vl_configuration_to_string_copy() ; offset = vl_string_copy(buffer, bufferSize, string) ; snprintf(buffer + offset, bufferSize - offset, " SIMD enabled: %s\n", VL_YESNO(vl_get_simd_enabled())) ; if(string) vl_free(string) ; } else { snprintf(buffer, sizeof(buffer)/sizeof(buffer[0]), "%s", VL_VERSION_STRING) ; } if (nout == 0) { mexPrintf("%s\n", buffer) ; } else { out[0] = mxCreateString(buffer) ; } }
739
945
<filename>Modules/Core/SpatialObjects/include/itkSpatialObjectFactory.h /*========================================================================= * * Copyright NumFOCUS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0.txt * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *=========================================================================*/ /*========================================================================= * * Portions of this file are subject to the VTK Toolkit Version 3 copyright. * * Copyright (c) <NAME>, <NAME>, <NAME> * * For complete copyright, license and disclaimer of warranty information * please refer to the NOTICE file at the top of the ITK source tree. * *=========================================================================*/ #ifndef itkSpatialObjectFactory_h #define itkSpatialObjectFactory_h #include "itkSpatialObjectFactoryBase.h" namespace itk { /** *\class SpatialObjectFactory * \brief Create instances of SpatialObjects * \ingroup ITKSpatialObjects */ template <typename T> class SpatialObjectFactory : public SpatialObjectFactoryBase { public: static void RegisterSpatialObject() { typename T::Pointer t = T::New(); SpatialObjectFactoryBase::Pointer f = SpatialObjectFactoryBase::GetFactory(); f->RegisterSpatialObject(t->GetClassNameAndDimension().c_str(), t->GetClassNameAndDimension().c_str(), t->GetClassNameAndDimension().c_str(), 1, CreateObjectFunction<T>::New()); } }; } // end namespace itk #endif
659
973
/*_##########################################################################
  _##
  _##  Copyright (C) 2016  Pcap4J.org
  _##
  _##########################################################################
*/

package org.pcap4j.packet;

import java.nio.ByteOrder;
import org.pcap4j.packet.RadiotapPacket.RadiotapData;
import org.pcap4j.util.ByteArrays;

/**
 * Radiotap Lock quality field. Quality of Barker code lock. Unitless. Monotonically nondecreasing
 * with "better" lock strength. Called "Signal Quality" in datasheets.
 *
 * <p>The field is a 2-byte unsigned value stored little-endian, per the radiotap specification.
 *
 * @see <a href="http://www.radiotap.org/defined-fields/Lock%20quality">Radiotap</a>
 * @author <NAME>
 * @since pcap4j 1.6.5
 */
public final class RadiotapDataLockQuality implements RadiotapData {

  /** Serialization version marker. */
  private static final long serialVersionUID = -7889325752343077807L;

  // Fixed on-wire size of this radiotap field in bytes.
  private static final int LENGTH = 2;

  // Raw 16-bit value; interpret as unsigned via getLockQualityAsInt().
  private final short lockQuality;

  /**
   * A static factory method. This method validates the arguments by {@link
   * ByteArrays#validateBounds(byte[], int, int)}, which may throw exceptions undocumented here.
   *
   * @param rawData rawData
   * @param offset offset
   * @param length length
   * @return a new RadiotapLockQuality object.
   * @throws IllegalRawDataException if parsing the raw data fails.
   */
  public static RadiotapDataLockQuality newInstance(byte[] rawData, int offset, int length)
      throws IllegalRawDataException {
    ByteArrays.validateBounds(rawData, offset, length);
    return new RadiotapDataLockQuality(rawData, offset, length);
  }

  private RadiotapDataLockQuality(byte[] rawData, int offset, int length)
      throws IllegalRawDataException {
    // Reject buffers shorter than the fixed 2-byte field size.
    if (length < LENGTH) {
      StringBuilder sb = new StringBuilder(200);
      sb.append("The data is too short to build a RadiotapLockQuality (")
          .append(LENGTH)
          .append(" bytes). data: ")
          .append(ByteArrays.toHexString(rawData, " "))
          .append(", offset: ")
          .append(offset)
          .append(", length: ")
          .append(length);
      throw new IllegalRawDataException(sb.toString());
    }

    // Radiotap fields are little-endian on the wire.
    this.lockQuality = ByteArrays.getShort(rawData, offset, ByteOrder.LITTLE_ENDIAN);
  }

  private RadiotapDataLockQuality(Builder builder) {
    if (builder == null) {
      throw new NullPointerException("builder is null.");
    }
    this.lockQuality = builder.lockQuality;
  }

  /** @return lockQuality */
  public short getLockQuality() {
    return lockQuality;
  }

  /** @return lockQuality (zero-extended to an unsigned int) */
  public int getLockQualityAsInt() {
    return lockQuality & 0xFFFF;
  }

  @Override
  public int length() {
    return LENGTH;
  }

  @Override
  public byte[] getRawData() {
    // Serialize back to the little-endian wire representation.
    return ByteArrays.toByteArray(lockQuality, ByteOrder.LITTLE_ENDIAN);
  }

  /** @return a new Builder object populated with this object's fields. */
  public Builder getBuilder() {
    return new Builder(this);
  }

  @Override
  public String toString() {
    return toString("");
  }

  @Override
  public String toString(String indent) {
    StringBuilder sb = new StringBuilder();
    String ls = System.getProperty("line.separator");

    sb.append(indent)
        .append("Lock quality: ")
        .append(ls)
        .append(indent)
        .append("  Lock quality: ")
        .append(getLockQualityAsInt())
        .append(ls);

    return sb.toString();
  }

  @Override
  public int hashCode() {
    return lockQuality;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (!this.getClass().isInstance(obj)) {
      return false;
    }
    RadiotapDataLockQuality other = (RadiotapDataLockQuality) obj;
    return lockQuality == other.lockQuality;
  }

  /**
   * Builder for {@link RadiotapDataLockQuality}.
   *
   * @author <NAME>
   * @since pcap4j 1.6.5
   */
  public static final class Builder {

    private short lockQuality;

    /** Creates a builder with the default (zero) lock quality. */
    public Builder() {}

    private Builder(RadiotapDataLockQuality obj) {
      this.lockQuality = obj.lockQuality;
    }

    /**
     * @param lockQuality lockQuality
     * @return this Builder object for method chaining.
     */
    public Builder lockQuality(short lockQuality) {
      this.lockQuality = lockQuality;
      return this;
    }

    /** @return a new RadiotapLockQuality object. */
    public RadiotapDataLockQuality build() {
      return new RadiotapDataLockQuality(this);
    }
  }
}
1,523
377
/*******************************************************************************
 * * Copyright 2016 Impetus Infotech.
 * *
 * * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * * You may obtain a copy of the License at
 * *
 * * http://www.apache.org/licenses/LICENSE-2.0
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS,
 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * * See the License for the specific language governing permissions and
 * * limitations under the License.
 ******************************************************************************/
package com.impetus.kundera.dataasobject.entities;

import java.util.Date;
import java.util.Set;

import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.OneToMany;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

import com.impetus.core.DefaultKunderaEntity;

/**
 * The Class Tweets.
 *
 * JPA entity mapping a tweet row; owns a lazily-fetched one-to-many
 * collection of {@link Video} joined on the tweet_id column, with all
 * operations cascaded to the videos.
 *
 * @author impetus
 *
 *         Tweets entity
 */
@Entity
public class Tweets extends DefaultKunderaEntity<Tweets, String>
{
    /** The tweet id (primary key, column "tweet_id"). */
    @Id
    @Column(name = "tweet_id")
    private String tweetId;

    /** The body (tweet text, column "body"). */
    @Column(name = "body")
    private String body;

    /** The tweet date. Persisted as a DATE (day precision, no time-of-day). */
    @Column(name = "tweeted_at")
    @Temporal(TemporalType.DATE)
    private Date tweetDate;

    /** The videos. Lazy-loaded; all cascade operations propagate to them. */
    @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
    @JoinColumn(name = "tweet_id")
    private Set<Video> videos;

    /**
     * Instantiates a new tweets.
     */
    public Tweets()
    {
        // Default constructor.
    }

    /**
     * Gets the tweet id.
     *
     * @return the tweetId
     */
    public String getTweetId()
    {
        return tweetId;
    }

    /**
     * Sets the tweet id.
     *
     * @param tweetId
     *            the tweetId to set
     */
    public void setTweetId(String tweetId)
    {
        this.tweetId = tweetId;
    }

    /**
     * Gets the body.
     *
     * @return the body
     */
    public String getBody()
    {
        return body;
    }

    /**
     * Sets the body.
     *
     * @param body
     *            the body to set
     */
    public void setBody(String body)
    {
        this.body = body;
    }

    /**
     * Gets the tweet date.
     *
     * NOTE(review): returns the internal mutable {@link Date} instance
     * directly (conventional for JPA entities) — callers mutating it will
     * change the entity's state.
     *
     * @return the tweetDate
     */
    public Date getTweetDate()
    {
        return tweetDate;
    }

    /**
     * Sets the tweet date.
     *
     * @param tweetDate
     *            the tweetDate to set
     */
    public void setTweetDate(Date tweetDate)
    {
        this.tweetDate = tweetDate;
    }

    /**
     * Gets the videos.
     *
     * NOTE(review): exposes the internal collection directly; may be a lazy
     * proxy until accessed within a persistence context.
     *
     * @return the videos
     */
    public Set<Video> getVideos()
    {
        return videos;
    }

    /**
     * Sets the videos.
     *
     * @param videos
     *            the new videos
     */
    public void setVideos(Set<Video> videos)
    {
        this.videos = videos;
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString()
    {
        return "Tweets [tweetId=" + tweetId + ", body=" + body + ", tweetDate=" + tweetDate + ", videos=" + videos
                + "]";
    }
}
1,456
1,030
// Copyright (c) 2012 Youdao. All rights reserved. Use of this source code is // governed by a BSD-style license that can be found in the LICENSE file. #ifndef HEX_INCLUDE_HEX_CAPI_H_ #define HEX_INCLUDE_HEX_CAPI_H_ #ifdef __cplusplus extern "C" { #endif #include "include/capi/cef_base_capi.h" /// // This function should be called after CefInitialize() on the main application // thread to initialize heX settings. /// CEF_EXPORT int hex_settings_initialize(const struct _hex_settings_t* settings); #ifdef __cplusplus } #endif #endif // HEX_INCLUDE_HEX_CAPI_H_
213
2,434
<reponame>SergKhram/swagger2markup<gh_stars>1000+ /* * Copyright 2017 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.github.swagger2markup.internal.component; import io.github.swagger2markup.OpenAPI2MarkupConverter; import io.github.swagger2markup.adoc.ast.impl.DescriptionListEntryImpl; import io.github.swagger2markup.adoc.ast.impl.DescriptionListImpl; import io.github.swagger2markup.adoc.ast.impl.ListItemImpl; import io.github.swagger2markup.adoc.ast.impl.ParagraphBlockImpl; import io.github.swagger2markup.extension.MarkupComponent; import io.swagger.v3.oas.models.examples.Example; import org.apache.commons.lang3.StringUtils; import org.asciidoctor.ast.StructuralNode; import java.util.Collections; import java.util.Map; import static io.github.swagger2markup.adoc.converter.internal.Delimiters.LINE_SEPARATOR; import static io.github.swagger2markup.config.OpenAPILabels.LABEL_EXAMPLES; import static io.github.swagger2markup.config.OpenAPILabels.LABEL_EXTERNAL_VALUE; import static io.github.swagger2markup.internal.helper.OpenApiHelpers.appendDescription; public class ExamplesComponent extends MarkupComponent<StructuralNode, ExamplesComponent.Parameters, StructuralNode> { private final MediaTypeExampleComponent mediaTypeExampleComponent; public ExamplesComponent(OpenAPI2MarkupConverter.OpenAPIContext context) { super(context); this.mediaTypeExampleComponent = new MediaTypeExampleComponent(context); } public static ExamplesComponent.Parameters 
parameters(Map<String, Example> examples) { return new ExamplesComponent.Parameters(examples); } public StructuralNode apply(StructuralNode node, Map<String, Example> examples) { return apply(node, parameters(examples)); } @Override public StructuralNode apply(StructuralNode node, ExamplesComponent.Parameters parameters) { Map<String, Example> examples = parameters.examples; if (examples == null || examples.isEmpty()) return node; DescriptionListImpl examplesList = new DescriptionListImpl(node); examplesList.setTitle(labels.getLabel(LABEL_EXAMPLES)); examples.forEach((name, example) -> { DescriptionListEntryImpl exampleEntry = new DescriptionListEntryImpl(examplesList, Collections.singletonList(new ListItemImpl(examplesList, name))); ListItemImpl tagDesc = new ListItemImpl(exampleEntry, ""); ParagraphBlockImpl exampleBlock = new ParagraphBlockImpl(tagDesc); appendDescription(exampleBlock, example.getSummary()); appendDescription(exampleBlock, example.getDescription()); mediaTypeExampleComponent.apply(tagDesc, example.getValue()); ParagraphBlockImpl paragraphBlock = new ParagraphBlockImpl(tagDesc); String source = ""; generateRefLink(source, example.getExternalValue(), labels.getLabel(LABEL_EXTERNAL_VALUE)); generateRefLink(source, example.get$ref(), ""); if(StringUtils.isNotBlank(source)){ paragraphBlock.setSource(source); tagDesc.append(paragraphBlock); } exampleEntry.setDescription(tagDesc); examplesList.addEntry(exampleEntry); }); node.append(examplesList); return node; } private String generateRefLink(String source, String ref, String alt) { if (StringUtils.isNotBlank(ref)) { if (StringUtils.isBlank(alt)) { alt = ref.substring(ref.lastIndexOf('/') + 1); } String anchor = ref.replaceFirst("#", "").replaceAll("/", "_"); source += "<<" + anchor + "," + alt + ">>" + LINE_SEPARATOR; } return source; } public static class Parameters { private final Map<String, Example> examples; public Parameters(Map<String, Example> examples) { this.examples = examples; } } }
1,597
860
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.container.grouper.task; import com.google.common.base.Preconditions; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.apache.samza.container.TaskName; import org.apache.samza.coordinator.stream.CoordinatorStreamValueSerde; import org.apache.samza.coordinator.stream.messages.SetTaskContainerMapping; import org.apache.samza.coordinator.stream.messages.SetTaskModeMapping; import org.apache.samza.job.model.TaskMode; import org.apache.samza.metadatastore.MetadataStore; import org.apache.samza.serializers.Serde; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Task assignment Manager is used to persist and read the task-to-container * assignment information from the coordinator stream. 
* */ public class TaskAssignmentManager { private static final Logger LOG = LoggerFactory.getLogger(TaskAssignmentManager.class); private final Map<String, String> taskNameToContainerId = new HashMap<>(); private final Serde<String> containerIdSerde; private final Serde<String> taskModeSerde; private MetadataStore taskContainerMappingMetadataStore; private MetadataStore taskModeMappingMetadataStore; /** * Builds the TaskAssignmentManager based upon the provided {@link MetadataStore} that is instantiated. * Setting up a metadata store instance is expensive which requires opening multiple connections * and reading tons of information. Fully instantiated metadata store is taken as a constructor argument * to reuse it across different utility classes. Uses the {@link CoordinatorStreamValueSerde} to serialize * messages before reading/writing into metadata store. * * @param taskContainerMappingMetadataStore an instance of {@link MetadataStore} used to read/write the task to container assignments. * @param taskModeMappingMetadataStore an instance of {@link MetadataStore} used to read/write the task to mode assignments. */ public TaskAssignmentManager(MetadataStore taskContainerMappingMetadataStore, MetadataStore taskModeMappingMetadataStore) { Preconditions.checkNotNull(taskContainerMappingMetadataStore, "Metadata store cannot be null"); Preconditions.checkNotNull(taskModeMappingMetadataStore, "Metadata store cannot be null"); this.taskModeMappingMetadataStore = taskModeMappingMetadataStore; this.taskContainerMappingMetadataStore = taskContainerMappingMetadataStore; this.containerIdSerde = new CoordinatorStreamValueSerde(SetTaskContainerMapping.TYPE); this.taskModeSerde = new CoordinatorStreamValueSerde(SetTaskModeMapping.TYPE); } /** * Method to allow read container task information from {@link MetadataStore}. This method is used in {@link org.apache.samza.coordinator.JobModelManager}. 
* * @return the map of taskName: containerId */ public Map<String, String> readTaskAssignment() { taskNameToContainerId.clear(); taskContainerMappingMetadataStore.all().forEach((taskName, valueBytes) -> { String containerId = containerIdSerde.fromBytes(valueBytes); if (containerId != null) { taskNameToContainerId.put(taskName, containerId); } LOG.debug("Assignment for task {}: {}", taskName, containerId); }); return Collections.unmodifiableMap(new HashMap<>(taskNameToContainerId)); } public Map<TaskName, TaskMode> readTaskModes() { Map<TaskName, TaskMode> taskModeMap = new HashMap<>(); taskModeMappingMetadataStore.all().forEach((taskName, valueBytes) -> { String taskMode = taskModeSerde.fromBytes(valueBytes); if (taskMode != null) { taskModeMap.put(new TaskName(taskName), TaskMode.valueOf(taskMode)); } LOG.debug("Task mode assignment for task {}: {}", taskName, taskMode); }); return Collections.unmodifiableMap(new HashMap<>(taskModeMap)); } /** * Method to batch write task container info to {@link MetadataStore}. 
* @param mappings the task and container mappings: (ContainerId, (TaskName, TaskMode)) */ public void writeTaskContainerMappings(Map<String, Map<String, TaskMode>> mappings) { for (String containerId : mappings.keySet()) { Map<String, TaskMode> tasks = mappings.get(containerId); for (String taskName : tasks.keySet()) { TaskMode taskMode = tasks.get(taskName); LOG.info("Storing task: {} and container ID: {} into metadata store", taskName, containerId); String existingContainerId = taskNameToContainerId.get(taskName); if (existingContainerId != null && !existingContainerId.equals(containerId)) { LOG.info("Task \"{}\" in mode {} moved from container {} to container {}", new Object[]{taskName, taskMode, existingContainerId, containerId}); } else { LOG.debug("Task \"{}\" in mode {} assigned to container {}", taskName, taskMode, containerId); } if (containerId == null) { taskContainerMappingMetadataStore.delete(taskName); taskModeMappingMetadataStore.delete(taskName); taskNameToContainerId.remove(taskName); } else { taskContainerMappingMetadataStore.put(taskName, containerIdSerde.toBytes(containerId)); taskModeMappingMetadataStore.put(taskName, taskModeSerde.toBytes(taskMode.toString())); taskNameToContainerId.put(taskName, containerId); } } } taskContainerMappingMetadataStore.flush(); taskModeMappingMetadataStore.flush(); } /** * Deletes the task container info from the {@link MetadataStore} for the task names. * * @param taskNames the task names for which the mapping will be deleted. */ public void deleteTaskContainerMappings(Iterable<String> taskNames) { for (String taskName : taskNames) { taskContainerMappingMetadataStore.delete(taskName); taskModeMappingMetadataStore.delete(taskName); taskNameToContainerId.remove(taskName); } taskContainerMappingMetadataStore.flush(); taskModeMappingMetadataStore.flush(); } public void close() { taskContainerMappingMetadataStore.close(); taskModeMappingMetadataStore.close(); } }
2,152
333
package com.alipay.api.domain;

import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;

/**
 * Consumer notification information.
 *
 * @author auto create
 * @since 1.0, 2020-12-14 14:56:35
 */
public class ConsumerNotifyIstd extends AlipayObject {

	private static final long serialVersionUID = 4837759275173876412L;

	/**
	 * Goods count.
	 */
	@ApiField("goods_count")
	private Long goodsCount;

	/**
	 * Goods thumbnail image URL; supported formats: bmp, jpg, jpeg, png, gif.
	 */
	@ApiField("goods_img")
	private String goodsImg;

	/**
	 * Goods name.
	 */
	@ApiField("goods_name")
	private String goodsName;

	/**
	 * Merchant phone number.
	 */
	@ApiField("merchant_mobile")
	private String merchantMobile;

	/**
	 * Merchant name; tiny_app_id and merchant_name must not both be empty.
	 */
	@ApiField("merchant_name")
	private String merchantName;

	/**
	 * Merchant mini-program app id.
	 */
	@ApiField("tiny_app_id")
	private String tinyAppId;

	/**
	 * Path within the merchant mini-program; an order page is recommended.
	 */
	@ApiField("tiny_app_url")
	private String tinyAppUrl;

	public Long getGoodsCount() {
		return this.goodsCount;
	}
	public void setGoodsCount(Long goodsCount) {
		this.goodsCount = goodsCount;
	}

	public String getGoodsImg() {
		return this.goodsImg;
	}
	public void setGoodsImg(String goodsImg) {
		this.goodsImg = goodsImg;
	}

	public String getGoodsName() {
		return this.goodsName;
	}
	public void setGoodsName(String goodsName) {
		this.goodsName = goodsName;
	}

	public String getMerchantMobile() {
		return this.merchantMobile;
	}
	public void setMerchantMobile(String merchantMobile) {
		this.merchantMobile = merchantMobile;
	}

	public String getMerchantName() {
		return this.merchantName;
	}
	public void setMerchantName(String merchantName) {
		this.merchantName = merchantName;
	}

	public String getTinyAppId() {
		return this.tinyAppId;
	}
	public void setTinyAppId(String tinyAppId) {
		this.tinyAppId = tinyAppId;
	}

	public String getTinyAppUrl() {
		return this.tinyAppUrl;
	}
	public void setTinyAppUrl(String tinyAppUrl) {
		this.tinyAppUrl = tinyAppUrl;
	}

}
996
326
/*BEGIN_LEGAL Intel Open Source License Copyright (c) 2002-2017 Intel Corporation. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of the Intel Corporation nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE INTEL OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
END_LEGAL */ #include <iostream> #include <fstream> #include "pin.H" #include "instlib.H" ofstream OutFile; // Counters static UINT64 icountMemRead = 0; static UINT64 icountMemRead2 = 0; static UINT64 icountMemWrite = 0; static UINT64 icountMemOp = 0; static UINT64 icountMemCall = 0; static ADDRINT lastIp = 0; static ADDRINT lastReadAddr = 0; static ADDRINT lastWriteAddr = 0; static const ADDRINT mask(~(16 - 1)); static UINT64 errors = 0; static size_t lastBytes = 0; static BOOL lastIsPrefetch = 0; static BOOL lastIsRmw = 0; static BOOL lastIsAtomic = 0; static volatile THREADID myThread = INVALID_THREADID; VOID ThreadStart(THREADID threadid, CONTEXT *ctxt, INT32 flags, VOID *v) { if (INVALID_THREADID == myThread) myThread = threadid; } VOID PIN_FAST_ANALYSIS_CALL readXmmMemoryFunc( ADDRINT memea_callback, UINT32 bytes, string *dis, ADDRINT ip) { // case of multithreading - we care just about the main thread if (PIN_ThreadId() != myThread) return; // Check xmm size if (16 != lastBytes || bytes != lastBytes) { OutFile << "XMM bytes error found: " << lastBytes << " and not 16 for " << *dis << endl; errors++; } } VOID PIN_FAST_ANALYSIS_CALL verifyPrefetchFunc( ADDRINT memea_callback, UINT32 bytes, string *dis, ADDRINT ip) { // case of multithreading - we care just about the main thread if (PIN_ThreadId() != myThread) return; if (!lastIsPrefetch) { OutFile << "Prefetch flag not set: " << *dis << endl; errors++; } } VOID PIN_FAST_ANALYSIS_CALL verifyRmwFunc( ADDRINT memea_callback, UINT32 bytes, string *dis, ADDRINT ip) { // case of multithreading - we care just about the main thread if (PIN_ThreadId() != myThread) return; if (!lastIsRmw) { OutFile << "RMW flag not set: " << *dis << endl; errors++; } } VOID PIN_FAST_ANALYSIS_CALL verifyAtomicFunc( ADDRINT memea_callback, UINT32 bytes, string *dis, ADDRINT ip) { // case of multithreading - we care just about the main thread if (PIN_ThreadId() != myThread) return; if (!lastIsAtomic) { OutFile << "Atomic flag not set: " << 
*dis << endl; errors++; } } VOID PIN_FAST_ANALYSIS_CALL readMemoryFunc( ADDRINT memea_orig,ADDRINT memea_callback,THREADID threadIndex, string *dis, ADDRINT ip) { // case of multithreading - we care just about the main thread if (PIN_ThreadId() != myThread) return; if (ip != lastIp) { OutFile << "read analysis missing: " << hex << ip << " " << *dis << endl; errors++; } if (memea_orig != lastReadAddr) { OutFile << "read orig address incorrect: " << hex << memea_orig << " " << *dis << endl; errors++; } if ((memea_callback & 0xf) != 0) { OutFile << "read address not 16 aligned: " << hex << memea_callback << " " << *dis << endl; errors++; } if ((memea_orig & mask) != memea_callback) { OutFile << "read addresses incorrect: " << hex << memea_orig << " " << memea_callback << " " << *dis << endl; errors++; } icountMemRead++; } VOID PIN_FAST_ANALYSIS_CALL read2MemoryFunc(ADDRINT memea_orig,ADDRINT memea_callback, ADDRINT memea2_orig,ADDRINT memea2_callback, THREADID threadIndex, string *dis, CONTEXT *ctxt, ADDRINT ip) { // case of multithreading - we care just about the main thread if (PIN_ThreadId() != myThread) return; if (ip != lastIp) { OutFile << "read2 analysis missing: " << " " << *dis << endl; errors++; } if ((memea_callback & 0xf) != 0) { OutFile << "read2 first address not 16 aligned: " << hex << memea_callback << " " << *dis << endl; errors++; } if ((memea_orig & mask) != memea_callback) { OutFile << "read2 first addresses incorrect: " << hex << memea_orig << " " << memea_callback << " " << *dis << endl; errors++; } if ((memea2_callback & 0xf) != 0) { OutFile << "read2 second address not 16 aligned: " << hex << memea2_callback << " " << *dis << endl; errors++; } if ((memea_orig & mask) != memea_callback) { OutFile << "read2 second addresses incorrect: " << hex << memea2_orig << " " << memea2_callback << " " << *dis << endl; errors++; } icountMemRead2++; } VOID PIN_FAST_ANALYSIS_CALL writeMemoryFunc( THREADID threadIndex,ADDRINT memea_orig,ADDRINT 
memea_callback,ADDRINT ip, string *dis) { // case of multithreading - we care just about the main thread if (PIN_ThreadId() != myThread) return; if (ip != lastIp) { OutFile << "write analysis missing: " << " " << *dis << endl; errors++; } if (memea_orig != lastWriteAddr) { OutFile << "write orig address incorrect: " << hex << memea_orig << " " << ip << " " << *dis << endl; errors++; } if ((memea_callback & 0xf) != 0) { OutFile << "write address not 16 aligned: " << hex << memea_callback << " " << *dis << endl; errors++; } if ((memea_orig & mask) != memea_callback) { OutFile << "write addresses incorrect: " << hex << memea_orig << " " << memea_callback << " " << *dis << endl; errors++; } icountMemWrite++; } VOID PIN_FAST_ANALYSIS_CALL opMemoryFunc( ADDRINT memea_orig,ADDRINT memea_callback, UINT32 bytes, ADDRINT ip, string *dis) { // case of multithreading - we care just about the main thread if (PIN_ThreadId() != myThread) return; if (ip != lastIp) { OutFile << "op analysis missing: " << " " << *dis << endl; errors++; } if (bytes != lastBytes) { OutFile << "op bytes error found: " << bytes << " and not " << lastBytes << " for " << *dis << endl; errors++; } if ((memea_callback & 0xf) != 0) { OutFile << "op address not 16 aligned: " << hex << memea_callback << " " << *dis << endl; errors++; } if ((memea_orig & mask) != memea_callback) { OutFile << "op addresses incorrect: " << hex << memea_orig << " " << memea_callback << " " << ip << " " << *dis << endl; errors++; } icountMemOp++; } ADDRINT PIN_FAST_ANALYSIS_CALL memoryCallback(PIN_MEM_TRANS_INFO* memTransInfo, VOID *v) { // Test the threadIndex field (Mantis 0003429) if (memTransInfo->threadIndex != PIN_ThreadId()) { cout << "PIN_MEM_TRANS_INFO.threadIndex bad value " << memTransInfo->threadIndex << " (should be " << PIN_ThreadId() << ")" << endl; errors++; } if (memTransInfo->flags.bits.isFromPin) { // PIN Internal memory dereference return memTransInfo->addr; } if (PIN_ThreadId() == myThread) { icountMemCall++; 
lastIp = memTransInfo->ip; lastBytes = memTransInfo->bytes; lastIsAtomic = memTransInfo->flags.bits.isAtomic; lastIsPrefetch = memTransInfo->flags.bits.isPrefetch; lastIsRmw = memTransInfo->flags.bits.isRmw; } if (memTransInfo->memOpType == PIN_MEMOP_STORE ) { if (PIN_ThreadId() == myThread) lastWriteAddr = memTransInfo->addr; // Verify that we can call PIN API functions inside PIN PIN_SafeCopy((void*)memTransInfo->addr, (void*)memTransInfo->addr, memTransInfo->bytes); } else if (PIN_ThreadId() == myThread) lastReadAddr = memTransInfo->addr; // Check void parameter if ((ADDRINT)v != 0xa5a5a5a5) { OutFile << "v incorrect inside callback: " << hex << v << endl; errors++; } //OutFile << "callback addresses: " << hex << (memTransInfo->addr) << " " << (memTransInfo->addr & mask) << " " <<memTransInfo->ip << endl; return (memTransInfo->addr & mask); } // Pin calls this function every time a new instruction is encountered VOID Instruction(INS ins, VOID *v) { string *disptr = new string(INS_Disassemble(ins)); // reads if (INS_IsMemoryRead(ins)) { INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)readMemoryFunc, IARG_FAST_ANALYSIS_CALL, IARG_MEMORYREAD_EA, IARG_MEMORYREAD_PTR, IARG_THREAD_ID, IARG_PTR, disptr, IARG_INST_PTR, IARG_END); } // Handle read from memory to XMM if (INS_Opcode(ins) == XED_ICLASS_MOVDQU && INS_IsMemoryRead(ins) && INS_OperandIsReg(ins, 0) && INS_OperandIsMemory(ins, 1)) { if (REG_is_xmm(INS_OperandReg(ins, 0))) { INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)readXmmMemoryFunc, IARG_FAST_ANALYSIS_CALL, IARG_MEMORYREAD_PTR, IARG_MEMORYREAD_SIZE, IARG_PTR, disptr, IARG_INST_PTR, IARG_END); } } // Handle Prefetch if (INS_IsMemoryRead(ins) && xed_decoded_inst_is_prefetch(INS_XedDec(ins))) { INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)verifyPrefetchFunc, IARG_FAST_ANALYSIS_CALL, IARG_MEMORYREAD_PTR, IARG_MEMORYREAD_SIZE, IARG_PTR, disptr, IARG_INST_PTR, IARG_END); } // Handle Atomic if (INS_IsAtomicUpdate(ins)) { INS_InsertCall(ins, IPOINT_BEFORE, 
(AFUNPTR)verifyAtomicFunc, IARG_FAST_ANALYSIS_CALL, IARG_MEMORYWRITE_PTR, IARG_MEMORYWRITE_SIZE, IARG_PTR, disptr, IARG_INST_PTR, IARG_END); } // writes if (INS_IsMemoryWrite(ins)) { INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)writeMemoryFunc, IARG_FAST_ANALYSIS_CALL, IARG_THREAD_ID, IARG_MEMORYWRITE_EA, IARG_MEMORYWRITE_PTR , IARG_INST_PTR, IARG_PTR, disptr, IARG_END); } UINT32 memOperands = INS_MemoryOperandCount(ins); if (!INS_IsVgather(ins) && memOperands) { // OPs for (UINT32 memOp = 0; memOp < memOperands; memOp++) { if (INS_MemoryOperandIsRead(ins, memOp) || INS_MemoryOperandIsWritten(ins, memOp)) { INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)opMemoryFunc, IARG_FAST_ANALYSIS_CALL, IARG_MEMORYOP_EA, memOp, IARG_MEMORYOP_PTR, memOp, IARG_UINT32,INS_MemoryOperandSize(ins,memOp), IARG_INST_PTR , IARG_PTR, disptr, IARG_END); } // Handle RMW if (INS_MemoryOperandIsRead(ins, memOp) && INS_MemoryOperandIsWritten(ins, memOp)) { INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)verifyRmwFunc, IARG_FAST_ANALYSIS_CALL, IARG_MEMORYOP_PTR, memOp, IARG_UINT32,INS_MemoryOperandSize(ins,memOp), IARG_PTR, disptr, IARG_INST_PTR, IARG_END); } } } // READ2 if (INS_HasMemoryRead2(ins)) { INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)read2MemoryFunc, IARG_FAST_ANALYSIS_CALL, IARG_MEMORYREAD_EA, IARG_MEMORYREAD_PTR, IARG_MEMORYREAD2_EA, IARG_MEMORYREAD2_PTR, IARG_THREAD_ID, IARG_PTR, disptr, IARG_CONTEXT, IARG_INST_PTR, IARG_END); } } KNOB<string> KnobOutputFile(KNOB_MODE_WRITEONCE, "pintool", "o", "memaddrcall.out", "specify output file name"); // This function is called when the application exits VOID Fini(INT32 code, VOID *v) { // Write to a file since cout and cerr maybe closed by the application OutFile.setf(ios::showbase); OutFile << "Count Mem Reads " << icountMemRead << endl; OutFile << "Count Mem Read2s " << icountMemRead2 << endl; OutFile << "Count Mem Writes " << icountMemWrite << endl; OutFile << "Count Mem Ops " << icountMemOp << endl; OutFile << "Count Mem callbacks " << 
icountMemCall << endl; OutFile << "Errors " << errors << endl; OutFile.close(); // If we have errors then terminate abnormally if (errors) { cout << "Test memory_addr_callback is terminated cause found " << errors << " errors " << endl; PIN_ExitProcess(errors); } } /* ===================================================================== */ /* Print Help Message */ /* ===================================================================== */ INT32 Usage() { cerr << "This tool tests memory address translation callback" << endl; cerr << endl << KNOB_BASE::StringKnobSummary() << endl; return -1; } /* ===================================================================== */ /* Main */ /* ===================================================================== */ int main(int argc, char * argv[]) { ADDRINT dummy = 0xa5a5a5a5; // Initialize pin if (PIN_Init(argc, argv)) return Usage(); PIN_AddThreadStartFunction(ThreadStart, NULL); OutFile.open(KnobOutputFile.Value().c_str()); // Register Instruction to be called to instrument instructions INS_AddInstrumentFunction(Instruction, 0); // Verify that the PIN API is null before registration if (PIN_GetMemoryAddressTransFunction()) { cout << "Test memory_addr_callback found PIN API callback not null before registration " << endl; PIN_ExitProcess(-1); } // Register memory callback PIN_AddMemoryAddressTransFunction(memoryCallback,(VOID*)dummy); // Verify that the PIN API is not null after registration if (!PIN_GetMemoryAddressTransFunction()) { cout << "Test memory_addr_callback found PIN API callback null after registration " << endl; PIN_ExitProcess(-1); } // Register Fini to be called when the application exits PIN_AddFiniFunction(Fini, 0); // Start the program, never returns PIN_StartProgram(); return errors; }
7,342
853
/* - based on tweetnacl 20140427 (http://tweetnacl.cr.yp.to/software.html) */

#include "crypto_int64.h"
#include "crypto_uint32.h"
#include "crypto_uint64.h"
#include "cleanup.h"
#include "sc25519.h"

#define FOR(i,n) for (i = 0;i < n;++i)

/* Little-endian byte representation of the group order
   l = 2^252 + 27742317777372353535851937790883648493
   (the order of the Ed25519 base-point group). */
static const crypto_uint64 L[32] = {0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10};

/* Reduce the 64-limb (8 bits per limb) integer x modulo L and write the
   32-byte little-endian result to r. x is destroyed in the process.
   Statement order and the signed carry arithmetic are deliberate —
   do not reorder. */
static void modL(unsigned char *r,crypto_int64 x[64]) {

    crypto_int64 carry,i,j;

    /* Eliminate the high limbs x[32..63] by subtracting multiples of L. */
    for (i = 63;i >= 32;--i) {
        carry = 0;
        for (j = i - 32;j < i - 12;++j) {
            x[j] += carry - 16 * x[i] * L[j - (i - 32)];
            /* Signed carry: keeps each limb centered in [-128, 127]. */
            carry = (x[j] + 128) >> 8;
            x[j] -= carry << 8;
        }
        x[j] += carry;
        x[i] = 0;
    }
    /* Fold the remaining top nibble (x[31] >> 4) back down. */
    carry = 0;
    FOR(j,32) {
        x[j] += carry - (x[31] >> 4) * L[j];
        carry = x[j] >> 8;
        x[j] &= 255;
    }
    FOR(j,32) x[j] -= carry * L[j];
    /* Final carry propagation and serialization to bytes. */
    FOR(i,32) {
        x[i+1] += x[i] >> 8;
        r[i] = x[i] & 255;
    }
}

/* Reduce a 64-byte little-endian value s modulo L, in place;
   only the first 32 bytes of s hold the result. */
void sc25519_reduce(unsigned char *s) {

    crypto_int64 t[64], i;

    for (i = 0; i < 64; ++i) t[i] = s[i];
    for (i = 0; i < 64; ++i) s[i] = 0;
    modL(s, t);
    /* Wipe the intermediate limbs (scalar material is secret). */
    cleanup(t);
}

/* Compute s = (a * b + c) mod L, with a, b, c as 32-byte little-endian
   scalars. Used for Ed25519 signature generation. */
void sc25519_muladd(unsigned char *s, const unsigned char *a, const unsigned char *b, const unsigned char *c) {

    crypto_int64 t[64], i, j;

    for (i = 0; i < 64; ++i) t[i] = 0;
    /* Schoolbook multiplication into 64 limbs. */
    for (i = 0; i < 32; ++i) for (j = 0; j < 32; ++j) {
        t[i + j] += (crypto_int64)a[i] * (crypto_int64)b[j];
    }
    for (i = 0; i < 32; ++i) t[i] += c[i];
    modL(s, t);
    cleanup(t);
}
845
2,151
{ "name": "permissions/optional", "description": "permissions/optional", "key": "<KEY>", "version": "0.1", "manifest_version": 2, "background": { "scripts": ["background.js"] }, "permissions": [ "management", "http://a.com/*" ], "optional_permissions": [ "bookmarks", "cookies", "management", "background", "http://a.com/*", "http://*.c.com/*" ] }
193
5,823
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "range.h" #include <algorithm> #include <cinttypes> #include "base/string_utils.h" namespace gfx { std::string Range::ToString() const { return base::StringPrintf("{%" PRIu32 ",%" PRIu32 "}", start(), end()); } std::ostream& operator<<(std::ostream& os, const Range& range) { return os << range.ToString(); } } // namespace gfx
174
6,098
<reponame>kernelrich/h2o-3
package water.api;

import com.google.gson.Gson;
import water.*;
import water.api.schemas3.FrameV3;
import water.api.schemas3.JobV3;
import water.api.schemas3.KeyV3;
import water.api.schemas3.ModelSchemaV3;
import water.exceptions.H2OIllegalArgumentException;
import water.exceptions.H2OKeyNotFoundArgumentException;
import water.exceptions.H2ONotFoundArgumentException;
import water.fvec.Frame;
import water.util.*;

import java.lang.annotation.Annotation;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;
import java.util.*;

/**
 * Base Schema class; all REST API Schemas inherit from here.
 * <p>
 * Schema is a primary interface of the REST APIs: all endpoints consume some schema object as an input, and produce
 * another schema object as an output (though some endpoints may return nothing).
 * <p>
 * Schemas, as an external interface, are required to be stable: fields may not be renamed or removed, their types or
 * meaning may not change, etc. It is allowed to add new fields to a schema, provided that they are optional, and
 * that their default values correspond to the old behavior of the endpoint. If these requirements cannot be met,
 * then a new version of a schema class must be created.
 * <p>
 * Many schemas are in direct correspondence with H2O objects. For example, JobV3 schema represents the Job object.
 * These "representative" Iced objects are called "implementation" or "impl", and are parametrized with type I.
 * Such representation is necessary in order to ensure stability of the interface: even as Job class evolves, the
 * interface of JobV3 schema must not. In the simplest case, when there is 1-to-1 correspondence between fields in
 * the impl class and in the schema, we use reflection magic to copy those fields over. The reflection magic is smart
 * enough to perform simple field name translations, and even certain type translations (like Keyed objects into Keys).
 * If there is no such correspondence, then special type adapters must be written. Right now this is done by
 * overriding the {@link #fillImpl(I) fillImpl} and {@link #fillFromImpl(I) fillFromImpl} methods. Usually they will
 * want to call super to get the default behavior, and then modify the results a bit (e.g., to map differently-named
 * fields, or to compute field values). Transient and static fields are ignored by the reflection magic.
 * <p>
 * There are also schemas that do not correspond to any H2O object. These are mostly the input schemas (schemas used
 * for inputs of api requests). Such schemas should be "implemented" by Iced.
 * <p>
 * All schemas are expected to be self-documenting, in the sense that all fields within those schemas should carry
 * detailed documentation about their meaning, as well as any additional hints about the field's usage. These should
 * be annotated using the {@link API @API} interface. If a schema contains a complicated object, then that object
 * itself should derive from Schema, so that its fields can also be properly documented. However if the internal
 * object is sufficiently simple (say, a Map), then it may be sufficient to document it as a whole and have it derived
 * from Iced, not from Schema.
 * <p>
 * Schema names (getSimpleName()) must be unique within an application. During Schema discovery and registration
 * there are checks to ensure this. Each schema is associated with exactly one implementation object, however some
 * Iced objects are mapped into multiple schemas.
 * <p>
 * For V3 Schemas each field had a "direction" (input / output / both), which allowed us to use the same schema as
 * both input and output for an endpoint. This is no longer possible in V4: two separate schema classes for input /
 * output should be created.
 *
 * <h1>Usage</h1>
 * <p>
 * {@link Handler} creates an input schema from the body/parameters of the HTTP request (using
 * {@link #fillFromParms(Properties) fillFromParms()}, and passes it on to the corresponding handler method.
 * <p>
 * Each handler method may modify the input schema and return it as the output schema (common for V3 endpoints,
 * should be avoided in V4).
 * <p>
 * Alternatively, a handler method may create a new output schema object from scratch, or from an existing
 * implementation object.
 *
 * <h1>Internal details</h1>
 * <p>
 * Most Java developers need not be concerned with the details that follow, because the
 * framework will make these calls as necessary.
 * <p>
 * To create a schema object and fill it from an existing impl object:
 * <pre>{@code   S schema = new SomeSchemaClass().fillFromImpl(impl);}</pre>
 * <p>
 * To create an impl object and fill it from an existing schema object:
 * <pre>{@code   I impl = schema.createAndFillImpl();}</pre>
 * <p>
 * Schemas that are used for HTTP requests are filled with the default values of their impl
 * class, and then any present HTTP parameters override those default values.
 * To create a schema object filled from the default values of its impl class and then
 * overridden by HTTP request params:
 * <pre>{@code   S schema = new SomeSchemaClass().fillFromImpl().fillFromParms(parms);}</pre>
 *
 * @param <I> "implementation" (Iced) class for this schema
 * @param <S> reference to self: this should always be the same class as being declared. For example:
 *            <pre>public class TimelineV3 extends Schema&lt;Timeline, TimelineV3&gt;</pre>
 */
public abstract class Schema<I extends Iced, S extends Schema<I,S>> extends Iced {
  // These fields are declared transient so that they do not get included when a schema is serialized into JSON.
  private transient Class<I> _impl_class;
  private transient int _schema_version;
  private transient String _schema_name;
  private transient String _schema_type;

  private transient static final Gson gson = H2oRestGsonHelper.createH2oCompatibleGson(); // stateless and thread safe

  /** Default constructor; triggers lazy schema registration.
   *  @throws water.exceptions.H2OFailException if there is a name collision or
   *          there is more than one schema which maps to the same Iced class */
  public Schema() {
    init_meta();
    SchemaServer.checkIfRegistered(this);
  }

  /**
   * Create a new Schema instance from an existing impl object.
   */
  public Schema(I impl) {
    this();
    this.fillFromImpl(impl);
  }

  /** Lazily initialize the cached name / version / impl-type metadata; no-op if already done. */
  public void init_meta() {
    if (_schema_name != null) return;
    _schema_name = this.getClass().getSimpleName();
    _schema_version = extractVersionFromSchemaName(_schema_name);
    _schema_type = getImplClass().getSimpleName();
  }

  /** Extract the version number from the schema class name.  Returns -1 if
   *  there's no version number at the end of the classname. */
  public static int extractVersionFromSchemaName(String clz_name) {
    int idx = clz_name.lastIndexOf('V');
    if (idx == -1) return -1;
    try {
      return Integer.valueOf(clz_name.substring(idx+1));
    } catch( NumberFormatException ex) {
      return -1;
    }
  }

  /** Get the version number of this schema, for example 3 or 99. Note that 99
   *  is the "experimental" version, meaning that there are no stability
   *  guarantees between H2O versions. */
  public int getSchemaVersion() { return _schema_version; }

  public String getSchemaName() { return _schema_name; }

  public String getSchemaType() { return _schema_type; }

  /* Temporary hack to allow reassignment of schema_type by KeyV3 class */
  public void setSchemaType_doNotCall(String s) { _schema_type = s; }

  /**
   * Create an appropriate implementation object and any child objects but does not fill them.
   * The standard purpose of a createImpl without a fillImpl is to be able to get the default
   * values for all the impl's fields.
   * <p>
   * For objects without children this method does all the required work. For objects
   * with children the subclass will need to override, e.g. by calling super.createImpl()
   * and then calling createImpl() on its children.
   * <p>
   * Note that impl objects for schemas which override this method don't need to have
   * a default constructor (e.g., a Keyed object constructor can still create and set
   * the Key), but they must not fill any fields which can be filled later from the schema.
   * <p>
   * TODO: We could handle the common case of children with the same field names here
   * by finding all of our fields that are themselves Schemas.
   */
  public I createImpl() {
    try {
      return getImplClass().newInstance();
    }
    catch (Exception e) {
      throw H2O.fail("Exception making a newInstance",e);
    }
  }

  /** Copy this schema's fields into {@code impl} (two naming conventions tried), skipping {@code fieldsToSkip}. */
  protected I fillImpl(I impl, String[] fieldsToSkip) {
    PojoUtils.copyProperties(impl, this, PojoUtils.FieldNaming.CONSISTENT, fieldsToSkip);
    // TODO: make field names in the impl classes consistent and remove
    PojoUtils.copyProperties(impl, this, PojoUtils.FieldNaming.DEST_HAS_UNDERSCORES, fieldsToSkip);
    return impl;
  }

  /** Fill an impl object and any children from this schema and its children.
   *  If a schema doesn't need to adapt any fields if does not need to override
   *  this method. */
  public I fillImpl(I impl) {
    return fillImpl(impl, null);
  }

  /** Convenience helper which creates and fills an impl object from this schema. */
  public final I createAndFillImpl() {
    return this.fillImpl(this.createImpl());
  }

  /**
   * Fill this schema from the default impl, and then return self.
   */
  public final S fillFromImpl() {
    return fillFromImpl(createImpl(), null);
  }

  /**
   * Fill this Schema from the given implementation object. If a schema doesn't need to adapt any fields if does not
   * need to override this method.
   */
  public S fillFromImpl(I impl) {
    return fillFromImpl(impl, null);
  }

  /** Copy {@code impl}'s fields into this schema (two naming conventions tried), skipping {@code fieldsToSkip}. */
  protected S fillFromImpl(I impl, String[] fieldsToSkip) {
    PojoUtils.copyProperties(this, impl, PojoUtils.FieldNaming.ORIGIN_HAS_UNDERSCORES, fieldsToSkip);
    PojoUtils.copyProperties(this, impl, PojoUtils.FieldNaming.CONSISTENT, fieldsToSkip); // TODO: make field names in the impl classes consistent and remove
    //noinspection unchecked  (parameter <S> should be the derived class itself)
    return (S) this;
  }

  /** Return the class of the implementation type parameter I for the
   *  given Schema class. Used by the metadata facilities and the
   *  reflection-base field-copying magic in PojoUtils. */
  public static Class<? extends Iced> getImplClass(Class<? extends Schema> clz) {
    Class<? extends Iced> impl_class = ReflectionUtils.findActualClassParameter(clz, 0);
    if (null == impl_class)
      Log.warn("Failed to find an impl class for Schema: " + clz);
    return impl_class;
  }

  /** Return the class of the implementation type parameter I for this Schema.
   *  Used by generic code which deals with arbitrary schemas and their backing
   *  impl classes. Never returns null. */
  public Class<I> getImplClass() {
    // Cached after the first reflective lookup.
    return _impl_class != null ? _impl_class : (_impl_class = ReflectionUtils.findActualClassParameter(this.getClass(), 0));
  }

  /**
   * Fill this Schema object from a set of parameters.
   *
   * @param parms  parameters - set of tuples (parameter name, parameter value)
   * @return this schema
   *
   * @see #fillFromParms(Properties, Properties, boolean)
   */
  public S fillFromParms(Properties parms) {
    return fillFromParms(parms, true);
  }

  /**
   * Fill this Schema object from a set of parameters.
   *
   * @param parms  parameters - set of tuples (parameter name, parameter value)
   * @param checkRequiredFields  perform check for missing required fields
   * @return this schema
   *
   * @see #fillFromParms(Properties, Properties, boolean)
   */
  public S fillFromParms(Properties parms, boolean checkRequiredFields) {
    return fillFromParms(parms, null, checkRequiredFields);
  }

  /**
   * Fill this Schema from a set of (generally HTTP) parameters.
   * <p>
   * Using reflection this process determines the type of the target field and
   * conforms the types if possible.  For example, if the field is a Keyed type
   * the name (ID) will be looked up in the DKV and mapped appropriately.
   * <p>
   * The process ignores parameters which are not fields in the schema, and it
   * verifies that all fields marked as required are present in the parameters
   * list.
   * <p>
   * It also does various sanity checks for broken Schemas, for example fields must
   * not be private, and since input fields get filled here they must not be final.
   *
   * @param parms  Properties map of parameter values
   * @param unknownParms  if not null, bad parameters won't cause an exception,
   *                      they will be collected in this Properties object instead
   * @param checkRequiredFields  perform check for missing required fields
   * @return this schema
   * @throws H2OIllegalArgumentException for bad/missing parameters
   */
  public S fillFromParms(Properties parms, Properties unknownParms, boolean checkRequiredFields) {
    // Get passed-in fields, assign into Schema.
    // Walk the class hierarchy up to (but excluding) Iced, collecting declared fields;
    // a subclass field shadows a superclass field of the same name (first one wins).
    Class thisSchemaClass = this.getClass();

    Map<String, Field> fields = new HashMap<>();
    Field current = null; // declare here so we can print in catch{}
    try {
      Class clz = thisSchemaClass;
      do {
        Field[] some_fields = clz.getDeclaredFields();
        for (Field f : some_fields) {
          current = f;
          if (null == fields.get(f.getName()))
            fields.put(f.getName(), f);
        }
        clz = clz.getSuperclass();
      } while (Iced.class.isAssignableFrom(clz.getSuperclass()));
    }
    catch (SecurityException e) {
      throw H2O.fail("Exception accessing field: " + current + " in class: " + this.getClass() + ": " + e);
    }

    for( String key : parms.stringPropertyNames() ) {
      try {
        Field f = fields.get(key); // No such field error, if parm is junk
        if (null == f) {
          if (unknownParms != null) {
            // Caller asked to collect unknown params instead of failing.
            unknownParms.put(key, parms.getProperty(key));
            continue;
          } else
            throw new H2OIllegalArgumentException("Unknown parameter: " + key,
                "Unknown parameter in fillFromParms: " + key + " for class: " + this.getClass().toString());
        }

        int mods = f.getModifiers();
        if( Modifier.isTransient(mods) || Modifier.isStatic(mods) ) {
          // Attempting to set a transient or static; treat same as junk fieldname
          throw new H2OIllegalArgumentException(
              "Bad parameter for field: " + key + " for class: " + this.getClass().toString(),
              "Bad parameter definition for field: " + key + " in fillFromParms for class: "
                  + this.getClass().toString() + " (field was declared static or transient)");
        }
        // Only support a single annotation which is an API, and is required
        Annotation[] apis = f.getAnnotations();
        if( apis.length == 0 ) throw H2O.fail("Broken internal schema; missing API annotation for field: " + key);
        API api = (API)apis[0];
        // Must have one of these set to be an input field
        if( api.direction() == API.Direction.OUTPUT ) {
          throw new H2OIllegalArgumentException(
              "Attempting to set output field: " + key + " for class: " + this.getClass().toString(),
              "Attempting to set output field: " + key + " in fillFromParms for class: "
                  + this.getClass().toString() + " (field was annotated as API.Direction.OUTPUT)");
        }

        // Parse value and set the field
        setField(this, f, key, parms.getProperty(key), api.required(), thisSchemaClass);
      } catch( IllegalAccessException iae ) {
        // Come here if field is final or private
        throw H2O.fail("Broken internal schema; field cannot be private nor final: " + key);
      }
    }
    // Here every thing in 'parms' was set into some field - so we have already
    // checked for unknown or extra parms.

    // Confirm required fields are set
    if (checkRequiredFields) {
      for (Field f : fields.values()) {
        int mods = f.getModifiers();
        if (Modifier.isTransient(mods) || Modifier.isStatic(mods))
          continue;             // Ignore transient & static
        try {
          API api = (API) f.getAnnotations()[0]; // TODO: is there a more specific way we can do this?
          if (api.required()) {
            if (parms.getProperty(f.getName()) == null) {
              IcedHashMapGeneric.IcedHashMapStringObject values = new IcedHashMapGeneric.IcedHashMapStringObject();
              values.put("schema", this.getClass().getSimpleName());
              values.put("argument", f.getName());
              throw new H2OIllegalArgumentException(
                  "Required field " + f.getName() + " not specified",
                  "Required field " + f.getName() + " not specified for schema class: " + this.getClass(),
                  values);
            }
          }
        } catch (ArrayIndexOutOfBoundsException e) {
          throw H2O.fail("Missing annotation for API field: " + f.getName());
        }
      }
    }
    //noinspection unchecked  (parameter <S> should be the derived class itself)
    return (S) this;
  }

  /**
   * Fills this Schema from the body content when available.
   * By default the body is interpreted as JSON object.
   *
   * We use PojoUtils.fillFromJson() rather than just using "schema = Gson.fromJson(post_body)"
   * so that we have defaults: we only overwrite fields that the client has specified.
   *
   * @param body  the post body (can't be null), converted to JSON by default
   * @return the filled schema
   */
  public S fillFromBody(String body) {
    return (S) PojoUtils.fillFromJson(this, body);
  }

  /**
   * Fill this Schema from an arbitrary source object; the default implementation rejects everything.
   * Subclasses that support additional source types should override.
   *
   * @param o  source object to convert from
   * @return the filled schema
   * @throws IllegalArgumentException always, unless overridden
   */
  public S fillFromAny(Object o) {
    throw new IllegalArgumentException("can't convert object of type " + o.getClass() + " to schema " + this.getSchemaType());
  }

  /**
   * Safe method to set the field on given schema object
   * @param o  schema object to modify
   * @param f  field to modify
   * @param key  name of field to modify
   * @param value  string-based representation of value to set
   * @param required  is field required by API
   * @param thisSchemaClass  class of schema handling this (can be null)
   * @throws IllegalAccessException
   */
  public static <T extends Schema> void setField(T o, Field f, String key, String value, boolean required, Class thisSchemaClass) throws IllegalAccessException {
    // Primitive parse by field type
    Object parse_result = parse(key, value, f.getType(), required, thisSchemaClass);
    if (parse_result != null && f.getType().isArray() && parse_result.getClass().isArray()
        && (f.getType().getComponentType() != parse_result.getClass().getComponentType())) {
      // We have to conform an array of primitives.  There's got to be a better way. . .
      // parse() returns boxed arrays (Integer[]/Double[]/Float[]); box or unbox
      // element-by-element so the component type matches the target field.
      if (parse_result.getClass().getComponentType() == int.class && f.getType().getComponentType() == Integer.class) {
        int[] from = (int[])parse_result;
        Integer[] copy = new Integer[from.length];
        for (int i = 0; i < from.length; i++)
          copy[i] = from[i];
        f.set(o, copy);
      } else if (parse_result.getClass().getComponentType() == Integer.class && f.getType().getComponentType() == int.class) {
        Integer[] from = (Integer[])parse_result;
        int[] copy = new int[from.length];
        for (int i = 0; i < from.length; i++)
          copy[i] = from[i];
        f.set(o, copy);
      } else if (parse_result.getClass().getComponentType() == Double.class && f.getType().getComponentType() == double.class) {
        Double[] from = (Double[])parse_result;
        double[] copy = new double[from.length];
        for (int i = 0; i < from.length; i++)
          copy[i] = from[i];
        f.set(o, copy);
      } else if (parse_result.getClass().getComponentType() == Float.class && f.getType().getComponentType() == float.class) {
        Float[] from = (Float[])parse_result;
        float[] copy = new float[from.length];
        for (int i = 0; i < from.length; i++)
          copy[i] = from[i];
        f.set(o, copy);
      } else {
        throw H2O.fail("Don't know how to cast an array of: " + parse_result.getClass().getComponentType()
            + " to an array of: " + f.getType().getComponentType());
      }
    } else {
      f.set(o, parse_result);
    }
  }

  // Parse a single primitive (or String) value.  NOTE: name keeps its historical
  // spelling ("parsePrimitve") for source compatibility within the package.
  static <E> Object parsePrimitve(String s, Class fclz) {
    if (fclz.equals(String.class)) return s; // Strings already the right primitive type
    if (fclz.equals(int.class)) return parseInteger(s, int.class);
    if (fclz.equals(long.class)) return parseInteger(s, long.class);
    if (fclz.equals(short.class)) return parseInteger(s, short.class);
    if (fclz.equals(boolean.class)) {
      // Accept "0"/"1" as well as Boolean.valueOf()'s "true"/anything-else semantics.
      if (s.equals("0")) return Boolean.FALSE;
      if (s.equals("1")) return Boolean.TRUE;
      return Boolean.valueOf(s);
    }
    if (fclz.equals(byte.class)) return parseInteger(s, byte.class);
    if (fclz.equals(double.class)) return Double.valueOf(s);
    if (fclz.equals(float.class)) return Float.valueOf(s);
    //FIXME: if (fclz.equals(char.class)) return Character.valueOf(s);
    throw H2O.fail("Unknown primitive type to parse: " + fclz.getSimpleName());
  }

  // URL parameter parse: convert the string value of one request parameter into
  // an instance of the target field type (primitive, array, Key, KeyV3, enum,
  // FrameV3, JobV3, ColSpecifierV3, or a gson-parseable JSON object).
  static <E> Object parse(String field_name, String s, Class fclz, boolean required, Class schemaClass) {
    if (fclz.isPrimitive() || String.class.equals(fclz)) {
      try {
        return parsePrimitve(s, fclz);
      } catch (NumberFormatException ne) {
        String msg = "Illegal argument for field: " + field_name + " of schema: " + schemaClass.getSimpleName()
            + ": cannot convert \"" + s + "\" to type " + fclz.getSimpleName();
        throw new H2OIllegalArgumentException(msg);
      }
    }
    // An array?
    if (fclz.isArray()) {
      // Get component type
      Class<E> afclz = (Class<E>) fclz.getComponentType();
      // Result
      E[] a = null;
      // Handle simple case with null-array
      if (s.equals("null") || s.length() == 0) return null;
      // Handling of "auto-parseable" cases
      if (AutoParseable.class.isAssignableFrom(afclz))
        return gson.fromJson(s, fclz);
      // Splitted values
      String[] splits; // "".split(",") => {""} so handle the empty case explicitly
      if (s.startsWith("[") && s.endsWith("]") ) { // It looks like an array
        read(s, 0, '[', fclz);
        read(s, s.length() - 1, ']', fclz);
        String inside = s.substring(1, s.length() - 1).trim();
        if (inside.length() == 0)
          splits = new String[]{};
        else
          splits = splitArgs(inside);
      } else { // Lets try to parse single value as an array!
        // See PUBDEV-1955
        splits = new String[] { s.trim() };
      }

      // Can't cast an int[] to an Object[].  Sigh.
      if (afclz == int.class) { // TODO: other primitive types. . .
        a = (E[]) Array.newInstance(Integer.class, splits.length);
      } else if (afclz == double.class) {
        a = (E[]) Array.newInstance(Double.class, splits.length);
      } else if (afclz == float.class) {
        a = (E[]) Array.newInstance(Float.class, splits.length);
      } else {
        // Fails with primitive classes; need the wrapper class.  Thanks, Java.
        a = (E[]) Array.newInstance(afclz, splits.length);
      }

      for (int i = 0; i < splits.length; i++) {
        if (String.class == afclz || KeyV3.class.isAssignableFrom(afclz)) {
          // strip quotes off string values inside array
          String stripped = splits[i].trim();

          if ("null".equalsIgnoreCase(stripped) || "na".equalsIgnoreCase(stripped)) {
            a[i] = null;
            continue;
          }

          // Quotes are now optional because standard clients will send arrays of length one as just strings.
          if (stripped.startsWith("\"") && stripped.endsWith("\"")) {
            stripped = stripped.substring(1, stripped.length() - 1);
          }

          a[i] = (E) parse(field_name, stripped, afclz, required, schemaClass);
        } else {
          a[i] = (E) parse(field_name, splits[i].trim(), afclz, required, schemaClass);
        }
      }
      return a;
    }

    // Are we parsing an object from a string? NOTE: we might want to make this check more restrictive.
    if (! fclz.isAssignableFrom(Schema.class) && s != null && s.startsWith("{") && s.endsWith("}")) {
      return gson.fromJson(s, fclz);
    }

    if (fclz.equals(Key.class))
      if ((s == null || s.length() == 0) && required) throw new H2OKeyNotFoundArgumentException(field_name, s);
      else if (!required && (s == null || s.length() == 0)) return null;
      else
        return Key.make(s.startsWith("\"") ? s.substring(1, s.length() - 1) : s); // If the key name is in an array we need to trim surrounding quotes.

    if (KeyV3.class.isAssignableFrom(fclz)) {
      if ((s == null || s.length() == 0) && required) throw new H2OKeyNotFoundArgumentException(field_name, s);
      if (!required && (s == null || s.length() == 0)) return null;
      return KeyV3.make(fclz, Key.make(s.startsWith("\"") ? s.substring(1, s.length() - 1) : s)); // If the key name is in an array we need to trim surrounding quotes.
    }

    if (Enum.class.isAssignableFrom(fclz)) {
      return EnumUtils.valueOf(fclz, s);
    }

    // TODO: these can be refactored into a single case using the facilities in Schema:
    if (FrameV3.class.isAssignableFrom(fclz)) {
      if ((s == null || s.length() == 0) && required) throw new H2OKeyNotFoundArgumentException(field_name, s);
      else if (!required && (s == null || s.length() == 0)) return null;
      else {
        Value v = DKV.get(s);
        if (null == v) return null; // not required
        if (!v.isFrame()) throw H2OIllegalArgumentException.wrongKeyType(field_name, s, "Frame", v.get().getClass());
        return new FrameV3((Frame) v.get()); // TODO: version!
      }
    }

    if (JobV3.class.isAssignableFrom(fclz)) {
      // FIX: pass field_name like the Key/KeyV3/FrameV3 branches above, so the
      // error message identifies which argument was missing.
      if ((s == null || s.length() == 0) && required) throw new H2OKeyNotFoundArgumentException(field_name, s);
      else if (!required && (s == null || s.length() == 0)) return null;
      else {
        Value v = DKV.get(s);
        if (null == v) return null; // not required
        if (!v.isJob()) throw H2OIllegalArgumentException.wrongKeyType(field_name, s, "Job", v.get().getClass());
        return new JobV3().fillFromImpl((Job) v.get()); // TODO: version!
      }
    }

    // TODO: for now handle the case where we're only passing the name through; later we need to handle the case
    // where the frame name is also specified.
    if (FrameV3.ColSpecifierV3.class.isAssignableFrom(fclz)) {
      return new FrameV3.ColSpecifierV3(s);
    }

    if (ModelSchemaV3.class.isAssignableFrom(fclz))
      throw H2O.fail("Can't yet take ModelSchemaV3 as input.");
    /*
      if( (s==null || s.length()==0) && required ) throw new IllegalArgumentException("Missing key");
      else if (!required && (s == null || s.length() == 0)) return null;
      else {
        Value v = DKV.get(s);
        if (null == v) return null; // not required
        if (! v.isModel()) throw new IllegalArgumentException("Model argument points to a non-model object.");
        return v.get();
      }
    */
    throw H2O.fail("Unimplemented schema fill from " + fclz.getSimpleName());
  } // parse()

  /**
   * Helper functions for parse()
   **/

  /**
   * Parses a string into an integer data type specified by parameter return_type. Accepts any format that
   * is accepted by java's BigDecimal class.
   *  - Throws a NumberFormatException if the evaluated string is not an integer or if the value is too large to
   *    be stored into return_type without overflow.
   *  - Throws an IllegalAgumentException if return_type is not an integer data type.
   **/
  static private <T> T parseInteger(String s, Class<T> return_type) {
    try {
      java.math.BigDecimal num = new java.math.BigDecimal(s);
      // Reflectively call e.g. intValueExact()/longValueExact() matching the requested type.
      T result = (T) num.getClass().getDeclaredMethod(return_type.getSimpleName() + "ValueExact", new Class[0]).invoke(num);
      return result;
    } catch (InvocationTargetException ite) {
      // xxxValueExact threw (non-integral value or overflow); report as a number-format problem.
      throw new NumberFormatException("The expression's numeric value is out of the range of type " + return_type.getSimpleName());
    } catch (NoSuchMethodException nsme) {
      throw new IllegalArgumentException(return_type.getSimpleName() + " is not an integer data type");
    } catch (IllegalAccessException iae) {
      throw H2O.fail("Cannot parse expression as " + return_type.getSimpleName() + " (Illegal Access)");
    }
  }

  // Assert that character c appears at position x of s; returns x+1 on success.
  static private int read( String s, int x, char c, Class fclz ) {
    if( peek(s,x,c) ) return x+1;
    throw new IllegalArgumentException("Expected '"+c+"' while reading a "+fclz.getSimpleName()+", but found "+s);
  }

  // True iff character c appears at position x of s (bounds-checked).
  static private boolean peek( String s, int x, char c ) { return x < s.length() && s.charAt(x) == c; }

  // Splits on commas, but ignores commas in double quotes.  Required
  // since using a regex blow the stack on long column counts
  // TODO: detect and complain about malformed JSON
  private static String[] splitArgs(String argStr) {
    StringBuilder sb = new StringBuilder(argStr);
    StringBuilder arg = new StringBuilder();
    List<String> splitArgList = new ArrayList<String> ();
    boolean inDoubleQuotes = false;
    boolean inSquareBrackets = false; // for arrays of arrays

    for (int i=0; i < sb.length(); i++) {
      if (sb.charAt(i) == '"' && !inDoubleQuotes && !inSquareBrackets) {
        inDoubleQuotes = true;
        arg.append(sb.charAt(i));
      } else if (sb.charAt(i) == '"' && inDoubleQuotes && !inSquareBrackets) {
        inDoubleQuotes = false;
        arg.append(sb.charAt(i));
      } else if (sb.charAt(i) == ',' && !inDoubleQuotes && !inSquareBrackets) {
        splitArgList.add(arg.toString());
        // clear the field for next word
        arg.setLength(0);
      } else if (sb.charAt(i) == '[') {
        inSquareBrackets = true;
        arg.append(sb.charAt(i));
      } else if (sb.charAt(i) == ']') {
        inSquareBrackets = false;
        arg.append(sb.charAt(i));
      } else {
        arg.append(sb.charAt(i));
      }
    }
    if (arg.length() > 0)
      splitArgList.add(arg.toString());

    return splitArgList.toArray(new String[splitArgList.size()]);
  }

  /**
   * Returns a new Schema instance.  Does not throw, nor returns null.
   * @return New instance of Schema Class 'clz'.
   */
  public static <T extends Schema> T newInstance(Class<T> clz) {
    try {
      return clz.newInstance();
    } catch (Exception e) {
      throw H2O.fail("Failed to instantiate schema of class: " + clz.getCanonicalName(),e);
    }
  }

  /**
   * For a given schema_name (e.g., "FrameV2") return an appropriate new schema object (e.g., a water.api.Framev2).
   */
  protected static Schema newInstance(String schema_name) {
    return Schema.newInstance(SchemaServer.getSchema(schema_name));
  }

  /**
   * Generate Markdown documentation for this Schema possibly including only the input or output fields.
   * @throws H2ONotFoundArgumentException if reflection on a field fails
   */
  public StringBuffer markdown(boolean include_input_fields, boolean include_output_fields) {
    return markdown(new SchemaMetadata(this), include_input_fields, include_output_fields);
  }

  /**
   * Generate Markdown documentation for this Schema, given we already have the metadata constructed.
   * @throws H2ONotFoundArgumentException if reflection on a field fails
   */
  public StringBuffer markdown(SchemaMetadata meta, boolean include_input_fields, boolean include_output_fields) {
    MarkdownBuilder builder = new MarkdownBuilder();

    builder.comment("Preview with http://jbt.github.io/markdown-editor");
    builder.heading1("schema ", this.getClass().getSimpleName());
    builder.hline();
    // builder.paragraph(metadata.summary);

    // TODO: refactor with Route.markdown():

    // fields
    boolean first; // don't print the table at all if there are no rows

    try {
      if (include_input_fields) {
        first = true;
        builder.heading2("input fields");

        for (SchemaMetadata.FieldMetadata field_meta : meta.fields) {
          if (field_meta.direction == API.Direction.INPUT || field_meta.direction == API.Direction.INOUT) {
            if (first) {
              builder.tableHeader("name", "required?", "level", "type", "schema?", "schema", "default", "description", "values", "is member of frames", "is mutually exclusive with");
              first = false;
            }
            builder.tableRow(
                field_meta.name,
                String.valueOf(field_meta.required),
                field_meta.level.name(),
                field_meta.type,
                String.valueOf(field_meta.is_schema),
                field_meta.is_schema ? field_meta.schema_name : "",
                (null == field_meta.value ? "(null)" : field_meta.value.toString()), // Something better for toString()?
                field_meta.help,
                (field_meta.values == null || field_meta.values.length == 0 ? "" : Arrays.toString(field_meta.values)),
                (field_meta.is_member_of_frames == null ? "[]" : Arrays.toString(field_meta.is_member_of_frames)),
                (field_meta.is_mutually_exclusive_with == null ? "[]" : Arrays.toString(field_meta.is_mutually_exclusive_with))
            );
          }
        }
        if (first) builder.paragraph("(none)");
      }

      if (include_output_fields) {
        first = true;
        builder.heading2("output fields");

        for (SchemaMetadata.FieldMetadata field_meta : meta.fields) {
          if (field_meta.direction == API.Direction.OUTPUT || field_meta.direction == API.Direction.INOUT) {
            if (first) {
              builder.tableHeader("name", "type", "schema?", "schema", "default", "description", "values", "is member of frames", "is mutually exclusive with");
              first = false;
            }
            builder.tableRow(
                field_meta.name,
                field_meta.type,
                String.valueOf(field_meta.is_schema),
                field_meta.is_schema ? field_meta.schema_name : "",
                (null == field_meta.value ? "(null)" : field_meta.value.toString()), // something better than toString()?
                field_meta.help,
                (field_meta.values == null || field_meta.values.length == 0 ? "" : Arrays.toString(field_meta.values)),
                (field_meta.is_member_of_frames == null ? "[]" : Arrays.toString(field_meta.is_member_of_frames)),
                (field_meta.is_mutually_exclusive_with == null ? "[]" : Arrays.toString(field_meta.is_mutually_exclusive_with)));
          }
        }
        if (first) builder.paragraph("(none)");
      }

      // TODO: render examples and other stuff, if it's passed in
    }
    catch (Exception e) {
      IcedHashMapGeneric.IcedHashMapStringObject values = new IcedHashMapGeneric.IcedHashMapStringObject();
      values.put("schema", this);
      // TODO: This isn't quite the right exception type:
      throw new H2OIllegalArgumentException("Caught exception using reflection on schema: " + this,
          "Caught exception using reflection on schema: " + this + ": " + e,
          values);
    }
    return builder.stringBuffer();
  }

  /**
   * This "Marker Interface" denotes classes that can directly be parsed by GSON parser (skip H2O's own parser)
   */
  public interface AutoParseable { /* nothing here */}
}
12,680
1,056
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.netbeans.modules.subversion.ui.update;

import java.util.Iterator;
import java.util.regex.Matcher;
import org.netbeans.modules.subversion.ui.actions.ContextAction;
import org.netbeans.modules.subversion.util.Context;
import org.netbeans.modules.subversion.*;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.regex.Pattern;
import javax.swing.SwingUtilities;
import org.netbeans.modules.subversion.client.SvnClient;
import org.netbeans.modules.subversion.client.SvnClientExceptionHandler;
import org.netbeans.modules.subversion.client.SvnProgressSupport;
import org.netbeans.modules.subversion.ui.actions.ActionUtils;
import org.netbeans.modules.subversion.util.ClientCheckSupport;
import org.netbeans.modules.subversion.util.SvnUtils;
import org.netbeans.modules.versioning.util.Utils;
import org.netbeans.modules.versioning.util.VersioningOutputManager;
import org.openide.DialogDisplayer;
import org.openide.NotifyDescriptor;
import org.openide.nodes.Node;
import org.openide.awt.StatusDisplayer;
import org.openide.filesystems.FileUtil;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.util.RequestProcessor;
import org.openide.util.actions.SystemAction;
import org.openide.util.lookup.Lookups;
import org.tigris.subversion.svnclientadapter.ISVNInfo;
import org.tigris.subversion.svnclientadapter.ISVNNotifyListener;
import org.tigris.subversion.svnclientadapter.SVNClientException;
import org.tigris.subversion.svnclientadapter.SVNNodeKind;
import org.tigris.subversion.svnclientadapter.SVNRevision;
import org.tigris.subversion.svnclientadapter.SVNUrl;

/**
 * Update action
 *
 * @author <NAME>
 */
public class UpdateAction extends ContextAction {

    private static final String ICON_RESOURCE = "org/netbeans/modules/subversion/resources/icons/update.png"; //NOI18N

    // Status mask for files/folders that patchFilesRecursively() is allowed to descend into:
    // everything managed except explicitly excluded items.
    private static final int STATUS_RECURSIVELY_TRAVERSIBLE = FileInformation.STATUS_MANAGED
            & ~FileInformation.STATUS_NOTVERSIONED_EXCLUDED;

    public UpdateAction () {
        this(ICON_RESOURCE);
    }

    protected UpdateAction (String iconResource) {
        super(iconResource);
    }

    protected String getBaseName(Node[] nodes) {
        return "CTL_MenuItem_Update";                                   // NOI18N
    }

    @Override
    protected int getFileEnabledStatus() {
        return FileInformation.STATUS_VERSIONED
                | FileInformation.STATUS_IN_REPOSITORY
                | FileInformation.STATUS_NOTVERSIONED_NEWLOCALLY; // updating locally new file is permitted, it either does nothing or exchanges the local file with the one in repository
    }

    @Override
    protected int getDirectoryEnabledStatus() {
        return FileInformation.STATUS_MANAGED
                & ~FileInformation.STATUS_NOTVERSIONED_EXCLUDED
                & ~FileInformation.STATUS_NOTVERSIONED_NEWLOCALLY;
    }

    @Override
    protected String iconResource () {
        return ICON_RESOURCE;
    }

    @Override
    protected void performContextAction(final Node[] nodes) {
        // Verify the svn client is available before doing any work; the runnable
        // executes on the AWT thread if one is available.
        ClientCheckSupport.getInstance().runInAWTIfAvailable(ActionUtils.cutAmpersand(getRunningName(nodes)), new Runnable() {
            @Override
            public void run() {
                performUpdate(nodes);
            }
        });
    }

    /**
     * Resolves the context from the selected nodes and schedules the actual update
     * on a background request processor; bails out early on an empty context or
     * when no target revision is selected.
     */
    void performUpdate(final Node[] nodes) {
        // FIXME add shalow logic allowing to ignore nested projects
        // look into CVS, it's very tricky:
        // project1/
        //  nbbuild/ (project1)
        //  project2/
        //   src/ (project1)
        //   test/ (project1 but imagine it's in repository, to be updated )
        // Is there a way how to update project1 without updating project2?
        final Context ctx = getContext(nodes);
        if (ctx.getRootFiles().length == 0) {
            Subversion.LOG.info("UpdateAction.performUpdate: context is empty, some files may be unversioned."); //NOI18N
            return;
        }
        final SVNRevision revision = getRevision(ctx);
        if (revision == null) {
            return;
        }
        final ContextAction.ProgressSupport support = new ContextAction.ProgressSupport(this, nodes, ctx) {
            @Override
            public void perform() {
                update(ctx, this, getContextDisplayName(nodes), revision);
            }
        };
        Utils.post(new Runnable() {
            @Override
            public void run () {
                support.start(createRequestProcessor(ctx));
            }
        });
    }

    /** Target revision for the update; subclasses (e.g. update-to-revision) may override. */
    protected SVNRevision getRevision (Context ctx) {
        return SVNRevision.HEAD;
    }

    /**
     * Groups the context's root files by their topmost managed ancestor (one group
     * per working copy / checkout) and runs one update per group.
     */
    private void update(Context ctx, SvnProgressSupport progress, String contextDisplayName, SVNRevision revision) {
        File[] roots = ctx.getRootFiles();
        Map<File, List<File>> rootsPerCheckout = new HashMap<>();
        for (File root : roots) {
            File topManaged = Subversion.getInstance().getTopmostManagedAncestor(root);
            if (topManaged != null) {
                List<File> files = rootsPerCheckout.get(topManaged);
                if (files == null) {
                    files = new ArrayList<>();
                    rootsPerCheckout.put(topManaged, files);
                }
                files.add(root);
            }
        }
        if (rootsPerCheckout.isEmpty()) {
            return;
        }
        FileStatusCache cache = Subversion.getInstance().getStatusCache();
        cache.refreshCached(ctx);
        for (Map.Entry<File, List<File>> e : rootsPerCheckout.entrySet()) {
            List<File> files = e.getValue();
            try {
                if (rootsPerCheckout.size() > 1) {
                    // several checkouts involved: derive a display name per group
                    contextDisplayName = getContextDisplayName(files);
                }
                File root = files.get(0);
                SVNUrl repositoryUrl = SvnUtils.getRepositoryRootUrl(root);
                if(repositoryUrl == null) {
                    Subversion.LOG.log(Level.WARNING, "Could not retrieve repository root for context file {0}", new Object[]{root});
                    continue;
                }
                update(e.getKey(), files.toArray(new File[files.size()]), progress, contextDisplayName, repositoryUrl, revision);
            } catch (SVNClientException ex) {
                SvnClientExceptionHandler.notifyException(ex, true, true);
            }
        }
    }

    /**
     * Performs the actual svn update for one working copy: splits roots into flat
     * and recursive sets, wires up output/notify listeners, runs the update outside
     * the indexer, then reports conflicts or completion and opens the results view.
     */
    private static void update (File checkoutRoot, File[] roots, final SvnProgressSupport progress, String contextDisplayName, SVNUrl repositoryUrl, final SVNRevision revision) {
        File[][] split = Utils.splitFlatOthers(roots);
        final List<File> recursiveFiles = new ArrayList<File>();
        final List<File> flatFiles = new ArrayList<File>();

        // recursive files
        for (int i = 0; i<split[1].length; i++) {
            recursiveFiles.add(split[1][i]);
        }
        // flat files
        //File[] flatRoots = SvnUtils.flatten(split[0], getDirectoryEnabledStatus());
        for (int i= 0; i<split[0].length; i++) {
            flatFiles.add(split[0][i]);
        }

        final SvnClient client;
        UpdateOutputListener listener = new UpdateOutputListener();
        try {
            client = Subversion.getInstance().getClient(repositoryUrl);
            // this isn't clean - the client notifies only files which realy were updated.
            // The problem here is that the revision in the metadata is set to HEAD even if the file didn't change =>
            // we have to explicitly force the refresh for the relevant context - see bellow in updateRoots
            client.removeNotifyListener(Subversion.getInstance().getRefreshHandler());
            client.addNotifyListener(listener);
            client.addNotifyListener(progress);
            progress.setCancellableDelegate(client);
        } catch (SVNClientException ex) {
            SvnClientExceptionHandler.notifyException(ex, true, true);
            return;
        }

        try {
            UpdateNotifyListener l = new UpdateNotifyListener();
            client.addNotifyListener(l);
            try {
                // Recursive roots first, then flat ones; each phase checks for cancellation.
                SvnUtils.runWithoutIndexing(new Callable<Void>() {
                    @Override
                    public Void call () throws Exception {
                        updateRoots(recursiveFiles, progress, client, true, revision);
                        if(progress.isCanceled()) {
                            return null;
                        }
                        updateRoots(flatFiles, progress, client, false, revision);
                        return null;
                    }
                }, roots);
                if(progress.isCanceled()) {
                    return;
                }
            } finally {
                client.removeNotifyListener(l);
                client.removeNotifyListener(progress);
            }
            if (!l.existedFiles.isEmpty() || !l.conflictedFiles.isEmpty()) {
                // status of replaced files should be refreshed
                // because locally added files can be replaced with those in repository and their status would be still the same in the cache
                HashSet<File> filesToRefresh = new HashSet<File>(l.existedFiles);
                filesToRefresh.addAll(l.conflictedFiles);
                Subversion.getInstance().getStatusCache().refreshAsync(filesToRefresh.toArray(new File[filesToRefresh.size()]));
            }
            if (!l.conflictedFiles.isEmpty()) {
                // warn the user on the EDT that the update produced conflicts
                SwingUtilities.invokeLater(new Runnable() {
                    @Override
                    public void run() {
                        NotifyDescriptor nd = new NotifyDescriptor.Message(
                                org.openide.util.NbBundle.getMessage(UpdateAction.class, "MSG_UpdateCausedConflicts_Prompt"), //NOI18N
                                NotifyDescriptor.WARNING_MESSAGE);
                        DialogDisplayer.getDefault().notify(nd);
                    }
                });
            } else {
                StatusDisplayer.getDefault().setStatusText(org.openide.util.NbBundle.getMessage(UpdateAction.class, "MSG_Update_Completed")); // NOI18N
            }
        } catch (SVNClientException e1) {
            progress.annotate(e1);
        } finally {
            openResults(listener.getResults(), repositoryUrl, contextDisplayName, checkoutRoot == null ? "" : checkoutRoot.getAbsolutePath());
        }
    }

    /** Shows the collected per-file update results in the versioning output window (on the EDT). */
    private static void openResults(final List<FileUpdateInfo> resultsList, final SVNUrl url, final String contextDisplayName, final String checkoutRoot) {
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                UpdateResults results = new UpdateResults(resultsList, url, contextDisplayName);
                VersioningOutputManager vom = VersioningOutputManager.getInstance();
                vom.addComponent(SvnUtils.decodeToString(url) + "-UpdateExecutor-" + checkoutRoot, results); // NOI18N
            }
        });
    }

    /**
     * Updates each root (unless canceled) and applies the revision-patch workaround
     * so cached revision labels match the new working-copy revision.
     */
    private static void updateRoots(List<File> roots, SvnProgressSupport support, SvnClient client, boolean recursive, SVNRevision revision) throws SVNClientException {
        for (Iterator<File> it = roots.iterator(); it.hasNext();) {
            File root = it.next();
            if(support.isCanceled()) {
                break;
            }
            long rev = client.update(root, revision == null ? SVNRevision.HEAD : revision, recursive);
            revisionUpdateWorkaround(recursive, FileUtil.normalizeFile(root), client, rev);
        }
        return;
    }

    /**
     * Asynchronously patches the cached revision of the updated files and flushes
     * their labels, because the svn client only notifies files whose content changed
     * while the metadata revision moves for all of them.
     */
    private static void revisionUpdateWorkaround(final boolean recursive, final File root, final SvnClient client, final long revision) throws SVNClientException {
        Utils.post(new Runnable() {
            public void run() {
                SVNRevision.Number svnRevision = null;
                if(revision < -1) {
                    // client.update() did not report a valid revision; ask the WC (or server) instead
                    ISVNInfo info = null;
                    try {
                        info = SvnUtils.getInfoFromWorkingCopy(client, root); // try to retrieve from local WC first
                        svnRevision = info.getRevision();
                        if(svnRevision == null) {
                            info = client.getInfo(root); // contacts the server
                            svnRevision = info.getRevision();
                        }
                    } catch (SVNClientException ex) {
                        SvnClientExceptionHandler.notifyException(ex, true, true);
                    }
                } else {
                    svnRevision = new SVNRevision.Number(revision);
                }

                // this isn't clean - the client notifies only files which realy were updated.
                // The problem here is that the revision in the metadata is set to HEAD even if the file didn't change
                List<File> filesToRefresh;
                File[] fileArray;
                if (recursive) {
                    Subversion.getInstance().getStatusCache().patchRevision(new File[] { root }, svnRevision);
                    int maxItems = 5;
                    filesToRefresh = patchFilesRecursively(root, svnRevision, maxItems);
                    // if >= 10000 rather refresh everything than just too large set of files
                    fileArray = filesToRefresh.size() >= maxItems ? null : filesToRefresh.toArray(new File[filesToRefresh.size()]);
                } else {
                    filesToRefresh = new ArrayList<>();
                    filesToRefresh.add(root);
                    File[] files = root.listFiles();
                    if (files != null) {
                        filesToRefresh.addAll(Arrays.asList(files));
                    }
                    fileArray = filesToRefresh.toArray(new File[filesToRefresh.size()]);
                    Subversion.getInstance().getStatusCache().patchRevision(fileArray, svnRevision);
                }
                // the cache fires status change events to trigger the annotation refresh.
                // unfortunatelly, we have to call the refresh explicitly for each file from this place
                // as the revision label was changed even if the files status wasn't
                Subversion.getInstance().getStatusCache().getLabelsCache().flushFileLabels(fileArray);
                Subversion.getInstance().refreshAnnotationsAndSidebars(fileArray);
            }
        });
    }

    /** Programmatic entry point: updates all roots of the given context to HEAD. */
    public static void performUpdate(final Context context, final String contextDisplayName) {
        if(!Subversion.getInstance().checkClientAvailable()) {
            return;
        }
        if (context == null || context.getRoots().size() == 0) {
            return;
        }

        SVNUrl repository;
        try {
            repository = getSvnUrl(context);
        } catch (SVNClientException ex) {
            SvnClientExceptionHandler.notifyException(ex, true, true);
            return;
        }
        RequestProcessor rp = Subversion.getInstance().getRequestProcessor(repository);
        SvnProgressSupport support = new SvnProgressSupport() {
            public void perform() {
                SystemAction.get(UpdateAction.class).update(context, this, contextDisplayName, null);
            }
        };
        support.start(rp, repository, org.openide.util.NbBundle.getMessage(UpdateAction.class, "MSG_Update_Progress")); // NOI18N
    }

    /**
     * Run update on a single file
     * @param file
     */
    public static void performUpdate(final File file) {
        if(!Subversion.getInstance().checkClientAvailable()) {
            return;
        }
        if (file == null) {
            return;
        }

        SVNUrl repository;
        try {
            repository = SvnUtils.getRepositoryRootUrl(file);
        } catch (SVNClientException ex) {
            SvnClientExceptionHandler.notifyException(ex, true, true);
            return;
        }
        final SVNUrl repositoryUrl = repository;
        RequestProcessor rp = Subversion.getInstance().getRequestProcessor(repositoryUrl);
        SvnProgressSupport support = new SvnProgressSupport() {
            public void perform() {
//                FileStatusCache cache = Subversion.getInstance().getStatusCache();
//                cache.refresh(file, FileStatusCache.REPOSITORY_STATUS_UNKNOWN);
                update(Subversion.getInstance().getTopmostManagedAncestor(file), new File[] {file}, this, file.getAbsolutePath(), repositoryUrl, null);
            }
        };
        support.start(rp, repositoryUrl, org.openide.util.NbBundle.getMessage(UpdateAction.class, "MSG_Update_Progress")); // NOI18N
    }

    /**
     * Patches the cached revision of all traversible files under {@code root} and
     * collects up to {@code maxReturnFiles} of them for explicit refresh; once the
     * budget is exhausted only directories keep being traversed (their files are
     * still revision-patched, just not returned).
     */
    private static List<File> patchFilesRecursively (File root, SVNRevision.Number revision, int maxReturnFiles) {
        List<File> ret = new ArrayList<>();
        if (root == null) {
            return ret;
        }
        if (maxReturnFiles > 0) {
            // at this point it's useless to refresh a specific set of files in the IDE
            // it's better to refresh everything to save memory and it might be faster anyway
            ret.add(root);
        }
        File[] files = root.listFiles();
        if (files != null) {
            FileStatusCache cache = Subversion.getInstance().getStatusCache();
            cache.patchRevision(files, revision);
            for (File file : files) {
                FileInformation info = cache.getCachedStatus(file);
                if (!(SvnUtils.isPartOfSubversionMetadata(file) || SvnUtils.isAdministrative(file)
                        || info != null && (info.getStatus() & STATUS_RECURSIVELY_TRAVERSIBLE) == 0)) {
                    if (file.isDirectory()) {
                        ret.addAll(patchFilesRecursively(file, revision, maxReturnFiles - ret.size()));
                    } else if (maxReturnFiles - ret.size() > 0) {
                        ret.add(file);
                    }
                }
            }
        }
        return ret;
    }

    /** Wraps plain files in lightweight nodes so the node-based display-name logic can be reused. */
    private String getContextDisplayName (List<File> files) {
        Node[] nodes = new Node[files.size()];
        for (int i = 0; i < files.size(); ++i) {
            final File file = files.get(i);
            nodes[i] = new AbstractNode(Children.LEAF, Lookups.fixed(file)) {
                @Override
                public String getName () {
                    return file.getName();
                }
            };
        }
        return getContextDisplayName(nodes);
    }

    /** Parses svn client log messages into {@link FileUpdateInfo} results for the output window. */
    private static class UpdateOutputListener implements ISVNNotifyListener {

        private List<FileUpdateInfo> results;

        public void setCommand(int command) {
        }

        public void logCommandLine(String str) {
        }

        public void logMessage(String logMsg) {
            catchMessage(logMsg);
        }

        public void logError(String str) {
            if (str == null) return;
            catchMessage(str);
        }

        public void logRevision(long rev, String str) {
        }

        public void logCompleted(String str) {
        }

        public void onNotify(File file, SVNNodeKind kind) {
        }

        // Lazily created; the update may produce no parsable messages at all.
        List<FileUpdateInfo> getResults() {
            if(results == null) {
                results = new ArrayList<FileUpdateInfo>();
            }
            return results;
        }

        private void catchMessage(String logMsg) {
            FileUpdateInfo[] fuis = FileUpdateInfo.createFromLogMsg(logMsg);
            if(fuis != null) {
                for(FileUpdateInfo fui : fuis) {
                    if(fui != null) getResults().add(fui);
                }
            }
        }

    };

    /**
     * Scans svn status lines for conflicted ("C" in any of the first four columns)
     * and re-existing ("E") files so their cached status can be refreshed afterwards.
     */
    private static class UpdateNotifyListener implements ISVNNotifyListener {
        private static Pattern conflictFilePattern = Pattern.compile("(C...|.C..|..C.|...C) ?(.+)");        //NOI18N
        private static Pattern existedFilePattern = Pattern.compile("E ?(.+)");                             //NOI18N
        HashSet<File> conflictedFiles = new HashSet<File>();
        HashSet<File> existedFiles = new HashSet<File>();

        public void logMessage(String msg) {
            catchMessage(msg);
        }
        public void logError(String msg) {
            if (msg == null) return;
            catchMessage(msg);
        }
        public void setCommand(int arg0) { /* boring */ }
        public void logCommandLine(String arg0) { /* boring */ }
        public void logRevision(long arg0, String arg1) { /* boring */ }
        public void logCompleted(String arg0) { /* boring */ }
        public void onNotify(File arg0, SVNNodeKind arg1) { /* boring */ }

        private void catchMessage (String message) {
            Matcher m = conflictFilePattern.matcher(message);
            if (m.matches() && m.groupCount() > 1) {
                String filePath = m.group(2);
                conflictedFiles.add(FileUtil.normalizeFile(new File(filePath)));
            } else {
                m = existedFilePattern.matcher(message);
                if (m.matches() && m.groupCount() > 0) {
                    String filePath = m.group(1);
                    existedFiles.add(FileUtil.normalizeFile(new File(filePath)));
                }
            }
        }
    }
}
10,110
348
{"nom":"Rospigliani","dpt":"Haute-Corse","inscrits":124,"abs":43,"votants":81,"blancs":8,"nuls":4,"exp":69,"res":[{"panneau":"1","voix":61},{"panneau":"2","voix":8}]}
71
942
<filename>include/tsqlqueryormapper.h #include "../src/tsqlqueryormapper.h"
28
676
package com.alorma.github.ui.activity.base;

import android.view.MenuItem;

/**
 * Base activity that shows the "up" arrow in the action bar and treats both the
 * action-bar home button and the system back button as a request to close the screen.
 */
public class BackActivity extends BaseActivity {

  @Override
  public void onStart() {
    super.onStart();
    // The support action bar may be absent depending on the theme.
    if (getSupportActionBar() != null) {
      getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    }
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    // Only the home/up affordance is handled here; anything else stays unhandled.
    final boolean homeSelected = item.getItemId() == android.R.id.home;
    if (homeSelected) {
      close(true);
    }
    return homeSelected;
  }

  @Override
  public void onBackPressed() {
    close(false);
  }

  /**
   * Closes this activity. {@code navigateUp} is {@code true} when triggered by the
   * up affordance, {@code false} for the back button; subclasses may override.
   */
  protected void close(boolean navigateUp) {
    finish();
  }
}
223
335
{ "word": "Soz", "definitions": [ "Sorry (used to express apology)" ], "parts-of-speech": "Adjective" }
60
384
<reponame>Wentaobi/tensorflow
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Extremely random forest graph builder using TF resources handles."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import six

from google.protobuf import text_format

from tensorflow.contrib.decision_trees.proto import generic_tree_model_pb2 as _tree_proto
from tensorflow.contrib.tensor_forest.proto import tensor_forest_params_pb2 as _params_proto
from tensorflow.contrib.tensor_forest.python import tensor_forest
from tensorflow.contrib.tensor_forest.python.ops import model_ops
from tensorflow.contrib.tensor_forest.python.ops import stats_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.platform import tf_logging as logging


# Stores tuples of (leaf model type, stats model type)
CLASSIFICATION_LEAF_MODEL_TYPES = {
    'all_dense': (_params_proto.MODEL_DENSE_CLASSIFICATION,
                  _params_proto.STATS_DENSE_GINI),
    'all_sparse': (_params_proto.MODEL_SPARSE_CLASSIFICATION,
                   _params_proto.STATS_SPARSE_GINI),
    'sparse_then_dense': (_params_proto.MODEL_SPARSE_OR_DENSE_CLASSIFICATION,
                          _params_proto.STATS_SPARSE_THEN_DENSE_GINI),
}

# For regression there is a single fixed triple:
# (leaf model type, stats model type, split collection type).
REGRESSION_MODEL_TYPE = (
    _params_proto.MODEL_REGRESSION,
    _params_proto.STATS_LEAST_SQUARES_REGRESSION,
    _params_proto.COLLECTION_BASIC)

# Maps of user-facing option names to the corresponding proto enum values.
COLLECTION_TYPES = {
    'basic': _params_proto.COLLECTION_BASIC,
    'graph_runner': _params_proto.GRAPH_RUNNER_COLLECTION
}

FINISH_TYPES = {
    'basic': _params_proto.SPLIT_FINISH_BASIC,
    'hoeffding': _params_proto.SPLIT_FINISH_DOMINATE_HOEFFDING,
    'bootstrap': _params_proto.SPLIT_FINISH_DOMINATE_BOOTSTRAP
}
PRUNING_TYPES = {
    'none': _params_proto.SPLIT_PRUNE_NONE,
    'half': _params_proto.SPLIT_PRUNE_HALF,
    'quarter': _params_proto.SPLIT_PRUNE_QUARTER,
    '10_percent': _params_proto.SPLIT_PRUNE_10_PERCENT,
    'hoeffding': _params_proto.SPLIT_PRUNE_HOEFFDING,
}
SPLIT_TYPES = {
    'less_or_equal': _tree_proto.InequalityTest.LESS_OR_EQUAL,
    'less': _tree_proto.InequalityTest.LESS_THAN
}


def build_params_proto(params):
  """Build a TensorForestParams proto out of the V4ForestHParams object.

  Depth-dependent variants of the v4_* parameters (strings, merged via
  text_format) take precedence over the corresponding scalar defaults.
  An optional v4_param_file is merged last and can override everything.
  """
  proto = _params_proto.TensorForestParams()
  proto.num_trees = params.num_trees
  proto.max_nodes = params.max_nodes
  proto.is_regression = params.regression
  proto.num_outputs = params.num_classes
  proto.num_features = params.num_features

  proto.leaf_type = params.v4_leaf_model_type
  proto.stats_type = params.v4_stats_model_type
  proto.collection_type = params.v4_split_collection_type
  proto.pruning_type.type = params.v4_pruning_type
  proto.finish_type.type = params.v4_finish_type

  proto.inequality_test_type = params.v4_split_type

  proto.drop_final_class = False
  proto.collate_examples = params.v4_collate_examples
  proto.checkpoint_stats = params.v4_checkpoint_stats
  proto.use_running_stats_method = params.v4_use_running_stats_method
  proto.initialize_average_splits = params.v4_initialize_average_splits

  if params.v4_prune_every_samples:
    text_format.Merge(params.v4_prune_every_samples,
                      proto.pruning_type.prune_every_samples)
  else:
    # Pruning half-way through split_after_samples seems like a decent default,
    # making it easy to select the number being pruned with v4_pruning_type
    # while not paying the cost of pruning too often.  Note that this only holds
    # if not using a depth-dependent split_after_samples.
    if params.v4_split_after_samples:
      logging.error(
          'If using depth-dependent split_after_samples and also pruning, '
          'need to set v4_prune_every_samples')
    proto.pruning_type.prune_every_samples.constant_value = (
        params.split_after_samples / 2)

  if params.v4_finish_check_every_samples:
    text_format.Merge(params.v4_finish_check_every_samples,
                      proto.finish_type.check_every_steps)
  else:
    # Checking for finish every quarter through split_after_samples seems
    # like a decent default. We don't want to incur the checking cost too often,
    # but (at least for hoeffding) it's lower than the cost of pruning so
    # we can do it a little more frequently.
    proto.finish_type.check_every_steps.constant_value = int(
        params.split_after_samples / 4)

  if params.v4_split_after_samples:
    text_format.Merge(params.v4_split_after_samples, proto.split_after_samples)
  else:
    proto.split_after_samples.constant_value = params.split_after_samples

  if params.v4_num_splits_to_consider:
    text_format.Merge(params.v4_num_splits_to_consider,
                      proto.num_splits_to_consider)
  else:
    proto.num_splits_to_consider.constant_value = params.num_splits_to_consider

  proto.dominate_fraction.constant_value = params.dominate_fraction
  # NOTE(review): min_split_samples is set to split_after_samples here —
  # presumably intentional, but worth confirming against the kernel's use.
  proto.min_split_samples.constant_value = params.split_after_samples

  if params.v4_param_file:
    with open(params.v4_param_file) as f:
      text_format.Merge(f.read(), proto)

  return proto


class V4ForestHParams(object):
  """Copies an existing hparams object and derives the v4-specific settings.

  All attributes of the wrapped hparams are copied verbatim; the v4_* enum
  attributes are then resolved from their string names (with defaults) and,
  for regression, forced to the fixed REGRESSION_MODEL_TYPE values.
  """

  def __init__(self, hparams):
    # Copy every attribute of the source hparams onto self.
    for k, v in six.iteritems(hparams.__dict__):
      setattr(self, k, v)

    # How to store leaf models.
    model_name = getattr(self, 'v4_model_name', 'all_dense')
    self.v4_leaf_model_type = (
        REGRESSION_MODEL_TYPE[0] if self.regression else
        CLASSIFICATION_LEAF_MODEL_TYPES[model_name][0])

    # How to store stats objects.
    self.v4_stats_model_type = (
        REGRESSION_MODEL_TYPE[1] if self.regression else
        CLASSIFICATION_LEAF_MODEL_TYPES[model_name][1])

    split_collection_name = getattr(self, 'v4_split_collection_name', 'basic')
    self.v4_split_collection_type = (
        REGRESSION_MODEL_TYPE[2] if self.regression else
        COLLECTION_TYPES[split_collection_name])

    finish_name = getattr(self, 'v4_finish_name', 'basic')
    self.v4_finish_type = (
        _params_proto.SPLIT_FINISH_BASIC if self.regression else
        FINISH_TYPES[finish_name])

    pruning_name = getattr(self, 'v4_pruning_name', 'none')
    self.v4_pruning_type = PRUNING_TYPES[pruning_name]

    self.v4_collate_examples = getattr(self, 'v4_collate_examples', False)

    self.v4_checkpoint_stats = getattr(self, 'v4_checkpoint_stats', False)
    self.v4_use_running_stats_method = getattr(
        self, 'v4_use_running_stats_method', False)
    self.v4_initialize_average_splits = getattr(
        self, 'v4_initialize_average_splits', False)

    self.v4_param_file = getattr(self, 'v4_param_file', None)

    self.v4_split_type = getattr(self, 'v4_split_type',
                                 SPLIT_TYPES['less_or_equal'])

    # Special versions of the normal parameters, that support depth-dependence
    self.v4_num_splits_to_consider = getattr(self, 'v4_num_splits_to_consider',
                                             None)
    self.v4_split_after_samples = getattr(self, 'v4_split_after_samples', None)
    self.v4_finish_check_every_samples = getattr(
        self, 'v4_finish_check_every_samples', None)
    self.v4_prune_every_samples = getattr(
        self, 'v4_prune_every_samples', None)


class TreeTrainingVariablesV4(tensor_forest.TreeTrainingVariables):
  """Stores tf.Variables for training a single random tree."""

  def __init__(self, params, tree_num, training):
    # Build and cache the serialized params proto once per params object;
    # the resource ops below all take the serialized form.
    if (not hasattr(params, 'params_proto') or
        not isinstance(params.params_proto,
                       _params_proto.TensorForestParams)):
      params.params_proto = build_params_proto(params)

    params.serialized_params_proto = params.params_proto.SerializeToString()
    self.stats = None
    if training:
      # TODO(gilberth): Manually shard this to be able to fit it on
      # multiple machines.
      self.stats = stats_ops.fertile_stats_variable(
          params, '', self.get_tree_name('stats', tree_num))
    self.tree = model_ops.tree_variable(
        params, '', self.stats, self.get_tree_name('tree', tree_num))


class RandomTreeGraphsV4(tensor_forest.RandomTreeGraphs):
  """Builds TF graphs for random tree training and inference."""

  def tree_initialization(self):
    # The v4 resource variables need no extra initialization op.
    return control_flow_ops.no_op()

  def training_graph(self,
                     input_data,
                     input_labels,
                     random_seed,
                     data_spec,
                     sparse_features=None,
                     input_weights=None):
    # Builds: traverse tree -> update leaf models -> accumulate stats ->
    # grow tree (after the model update has run).
    if input_weights is None:
      input_weights = []

    sparse_indices = []
    sparse_values = []
    sparse_shape = []
    if sparse_features is not None:
      sparse_indices = sparse_features.indices
      sparse_values = sparse_features.values
      sparse_shape = sparse_features.dense_shape

    if input_data is None:
      input_data = []

    leaf_ids = model_ops.traverse_tree_v4(
        self.variables.tree,
        input_data,
        sparse_indices,
        sparse_values,
        sparse_shape,
        input_spec=data_spec.SerializeToString(),
        params=self.params.serialized_params_proto)

    update_model = model_ops.update_model_v4(
        self.variables.tree,
        leaf_ids,
        input_labels,
        input_weights,
        params=self.params.serialized_params_proto)

    finished_nodes = stats_ops.process_input_v4(
        self.variables.tree,
        self.variables.stats,
        input_data,
        sparse_indices,
        sparse_values,
        sparse_shape,
        input_labels,
        input_weights,
        leaf_ids,
        input_spec=data_spec.SerializeToString(),
        random_seed=random_seed,
        params=self.params.serialized_params_proto)

    with ops.control_dependencies([update_model]):
      return stats_ops.grow_tree_v4(
          self.variables.tree,
          self.variables.stats,
          finished_nodes,
          params=self.params.serialized_params_proto)

  def inference_graph(self, input_data, data_spec, sparse_features=None):
    # Returns per-example predictions from the current tree.
    sparse_indices = []
    sparse_values = []
    sparse_shape = []
    if sparse_features is not None:
      sparse_indices = sparse_features.indices
      sparse_values = sparse_features.values
      sparse_shape = sparse_features.dense_shape
    if input_data is None:
      input_data = []

    return model_ops.tree_predictions_v4(
        self.variables.tree,
        input_data,
        sparse_indices,
        sparse_values,
        sparse_shape,
        input_spec=data_spec.SerializeToString(),
        params=self.params.serialized_params_proto)

  def average_impurity(self):
    # Not implemented for v4; always reports 0.
    return constant_op.constant(0)

  def size(self):
    """Constructs a TF graph for evaluating the current number of nodes.

    Returns:
      The current number of nodes in the tree.
    """
    return model_ops.tree_size(self.variables.tree)

  def feature_usage_counts(self):
    return model_ops.feature_usage_counts(
        self.variables.tree, params=self.params.serialized_params_proto)


class RandomForestGraphsV4(tensor_forest.RandomForestGraphs):
  """RandomForestGraphs wired to the v4 tree/variable implementations."""

  def __init__(self, params,
               tree_graphs=None,
               tree_variables_class=None,
               **kwargs):
    # Accept either a plain hparams object or an already-wrapped V4ForestHParams.
    if not isinstance(params, V4ForestHParams):
      params = V4ForestHParams(params)
    super(RandomForestGraphsV4, self).__init__(
        params, tree_graphs=tree_graphs or RandomTreeGraphsV4,
        tree_variables_class=(tree_variables_class or TreeTrainingVariablesV4),
        **kwargs)
5,006
349
/********************************************************************************* * * Inviwo - Interactive Visualization Workshop * * Copyright (c) 2013-2021 Inviwo Foundation * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* *********************************************************************************/ #include <inviwo/core/datastructures/geometry/plane.h> #include <glm/gtx/perpendicular.hpp> #include <array> namespace inviwo { inviwo::uvec3 Plane::COLOR_CODE = uvec3(225, 174, 225); const std::string Plane::CLASS_IDENTIFIER = "org.inviwo.Plane"; Plane::Plane(vec3 point, vec3 normal) noexcept : point_(point), normal_(glm::normalize(normal)) {} float Plane::distance(const vec3& p) const { return glm::dot(p - point_, normal_); } vec3 Plane::projectPoint(const vec3& p) const { return p - distance(p) * normal_; } bool Plane::isInside(const vec3& p) const { return distance(p) >= 0.f; } bool Plane::perpendicularToPlane(const vec3& p) const { return (glm::abs(glm::dot(normal_, p)) < glm::epsilon<float>()); } mat4 Plane::inPlaneBasis() const { std::array<vec3, 3> perp = {glm::perp(vec3{1.0f, 0.0f, 0.0}, normal_), glm::perp(vec3{0.0f, 1.0f, 0.0}, normal_), glm::perp(vec3{0.0f, 0.0f, 1.0}, normal_)}; const auto a1 = glm::normalize(*std::max_element( perp.begin(), perp.end(), [](const vec3& a, const vec3& b) { return glm::length2(a) < glm::length2(b); })); const auto a2 = glm::cross(normal_, a1); return glm::translate(point_) * mat4{vec4{a1, 0.0f}, vec4{a2, 0.0f}, vec4{normal_, 0.0f}, vec4{vec3{0.0f}, 1.0}}; } void Plane::setPoint(const vec3 p) { point_ = p; } void Plane::setNormal(const vec3& n) { normal_ = glm::normalize(n); } Plane Plane::transform(const mat4& transform) const { const auto newPos = vec3(transform * vec4(point_, 1.0)); const auto normalTransform = glm::transpose(glm::inverse(transform)); const auto newNormal = glm::normalize(vec3(normalTransform * vec4(normal_, 0.0))); return Plane(newPos, newNormal); } std::optional<vec3> Plane::getIntersection(const vec3& start, const vec3& stop) const { // Distance from point to plane const float d = glm::dot(point_ - start, normal_); if (glm::abs(d) < glm::epsilon<float>()) { // segment is in plane, return start point. 
return start; } const vec3 segment = stop - start; // Distance of segment projected onto plane normal float denom = glm::dot(normal_, segment); // if zero, segment is parallel to plane if (std::abs(denom) > glm::epsilon<float>()) { float tHit = d / denom; if (tHit >= 0.0f && tHit <= 1.0f) { return start + tHit * segment; } } // No intersection return std::nullopt; } std::optional<float> Plane::getIntersectionWeight(const vec3& start, const vec3& stop) const { // Distance from point to plane const float d = glm::dot(point_ - start, normal_); if (glm::abs(d) < glm::epsilon<float>()) { // segment is in plane, return start point. return 0.0f; } const vec3 segment = stop - start; // Distance of segment projected onto plane normal float denom = glm::dot(normal_, segment); // if zero, segment is parallel to plane if (std::abs(denom) > glm::epsilon<float>()) { const float tHit = d / denom; if (tHit >= 0.0f && tHit <= 1.0f) { return tHit; } } // No intersection return std::nullopt; } std::string Plane::getDataInfo() const { return "Plane"; } } // namespace inviwo
1,810
769
<reponame>Thales-RISC-V/pulpino-compliant-debug // Copyright 2017 ETH Zurich and University of Bologna. // Copyright and related rights are licensed under the Solderpad Hardware // License, Version 0.51 (the “License”); you may not use this file except in // compliance with the License. You may obtain a copy of the License at // http://solderpad.org/licenses/SHL-0.51. Unless required by applicable law // or agreed to in writing, software, hardware and materials distributed under // this License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR // CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. #include "utils.h" #include "string_lib.h" #include "bar.h" #include "gpio.h" int main() { set_pin_function(PIN_MSPI_CSN1, FUNC_EXT2); if (get_pin_function(PIN_MSPI_CSN1) == FUNC_EXT2) { printf("Successfully enabled func 2 on PIN_MSPI_CSN1\n"); } else { printf("ERROR on enabling func 2 on PIN_MSPI_CSN1\n"); } set_pin_function(PIN_MSPI_CSN2, FUNC_GPIO); set_gpio_pin_direction(PIN_MSPI_CSN2, DIR_OUT); if (get_gpio_pin_direction(PIN_MSPI_CSN2) == DIR_OUT) { printf("Successfully set out dir on PIN_MSPI_CSN2\n"); } else { printf("ERROR on setting out dir on PIN_MSPI_CSN2\n"); } printf("Done!!!\n"); return 0; }
490
2,125
from functools import partial

from torchnlp.encoders.text.static_tokenizer_encoder import StaticTokenizerEncoder


class MosesEncoder(StaticTokenizerEncoder):
    """ Text encoder backed by the Moses tokenizer/detokenizer.

    Tokenization and detokenization are delegated to ``sacremoses``
    (``MosesTokenizer`` / ``MosesDetokenizer``); everything else is inherited
    from ``StaticTokenizerEncoder``.

    **Tokenizer Reference:**
    http://www.nltk.org/_modules/nltk/tokenize/moses.html

    Args:
        **args: Arguments passed onto ``StaticTokenizerEncoder.__init__``.
        **kwargs: Keyword arguments passed onto ``StaticTokenizerEncoder.__init__``.
          ``tokenize`` and ``detokenize`` are fixed by this class and may not
          be supplied.

    NOTE: Doctests are skipped because running NLTK moses with Python 3.7's
    pytest halts on travis.

    Example:

        >>> encoder = MosesEncoder(["This ain't funny.", "Don't?"])  # doctest: +SKIP
        >>> encoder.encode("This ain't funny.")  # doctest: +SKIP
        tensor([5, 6, 7, 8, 9])
        >>> encoder.decode(encoder.encode("This ain't funny."))  # doctest: +SKIP
        "This ain't funny."
    """

    def __init__(self, *args, **kwargs):
        # Both tokenizer hooks are owned by this class; reject caller overrides
        # with the same error text for each reserved keyword.
        for reserved in ('tokenize', 'detokenize'):
            if reserved in kwargs:
                raise TypeError('``MosesEncoder`` does not take keyword argument ``%s``.' % reserved)

        try:
            from sacremoses import MosesTokenizer
            from sacremoses import MosesDetokenizer
        except ImportError:
            # Point the user at the optional dependency before re-raising.
            print("Please install SacreMoses. "
                  "See the docs at https://github.com/alvations/sacremoses for more information.")
            raise

        super().__init__(
            *args,
            tokenize=MosesTokenizer().tokenize,
            detokenize=partial(MosesDetokenizer().detokenize, return_str=True),
            **kwargs)
785
348
{"nom":"Couvertpuis","circ":"1ère circonscription","dpt":"Meuse","inscrits":87,"abs":36,"votants":51,"blancs":7,"nuls":5,"exp":39,"res":[{"nuance":"UDI","nom":"M. <NAME>","voix":21},{"nuance":"REM","nom":"<NAME>","voix":18}]}
92
476
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.sql.planner.optimizations;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.prestosql.expressions.RowExpressionRewriter;
import io.prestosql.expressions.RowExpressionTreeRewriter;
import io.prestosql.spi.block.SortOrder;
import io.prestosql.spi.plan.AggregationNode;
import io.prestosql.spi.plan.AggregationNode.Aggregation;
import io.prestosql.spi.plan.LimitNode;
import io.prestosql.spi.plan.OrderingScheme;
import io.prestosql.spi.plan.PlanNode;
import io.prestosql.spi.plan.PlanNodeId;
import io.prestosql.spi.plan.PlanNodeIdAllocator;
import io.prestosql.spi.plan.Symbol;
import io.prestosql.spi.plan.TopNNode;
import io.prestosql.spi.relation.CallExpression;
import io.prestosql.spi.relation.RowExpression;
import io.prestosql.spi.relation.VariableReferenceExpression;
import io.prestosql.sql.planner.PartitioningScheme;
import io.prestosql.sql.planner.SymbolUtils;
import io.prestosql.sql.planner.TypeProvider;
import io.prestosql.sql.planner.plan.CubeFinishNode;
import io.prestosql.sql.planner.plan.StatisticAggregations;
import io.prestosql.sql.planner.plan.StatisticAggregationsDescriptor;
import io.prestosql.sql.planner.plan.StatisticsWriterNode;
import io.prestosql.sql.planner.plan.TableFinishNode;
import io.prestosql.sql.planner.plan.TableWriterNode;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.ExpressionRewriter;
import io.prestosql.sql.tree.ExpressionTreeRewriter;
import io.prestosql.sql.tree.SymbolReference;

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static io.prestosql.spi.plan.AggregationNode.groupingSets;
import static io.prestosql.sql.planner.SymbolUtils.toSymbolReference;
import static io.prestosql.sql.relational.OriginalExpressionUtils.castToExpression;
import static io.prestosql.sql.relational.OriginalExpressionUtils.castToRowExpression;
import static io.prestosql.sql.relational.OriginalExpressionUtils.isExpression;
import static java.util.Objects.requireNonNull;

/**
 * Rewrites symbols, expressions and whole plan nodes so that every symbol
 * reference is replaced by its canonical representative according to a
 * name-to-name {@code mapping}. Mapping chains (a -> b -> c) are followed to
 * their end; self-mappings terminate the walk.
 */
public class SymbolMapper
{
    // Symbol-name -> canonical-symbol-name. Chains are allowed and followed.
    private final Map<String, String> mapping;
    // Used to look up the type of a canonical symbol when rebuilding
    // VariableReferenceExpressions; mutable via setTypes.
    private TypeProvider types;

    public SymbolMapper(Map<String, String> mapping, TypeProvider types)
    {
        requireNonNull(mapping, "mapping is null");
        this.mapping = mapping;
        this.types = types;
    }

    public void setTypes(TypeProvider types)
    {
        this.types = types;
    }

    public TypeProvider getTypes()
    {
        return types;
    }

    /**
     * Follows the mapping chain from {@code symbol} until it reaches a name
     * that maps to itself or is absent from the mapping.
     * NOTE(review): a mapping cycle of length > 1 (a -> b -> a) would loop
     * forever here — callers presumably guarantee acyclic mappings; verify.
     */
    public Symbol map(Symbol symbol)
    {
        String canonical = symbol.getName();
        while (mapping.containsKey(canonical) && !mapping.get(canonical).equals(canonical)) {
            canonical = mapping.get(canonical);
        }
        return new Symbol(canonical);
    }

    /**
     * Same chain-following as {@link #map(Symbol)}; returns the original
     * instance unchanged when the name is already canonical, otherwise builds
     * a new variable whose type is resolved through {@code types}.
     */
    public VariableReferenceExpression map(VariableReferenceExpression variable)
    {
        String canonical = variable.getName();
        while (mapping.containsKey(canonical) && !mapping.get(canonical).equals(canonical)) {
            canonical = mapping.get(canonical);
        }
        if (canonical.equals(variable.getName())) {
            return variable;
        }
        return new VariableReferenceExpression(canonical, types.get(new Symbol(canonical)));
    }

    /**
     * Maps a RowExpression: legacy wrapped Expressions are unwrapped, mapped
     * as AST expressions and re-wrapped; native row expressions have their
     * variable references rewritten in place via the tree rewriter.
     */
    public RowExpression map(RowExpression value)
    {
        if (isExpression(value)) {
            return castToRowExpression(map(castToExpression(value)));
        }
        return RowExpressionTreeRewriter.rewriteWith(new RowExpressionRewriter<Void>()
        {
            @Override
            public RowExpression rewriteVariableReference(VariableReferenceExpression variable, Void context, RowExpressionTreeRewriter<Void> treeRewriter)
            {
                return map(variable);
            }
        }, value);
    }

    // Rewrites every SymbolReference inside an AST expression to its
    // canonical symbol.
    public Expression map(Expression value)
    {
        return ExpressionTreeRewriter.rewriteWith(new ExpressionRewriter<Void>()
        {
            @Override
            public Expression rewriteSymbolReference(SymbolReference node, Void context, ExpressionTreeRewriter<Void> treeRewriter)
            {
                Symbol canonical = map(SymbolUtils.from(node));
                return toSymbolReference(canonical);
            }
        }, value);
    }

    // Re-bases the aggregation onto {@code source}, keeping the node id.
    public AggregationNode map(AggregationNode node, PlanNode source)
    {
        return map(node, source, node.getId());
    }

    // Variant that assigns a fresh node id from the allocator.
    public AggregationNode map(AggregationNode node, PlanNode source, PlanNodeIdAllocator idAllocator)
    {
        return map(node, source, idAllocator.getNextId());
    }

    private AggregationNode map(AggregationNode node, PlanNode source, PlanNodeId newNodeId)
    {
        // Map both the output symbol (key) and the aggregation body (value).
        ImmutableMap.Builder<Symbol, Aggregation> aggregations = ImmutableMap.builder();
        for (Entry<Symbol, Aggregation> entry : node.getAggregations().entrySet()) {
            aggregations.put(map(entry.getKey()), map(entry.getValue()));
        }

        return new AggregationNode(
                newNodeId,
                source,
                aggregations.build(),
                groupingSets(
                        // Duplicate grouping keys can appear after mapping;
                        // collapse them while preserving order.
                        mapAndDistinct(node.getGroupingKeys()),
                        node.getGroupingSetCount(),
                        node.getGlobalGroupingSets()),
                ImmutableList.of(),
                node.getStep(),
                node.getHashSymbol().map(this::map),
                node.getGroupIdSymbol().map(this::map),
                node.getAggregationType(),
                node.getFinalizeSymbol());
    }

    // Maps every symbol-bearing part of an aggregation: call arguments,
    // raw arguments, filter, ordering and mask.
    private Aggregation map(Aggregation aggregation)
    {
        return new Aggregation(
                new CallExpression(
                        aggregation.getFunctionCall().getDisplayName(),
                        aggregation.getFunctionCall().getFunctionHandle(),
                        aggregation.getFunctionCall().getType(),
                        aggregation.getArguments().stream()
                                .map(this::map)
                                .collect(toImmutableList())),
                aggregation.getArguments().stream()
                        .map(this::map)
                        .collect(toImmutableList()),
                aggregation.isDistinct(),
                aggregation.getFilter().map(this::map),
                aggregation.getOrderingScheme().map(this::map),
                aggregation.getMask().map(this::map));
    }

    public TopNNode map(TopNNode node, PlanNode source, PlanNodeId newNodeId)
    {
        return new TopNNode(
                newNodeId,
                source,
                node.getCount(),
                map(node.getOrderingScheme()),
                node.getStep());
    }

    public LimitNode map(LimitNode node, PlanNode source)
    {
        return new LimitNode(
                node.getId(),
                source,
                node.getCount(),
                node.getTiesResolvingScheme().map(this::map),
                node.isPartial());
    }

    public TableWriterNode map(TableWriterNode node, PlanNode source)
    {
        return map(node, source, node.getId());
    }

    public TableWriterNode map(TableWriterNode node, PlanNode source, PlanNodeId newNodeId)
    {
        // Intentionally does not use canonicalizeAndDistinct as that would remove columns
        ImmutableList<Symbol> columns = node.getColumns().stream()
                .map(this::map)
                .collect(toImmutableList());

        return new TableWriterNode(
                newNodeId,
                source,
                node.getTarget(),
                map(node.getRowCountSymbol()),
                map(node.getFragmentSymbol()),
                columns,
                node.getColumnNames(),
                node.getPartitioningScheme().map(partitioningScheme -> canonicalize(partitioningScheme, source)),
                node.getStatisticsAggregation().map(this::map),
                node.getStatisticsAggregationDescriptor().map(this::map));
    }

    public StatisticsWriterNode map(StatisticsWriterNode node, PlanNode source)
    {
        // NOTE(review): unlike TableFinishNode below, the row-count symbol is
        // NOT mapped here — confirm this asymmetry is intentional.
        return new StatisticsWriterNode(
                node.getId(),
                source,
                node.getTarget(),
                node.getRowCountSymbol(),
                node.isRowCountEnabled(),
                node.getDescriptor().map(this::map));
    }

    public TableFinishNode map(TableFinishNode node, PlanNode source)
    {
        return new TableFinishNode(
                node.getId(),
                source,
                node.getTarget(),
                map(node.getRowCountSymbol()),
                node.getStatisticsAggregation().map(this::map),
                node.getStatisticsAggregationDescriptor().map(descriptor -> descriptor.map(this::map)));
    }

    public CubeFinishNode map(CubeFinishNode node, PlanNode source)
    {
        return new CubeFinishNode(
                node.getId(),
                source,
                map(node.getRowCountSymbol()),
                node.getMetadata(),
                node.getPredicateColumnsType());
    }

    // Output symbols come from the (already mapped) source node; partitioning
    // arguments and the optional hash column are mapped through this mapper.
    private PartitioningScheme canonicalize(PartitioningScheme scheme, PlanNode source)
    {
        return new PartitioningScheme(
                scheme.getPartitioning().translate(this::map),
                mapAndDistinct(source.getOutputSymbols()),
                scheme.getHashColumn().map(this::map),
                scheme.isReplicateNullsAndAny(),
                scheme.getBucketToPartition());
    }

    private StatisticAggregations map(StatisticAggregations statisticAggregations)
    {
        Map<Symbol, Aggregation> aggregations = statisticAggregations.getAggregations().entrySet().stream()
                .collect(toImmutableMap(entry -> map(entry.getKey()), entry -> map(entry.getValue())));
        return new StatisticAggregations(aggregations, mapAndDistinct(statisticAggregations.getGroupingSymbols()));
    }

    private StatisticAggregationsDescriptor<Symbol> map(StatisticAggregationsDescriptor<Symbol> descriptor)
    {
        return descriptor.map(this::map);
    }

    private List<Symbol> map(List<Symbol> outputs)
    {
        return outputs.stream()
                .map(this::map)
                .collect(toImmutableList());
    }

    // Maps each symbol and drops later duplicates, preserving first-seen order.
    private List<Symbol> mapAndDistinct(List<Symbol> outputs)
    {
        Set<Symbol> added = new HashSet<>();
        ImmutableList.Builder<Symbol> builder = ImmutableList.builder();
        for (Symbol symbol : outputs) {
            Symbol canonical = map(symbol);
            if (added.add(canonical)) {
                builder.add(canonical);
            }
        }
        return builder.build();
    }

    // Maps the ordering columns; if two columns collapse to the same canonical
    // symbol, the first occurrence (and its sort order) wins.
    private OrderingScheme map(OrderingScheme orderingScheme)
    {
        ImmutableList.Builder<Symbol> symbols = ImmutableList.builder();
        ImmutableMap.Builder<Symbol, SortOrder> orderings = ImmutableMap.builder();
        Set<Symbol> seenCanonicals = new HashSet<>(orderingScheme.getOrderBy().size());
        for (Symbol symbol : orderingScheme.getOrderBy()) {
            Symbol canonical = map(symbol);
            if (seenCanonicals.add(canonical)) {
                symbols.add(canonical);
                orderings.put(canonical, orderingScheme.getOrdering(symbol));
            }
        }
        return new OrderingScheme(symbols.build(), orderings.build());
    }

    public static SymbolMapper.Builder builder()
    {
        return new Builder();
    }

    /**
     * Accumulates name-to-name mappings (and an optional TypeProvider) and
     * builds an immutable SymbolMapper.
     */
    public static class Builder
    {
        private final ImmutableMap.Builder<String, String> mappings = ImmutableMap.builder();
        private TypeProvider types;

        public SymbolMapper build()
        {
            return new SymbolMapper(mappings.build(), types);
        }

        public void put(Symbol from, VariableReferenceExpression to)
        {
            mappings.put(from.getName(), to.getName());
        }

        public void put(Symbol from, Symbol to)
        {
            mappings.put(from.getName(), to.getName());
        }

        public void putTypes(TypeProvider types)
        {
            this.types = types;
        }
    }
}
5,540
572
// (C) Copyright Gert-Jan de Vos and Jan Wilmans 2013.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

// Repository at: https://github.com/djeedjay/DebugViewPP/

#pragma once

#include "PropertyItem.h"
#include "PropertyItemEditors.h"
#include "PropertyItemImpl.h"
#include "DebugView++Lib/Colors.h"
#include "Win32/Win32Lib.h"

namespace fusion {
namespace debugviewpp {

// Standard Win32 color-picker dialog extended with an optional "Auto"
// checkbox. While "Auto" is checked, GetColor() reports the sentinel
// Colors::Auto instead of the RGB value selected in the dialog.
class ColorDialog : public CColorDialogImpl<ColorDialog>
{
public:
    // clrInit == Colors::Auto starts the dialog in "Auto" mode; the visible
    // color swatch then defaults to white.
    ColorDialog(const wchar_t* title, COLORREF clrInit = 0, DWORD dwFlags = 0, HWND hWndParent = nullptr) :
        CColorDialogImpl<ColorDialog>(clrInit == Colors::Auto ? RGB(255, 255, 255) : clrInit, dwFlags, hWndParent),
        m_title(title),
        m_auto(clrInit == Colors::Auto),
        m_showAuto(false)
    {
    }

    BEGIN_MSG_MAP_EX(ColorDialog)
        MSG_WM_INITDIALOG(OnInitDialog)
        MSG_WM_DESTROY(OnDestroy)
        CHAIN_MSG_MAP(CColorDialogImpl<ColorDialog>)
    END_MSG_MAP()

    // Sets the caption and, when enabled, creates the "Auto" checkbox at a
    // fixed position inside the stock dialog.
    BOOL OnInitDialog(CWindow /*wndFocus*/, LPARAM /*lInitParam*/)
    {
        SetWindowText(m_title.c_str());
        if (m_showAuto) {
            RECT rect = {166, 160, 216, 176};
            m_btnAuto.Create(*this, &rect, L"Auto", WS_CHILD | WS_VISIBLE | BS_AUTOCHECKBOX | BS_TEXT | BS_LEFT, 0);
            m_btnAuto.SetFont(GetFont(), FALSE);
            m_btnAuto.SetCheck(m_auto ? BST_CHECKED : BST_UNCHECKED);
        }
        return TRUE;
    }

    // Captures the checkbox state before the dialog (and the checkbox HWND)
    // is torn down, so GetColor() keeps working after DoModal returns.
    void OnDestroy()
    {
        if (m_btnAuto.IsWindow() != 0) {
            m_auto = m_showAuto && m_btnAuto.GetCheck() == BST_CHECKED;
            m_btnAuto.DestroyWindow();
        }
        else {
            m_auto = false;
        }
    }

    void ShowAuto(bool show)
    {
        m_showAuto = show;
    }

    void SetCurrentColor(COLORREF color)
    {
        m_auto = color == Colors::Auto;
        CColorDialogImpl<ColorDialog>::SetCurrentColor(m_auto ? RGB(255, 255, 255) : color);
    }

    COLORREF GetColor() const
    {
        return m_auto ? Colors::Auto : CColorDialogImpl<ColorDialog>::GetColor();
    }

private:
    std::wstring m_title;
    CButton m_btnAuto;
    bool m_auto;      // last committed "Auto" state
    bool m_showAuto;  // whether the Auto checkbox is offered at all
};

// Property-grid item that shows a color swatch and opens ColorDialog when
// activated. An "Auto" color is drawn as the letter 'A' on white.
class CPropertyColorItem : public CPropertyItem
{
public:
    CPropertyColorItem(const wchar_t* name, COLORREF color) :
        CPropertyItem(name, 0),
        m_dlg(name, color, CC_ANYCOLOR)
    {
    }

    COLORREF GetColor() const
    {
        return m_dlg.GetColor();
    }

    void SetColor(COLORREF color)
    {
        m_dlg.SetCurrentColor(color);
    }

    void ShowAuto(bool show)
    {
        m_dlg.ShowAuto(show);
    }

    // Opens the modal color picker on click/space/double-click and notifies
    // the owner control when the user confirms a new color.
    BOOL Activate(UINT action, LPARAM /*lParam*/) override
    {
        if (IsEnabled() == 0) {
            return FALSE;
        }

        switch (action) {
        case PACT_SPACE:
        case PACT_CLICK:
        case PACT_DBLCLICK:
            if (m_dlg.DoModal(m_hWndOwner) == IDOK) {
                // Let control owner know
                NMPROPERTYITEM nmh = {{m_hWndOwner, static_cast<UINT_PTR>(::GetDlgCtrlID(m_hWndOwner)), PIN_ITEMCHANGED}, this};
                ::SendMessage(::GetParent(m_hWndOwner), WM_NOTIFY, nmh.hdr.idFrom, reinterpret_cast<LPARAM>(&nmh));
            }
            break;
        default:
            break;
        }
        return TRUE;
    }

    // Paints the cell: border, black frame, then either the solid color or an
    // 'A' glyph for the Auto sentinel.
    void DrawValue(PROPERTYDRAWINFO& di) override
    {
        CDCHandle dc(di.hDC);
        RECT rect = di.rcItem;
        if (IsEnabled() == 0) {
            return dc.FillSolidRect(&rect, di.clrDisabledBack);
        }

        dc.FillSolidRect(&rect, di.clrBorder);
        ::InflateRect(&rect, -1, -1);
        dc.FillSolidRect(&rect, RGB(0, 0, 0));
        ::InflateRect(&rect, -1, -1);
        auto color = GetColor();
        if (color == Colors::Auto) {
            Win32::ScopedBkColor bg(dc, RGB(255, 255, 255));
            Win32::ScopedTextColor fg(dc, RGB(0, 0, 0));
            Win32::ScopedTextAlign ta(dc, TA_CENTER | TA_BOTTOM);
            dc.ExtTextOut((rect.left + rect.right) / 2, rect.bottom, ETO_OPAQUE, &rect, L"A", 1);
        }
        else {
            // Mask off any flag bits so only the RGB triple is painted.
            dc.FillSolidRect(&rect, color & 0xFFFFFF);
        }
    }

    BOOL GetValue(VARIANT* pValue) const override
    {
        CComVariant var(GetColor());
        return SUCCEEDED(var.Detach(pValue));
    }

    BOOL SetValue(const VARIANT& value) override
    {
        CComVariant var;
        if (FAILED(VariantChangeType(&var, &value, 0, VT_COLOR))) {
            return FALSE;
        }
        SetColor(var.intVal);
        return TRUE;
    }

private:
    ColorDialog m_dlg;
};

// Factory helper matching the Prop* naming convention of the property grid;
// caller takes ownership of the returned item.
inline CPropertyColorItem* PropCreateColorItem(const wchar_t* name, COLORREF color)
{
    return new CPropertyColorItem(name, color);
}

} // namespace debugviewpp
} // namespace fusion
2,383
533
#include "framework/operators/ctc_align.h"

namespace anakin {

namespace ops {

// Generates the operator() specialization for a (target, precision) pair:
// it forwards the input/output tensors plus the parsed parameters to the
// saber functor owned by the matching helper.
#define INSTANCE_CTC_ALIGN(Ttype, Ptype) \
template<> \
void CtcAlign<Ttype, Ptype>::operator()(OpContext<Ttype>& ctx, \
        const std::vector<Tensor4dPtr<Ttype> >& ins, \
        std::vector<Tensor4dPtr<Ttype> >& outs) { \
    auto* impl = static_cast<CtcAlignHelper<Ttype, Ptype>*>(this->_helper); \
    auto& param = \
        static_cast<CtcAlignHelper<Ttype, Ptype>*>(this->_helper)->_param_ctc_align; \
    impl->_funcs_ctc_align(ins, outs, param, ctx); \
}

/// TODO ... specialization other type of operator

/// set helper
template<typename Ttype, Precision Ptype>
CtcAlignHelper<Ttype, Ptype>::~CtcAlignHelper() {
}

// Reads the graph attributes (merge_repeated, blank) into the saber
// CtcAlignParam held by this helper.
template<typename Ttype, Precision Ptype>
Status CtcAlignHelper<Ttype, Ptype>::InitParam() {
    DLOG(WARNING) << "Parsing CtcAlign op parameter.";
    auto merge_repeated = GET_PARAMETER(bool, merge_repeated);
    auto blank = GET_PARAMETER(int, blank);

    CtcAlignParam<Ttype> ctc_align_param(blank, merge_repeated);
    _param_ctc_align = ctc_align_param;
    return Status::OK();
}

// Initializes the underlying saber functor for the given context/tensors.
template<typename Ttype, Precision Ptype>
Status CtcAlignHelper<Ttype, Ptype>::Init(OpContext<Ttype> &ctx,
                                          const std::vector<Tensor4dPtr<Ttype> >& ins,
                                          std::vector<Tensor4dPtr<Ttype> >& outs) {
    SABER_CHECK(_funcs_ctc_align.init(ins, outs, _param_ctc_align, SPECIFY, SABER_IMPL, ctx));
    return Status::OK();
}

// Delegates output-shape inference to the saber functor.
template<typename Ttype, Precision Ptype>
Status CtcAlignHelper<Ttype, Ptype>::InferShape(const std::vector<Tensor4dPtr<Ttype> >& ins,
                                                std::vector<Tensor4dPtr<Ttype> >& outs) {
    SABER_CHECK(_funcs_ctc_align.compute_output_shape(ins, outs, _param_ctc_align));
    return Status::OK();
}

#ifdef USE_CUDA
INSTANCE_CTC_ALIGN(NV, Precision::FP32);
template class CtcAlignHelper<NV, Precision::FP32>;
template class CtcAlignHelper<NV, Precision::FP16>;
template class CtcAlignHelper<NV, Precision::INT8>;
// NOTE(review): only FP32 is registered although FP16/INT8 helpers are
// instantiated above — confirm the narrower registration is intentional.
ANAKIN_REGISTER_OP_HELPER(CtcAlign, CtcAlignHelper, NV, Precision::FP32);
#endif

#ifdef AMD_GPU
INSTANCE_CTC_ALIGN(AMD, Precision::FP32);
template class CtcAlignHelper<AMD, Precision::FP32>;
ANAKIN_REGISTER_OP_HELPER(CtcAlign, CtcAlignHelper, AMD, Precision::FP32);
#endif

#ifdef USE_ARM_PLACE
INSTANCE_CTC_ALIGN(ARM, Precision::FP32);
template class CtcAlignHelper<ARM, Precision::FP32>;
template class CtcAlignHelper<ARM, Precision::FP16>;
template class CtcAlignHelper<ARM, Precision::INT8>;
ANAKIN_REGISTER_OP_HELPER(CtcAlign, CtcAlignHelper, ARM, Precision::FP32);
#endif

//! register op
ANAKIN_REGISTER_OP(CtcAlign)
    .Doc("CtcAlign operator")
#ifdef USE_CUDA
    .__alias__<NV, Precision::FP32>("ctc_align")
#endif
#ifdef USE_ARM_PLACE
    .__alias__<ARM, Precision::FP32>("ctc_align")
#endif
#ifdef AMD_GPU
    .__alias__<AMD, Precision::FP32>("ctc_align")
#endif
    .num_in(1)
    .num_out(1)
    .Args<bool>("merge_repeated", " merge_repeated for ctc_align.")
    .Args<int>("blank", "blank for ctc_align.");

} /* namespace ops */

} /* namespace anakin */
388
#!/usr/bin/env python # Copyright (c) 2016 <NAME> <<EMAIL>> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. The names of the author(s) may not be used to endorse or promote # products derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED # AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY # OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. 
from honssh.config import Config
from honssh.utils import validation
from honssh import spoof


class Plugin(object):
    """Static honeypot backend for HonSSH.

    Always routes attackers to the single server configured in the
    ``[honeypot-static]`` section; ``spoof`` decides whether a login
    attempt is accepted.
    """

    def __init__(self):
        # Shared config singleton; the timeout is cached since it is attached
        # to every connection-detail dict.
        self.cfg = Config.getInstance()
        self.connection_timeout = self.cfg.getint(['honeypot', 'connection_timeout'])

    def get_pre_auth_details(self, conn_details):
        # Before authentication the static backend is fully determined by config.
        return self.get_connection_details()

    def get_post_auth_details(self, conn_details):
        # Ask the spoofing layer whether this login attempt should succeed.
        success, username, password = spoof.get_connection_details(conn_details)
        if not success:
            return {'success': False}

        details = self.get_connection_details()
        details['username'] = username
        details['password'] = password
        details['connection_timeout'] = self.connection_timeout
        return details

    def get_connection_details(self):
        # Target server coordinates straight from the [honeypot-static] section.
        return {'success': True,
                'sensor_name': self.cfg.get(['honeypot-static', 'sensor_name']),
                'honey_ip': self.cfg.get(['honeypot-static', 'honey_ip']),
                'honey_port': self.cfg.getint(['honeypot-static', 'honey_port']),
                'connection_timeout': self.connection_timeout}

    def validate_config(self):
        """Return True when every required [honeypot-static] option is present
        and well-formed (checked in the same order as the original plugin)."""
        validated_props = [
            (['honeypot-static', 'enabled'], validation.check_valid_boolean),
            (['honeypot-static', 'pre-auth'], validation.check_valid_boolean),
            (['honeypot-static', 'post-auth'], validation.check_valid_boolean),
            (['honeypot-static', 'honey_ip'], validation.check_valid_ip),
            (['honeypot-static', 'honey_port'], validation.check_valid_port),
        ]
        for prop, validator in validated_props:
            if not self.cfg.check_exist(prop, validator):
                return False

        # sensor_name only needs to exist; no format validator applies.
        if not self.cfg.check_exist(['honeypot-static', 'sensor_name']):
            return False

        return True
1,302
2,292
import hashlib
import os


def sha256_hash(git_uri: str) -> str:
    """Return the hex SHA-256 digest of ``git_uri`` (UTF-8 encoded)."""
    return hashlib.sha256(git_uri.encode()).hexdigest()


def home_directory_path(folder: str, hash_digest: str) -> str:
    """Build ``~/<folder>/<hash_digest>`` using the platform's path rules.

    ``os.path.expanduser("~")`` resolves the home directory portably
    (HOME on POSIX, USERPROFILE on Windows), and ``os.path.join`` uses the
    native separator, so this works across operating systems.
    """
    home = os.path.expanduser("~")
    return os.path.join(home, folder, hash_digest)
135
778
# Entry point generated by the Kratos PFEM solid-mechanics problemtype.
# Importing each application module registers its solvers/elements with the
# Kratos kernel; the imports' side effects are required even though the
# names are not referenced directly below.
import KratosMultiphysics
import KratosMultiphysics.ExternalSolversApplication
import KratosMultiphysics.SolidMechanicsApplication
import KratosMultiphysics.ConstitutiveModelsApplication
import KratosMultiphysics.DelaunayMeshingApplication
import KratosMultiphysics.UmatApplication
import KratosMultiphysics.PfemApplication
import KratosMultiphysics.ContactMechanicsApplication
import KratosMultiphysics.PfemSolidMechanicsApplication

import MainSolid

# Create an empty model container and run the solid-mechanics solution loop
# defined in MainSolid (which reads the project parameters itself).
model = KratosMultiphysics.Model()
MainSolid.Solution(model).Run()
196
726
package com.yuyh.sprintnba.ui.view;

import com.yuyh.sprintnba.http.bean.forum.ForumsData;
import com.yuyh.sprintnba.ui.view.base.BaseView;

import java.util.List;

/**
 * MVP view contract for the forum-list screen: the presenter pushes the
 * fetched forums to an implementation of this interface for rendering.
 *
 * @author yuyh.
 * @date 16/6/25.
 */
public interface ForumListView extends BaseView{

    /**
     * Renders the given forums.
     *
     * @param forumList forums to display; supplied by the presenter.
     */
    void showForumList(List<ForumsData.Forum> forumList);
}
162
485
# coding=utf-8
# Runtime class model for the Zvm toy JVM: JClass mirrors java.lang.Class,
# the *Ref types mirror constant-pool references, and ClassLoader locates and
# loads .class files from the configured jdk_path.
# NOTE(review): this chunk is truncated — ClassLoader continues past the end
# of the visible text (its tail below is incomplete in this view).
from base.utils import common_utils, print_utils
from java_class.class_file import *
from java_class.class_parser import ClassParser
from runtime.thread import Slot
from runtime.heap import Heap
from base.jvm_config import jdk_path
import os


# Corresponds to Class in Java
class JClass(object):
    # JVM access-flag bit masks (JVMS table 4.5-A style constants).
    ACC_PUBLIC = 0x0001
    ACC_PRIVATE = 0x0002
    ACC_PROTECTED = 0x0004
    ACC_STATIC = 0x0008
    ACC_FINAL = 0x0010
    ACC_VOLATILE = 0x0040
    ACC_TRANSIENT = 0x0080
    ACC_SYNTHETIC = 0x1000
    ACC_ENUM = 0x4000

    @staticmethod
    def is_public(flag):
        return flag & JClass.ACC_PUBLIC != 0

    @staticmethod
    def is_private(flag):
        return flag & JClass.ACC_PRIVATE != 0

    @staticmethod
    def is_static(flag):
        return flag & JClass.ACC_STATIC != 0

    def __init__(self):
        self.access_flag = None
        self.name = None
        self.super_class_name = None
        self.super_class = None       # resolved JClass of the super class
        self.interfaces = None
        self.fields = None            # Field[]
        self.methods = None           # Method[]
        self.constant_pool = None     # ConstantPool
        self.class_loader = None
        self.static_fields = None  # map{ name: Slot }
        self.has_inited = False       # whether <clinit> has run

    def new_jclass(self, class_file):
        """Populate this JClass from a parsed class_file and register it on the heap.

        Static fields get default-valued Slots keyed by field name (0 for
        integral types, '0' for char, 0.0 for float/double).
        """
        Heap.new_jclass(self)
        self.access_flag = common_utils.get_int_from_bytes(class_file.access_flag)
        self.constant_pool = ConstantPool.new_constant_pool(class_file)
        self.fields = Field.new_fields(class_file.fields, self.constant_pool.constants)
        self.methods = Method.new_methods(self, class_file.methods, self.constant_pool.constants)
        super_class = self.constant_pool.constants[common_utils.get_int_from_bytes(class_file.super_class)]
        if super_class is not None:
            self.super_class_name = super_class.class_name
        # fetched from the method area
        self.interfaces = None
        self.static_fields = {}
        for sf in self.__get_static_fields():
            desc = sf.descriptor
            slot = Slot()
            if desc == 'B' or desc == 'I' or desc == 'J' or desc == 'S' or desc == 'Z':
                slot.num = 0
            elif desc == 'C':
                slot.num = '0'
            elif desc == 'F':
                slot.num = 0.0
            elif desc == 'D':
                slot.num = 0.0
            self.static_fields[sf.name] = slot

    def get_instance_fields(self):
        # All non-static fields declared by this class.
        return [field for field in self.fields if not JClass.is_static(field.access_flag)]

    # return Field[]
    def __get_static_fields(self):
        return [field for field in self.fields if JClass.is_static(field.access_flag)]

    def get_main_method(self):
        # Look up public static void main(String[]) by name + descriptor.
        methods = self.methods
        for method in methods:
            if method.name == 'main' and method.descriptor == '([Ljava/lang/String;)V':
                return method
        return None


class ConstantPool(object):
    """Runtime constant pool: raw class-file entries converted to runtime
    objects (refs, boxed numbers, strings); unsupported tags become None."""

    def __init__(self):
        self.constants = None

    @staticmethod
    def new_constant_pool(class_file):
        # Convert each raw constant-pool info record; list indexes mirror the
        # original pool indexes.
        r_cp = class_file.constant_pool
        constants = []
        for cp in r_cp:
            if isinstance(cp, ClassInfo):
                constants.append(ClassRef.new_class_ref(r_cp, cp))
            elif isinstance(cp, FieldRefInfo):
                constants.append(FieldRef.new_field_ref(r_cp, cp))
            elif isinstance(cp, MethodRefInfo):
                constants.append(MethodRef.new_method_ref(r_cp, cp))
            elif isinstance(cp, InterfaceMethodRefInfo):
                constants.append(None)
            elif isinstance(cp, StringInfo):
                # Dereference the UTF-8 entry the string points at.
                st = r_cp[common_utils.get_int_from_bytes(cp.string_index)]
                st = common_utils.get_string_from_bytes(st.bytes)
                jstring = JString()
                jstring.data = st
                constants.append(jstring)
            elif isinstance(cp, IntegerInfo):
                jint = JInteger()
                jint.data = common_utils.get_int_from_bytes(cp.bytes)
                constants.append(jint)
            elif isinstance(cp, FloatInfo):
                jfloat = JFloat()
                jfloat.data = common_utils.get_float_from_bytes(cp.bytes)
                constants.append(jfloat)
            elif isinstance(cp, LongInfo):
                jlong = JLong()
                jlong.data = common_utils.get_long_from_bytes(cp.high_bytes, cp.low_bytes)
                constants.append(jlong)
            elif isinstance(cp, DoubleInfo):
                jdouble = JDouble()
                jdouble.data = common_utils.get_double_from_bytes(cp.high_bytes, cp.low_bytes)
                constants.append(jdouble)
            elif isinstance(cp, NameAndTypeInfo):
                constants.append(None)
            elif isinstance(cp, Utf8Info):
                constants.append(common_utils.get_string_from_bytes(cp.bytes))
            elif isinstance(cp, MethodHandleInfo):
                constants.append(None)
            else:
                constants.append(None)
        constants_pool = ConstantPool()
        constants_pool.constants = constants
        return constants_pool


def get_attribute(attributes, constant_pool, name):
    """Return the first attribute whose resolved name equals `name`, else None."""
    for attr in attributes:
        index = common_utils.get_int_from_bytes(attr.attribute_name_index)
        aname = constant_pool[index]
        if aname == name:
            return attr
    return None


class Field(object):
    """Runtime view of one declared field."""

    def __init__(self):
        self.access_flag = None
        self.name = None
        self.descriptor = None
        self.descriptor_index = None
        self.constant_value_index = None  # set only when a ConstantValue attribute exists
        self.signature = None  # records generic-type information
        self.type = None  # JClass

    def is_public(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_PUBLIC)

    def is_protected(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_PROTECTED)

    def is_private(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_PRIVATE)

    def is_static(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_STATIC)

    def is_final(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_FINAL)

    def is_volatile(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_VOLATILE)

    def is_transient(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_TRANSIENT)

    @staticmethod
    def new_fields(cf_fields, constant_pool):
        # Build runtime Field objects from the raw class-file field records.
        fields = []
        for f in cf_fields:
            nf = Field()
            nf.access_flag = common_utils.get_int_from_bytes(f.access_flags)
            nf.name = constant_pool[common_utils.get_int_from_bytes(f.name_index)]
            nf.descriptor_index = common_utils.get_int_from_bytes(f.descriptor_index)
            nf.descriptor = constant_pool[nf.descriptor_index]
            attr = get_attribute(f.attributes, constant_pool, 'ConstantValue')
            if attr is not None:
                nf.constant_value_index = common_utils.get_int_from_bytes(attr.constant_value_index)
            fields.append(nf)
        return fields


class Method(object):
    """Runtime view of one declared method, including its Code attribute."""

    def __init__(self):
        self.access_flag = None
        self.name = None
        self.descriptor = None
        self.max_stack = None
        self.max_locals = None
        self.code = None          # raw bytecode from the Code attribute
        self.exceptions = None  # ExceptionTable[]
        self.arg_desc = None      # parsed parameter descriptors, see get_arg_desc
        self.jclass = None        # declaring class

    @staticmethod
    def new_methods(jclass, cf_methods, constant_pool):
        # NOTE(review): assumes every method has a Code attribute — abstract
        # and native methods would make get_attribute return None here; verify.
        methods = []
        for m in cf_methods:
            nm = Method()
            nm.jclass = jclass
            nm.access_flag = common_utils.get_int_from_bytes(m.access_flags)
            nm.name = constant_pool[common_utils.get_int_from_bytes(m.name_index)]
            nm.descriptor = constant_pool[common_utils.get_int_from_bytes(m.descriptor_index)]
            attr = get_attribute(m.attributes, constant_pool, 'Code')
            nm.max_stack = common_utils.get_int_from_bytes(attr.max_stack)
            nm.max_locals = common_utils.get_int_from_bytes(attr.max_locals)
            nm.code = attr.code
            nm.exceptions = []
            for ex in attr.exception_table:
                jex = JException()
                jex.start_pc = common_utils.get_int_from_bytes(ex.start_pc)
                jex.end_pc = common_utils.get_int_from_bytes(ex.end_pc)
                jex.handler_pc = common_utils.get_int_from_bytes(ex.handler_pc)
                jex.catch_type = common_utils.get_int_from_bytes(ex.catch_type)
                nm.exceptions.append(jex)
            nm.arg_desc = Method.get_arg_desc(nm.descriptor)
            methods.append(nm)
        return methods

    @staticmethod
    def get_arg_desc(descs):
        """Split a method descriptor like '(I[JLjava/lang/String;)V' into a
        list of per-parameter descriptors, stopping at ')'.

        NOTE(review): a leading '[' never starts a new desc (only primitives
        and 'L' do), so the desc[0] == '[' branch below looks unreachable and
        array parameters appear to be mis-parsed — verify against a signature
        such as '([I)V'.
        """
        arg_desc = []
        desc = ''
        for s in descs:
            if s == ')':
                break
            if len(desc) == 0:
                # Start of a new parameter descriptor.
                if s == 'B' or s == 'C' or s == 'D' or s == 'F' or s == 'I' or s == 'J' or s == 'S' or s == 'Z':
                    desc = s
                    arg_desc.append(desc)
                    desc = ''
                elif s == 'L':
                    desc += s
            else:
                if desc[0] == 'L':
                    # Object type: accumulate until the terminating ';'.
                    desc += s
                    if s == ';':
                        arg_desc.append(desc)
                        desc = ''
                elif desc[0] == '[':
                    if 'L' in desc:
                        desc += s
                        if s == ';':
                            arg_desc.append(desc)
                            desc = ''
                    else:
                        desc += s
                        if s != '[':
                            arg_desc.append(desc)
                            desc = ''
        return arg_desc


class JException(object):
    # One entry of a method's exception table (pc ranges + handler + type).
    def __init__(self):
        self.start_pc = 0
        self.end_pc = 0
        self.handler_pc = 0
        self.catch_type = 0


class Ref(object):
    """Base for symbolic constant-pool references; caches the resolved JClass."""

    def __init__(self):
        self.cp = None
        self.class_name = None
        self.cache_class = None  # JClass

    def resolve_class(self, class_loader, need_re_resolve=False, class_name=None):
        # Resolve (and cache) the referenced class, falling back to the
        # default loader and this ref's own class name when not supplied.
        if self.cache_class is not None and not need_re_resolve:
            return self.cache_class
        if class_loader is None:
            class_loader = ClassLoader.default_class_loader()
        if class_name is None:
            class_name = self.class_name
        self.cache_class = class_loader.load_class(class_name)
        self.cache_class.class_loader = class_loader
        return self.cache_class


class ClassRef(Ref):
    def __init__(self):
        super(ClassRef, self).__init__()

    @staticmethod
    def new_class_ref(cp, class_info):
        # cp: runtime ConstantPool entries; class_info: ConstantClassInfo record.
        cr = ClassRef()
        cr.cp = cp
        tmp = cp[common_utils.get_int_from_bytes(class_info.name_index)]
        cr.class_name = common_utils.get_string_from_bytes(tmp.bytes)
        return cr


class MemberRef(Ref):
    """Shared name/descriptor plumbing for field and method references."""

    def __init__(self):
        super(MemberRef, self).__init__()
        self.name = None
        self.descriptor = None
        self.access_flag = None

    @staticmethod
    def check_state(flag, state):
        # None access flag means "not yet resolved" and tests as cleared.
        if flag is None:
            return False
        return (flag & state) != 0

    @staticmethod
    def get_string(cp, index_byte):
        # Dereference an index into the pool and decode the UTF-8 bytes.
        return common_utils.get_string_from_bytes(cp[common_utils.get_int_from_bytes(index_byte)].bytes)

    @staticmethod
    def get_obj(cp, index_byte):
        return cp[common_utils.get_int_from_bytes(index_byte)]


class FieldRef(MemberRef):
    # Same access-flag masks as JClass; duplicated for use via FieldRef.*.
    ACC_PUBLIC = 0x0001
    ACC_PRIVATE = 0x0002
    ACC_PROTECTED = 0x0004
    ACC_STATIC = 0x0008
    ACC_FINAL = 0x0010
    ACC_VOLATILE = 0x0040
    ACC_TRANSIENT = 0x0080
    ACC_SYNTHETIC = 0x1000
    ACC_ENUM = 0x4000

    def __init__(self):
        super(FieldRef, self).__init__()
        self.field = None  # resolved Field, cached

    def is_public(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_PUBLIC)

    def is_protected(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_PROTECTED)

    def is_private(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_PRIVATE)

    def is_static(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_STATIC)

    def is_final(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_FINAL)

    def is_volatile(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_VOLATILE)

    def is_transient(self):
        return MemberRef.check_state(self.access_flag, FieldRef.ACC_TRANSIENT)

    @staticmethod
    def new_field_ref(cp, field_ref_info):
        fr = FieldRef()
        cl = cp[common_utils.get_int_from_bytes(field_ref_info.class_index)]
        fr.class_name = MemberRef.get_string(cp, cl.name_index)
        fr.cp = cp
        name_and_type = MemberRef.get_obj(cp, field_ref_info.name_and_type_index)
        fr.name = MemberRef.get_string(cp, name_and_type.name_index)
        fr.descriptor = MemberRef.get_string(cp, name_and_type.descriptor_index)
        return fr

    def resolve_field(self, class_loader):
        # Resolve by name only within the referenced class (descriptor and
        # super classes are not consulted here).
        if self.field is not None:
            return self.field
        if self.cache_class is None:
            self.resolve_class(class_loader)
        fields = self.cache_class.fields
        for f in fields:
            if f.name == self.name:
                self.field = f
                break
        return self.field


class MethodRef(MemberRef):
    def __init__(self):
        super(MethodRef, self).__init__()
        self.method = None  # resolved Method, cached

    @staticmethod
    def new_method_ref(cp, method_ref_info):
        mr = MethodRef()
        cl = cp[common_utils.get_int_from_bytes(method_ref_info.class_index)]
        mr.class_name = MemberRef.get_string(cp, cl.name_index)
        mr.cp = cp
        name_and_type = MemberRef.get_obj(cp, method_ref_info.name_and_type_index)
        mr.name = MemberRef.get_string(cp, name_and_type.name_index)
        mr.descriptor = MemberRef.get_string(cp, name_and_type.descriptor_index)
        return mr

    # TODO: handle method access permissions and related checks
    def resolve_method(self, class_loader, need_re_resolve=False, class_name=None):
        # Match by name AND descriptor within the (re-)resolved class only.
        if self.method is not None and not need_re_resolve:
            return self.method
        if self.cache_class is None or need_re_resolve:
            self.resolve_class(class_loader, need_re_resolve, class_name)
        methods = self.cache_class.methods
        for m in methods:
            if m.name == self.name and m.descriptor == self.descriptor:
                self.method = m
                break
        return self.method

    def resolve_method_with_super(self, class_loader):
        # Like resolve_method, but walks up the super-class chain; note the
        # super-class search matches by name only (descriptor ignored).
        self.resolve_method(class_loader, True)
        if self.method is None:
            super_class = self.cache_class.super_class
            while super_class is not None:
                for m in super_class.methods:
                    if m.name == self.name:
                        self.method = m
                        break
                if self.method is not None:
                    break
                super_class = super_class.super_class
        return self.method

    def re_resolve_method_with_super_by_class_name(self, class_loader, class_name):
        # Same super-chain walk as above, but starting from an explicitly
        # named class (used for virtual dispatch on the receiver's class).
        self.resolve_method(class_loader, True, class_name)
        if self.method is None:
            super_class = self.cache_class.super_class
            while super_class is not None:
                for m in super_class.methods:
                    if m.name == self.name:
                        self.method = m
                        break
                if self.method is not None:
                    break
                super_class = super_class.super_class
        return self.method


class BaseType(object):
    # Boxed constant-pool value; `data` holds the native Python value.
    def __init__(self):
        self.data = None


class JInteger(BaseType):
    def __init__(self):
        super(JInteger, self).__init__()


class JFloat(BaseType):
    def __init__(self):
        super(JFloat, self).__init__()


class JLong(BaseType):
    def __init__(self):
        super(JLong, self).__init__()


class JDouble(BaseType):
    def __init__(self):
        super(JDouble, self).__init__()


class JString(BaseType):
    def __init__(self):
        super(JString, self).__init__()


# TODO: this probably deserves its own package
class ClassLoader(object):
    # Process-wide default loader (lazily created singleton).
    default_loader = None

    def __init__(self):
        self._loading_classes = []   # names currently being loaded
        self._loaded_classes = {}    # name -> JClass cache
        self.pkg_path = jdk_path     # search roots for .class files
        self.hack()

    def get_all_loaded_class(self):
        # NOTE(review): returns the *loading* list, not _loaded_classes —
        # looks like a bug or a misleading name; verify against callers.
        return self._loading_classes

    def hack(self):
        # preload java/lang/Object up front
        self.load_class('java/lang/Object')

    @staticmethod
    def default_class_loader():
        # TODO: thread synchronization
        if ClassLoader.default_loader is None:
            ClassLoader.default_loader = ClassLoader()
        return ClassLoader.default_loader

    def add_path(self, path):
        self.pkg_path.append(path)

    # TODO: handle jar / zip class sources
    def load_class(self, class_name):
        # TODO: synchronize class loading across threads; busy-polling for now
        # NOTE(review): this spin-wait burns CPU and, within a single thread,
        # could never terminate if a load re-enters itself — verify.
        if class_name in self._loading_classes:
            while True:
                if class_name not in self._loading_classes:
                    break
        if class_name in self._loaded_classes:
            return self._loaded_classes[class_name]
        jclass = self.__load_class(class_name)
        self._loading_classes.remove(class_name)
        return jclass

    def __load_class(self, class_name):
        # Array classes are synthesized; everything else is searched for on
        # disk under each configured root.
        self._loading_classes.append(class_name)
        if class_name[0] == '[':
            return self.__load_array_class(class_name)
        for path in self.pkg_path:
            class_path = path + class_name.replace('.', '/') + '.class'
            if not os.path.exists(class_path):
                continue
            print_utils.print_jvm_status('load class: ' + class_path)
            jclass = self.define_class(class_name, class_path)
            self._loaded_classes[class_name] = jclass
            return jclass
        return None

    # NOTE(review): truncated here in this chunk — the rest of
    # __load_array_class (and any following methods) lies beyond this view.
    def __load_array_class(self, class_name):
        jclass = JClass()
jclass.super_class_name = 'java/lang/Object' jclass.class_loader = self jclass.has_inited = True jclass.name = class_name self._loaded_classes[class_name] = jclass def define_class(self, class_name, path): parser = ClassParser(path) parser.parse() jclass = JClass() jclass.name = class_name jclass.new_jclass(parser.class_file) jclass.super_class = self.load_super_class(jclass) return jclass def load_super_class(self, jclass): if jclass.super_class_name == 'java/lang/Object' or jclass.super_class_name is None: return return self.load_class(jclass.super_class_name)
9,149
435
{ "copyright_text": "Standard YouTube License", "description": "", "duration": 2093, "language": "ita", "recorded": "2016-06-22", "related_urls": [], "speakers": [ "<NAME>" ], "tags": [], "thumbnail_url": "https://i.ytimg.com/vi/KTk1lpNksUg/maxresdefault.jpg", "title": "From Java to Python", "videos": [ { "type": "youtube", "url": "https://www.youtube.com/watch?v=KTk1lpNksUg" } ] }
194
2,577
<filename>engine/src/main/java/org/camunda/bpm/engine/impl/dmn/result/SingleResultDecisionResultMapper.java /* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.impl.dmn.result; import org.camunda.bpm.dmn.engine.DmnDecisionResult; import org.camunda.bpm.dmn.engine.DmnDecisionResultEntries; import org.camunda.bpm.dmn.engine.DmnEngineException; import org.camunda.bpm.engine.impl.ProcessEngineLogger; import org.camunda.bpm.engine.impl.dmn.DecisionLogger; import org.camunda.bpm.engine.variable.Variables; /** * Maps the decision result to pairs of output name and untyped entries. * * @author <NAME> */ public class SingleResultDecisionResultMapper implements DecisionResultMapper { protected static final DecisionLogger LOG = ProcessEngineLogger.DECISION_LOGGER; @Override public Object mapDecisionResult(DmnDecisionResult decisionResult) { try { DmnDecisionResultEntries singleResult = decisionResult.getSingleResult(); if (singleResult != null) { return singleResult.getEntryMap(); } else return Variables.untypedNullValue(); } catch (DmnEngineException e) { throw LOG.decisionResultMappingException(decisionResult, this, e); } } @Override public String toString() { return "SingleResultDecisionResultMapper{}"; } }
628
488
// Covariant-return override scenario expressed through function-pointer
// typedefs: both A::X and B::Z resolve to `int (*)(int)`, so B::start is a
// valid override of A::start even though it is spelled with a different
// typedef name. C::start overrides using the inherited typedef directly.
struct A {
  typedef int I;
  typedef I (*X) (I);   // X = int (*)(int)
  virtual X start();
};

struct B : A {
  typedef int J;
  typedef J (*Z) (J);   // Z = int (*)(int), same underlying type as A::X
  Z start() override;   // override valid: return types are identical after typedef resolution
};

struct C : A {
  X start() override;   // overrides with the inherited typedef
};

// NOTE(review): foo is declared to return int but falls off the end without a
// return statement (undefined behavior if the result is used). This looks
// like a deliberate compiler test fixture exercising virtual dispatch on
// typedef'd covariant returns — confirm before "fixing" the missing return.
int foo(A& a) {
  a.start();
}
103
679
/**************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *************************************************************/

#ifndef _ADRPARSE_HXX
#define _ADRPARSE_HXX

#include "svl/svldllapi.h"
#include <tools/list.hxx>
#include <tools/string.hxx>

//============================================================================
// One parsed e-mail address: the bare addr-spec plus an optional display name.
struct SvAddressEntry_Impl
{
    UniString m_aAddrSpec;   // RFC 822 addr-spec, e.g. "user@example.org"
    UniString m_aRealName;   // display name ("phrase"), may be empty

    SvAddressEntry_Impl() {};
    SvAddressEntry_Impl(UniString const & rTheAddrSpec,
                        UniString const & rTheRealName):
        m_aAddrSpec(rTheAddrSpec),
        m_aRealName(rTheRealName)
    {}
};

//============================================================================
// Pointer list of additional entries beyond the first (see SvAddressParser).
DECLARE_LIST(SvAddressList_Impl, SvAddressEntry_Impl *)

//============================================================================
// Parses a string containing one or more e-mail addresses.  The first entry
// is stored by value in m_aFirst (flag m_bHasFirst says whether it is valid),
// remaining entries live in the heap-allocated list m_aRest.
class SVL_DLLPUBLIC SvAddressParser
{
    friend class SvAddressParser_Impl;   // parser implementation fills the members

    SvAddressEntry_Impl m_aFirst;        // first parsed address (if any)
    SvAddressList_Impl m_aRest;          // addresses 2..n
    bool m_bHasFirst;                    // true once at least one address was parsed

public:
    SvAddressParser(UniString const & rInput);

    ~SvAddressParser();

    // Total number of parsed addresses (0 when the input held none).
    sal_Int32 Count() const { return m_bHasFirst ? m_aRest.Count() + 1 : 0; }

    // Accessors are index-based over [0, Count()); index 0 is m_aFirst.
    inline UniString const & GetEmailAddress(sal_Int32 nIndex) const;

    inline UniString const &GetRealName(sal_Int32 nIndex) const;

    /** Create an RFC 822 <mailbox> (i.e., 'e-mail address').

        @param rPhrase  Either an empty string (the <mailbox> will have no
        <phrase> an will be of the form <addr-spec>), or some text that will
        become the <phrase> part of a <phrase route-addr> form <mailbox>.
        Non US-ASCII characters within the text are put into a
        <qouted-string> verbatim, so the result may actually not be a valid
        RFC 822 <mailbox>, but a more human-readable representation.

        @param rAddrSpec  A valid RFC 822 <addr-spec>.  (An RFC 822 <mailbox>
        including a <route> cannot be created by this method.)

        @param rMailbox  If this method returns true, this parameter returns
        the created RFC 822 <mailbox> (rather, a more human-readable
        representation thereof).  Otherwise, this parameter is not modified.

        @return  True, if rAddrSpec is a valid RFC 822 <addr-spec>.
     */
    static bool createRFC822Mailbox(String const & rPhrase,
                                    String const & rAddrSpec,
                                    String & rMailbox);
};

// NOTE(review): no bounds check — nIndex must be in [0, Count()); an
// out-of-range index dereferences a null list entry.
inline UniString const & SvAddressParser::GetEmailAddress(sal_Int32 nIndex)
    const
{
    return nIndex == 0 ? m_aFirst.m_aAddrSpec :
                         m_aRest.GetObject(nIndex - 1)->m_aAddrSpec;
}

// Same indexing contract as GetEmailAddress.
inline UniString const & SvAddressParser::GetRealName(sal_Int32 nIndex) const
{
    return nIndex == 0 ? m_aFirst.m_aRealName :
                         m_aRest.GetObject(nIndex - 1)->m_aRealName;
}

#endif // _ADRPARSE_HXX
1,090
416
// This file is distributed under the MIT license.
// See the LICENSE file for details.

#include <visionaray/math/vector.h>

#include <visionaray/get_area.h>
#include <visionaray/get_surface.h>
#include <visionaray/result_record.h>
#include <visionaray/sampling.h>
#include <visionaray/spectrum.h>
#include <visionaray/surface_interaction.h>
#include <visionaray/traverse.h>

namespace visionaray
{
namespace pathtracing
{

// Path-tracing kernel: iteratively traces up to params.num_bounces bounces,
// accumulating radiance with next-event estimation (light sampling) combined
// with BRDF sampling via multiple importance sampling (power heuristic).
// All per-ray state is kept in SIMD-friendly types; `select(mask, a, b)` and
// the `active_rays` mask implement per-lane control flow.
template <typename Params>
struct kernel
{
    Params params;

    template <typename Intersector, typename R, typename Generator>
    VSNRAY_FUNC result_record<typename R::scalar_type> operator()(
            Intersector& isect,
            R ray,
            Generator& gen
            ) const
    {
        using S = typename R::scalar_type;
        using I = simd::int_type_t<S>;
        using V = vector<3, S>;
        using C = spectrum<S>;

        // Per-lane liveness; lanes die when they leave the scene, hit an
        // emitter, or are killed by Russian roulette.
        simd::mask_type_t<S> active_rays = true;
        simd::mask_type_t<S> last_specular = true;

        C intensity(0.0);    // accumulated radiance
        C throughput(1.0);   // product of BRDF * cos / pdf along the path

        result_record<S> result;
        result.color = vector<4, S>(params.background.intensity(ray.dir), S(1.0));

        for (unsigned bounce = 0; bounce < params.num_bounces; ++bounce)
        {
            auto hit_rec = closest_hit(ray, params.prims.begin, params.prims.end, isect);

            // Handle rays that just exited: add ambient/environment light
            // weighted by the path throughput.
            auto exited = active_rays & !hit_rec.hit;
            auto env = params.amb_light.intensity(ray.dir);
            intensity += select(
                    exited,
                    from_rgb(env) * throughput,
                    C(0.0)
                    );

            // Exit if no ray is active anymore
            active_rays &= hit_rec.hit;

            if (!any(active_rays))
            {
                break;
            }

            // Special handling for first bounce: record primary-hit info.
            if (bounce == 0)
            {
                result.hit = hit_rec.hit;
                result.depth = hit_rec.t;
            }

            // Process the current bounce
            V refl_dir(0.0);
            V view_dir = -ray.dir;

            hit_rec.isect_pos = ray.ori + ray.dir * hit_rec.t;

            auto surf = get_surface(hit_rec, params);

            S brdf_pdf(0.0);

            // Remember the last type of surface interaction.
            // If the last interaction was not diffuse, we have
            // to include light from emissive surfaces.
            I inter = 0;
            // Sample the BRDF: fills refl_dir, brdf_pdf and inter.
            auto src = surf.sample(view_dir, refl_dir, brdf_pdf, inter, gen);

            auto zero_pdf = brdf_pdf <= S(0.0);

            S light_pdf(0.0);

            auto num_lights = params.lights.end - params.lights.begin;

            // If we hit an emitter, compute the pdf of having sampled this
            // light via NEE (solid-angle measure) for the MIS weight below.
            if (num_lights > 0 && any(inter == surface_interaction::Emission))
            {
                auto A = get_area(params.prims.begin, hit_rec);
                auto ld = length(hit_rec.isect_pos - ray.ori);
                auto L = normalize(hit_rec.isect_pos - ray.ori);
                auto n = surf.geometric_normal;
                auto ldotln = abs(dot(-L, n));
                auto solid_angle = (ldotln * A) / (ld * ld);
                light_pdf = select(
                        inter == surface_interaction::Emission,
                        S(1.0) / solid_angle,
                        S(0.0)
                        );
            }

            // First bounce (or after a specular event) takes the emitted
            // light unweighted; otherwise balance against light sampling.
            S mis_weight = select(
                    bounce > 0 && num_lights > 0 && !last_specular,
                    power_heuristic(brdf_pdf, light_pdf / static_cast<float>(num_lights)),
                    S(1.0)
                    );

            intensity += select(
                    active_rays && inter == surface_interaction::Emission,
                    mis_weight * throughput * src,
                    C(0.0)
                    );

            // Emissive hits and zero-pdf samples terminate their lanes.
            active_rays &= inter != surface_interaction::Emission;
            active_rays &= !zero_pdf;

            auto n = surf.shading_normal;

#if 1
            // Make the shading normal face the viewer to avoid black spots
            // from interpolated normals pointing away.
            n = faceforward( n, view_dir, surf.geometric_normal );
#endif

            // Next-event estimation: sample one light and cast a shadow ray.
            if (num_lights > 0)
            {
                auto ls = sample_random_light(params.lights.begin, params.lights.end, gen);

                auto ld = length(ls.pos - hit_rec.isect_pos);
                auto L = normalize(ls.pos - hit_rec.isect_pos);
                auto ln = select(ls.delta_light, -L, ls.normal);
#if 1
                ln = faceforward( ln, -L, ln );
#endif
                auto ldotn = dot(L, n);
                auto ldotln = abs(dot(-L, ln));

                R shadow_ray(
                        hit_rec.isect_pos + L * S(params.epsilon), // origin
                        L,                                         // direction
                        S(params.epsilon),                         // tmin
                        ld - S(params.epsilon)                     // tmax
                        );

                auto lhr = any_hit(shadow_ray, params.prims.begin, params.prims.end, isect);

                // NOTE(review): this brdf_pdf intentionally shadows the outer
                // one — it is the pdf of the *light* direction under the BRDF,
                // used for the MIS weight of this NEE sample.
                auto brdf_pdf = surf.pdf(view_dir, L, inter);
                auto prob = max_element(throughput.samples());
                brdf_pdf *= prob;

                // TODO: inv_pi / dot(n, wi) factor only valid for plastic and matte
                auto src = surf.shade(view_dir, L, ls.intensity) * constants::inv_pi<S>() / ldotn;

                auto solid_angle = (ldotln * ls.area);
                solid_angle = select(!ls.delta_light, solid_angle / (ld * ld), solid_angle);
                auto light_pdf = S(1.0) / solid_angle;

                S mis_weight = power_heuristic(light_pdf / static_cast<float>(num_lights), brdf_pdf);

                // Contribution only for unoccluded lanes facing the light.
                intensity += select(
                        active_rays && !lhr.hit && ldotn > S(0.0) && ldotln > S(0.0),
                        mis_weight * throughput * src * (ldotn / light_pdf) * S(static_cast<float>(num_lights)),
                        C(0.0)
                        );
            }

            // Standard path-tracing throughput update (BRDF-sampled bounce).
            throughput *= src * (dot(n, refl_dir) / brdf_pdf);
            throughput = select(zero_pdf, C(0.0), throughput);

            if (bounce >= 2)
            {
                // Russian roulette: kill dim paths probabilistically and
                // reweight survivors to keep the estimator unbiased.
                auto prob = max_element(throughput.samples());
                auto terminate = gen.next() > prob;
                active_rays &= !terminate;
                throughput /= prob;

                if (!any(active_rays))
                {
                    break;
                }
            }

            // Continue the path along the BRDF-sampled direction, offset by
            // epsilon to avoid self-intersection.
            ray.ori = hit_rec.isect_pos + refl_dir * S(params.epsilon);
            ray.dir = refl_dir;

            last_specular = inter == surface_interaction::SpecularReflection
                || inter == surface_interaction::SpecularTransmission;
        }

        result.color = select(
                result.hit,
                to_rgba(intensity),
                result.color
                );

        return result;
    }

    // Convenience overload using the default intersector.
    template <typename R, typename Generator>
    VSNRAY_FUNC result_record<typename R::scalar_type> operator()(
            R ray,
            Generator& gen
            ) const
    {
        default_intersector ignore;
        return (*this)(ignore, ray, gen);
    }
};

} // pathtracing
} // visionaray
3,763
5,169
<gh_stars>1000+ { "name": "BYConnect", "version": "0.21.1", "summary": "BYConnect is an iOS framework that helps you integrate with Banyan quickly and securely.", "description": "BYConnect is a universal iOS framework that was built using Swift and supports devices and simulators.\nThe BYConnect framework manages bank authentication, credit card authentication and securely sending data to the Banyan cloud.", "homepage": "https://github.com/getBanyan/by_mobile_ios_framework_connect_distribution", "license": { "type": "Apache License, Version 2.0", "text": " Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n http://www.apache.org/licenses/LICENSE-2.0\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n" }, "authors": { "Shawn": "<EMAIL>" }, "platforms": { "ios": "13.0" }, "dependencies": { "VGSCollectSDK": [ "1.7.5" ], "VGSCollectSDK/CardScan": [ "1.7.5" ], "AWSLambda": [ "2.23.2" ] }, "swift_versions": "5", "user_target_xcconfig": { "EXCLUDED_ARCHS[sdk=iphonesimulator*]": "arm64" }, "pod_target_xcconfig": { "EXCLUDED_ARCHS[sdk=iphonesimulator*]": "arm64" }, "ios": { "vendored_frameworks": "BYConnect.framework" }, "source": { "git": "https://github.com/getBanyan/by_mobile_ios_framework_connect_distribution.git", "tag": "0.21.1" }, "exclude_files": "Classes/Exclude", "swift_version": "5" }
682
696
/* * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.math.impl.random; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import java.util.List; import org.junit.jupiter.api.Test; import com.opengamma.strata.math.impl.cern.MersenneTwister64; /** * Test {@link NormalRandomNumberGenerator}. */ public class NormalRandomNumberGeneratorTest { private static final NormalRandomNumberGenerator GENERATOR = new NormalRandomNumberGenerator(0, 1); @Test public void test_array() { double[] result = GENERATOR.getVector(10); assertThat(result.length).isEqualTo(10); } @Test public void test_list() { List<double[]> result = GENERATOR.getVectors(10, 50); assertThat(result).hasSize(50); for (double[] d : result) { assertThat(d.length).isEqualTo(10); } } @Test public void test_invalid() { assertThatIllegalArgumentException() .isThrownBy(() -> new NormalRandomNumberGenerator(0, -1)); assertThatIllegalArgumentException() .isThrownBy(() -> new NormalRandomNumberGenerator(0, -1, new MersenneTwister64())); assertThatIllegalArgumentException() .isThrownBy(() -> new NormalRandomNumberGenerator(0, 1, null)); assertThatIllegalArgumentException() .isThrownBy(() -> GENERATOR.getVectors(-1, 4)); assertThatIllegalArgumentException() .isThrownBy(() -> GENERATOR.getVectors(1, -5)); } }
577
893
package io.lacuna.bifurcan.utils;

import static io.lacuna.bifurcan.utils.Bits.branchingBit;
import static io.lacuna.bifurcan.utils.Bits.maskAbove;
import static io.lacuna.bifurcan.utils.Bits.maskBelow;

/**
 * Static methods which implement bit-range operations over a bit-vector stored within a long[].
 *
 * @author ztellman
 */
public final class BitVector {

  /**
   * @param length the bit length of the vector
   * @return a bit vector which can hold the specified number of bits
   */
  public static long[] create(int length) {
    // one long per 64 bits, always at least one word
    return new long[(Math.max(0, length - 1) >> 6) + 1];
  }

  /**
   * @param vector the bit vector
   * @return a copy of the vector
   */
  public static long[] clone(long[] vector) {
    long[] nVector = new long[vector.length];
    System.arraycopy(vector, 0, nVector, 0, vector.length);
    return nVector;
  }

  /**
   * @param vector the bit vector
   * @param bitIndex the bit to be tested
   * @return true if the bit is 1, false otherwise
   */
  public static boolean test(long[] vector, int bitIndex) {
    return (vector[bitIndex >> 6] & (1L << (bitIndex & 63))) != 0;
  }

  /**
   * @param bitLen the number of significant bits in each vector
   * @param vectors a list of bit-vectors
   * @return the bit-wise interleaving of the values, starting with the topmost bit of the 0th vector, followed by the
   * topmost bit of the 1st vector, and downward from there
   */
  public static long[] interleave(int bitLen, long[] vectors) {
    long[] interleaved = create(bitLen * vectors.length);
    // write from the highest bit position downward
    int offset = (interleaved.length << 6) - 1;
    for (int i = 0; i < bitLen; i++) {
      long mask = 1L << i;
      for (int j = vectors.length - 1; j >= 0; j--) {
        long val = (vectors[j] & mask) >>> i;
        interleaved[offset >> 6] |= val << (63 - (offset & 63));
        offset--;
      }
    }
    return interleaved;
  }

  /**
   * Returns the index of the first bit at which two bit ranges diverge, scanning word by word.
   *
   * @param a the first bit vector
   * @param b the second bit vector
   * @param aIdx the word index into {@code a} at which the range begins
   * @param bIdx the word index into {@code b} at which the range begins
   * @param bitOffset the bit offset within the first word at which comparison starts
   * @param bitLen the number of bits to compare
   * @return the offset of the first differing bit, or -1 if the ranges are identical
   */
  public static int branchingBit(long[] a, long[] b, int aIdx, int bIdx, int bitOffset, int bitLen) {
    // compare the remainder of the first word above bitOffset
    long mask = maskAbove(bitOffset & 63);
    int branch = Bits.branchingBit(a[aIdx] & mask, b[bIdx] & mask);

    if (branch >= 0) {
      return branch;
    }

    aIdx++;
    bIdx++;
    int branchIdx = 64 - bitOffset;
    int len = ((bitLen - (bitOffset + 1)) >> 6) + 1;
    for (int i = 0; i < len; i++) {
      // FIX: was b[bIdx + 1], which compared every word of `a` against the
      // same (second) word of `b`, missing divergences beyond it
      branch = Bits.branchingBit(a[aIdx + i], b[bIdx + i]);
      if (branch >= 0) {
        return branchIdx + branch;
      } else {
        branchIdx += 64;
      }
    }

    return branchIdx > bitLen ? -1 : branchIdx;
  }

  /**
   * Reads a bit range from the vector, which cannot be longer than 64 bits.
   *
   * @param vector the bit vector
   * @param offset the bit offset
   * @param len the bit length
   * @return a number representing the bit range
   */
  public static long get(long[] vector, int offset, int len) {
    int idx = offset >> 6;
    int bitIdx = offset & 63;

    // bits available in the first word
    int truncatedLen = Math.min(len, 64 - bitIdx);
    long val = (vector[idx] >>> bitIdx) & maskBelow(truncatedLen);

    // range straddles a word boundary: splice in the low bits of the next word
    if (len != truncatedLen) {
      val |= (vector[idx + 1] & maskBelow(len - truncatedLen)) << truncatedLen;
    }

    return val;
  }

  /**
   * Overwrites a bit range within the vector.
   *
   * @param vector the bit vector
   * @param val the rowValue to set
   * @param offset the offset of the write
   * @param len the bit length of the rowValue
   */
  public static void overwrite(long[] vector, long val, int offset, int len) {

    int idx = offset >> 6;
    int bitIdx = offset & 63;

    int truncatedValLen = Math.min(len, 64 - bitIdx);
    // clear then set the target bits in the first word
    vector[idx] &= ~(maskBelow(truncatedValLen) << bitIdx);
    vector[idx] |= val << bitIdx;

    // spill the remaining high bits into the next word
    if (len != truncatedValLen) {
      long mask = maskBelow(len - truncatedValLen);
      vector[idx + 1] &= ~mask;
      vector[idx + 1] |= (val >>> truncatedValLen);
    }
  }

  /**
   * Sets or clears a single bit in place.
   *
   * @param vector the bit vector
   * @param bitIdx the bit to modify
   * @param flag true to set the bit, false to clear it
   */
  public static void overwrite(long[] vector, int bitIdx, boolean flag) {
    long mask = (1L << (bitIdx & 63));
    if (flag) {
      vector[bitIdx >> 6] |= mask;
    } else {
      vector[bitIdx >> 6] &= ~mask;
    }
  }

  /**
   * Copies a bit range from one vector to another.
   *
   * @param src the source vector
   * @param srcOffset the bit offset within src
   * @param dst the destination vector
   * @param dstOffset the bit offset within dst
   * @param len the length of the bit range
   */
  public static void copy(long[] src, int srcOffset, long[] dst, int dstOffset, int len) {

    int srcLimit = srcOffset + len;

    while (srcOffset < srcLimit) {
      int srcIdx = srcOffset & 63;
      int dstIdx = dstOffset & 63;

      // copy the largest chunk that fits in both the current src and dst word
      int srcRemainder = 64 - srcIdx;
      int dstRemainder = 64 - dstIdx;

      int chunkLen = Math.min(srcRemainder, dstRemainder);
      long mask = maskBelow(chunkLen) << srcIdx;

      // NOTE: the shift distance is masked to 6 bits by the JLS, so
      // `<< dstOffset` is equivalent to `<< dstIdx`
      dst[dstOffset >> 6] |= ((src[srcOffset >> 6] & mask) >>> srcIdx) << dstOffset;

      srcOffset += chunkLen;
      dstOffset += chunkLen;
    }
  }

  /**
   * Returns a copy of the vector, with an empty bit range inserted at the specified location.
   *
   * @param vector the bit vector
   * @param vectorLen the length of the bit vector
   * @param offset the offset within the bit vector
   * @param len the length of the empty bit range
   * @return an updated copy of the vector
   */
  public static long[] interpose(long[] vector, int vectorLen, int offset, int len) {
    long[] updated = create(vectorLen + len);

    int idx = offset >> 6;
    // copy the whole words preceding the insertion point
    System.arraycopy(vector, 0, updated, 0, idx);

    if (idx < vector.length) {
      // copy the partial word below the insertion point
      int delta = offset & 63;
      updated[idx] |= vector[idx] & maskBelow(delta);
    }

    // shift everything at or above the insertion point up by len bits
    copy(vector, offset, updated, offset + len, vectorLen - offset);

    return updated;
  }

  /**
   * @param vector the bit vector
   * @param vectorLen the length of the bit vector
   * @param val the rowValue to be inserted
   * @param offset the offset within the bit vector
   * @param len the bit length of the rowValue
   * @return an updated copy of the vector
   */
  public static long[] insert(long[] vector, int vectorLen, long val, int offset, int len) {
    long[] updated = interpose(vector, vectorLen, offset, len);
    overwrite(updated, val, offset, len);
    return updated;
  }

  /**
   * Returns a copy of the vector, with a bit range excised from the specified location.
   *
   * @param vector the bit vector
   * @param vectorLen the length of the bit vector
   * @param offset the offset within the bit vector
   * @param len the length of the excised bit range
   * @return an updated copy of the vector
   */
  public static long[] remove(long[] vector, int vectorLen, int offset, int len) {
    long[] updated = create(vectorLen - len);

    int idx = offset >> 6;
    System.arraycopy(vector, 0, updated, 0, idx);

    if (idx < updated.length) {
      // keep the bits below the excised range in the boundary word
      int delta = offset & 63;
      updated[idx] |= vector[idx] & maskBelow(delta);
    }

    // shift everything above the excised range down by len bits
    copy(vector, offset + len, updated, offset, vectorLen - (offset + len));

    return updated;
  }
}
2,588
471
/////////////////////////////////////////////////////////////////////////////
// Name:        wx/xrc/xh_html.h
// Purpose:     XML resource handler for wxHtmlWindow
// Author:      <NAME>
// Created:     2000/03/21
// Copyright:   (c) 2000 <NAME> and Verant Interactive
// Licence:     wxWindows licence
/////////////////////////////////////////////////////////////////////////////

#ifndef _WX_XH_HTML_H_
#define _WX_XH_HTML_H_

#include "wx/xrc/xmlres.h"

// Handler is only compiled in when both XRC and the HTML library are enabled.
#if wxUSE_XRC && wxUSE_HTML

// Instantiates wxHtmlWindow objects from XRC resource nodes.
class WXDLLIMPEXP_XRC wxHtmlWindowXmlHandler : public wxXmlResourceHandler
{
    DECLARE_DYNAMIC_CLASS(wxHtmlWindowXmlHandler)

public:
    wxHtmlWindowXmlHandler();
    // Creates the wxHtmlWindow described by the current XRC node.
    virtual wxObject *DoCreateResource();
    // Returns true if this handler recognises the given XRC node.
    virtual bool CanHandle(wxXmlNode *node);
};

#endif // wxUSE_XRC && wxUSE_HTML

#endif // _WX_XH_HTML_H_
353
335
<reponame>Safal08/Hacktoberfest-1<filename>M/Mentor_verb.json<gh_stars>100-1000 { "word": "Mentor", "definitions": [ "Advise or train (someone, especially a younger colleague)" ], "parts-of-speech": "Verb" }
102
5,169
<filename>Specs/e/6/8/DHYunRuiSDK/0.1.3/DHYunRuiSDK.podspec.json { "name": "DHYunRuiSDK", "version": "0.1.3", "summary": "A short description of DHYunRuiSDK.", "description": "TODO: Add long description of the pod here.", "homepage": "https://github.com/goodxianping/DHYunRuiSDK.git", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "ganqixian": "<EMAIL>" }, "source": { "git": "https://github.com/goodxianping/DHYunRuiSDK.git", "tag": "0.1.3" }, "platforms": { "ios": "9.0" }, "vendored_frameworks": "DHYunRuiSDK/Depend/sdk_output/Framework/*.framework", "frameworks": [ "VideoToolbox", "AudioToolbox", "CoreMedia" ], "info_plist": { "CFBundleIdentifier": "com.gxlj.DHYunRuiSDK" }, "pod_target_xcconfig": { "PRODUCT_BUNDLE_IDENTIFIER": "com.gxlj.DHYunRuiSDK", "ENABLE_BITCODE": "false", "CLANG_CXX_LIBRARY": "libstdc++", "CLANG_WARN_DOCUMENTATION_COMMENTS": "NO", "OTHER_LDFLAGS": "-ObjC -all_load", "VALID_ARCHS[sdk=iphonesimulator*]": "" }, "resources": "DHYunRuiSDK/Depend/sdk_output/Bundle/*.bundle", "dependencies": { "AFNetworking": [ ] } }
570
501
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sshd.common.auth; import java.io.IOException; import java.util.Collection; import org.apache.sshd.common.CommonModuleProperties; import org.apache.sshd.common.NamedResource; import org.apache.sshd.common.session.SessionContext; /** * Represents a user authentication method * * @param <S> The type of {@link SessionContext} being provided to the instance creator * @param <M> The authentication method factory type * @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a> */ public interface UserAuthMethodFactory<S extends SessionContext, M extends UserAuthInstance<S>> extends NamedResource { /** * Password authentication method name */ String PASSWORD = "password"; /** * Public key authentication method name */ String PUBLIC_KEY = "publickey"; /** * Keyboard interactive authentication method */ String KB_INTERACTIVE = "keyboard-interactive"; /** * Host-based authentication method */ String HOST_BASED = "hostbased"; /** * @param session The session for which authentication is required * @return The authenticator instance * @throws IOException If failed to create the instance */ M createUserAuth(S session) throws IOException; /** * @param <S> The type of {@link 
SessionContext} being provided to the instance creator * @param <M> The authentication method factory type * @param session The session through which the request is being made * @param factories The available factories * @param name The requested factory name * @return The created authenticator instance - {@code null} if no matching factory * @throws IOException If failed to create the instance */ static <S extends SessionContext, M extends UserAuthInstance<S>> M createUserAuth( S session, Collection<? extends UserAuthMethodFactory<S, M>> factories, String name) throws IOException { UserAuthMethodFactory<S, M> f = NamedResource.findByName(name, String.CASE_INSENSITIVE_ORDER, factories); if (f != null) { return f.createUserAuth(session); } else { return null; } } /** * According to <A HREF="https://tools.ietf.org/html/rfc4252#section-8">RFC 4252 - section 8</A>: * * <PRE> * Both the server and the client should check whether the underlying * transport layer provides confidentiality (i.e., if encryption is * being used). If no confidentiality is provided ("none" cipher), * password authentication SHOULD be disabled. If there is no * confidentiality or no MAC, password change SHOULD be disabled. * </PRE> * * @param session The {@link SessionContext} being used for authentication * @return {@code true} if the context is not {@code null} and the ciphers have been established to anything * other than &quot;none&quot;. 
* @see CommonModuleProperties#ALLOW_INSECURE_AUTH * @see SessionContext#isSecureSessionTransport(SessionContext) */ static boolean isSecureAuthenticationTransport(SessionContext session) { if (session == null) { return false; } boolean allowInsecure = CommonModuleProperties.ALLOW_INSECURE_AUTH.getRequired(session); if (allowInsecure) { return true; } return SessionContext.isSecureSessionTransport(session); } /** * @param session The {@link SessionContext} being used for authentication * @return {@code true} if the context is not {@code null} and the MAC(s) used to verify packet integrity * have been established. * @see CommonModuleProperties#ALLOW_NON_INTEGRITY_AUTH * @see SessionContext#isDataIntegrityTransport(SessionContext) */ static boolean isDataIntegrityAuthenticationTransport(SessionContext session) { if (session == null) { return false; } boolean allowNonValidated = CommonModuleProperties.ALLOW_NON_INTEGRITY_AUTH.getRequired(session); if (allowNonValidated) { return true; } return SessionContext.isDataIntegrityTransport(session); } }
1,828
1,826
package com.vladsch.flexmark.ext.admonition;

import com.vladsch.flexmark.ext.admonition.internal.AdmonitionBlockParser;
import com.vladsch.flexmark.ext.admonition.internal.AdmonitionNodeFormatter;
import com.vladsch.flexmark.ext.admonition.internal.AdmonitionNodeRenderer;
import com.vladsch.flexmark.formatter.Formatter;
import com.vladsch.flexmark.html.HtmlRenderer;
import com.vladsch.flexmark.parser.Parser;
import com.vladsch.flexmark.util.data.DataKey;
import com.vladsch.flexmark.util.data.MutableDataHolder;
import org.jetbrains.annotations.NotNull;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

/**
 * Extension for admonitions
 * <p>
 * Create it with {@link #create()} and then configure it on the builders
 * <p>
 * The parsed admonition text is turned into {@link AdmonitionBlock} nodes.
 */
public class AdmonitionExtension implements
        Parser.ParserExtension,
        HtmlRenderer.HtmlRendererExtension,
        Formatter.FormatterExtension
        // , Parser.ReferenceHoldingExtension
{
    // Parser behavior options.
    final public static DataKey<Integer> CONTENT_INDENT = new DataKey<>("ADMONITION.CONTENT_INDENT", 4);
    final public static DataKey<Boolean> ALLOW_LEADING_SPACE = new DataKey<>("ADMONITION.ALLOW_LEADING_SPACE", true);
    final public static DataKey<Boolean> INTERRUPTS_PARAGRAPH = new DataKey<>("ADMONITION.INTERRUPTS_PARAGRAPH", true);
    final public static DataKey<Boolean> INTERRUPTS_ITEM_PARAGRAPH = new DataKey<>("ADMONITION.INTERRUPTS_ITEM_PARAGRAPH", true);
    final public static DataKey<Boolean> WITH_SPACES_INTERRUPTS_ITEM_PARAGRAPH = new DataKey<>("ADMONITION.WITH_SPACES_INTERRUPTS_ITEM_PARAGRAPH", true);
    final public static DataKey<Boolean> ALLOW_LAZY_CONTINUATION = new DataKey<>("ADMONITION.ALLOW_LAZY_CONTINUATION", true);

    // Qualifier used when an admonition qualifier is not found in QUALIFIER_TYPE_MAP.
    final public static DataKey<String> UNRESOLVED_QUALIFIER = new DataKey<>("ADMONITION.UNRESOLVED_QUALIFIER", "note");

    // Rendering lookup tables: qualifier -> type, qualifier -> title, type -> embedded SVG.
    final public static DataKey<Map<String, String>> QUALIFIER_TYPE_MAP = new DataKey<>("ADMONITION.QUALIFIER_TYPE_MAP", AdmonitionExtension::getQualifierTypeMap);
    final public static DataKey<Map<String, String>> QUALIFIER_TITLE_MAP = new DataKey<>("ADMONITION.QUALIFIER_TITLE_MAP", AdmonitionExtension::getQualifierTitleMap);
    final public static DataKey<Map<String, String>> TYPE_SVG_MAP = new DataKey<>("ADMONITION.TYPE_SVG_MAP", AdmonitionExtension::getQualifierSvgValueMap);

    /**
     * Default mapping from admonition qualifier (the word after {@code !!!}) to its rendering type.
     * Several qualifiers alias to the same type (e.g. "summary" and "tldr" render as "abstract").
     *
     * @return a mutable map of qualifier to type
     */
    public static Map<String, String> getQualifierTypeMap() {
        HashMap<String, String> qualifierTypeMap = new HashMap<>();

        // qualifier type map
        qualifierTypeMap.put("abstract", "abstract");
        qualifierTypeMap.put("summary", "abstract");
        qualifierTypeMap.put("tldr", "abstract");
        qualifierTypeMap.put("bug", "bug");
        qualifierTypeMap.put("danger", "danger");
        qualifierTypeMap.put("error", "danger");
        qualifierTypeMap.put("example", "example");
        qualifierTypeMap.put("snippet", "example");
        qualifierTypeMap.put("fail", "fail");
        qualifierTypeMap.put("failure", "fail");
        qualifierTypeMap.put("missing", "fail");
        qualifierTypeMap.put("faq", "faq");
        qualifierTypeMap.put("question", "faq");
        qualifierTypeMap.put("help", "faq");
        qualifierTypeMap.put("info", "info");
        qualifierTypeMap.put("todo", "info");
        qualifierTypeMap.put("note", "note");
        qualifierTypeMap.put("seealso", "note");
        qualifierTypeMap.put("quote", "quote");
        qualifierTypeMap.put("cite", "quote");
        qualifierTypeMap.put("success", "success");
        qualifierTypeMap.put("check", "success");
        qualifierTypeMap.put("done", "success");
        qualifierTypeMap.put("tip", "tip");
        qualifierTypeMap.put("hint", "tip");
        qualifierTypeMap.put("important", "tip");
        qualifierTypeMap.put("warning", "warning");
        qualifierTypeMap.put("caution", "warning");
        qualifierTypeMap.put("attention", "warning");
        return qualifierTypeMap;
    }

    /**
     * Default mapping from admonition qualifier to its human-readable title.
     *
     * @return a mutable map of qualifier to display title
     */
    public static Map<String, String> getQualifierTitleMap() {
        HashMap<String, String> qualifierTitleMap = new HashMap<>();

        qualifierTitleMap.put("abstract", "Abstract");
        qualifierTitleMap.put("summary", "Summary");
        qualifierTitleMap.put("tldr", "TLDR");
        qualifierTitleMap.put("bug", "Bug");
        qualifierTitleMap.put("danger", "Danger");
        qualifierTitleMap.put("error", "Error");
        qualifierTitleMap.put("example", "Example");
        qualifierTitleMap.put("snippet", "Snippet");
        qualifierTitleMap.put("fail", "Fail");
        qualifierTitleMap.put("failure", "Failure");
        qualifierTitleMap.put("missing", "Missing");
        qualifierTitleMap.put("faq", "Faq");
        qualifierTitleMap.put("question", "Question");
        qualifierTitleMap.put("help", "Help");
        qualifierTitleMap.put("info", "Info");
        qualifierTitleMap.put("todo", "To Do");
        qualifierTitleMap.put("note", "Note");
        qualifierTitleMap.put("seealso", "See Also");
        qualifierTitleMap.put("quote", "Quote");
        qualifierTitleMap.put("cite", "Cite");
        qualifierTitleMap.put("success", "Success");
        qualifierTitleMap.put("check", "Check");
        qualifierTitleMap.put("done", "Done");
        qualifierTitleMap.put("tip", "Tip");
        qualifierTitleMap.put("hint", "Hint");
        qualifierTitleMap.put("important", "Important");
        qualifierTitleMap.put("warning", "Warning");
        qualifierTitleMap.put("caution", "Caution");
        qualifierTitleMap.put("attention", "Attention");
        return qualifierTitleMap;
    }

    /**
     * Default mapping from admonition type to the SVG icon markup bundled on the classpath.
     *
     * @return a mutable map of type to SVG content (empty string for a type whose resource failed to load)
     */
    public static Map<String, String> getQualifierSvgValueMap() {
        HashMap<String, String> typeSvgMap = new HashMap<>();

        typeSvgMap.put("abstract", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-abstract.svg")));
        typeSvgMap.put("bug", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-bug.svg")));
        typeSvgMap.put("danger", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-danger.svg")));
        typeSvgMap.put("example", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-example.svg")));
        typeSvgMap.put("fail", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-fail.svg")));
        typeSvgMap.put("faq", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-faq.svg")));
        typeSvgMap.put("info", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-info.svg")));
        typeSvgMap.put("note", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-note.svg")));
        typeSvgMap.put("quote", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-quote.svg")));
        typeSvgMap.put("success", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-success.svg")));
        typeSvgMap.put("tip", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-tip.svg")));
        typeSvgMap.put("warning", getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/images/adm-warning.svg")));
        return typeSvgMap;
    }

    /**
     * Reads the full contents of the given stream as text.
     * <p>
     * Fix: resources are now managed with try-with-resources so the reader/writer are closed even
     * when {@link #copy(Reader, Writer)} throws (the original leaked them on failure), and the
     * stream is decoded explicitly as UTF-8 instead of the platform default charset (the bundled
     * SVG/CSS/JS resources are UTF-8 encoded).
     *
     * @param inputStream stream to read; a {@code null} stream (missing resource) yields ""
     * @return the stream contents, or "" on any failure (best-effort, matching previous behavior)
     */
    public static String getInputStreamContent(InputStream inputStream) {
        try (InputStreamReader streamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
             StringWriter stringWriter = new StringWriter()) {
            copy(streamReader, stringWriter);
            return stringWriter.toString();
        } catch (Exception e) {
            // Best-effort: keep the original contract of returning "" rather than propagating.
            e.printStackTrace();
            return "";
        }
    }

    /** @return the default admonition stylesheet bundled on the classpath */
    public static String getDefaultCSS() {
        return getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/admonition.css"));
    }

    /** @return the default admonition script bundled on the classpath */
    public static String getDefaultScript() {
        return getInputStreamContent(AdmonitionExtension.class.getResourceAsStream("/admonition.js"));
    }

    /**
     * Copies all characters from {@code reader} to {@code writer}, then flushes the writer and
     * closes the reader (the writer is intentionally left open for the caller).
     *
     * @param reader source of characters; closed on completion
     * @param writer destination; flushed but not closed
     * @throws IOException if reading or writing fails
     */
    public static void copy(Reader reader, Writer writer) throws IOException {
        char[] buffer = new char[4096];
        int n;
        while (-1 != (n = reader.read(buffer))) {
            writer.write(buffer, 0, n);
        }
        writer.flush();
        reader.close();
    }

    // Instances are created via the create() factory only.
    private AdmonitionExtension() {
    }

    /** @return a new extension instance to register on the parser/renderer builders */
    public static AdmonitionExtension create() {
        return new AdmonitionExtension();
    }

    @Override
    public void extend(Formatter.Builder formatterBuilder) {
        formatterBuilder.nodeFormatterFactory(new AdmonitionNodeFormatter.Factory());
    }

    @Override
    public void rendererOptions(@NotNull MutableDataHolder options) {
    }

    @Override
    public void parserOptions(MutableDataHolder options) {
    }

    @Override
    public void extend(Parser.Builder parserBuilder) {
        parserBuilder.customBlockParserFactory(new AdmonitionBlockParser.Factory());
    }

    @Override
    public void extend(@NotNull HtmlRenderer.Builder htmlRendererBuilder, @NotNull String rendererType) {
        if (htmlRendererBuilder.isRendererType("HTML")) {
            htmlRendererBuilder.nodeRendererFactory(new AdmonitionNodeRenderer.Factory());
        } else if (htmlRendererBuilder.isRendererType("JIRA")) {
            // JIRA rendering is intentionally not supported for admonitions.
        }
    }
}
3,585
635
<gh_stars>100-1000 from __future__ import absolute_import import unittest import numpy as np from tests.sample_data import SampleData from pyti import commodity_channel_index class TestCommodityChannelIndex(unittest.TestCase): def setUp(self): """Create data to use for testing.""" self.close_data = SampleData().get_sample_close_data() self.high_data = SampleData().get_sample_high_data() self.low_data = SampleData().get_sample_low_data() self.cci_period_6_expected = [np.nan, np.nan, np.nan, np.nan, np.nan, 17.178843724328658, 14.805329132651183, 17.263942454817897, 4.6415807564446148, -4.3363353101251887, 10.942586757419953, 13.353100797135502, 3.4871979776397621, -18.425725123230592, -10.317296085567687, -50.265600132859966, -49.431262579588712, -76.685056004832092, -48.944719837880363, -70.866892800759857, -46.850551078895059, 2.8008582165234599, 48.511826295828975, 30.920438725870014, 11.908272351227666, -0.073998896077287546, 7.7513843640897191, 2.6325107279474129, -19.461709668311862, 14.387235369815095, -5.7515641976024146, -20.031501168105876, -75.615771956515729, -6.7949486322911463, 19.113914856747815, 93.344057484218069, 96.191165010789504, 48.125182063825548, -36.757101653960909, -38.973368591474291, -22.319917029294491, 34.825730466345867, 49.764257611936316, 72.609566803378897, 44.286489329819148, 38.597824193882069, 37.833785591884364, 44.782281933537391, -1.3264302121846316, -9.9510015499857385, 2.0867688693778432, -15.991161442289082, -53.203356307124913, -43.51505083821398, -34.424286455126762, -2.4419635705485963, -0.53279205175556177, 18.035380946422684, -22.353216532528833, -13.900692628107128, -17.900332961081737, -8.398874704765511, -11.702925414613542, 6.5322525512174758, 1.0896337447370843, 21.785275005136494, 18.266627496663684, -7.6607357163950702, 25.659117214779297, 21.611377599355038, -16.111409648414341, -28.043731640868348, -34.011742609496459, -23.376251270796473, -4.837677831049219, -25.718316331640143, -22.745410681737663, 
10.66879084193433, -19.720705804581041, 25.601768070318617, 38.810571020104028, 29.893704042797886, -11.738074890249942, -26.510104519666402, -31.995272691391413, -19.269312538509855, 0.8454373876824518, 0.033299503234344038, -2.8193579405428761, 3.3761996335232629, 8.752219433534588, 8.0344301415852399, 14.424234817853927, 20.453294875746455, 21.217333477743399, 3.2633513170062232, 12.766659545724504, 10.863037944136877, 9.8159535646444134, 8.754069405935887, 3.3576999095038467, -5.8625625417185345, -1.1987821164511496, -27.886483986703873, -17.110394745457157, -8.2675266642282974, -2.1071185657989364, -25.820064813747123, -88.511929570375386, -81.93712765391291, -73.503103473511004, -49.841956452816959, -21.219183450145454, -16.479554156399104, -3.4150490539637728, -10.086049535326305, -8.9890159009810002, -54.866481496461262, -41.002788316391829, -48.310179304018199, -25.059726156553154, -28.082581061309234, -40.020452970967511, -58.927170918699147, -38.47942596015848, -38.405427064082332, -8.0621797276141756] self.cci_period_8_expected = [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, 27.440178144738148, 10.854250575228061, -1.0919462102394641, 14.078752471793019, 17.751410182726513, 6.3907296624697159, -18.773982427893827, -11.201120400590323, -51.031488707259342, -54.941405378739574, -91.332212497118917, -68.60530153939915, -92.245173877471089, -72.083712148129351, -19.896453182764553, 33.141330594388151, 24.748468299928753, 11.403692378600985, 6.263081566736612, 22.818484591613924, 14.193913253813195, -20.216960901399609, 9.6707307310925419, -6.7029125052951457, -18.789244700209458, -80.683308858504219, -16.065622831466694, 14.15090139546863, 90.516374667867453, 96.028829932520324, 56.252573318607816, -13.568160088860227, -12.886907751849385, -6.3047059457802046, 36.662753061462908, 38.118218848681614, 65.015892586553605, 54.530711505510574, 62.616940874349574, 64.295790829101875, 65.597246413860276, 12.613574329463709, -1.4998651248653838, 
8.8909673636792661, -10.257634475605759, -55.199476528808866, -55.41592329983456, -47.274194758937689, -10.966636398645534, -12.766197052623422, 4.8159406553263953, -27.981295072302959, -12.664911063617524, -15.838076226029422, -10.678040703943848, -17.633474442103253, -1.5872763208566254, -3.0829790078175745, 23.105692807014258, 20.579092999077123, -5.2183096527457709, 30.714629296155568, 27.816185035430696, -10.610054218172673, -26.485592385341629, -37.794936171444611, -24.773442927354605, -10.159123445202944, -40.102314249752588, -35.172600291707369, 4.8284279690389429, -22.572900755257937, 24.207351372363942, 36.007862831178898, 32.539164577559305, -2.8415576093656463, -21.983222052142775, -27.060008816141163, -17.55161316331742, -7.2107299296252334, -12.300004007337305, -11.370392875367019, 2.1644677102588625, 11.387042626984002, 11.131746435517796, 15.505081193681818, 22.342579191217961, 26.946235513422337, 9.8733027091043404, 18.636621977050122, 16.730225416860552, 15.559192886438336, 12.820308745379942, 5.1794602323056393, -2.8762445919017208, 1.7801359437574433, -28.56264889960925, -19.974152023645949, -13.052017788721647, -7.5173628552452545, -31.715001872499354, -99.863822721722613, -94.823110419561658, -88.826424878703207, -72.294609001949638, -50.286412322381388, -47.081335136036493, -20.651241922752735, -16.182171092788224, -12.265317024800474, -59.37301426756455, -49.428950114086334, -58.897108867167454, -38.069657073130884, -44.334126119169611, -59.58252364208311, -74.986318846759403, -51.136937134168541, -51.965262277133689, -23.273577802489221] self.cci_period_10_expected = [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, 7.2770514402354678, 19.325921694009487, 21.297252285506808, 9.5946968653735993, -15.640776669363824, -9.8411131893110024, -53.109377709108003, -58.886841520337299, -96.354332576640473, -77.825378993381932, -108.58524011476446, -92.579278893259698, -39.625298865914488, 13.677215961954682, 
8.6523209238300414, 0.46619304528649513, 1.8747620321169274, 23.294112496150671, 20.217238397259955, -8.5257828115383223, 18.426835106670463, -7.0794743877083874, -23.266362910122115, -84.384271147186581, -18.193738584027784, 10.0986293476596, 86.298992583185779, 97.778441331646505, 58.849102083338174, -12.287516693623244, -7.362520165203529, 11.117594146642793, 58.073223657968228, 53.012809149726472, 69.731379740454756, 49.126757122230714, 60.167762411433493, 76.063835272263148, 88.401301220739981, 35.896864487063155, 15.297791786045272, 20.048520914203575, -3.5708167302072829, -51.796267298216577, -54.274860322324052, -51.954994930302774, -22.101990280347074, -23.637097379468845, -2.1589177930534542, -38.174550508321282, -24.337496930839404, -21.215113510861386, -10.547062657887349, -16.8739682724907, -3.9404412161122493, -7.8864323494310433, 17.331281450247634, 18.562253086492124, -3.645185620764527, 33.222914377313259, 31.536849530193852, -6.0605095887248153, -22.700271355131452, -35.437331342424208, -25.444150421674564, -14.219997864680812, -42.727702583952507, -41.572209821706267, -7.449098873614111, -32.873269593347537, 19.549028365681977, 35.562759471274163, 33.568119227513066, -4.0347898086110083, -20.702301161045447, -21.591397897413902, -15.32221142175119, -4.1924074572554382, -11.44281929490222, -18.375775868378508, -7.8087335085500236, 5.0060253196245084, 10.833438385706645, 18.36689600084819, 25.928103202019805, 29.32021259819976, 12.497303564002905, 23.968982428375444, 23.012176702096916, 21.168494206332699, 18.296967044055727, 10.264016880392207, 0.42068372419919231, 3.1823225258010499, -27.008117090267035, -19.112804873307187, -14.599612201556724, -10.394994926448327, -36.611693823169816, -108.00138882471543, -105.42733722466858, -103.34611827249614, -87.424515792519557, -66.080644202485203, -67.655710705488872, -45.552610441700793, -41.36464291821013, -26.826079805917068, -66.273781321246361, -55.37707387939367, -65.725449501314074, 
-47.672678814312974, -55.172836926220938, -72.911482299373802, -91.833370020801453, -70.379610070090848, -67.82331820510386, -35.55276962030473] def test_cci_period_6(self): period = 6 cci = commodity_channel_index.commodity_channel_index(self.close_data, self.high_data, self.low_data, period) np.testing.assert_array_equal(cci, self.cci_period_6_expected) def test_cci_period_8(self): period = 8 cci = commodity_channel_index.commodity_channel_index(self.close_data, self.high_data, self.low_data, period) np.testing.assert_array_equal(cci, self.cci_period_8_expected) def test_cci_period_10(self): period = 10 cci = commodity_channel_index.commodity_channel_index(self.close_data, self.high_data, self.low_data, period) np.testing.assert_array_equal(cci, self.cci_period_10_expected) def test_commodity_channel_index_invalid_period(self): period = 128 with self.assertRaises(Exception) as cm: commodity_channel_index.commodity_channel_index(self.close_data, self.high_data, self.low_data, period) expected = "Error: data_len < period" self.assertEqual(str(cm.exception), expected) def test_commodity_channel_index_invalid_data(self): period = 6 self.close_data.append(1) with self.assertRaises(Exception) as cm: commodity_channel_index.commodity_channel_index(self.close_data, self.high_data, self.low_data, period) expected = ("Error: mismatched data lengths, check to ensure that all input data is the same length and valid") self.assertEqual(str(cm.exception), expected)
5,591
1,233
/*
 * Copyright 2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.mantisrx.server.master;

import io.mantisrx.common.metrics.Gauge;
import io.mantisrx.common.metrics.Metrics;
import io.mantisrx.common.metrics.MetricsRegistry;
import io.mantisrx.server.core.master.MasterDescription;
import io.mantisrx.server.master.config.MasterConfiguration;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.concurrent.atomic.AtomicBoolean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * ZooKeeper-based leadership manager: tracks whether this master instance is the
 * elected leader, exposes a leadership gauge metric, and publishes this node's
 * {@link MasterDescription}.
 */
public class LeadershipManagerZkImpl implements ILeadershipManager {

    private static final Logger logger = LoggerFactory.getLogger(LeadershipManagerZkImpl.class);

    private final Gauge isLeaderGauge;
    // Guards one-time entry into leader mode; leader mode is never re-entered in-process.
    private final AtomicBoolean firstTimeLeaderMode = new AtomicBoolean(false);
    private final MasterConfiguration config;
    private final ServiceLifecycle serviceLifecycle;
    private volatile boolean isLeader = false;
    private volatile boolean isReady = false;

    public LeadershipManagerZkImpl(final MasterConfiguration config,
                                   final ServiceLifecycle serviceLifecycle) {
        this.config = config;
        this.serviceLifecycle = serviceLifecycle;
        // NOTE: gauge is registered under MasterMain's name, matching the historical metric path.
        Metrics metrics = MetricsRegistry.getInstance().registerAndGet(
                new Metrics.Builder()
                        .name(MasterMain.class.getCanonicalName())
                        .addGauge("isLeaderGauge")
                        .build());
        isLeaderGauge = metrics.getGauge("isLeaderGauge");
    }

    /** Transitions this instance into leader mode; only the first call starts the lifecycle. */
    public void becomeLeader() {
        logger.info("Becoming leader now");
        if (!firstTimeLeaderMode.compareAndSet(false, true)) {
            logger.warn("Unexpected to be told to enter leader mode more than once, ignoring.");
        } else {
            serviceLifecycle.becomeLeader();
            isLeaderGauge.set(1L);
        }
        isLeader = true;
    }

    public boolean isLeader() {
        return isLeader;
    }

    public boolean isReady() {
        return isReady;
    }

    /** Marks the leader as fully initialized and able to serve requests. */
    public void setLeaderReady() {
        logger.info("marking leader READY");
        isReady = true;
    }

    /** Relinquishes leadership; exits the process if leader mode had been entered. */
    public void stopBeingLeader() {
        logger.info("Asked to stop being leader now");
        isReady = false;
        isLeader = false;
        isLeaderGauge.set(0L);
        if (!firstTimeLeaderMode.get()) {
            logger.warn("Unexpected to be told to stop being leader when we haven't entered leader mode before, ignoring.");
            return;
        }
        // Various services may have built in-memory state that is currently not easy to revert to initialization state.
        // Until we create such a lifecycle feature for each service and all of their references, best thing to do is to
        // exit the process and depend on a watcher process to restart us right away. Especially since restart isn't
        // very expensive.
        logger.error("Exiting due to losing leadership after running as leader");
        System.exit(1);
    }

    /** @return this node's description (host, ports, status URI) with the current timestamp. */
    public MasterDescription getDescription() {
        return new MasterDescription(
                getHost(),
                getHostIP(),
                config.getApiPort(),
                config.getSchedInfoPort(),
                config.getApiPortV2(),
                config.getApiStatusUri(),
                config.getConsolePort(),
                System.currentTimeMillis());
    }

    // Configured host name wins; otherwise fall back to the local host name.
    private String getHost() {
        final String configured = config.getMasterHost();
        if (configured != null) {
            return configured;
        }
        try {
            return InetAddress.getLocalHost().getHostName();
        } catch (UnknownHostException e) {
            throw new RuntimeException("Failed to get the host information: " + e.getMessage(), e);
        }
    }

    // Configured IP wins; otherwise fall back to the local host address.
    private String getHostIP() {
        final String configured = config.getMasterIP();
        if (configured != null) {
            return configured;
        }
        try {
            return InetAddress.getLocalHost().getHostAddress();
        } catch (UnknownHostException e) {
            throw new RuntimeException("Failed to get the host information: " + e.getMessage(), e);
        }
    }
}
1,808
898
/*
 * Copyright (c) 2015 Spotify AB.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package com.spotify.heroic.filter;

import static com.spotify.heroic.filter.FilterEncoding.filter;
import static com.spotify.heroic.filter.FilterEncoding.string;

import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TreeTraversingParser;
import com.google.common.collect.ImmutableMap;
import com.spotify.heroic.grammar.QueryParser;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

/**
 * Registry of all known filters.
 * <p>
 * This is primarily used to provide serialization for Filters.
 * <p>
 * A filter can be deserialized from an object, or an array. When an object is used, the type field
 * is determined by the filters {@link com.fasterxml.jackson.annotation.JsonTypeName} annotation.
 * Otherwise it falls back to the id.
 * <p>
 * Object-based is the more universal method, but is typically more verbose. It is useful for use in
 * clients which are statically typed, since type-based deserialization is usually readily
 * supported.
 *
 * @see com.spotify.heroic.filter.Filter
 */
public class FilterRegistry {
    // operator id (e.g. "and") -> encoding used to deserialize that filter's arguments
    private final Map<String, FilterEncoding<? extends Filter>> deserializers = new HashMap<>();

    // concrete filter class -> serializer producing the array form ["op", args...]
    private final Map<Class<? extends Filter>, JsonSerializer<Filter>> serializers =
        new HashMap<>();

    // concrete filter class -> operator id (used to detect duplicate registrations)
    private final HashMap<Class<? extends Filter>, String> typeMapping = new HashMap<>();
    // JSON type name (object form) -> concrete filter class
    private final HashMap<String, Class<? extends Filter>> typeNameMapping = new HashMap<>();

    public FilterRegistry() {
    }

    /** Registers a filter taking a variable-length list of arguments (e.g. and/or). */
    public <T extends Filter> void registerList(
        String id, Class<T> type, FilterEncoding<T> s
    ) {
        registerJson(id, type, s);
        register(id, type);
    }

    /** Registers a filter taking exactly two arguments (e.g. tag = value). */
    public <T extends Filter> void registerTwo(
        String id, Class<T> type, FilterEncoding<T> s
    ) {
        registerJson(id, type, s);
        register(id, type);
    }

    /** Registers a filter taking exactly one argument (e.g. not). */
    public <T extends Filter> void registerOne(
        String id, Class<T> type, FilterEncoding<T> s
    ) {
        registerJson(id, type, s);
        register(id, type);
    }

    /** Registers a filter taking no arguments (e.g. true/false). */
    public <T extends Filter> void registerEmpty(
        String id, Class<T> type, FilterEncoding<T> s
    ) {
        registerJson(id, type, s);
        register(id, type);
    }

    /**
     * Builds a Jackson module wiring up all registered filter (de)serializers.
     *
     * @param parser query parser used to resolve raw (string-form) filters during deserialization
     * @return a module to register on an {@code ObjectMapper}
     */
    public Module module(final QueryParser parser) {
        final SimpleModule m = new SimpleModule("filter");

        for (final Map.Entry<Class<? extends Filter>, JsonSerializer<Filter>> e : this
            .serializers.entrySet()) {
            m.addSerializer(e.getKey(), e.getValue());
        }

        final FilterJsonDeserializer deserializer =
            new FilterJsonDeserializer(ImmutableMap.copyOf(deserializers), typeNameMapping,
                parser);
        m.addDeserializer(Filter.class, deserializer);
        return m;
    }

    // Records the JSON encoding for a filter type under the given operator id.
    @SuppressWarnings("unchecked")
    private <T extends Filter> void registerJson(
        String id, Class<T> type, FilterEncoding<T> serialization
    ) {
        serializers.put(type, new FilterJsonSerializer((FilterEncoding<Filter>) serialization));
        deserializers.put(id, serialization);
    }

    // Records the type <-> id mappings, rejecting duplicate registrations for the same type.
    private <T extends Filter> void register(String id, Class<T> type) {
        if (typeMapping.put(type, id) != null) {
            throw new IllegalStateException("Multiple mappings for single type: " + type);
        }

        final String typeName = buildTypeId(id, type);

        if (typeNameMapping.put(typeName, type) != null) {
            throw new IllegalStateException("Multiple type names for single type: " + type);
        }
    }

    // The object-form type name: the @JsonTypeName annotation value when present, else the id.
    private <T extends Filter> String buildTypeId(final String id, final Class<T> type) {
        final JsonTypeName annotation = type.getAnnotation(JsonTypeName.class);

        if (annotation == null) {
            return id;
        }

        return annotation.value();
    }

    /** Serializes a filter into its array form: {@code ["operator", arg1, arg2, ...]}. */
    private static final class FilterJsonSerializer extends JsonSerializer<Filter> {
        private final FilterEncoding<Filter> serializer;

        @java.beans.ConstructorProperties({ "serializer" })
        public FilterJsonSerializer(final FilterEncoding<Filter> serializer) {
            this.serializer = serializer;
        }

        @Override
        public void serialize(Filter value, JsonGenerator g, SerializerProvider provider)
            throws IOException {
            g.writeStartArray();
            // First array element is always the operator; the encoding writes the arguments.
            g.writeString(value.operator());

            final EncoderImpl s = new EncoderImpl(g);
            serializer.serialize(s, value);
            g.writeEndArray();
        }

        // Bridges FilterEncoding's argument callbacks onto the streaming JSON generator.
        private static final class EncoderImpl implements FilterEncoding.Encoder {
            private final JsonGenerator generator;

            @java.beans.ConstructorProperties({ "generator" })
            public EncoderImpl(final JsonGenerator generator) {
                this.generator = generator;
            }

            @Override
            public void string(String string) throws IOException {
                generator.writeString(string);
            }

            @Override
            public void filter(Filter filter) throws IOException {
                generator.writeObject(filter);
            }
        }
    }

    /**
     * Provides both array, and object based deserialization for filters.
     */
    static class FilterJsonDeserializer extends JsonDeserializer<Filter> {
        final Map<String, FilterEncoding<? extends Filter>> deserializers;
        final HashMap<String, Class<? extends Filter>> typeNameMapping;
        final QueryParser parser;

        @java.beans.ConstructorProperties({ "deserializers", "typeNameMapping", "parser" })
        public FilterJsonDeserializer(
            final Map<String, FilterEncoding<? extends Filter>> deserializers,
            final HashMap<String, Class<? extends Filter>> typeNameMapping,
            final QueryParser parser) {
            this.deserializers = deserializers;
            this.typeNameMapping = typeNameMapping;
            this.parser = parser;
        }

        @Override
        public Filter deserialize(JsonParser p, DeserializationContext c) throws IOException {
            // Dispatch on the leading token: array form vs. object form.
            if (p.getCurrentToken() == JsonToken.START_ARRAY) {
                return deserializeArray(p, c);
            }

            if (p.getCurrentToken() == JsonToken.START_OBJECT) {
                return deserializeObject(p, c);
            }

            throw c.mappingException("Expected start of array or object");
        }

        // Array form: ["operator", arg1, arg2, ...]; arguments decoded by the operator's encoding.
        private Filter deserializeArray(final JsonParser p, final DeserializationContext c)
            throws IOException {
            if (p.nextToken() != JsonToken.VALUE_STRING) {
                throw c.mappingException("Expected operator (string)");
            }

            final String operator = p.readValueAs(String.class);

            final FilterEncoding<? extends Filter> deserializer = deserializers.get(operator);

            if (deserializer == null) {
                throw c.mappingException("No such operator: " + operator);
            }

            p.nextToken();

            final FilterEncoding.Decoder d = new Decoder(p, c);
            final Filter filter;

            try {
                filter = deserializer.deserialize(d);

                if (p.getCurrentToken() != JsonToken.END_ARRAY) {
                    throw c.mappingException("Expected end of array from '" + deserializer + "'");
                }

                // Raw filters carry an unparsed query string; hand them to the query parser.
                if (filter instanceof RawFilter) {
                    return parseRawFilter((RawFilter) filter);
                }

                return filter.optimize();
            } catch (final Exception e) {
                // use special {operator} syntax to indicate filter.
                throw JsonMappingException.wrapWithPath(e, this, "{" + operator + "}");
            }
        }

        // Object form: {"type": "...", ...fields}; the 'type' field selects the concrete class.
        private Filter deserializeObject(final JsonParser p, final DeserializationContext c)
            throws IOException {
            final ObjectNode object = (ObjectNode) p.readValueAs(JsonNode.class);

            final JsonNode typeNode = object.remove("type");

            if (typeNode == null) {
                throw c.mappingException("Expected 'type' field");
            }

            if (!typeNode.isTextual()) {
                throw c.mappingException("Expected 'type' to be string");
            }

            final String type = typeNode.asText();

            final Class<? extends Filter> cls = typeNameMapping.get(type);

            if (cls == null) {
                throw c.mappingException("No such type: " + type);
            }

            // use tree traversing parser to operate on the node (without 'type') again.
            final TreeTraversingParser parser = new TreeTraversingParser(object, p.getCodec());
            return parser.readValueAs(cls);
        }

        private Filter parseRawFilter(RawFilter filter) {
            return parser.parseFilter(filter.filter());
        }

        // Bridges FilterEncoding's argument reads onto the streaming JSON parser, tracking the
        // argument index so errors can be reported against the failing position.
        private static final class Decoder implements FilterEncoding.Decoder {
            private final JsonParser parser;
            private final DeserializationContext c;

            private int index = 0;

            @java.beans.ConstructorProperties({ "parser", "c" })
            public Decoder(final JsonParser parser, final DeserializationContext c) {
                this.parser = parser;
                this.c = c;
            }

            @Override
            public Optional<String> string() throws IOException {
                final int index = this.index++;

                // End of the argument array: signal absence rather than failing.
                if (parser.getCurrentToken() == JsonToken.END_ARRAY) {
                    return Optional.empty();
                }

                if (parser.getCurrentToken() != JsonToken.VALUE_STRING) {
                    throw c.mappingException("Expected string");
                }

                final String string;

                try {
                    string = parser.getValueAsString();
                } catch (final JsonMappingException e) {
                    throw JsonMappingException.wrapWithPath(e, this, index);
                }

                parser.nextToken();
                return Optional.of(string);
            }

            @Override
            public Optional<Filter> filter() throws IOException {
                final int index = this.index++;

                // End of the argument array: signal absence rather than failing.
                if (parser.getCurrentToken() == JsonToken.END_ARRAY) {
                    return Optional.empty();
                }

                if (parser.getCurrentToken() != JsonToken.START_ARRAY) {
                    throw c.mappingException("Expected start of new filter expression");
                }

                final Filter filter;

                try {
                    filter = parser.readValueAs(Filter.class);
                } catch (final JsonMappingException e) {
                    throw JsonMappingException.wrapWithPath(e, this, index);
                }

                parser.nextToken();
                return Optional.of(filter);
            }
        }
    }

    /**
     * Builds the default registry containing all built-in filter operators.
     *
     * @return a registry with and/or/not, tag matching, prefix/regex, true/false and raw filters
     */
    public static FilterRegistry registry() {
        final FilterRegistry registry = new FilterRegistry();

        registry.registerList(AndFilter.OPERATOR, AndFilter.class,
            new MultiArgumentsFilterBase<>(AndFilter::create, AndFilter::filters, filter()));
        registry.registerList(OrFilter.OPERATOR, OrFilter.class,
            new MultiArgumentsFilterBase<>(OrFilter::create, OrFilter::filters, filter()));
        registry.registerOne(NotFilter.OPERATOR, NotFilter.class,
            new OneArgumentFilterEncoding<>(NotFilter::create, NotFilter::filter, filter()));
        registry.registerTwo(MatchKeyFilter.OPERATOR, MatchKeyFilter.class,
            new OneArgumentFilterEncoding<>(MatchKeyFilter::create, MatchKeyFilter::key,
                string()));
        registry.registerTwo(MatchTagFilter.OPERATOR, MatchTagFilter.class,
            new TwoArgumentFilterEncoding<>(MatchTagFilter::create, MatchTagFilter::tag,
                MatchTagFilter::value, string(), string()));
        registry.registerOne(HasTagFilter.OPERATOR, HasTagFilter.class,
            new OneArgumentFilterEncoding<>(HasTagFilter::create, HasTagFilter::tag, string()));
        registry.registerTwo(StartsWithFilter.OPERATOR, StartsWithFilter.class,
            new TwoArgumentFilterEncoding<>(StartsWithFilter::create, StartsWithFilter::tag,
                StartsWithFilter::value, string(), string()));
        registry.registerTwo(RegexFilter.OPERATOR, RegexFilter.class,
            new TwoArgumentFilterEncoding<>(RegexFilter::create, RegexFilter::tag,
                RegexFilter::value, string(), string()));
        registry.registerEmpty(TrueFilter.OPERATOR, TrueFilter.class,
            new NoArgumentFilterBase<>(TrueFilter::get));
        registry.registerEmpty(FalseFilter.OPERATOR, FalseFilter.class,
            new NoArgumentFilterBase<>(FalseFilter::get));
        registry.registerOne(RawFilter.OPERATOR, RawFilter.class,
            new OneArgumentFilterEncoding<>(RawFilter::create, RawFilter::filter, string()));

        return registry;
    }
}
5,928
480
<filename>polardbx-server/src/main/java/com/alibaba/polardbx/server/response/ShowProfileSyncAction.java<gh_stars>100-1000 /* * Copyright [2013-2021], Alibaba Group Holding Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.polardbx.server.response; import com.alibaba.polardbx.CobarServer; import com.alibaba.polardbx.common.constants.CpuStatAttribute; import com.alibaba.polardbx.common.constants.CpuStatAttribute.CpuStatAttr; import com.alibaba.polardbx.common.model.SqlType; import com.alibaba.polardbx.common.utils.TStringUtil; import com.alibaba.polardbx.executor.cursor.ResultCursor; import com.alibaba.polardbx.executor.cursor.impl.ArrayResultCursor; import com.alibaba.polardbx.executor.handler.LogicalShowProfileHandler; import com.alibaba.polardbx.executor.sync.ISyncAction; import com.alibaba.polardbx.executor.utils.ExecUtils; import com.alibaba.polardbx.net.FrontendConnection; import com.alibaba.polardbx.net.NIOProcessor; import com.alibaba.polardbx.optimizer.context.ExecutionContext; import com.alibaba.polardbx.optimizer.core.datatype.DataTypes; import com.alibaba.polardbx.optimizer.core.profiler.cpu.CpuStat; import com.alibaba.polardbx.optimizer.core.profiler.cpu.CpuStatItem; import com.alibaba.polardbx.optimizer.core.rel.BaseTableOperation; import com.alibaba.polardbx.optimizer.core.rel.HashAgg; import com.alibaba.polardbx.optimizer.core.rel.LogicalInsert; import com.alibaba.polardbx.optimizer.core.rel.LogicalView; import 
com.alibaba.polardbx.optimizer.memory.MemoryManager;
import com.alibaba.polardbx.optimizer.memory.MemoryPool;
import com.alibaba.polardbx.optimizer.memory.MemoryType;
import com.alibaba.polardbx.optimizer.memory.QueryMemoryPool;
import com.alibaba.polardbx.optimizer.parse.SqlTypeUtils;
import com.alibaba.polardbx.optimizer.statis.MemoryStatisticsGroup;
import com.alibaba.polardbx.server.ServerConnection;
import com.alibaba.polardbx.statistics.RuntimeStatistics;
import com.alibaba.polardbx.statistics.RuntimeStatistics.Metrics;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelVisitor;
import org.apache.calcite.sql.SqlShowProfile;
import org.apache.calcite.util.trace.RuntimeStatisticsSketch;
import org.apache.calcite.util.trace.RuntimeStatisticsSketchExt;
import org.apache.commons.lang3.StringUtils;

import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Set;
import java.util.Stack;

/**
 * Sync action backing SHOW PROFILE: depending on the requested type it
 * reports either memory-pool usage or CPU time-cost statistics, instance-wide
 * (when no query id is given) or for one specific frontend connection.
 *
 * @author chenghui.lch
 */
public class ShowProfileSyncAction implements ISyncAction {

    // Orders pool/CPU items so that the largest consumer is polled first.
    protected static final MemoryComparator MEMORY_COMP = new MemoryComparator();
    protected static final CpuComparator CPU_COMP = new CpuComparator();
    // Thousands-separated formatting for byte/nanosecond counts.
    protected static final DecimalFormat NUM_FORMAT = new DecimalFormat("#,###");
    // SQL types whose in-flight statistics are skipped in favor of the last
    // finished statement's stats. Populated only by the static initializer below.
    protected static Set<SqlType> profileIgnoreSqlTypeSet = new HashSet<>();

    static {
        profileIgnoreSqlTypeSet.add(SqlType.SHOW);
        profileIgnoreSqlTypeSet.add(SqlType.SHOW_SEQUENCES);
        profileIgnoreSqlTypeSet.add(SqlType.SHOW_CHARSET);
        profileIgnoreSqlTypeSet.add(SqlType.SHOW_INSTANCE_TYPE);
        profileIgnoreSqlTypeSet.add(SqlType.TDDL_SHOW);
    }

    // Target frontend connection id as a string; null means "all connections".
    protected String queryId;
    // Requested profile types; only the first entry is consulted (see sync()).
    protected List<String> types;

    // No-arg constructor required for (de)serialization of the sync action.
    public ShowProfileSyncAction() {
    }

    public ShowProfileSyncAction(List<String> types, String queryId) {
        this.queryId = queryId;
        this.types = types;
    }

    public String getQueryId() {
        return queryId;
    }
public void setQueryId(String queryId) {
        this.queryId = queryId;
    }

    public List<String> getTypes() {
        return types;
    }

    public void setTypes(List<String> types) {
        this.types = types;
    }

    // Pairs a memory pool with the owning connection id (null for pools that
    // have no associated connection, e.g. plan-cache or task pools).
    protected static class MemoryPoolItem {
        public Long queryId;
        public MemoryPool stmtPoolStat;
    }

    // Per-connection CPU time-cost sketch used for the instance-wide view.
    protected static class CpuSketchItem {
        public long queryId;
        public String traceId;
        public String dbName;
        public long logTc = 0L;
        public long phySqlTc = 0L;
        public long phyRsTc = 0L;
        public String sql = null;
    }

    // One output row for a plan operator: -1 marks "no value recorded".
    protected static class RelNodeItem {
        public int relNodeId;
        public String relNodeName;
        public long timeCost = -1L;
        public long rowCount = -1L;
        public long parallelism = 0L;
    }

    // Treats the pool with the SMALLER usage as "greater", so a PriorityQueue
    // built with this comparator polls the largest consumer first.
    protected static class MemoryComparator implements Comparator<MemoryPoolItem> {
        @Override
        public int compare(MemoryPoolItem o1, MemoryPoolItem o2) {
            MemoryPool o1PoolStat = o1.stmtPoolStat;
            MemoryPool o2PoolStat = o2.stmtPoolStat;
            if (o1PoolStat.getMemoryUsageStat() < o2PoolStat.getMemoryUsageStat()) {
                return 1;
            } else if (o1PoolStat.getMemoryUsageStat() > o2PoolStat.getMemoryUsageStat()) {
                return -1;
            } else {
                return 0;
            }
        }
    }

    // Same inverted ordering as MemoryComparator, keyed on logical CPU time,
    // so poll() yields the most expensive statement first.
    protected static class CpuComparator implements Comparator<CpuSketchItem> {
        @Override
        public int compare(CpuSketchItem o1, CpuSketchItem o2) {
            if (o1.logTc < o2.logTc) {
                return 1;
            } else if (o1.logTc > o2.logTc) {
                return -1;
            } else {
                return 0;
            }
        }
    }

    /**
     * Walks an execution plan depth-first and flattens it into indented
     * RelNodeItem rows, joining each operator with its runtime statistics.
     */
    protected static class RelItemVisitor extends RelVisitor {
        protected List<RelNodeItem> relNodeItemList = new ArrayList<>();
        // Current traversal depth; its size drives the output indentation.
        protected Stack<RelNode> relNodeStack = new Stack<>();
        protected RuntimeStatistics runtimeStatistics = null;
        protected Map<RelNode, RuntimeStatisticsSketch> relStatMaps = null;

        public RelItemVisitor(RuntimeStatistics runtimeStatistics) {
            this.runtimeStatistics = runtimeStatistics;
            this.relStatMaps = this.runtimeStatistics.toSketchExt();
        }

        @Override
        public void visit(RelNode node, int ordinal, RelNode parent) {
            buildRelNodeItem(node);
            relNodeStack.push(node);
            // Do not descend into LogicalView/LogicalInsert: they are leaf
            // operators from the profiling point of view (see buildRelNodeItem).
            if (!(node instanceof LogicalView || node instanceof LogicalInsert)) {
List<RelNode> inputs = node.getInputs();
                for (int i = 0; i < inputs.size(); i++) {
                    visit(inputs.get(i), i, node);
                }
            }
            relNodeStack.pop();
        }

        @Override
        public RelNode go(RelNode p) {
            this.replaceRoot(p);
            visit(p, 0, null);
            return p;
        }

        // Builds the output row for one operator. For non-leaf operators the
        // children's time is subtracted so timeCost reflects self-time only.
        protected void buildRelNodeItem(RelNode p) {
            RelNodeItem relNodeItem = new RelNodeItem();
            relNodeItem.relNodeId = p.getId();
            relNodeItem.relNodeName = buildRelNameWithIndents(relNodeStack.size(), p);
            boolean isLvOrLm = false;
            if (p instanceof LogicalView || p instanceof LogicalInsert) {
                isLvOrLm = true;
            } else if (runtimeStatistics.isFromAllAtOnePhyTable()) {
                // When the final plan is only a PhyTableOperation,
                // We should treat it as LogicalView as well.
                isLvOrLm = true;
            }
            RuntimeStatisticsSketchExt statSketch = (RuntimeStatisticsSketchExt) relStatMaps.get(p);
            if (statSketch != null) {
                long timeCostSumOfInputs = 0;
                if (!isLvOrLm) {
                    // Sum the full duration of each direct input so it can be
                    // subtracted from this operator's total below.
                    List<RelNode> inputList = p.getInputs();
                    for (int i = 0; i < inputList.size(); i++) {
                        RuntimeStatisticsSketchExt statSketchOfInput = (RuntimeStatisticsSketchExt) relStatMaps.get(inputList.get(i));
                        if (statSketchOfInput != null) {
                            timeCostSumOfInputs += statSketchOfInput.getStartupDurationNano() + statSketchOfInput.getDurationNano() + statSketchOfInput.getCloseDurationNano() + statSketchOfInput.getChildrenAsyncTaskDuration() + statSketchOfInput.getSelfAsyncTaskDuration();
                        }
                    }
                    relNodeItem.parallelism = statSketch.getInstances();
                }
                if (!statSketch.hasInputOperator()) {
                    relNodeItem.timeCost = statSketch.getStartupDurationNano() + statSketch.getDurationNano() + statSketch.getCloseDurationNano() + statSketch.getSelfAsyncTaskDuration() + statSketch.getChildrenAsyncTaskDuration();
                } else {
                    relNodeItem.timeCost = statSketch.getStartupDurationNano() + statSketch.getDurationNano() + statSketch.getCloseDurationNano() + statSketch.getSelfAsyncTaskDuration() + statSketch.getChildrenAsyncTaskDuration() - timeCostSumOfInputs;
                }
                relNodeItem.rowCount = statSketch.getRowCount();
                relNodeItemList.add(relNodeItem);
            }
            if (isLvOrLm
&& statSketch != null) {
                // For leaf (physical access) operators, append one indented
                // sub-row per JDBC stage; zero-cost stages are omitted.
                RelNodeItem newItem = null;
                newItem = new RelNodeItem();
                newItem.relNodeId = 0;
                newItem.relNodeName = buildStageTagsWithIndents(relNodeStack.size() + 1, CpuStatAttribute.CREATE_CONN_TC_SUM);
                newItem.timeCost = statSketch.getCreateConnDurationNanoSum();
                if (newItem.timeCost > 0) {
                    relNodeItemList.add(newItem);
                }
                newItem = new RelNodeItem();
                newItem.relNodeId = 0;
                newItem.relNodeName = buildStageTagsWithIndents(relNodeStack.size() + 1, CpuStatAttribute.WAIT_CONN_TC_SUM);
                newItem.timeCost = statSketch.getWaitConnDurationNanoSum();
                if (newItem.timeCost > 0) {
                    relNodeItemList.add(newItem);
                }
                newItem = new RelNodeItem();
                newItem.relNodeId = 0;
                newItem.relNodeName = buildStageTagsWithIndents(relNodeStack.size() + 1, CpuStatAttribute.INIT_CONN_TC_SUM);
                newItem.timeCost = statSketch.getInitConnDurationNanoSum();
                if (newItem.timeCost > 0) {
                    relNodeItemList.add(newItem);
                }
                newItem = new RelNodeItem();
                newItem.relNodeId = 0;
                newItem.relNodeName = buildStageTagsWithIndents(relNodeStack.size() + 1, CpuStatAttribute.PREPARE_STMT_TS_SUM);
                newItem.timeCost = statSketch.getCreateAndInitJdbcStmtDurationNanoSum();
                if (newItem.timeCost > 0) {
                    relNodeItemList.add(newItem);
                }
                newItem = new RelNodeItem();
                newItem.relNodeId = 0;
                newItem.relNodeName = buildStageTagsWithIndents(relNodeStack.size() + 1, CpuStatAttribute.PHY_SQL_EXEC_TS_SUM);
                newItem.timeCost = statSketch.getExecJdbcStmtDurationNanoSum();
                if (newItem.timeCost > 0) {
                    relNodeItemList.add(newItem);
                }
                newItem = new RelNodeItem();
                newItem.relNodeId = 0;
                newItem.relNodeName = buildStageTagsWithIndents(relNodeStack.size() + 1, CpuStatAttribute.PHY_SQL_FETCH_RS_TS_SUM);
                newItem.timeCost = statSketch.getFetchJdbcResultSetDurationNanoSum();
                if (newItem.timeCost > 0) {
                    relNodeItemList.add(newItem);
                }
                newItem = new RelNodeItem();
                newItem.relNodeId = 0;
                newItem.relNodeName = buildStageTagsWithIndents(relNodeStack.size() + 1, CpuStatAttribute.PHY_SQL_CLOSE_RS_TS_SUM);
                newItem.timeCost =
statSketch.getCloseJdbcResultSetDurationNanoSum();
                if (newItem.timeCost > 0) {
                    relNodeItemList.add(newItem);
                }
            }
        }

        // Prefixes a stage tag with (stackDeep + 1) indentation units.
        protected String buildStageTagsWithIndents(int stackDeep, String stageTags) {
            StringBuilder stageTagsWithIndents = new StringBuilder("");
            for (int i = 0; i < stackDeep + 1; i++) {
                stageTagsWithIndents.append(LogicalShowProfileHandler.PROFILE_INDENTS_UNIT);
            }
            return stageTagsWithIndents.append(stageTags).toString();
        }

        // Operator display name with depth-based indentation; partial hash
        // aggregations are renamed for readability.
        protected String buildRelNameWithIndents(int stackDeep, RelNode rel) {
            StringBuilder relNameWithIndents = new StringBuilder("");
            for (int i = 0; i < stackDeep + 1; i++) {
                relNameWithIndents.append(LogicalShowProfileHandler.PROFILE_INDENTS_UNIT);
            }
            String relName = rel.getClass().getSimpleName();
            if (rel instanceof HashAgg) {
                if (((HashAgg) rel).isPartial()) {
                    relName = "PartialHashAgg";
                }
            }
            return relNameWithIndents.append(relName).toString();
        }

        public List<RelNodeItem> getRelNodeItemList() {
            return relNodeItemList;
        }
    }

    // Snapshot of one allocation: current usage plus its observed peak.
    protected static class AllocationStat {
        public String allocId;
        public long usedSize;
        public long usedPeak;

        public AllocationStat() {
        }

        public AllocationStat(String allocId, long usedSize, long usedPeak) {
            this.allocId = allocId;
            this.usedSize = usedSize;
            this.usedPeak = usedPeak;
        }
    }

    // Ascending by used size. NOTE(review): appears unused in this file — the
    // sort call that used a comparator is commented out in buildPoolInfoOutput.
    protected static class AllocationComparator implements Comparator<AllocationStat> {
        @Override
        public int compare(AllocationStat ma1, AllocationStat ma2) {
            long t1 = ma1.usedSize;
            long t2 = ma2.usedSize;
            if (t1 > t2) {
                return 1;
            } else if (t1 < t2) {
                return -1;
            } else {
                return 0;
            }
        }
    }

    /**
     * SHOW PROFILE MEMORY: with no queryId, lists every pool on this node
     * (global, cache, per-statement, plan-cache and task pools) sorted by
     * usage; with a queryId, drills into that connection's statement pools.
     */
    protected ResultCursor syncForMemory() {
        String memUsageStr = "";
        String memUsageMaxStr = "";
        String memLimitStr = "";
        if (queryId == null) {
            ArrayResultCursor result = new ArrayResultCursor(LogicalShowProfileHandler.PROFILE_TABLE_NAME);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_MEMORY_POOL_NAME, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_CONN_ID, DataTypes.StringType);
result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_NODE_HOST, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_MEMORY_USED, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_MEMORY_PEAK, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_MEMORY_LIMIT, DataTypes.StringType);
            result.initMeta();
            String nodeHost = CobarServer.getInstance().getServerAddress();
            // Add Global Pool
            MemoryPool globalPool = MemoryManager.getInstance().getGlobalMemoryPool();
            memUsageStr = NUM_FORMAT.format(globalPool.getMemoryUsage());
            memUsageMaxStr = NUM_FORMAT.format(globalPool.getMaxMemoryUsage());
            memLimitStr = NUM_FORMAT.format(globalPool.getMaxLimit());
            result.addRow(new Object[] {
                globalPool.getFullName(),
                LogicalShowProfileHandler.PROFILE_NO_VALUE,
                nodeHost,
                memUsageStr,
                memUsageMaxStr,
                memLimitStr});
            // Add Cached Pool
            MemoryPool cachePool = MemoryManager.getInstance().getCacheMemoryPool();
            memUsageStr = NUM_FORMAT.format(cachePool.getMemoryUsage());
            memUsageMaxStr = NUM_FORMAT.format(cachePool.getMaxMemoryUsage());
            memLimitStr = NUM_FORMAT.format(cachePool.getMaxLimit());
            result.addRow(new Object[] {
                cachePool.getFullName(),
                LogicalShowProfileHandler.PROFILE_NO_VALUE,
                nodeHost,
                memUsageStr,
                memUsageMaxStr,
                memLimitStr});
            // Add Sorted StmtPool of General Pool
            // Collect the statement pool of every connection that is currently
            // executing; the priority queue yields them largest-usage first.
            PriorityQueue<MemoryPoolItem> allSortedStmtPoolList = new PriorityQueue<>(MEMORY_COMP);
            for (NIOProcessor p : CobarServer.getInstance().getProcessors()) {
                for (FrontendConnection fc : p.getFrontends().values()) {
                    if (fc != null && fc instanceof ServerConnection) {
                        ServerConnection sc = (ServerConnection) fc;
                        if (sc.isStatementExecuting().get()) {
                            ExecutionContext ec = sc.getTddlConnection().getExecutionContext();
                            if (ec.getRuntimeStatistics() == null) {
                                continue;
                            }
                            MemoryPoolItem stmtPoolItem = new MemoryPoolItem();
                            stmtPoolItem.queryId = sc.getId();
                            stmtPoolItem.stmtPoolStat = ec.getRuntimeStatistics().getMemoryPool();
allSortedStmtPoolList.add(stmtPoolItem);
                        }
                    }
                }
            }
            // Emit statement pools in descending usage order.
            while (!allSortedStmtPoolList.isEmpty()) {
                MemoryPoolItem stmtPoolItem = allSortedStmtPoolList.poll();
                MemoryPool stmtPoolStat = stmtPoolItem.stmtPoolStat;
                Long memUsage = stmtPoolStat.getMemoryUsageStat();
                Long memUsageMax = stmtPoolStat.getMaxMemoryUsage();
                Long memLimit = stmtPoolStat.getMaxLimit();
                memUsageStr = NUM_FORMAT.format(memUsage);
                memUsageMaxStr = NUM_FORMAT.format(memUsageMax);
                memLimitStr = NUM_FORMAT.format(memLimit);
                result.addRow(new Object[] {
                    stmtPoolStat.getFullName(),
                    String.valueOf(stmtPoolItem.queryId),
                    nodeHost,
                    memUsageStr,
                    memUsageMaxStr,
                    memLimitStr});
            }
            // Add Sorted CachePool
            // Cache pools are nested two levels deep: per-db, then per-plan.
            PriorityQueue<MemoryPoolItem> allSortedCacheList = new PriorityQueue<>(MEMORY_COMP);
            for (Map.Entry<String, MemoryPool> dbMemStatItem : cachePool.getChildren().entrySet()) {
                MemoryPool dbMemoryStat = dbMemStatItem.getValue();
                for (Map.Entry<String, MemoryPool> planMemStatItem : dbMemoryStat.getChildren().entrySet()) {
                    MemoryPool planMemoryStat = planMemStatItem.getValue();
                    MemoryPoolItem stmtPoolItem = new MemoryPoolItem();
                    stmtPoolItem.stmtPoolStat = planMemoryStat;
                    stmtPoolItem.queryId = null;
                    allSortedCacheList.add(stmtPoolItem);
                }
            }
            while (!allSortedCacheList.isEmpty()) {
                MemoryPoolItem planPoolItem = allSortedCacheList.poll();
                MemoryPool planPoolStat = planPoolItem.stmtPoolStat;
                Long memUsage = planPoolStat.getMemoryUsageStat();
                Long memUsageMax = planPoolStat.getMaxMemoryUsage();
                Long memLimit = planPoolStat.getMaxLimit();
                memUsageStr = NUM_FORMAT.format(memUsage);
                memUsageMaxStr = NUM_FORMAT.format(memUsageMax);
                memLimitStr = NUM_FORMAT.format(memLimit);
                result.addRow(new Object[] {
                    planPoolStat.getFullName(),
                    LogicalShowProfileHandler.PROFILE_NO_VALUE,
                    nodeHost,
                    memUsageStr,
                    memUsageMaxStr,
                    memLimitStr});
            }
            // Add Task
            //TODO
            //FIXME
            PriorityQueue<MemoryPoolItem> allSortedTaskList = new PriorityQueue<>(MEMORY_COMP);
            Collection<MemoryPool> schemaMemoryPools = globalPool.getChildren().values();
            for
(MemoryPool schemaPool : schemaMemoryPools) {
                // Walk global -> schema -> query -> task pools, keeping TASK pools only.
                Collection<MemoryPool> queryPools = schemaPool.getChildren().values();
                for (MemoryPool queryPool : queryPools) {
                    Collection<MemoryPool> taskPools = queryPool.getChildren().values();
                    for (MemoryPool taskPool : taskPools) {
                        if (taskPool.getMemoryType() == MemoryType.TASK) {
                            MemoryPoolItem stmtPoolItem = new MemoryPoolItem();
                            stmtPoolItem.stmtPoolStat = taskPool;
                            stmtPoolItem.queryId = null;
                            allSortedTaskList.add(stmtPoolItem);
                        }
                    }
                }
            }
            while (!allSortedTaskList.isEmpty()) {
                MemoryPoolItem planPoolItem = allSortedTaskList.poll();
                MemoryPool planPoolStat = planPoolItem.stmtPoolStat;
                Long memUsage = planPoolStat.getMemoryUsageStat();
                Long memUsageMax = planPoolStat.getMaxMemoryUsage();
                Long memLimit = planPoolStat.getMaxLimit();
                memUsageStr = NUM_FORMAT.format(memUsage);
                memUsageMaxStr = NUM_FORMAT.format(memUsageMax);
                memLimitStr = NUM_FORMAT.format(memLimit);
                result.addRow(new Object[] {
                    planPoolStat.getFullName(),
                    LogicalShowProfileHandler.PROFILE_NO_VALUE,
                    nodeHost,
                    memUsageStr,
                    memUsageMaxStr,
                    memLimitStr});
            }
            return result;
        } else {
            // Per-query view: three columns (stage, used, peak).
            ArrayResultCursor result = new ArrayResultCursor(LogicalShowProfileHandler.PROFILE_TABLE_NAME);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_STAGE, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_MEMORY_USED, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_MEMORY_PEAK, DataTypes.StringType);
            result.initMeta();
            ServerConnection targetSc = findTargetServerConnection(queryId);
            RuntimeStatistics runtimeStat = null;
            if (targetSc != null) {
                if (targetSc.isStatementExecuting().get()) {
                    ExecutionContext ec = targetSc.getTddlConnection().getExecutionContext();
                    // For ignored SQL types (SHOW etc.) fall back to the stats
                    // of the last finished statement instead of the current one.
                    if (profileIgnoreSqlTypeSet.contains(ec.getSqlType())) {
                        runtimeStat = targetSc.getLastSqlRunTimeStat();
                    } else {
                        runtimeStat = (RuntimeStatistics) targetSc.getTddlConnection()
                            .getExecutionContext()
                            .getRuntimeStatistics();
                    }
                } else {
                    runtimeStat =
targetSc.getLastSqlRunTimeStat();
                }
            }
            if (runtimeStat != null) {
                String dbName = runtimeStat.getSchemaName();
                MemoryPool stmtPoolStat = runtimeStat.getMemoryPool();
                List<Object[]> rowInfos = new ArrayList<>();
                buildPoolInfoByMemoryStat(runtimeStat, dbName, rowInfos, stmtPoolStat, true, 0);
                for (int i = 0; i < rowInfos.size(); i++) {
                    result.addRow(rowInfos.get(i));
                }
            }
            return result;
        }
    }

    /**
     * Recursively renders one statement's memory pools into rows.
     *
     * @param isOutputOptPool whether to emit the optimizer (planner) pool;
     *                        true only for the top-level invocation
     * @param subQueryDepth   recursion depth, used for row indentation; the
     *                        "Total" header row is emitted only at depth 0
     */
    private void buildPoolInfoByMemoryStat(RuntimeStatistics runtimeStat, String dbName, List<Object[]> rowInfos,
                                           MemoryPool stmtPoolStat, boolean isOutputOptPool, int subQueryDepth) {
        if (stmtPoolStat == null || stmtPoolStat.getMaxMemoryUsage() <= 0) {
            return;
        }
        if (subQueryDepth == 0) {
            Object[] totalStat = new Object[] {
                LogicalShowProfileHandler.PROFILE_TOTAL,
                NUM_FORMAT.format(stmtPoolStat.getMemoryUsage()),
                NUM_FORMAT.format(stmtPoolStat.getMaxMemoryUsage())};
            rowInfos.add(totalStat);
        }
        if (isOutputOptPool) {
            List<AllocationStat> optimizerPoolAlloc = new ArrayList<>();
            // A cached plan has no live planner pool of its own.
            boolean isCached = runtimeStat.getSqlWholeStageMemEstimation().cachedPlanMemEstimation != null;
            MemoryPool optimizerPoolStatOfStmt = null;
            if (!isCached) {
                optimizerPoolStatOfStmt = stmtPoolStat.getChildPool(MemoryType.PLANER.getExtensionName());
            }
            if (optimizerPoolStatOfStmt != null && optimizerPoolStatOfStmt.getMemoryUsageStat() > 0) {
                extractPoolInfo(optimizerPoolStatOfStmt, optimizerPoolAlloc, subQueryDepth);
                buildPoolInfoOutput(optimizerPoolStatOfStmt, isCached, optimizerPoolAlloc, rowInfos, subQueryDepth);
            }
        }
        if (!runtimeStat.getMemoryToStatistics().isEmpty()) {
            // MPP/remote execution: render the per-node statistics groups.
            Object[] totalStat = new Object[] {
                "Server",
                LogicalShowProfileHandler.PROFILE_NO_VALUE,
                LogicalShowProfileHandler.PROFILE_NO_VALUE};
            rowInfos.add(totalStat);
            for (Map.Entry<String, MemoryStatisticsGroup> nodeEntry : runtimeStat.getMemoryToStatistics().entrySet()) {
                buildMppPoolInfoOutput(nodeEntry.getKey(), nodeEntry.getValue(), rowInfos, subQueryDepth);
            }
        } else {
            QueryMemoryPool queryMemoryPool = null;
            if (stmtPoolStat.getMemoryType() ==
MemoryType.QUERY) {
                queryMemoryPool = (QueryMemoryPool) stmtPoolStat;
            }
            if (queryMemoryPool != null) {
                // NOTE(review): the inner "queryMemoryPool != null" re-check is
                // redundant — the enclosing if already guarantees non-null.
                if (queryMemoryPool != null && queryMemoryPool.getMaxMemoryUsage() > 0) {
                    List<AllocationStat> executionPoolAlloc = new ArrayList<>();
                    extractPoolInfo(queryMemoryPool, executionPoolAlloc, subQueryDepth);
                    buildPoolInfoOutput(queryMemoryPool, false, executionPoolAlloc, rowInfos, subQueryDepth);
                }
                String subQueryIndent = StringUtils.repeat(LogicalShowProfileHandler.PROFILE_INDENTS_UNIT, subQueryDepth);
                // Recurse into subquery pools, one indentation level deeper.
                for (MemoryPool memoryPool : queryMemoryPool.getChildren().values()) {
                    if (memoryPool.getMemoryType() == MemoryType.SUBQUERY) {
                        if (memoryPool.getMaxMemoryUsage() > 0) {
                            rowInfos
                                .add(new Object[] {
                                    subQueryIndent + memoryPool.getName(),
                                    NUM_FORMAT.format(memoryPool.getMemoryUsage()),
                                    NUM_FORMAT.format(memoryPool.getMaxMemoryUsage())});
                            buildPoolInfoByMemoryStat(runtimeStat, dbName, rowInfos, memoryPool, false, subQueryDepth + 1);
                        }
                    }
                }
            }
        }
    }

    /**
     * Scans every frontend connection on all NIO processors and returns the
     * one whose id matches, or null if not found.
     *
     * @throws NumberFormatException if queryIdStr is not a parseable long
     */
    private ServerConnection findTargetServerConnection(String queryIdStr) {
        ServerConnection targetSc = null;
        Long queryId = Long.valueOf(queryIdStr);
        for (NIOProcessor p : CobarServer.getInstance().getProcessors()) {
            for (FrontendConnection fc : p.getFrontends().values()) {
                if (fc != null && fc instanceof ServerConnection) {
                    ServerConnection sc = (ServerConnection) fc;
                    if (sc.getId() == queryId) {
                        targetSc = sc;
                        break;
                    }
                }
            }
        }
        return targetSc;
    }

    // Renders one MPP memory statistics group (and its children, recursively)
    // with depth-based indentation.
    // NOTE(review): this row has 4 elements while the per-query memory cursor
    // declares 3 columns — verify against ArrayResultCursor's row handling.
    private void buildMppPoolInfoOutput(String name, MemoryStatisticsGroup memoryStatisticsGroup,
                                        List<Object[]> rowInfos, int depth) {
        String depthIndent = StringUtils.repeat(LogicalShowProfileHandler.PROFILE_INDENTS_UNIT, depth + 1);
        rowInfos.add(new Object[] {
            depthIndent + name,
            NUM_FORMAT.format(memoryStatisticsGroup.getMemoryUsage()),
            NUM_FORMAT.format(memoryStatisticsGroup.getMaxMemoryUsage()),
            LogicalShowProfileHandler.PROFILE_NO_VALUE});
        if (memoryStatisticsGroup.getMemoryStatistics() != null) {
            for (Map.Entry<String, MemoryStatisticsGroup> entry :
memoryStatisticsGroup.getMemoryStatistics()
                .entrySet()) {
                buildMppPoolInfoOutput(entry.getKey(), entry.getValue(), rowInfos, depth + 1);
            }
        }
    }

    /**
     * Emits one row for the pool itself followed by one indented row per
     * pre-extracted allocation.
     *
     * NOTE(review): parameter name "isCacehed" is a typo for "isCached";
     * kept as-is since renaming is out of scope for a comment-only pass.
     */
    protected void buildPoolInfoOutput(MemoryPool rootPool, boolean isCacehed, List<AllocationStat> poolAllocList,
                                       List<Object[]> returnRows, int subQueryDepth) {
        if (rootPool == null) {
            return;
        }
        //String subQueryIndent = StringUtils.repeat(LogicalShowProfileHandler.PROFILE_INDENTS_UNIT, subQueryDepth);
        // Tree-structured output: keep insertion order, do not sort.
        //Collections.sort(poolAllocList, MEMORY_ALLOC_COMP);
        String poolName = rootPool.getName();
        if (isCacehed) {
            poolName = poolName + LogicalShowProfileHandler.PROFILE_CACHED_POSTFIX;
        }
        //poolName = subQueryIndent + poolName;
        Object[] rowInfo = new Object[] {
            poolName,
            NUM_FORMAT.format(rootPool.getMemoryUsage()),
            NUM_FORMAT.format(rootPool.getMaxMemoryUsage())};
        returnRows.add(rowInfo);
        int size = poolAllocList.size();
        for (int i = 0; i < size; i++) {
            AllocationStat mai = poolAllocList.get(i);
            String allocId = LogicalShowProfileHandler.PROFILE_INDENTS_UNIT + mai.allocId;
            // NOTE(review): 4-element row vs the 3 columns declared for the
            // per-query memory cursor — confirm ArrayResultCursor tolerates this.
            Object[] allocInfo = new Object[] {
                allocId,
                NUM_FORMAT.format(mai.usedSize),
                NUM_FORMAT.format(mai.usedPeak),
                LogicalShowProfileHandler.PROFILE_NO_VALUE};
            returnRows.add(allocInfo);
        }
    }

    // Flattens a pool's child tree into AllocationStat entries, indenting
    // each level by one more indentation unit.
    protected void extractPoolInfo(MemoryPool poolStat, List<AllocationStat> pooAllocItemList, int depth) {
        if (poolStat == null) {
            return;
        }
        String treeIndent = StringUtils.repeat(LogicalShowProfileHandler.PROFILE_INDENTS_UNIT, depth);
        for (MemoryPool memoryPool : poolStat.getChildren().values()) {
            AllocationStat mai = new AllocationStat(treeIndent + memoryPool.getName(), memoryPool.getMemoryUsage(), memoryPool.getMaxMemoryUsage());
            pooAllocItemList.add(mai);
            if (memoryPool.getChildrenSize() > 0) {
                extractPoolInfo(memoryPool, pooAllocItemList, depth + 1);
            }
        }
    }

    /**
     * SHOW PROFILE CPU: with no queryId, lists the CPU time-cost sketch of
     * every currently-executing statement (largest logical cost first); with
     * a queryId, renders that statement's detailed stage/operator breakdown.
     */
    protected ResultCursor syncForCpu() {
        if (queryId == null) {
            ArrayResultCursor result = new ArrayResultCursor(LogicalShowProfileHandler.PROFILE_TABLE_NAME);
result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_CONN_ID, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_TRACE_ID, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_DB, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_LOG_TIME_COST, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_PHY_SQL_TIME_COST, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_PHY_RS_TIME_COST, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_SQL, DataTypes.StringType);
            result.initMeta();
            // Collect a CPU sketch for every executing statement that has
            // operator metrics enabled; ordered largest logical cost first.
            PriorityQueue<CpuSketchItem> allSortedStmtCpuList = new PriorityQueue<>(CPU_COMP);
            for (NIOProcessor p : CobarServer.getInstance().getProcessors()) {
                for (FrontendConnection fc : p.getFrontends().values()) {
                    if (fc != null && fc instanceof ServerConnection) {
                        ServerConnection sc = (ServerConnection) fc;
                        if (sc.isStatementExecuting().get()) {
                            ExecutionContext ec = sc.getTddlConnection().getExecutionContext();
                            if (ExecUtils.isOperatorMetricEnabled(ec)) {
                                RuntimeStatistics runTimeStat = (RuntimeStatistics) ec.getRuntimeStatistics();
                                if (runTimeStat != null) {
                                    Metrics metrics = runTimeStat.toMetrics();
                                    CpuSketchItem cpuSketchItem = new CpuSketchItem();
                                    cpuSketchItem.queryId = sc.getId();
                                    cpuSketchItem.traceId = ec.getTraceId();
                                    cpuSketchItem.dbName = ec.getSchemaName();
                                    cpuSketchItem.logTc = metrics.logCpuTc;
                                    cpuSketchItem.phySqlTc = metrics.execSqlTc;
                                    cpuSketchItem.phyRsTc = metrics.fetchRsTc;
                                    // SQL text is truncated to 1000 chars for display.
                                    String sqlContext = sc.getSqlSample();
                                    if (sqlContext != null) {
                                        cpuSketchItem.sql = TStringUtil.substring(sc.getSqlSample(), 0, 1000);
                                    } else {
                                        cpuSketchItem.sql = "";
                                    }
                                    allSortedStmtCpuList.add(cpuSketchItem);
                                }
                            }
                        }
                    }
                }
            }
            String logTcStr = "";
            String phySqlTcStr = "";
            String phyRsTcStr = "";
            while (!allSortedStmtCpuList.isEmpty()) {
                CpuSketchItem stmtCpuItem = allSortedStmtCpuList.poll();
                long queryId = stmtCpuItem.queryId;
                String
traceId = stmtCpuItem.traceId;
                String dbName = stmtCpuItem.dbName;
                String sqlInfo = stmtCpuItem.sql;
                long logTc = stmtCpuItem.logTc;
                long phySqlTc = stmtCpuItem.phySqlTc;
                long phyRsTc = stmtCpuItem.phyRsTc;
                logTcStr = NUM_FORMAT.format(logTc);
                phySqlTcStr = NUM_FORMAT.format(phySqlTc);
                phyRsTcStr = NUM_FORMAT.format(phyRsTc);
                result.addRow(new Object[] {queryId, traceId, dbName, logTcStr, phySqlTcStr, phyRsTcStr, sqlInfo});
            }
            return result;
        } else {
            // Per-query view: stage, time cost, percentage and row count.
            ArrayResultCursor result = new ArrayResultCursor(LogicalShowProfileHandler.PROFILE_TABLE_NAME);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_STAGE, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_TIME_COST, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_TIME_COST_PCT, DataTypes.StringType);
            result.addColumn(LogicalShowProfileHandler.PROFILE_ATTR_ROW_COUNT, DataTypes.StringType);
            result.initMeta();
            ServerConnection targetSc = findTargetServerConnection(queryId);
            ExecutionContext ec = null;
            CpuStat cpuStatManager = null;
            RuntimeStatistics runTimeStat = null;
            RelNode planTree = null;
            long totalTc = 0;
            if (targetSc != null) {
                if (targetSc.isStatementExecuting().get()) {
                    ec = targetSc.getTddlConnection().getExecutionContext();
                    // For ignored SQL types, use the last finished statement's stats.
                    if (ec.getSqlType() != null && profileIgnoreSqlTypeSet.contains(ec.getSqlType())) {
                        runTimeStat = targetSc.getLastSqlRunTimeStat();
                    } else {
                        runTimeStat = (RuntimeStatistics) targetSc.getTddlConnection()
                            .getExecutionContext()
                            .getRuntimeStatistics();
                    }
                } else {
                    runTimeStat = targetSc.getLastSqlRunTimeStat();
                }
                if (runTimeStat != null) {
                    cpuStatManager = runTimeStat.getSqlWholeStageCpuStat();
                    planTree = runTimeStat.getPlanTree();
                    // Climb from a single physical operation to its logical parent
                    // unless the whole plan is that one physical table operation.
                    if (planTree != null && planTree instanceof BaseTableOperation && !runTimeStat
                        .isFromAllAtOnePhyTable()) {
                        planTree = ((BaseTableOperation) planTree).getParent();
                    }
                }
            }
            if (runTimeStat != null && runTimeStat.isRunningWithCpuProfile()) {
                Metrics metrics = runTimeStat.toMetrics();
                totalTc =
metrics.totalTc;
                if (cpuStatManager == null) {
                    cpuStatManager = runTimeStat.getSqlWholeStageCpuStat();
                }
                // Process CpuStatItem
                List<CpuStatItem> cpuStatItems = cpuStatManager.getStatItems();
                List<CpuStatAttr> topAttrList = CpuStatAttribute.getChildrenAttr(CpuStatAttr.ROOT);
                for (int i = 0; i < topAttrList.size(); i++) {
                    CpuStatAttr cpuStatAttr = topAttrList.get(i);
                    processCpuStatItems(cpuStatItems, cpuStatAttr, totalTc, 0, result);
                }
                // Process Root of ExecutePlan
                // Header row tagging the plan section with trace id and state.
                String titleOfPlanOperators = CpuStatAttr.PLAN_EXECUTION.getAttrName();
                boolean isPlanFinishedExecuting = runTimeStat.isFinishExecution();
                if (isPlanFinishedExecuting) {
                    String extMsg = String.format(LogicalShowProfileHandler.PROFILE_TRACE_ID, runTimeStat.getTraceId(), LogicalShowProfileHandler.PROFILE_PLAN_STATE_COMPLETED);
                    titleOfPlanOperators += extMsg;
                } else {
                    String extMsg = String.format(LogicalShowProfileHandler.PROFILE_TRACE_ID, runTimeStat.getTraceId(), LogicalShowProfileHandler.PROFILE_PLAN_STATE_RUNNING);
                    titleOfPlanOperators += extMsg;
                }
                Object[] tmpRowInfo = new Object[] {
                    titleOfPlanOperators,
                    String.valueOf(""),
                    String.valueOf(""),
                    String.valueOf("")};
                result.addRow(tmpRowInfo);
                if (SqlTypeUtils.isSelectSqlType(runTimeStat.getSqlType())) {
                    // Process Plan Operators
                    Object[] rowInfo = null;
                    List<RelNodeItem> relNodeItemList = new ArrayList<>();
                    if (planTree != null) {
                        RelItemVisitor relItemVisitor = new RelItemVisitor(runTimeStat);
                        relItemVisitor.go(planTree);
                        relNodeItemList = relItemVisitor.getRelNodeItemList();
                    }
                    for (int i = 0; i < relNodeItemList.size(); i++) {
                        RelNodeItem relNodeItem = relNodeItemList.get(i);
                        String tcStr = LogicalShowProfileHandler.PROFILE_VALUE_COMPUTING;
                        String pctStr = LogicalShowProfileHandler.PROFILE_VALUE_COMPUTING;
                        long itemTc = relNodeItem.timeCost;
                        if (itemTc >= 0 && totalTc > 0) {
                            double pct = 100.0 * itemTc / totalTc;
                            tcStr = NUM_FORMAT.format(itemTc);
                            // Percentages above 100% (inconsistent timings)
                            // are left displayed as "computing".
                            if (pct <= 100.0) {
                                pctStr = String.format("%.4f%%", pct);
                            }
                        } else if (totalTc == 0) {
                            tcStr =
LogicalShowProfileHandler.PROFILE_ZEOR_VALUE;
                            pctStr = LogicalShowProfileHandler.PROFILE_ZEOR_PCT_VALUE;
                        }
                        String relNodeName = relNodeItem.relNodeName;
                        if (relNodeItem.parallelism > 1) {
                            relNodeName += String.format(LogicalShowProfileHandler.PROFILE_PLAN_PARALLELISM, relNodeItem.parallelism);
                        }
                        rowInfo = new Object[] {
                            relNodeName,
                            tcStr,
                            pctStr,
                            relNodeItem.rowCount < 0 ? LogicalShowProfileHandler.PROFILE_NO_VALUE : String.valueOf(relNodeItem.rowCount)};
                        result.addRow(rowInfo);
                    }
                }
                // Summary rows: logical, physical and total CPU time cost.
                tmpRowInfo = new Object[] {
                    CpuStatAttribute.LOGICAL_TIME_COST,
                    NUM_FORMAT.format(metrics.logCpuTc),
                    String.format("%.4f%%", 100.0 * metrics.logCpuTc / metrics.totalTc),
                    LogicalShowProfileHandler.PROFILE_NO_VALUE};
                result.addRow(tmpRowInfo);
                tmpRowInfo = new Object[] {
                    CpuStatAttribute.PHYSICAL_TIME_COST,
                    NUM_FORMAT.format(metrics.phyCpuTc),
                    String.format("%.4f%%", 100.0 * metrics.phyCpuTc / metrics.totalTc),
                    LogicalShowProfileHandler.PROFILE_NO_VALUE};
                result.addRow(tmpRowInfo);
                tmpRowInfo = new Object[] {
                    CpuStatAttribute.TOTAL_TIME_COST,
                    NUM_FORMAT.format(metrics.totalTc),
                    String.format("%.4f%%", 100.0 * metrics.totalTc / metrics.totalTc),
                    LogicalShowProfileHandler.PROFILE_NO_VALUE};
                result.addRow(tmpRowInfo);
            }
            return result;
        }
    }

    /**
     * Recursively emits one row per CPU stat attribute, visiting children
     * before the attribute itself and indenting by nesting level.
     */
    protected void processCpuStatItems(List<CpuStatItem> cpuStatItems, CpuStatAttr currStatAttr, long totalTc,
                                       int level, ArrayResultCursor result) {
        List<CpuStatAttr> childrenAttrs = CpuStatAttribute.getChildrenAttr(currStatAttr);
        if (childrenAttrs != null && childrenAttrs.size() > 0) {
            for (int i = 0; i < childrenAttrs.size(); i++) {
                CpuStatAttr childAttr = childrenAttrs.get(i);
                processCpuStatItems(cpuStatItems, childAttr, totalTc, level + 1, result);
            }
        }
        // Stat items are indexed by attribute id; a missing entry means the
        // stage never ran, and is skipped.
        CpuStatItem cpuStatItem = cpuStatItems.get(currStatAttr.getAttrId());
        if (cpuStatItem != null) {
            StringBuilder indentSb = new StringBuilder("");
            for (int i = 1; i < level; i++) {
                indentSb.append(LogicalShowProfileHandler.PROFILE_INDENTS_UNIT);
            }
            double itemTc = cpuStatItem.timeCostNano;
            double pct = 100 *
itemTc / totalTc;
            String pctStr = String.format("%.4f%%", pct);
            String tcStr = NUM_FORMAT.format(cpuStatItem.timeCostNano);
            Object[] rowInfo = new Object[] {
                indentSb.append(currStatAttr.getAttrName()),
                tcStr,
                pctStr,
                LogicalShowProfileHandler.PROFILE_NO_VALUE};
            result.addRow(rowInfo);
        }
        return;
    }

    /**
     * Entry point: dispatches to the memory or CPU view based on the first
     * requested type; defaults to CPU when no type is given.
     */
    @Override
    public ResultCursor sync() {
        String type = SqlShowProfile.CPU_TYPE;
        if (types != null) {
            type = types.get(0);
        }
        if (type.equalsIgnoreCase(SqlShowProfile.MEMORY_TYPE)) {
            return syncForMemory();
        } else {
            return syncForCpu();
        }
    }
}
21,762
789
package io.advantageous.qbit.vertx.bugs;

import io.advantageous.boon.core.Sys;
import io.advantageous.qbit.http.client.HttpClient;
import io.advantageous.qbit.http.client.HttpClientBuilder;
import io.advantageous.qbit.http.request.HttpRequestBuilder;
import org.junit.Test;

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;

import static io.advantageous.boon.core.IO.puts;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

/**
 * Regression test for issue #660: a request sent to a host/port where nothing is
 * listening should surface a 503 status through the response callback AND invoke
 * the client's error handler.
 */
public class Bug660 {

    @Test
    public void test() throws Exception {

        final AtomicInteger statusCode = new AtomicInteger();
        final AtomicReference<Throwable> firstError = new AtomicReference<>();

        // Point the client at a port with no listener; the error handler keeps
        // only the first failure it observes.
        final HttpClient client = HttpClientBuilder.httpClientBuilder()
                .setHost("localhost")
                .setPort(9999)
                .setErrorHandler(err -> firstError.compareAndSet(null, err))
                .buildAndStart();

        client.sendHttpRequest(HttpRequestBuilder
                .httpRequestBuilder()
                .setJsonBodyForPost("\"hi mob\"")
                .setResponse((code, contentType, body) -> {
                    puts(code, contentType, body);
                    statusCode.set(code);
                })
                .build());

        // Poll briefly (up to ~100ms) until both the response callback and the
        // error handler have fired.
        int attemptsLeft = 100;
        while (attemptsLeft-- > 0) {
            if (statusCode.get() != 0 && firstError.get() != null) {
                break;
            }
            Sys.sleep(1);
        }

        assertEquals(503, statusCode.get());
        assertNotNull(firstError.get());
    }
}
748
3,428
{"id":"02302","group":"easy-ham-1","checksum":{"type":"MD5","value":"6b0e60535c666317d676c81aeea21f4f"},"text":"From <EMAIL> Sun Oct 6 22:54:44 2002\nReturn-Path: <<EMAIL>>\nDelivered-To: yyyy<EMAIL>assassin.taint.org\nReceived: from localhost (jalapeno [127.0.0.1])\n\tby jmason.org (Postfix) with ESMTP id 1164C16F20\n\tfor <jm@localhost>; Sun, 6 Oct 2002 22:53:03 +0100 (IST)\nReceived: from jalapeno [127.0.0.1]\n\tby localhost with IMAP (fetchmail-5.9.0)\n\tfor jm@localhost (single-drop); Sun, 06 Oct 2002 22:53:03 +0100 (IST)\nReceived: from dogma.slashnull.org (localhost [127.0.0.1]) by\n dogma.slashnull.org (8.11.6/8.11.6) with ESMTP id g9680EK15209 for\n <<EMAIL>>; Sun, 6 Oct 2002 09:00:14 +0100\nMessage-Id: <<EMAIL>>\nTo: yyyy<EMAIL>ass<EMAIL>int.org\nFrom: boingboing <<EMAIL>>\nSubject: Glennf responds to warchalking FUD\nDate: Sun, 06 Oct 2002 08:00:14 -0000\nContent-Type: text/plain; encoding=utf-8\n\nURL: http://boingboing.net/#85528553\nDate: Not supplied\n\n<NAME> written an open letter to the Infoworld writer who published \nan hysterical ambivalent article[1] about warchalking: \n\n The only place I hear about these stories on warchalking that relate to \n stealing access from open, but not shared APs -- accidentally shared, I \n suppose is accurate -- is via law enforcement without any specific \n locations mentioned, arrests made, or even photos of the offending marks. \n\nLink[2] Discuss[3]\n\n[1] http://www.infoworld.com/articles/op/xml/02/10/04/021004opethics.xml?s=rss&t=wireless&slot=3\n[2] http://80211b.weblogger.com/2002/10/04\n[3] http://www.quicktopic.com/boing/H/RkUMBVBT8A3\n\n\n"}
656
984
/*
 * Copyright DataStax, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.datastax.oss.driver.internal.core.type.codec.extras.enums;

import com.datastax.oss.driver.api.core.type.codec.MappingCodec;
import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
import com.datastax.oss.driver.api.core.type.reflect.GenericType;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.Objects;
import net.jcip.annotations.Immutable;

/**
 * A codec that serializes {@link Enum} instances as CQL {@code int}s representing their ordinal
 * values as returned by {@link Enum#ordinal()}.
 *
 * <p><strong>Note that this codec relies on the enum constants declaration order; it is therefore
 * vital that this order remains immutable.</strong>
 *
 * @param <EnumT> The Enum class this codec serializes from and deserializes to.
 */
@Immutable
public class EnumOrdinalCodec<EnumT extends Enum<EnumT>> extends MappingCodec<Integer, EnumT> {

  /** All constants of the target enum, indexed by ordinal (declaration order). */
  private final EnumT[] enumConstants;

  public EnumOrdinalCodec(@NonNull Class<EnumT> enumClass) {
    super(
        TypeCodecs.INT,
        GenericType.of(Objects.requireNonNull(enumClass, "enumClass must not be null")));
    this.enumConstants = enumClass.getEnumConstants();
  }

  /**
   * Maps a deserialized ordinal back to its enum constant.
   *
   * @param value the ordinal read from the database, or {@code null}.
   * @return the matching enum constant, or {@code null} if {@code value} is {@code null}.
   * @throws IllegalArgumentException if {@code value} is not a valid ordinal for this enum.
   */
  @Nullable
  @Override
  protected EnumT innerToOuter(@Nullable Integer value) {
    if (value == null) {
      return null;
    }
    // Validate the ordinal explicitly so corrupt or stale data produces a descriptive
    // error instead of a bare ArrayIndexOutOfBoundsException from the array access.
    if (value < 0 || value >= enumConstants.length) {
      throw new IllegalArgumentException(
          String.format(
              "Invalid ordinal %d: expected a value between 0 and %d",
              value, enumConstants.length - 1));
    }
    return enumConstants[value];
  }

  /**
   * Maps an enum constant to the ordinal that will be serialized.
   *
   * @param value the enum constant, or {@code null}.
   * @return {@link Enum#ordinal()} of {@code value}, or {@code null} if {@code value} is
   *     {@code null}.
   */
  @Nullable
  @Override
  protected Integer outerToInner(@Nullable EnumT value) {
    return value == null ? null : value.ordinal();
  }
}
711
4,639
from django import template

register = template.Library()


@register.simple_tag
def wishlists_containing_product(wishlists, product):
    """Return the wishlists from ``wishlists`` that have a line for ``product``.

    Filters the given wishlist queryset down to those whose related lines
    reference the supplied product.
    """
    lookup = {"lines__product": product}
    return wishlists.filter(**lookup)
53