package com.baeldung.methodorders;

import static org.junit.Assert.assertEquals;

import org.junit.AfterClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;

@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class NameAscendingOrderOfExecutionUnitTest {
    private static StringBuilder output = new StringBuilder("");

    @Test
    public void secondTest() {
        output.append("b");
    }

    @Test
    public void thirdTest() {
        output.append("c");
    }

    @Test
    public void firstTest() {
        output.append("a");
    }

    @AfterClass
    public static void assertOutput() {
        assertEquals(output.toString(), "abc");
    }
}
package mage.game.turn;

import mage.constants.PhaseStep;
import mage.constants.TurnPhase;
import mage.game.Game;
import mage.game.events.GameEvent.EventType;

/**
 * @author <EMAIL>
 */
public class EndPhase extends Phase {

    public EndPhase() {
        this.type = TurnPhase.END;
        this.event = EventType.END_PHASE;
        this.preEvent = EventType.END_PHASE_PRE;
        this.postEvent = EventType.END_PHASE_POST;
        this.steps.add(new EndStep());
        this.steps.add(new CleanupStep());
    }

    public EndPhase(final EndPhase phase) {
        super(phase);
    }

    @Override
    protected void playStep(Game game) {
        if (currentStep.getType() == PhaseStep.CLEANUP) {
            game.getTurn().setEndTurnRequested(false); // so triggers trigger again
            currentStep.beginStep(game, activePlayerId);
            // 514.3a At this point, the game checks to see if any state-based actions would be performed
            // and/or any triggered abilities are waiting to be put onto the stack (including those that
            // trigger "at the beginning of the next cleanup step"). If so, those state-based actions are
            // performed, then those triggered abilities are put on the stack, then the active player gets
            // priority. Players may cast spells and activate abilities. Once the stack is empty and all
            // players pass in succession, another cleanup step begins.
            if (game.checkStateAndTriggered()) {
                game.playPriority(activePlayerId, true);
                playStep(game);
            }
            currentStep.endStep(game, activePlayerId);
        } else {
            super.playStep(game);
        }
    }

    @Override
    public EndPhase copy() {
        return new EndPhase(this);
    }
}
import unittest

from programy.storage.stores.sql.dao.trigger import Trigger


class TriggerTests(unittest.TestCase):

    def test_init(self):
        trigger1 = Trigger(name='name', trigger_class='class')
        self.assertIsNotNone(trigger1)
        self.assertEqual("<Trigger(id='n/a', name='name', trigger_class='class')>", str(trigger1))

        trigger2 = Trigger(id=1, name='name', trigger_class='class')
        self.assertIsNotNone(trigger2)
        self.assertEqual("<Trigger(id='1', name='name', trigger_class='class')>", str(trigger2))
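The assertions above pin down the DAO's contract: a readable __repr__ that prints 'n/a' for the id until the row has been persisted. Below is a minimal sketch of a Trigger model that would satisfy the test, assuming plain SQLAlchemy; programy's real class may use a different declarative base, table name, and column sizes.

from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Trigger(Base):
    __tablename__ = 'trigger'  # assumed table name

    id = Column(Integer, primary_key=True)
    name = Column(String(255))           # column sizes are assumptions
    trigger_class = Column(String(255))

    def __repr__(self):
        # The test requires an unsaved row (id is None) to render as 'n/a'.
        return "<Trigger(id='%s', name='%s', trigger_class='%s')>" % (
            self.id if self.id is not None else "n/a",
            self.name,
            self.trigger_class,
        )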
/****************************************************************************
 * boards/arm/tiva/dk-tm4c129x/src/tm4c_userleds.c
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.  The
 * ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 ****************************************************************************/

/* The development board has one tri-color user LED.
 *
 *   --- ------------ -----------------
 *   Pin Pin Function Jumper
 *   --- ------------ -----------------
 *   PN5 Red LED      J36 pins 1 and 2
 *   PQ4 Blue LED     J36 pins 3 and 4
 *   PQ7 Green LED    J36 pins 5 and 6
 *   --- ------------ -----------------
 *
 * A high output illuminates the LED.
 */

/****************************************************************************
 * Included Files
 ****************************************************************************/

#include <nuttx/config.h>

#include <stdint.h>
#include <stdbool.h>
#include <debug.h>

#include <arch/board/board.h>

#include "tiva_gpio.h"
#include "dk-tm4c129x.h"

#ifndef CONFIG_ARCH_LEDS

/****************************************************************************
 * Public Functions
 ****************************************************************************/

/****************************************************************************
 * Name: board_userled_initialize
 ****************************************************************************/

uint32_t board_userled_initialize(void)
{
  /* Configure LED PIOs for output */

  tiva_configgpio(GPIO_LED_R);
  tiva_configgpio(GPIO_LED_G);
  tiva_configgpio(GPIO_LED_B);
  return BOARD_NLEDS;
}

/****************************************************************************
 * Name: board_userled
 ****************************************************************************/

void board_userled(int led, bool ledon)
{
  uint32_t ledcfg;

  if (led == BOARD_LED_R)
    {
      ledcfg = GPIO_LED_R;
    }
  else if (led == BOARD_LED_B)
    {
      ledcfg = GPIO_LED_B;
    }
  else if (led == BOARD_LED_G)
    {
      ledcfg = GPIO_LED_G;
    }
  else
    {
      return;
    }

  tiva_gpiowrite(ledcfg, ledon);
}

/****************************************************************************
 * Name: board_userled_all
 ****************************************************************************/

void board_userled_all(uint32_t ledset)
{
  bool ledon;

  ledon = ((ledset & BOARD_LED_R_BIT) != 0);
  tiva_gpiowrite(GPIO_LED_R, ledon);

  ledon = ((ledset & BOARD_LED_G_BIT) != 0);
  tiva_gpiowrite(GPIO_LED_G, ledon);

  ledon = ((ledset & BOARD_LED_B_BIT) != 0);
  tiva_gpiowrite(GPIO_LED_B, ledon);
}

#endif /* !CONFIG_ARCH_LEDS */
# AutotestWebD/apps/dubbo_task/views/dubbo_task.py
from django.shortcuts import HttpResponse, render

from apps.common.func.LanguageFunc import *
from apps.common.func.CommonFunc import *
from apps.common.config import commonWebConfig
from apps.config.services.http_confService import HttpConfService
# from apps.task.services.HTTP_taskService import HTTP_taskService
from apps.dubbo_task.services.dubbo_taskService import DubboTaskService
from apps.config.services.businessLineService import BusinessService
from apps.config.services.modulesService import ModulesService
from apps.config.services.http_confService import HttpConfService
from apps.config.services.sourceService import SourceService
from apps.config.services.serviceConfService import ServiceConfService
from apps.dubbo_interface.services.dubbo_interface_service import DubboInterfaceService
from apps.dubbo_testcase.services.dubbo_testcase_service import DubboTestcaseService
from apps.dubbo_task.services.dubbo_task_executeService import DubboTaskExecuteService
from urllib import parse
from all_models_for_dubbo.models import Tb2DubboTaskExecute, Tb2DubboTask
from apps.common.func.WebFunc import getServiceConf
from django.db.utils import *
import json
from apps.common.func.WebFunc import *
from apps.common.decorator.permission_normal_funcitons import *
from apps.version_manage.services.common_service import VersionService


def dubbo_testCheck(request):
    langDict = getLangTextDict(request)
    context = {}
    if not isRelease:
        context["env"] = "test"
    context["taskCheck"] = "current-page"
    context["checkBusinessLine"] = dbModelListToListDict(BusinessService.getAllBusinessLine())
    context["checkModules"] = dbModelListToListDict(ModulesService.getAllModules())
    # page text
    text = {}
    text["pageTitle"] = langDict["dubbo"]["dubboTaskPageHeadings_check"]
    context["text"] = text
    context["page"] = 1
    # context["lang"] = getLangTextDict(request)
    return render(request, "dubbo/task/taskCheck.html", context)


def dubbo_taskListCheck(request):
    page = request.POST.get("page")
    if isInt(page):
        page = int(page)
    else:
        return HttpResponse("<script>alert('请验证页数参数');</script>")
    checkArr = json.loads(parse.unquote(request.POST.get("checkArr")))
    orderBy = request.POST.get("orderBy")
    if isSqlInjectable(orderBy):
        return HttpResponse("<script>alert('查询条件非法');</script>")
    if VersionService.isCurrentVersion(request):
        tbName = "tb2_dubbo_task"
        versionCondition = ""
    else:
        tbName = "tb2_dubbo_version_task"
        versionCondition = "and versionName='%s'" % request.session.get("version")
    execSql = "SELECT t.*,u.userName,um.userName modByName from %s t LEFT JOIN tb_user u ON t.addBy = u.loginName LEFT JOIN tb_user um ON t.modBy = um.loginName WHERE t.state=1 %s " % (tbName, versionCondition)
    checkList = []
    for key in checkArr:
        if checkArr[key] == "":
            continue
        elif key == "taskFounder":
            checkList.append("%%%s%%" % checkArr[key])
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and (t.addBy LIKE %s or u.userName LIKE %s) """
            continue
        elif key == "module":
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and t.modulesGroup LIKE %s """
            continue
        elif key == "businessLine":
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and t.businessLineGroup LIKE %s """
            continue
        checkList.append("%%%s%%" % checkArr[key])
        execSql += """ and t.%s """ % key
        execSql += """ LIKE %s"""
    execSql += """ ORDER BY %s""" % orderBy
    context = pagination(sqlStr=execSql, attrList=checkList, page=page, pageNum=commonWebConfig.taskPageNum, request=request)
    response = render(request, "dubbo/task/SubPages/taskList_check_page.html", context)
    return response


def getTaskForTaskId(request):
    langDict = getLangTextDict(request)
    id = request.GET.get("id")
    if VersionService.isCurrentVersion(request):
        taskData = DubboTaskService.getTaskForId(id)
    else:
        taskData = DubboTaskService.getVersionTaskForId(id, VersionService.getVersionName(request))
    if not taskData:
        return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON).toJson())
    taskDataDict = dbModelToDict(taskData)
    taskDataUser = dbModelToDict(taskData.addBy)
    del taskDataUser["id"]
    taskDataDict.update(taskDataUser)
    context = {}
    context.update(getServiceConf(request))
    # context["httpConf"] = HttpConfService.queryHttpConfSort(request)
    envConfList = DubboInterfaceService.queryDubboConfSort(request)
    context["httpConf"] = envConfList
    context["taskData"] = taskDataDict
    context["option"] = request.GET.get("option")
    return render(request, "dubbo/task/SubPages/task_Run_DetailsPage.html", context)


@single_add_page_permission
def taskAdd(request, context):
    langDict = getLangTextDict(request)
    context["interfacePage"] = 1
    context["testCasePage"] = 1
    context["option"] = "add"
    if not isRelease:
        context["env"] = "test"
    context["taskAdd"] = "current-page"
    context["checkBusinessLine"] = dbModelListToListDict(BusinessService.getAllBusinessLine())
    context["checkModules"] = dbModelListToListDict(ModulesService.getAllModules())
    context.update(getServiceConf(request))
    text = {}
    text["pageTitle"] = langDict["dubbo"]["dubboTaskPageHeadings_%s" % context["option"]]
    text["subPageTitle"] = langDict["dubbo"]["dubboTaskSubPageTitle_%s" % context["option"]]
    context["text"] = text
    return render(request, "dubbo/task/taskAdd.html", context)


def queryPeopleTask(request):
    langDict = getLangTextDict(request)
    pageNum = int(request.GET.get("num"))
    if VersionService.isCurrentVersion(request):
        attrData = DubboTaskService.queryPeopleTask(pageNum, commonWebConfig.queryPeopleInterface, request.session.get("loginName"))
    else:
        attrData = DubboTaskService.queryVersionPeopleTask(pageNum, commonWebConfig.queryPeopleInterface, request.session.get("loginName"), VersionService.getVersionName(request))
    return HttpResponse(ApiReturn(ApiReturn.CODE_OK, langDict["dubbo"]["httpTestCaseSuccess"], attrData).toJson())


def TestCaseSelectInterfaceCheckList(request):
    page = request.POST.get("interfacePage")
    if isInt(page):
        page = int(page)
    else:
        return HttpResponse("<script>alert('请验证页数参数');</script>")
    checkArr = json.loads(parse.unquote(request.POST.get("checkArr")))
    orderBy = request.POST.get("orderBy")
    if isSqlInjectable(orderBy):
        return HttpResponse("<script>alert('查询条件非法');</script>")
    if VersionService.isCurrentVersion(request):
        tbName = "tb2_dubbo_interface"
        versionCondition = ""
    else:
        tbName = "tb_version_http_interface"
        versionCondition = "and versionName='%s'" % request.session.get("version")
    execSql = "SELECT i.*,u.userName from %s i LEFT JOIN tb_user u ON i.addBy = u.loginName LEFT JOIN tb_modules m ON i.moduleId = m.id LEFT JOIN tb_business_line b ON i.businessLineId = b.id WHERE 1=1 and i.state=1 %s " % (tbName, versionCondition)
    checkList = []
    for key in checkArr:
        if checkArr[key] == "":
            continue
        elif key == "caseFounder":
            checkList.append("%%%s%%" % checkArr[key])
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and (i.addBy LIKE %s or u.userName LIKE %s) """
            continue
        elif key == "module":
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and m.moduleName LIKE %s """
            continue
        elif key == "businessLine":
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and b.bussinessLineName LIKE %s """
            continue
        checkList.append("%%%s%%" % checkArr[key])
        execSql += """ and i.%s """ % key
        execSql += """ LIKE %s"""
    execSql += """ ORDER BY %s""" % orderBy
    context = pagination(sqlStr=execSql, attrList=checkList, page=page, pageNum=commonWebConfig.taskCheckInterfaceSelectPage)
    response = render(request, "dubbo/testcase/SubPages/TestCase_Select_interface_list_check_page.html", context)
    return response


def dubboTaskSelectTestCaseCheckList(request):
    page = request.POST.get("testCasePage")
    if isInt(page):
        page = int(page)
    else:
        return HttpResponse("<script>alert('请验证页数参数');</script>")
    checkArr = json.loads(parse.unquote(request.POST.get("checkArr")))
    orderBy = request.POST.get("orderBy")
    if isSqlInjectable(orderBy):
        return HttpResponse("<script>alert('查询条件非法');</script>")
    if VersionService.isCurrentVersion(request):
        tbName = "tb2_dubbo_testcase"
        versionCondition = ""
    else:
        tbName = "tb_version_http_testcase"
        versionCondition = "and versionName='%s'" % request.session.get("version")
    execSql = "SELECT t.*,u.userName from %s t LEFT JOIN tb_user u ON t.addBy = u.loginName LEFT JOIN tb_modules m ON t.moduleId = m.id LEFT JOIN tb_business_line b ON t.businessLineId = b.id WHERE 1=1 and t.state=1 %s " % (tbName, versionCondition)
    checkList = []
    for key in checkArr:
        if checkArr[key] == "":
            continue
        elif key == "caseFounder":
            checkList.append("%%%s%%" % checkArr[key])
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and (t.addBy LIKE %s or u.userName LIKE %s) """
            continue
        elif key == "module":
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and m.moduleName LIKE %s """
            continue
        elif key == "businessLine":
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and b.bussinessLineName LIKE %s """
            continue
        checkList.append("%%%s%%" % checkArr[key])
        execSql += """ and t.%s """ % key
        execSql += """ LIKE %s"""
    execSql += """ ORDER BY %s""" % orderBy
    context = pagination(sqlStr=execSql, attrList=checkList, page=page, pageNum=commonWebConfig.interfaceSelectPageNum)
    response = render(request, "InterfaceTest/HTTPTask/SubPages/HTTP_Task_Select_TestCase_list_check_page.html", context)
    return response


@single_data_permission(Tb2DubboTask, Tb2DubboTask)
def taskAddData(request):
    taskData = json.loads(request.body)
    taskInterfaceBusinessLineArr = []
    taskInterfaceModulesArr = []
    taskInterfaceSourceArr = []
    if taskData["taskInterfaces"] != "":
        # de-duplicate, split into a list, and collect the business line names
        taskInterfaceList = list(set(taskData["taskInterfaces"].split(",")))
        taskInterfaceListPartSql = ''
        for i in range(0, len(taskInterfaceList)):
            if i == 0:
                taskInterfaceListPartSql = "interfaceId = '%s'" % taskInterfaceList[i]
                continue
            taskInterfaceListPartSql += " or interfaceId = '%s'" % taskInterfaceList[i]
        if VersionService.isCurrentVersion(request):
            taskInterfaceBusinessLineArr = BusinessService.getInterfaceListBusinessId(taskInterfaceListPartSql, "DUBBO")
            taskInterfaceModulesArr = ModulesService.getInterfaceListModulesId(taskInterfaceListPartSql, "DUBBO")
            # taskInterfaceSourceArr = SourceService.getInterfaceListSourcesId(taskInterfaceListPartSql)
        else:
            taskInterfaceBusinessLineArr = BusinessService.getVersionInterfaceListBusinessId(taskInterfaceListPartSql, VersionService.getVersionName(request))
            taskInterfaceModulesArr = ModulesService.getVersionInterfaceListModulesId(taskInterfaceListPartSql, VersionService.getVersionName(request))
            # taskInterfaceSourceArr = SourceService.getVersionInterfaceListSourcesId(taskInterfaceListPartSql, VersionService.getVersionName(request))
    taskTestCaseBusinessLineArr = []
    taskTestCaseModulesArr = []
    # taskTestCaseSourceArr = []
    if taskData["taskTestcases"] != "":
        # de-duplicate, split into a list, and collect the business line names
        taskTestCaseList = list(set(taskData["taskTestcases"].split(",")))
        taskTestCasePartSql = ""
        for i in range(0, len(taskTestCaseList)):
            if i == 0:
                taskTestCasePartSql = "caseId = '%s'" % taskTestCaseList[i]
                continue
            taskTestCasePartSql += " or caseId = '%s'" % taskTestCaseList[i]
        if VersionService.isCurrentVersion(request):
            taskTestCaseBusinessLineArr = BusinessService.getTestCaseListBusinessId(taskTestCasePartSql, protocol="DUBBO")
            taskTestCaseModulesArr = ModulesService.getTestCaseListModulesId(taskTestCasePartSql, protocol="DUBBO")
            # taskTestCaseSourceArr = SourceService.getTestCaseListSourcesId(taskTestCasePartSql)
        else:
            taskTestCaseBusinessLineArr = BusinessService.getVersionTestCaseListBusinessId(taskTestCasePartSql, VersionService.getVersionName(request))
            taskTestCaseModulesArr = ModulesService.getVersionTestCaseListModulesId(taskTestCasePartSql, VersionService.getVersionName(request))
            # taskTestCaseSourceArr = SourceService.getVersionTestCaseListSourcesId(taskTestCasePartSql, VersionService.getVersionName(request))
    businessLineGroupArr = taskInterfaceBusinessLineArr + taskTestCaseBusinessLineArr
    businessLineGroup = []
    for i in range(0, len(businessLineGroupArr)):
        businessLineGroup.append(businessLineGroupArr[i]["bussinessLineName"])
    taskData["businessLineGroup"] = list(set(businessLineGroup))
    modulesGroupArr = taskInterfaceModulesArr + taskTestCaseModulesArr
    modulesGroup = []
    for i in range(0, len(modulesGroupArr)):
        modulesGroup.append(modulesGroupArr[i]["moduleName"])
    taskData["modulesGroup"] = list(set(modulesGroup))
    taskData["protocol"] = "DUBBO"
    taskData["addBy_id"] = request.session.get("loginName")
    # sourcesGroupArr = taskInterfaceSourceArr + taskTestCaseSourceArr
    # sourceGroup = []
    # for i in range(0, len(sourcesGroupArr)):
    #     sourceGroup.append(sourcesGroupArr[i]["sourceName"])
    # taskData["sourceGroup"] = list(set(sourceGroup))
    if VersionService.isCurrentVersion(request):
        if "id" not in taskData.keys():
            createTask = DubboTaskService.addTask(taskData)
            if createTask.id >= 1:
                return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())
            else:
                return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "任务保存失败").toJson())
        else:
            taskData["modTime"] = datetime.datetime.now()
            editTaskData = DubboTaskService.editTask(taskData)
            if editTaskData == 1:
                return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())
            else:
                return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "任务编辑保存失败").toJson())
    else:
        if "id" not in taskData.keys():
            createTask = DubboTaskService.addVersionTask(taskData, VersionService.getVersionName(request))
            if createTask.id >= 1:
                return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())
            else:
                return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "任务保存失败").toJson())
        else:
            taskData["modTime"] = datetime.datetime.now()
            editTaskData = DubboTaskService.editVersionTask(taskData, VersionService.getVersionName(request))
            if editTaskData == 1:
                return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())
            else:
                return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "任务编辑保存失败").toJson())


@single_page_permission
def operationTask(request, context):
    langDict = getLangTextDict(request)
    context["option"] = request.GET.get("option")
    context["page"] = 1
    if not isRelease:
        context["env"] = "test"
    try:
        if VersionService.isCurrentVersion(request):
            context["dataAddBy"] = DubboTaskService.getTaskForId(request.GET.get("id")).addBy.loginName
        else:
            context["dataAddBy"] = DubboTaskService.getVersionTaskForId(request.GET.get("id"), request.session.get("version")).addBy.loginName
    except Exception as e:
        print(traceback.format_exc())
        return render(request, "permission/page_404.html")
    context["checkBusinessLine"] = dbModelListToListDict(BusinessService.getAllBusinessLine())
    context["checkModules"] = dbModelListToListDict(ModulesService.getAllModules())
    context["id"] = request.GET.get("id")
    context["interfacePage"] = 1
    context["testCasePage"] = 1
    context["taskAdd"] = "current-page"
    text = {}
    text["pageTitle"] = langDict["dubbo"]["dubboTaskPageHeadings_%s" % context["option"]]
    context["text"] = text
    context.update(getServiceConf(request))
    return render(request, "dubbo/task/taskAdd.html", context)


def getTaskData(request):
    id = request.GET.get("id")
    if VersionService.isCurrentVersion(request):
        taskDataModel = DubboTaskService.findTaskForId(id)[0]
        taskData = dbModelToDict(taskDataModel)
        serviceConf = ServiceConfService.queryServiceConfSort(request)
        highPriorityVARS = taskData["highPriorityVARS"]
        taskData["priorityCommon"] = substr(highPriorityVARS, "[CONF=common]", "[ENDCONF]")
        taskData["confPriority"] = {}
        for i in range(0, len(serviceConf)):
            if serviceConf[i]["serviceConfKey"] not in highPriorityVARS:
                taskData["confPriority"]["priority%s" % serviceConf[i]["serviceConfKey"]] = ""
                continue
            taskData["confPriority"]["priority%s" % serviceConf[i]["serviceConfKey"]] = substr(highPriorityVARS, "[CONF=%s]" % serviceConf[i]["serviceConfKey"], "[ENDCONF]")
        taskData["interfaceList"] = []
        if taskData["taskInterfaces"]:
            taskInterfaceList = taskData["taskInterfaces"].split(",")
            for i in range(0, len(taskInterfaceList)):
                try:
                    thisInterface = DubboInterfaceService.getInterfaceForInterfaceId(taskInterfaceList[i])
                    if not thisInterface:
                        continue
                    taskData["interfaceList"].append(dbModelToDict(thisInterface))
                    addBy = dbModelToDict(thisInterface.addBy)
                    del addBy["id"]
                    del addBy["state"]
                    taskData["interfaceList"][i].update(addBy)
                except Exception as e:
                    continue
        taskData["testCaseList"] = []
        if taskData["taskTestcases"]:
            taskTestCaseList = taskData["taskTestcases"].split(",")
            for i in range(0, len(taskTestCaseList)):
                try:
                    thisTestCase = DubboTestcaseService.getTestCaseForTestCaseId(taskTestCaseList[i])
                    taskData["testCaseList"].append(dbModelToDict(thisTestCase))
                    addBy = dbModelToDict(thisTestCase.addBy)
                    del addBy["id"]
                    del addBy["state"]
                    taskData["testCaseList"][i].update(addBy)
                except Exception as e:
                    continue
        return HttpResponse(ApiReturn(ApiReturn.CODE_OK, body=taskData).toJson())
    else:
        taskDataModel = DubboTaskService.findVersionTaskForId(id, VersionService.getVersionName(request))[0]
        taskData = dbModelToDict(taskDataModel)
        serviceConf = ServiceConfService.queryServiceConfSort(request)
        highPriorityVARS = taskData["highPriorityVARS"]
        taskData["priorityCommon"] = substr(highPriorityVARS, "[CONF=common]", "[ENDCONF]")
        taskData["confPriority"] = {}
        for i in range(0, len(serviceConf)):
            if serviceConf[i]["serviceConfKey"] not in highPriorityVARS:
                taskData["confPriority"]["priority%s" % serviceConf[i]["serviceConfKey"]] = ""
                continue
            taskData["confPriority"]["priority%s" % serviceConf[i]["serviceConfKey"]] = substr(highPriorityVARS, "[CONF=%s]" % serviceConf[i]["serviceConfKey"], "[ENDCONF]")
        taskData["interfaceList"] = []
        if taskData["taskInterfaces"]:
            taskInterfaceList = taskData["taskInterfaces"].split(",")
            for i in range(0, len(taskInterfaceList)):
                try:
                    thisInterface = HTTP_interfaceService.getVersionInterfaceForInterfaceId(taskInterfaceList[i], VersionService.getVersionName(request))
                    if not thisInterface:
                        continue
                    taskData["interfaceList"].append(dbModelToDict(thisInterface))
                    addBy = dbModelToDict(thisInterface.addBy)
                    del addBy["id"]
                    del addBy["state"]
                    taskData["interfaceList"][i].update(addBy)
                    # print(taskData["interfaceList"][i])
                except Exception as e:
                    # print(addBy)
                    # taskData["interfaceList"][i].update(addBy)
                    taskData["interfaceList"].append('')
                    continue
        taskData["testCaseList"] = []
        if taskData["taskTestcases"]:
            taskTestCaseList = taskData["taskTestcases"].split(",")
            for i in range(0, len(taskTestCaseList)):
                try:
                    thisTestCase = HTTP_test_caseService.getVersionTestCaseForTestCaseId(taskTestCaseList[i], VersionService.getVersionName(request))
                    taskData["testCaseList"].append(dbModelToDict(thisTestCase))
                    addBy = dbModelToDict(thisTestCase.addBy)
                    del addBy["id"]
                    del addBy["state"]
                    taskData["testCaseList"][i].update(addBy)
                except Exception as e:
                    taskData["interfaceList"].append('')
                    continue
        return HttpResponse(ApiReturn(ApiReturn.CODE_OK, body=taskData).toJson())


@single_data_permission(Tb2DubboTask, Tb2DubboTask)
def taskDataEdit(request):
    if VersionService.isCurrentVersion(request):
        postLoad = json.loads(request.body)
        # if postLoad["addBy"] != request.session.get("loginName"):
        #     return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, '只可以编辑自己的任务').toJson())
        taskInterfaceBusinessLineArr = []
        taskInterfaceModulesArr = []
        taskInterfaceSourceArr = []
        if postLoad["taskInterfaces"] != "":
            # de-duplicate, split into a list, and collect the business line names
            taskInterfaceList = list(set(postLoad["taskInterfaces"].split(",")))
            taskInterfaceListPartSql = ''
            for i in range(0, len(taskInterfaceList)):
                if i == 0:
                    taskInterfaceListPartSql = "interfaceId = '%s'" % taskInterfaceList[i]
                    continue
                taskInterfaceListPartSql += " or interfaceId = '%s'" % taskInterfaceList[i]
            taskInterfaceBusinessLineArr = BusinessService.getInterfaceListBusinessId(taskInterfaceListPartSql, "DUBBO")
            taskInterfaceModulesArr = ModulesService.getInterfaceListModulesId(taskInterfaceListPartSql, "DUBBO")
            # taskInterfaceSourceArr = SourceService.getInterfaceListSourcesId(taskInterfaceListPartSql)
        taskTestCaseBusinessLineArr = []
        taskTestCaseModulesArr = []
        # taskTestCaseSourceArr = []
        if postLoad["taskTestcases"] != "":
            # de-duplicate, split into a list, and collect the business line names
            taskTestCaseList = list(set(postLoad["taskTestcases"].split(",")))
            taskTestCasePartSql = ""
            for i in range(0, len(taskTestCaseList)):
                if i == 0:
                    taskTestCasePartSql = "caseId = '%s'" % taskTestCaseList[i]
                    continue
                taskTestCasePartSql += " or caseId = '%s'" % taskTestCaseList[i]
            taskTestCaseBusinessLineArr = BusinessService.getTestCaseListBusinessId(taskTestCasePartSql, protocol="DUBBO")
            taskTestCaseModulesArr = ModulesService.getTestCaseListModulesId(taskTestCasePartSql, protocol="DUBBO")
            # taskTestCaseSourceArr = SourceService.getTestCaseListSourcesId(taskTestCasePartSql)
        businessLineGroupArr = taskInterfaceBusinessLineArr + taskTestCaseBusinessLineArr
        businessLineGroup = []
        for i in range(0, len(businessLineGroupArr)):
            businessLineGroup.append(businessLineGroupArr[i]["bussinessLineName"])
        postLoad["businessLineGroup"] = list(set(businessLineGroup))
        modulesGroupArr = taskInterfaceModulesArr + taskTestCaseModulesArr
        modulesGroup = []
        for i in range(0, len(modulesGroupArr)):
            modulesGroup.append(modulesGroupArr[i]["moduleName"])
        postLoad["modulesGroup"] = list(set(modulesGroup))
        # sourcesGroupArr = taskInterfaceSourceArr + taskTestCaseSourceArr
        # sourceGroup = []
        # for i in range(0, len(sourcesGroupArr)):
        #     sourceGroup.append(sourcesGroupArr[i]["sourceName"])
        # postLoad["sourceGroup"] = list(set(sourceGroup))
        postLoad["modTime"] = datetime.datetime.now()
        postLoad["modBy"] = request.session.get("loginName")
        saveEditResult = DubboTaskService.editTask(postLoad)
        if saveEditResult == 1:
            return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())
        else:
            return HttpResponse(ApiReturn(ApiReturn.CODE_INTERFACE_ERROR, '保存编辑失败!').toJson())
    else:
        # TODO: historical versions
        postLoad = json.loads(request.body)
        if postLoad["addBy"] != request.session.get("loginName"):
            return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, '只可以编辑自己的任务').toJson())
        taskInterfaceBusinessLineArr = []
        taskInterfaceModulesArr = []
        taskInterfaceSourceArr = []
        if postLoad["taskInterfaces"] != "":
            # de-duplicate, split into a list, and collect the business line names
            taskInterfaceList = list(set(postLoad["taskInterfaces"].split(",")))
            taskInterfaceListPartSql = ''
            for i in range(0, len(taskInterfaceList)):
                if i == 0:
                    taskInterfaceListPartSql = "interfaceId = '%s'" % taskInterfaceList[i]
                    continue
                taskInterfaceListPartSql += " or interfaceId = '%s'" % taskInterfaceList[i]
            taskInterfaceBusinessLineArr = BusinessService.getVersionInterfaceListBusinessId(taskInterfaceListPartSql, VersionService.getVersionName(request))
            taskInterfaceModulesArr = ModulesService.getVersionInterfaceListModulesId(taskInterfaceListPartSql, VersionService.getVersionName(request))
            taskInterfaceSourceArr = SourceService.getVersionInterfaceListSourcesId(taskInterfaceListPartSql, VersionService.getVersionName(request))
        taskTestCaseBusinessLineArr = []
        taskTestCaseModulesArr = []
        taskTestCaseSourceArr = []
        if postLoad["taskTestcases"] != "":
            # de-duplicate, split into a list, and collect the business line names
            taskTestCaseList = list(set(postLoad["taskTestcases"].split(",")))
            taskTestCasePartSql = ""
            for i in range(0, len(taskTestCaseList)):
                if i == 0:
                    taskTestCasePartSql = "caseId = '%s'" % taskTestCaseList[i]
                    continue
                taskTestCasePartSql += " or caseId = '%s'" % taskTestCaseList[i]
            taskTestCaseBusinessLineArr = BusinessService.getVersionTestCaseListBusinessId(taskTestCasePartSql, VersionService.getVersionName(request))
            taskTestCaseModulesArr = ModulesService.getVersionTestCaseListModulesId(taskTestCasePartSql, VersionService.getVersionName(request))
            taskTestCaseSourceArr = SourceService.getVersionTestCaseListSourcesId(taskTestCasePartSql, VersionService.getVersionName(request))
        businessLineGroupArr = taskInterfaceBusinessLineArr + taskTestCaseBusinessLineArr
        businessLineGroup = []
        for i in range(0, len(businessLineGroupArr)):
            businessLineGroup.append(businessLineGroupArr[i]["bussinessLineName"])
        postLoad["businessLineGroup"] = list(set(businessLineGroup))
        modulesGroupArr = taskInterfaceModulesArr + taskTestCaseModulesArr
        modulesGroup = []
        for i in range(0, len(modulesGroupArr)):
            modulesGroup.append(modulesGroupArr[i]["moduleName"])
        postLoad["modulesGroup"] = list(set(modulesGroup))
        sourcesGroupArr = taskInterfaceSourceArr + taskTestCaseSourceArr
        sourceGroup = []
        for i in range(0, len(sourcesGroupArr)):
            sourceGroup.append(sourcesGroupArr[i]["sourceName"])
        postLoad["sourceGroup"] = list(set(sourceGroup))
        postLoad["modTime"] = datetime.datetime.now()
        saveEditResult = DubboTaskService.editVersionTask(postLoad, VersionService.getVersionName(request))
        if saveEditResult == 1:
            return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())
        else:
            return HttpResponse(ApiReturn(ApiReturn.CODE_INTERFACE_ERROR, '保存编辑失败!').toJson())


@single_data_permission(Tb2DubboTask, Tb2DubboTask)
def taskDel(request):
    id = request.GET.get("id")
    if VersionService.isCurrentVersion(request):
        try:
            taskData = DubboTaskService.getTaskForId(id)
        except Exception as e:
            return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "参数id错误 %s" % e).toJson())
        # if request.session.get("loginName") != taskData.addBy.loginName:
        #     return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "只能删除自己创建的任务").toJson())
        if DubboTaskService.delTaskForId(id) == 1:
            return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())
        else:
            return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "删除失败,请联系管理员").toJson())
    else:
        try:
            taskData = DubboTaskService.getVersionTaskById(id)
        except Exception as e:
            return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "参数id错误 %s" % e).toJson())
        # if request.session.get("loginName") != taskData.addBy.loginName:
        #     return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "只能删除自己创建的任务").toJson())
        if DubboTaskService.delVersionTaskForId(id) == 1:
            return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())
        else:
            return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "删除失败,请联系管理员").toJson())


def taskDelTheSameCase(request):
    id = request.GET.get("id")
    if VersionService.isCurrentVersion(request):
        try:
            taskData = DubboTaskService.getTaskForId(id)
        except Exception as e:
            return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "参数id错误 %s" % e).toJson())
        if request.session.get("loginName") != taskData.addBy.loginName:
            return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "只能去重自己创建的任务").toJson())
        # de-duplicate the task's entries and save
    else:
        try:
            taskData = DubboTaskService.getVersionTaskById(id)
        except Exception as e:
            return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "参数id错误 %s" % e).toJson())
        if request.session.get("loginName") != taskData.addBy.loginName:
            return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "只能去重自己创建的任务").toJson())
        # de-duplicate the version task's entries and save
    oldTaskInterfaces = taskData.taskInterfaces
    oldInterfaceList = oldTaskInterfaces.split(",")
    newInterfaceList = list(set(oldInterfaceList))
    newInterfaceListStr = ""
    for tmpInterface in newInterfaceList:
        newInterfaceListStr += tmpInterface + ","
    newInterfaceListStr = newInterfaceListStr[:-1]
    taskData.taskInterfaces = newInterfaceListStr
    oldTaskCases = taskData.taskTestcases
    oldCaseList = oldTaskCases.split(",")
    newCaseList = list(set(oldCaseList))
    newCaseStr = ""
    for tmpCase in newCaseList:
        newCaseStr += tmpCase + ","
    newCaseStr = newCaseStr[:-1]
    taskData.taskTestcases = newCaseStr
    try:
        taskData.save(force_update=True)
        return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())
    except Exception as e:
        return HttpResponse(ApiReturn(ApiReturn.CODE_INTERFACE_ERROR, '去重失败!').toJson())


def taskResultCheck(request):
    langDict = getLangTextDict(request)
    context = {}
    if not isRelease:
        context["env"] = "test"
    context["taskExecuteResult"] = "current-page"
    context["checkBusinessLine"] = dbModelListToListDict(BusinessService.getAllBusinessLine())
    context["checkModules"] = dbModelListToListDict(ModulesService.getAllModules())
    context["httpConf"] = HttpConfService.queryHttpConfSort(request)
    # page text
    text = {}
    text["pageTitle"] = langDict["dubbo"]["dubboTaskCheckPageHeadings_check"]
    context["text"] = text
    context["page"] = 1
    return render(request, "dubbo/task/task_ExecResult.html", context)


def getTaskResultList(request):
    t1 = datetime.datetime.now()
    page = request.POST.get("page")
    if isInt(page):
        page = int(page)
    else:
        return HttpResponse("<script>alert('请验证页数参数');</script>")
    checkArr = json.loads(parse.unquote(request.POST.get("checkArr")))
    orderBy = request.POST.get("orderBy")
    if isSqlInjectable(orderBy):
        return HttpResponse("<script>alert('查询条件非法');</script>")
    execSql = "SELECT t.*,u.userName,tch.alias from tb2_dubbo_task_execute t LEFT JOIN tb_user u ON t.addBy = u.loginName LEFT JOIN tb_config_http tch on t.httpConfKey = tch.httpConfKey WHERE (t.execStatus in (1,2)) or (t.state=1"
    checkList = []
    for key in checkArr:
        if checkArr[key] == "":
            continue
        elif key == "taskFounder":
            checkList.append("%%%s%%" % checkArr[key])
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and (t.addBy LIKE %s or u.userName LIKE %s) """
            continue
        elif key == "module":
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and t.modulesGroup LIKE %s """
            continue
        elif key == "businessLine":
            checkList.append("%%%s%%" % checkArr[key])
            execSql += """ and t.businessLineGroup LIKE %s """
            continue
        elif key == "httpConfKey":
            checkList.append("%s" % checkArr[key])
            execSql += """ and tch.alias = %s """
            continue
        elif key == "taskId":
            checkList.append("%s" % checkArr[key])
            execSql += """ and t.taskId = %s """
            continue
        checkList.append("%%%s%%" % checkArr[key])
        execSql += """ and t.%s """ % key
        execSql += """ LIKE %s"""
    execSql += """) ORDER BY %s,%s""" % ("t.execStatus asc", orderBy)
    context = pagination(sqlStr=execSql, attrList=checkList, page=page, pageNum=commonWebConfig.taskPageNum)
    for pageData in context["pageDatas"]:
        # progress bar and color
        execProgressDataLen = pageData["execProgressData"].split(":")
        try:
            pageData["execPercent"] = "pass"
            pageData["execColor"] = "success"
            pageData["executeCount"] = (int(execProgressDataLen[1]) + int(execProgressDataLen[2]) + int(execProgressDataLen[3]))
            pageData["passCount"] = int(execProgressDataLen[1])
            pageData["failCount"] = int(execProgressDataLen[2])
            pageData["errorCount"] = int(execProgressDataLen[3])
            pageData["passPercent"] = int((pageData["executeCount"] / int(execProgressDataLen[0])) * 100)
            if int(execProgressDataLen[2]) > 0 or int(execProgressDataLen[3]) > 0:
                pageData["execPercent"] = "fail"
                pageData["execColor"] = "danger"
        except ZeroDivisionError:
            pageData["passPercent"] = 0
        # version number
        if pageData["version"] == "CurrentVersion":
            pageData["versionText"] = request.session.get("CurrentVersion")
        else:
            pageData["versionText"] = pageData["version"]
        # execution comments
        if pageData["execComments"] == "":
            pageData["execComments"] = "-"
        # save to history
        if pageData["isSaveHistory"] == 1:
            pageData["isSaveHistoryText"] = "是"
        else:
            pageData["isSaveHistoryText"] = "否"
        # send report email
        if pageData["isSendEmail"] == 1:
            pageData["isSendEmailText"] = "是"
        else:
            pageData["isSendEmailText"] = "否"
    response = render(request, "dubbo/task/SubPages/task_result_list_page.html", context)
    print(datetime.datetime.now() - t1)
    return response


def updateTaskExecuteProgressData(request):
    taskExecuteIdList = request.POST.get("taskExecuteIds").split(",")
    redisCache = RedisCache()
    resultDict = {}
    for idIndex in taskExecuteIdList:
        try:
            selfData = redisCache.get_data("%s_taskExecute_%s" % ("DUBBO", idIndex))
            # print(selfData)
            selfStatus = redisCache.get_data("%s_taskExecuteStatus_%s" % ("DUBBO", idIndex))
        except ValueError:
            taskExecute = Tb2DubboTaskExecute.objects.get(id=idIndex)
            selfData = taskExecute.execProgressData
            selfStatus = taskExecute.execStatus
            # print(taskExecute.execStatus)
        if selfData == None or int(selfStatus) == 10 or int(selfStatus) == 11 or int(selfStatus) == 3:
            # a task has finished executing, so the page must reload
            return HttpResponse(ApiReturn(ApiReturn.CODE_RELOAD).toJson())
        else:
            resultDict[idIndex] = {}
            execProgressDataLen = selfData.split(":")
            resultDict[idIndex]["status"] = selfStatus
            resultDict[idIndex]["execPercent"] = "pass"
            resultDict[idIndex]["execColor"] = "success"
            resultDict[idIndex]["executeCount"] = (int(execProgressDataLen[1]) + int(execProgressDataLen[2]) + int(execProgressDataLen[3]))
            resultDict[idIndex]["passCount"] = int(execProgressDataLen[1])
            resultDict[idIndex]["failCount"] = int(execProgressDataLen[2])
            resultDict[idIndex]["errorCount"] = int(execProgressDataLen[3])
            if int(execProgressDataLen[0]) == 0:
                resultDict[idIndex]["passPercent"] = 0.00
            else:
                resultDict[idIndex]["passPercent"] = int((resultDict[idIndex]["executeCount"] / int(execProgressDataLen[0])) * 100)
            if int(execProgressDataLen[2]) > 0 or int(execProgressDataLen[3]) > 0:
                resultDict[idIndex]["execPercent"] = "fail"
                resultDict[idIndex]["execColor"] = "danger"
    return HttpResponse(ApiReturn(ApiReturn.CODE_OK, body=resultDict).toJson())


def queryPeopleTaskExecute(request):
    langDict = getLangTextDict(request)
    pageNum = int(request.GET.get("num"))
    attrData = DubboTaskExecuteService.queryPeopleTaskExecute(pageNum, commonWebConfig.queryPeopleInterface, request.session.get("loginName"))
    return HttpResponse(ApiReturn(ApiReturn.CODE_OK, langDict["dubbo"]["httpTestCaseSuccess"], attrData).toJson())


def getTaskRestltDetail(request):
    id = request.GET.get("id")
    taskExecDataModel = DubboTaskExecuteService.findTaskRestltForId(id)
    taskExecData = dbModelToDict(taskExecDataModel)
    taskExecData.update(dbModelToDict(taskExecDataModel.httpConfKey))
    taskExecData.update(dbModelToDict(taskExecDataModel.addBy))
    return HttpResponse(ApiReturn(ApiReturn.CODE_OK, body=json.dumps(taskExecData)).toJson())


@sql_inject_validate
def getInterfeceListDataForTask(request):
    id = request.GET.get("id")
    if VersionService.isCurrentVersion(request):
        taskDataModel = DubboTaskService.getTaskForId(id)
        taskData = dbModelToDict(taskDataModel)
        getInterFaceDataSql = taskData["taskInterfaces"].replace(",", "' union all select thi.id,thi.interfaceId,thi.title,thi.casedesc,thi.dubboSystem,thi.dubboService,thi.dubboMethod,thi.addBy,tu.userName from tb2_dubbo_interface thi LEFT JOIN tb_user tu on thi.addBy = tu .loginName where interfaceId = '")
        sql = "select thi.id,thi.interfaceId,thi.title,thi.casedesc,thi.dubboSystem,thi.dubboService,thi.dubboMethod,thi.addBy,tu.userName from tb2_dubbo_interface thi LEFT JOIN tb_user tu on thi.addBy = tu .loginName where interfaceId = '%s'" % getInterFaceDataSql
    else:
        taskDataModel = DubboTaskService.getVersionTaskById(id)
        taskData = dbModelToDict(taskDataModel)
        getInterFaceDataSql = taskData["taskInterfaces"].replace(",", "' union all select thi.id,thi.interfaceId,thi.title,thi.casedesc,thi.url,thi.addBy,tu.userName from tb_version_http_interface thi LEFT JOIN tb_user tu on thi.addBy = tu .loginName where interfaceId = '")
        sql = "select thi.id,thi.interfaceId,thi.title,thi.casedesc,thi.url,thi.addBy,tu.userName from tb_version_http_interface thi LEFT JOIN tb_user tu on thi.addBy = tu .loginName where interfaceId = '%s' and versionName='%s'" % (getInterFaceDataSql, VersionService.getVersionName(request))
    print(sql)
    taskInterfaceListData = executeSqlGetDict(sql)
    response = render(request, "dubbo/task/SubPages/Task_Details_Select_interface_list_check_page.html", {"pageDatas": taskInterfaceListData})
    return response


def getTestCaseListDataForTask(request):
    id = request.GET.get("id")
    if VersionService.isCurrentVersion(request):
        taskDataModel = DubboTaskService.getTaskForId(id)
        taskData = dbModelToDict(taskDataModel)
        getTestCaseDataSql = taskData["taskTestcases"].replace(",", "' union all select tht.id,tht.caseId,tht.title,tht.casedesc,tht.stepCount,tht.addBy,tu.userName from tb2_dubbo_testcase tht LEFT JOIN tb_user tu on tht.addBy = tu .loginName where tht.caseId = '")
        sql = "select tht.id,tht.caseId,tht.title,tht.casedesc,tht.stepCount,tht.addBy,tu.userName from tb2_dubbo_testcase tht LEFT JOIN tb_user tu on tht.addBy = tu .loginName where tht.caseId ='%s'" % getTestCaseDataSql
        print(sql)
    else:
        taskDataModel = DubboTaskService.getVersionTaskById(id)
        taskData = dbModelToDict(taskDataModel)
        getTestCaseDataSql = taskData["taskTestcases"].replace(",", "' union all select tht.id,tht.caseId,tht.title,tht.casedesc,tht.stepCount,tht.addBy,tu.userName from tb_version_http_testcase tht LEFT JOIN tb_user tu on tht.addBy = tu .loginName where tht.caseId = '")
        sql = "select tht.id,tht.caseId,tht.title,tht.casedesc,tht.stepCount,tht.addBy,tu.userName from tb_version_http_testcase tht LEFT JOIN tb_user tu on tht.addBy = tu .loginName where tht.caseId ='%s' and versionName='%s'" % (getTestCaseDataSql, VersionService.getVersionName(request))
    taskTestCaseListData = executeSqlGetDict(sql)
    response = render(request, "dubbo/task/SubPages/Task_Details_Select_TestCase_list_check_page.html", {"pageDatas": taskTestCaseListData})
    return response


def getInterfeceListData(request):
    # based on the task execution result
    id = request.GET.get("id")
    taskDataModel = DubboTaskExecuteService.findTaskRestltForId(id)
    taskData = dbModelToDict(taskDataModel)
    print(len(taskData))
    if taskDataModel.version == "CurrentVersion":
        getInterFaceDataSql = taskData["taskInterfaces"].replace(",", "' union all select thi.id,thi.interfaceId,thi.title,thi.casedesc,thi.dubboSystem,thi.dubboService,thi.dubboMethod,thi.addBy,tu.userName from tb2_dubbo_interface thi LEFT JOIN tb_user tu on thi.addBy = tu .loginName where interfaceId = '")
        sql = "select thi.id,thi.interfaceId,thi.title,thi.casedesc,thi.dubboSystem,thi.dubboService,thi.dubboMethod,thi.addBy,tu.userName from tb2_dubbo_interface thi LEFT JOIN tb_user tu on thi.addBy = tu .loginName where interfaceId = '%s'" % getInterFaceDataSql
    else:
        getInterFaceDataSql = taskData["taskInterfaces"].replace(",", "' union all select thi.id,thi.interfaceId,thi.title,thi.casedesc,thi.url,thi.addBy,tu.userName from tb_version_http_interface thi LEFT JOIN tb_user tu on thi.addBy = tu .loginName where interfaceId = '")
        sql = "select thi.id,thi.interfaceId,thi.title,thi.casedesc,thi.url,thi.addBy,tu.userName from tb_version_http_interface thi LEFT JOIN tb_user tu on thi.addBy = tu .loginName where interfaceId = '%s' and versionName='%s'" % (getInterFaceDataSql, taskDataModel.version)
    taskInterfaceListData = executeSqlGetDict(sql)
    response = render(request, "dubbo/task/SubPages/Task_Details_Select_interface_list_check_page.html", {"pageDatas": taskInterfaceListData})
    return response


def getTestCaseListData(request):
    id = request.GET.get("id")
    taskDataModel = DubboTaskExecuteService.findTaskRestltForId(id)
    taskData = dbModelToDict(taskDataModel)
    if taskDataModel.version == "CurrentVersion":
        getTestCaseDataSql = taskData["taskTestcases"].replace(",", "' union all select tht.id,tht.caseId,tht.title,tht.casedesc,tht.stepCount,tht.addBy,tu.userName from tb2_dubbo_testcase tht LEFT JOIN tb_user tu on tht.addBy = tu .loginName where tht.caseId = '")
        sql = "select tht.id,tht.caseId,tht.title,tht.casedesc,tht.stepCount,tht.addBy,tu.userName from tb2_dubbo_testcase tht LEFT JOIN tb_user tu on tht.addBy = tu .loginName where tht.caseId ='%s'" % getTestCaseDataSql
    else:
        getTestCaseDataSql = taskData["taskTestcases"].replace(",", "' union all select tht.id,tht.caseId,tht.title,tht.casedesc,tht.stepCount,tht.addBy,tu.userName from tb_version_http_testcase tht LEFT JOIN tb_user tu on tht.addBy = tu .loginName where tht.caseId = '")
        sql = "select tht.id,tht.caseId,tht.title,tht.casedesc,tht.stepCount,tht.addBy,tu.userName from tb_version_http_testcase tht LEFT JOIN tb_user tu on tht.addBy = tu .loginName where tht.caseId ='%s' and versionName='%s'" % (getTestCaseDataSql, taskDataModel.version)
    taskTestCaseListData = executeSqlGetDict(sql)
    response = render(request, "dubbo/task/SubPages/Task_Details_Select_TestCase_list_check_page.html", {"pageDatas": taskTestCaseListData})
    return response


def againRunTask(request):
    # re-running from a historical version fetches the task incorrectly
    id = request.GET.get("id")
    res = DubboTaskExecuteService.againRunTask(id, request.session.get("loginName"))
    if res == False:
        return ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, '任务已被删除').toJson()
    result = dbModelToDict(res)
    tcpin = '{"do":3,"TaskExecuteId":%s,"protocol":"DUBBO","TaskExecuteEnv":"%s","TaskId":"%s"}' % (result["id"], result["httpConfKey_id"], result["taskId"])
    RedisCache().set_data("%s_taskExecute_%s" % ("DUBBO", result["id"]), "0:0:0:0:0")
    RedisCache().set_data("%s_taskExecuteStatus_%s" % ("DUBBO", result["id"]), "1")
    retApiResult = send_tcp_request(tcpin)
    if retApiResult.code != ApiReturn.CODE_OK:
        RedisCache().del_data("%s_taskExecute_%s" % ("DUBBO", result["id"]))
        RedisCache().del_data("%s_taskExecuteStatus_%s" % ("DUBBO", result["id"]))
        DubboTaskExecuteService.updateFailExecute(result["id"], retApiResult.message)
        return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, retApiResult.message).toJson())
    return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())


def stopTaskRun(request):
    id = request.GET.get("id")
    try:
        DubboTaskExecuteService.stopTaskRun(id)
    except Exception as e:
        return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "请验证id正确性%s" % e).toJson())
    tcpin = '{"do":4,"TaskExecuteId":"%s","protocol":"DUBBO"}' % id
    retApiResult = send_tcp_request(tcpin)
    if retApiResult.code != ApiReturn.CODE_OK:
        return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, retApiResult.message).toJson())
    return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())


def taskRunAdd(request):
    if VersionService.isCurrentVersion(request):
        taskData = dbModelToDict(DubboTaskService.getTaskForTaskId(request.POST.get("taskId")))
        del taskData["taskId"]
        taskData["taskId"] = request.POST.get("taskId")
    else:
        taskData = dbModelToDict(DubboTaskService.getVersionTaskForTaskId(request.POST.get("taskId"), VersionService.getVersionName(request)))
        del taskData["taskId"]
        del taskData["versionName_id"]
        taskData["taskId"] = request.POST.get("taskId")
    del taskData["id"]
    taskData["protocol"] = request.POST.get("protocol")
    taskData["emailList"] = request.POST.get("emailList")
    taskData["addBy_id"] = request.session.get("loginName")
    taskData["isSaveHistory"] = request.POST.get("isSaveHistory")
    taskData["isSendEmail"] = request.POST.get("isSendEmail")
    taskData["execComments"] = request.POST.get("execComments")
    taskData["retryCount"] = request.POST.get("retryCount", 0)
    taskData["execBy_id"] = request.session.get("loginName")
    taskData["version"] = VersionService.getVersionName(request)
    httpConfList = request.POST.get("httpConfKey_id").split(",")
    retmsg = 0
    for httpConfIndex in range(0, len(httpConfList)):
        taskData["httpConfKey_id"] = httpConfList[httpConfIndex]
        cres = DubboTaskExecuteService.taskRunAdd(taskData)
        addDataResult = dbModelToDict(cres)
        RedisCache().set_data("%s_taskExecute_%s" % ("DUBBO", addDataResult["id"]), "0:0:0:0:0", 60 * 60 * 12)
        RedisCache().set_data("%s_taskExecuteStatus_%s" % ("DUBBO", addDataResult["id"]), "1", 60 * 60 * 12)
        tcpin = '{"do":3,"TaskExecuteId":%s,"protocol":"DUBBO","TaskExecuteEnv":"%s","TaskId":"%s"}' % (addDataResult["id"], addDataResult["httpConfKey_id"], addDataResult["taskId"])
        retApiResult = send_tcp_request(tcpin)
        if retApiResult.code != ApiReturn.CODE_OK:
            retmsg = 1
    if retmsg == 1:
        addUserLog(request, "任务管理->任务执行->任务执行添加成功,但是执行服务出现异常,请联系管理员", "FAIL")
        return HttpResponse(ApiReturn(ApiReturn.CODE_TASK_EXCEPITON, "任务执行添加成功,但是执行服务出现异常,请联系管理员").toJson())
    addUserLog(request, "任务管理->任务执行->成功", "PASS")
    return HttpResponse(ApiReturn(ApiReturn.CODE_OK).toJson())


def getSelectExecuteStatus(request):
    sql = "SELECT testResult,count(*) as count from tb2_dubbo_task_execute GROUP BY testResult"
    return HttpResponse(ApiReturn(ApiReturn.CODE_OK, body=executeSqlGetDict(sql, [])).toJson())
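Every list view in this file builds its WHERE clause the same way: a SQL string accumulates %s placeholders while a parallel checkList accumulates the values, and both are handed to the project-internal pagination() helper. A minimal sketch of how such a (sql, params) pair is typically executed with Django's raw cursor follows; run_query is illustrative and not the project's actual helper, which presumably also slices the result set for paging.

from django.db import connection


def run_query(sql, params):
    """Execute a raw parameterized query and return rows as dicts."""
    with connection.cursor() as cursor:
        cursor.execute(sql, params)  # %s placeholders are bound by the driver
        columns = [col[0] for col in cursor.description]
        return [dict(zip(columns, row)) for row in cursor.fetchall()]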
{ "word": "Geode", "definitions": [ "A small cavity in rock lined with crystals or other mineral matter.", "A rock containing a geode." ], "parts-of-speech": "Noun" }
// external/include/boost/fusion/container/list/detail/deref_impl.hpp
/*=============================================================================
    Copyright (c) 2001-2011 <NAME>
    Copyright (c) 2005 <NAME>

    Distributed under the Boost Software License, Version 1.0. (See accompanying
    file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(FUSION_DEREF_IMPL_07172005_0831)
#define FUSION_DEREF_IMPL_07172005_0831

#include <boost/fusion/support/config.hpp>
#include <boost/mpl/eval_if.hpp>
#include <boost/type_traits/is_const.hpp>
#include <boost/type_traits/add_const.hpp>
#include <boost/type_traits/add_reference.hpp>

namespace boost { namespace fusion
{
    struct cons_iterator_tag;

    namespace extension
    {
        template <typename Tag>
        struct deref_impl;

        template <>
        struct deref_impl<cons_iterator_tag>
        {
            template <typename Iterator>
            struct apply
            {
                typedef typename Iterator::cons_type cons_type;
                typedef typename cons_type::car_type value_type;

                typedef typename mpl::eval_if<
                    is_const<cons_type>
                  , add_reference<typename add_const<value_type>::type>
                  , add_reference<value_type> >::type
                type;

                BOOST_CONSTEXPR BOOST_FUSION_GPU_ENABLED
                static type
                call(Iterator const& i)
                {
                    return i.cons.car;
                }
            };
        };
    }
}}

#endif
//
//  KBNotifications.h
//  Keybase
//
//  Created by Gabriel on 6/10/15.
//  Copyright (c) 2015 Keybase. All rights reserved.
//

#import <Foundation/Foundation.h>

extern NSString *const KBTrackingListDidChangeNotification;
extern NSString *const KBUserDidChangeNotification;
extern NSString *const KBStatusDidChangeNotification;
# Copyright 2021 TUNiB inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from transformers.models.wav2vec2.modeling_wav2vec2 import Wav2Vec2EncoderLayer

from parallelformers.policies.base import Layer, Policy
from parallelformers.transformers.modeling_bart import BartAttention_
from parallelformers.utils import AllReduceLinear


class Wav2VecPolicy(Policy):
    @staticmethod
    def replace_arguments(config, world_size):
        return {
            # 1. reduce hidden size
            "attention.embed_dim": config.hidden_size // world_size,
            # 2. reduce number of heads
            "attention.num_heads": config.num_attention_heads // world_size,
        }

    @staticmethod
    def replace_modules():
        return {
            "Wav2Vec2Attention": BartAttention_,
        }

    @staticmethod
    def attn_qkv():
        return [
            Layer(
                weight="attention.q_proj.weight",
                bias="attention.q_proj.bias",
            ),
            Layer(
                weight="attention.k_proj.weight",
                bias="attention.k_proj.bias",
            ),
            Layer(
                weight="attention.v_proj.weight",
                bias="attention.v_proj.bias",
            ),
        ]

    @staticmethod
    def attn_out():
        return [
            Layer(
                weight="attention.out_proj.weight",
                bias="attention.out_proj.bias",
                replace=AllReduceLinear,
            ),
        ]

    @staticmethod
    def mlp_in():
        return [
            Layer(
                weight="feed_forward.intermediate_dense.weight",
                bias="feed_forward.intermediate_dense.bias",
            ),
        ]

    @staticmethod
    def mlp_out():
        return [
            Layer(
                weight="feed_forward.output_dense.weight",
                bias="feed_forward.output_dense.bias",
                replace=AllReduceLinear,
            ),
        ]

    @staticmethod
    def original_layer_class():
        return Wav2Vec2EncoderLayer
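A hedged usage sketch for a policy like this one: parallelformers is driven through a single parallelize() entry point, and custom policy classes can be supplied for models the library does not cover out of the box. The custom_policies keyword and the checkpoint name are assumptions that may differ across library versions, and multiple GPUs are required.

from transformers import Wav2Vec2Model

from parallelformers import parallelize

# Assumed checkpoint; any Wav2Vec2 model whose layers carry the names the
# policy references (attention.q_proj, feed_forward.intermediate_dense, ...)
# should work the same way.
model = Wav2Vec2Model.from_pretrained("facebook/wav2vec2-base-960h")

# Shard the encoder layers across two GPUs, halving embed_dim and num_heads
# per device as replace_arguments() above specifies.
parallelize(model, num_gpus=2, fp16=True, custom_policies=[Wav2VecPolicy])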
namespace CGAL {

/*!
\ingroup nt_cgal

An object of the class `Lazy_exact_nt<NT>` is able to represent any real
embeddable number which `NT` is able to represent. The idea is that
`Lazy_exact_nt<NT>` works exactly like `NT`, except that it is expected to be
faster because it tries to only compute an approximation of the value, and
only refers to `NT` when needed. The goal is to speed up exact computations
done by any exact but slow number type `NT`.

The function `to_double()` can be used to get a double approximation of the
represented number. Note that two subsequent calls to this function on the
same number of type `Lazy_exact_nt<NT>` might not return the same value, as
the exact representation might have been computed between the two calls, thus
refining the double approximation. If you want to avoid this behavior, you
need to first call `exact()` (losing the benefit of the laziness if done
systematically).

\tparam NT must be a model of concept `RealEmbeddable`, and at least a model
of concept `IntegralDomainWithoutDivision`.

Note that some filtering mechanism is available at the predicate level using
`Filtered_predicate` and `Filtered_kernel`.

\cgalModels `IntegralDomainWithoutDivision` same as `NT`
\cgalModels `RealEmbeddable`
\cgalModels `Fraction`, if `NT` is a `Fraction`

\cgalHeading{Example}

\code
#include <CGAL/Cartesian.h>
#include <CGAL/MP_Float.h>
#include <CGAL/Lazy_exact_nt.h>
#include <CGAL/Quotient.h>

typedef CGAL::Lazy_exact_nt<CGAL::Quotient<CGAL::MP_Float> > NT;
typedef CGAL::Cartesian<NT> K;
\endcode

*/
template< typename NT >
class Lazy_exact_nt {
public:

/// \name Creation
/// @{

/*!
introduces an uninitialized variable `m`.
*/
Lazy_exact_nt();

/*!
introduces the value \a i, of any built-in arithmetic type (`int`, `double`,
etc) (works only if `NT` has a constructor from this type too).
*/
Lazy_exact_nt(BuiltIn i);

/*!
introduces the value `n`.
*/
Lazy_exact_nt(NT n);

/*!
introduces the value `n`. `NT1` needs to be convertible to `NT` (and this
conversion will only be done if necessary).
*/
template <class NT1>
Lazy_exact_nt(Lazy_exact_nt<NT1> n);

/// @}

/// \name Operations
/// @{

/*!
returns the corresponding NT value.
*/
NT exact();

/*!
returns an interval containing the exact value.
*/
Interval_nt<false> approx();

/*!
returns an interval containing the exact value.
*/
Interval_nt<true> interval();

/*!
specifies the relative precision that `to_double()` has to fulfill. The
relative precision is thread local, and the default value is \f$ 10^{-5}\f$.
\pre `d>0` and `d<1`.
*/
static void set_relative_precision_of_to_double(double d);

/*!
returns the relative precision that `to_double()` currently fulfills.
*/
static double get_relative_precision_of_to_double();

/// @}

}; /* end Lazy_exact_nt */

/*!
writes `m` to ostream `out` in an interval format.
\relates Lazy_exact_nt
*/
std::ostream& operator<<(std::ostream& out, const Lazy_exact_nt<NT>& m);

/*!
reads a `NT` from `in`, then converts it to a `Lazy_exact_nt<NT>`.
\relates Lazy_exact_nt
*/
std::istream& operator>>(std::istream& in, Lazy_exact_nt<NT>& m);

} /* end namespace CGAL */
/* Copyright 2016 <NAME>, NVIDIA Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <atomic>
#include <condition_variable>
#include <deque>
#include <mutex>

#include "scanner/util/blockingconcurrentqueue.h"

namespace scanner {

using namespace moodycamel;

template <typename T>
class Queue : public BlockingConcurrentQueue<T> {
 public:
  Queue(size_t size = 8) : BlockingConcurrentQueue<T>(size) {}

  inline void clear() {
    T t;
    while (BlockingConcurrentQueue<T>::try_dequeue(t)) {
    }
  }

  inline size_t size() {
    return BlockingConcurrentQueue<T>::size_approx();
  }

  inline void push(T item) {
    bool success = BlockingConcurrentQueue<T>::enqueue(item);
    LOG_IF(FATAL, !success) << "Queue push failed";
  }

  inline void pop(T& item) {
    BlockingConcurrentQueue<T>::wait_dequeue(item);
  }
};

}
437
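A hedged usage sketch for the `scanner::Queue` wrapper above. The header path is an assumption from the repository layout, and glog must be linked since `push` uses `LOG_IF`:

#include <thread>
#include <iostream>
#include "scanner/util/queue.h"  // assumed header name for the file above

int main() {
  scanner::Queue<int> q(16);
  std::thread producer([&q] {
    for (int i = 0; i < 4; ++i)
      q.push(i);                 // fatal log if the enqueue fails
  });
  int v;
  for (int i = 0; i < 4; ++i) {
    q.pop(v);                    // blocks until an item is available
    std::cout << v << "\n";
  }
  producer.join();
  return 0;
}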
1,557
#include "bootstrap.h" #include <stdint.h> void early_init() {} extern uint64_t heap_head; extern void* initial_stack_start(); void kernel_environ_init() { heap_head = (uint64_t) initial_stack_start(); } extern int main(uint64_t argc, const char** argv); int start_init_process (uint64_t argc, const char** argv) { return main(argc, argv); }
134
2,728
<filename>sdk/recoveryservices/azure-mgmt-recoveryservicessiterecovery/azure/mgmt/recoveryservicessiterecovery/operations/_replication_fabrics_operations.py<gh_stars>1000+ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] class ReplicationFabricsOperations(object): """ReplicationFabricsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.recoveryservicessiterecovery.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def list( self, **kwargs # type: Any ): # type: (...) -> Iterable["_models.FabricCollection"] """Gets the list of ASR fabrics. Gets a list of the Azure Site Recovery fabrics in the vault. 
:keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either FabricCollection or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.recoveryservicessiterecovery.models.FabricCollection] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FabricCollection"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-06-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('FabricCollection', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics'} # type: ignore def get( self, fabric_name, # type: str filter=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> "_models.Fabric" """Gets the details of an ASR fabric. Gets the details of an Azure Site Recovery fabric. :param fabric_name: Fabric name. :type fabric_name: str :param filter: OData filter options. 
:type filter: str :keyword callable cls: A custom type or function that will be passed the direct response :return: Fabric, or the result of cls(response) :rtype: ~azure.mgmt.recoveryservicessiterecovery.models.Fabric :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.Fabric"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-06-01" accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if filter is not None: query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('Fabric', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}'} # type: ignore def _create_initial( self, fabric_name, # type: str input, # type: "_models.FabricCreationInput" **kwargs # type: Any ): # type: (...) 
-> Optional["_models.Fabric"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Fabric"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-06-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._create_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(input, 'FabricCreationInput') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('Fabric', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}'} # type: ignore def begin_create( self, fabric_name, # type: str input, # type: "_models.FabricCreationInput" **kwargs # type: Any ): # type: (...) -> LROPoller["_models.Fabric"] """Creates an Azure Site Recovery fabric. The operation to create an Azure Site Recovery fabric (for e.g. Hyper-V site). :param fabric_name: Name of the ASR fabric. :type fabric_name: str :param input: Fabric creation input. :type input: ~azure.mgmt.recoveryservicessiterecovery.models.FabricCreationInput :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either Fabric or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.recoveryservicessiterecovery.models.Fabric] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.Fabric"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._create_initial( fabric_name=fabric_name, input=input, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('Fabric', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}'} # type: ignore def _purge_initial( self, fabric_name, # type: str **kwargs # type: Any ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-06-01" # Construct URL url = self._purge_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _purge_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}'} # type: ignore def begin_purge( self, fabric_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller[None] """Purges the site. The operation to purge(force delete) an Azure Site Recovery fabric. :param fabric_name: ASR fabric to purge. :type fabric_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._purge_initial( fabric_name=fabric_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_purge.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}'} # type: ignore def _check_consistency_initial( self, fabric_name, # type: str **kwargs # type: Any ): # type: (...) 
-> Optional["_models.Fabric"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Fabric"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-06-01" accept = "application/json" # Construct URL url = self._check_consistency_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('Fabric', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _check_consistency_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}/checkConsistency'} # type: ignore def begin_check_consistency( self, fabric_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller["_models.Fabric"] """Checks the consistency of the ASR fabric. The operation to perform a consistency check on the fabric. :param fabric_name: Fabric name. :type fabric_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either Fabric or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.recoveryservicessiterecovery.models.Fabric] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.Fabric"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._check_consistency_initial( fabric_name=fabric_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('Fabric', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_check_consistency.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}/checkConsistency'} # type: ignore def _migrate_to_aad_initial( self, fabric_name, # type: str **kwargs # type: Any ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-06-01" # Construct URL url = self._migrate_to_aad_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _migrate_to_aad_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}/migratetoaad'} # type: ignore def begin_migrate_to_aad( self, fabric_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller[None] """Migrates the site to AAD. The operation to migrate an Azure Site Recovery fabric to AAD. :param fabric_name: ASR fabric to migrate. :type fabric_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._migrate_to_aad_initial( fabric_name=fabric_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_migrate_to_aad.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}/migratetoaad'} # type: ignore def _reassociate_gateway_initial( self, fabric_name, # type: str failover_process_server_request, # type: "_models.FailoverProcessServerRequest" **kwargs # type: Any ): # type: (...) 
-> Optional["_models.Fabric"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Fabric"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-06-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._reassociate_gateway_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(failover_process_server_request, 'FailoverProcessServerRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('Fabric', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _reassociate_gateway_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}/reassociateGateway'} # type: ignore def begin_reassociate_gateway( self, fabric_name, # type: str failover_process_server_request, # type: "_models.FailoverProcessServerRequest" **kwargs # type: Any ): # type: (...) -> LROPoller["_models.Fabric"] """Perform failover of the process server. The operation to move replications from a process server to another process server. :param fabric_name: The name of the fabric containing the process server. :type fabric_name: str :param failover_process_server_request: The input to the failover process server operation. :type failover_process_server_request: ~azure.mgmt.recoveryservicessiterecovery.models.FailoverProcessServerRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either Fabric or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.recoveryservicessiterecovery.models.Fabric] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.Fabric"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._reassociate_gateway_initial( fabric_name=fabric_name, failover_process_server_request=failover_process_server_request, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('Fabric', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_reassociate_gateway.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}/reassociateGateway'} # type: ignore def _delete_initial( self, fabric_name, # type: str **kwargs # type: Any ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-06-01" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}/remove'} # type: ignore def begin_delete( self, fabric_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller[None] """Deletes the site. The operation to delete or remove an Azure Site Recovery fabric. :param fabric_name: ASR fabric to delete. :type fabric_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._delete_initial( fabric_name=fabric_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}/remove'} # type: ignore def _renew_certificate_initial( self, fabric_name, # type: str renew_certificate, # type: "_models.RenewCertificateInput" **kwargs # type: Any ): # type: (...) 
-> Optional["_models.Fabric"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Fabric"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-06-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._renew_certificate_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(renew_certificate, 'RenewCertificateInput') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('Fabric', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _renew_certificate_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}/renewCertificate'} # type: ignore def begin_renew_certificate( self, fabric_name, # type: str renew_certificate, # type: "_models.RenewCertificateInput" **kwargs # type: Any ): # type: (...) -> LROPoller["_models.Fabric"] """Renews certificate for the fabric. Renews the connection certificate for the ASR replication fabric. :param fabric_name: fabric name to renew certs for. :type fabric_name: str :param renew_certificate: Renew certificate input. :type renew_certificate: ~azure.mgmt.recoveryservicessiterecovery.models.RenewCertificateInput :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either Fabric or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.recoveryservicessiterecovery.models.Fabric] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.Fabric"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._renew_certificate_initial( fabric_name=fabric_name, renew_certificate=renew_certificate, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('Fabric', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'), 'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_renew_certificate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{resourceName}/replicationFabrics/{fabricName}/renewCertificate'} # type: ignore
20,719
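A hedged usage sketch for the operations class above. These operations are normally reached through the generated `SiteRecoveryManagementClient` rather than instantiated directly; the credential and resource identifiers below are placeholders, and the client constructor arguments are assumed from the `self._config` fields the operations reference:

from azure.identity import DefaultAzureCredential
from azure.mgmt.recoveryservicessiterecovery import SiteRecoveryManagementClient

client = SiteRecoveryManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group>",
    resource_name="<recovery-vault>",
)

# Paged listing: ItemPaged does the next_link chasing shown in list() above.
for fabric in client.replication_fabrics.list():
    print(fabric.name)

# Simple GET of one fabric.
fabric = client.replication_fabrics.get("<fabric-name>")

# Long-running operations return an LROPoller; result() blocks until done.
poller = client.replication_fabrics.begin_check_consistency("<fabric-name>")
checked = poller.result()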
315
<reponame>FredrikBlomgren/aff3ct #include "Tools/Exception/exception.hpp" #include "Tools/Documentation/documentation.h" #include "Module/Encoder/Polar/Encoder_polar.hpp" #include "Module/Encoder/Polar/Encoder_polar_sys.hpp" #include "Factory/Module/Encoder/Polar/Encoder_polar.hpp" using namespace aff3ct; using namespace aff3ct::factory; const std::string aff3ct::factory::Encoder_polar_name = "Encoder Polar"; const std::string aff3ct::factory::Encoder_polar_prefix = "enc"; Encoder_polar ::Encoder_polar(const std::string &prefix) : Encoder(Encoder_polar_name, prefix) { this->type = "POLAR"; } Encoder_polar* Encoder_polar ::clone() const { return new Encoder_polar(*this); } void Encoder_polar ::get_description(cli::Argument_map_info &args) const { Encoder::get_description(args); auto p = this->get_prefix(); const std::string class_name = "factory::Encoder_polar::"; cli::add_options(args.at({p+"-type"}), 0, "POLAR"); tools::add_arg(args, p, class_name+"p+no-sys", cli::None()); } void Encoder_polar ::store(const cli::Argument_map_value &vals) { Encoder::store(vals); } void Encoder_polar ::get_headers(std::map<std::string,tools::header_list>& headers, const bool full) const { Encoder::get_headers(headers, full); } template <typename B> module::Encoder_polar<B>* Encoder_polar ::build(const std::vector<bool> &frozen_bits) const { if (this->type == "POLAR" && !this->systematic) return new module::Encoder_polar <B>(this->K, this->N_cw, frozen_bits); if (this->type == "POLAR" && this->systematic) return new module::Encoder_polar_sys<B>(this->K, this->N_cw, frozen_bits); throw tools::cannot_allocate(__FILE__, __LINE__, __func__); } // ==================================================================================== explicit template instantiation #include "Tools/types.h" #ifdef AFF3CT_MULTI_PREC template aff3ct::module::Encoder_polar<B_8 >* aff3ct::factory::Encoder_polar::build<B_8 >(const std::vector<bool>&) const; template aff3ct::module::Encoder_polar<B_16>* aff3ct::factory::Encoder_polar::build<B_16>(const std::vector<bool>&) const; template aff3ct::module::Encoder_polar<B_32>* aff3ct::factory::Encoder_polar::build<B_32>(const std::vector<bool>&) const; template aff3ct::module::Encoder_polar<B_64>* aff3ct::factory::Encoder_polar::build<B_64>(const std::vector<bool>&) const; #else template aff3ct::module::Encoder_polar<B>* aff3ct::factory::Encoder_polar::build<B>(const std::vector<bool>&) const; #endif // ==================================================================================== explicit template instantiation
942
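A hedged sketch of how the factory above is typically used: fill in the parameters, then call `build()` with a frozen-bit pattern. The field names (`K`, `N_cw`, `systematic`) come from the base `factory::Encoder` parameters referenced in `build()` and are assumed public as is usual for aff3ct factory structs; the (8,4) frozen pattern is illustrative, and the chosen integer type must match one of the library's instantiated precisions (`int32_t` matches `B_32`):

#include <cstdint>
#include <vector>
#include <memory>
#include "Factory/Module/Encoder/Polar/Encoder_polar.hpp"
#include "Module/Encoder/Polar/Encoder_polar.hpp"

int main()
{
    aff3ct::factory::Encoder_polar p;
    p.K          = 4;     // number of information bits
    p.N_cw       = 8;     // codeword length
    p.systematic = false; // selects module::Encoder_polar in build()

    // true = frozen position (illustrative (8,4) polar pattern)
    std::vector<bool> frozen_bits = {true, true, true, false,
                                     true, false, false, false};

    std::unique_ptr<aff3ct::module::Encoder_polar<std::int32_t>> enc(
        p.build<std::int32_t>(frozen_bits));
    return 0;
}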
2,216
<filename>mmdet3d/models/decode_heads/__init__.py # Copyright (c) OpenMMLab. All rights reserved. from .paconv_head import PAConvHead from .pointnet2_head import PointNet2Head __all__ = ['PointNet2Head', 'PAConvHead']
80
593
/** * TLS-Attacker - A Modular Penetration Testing Framework for TLS * * Copyright 2014-2022 Ruhr University Bochum, Paderborn University, Hackmanit GmbH * * Licensed under Apache License, Version 2.0 * http://www.apache.org/licenses/LICENSE-2.0.txt */ package de.rub.nds.tlsattacker.core.constants; public enum PublicKeyType { DH("1.2.840.113549.1.3.1"), RSA("1.2.840.113549.1.1.1"), DSS("1.2.840.10040.4.1"), ECDSA("1.2.840.10045.2.1"); private String oid; PublicKeyType(String oid) { this.oid = oid; } public static PublicKeyType fromOid(String oid) { for (PublicKeyType ccaCertificateKeyType : values()) { if (ccaCertificateKeyType.getOid().equals(oid)) { return ccaCertificateKeyType; } } return null; } public String getOid() { return oid; } }
401
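A small self-contained sketch exercising the enum above; the OID used is RSA's (from the enum itself), and `fromOid` returns `null` for unknown OIDs:

import de.rub.nds.tlsattacker.core.constants.PublicKeyType;

public class PublicKeyTypeDemo {
    public static void main(String[] args) {
        PublicKeyType type = PublicKeyType.fromOid("1.2.840.113549.1.1.1");
        System.out.println(type);                          // RSA
        System.out.println(type.getOid());                 // 1.2.840.113549.1.1.1
        System.out.println(PublicKeyType.fromOid("0.0"));  // null for unknown OIDs
    }
}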
4,986
package im.zhaojun.zfile.context;

import com.alibaba.fastjson.JSON;
import im.zhaojun.zfile.exception.InvalidDriveException;
import im.zhaojun.zfile.model.entity.DriveConfig;
import im.zhaojun.zfile.model.enums.StorageTypeEnum;
import im.zhaojun.zfile.service.DriveConfigService;
import im.zhaojun.zfile.service.base.AbstractBaseFileService;
import im.zhaojun.zfile.util.SpringContextHolder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.annotation.DependsOn;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Each drive maps to one Service holding an initialized connection to its object storage.
 * This drive context caches each such Service to avoid recreating connections.
 * @author zhaojun
 */
@Component
@DependsOn("springContextHolder")
@Slf4j
public class DriveContext implements ApplicationContextAware {

    /**
     * Map<Integer, AbstractBaseFileService>
     * Map<drive ID, drive connection Service>
     */
    private static Map<Integer, AbstractBaseFileService> drivesServiceMap = new ConcurrentHashMap<>();

    @Resource
    private DriveConfigService driveConfigService;

    /**
     * On application startup, automatically initializes all drives already stored in the database.
     */
    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        List<DriveConfig> list = driveConfigService.list();
        for (DriveConfig driveConfig : list) {
            try {
                init(driveConfig.getId());
                log.info("Initialized drive on startup, drive config: {}", JSON.toJSONString(driveConfig));
            } catch (Exception e) {
                log.error("Failed to initialize drive on startup, drive config: {}", JSON.toJSONString(driveConfig), e);
            }
        }
    }

    /**
     * Initializes the Service for the given drive and adds it to the context.
     *
     * @param driveId
     *          drive ID.
     */
    public void init(Integer driveId) {
        AbstractBaseFileService baseFileService = getBeanByDriveId(driveId);
        if (baseFileService != null) {
            if (log.isDebugEnabled()) {
                log.debug("Trying to initialize drive, driveId: {}", driveId);
            }
            baseFileService.init(driveId);
            if (log.isDebugEnabled()) {
                log.debug("Initialized drive, driveId: {}", driveId);
            }
            drivesServiceMap.put(driveId, baseFileService);
        }
    }

    /**
     * Gets the Service for the given drive.
     *
     * @param driveId
     *          drive ID
     *
     * @return the Service for this drive
     */
    public AbstractBaseFileService get(Integer driveId) {
        AbstractBaseFileService abstractBaseFileService = drivesServiceMap.get(driveId);
        if (abstractBaseFileService == null) {
            throw new InvalidDriveException("This drive does not exist or failed to initialize; please check the admin configuration");
        }
        return abstractBaseFileService;
    }

    /**
     * Destroys the Service for the given drive.
     *
     * @param driveId
     *          drive ID
     */
    public void destroy(Integer driveId) {
        if (log.isDebugEnabled()) {
            log.debug("Removing drive from context, driveId: {}", driveId);
        }
        drivesServiceMap.remove(driveId);
    }

    /**
     * Gets the (not yet initialized) Service matching the given drive's storage type.
     *
     * @param driveId
     *          drive ID
     *
     * @return the uninitialized Service for this drive
     */
    private AbstractBaseFileService getBeanByDriveId(Integer driveId) {
        StorageTypeEnum storageTypeEnum = driveConfigService.findStorageTypeById(driveId);
        Map<String, AbstractBaseFileService> beansOfType = SpringContextHolder.getBeansOfType(AbstractBaseFileService.class);
        for (AbstractBaseFileService value : beansOfType.values()) {
            if (Objects.equals(value.getStorageTypeEnum(), storageTypeEnum)) {
                return SpringContextHolder.getBean(value.getClass());
            }
        }
        return null;
    }

    /**
     * Updates a drive's ID in the context.
     *
     * @param updateId
     *          the drive's previous ID
     *
     * @param newId
     *          the drive's new ID
     */
    public void updateDriveId(Integer updateId, Integer newId) {
        AbstractBaseFileService fileService = drivesServiceMap.remove(updateId);
        fileService.setDriveId(newId);
        drivesServiceMap.put(newId, fileService);
    }
}
2,332
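A hedged sketch of a typical caller of the context above: resolve the cached Service by drive ID and delegate to it. The `fileList` method name on `AbstractBaseFileService` is an assumption for illustration; `get` throws `InvalidDriveException` for unknown or failed drives, as shown above:

import im.zhaojun.zfile.context.DriveContext;
import im.zhaojun.zfile.service.base.AbstractBaseFileService;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;

@Service
public class DriveBrowseService {

    @Resource
    private DriveContext driveContext;

    public Object listRoot(Integer driveId) throws Exception {
        // Throws InvalidDriveException if the drive is unknown or failed to initialize.
        AbstractBaseFileService fileService = driveContext.get(driveId);
        return fileService.fileList("/");  // assumed method name, for illustration only
    }
}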
1,444
<gh_stars>1000+ package mage.cards.t; import java.util.UUID; import mage.abilities.Ability; import mage.abilities.effects.OneShotEffect; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.CardType; import mage.constants.Outcome; import mage.game.Game; import mage.game.turn.TurnMod; import mage.target.TargetPlayer; /** * * @author <EMAIL> */ public final class TimeWarp extends CardImpl { public TimeWarp(UUID ownerId, CardSetInfo setInfo) { super(ownerId,setInfo,new CardType[]{CardType.SORCERY},"{3}{U}{U}"); this.getSpellAbility().addTarget(new TargetPlayer()); this.getSpellAbility().addEffect(new TimeWarpEffect()); } private TimeWarp(final TimeWarp card) { super(card); } @Override public TimeWarp copy() { return new TimeWarp(this); } } class TimeWarpEffect extends OneShotEffect { public TimeWarpEffect() { super(Outcome.ExtraTurn); staticText = "Target player takes an extra turn after this one"; } public TimeWarpEffect(final TimeWarpEffect effect) { super(effect); } @Override public TimeWarpEffect copy() { return new TimeWarpEffect(this); } @Override public boolean apply(Game game, Ability source) { game.getState().getTurnMods().add(new TurnMod(source.getFirstTarget(), false)); return true; } }
532
1,059
import os import sys from moviepy.editor import VideoFileClip def convert_video_to_audio_moviepy(video_file, output_ext="mp3"): """Converts video to audio using MoviePy library that uses `ffmpeg` under the hood""" filename, ext = os.path.splitext(video_file) clip = VideoFileClip(video_file) clip.audio.write_audiofile(f"{filename}.{output_ext}") if __name__ == "__main__": vf = sys.argv[1] convert_video_to_audio_moviepy(vf)
173
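A usage sketch for the helper above (file names and the module name are placeholders; moviepy and ffmpeg must be installed). Any audio extension ffmpeg can encode works as `output_ext`:

from convert_video import convert_video_to_audio_moviepy  # assumed module name

convert_video_to_audio_moviepy("talk.mp4")                     # writes talk.mp3
convert_video_to_audio_moviepy("talk.mp4", output_ext="wav")   # writes talk.wav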
327
/* <NAME> `gentilkiwi` https://blog.gentilkiwi.com <EMAIL> Licence : https://creativecommons.org/licenses/by/4.0/ */ #pragma once #include "globals.h" typedef NTSTATUS (* PKKLL_M_MODULE_CALLBACK) (SIZE_T szBufferIn, PVOID bufferIn, PKIWI_BUFFER outBuffer, PAUX_MODULE_EXTENDED_INFO pModule, PVOID pvArg, BOOLEAN * mustContinue); typedef struct _KKLL_M_MODULE_FROM_ADDR { BOOLEAN isFound; ULONG_PTR addr; } KKLL_M_MODULE_FROM_ADDR, *PKKLL_M_MODULE_FROM_ADDR; typedef struct _KKLL_M_MODULE_BASIC_INFOS { PUCHAR addr; SIZE_T size; } KKLL_M_MODULE_BASIC_INFOS, *PKKLL_M_MODULE_BASIC_INFOS; NTSTATUS kkll_m_modules_enum(SIZE_T szBufferIn, PVOID bufferIn, PKIWI_BUFFER outBuffer, PKKLL_M_MODULE_CALLBACK callback, PVOID pvArg); NTSTATUS kkll_m_modules_list_callback(SIZE_T szBufferIn, PVOID bufferIn, PKIWI_BUFFER outBuffer, PAUX_MODULE_EXTENDED_INFO pModule, PVOID pvArg, BOOLEAN * mustContinue); NTSTATUS kkll_m_modules_fromAddr(PKIWI_BUFFER outBuffer, PVOID addr); NTSTATUS kkll_m_modules_fromAddr_callback(SIZE_T szBufferIn, PVOID bufferIn, PKIWI_BUFFER outBuffer, PAUX_MODULE_EXTENDED_INFO pModule, PVOID pvArg, BOOLEAN * mustContinue); NTSTATUS kkll_m_modules_first_callback(SIZE_T szBufferIn, PVOID bufferIn, PKIWI_BUFFER outBuffer, PAUX_MODULE_EXTENDED_INFO pModule, PVOID pvArg, BOOLEAN * mustContinue);
582
452
<gh_stars>100-1000
# -*- coding: utf-8 -*-
#
# Tencent is pleased to support the open source community by making QTA available.
# Copyright (C) 2016 THL A29 Limited, a Tencent company. All rights reserved.
# Licensed under the BSD 3-Clause License (the "License"); you may not use this
# file except in compliance with the License. You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
#
'''conf test
'''

from testbase.conf import settings, SettingsMixin
from testbase.test import modify_settings
import unittest


class SettingTest(unittest.TestCase):

    def test_get(self):
        '''get settings
        '''
        self.assertEqual(settings.DEBUG, False)
        self.assertEqual(settings.get('DEBUG'), False)
        self.assertEqual(settings.get('NOT_EXIST', False), False)

    def test_set(self):
        '''set settings failed
        '''
        self.assertRaises(RuntimeError, setattr, settings, 'DEBUG', False)

    def test_contain(self):
        '''test settings in op
        '''
        self.assertEqual("DEBUG" in settings, True, "DEBUG should have been in settings")
        self.assertEqual("IMPOSSIBLE" in settings, False, "IMPOSSIBLE is unexpected in settings")

    def test_iteration(self):
        self.assertEqual("DEBUG" in list(settings), True, "DEBUG should have been in list(settings)")


class Dummy(SettingsMixin):
    class Settings(object):
        DUMMY_A = 0

    def __init__(self):
        self.x = self.settings.DUMMY_A


class Dummy2(SettingsMixin):
    class Settings(object):
        B = 1


class Dummy3(SettingsMixin):
    class Settings(object):
        Dummy3_A = 4


class DummyChild0(Dummy):
    pass


class DummyChild1(Dummy):
    class Settings(object):
        DUMMYCHILD1_A = 2


class DummyChild2(Dummy):
    class Settings(object):
        DUMMY_A = 1


class DummyChild3(Dummy):
    class Settings(object):
        DUMMYCHILD3_B = -1


class SettingsMixinTest(unittest.TestCase):
    """test case for settings mixin class
    """

    def test_get(self):
        self.reset_class_settings(Dummy)
        dummy = Dummy()
        self.assertEqual(dummy.settings.DUMMY_A, 0)
        self.assertRaises(AttributeError, getattr, dummy.settings, "B")

        with modify_settings(GLOBAL_X="xxxx", DUMMY_A=100):
            self.reset_class_settings(Dummy)
            self.assertEqual(dummy.settings.GLOBAL_X, "xxxx")
            self.assertEqual(dummy.settings.DUMMY_A, 100)

    def test_set(self):
        self.reset_class_settings(Dummy)
        dummy = Dummy()
        self.assertRaises(RuntimeError, setattr, dummy.settings, "C", 2)

    def test_declare(self):
        self.assertRaises(RuntimeError, getattr, Dummy2(), "settings")
        self.assertRaises(RuntimeError, getattr, Dummy3(), "settings")
        self.assertRaises(RuntimeError, getattr, Dummy3(), "settings")

    def test_deriving(self):
        self.reset_class_settings(DummyChild0)
        child = DummyChild0()
        self.assertEqual(child.settings.DUMMY_A, 0)

        self.reset_class_settings(DummyChild1)
        child = DummyChild1()
        self.assertEqual(child.settings.DUMMY_A, 2)
        self.assertEqual(child.settings.DUMMYCHILD1_A, 2)

        with modify_settings(DUMMY_A=3):
            self.reset_class_settings(Dummy)
            dummy = Dummy()
            self.assertEqual(dummy.settings.DUMMY_A, 3)

            self.reset_class_settings(DummyChild1)
            child = DummyChild1()
            self.assertEqual(child.settings.DUMMY_A, 2)

        with modify_settings(DUMMYCHILD1_A=4):
            self.reset_class_settings(DummyChild1)
            child = DummyChild1()
            self.assertEqual(child.settings.DUMMY_A, 4)
            self.assertEqual(child.settings.DUMMYCHILD1_A, 4)

        self.assertRaises(RuntimeError, DummyChild2)

        child = DummyChild3()
        self.assertEqual(child.settings.DUMMYCHILD3_B, -1)
        self.assertRaises(AttributeError, getattr, child.settings, "DUMMYCHILD3_A")

    def reset_class_settings(self, cls):
        settings_key = "_%s_settings" % cls.__name__
        if hasattr(cls, settings_key):
            delattr(cls, settings_key)


if __name__ == "__main__":
    unittest.main(defaultTest="SettingsMixinTest.test_deriving")
1,902
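The tests above pin down the contract of testbase's SettingsMixin: settings are declared in a nested Settings class, each key must carry the declaring class's upper-cased name as a prefix, subclass values shadow base-class values, and assignment is rejected at runtime. A minimal sketch of that contract in plain Python follows — illustrative only; all names here are invented, the checks run at access time rather than at instantiation, and the real mixin's remapping of subclass-prefixed keys onto base-class settings is omitted.

class ReadOnlySettings(object):
    """Attribute bag that, like testbase's settings, rejects assignment."""

    def __init__(self, values):
        object.__setattr__(self, "_values", dict(values))

    def __getattr__(self, name):
        try:
            return self._values[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        raise RuntimeError("settings are read-only")


class MiniSettingsMixin(object):
    @property
    def settings(self):
        values = {}
        for klass in reversed(type(self).__mro__):  # bases first, so subclasses win
            inner = vars(klass).get("Settings")
            if inner is None:
                continue
            prefix = klass.__name__.upper() + "_"
            for key, value in vars(inner).items():
                if key.startswith("_"):
                    continue  # skip __doc__, __module__ and friends
                if not key.startswith(prefix):
                    raise RuntimeError("%s must be prefixed with %s" % (key, prefix))
                values[key] = value
        return ReadOnlySettings(values)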
980
<filename>src/main/proj/src/org/jcodec/containers/mkv/muxer/MKVMuxerTrack.java<gh_stars>100-1000
package org.jcodec.containers.mkv.muxer;

import static org.jcodec.containers.mkv.MKVType.createByType;
import static org.jcodec.containers.mkv.boxes.MkvBlock.anyFrame;

import java.io.IOException;

import org.jcodec.common.MuxerTrack;
import org.jcodec.common.VideoCodecMeta;
import org.jcodec.common.io.SeekableByteChannel;
import org.jcodec.common.model.Packet;
import org.jcodec.common.model.Rational;
import org.jcodec.containers.mkv.CuesFactory;
import org.jcodec.containers.mkv.MKVType;
import org.jcodec.containers.mkv.boxes.EbmlMaster;
import org.jcodec.containers.mkv.boxes.MkvBlock;

/**
 * This class is part of JCodec ( www.jcodec.org ) This software is distributed
 * under FreeBSD License
 *
 * @author The JCodec project
 *
 */
public class MKVMuxerTrack implements MuxerTrack {

    public static enum MKVMuxerTrackType {
        VIDEO, AUDIO
    };

    public MKVMuxerTrackType type;
    public VideoCodecMeta videoMeta;
    public String codecId;
    public int trackNo;
    public long trackStart;
    public EbmlMaster firstCluster;
    public MkvBlock lastFrame;

    private int frameDuration;
    private Rational frameRate;
    private MkvBlock clusterHeadFrame;
    private EbmlMaster currentCluster;

    final SeekableByteChannel os;
    final CuesFactory cf;

    public MKVMuxerTrack(SeekableByteChannel os, CuesFactory cf) {
        this.type = MKVMuxerTrackType.VIDEO;
        this.os = os;
        this.cf = cf;
    }

    static final int DEFAULT_TIMESCALE = 1000000000; // NANOSECOND
    static final int NANOSECONDS_IN_A_MILISECOND = 1000000;
    static final int MULTIPLIER = DEFAULT_TIMESCALE / NANOSECONDS_IN_A_MILISECOND;

    public int getTimescale() {
        return NANOSECONDS_IN_A_MILISECOND;
    }

    public Rational getFrameRate() {
        return frameRate;
    }

    @Override
    public void addFrame(Packet outPacket) {
        MkvBlock frame = anyFrame(trackNo, 0, outPacket.getData(), outPacket.isKeyFrame());
        if (frameRate == null || frameRate.den != outPacket.duration) {
            frameRate = new Rational((int) outPacket.duration, outPacket.timescale);
        }
        frame.absoluteTimecode = outPacket.getPts();
        lastFrame = frame;
        // Creates one cluster for each keyframe. Before starting a new cluster we
        // write the previous one to the disk.
        if (outPacket.isKeyFrame()) {
            muxCurrentCluster();
            currentCluster = singleBlockedCluster(frame);
            if (firstCluster == null) {
                firstCluster = currentCluster;
            }
        } else {
            if (currentCluster == null) {
                throw new RuntimeException("The first frame must be a keyframe in an MKV file");
            }
            frame.timecode = (int) (frame.absoluteTimecode - clusterHeadFrame.absoluteTimecode);
            currentCluster.add(frame);
        }
    }

    public long getTrackNo() {
        return trackNo;
    }

    private void muxCurrentCluster() {
        if (currentCluster != null) {
            try {
                currentCluster.mux(os);
                cf.add(CuesFactory.CuePointMock.make(currentCluster));
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }

    public void finish() {
        muxCurrentCluster();
    }

    private EbmlMaster singleBlockedCluster(MkvBlock aBlock) {
        EbmlMaster mkvCluster = createByType(MKVType.Cluster);
        MKVMuxer.createLong(mkvCluster, MKVType.Timecode, aBlock.absoluteTimecode - aBlock.timecode);
        mkvCluster.add(aBlock);
        clusterHeadFrame = aBlock;
        return mkvCluster;
    }
}
1,595
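The clustering logic in addFrame above is simple once isolated from the EBML plumbing: every keyframe opens a new cluster stamped with its absolute timecode, and every non-key frame is stored with a timecode relative to the cluster head. A language-neutral restatement of just that control flow (illustrative only, no JCodec types):

def group_into_clusters(packets):
    """Group (pts, is_key, data) packets into MKV-style clusters.

    Mirrors MKVMuxerTrack.addFrame: a keyframe starts a cluster at its
    absolute timecode; later frames store pts relative to the cluster head.
    """
    clusters = []
    for pts, is_key, data in packets:
        if is_key:
            clusters.append({"timecode": pts, "blocks": [(0, data)]})
        else:
            if not clusters:
                raise ValueError("the first frame must be a keyframe")
            head = clusters[-1]
            head["blocks"].append((pts - head["timecode"], data))
    return clusters

# Keyframes at pts 0 and 90 open clusters; P-frames become relative offsets.
clusters = group_into_clusters(
    [(0, True, b"I"), (30, False, b"P"), (60, False, b"P"), (90, True, b"I")]
)
assert [c["timecode"] for c in clusters] == [0, 90]
assert clusters[0]["blocks"] == [(0, b"I"), (30, b"P"), (60, b"P")]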
471
package io.tweag.sparkle.function;

import org.apache.spark.api.java.function.*;

import io.tweag.sparkle.Sparkle;

public class HaskellVoidFunction<T> implements VoidFunction<T> {
    private final byte[] clos;

    public HaskellVoidFunction(final byte[] clos) {
        this.clos = clos;
    }

    public void call(T v1) throws Exception {
        Sparkle.apply(clos, v1);
    }
}
141
2,160
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import absolute_import import functools import os import re import shutil import subprocess from argparse import ArgumentParser, _SubParsersAction from contextlib import contextmanager from textwrap import dedent from threading import Thread from pex import dist_metadata, pex_warnings from pex.commands.command import Error, JsonMixin, Ok, OutputMixin, Result from pex.common import ( DETERMINISTIC_DATETIME_TIMESTAMP, pluralize, safe_mkdir, safe_mkdtemp, safe_open, ) from pex.compatibility import Queue from pex.environment import PEXEnvironment from pex.interpreter import PythonIdentity, PythonInterpreter, spawn_python_job from pex.jobs import Retain, SpawnedJob, execute_parallel from pex.pex import PEX from pex.third_party.pkg_resources import Distribution from pex.tools.command import PEXCommand from pex.typing import TYPE_CHECKING, cast if TYPE_CHECKING: import attr # vendor:skip from typing import Callable, IO, Iterable, Iterator, Text, Tuple RepositoryFunc = Callable[["Repository", PEX], Result] else: from pex.third_party import attr @attr.s(frozen=True) class FindLinksRepo(object): @classmethod def serve( cls, interpreter, # type: PythonInterpreter port, # type: int directory, # type: str ): # type: (...) -> FindLinksRepo http_server_module = "SimpleHTTPServer" if interpreter.version[0] == 2 else "http.server" cmd, http_server_process = interpreter.open_process( # N.B.: Running Python in unbuffered mode here is critical to being able to read stdout. args=["-u", "-m", http_server_module, str(port)], cwd=directory, stdout=subprocess.PIPE, ) real_port = Queue() # type: Queue[int] def read_data(): try: data = http_server_process.stdout.readline() match = re.match(br"^Serving HTTP on [^\s]+ port (?P<port>\d+)[^\d]", data) real_port.put(int(match.group("port"))) finally: real_port.task_done() reader = Thread(target=read_data) reader.daemon = True reader.start() real_port.join() reader.join() return cls(cmd=cmd, port=real_port.get(), server_process=http_server_process) cmd = attr.ib() # type: Iterable[str] port = attr.ib() # type: int _server_process = attr.ib() # type: subprocess.Popen @property def pid(self): # type: () -> int return self._server_process.pid def join(self): # type: () -> int return self._server_process.wait() def kill(self): # type: () -> None self._server_process.kill() class Repository(JsonMixin, OutputMixin, PEXCommand): """Interact with the Python distribution repository contained in a PEX file.""" @classmethod def _add_info_arguments(cls, subparsers): # type: (_SubParsersAction) -> ArgumentParser info_parser = subparsers.add_parser( name="info", help="Print information about the distributions in a PEX file." ) info_parser.add_argument( "-v", "--verbose", action="store_true", help="Print the distributions requirements in addition to its name version and path.", ) cls.add_json_options(info_parser, entity="verbose output") cls.register_global_arguments(info_parser, include_verbosity=False) return info_parser @classmethod def _add_extract_arguments(cls, subparsers): # type: (_SubParsersAction) -> ArgumentParser extract_parser = subparsers.add_parser( name="extract", help="Extract all distributions from a PEX file." 
) extract_parser.add_argument( "-f", "--dest-dir", "--find-links", "--repo", metavar="PATH", help="The path to extract distribution as wheels to.", ) extract_parser.add_argument( "-D", "--sources", action="store_true", help="Also extract a wheel for the PEX file sources.", ) extract_parser.add_argument( "--use-system-time", dest="use_system_time", default=False, action="store_true", help=( "Use the current system time to generate timestamps for the extracted " "distributions. Otherwise, Pex will use midnight on January 1, 1980. By using " "system time, the extracted distributions will not be reproducible, meaning that " "if you were to re-run extraction against the same PEX file then the newly " "extracted distributions would not be byte-for-byte identical distributions " "extracted in prior runs." ), ) extract_parser.add_argument( "--serve", action="store_true", help="Serve the --find-links repo.", ) extract_parser.add_argument( "--port", type=int, default=0, metavar="PORT", help="The port to serve the --find-links repo on.", ) extract_parser.add_argument( "--pid-file", metavar="PATH", help="The path of a file to write the <pid>:<port> of the find links server to.", ) cls.register_global_arguments(extract_parser) return extract_parser @classmethod def add_arguments(cls, parser): # type: (ArgumentParser) -> None cls.add_output_option(parser, entity="distribution information") parser.set_defaults(repository_func=functools.partial(cls.show_help, parser)) subparsers = parser.add_subparsers( description=( "A PEX distribution repository can be operated on using any of the following " "subcommands." ) ) cls._add_info_arguments(subparsers).set_defaults(repository_func=cls._info) cls._add_extract_arguments(subparsers).set_defaults(repository_func=cls._extract) def run(self, pex): # type: (PEX) -> Result repository_func = cast("RepositoryFunc", self.options.repository_func) return repository_func(self, pex) @contextmanager def _distributions_output(self, pex): # type: (PEX) -> Iterator[Tuple[Iterable[Distribution], IO]] with self.output(self.options) as out: yield tuple(pex.resolve()), out def _info(self, pex): # type: (PEX) -> Result with self._distributions_output(pex) as (distributions, output): for distribution in distributions: if self.options.verbose: requires_python = dist_metadata.requires_python(distribution) requires_dists = list(dist_metadata.requires_dists(distribution)) self.dump_json( self.options, dict( project_name=distribution.project_name, version=distribution.version, requires_python=str(requires_python) if requires_python else None, requires_dists=[str(dist) for dist in requires_dists], location=distribution.location, ), output, ) else: output.write( "{project_name} {version} {location}".format( project_name=distribution.project_name, version=distribution.version, location=distribution.location, ) ) output.write("\n") return Ok() def _extract(self, pex): # type: (PEX) -> Result if not self.options.serve and not self.options.dest_dir: return Error("Specify a --find-links directory to extract wheels to.") dest_dir = ( os.path.abspath(os.path.expanduser(self.options.dest_dir)) if self.options.dest_dir else safe_mkdtemp() ) safe_mkdir(dest_dir) if self.options.sources: self._extract_sdist(pex, dest_dir) def spawn_extract(distribution): # type: (Distribution) -> SpawnedJob[Text] env = os.environ.copy() if not self.options.use_system_time: # N.B.: The `SOURCE_DATE_EPOCH` env var is semi-standard magic for controlling # build tools. Wheel has supported this since 2016. 
# See: # + https://reproducible-builds.org/docs/source-date-epoch/ # + https://github.com/pypa/wheel/blob/1b879e53fed1f179897ed47e55a68bc51df188db/wheel/archive.py#L36-L39 env.update(SOURCE_DATE_EPOCH=str(int(DETERMINISTIC_DATETIME_TIMESTAMP))) job = spawn_python_job( args=["-m", "wheel", "pack", "--dest-dir", dest_dir, distribution.location], interpreter=pex.interpreter, expose=["wheel"], stdout=subprocess.PIPE, env=env, ) return SpawnedJob.stdout( job, result_func=lambda out: "{}: {}".format(distribution, out.decode()) ) with self._distributions_output(pex) as (distributions, output): errors = [] for result in execute_parallel(distributions, spawn_extract, error_handler=Retain()): if isinstance(result, tuple): distribution, error = result errors.append(distribution) output.write( "Failed to build a wheel for {distribution}: {error}\n".format( distribution=distribution, error=error ) ) else: output.write(result) if errors: return Error( "Failed to build wheels for {count} {distributions}.".format( count=len(errors), distributions=pluralize(errors, "distribution") ) ) if not self.options.serve: return Ok() repo = FindLinksRepo.serve( interpreter=pex.interpreter, port=self.options.port, directory=dest_dir ) output.write( "Serving find-links repo of {pex} via {find_links} at http://localhost:{port}\n".format( pex=os.path.normpath(pex.path()), find_links=dest_dir, port=repo.port ) ) if self.options.pid_file: with safe_open(self.options.pid_file, "w") as fp: fp.write("{}:{}".format(repo.pid, repo.port)) try: return Result(exit_code=repo.join(), message=" ".join(repo.cmd)) except KeyboardInterrupt: repo.kill() return Ok("Shut down server for find links repo at {}.".format(dest_dir)) @staticmethod def _extract_sdist( pex, # type: PEX dest_dir, # type: str ): # type: (...) -> None pex_info = pex.pex_info() chroot = safe_mkdtemp() pex_path = pex.path() src = os.path.join(chroot, "src") excludes = ["__main__.py", pex_info.PATH, pex_info.bootstrap, pex_info.internal_cache] shutil.copytree( PEXEnvironment.mount(pex_path).path, src, ignore=lambda _dir, _names: excludes ) name, _ = os.path.splitext(os.path.basename(pex_path)) version = "0.0.0+{}".format(pex_info.code_hash) zip_safe = False # Since PEX files never require code to be zip safe, assume it isn't. py_modules = [os.path.splitext(f)[0] for f in os.listdir(src) if f.endswith(".py")] packages = [ os.path.relpath(os.path.join(root, d), src).replace(os.sep, ".") for root, dirs, _ in os.walk(src) for d in dirs ] install_requires = [str(req) for req in pex_info.requirements] python_requires = None if len(pex_info.interpreter_constraints) == 1: python_requires = str( PythonIdentity.parse_requirement(pex_info.interpreter_constraints[0]).specifier ) elif pex_info.interpreter_constraints: pex_warnings.warn( "Omitting `python_requires` for {name} sdist since {pex} has multiple " "interpreter constraints:\n{interpreter_constraints}".format( name=name, pex=os.path.normpath(pex_path), interpreter_constraints="\n".join( "{index}.) 
{constraint}".format(index=index, constraint=constraint) for index, constraint in enumerate( pex_info.interpreter_constraints, start=1 ) ), ) ) entry_points = [] if pex_info.entry_point and ":" in pex_info.entry_point: entry_points = [(name, pex_info.entry_point)] with open(os.path.join(chroot, "setup.cfg"), "w") as fp: fp.write( dedent( """\ [metadata] name = {name} version = {version} [options] zip_safe = {zip_safe} {py_modules} {packages} package_dir = =src include_package_data = True {python_requires} {install_requires} [options.entry_points] {entry_points} """ ).format( name=name, version=version, zip_safe=zip_safe, py_modules=( "py_modules =\n {}".format("\n ".join(py_modules)) if py_modules else "" ), packages=( "packages = \n {}".format("\n ".join(packages)) if packages else "" ), install_requires=( "install_requires =\n {}".format("\n ".join(install_requires)) if install_requires else "" ), python_requires=( "python_requires = {}".format(python_requires) if python_requires else "" ), entry_points=( "console_scripts =\n {}".format( "\n ".join( "{} = {}".format(name, entry_point) for name, entry_point in entry_points ) ) if entry_points else "" ), ) ) with open(os.path.join(chroot, "MANIFEST.in"), "w") as fp: fp.write("recursive-include src *") with open(os.path.join(chroot, "setup.py"), "w") as fp: fp.write("import setuptools; setuptools.setup()") spawn_python_job( args=["setup.py", "sdist", "--dist-dir", dest_dir], interpreter=pex.interpreter, expose=["setuptools"], cwd=chroot, ).wait()
8,094
488
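FindLinksRepo above serves an extracted wheel directory by launching `python -m http.server` in that directory and scraping the bound port from the server's first stdout line (port 0 lets the OS choose a free one). A minimal sketch of that trick with nothing but the standard library — the helper name and the banner regex are assumptions, not pex's exact code:

import re
import subprocess
import sys

def serve_find_links(directory, port=0):
    # -u keeps stdout unbuffered so the banner line can be read immediately,
    # matching the comment in FindLinksRepo.serve above.
    process = subprocess.Popen(
        [sys.executable, "-u", "-m", "http.server", str(port)],
        cwd=directory,
        stdout=subprocess.PIPE,
    )
    banner = process.stdout.readline()  # e.g. b"Serving HTTP on 0.0.0.0 port 43210 ..."
    match = re.search(rb"port (\d+)", banner)
    return process, int(match.group(1))

# pip can then resolve against the repo with:
#   pip install --find-links http://localhost:<port>/ <project>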
/* Liao, 5/13/2009
   C allows implicit declaration of functions */
int foo()
{
  /* MSVC does not allow implicit functions: GNU specific */
  /* bar(); */
  return 0;
}
51
831
<reponame>phpc0de/idea-android
/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.tools.adtui.trackgroup;

import org.jetbrains.annotations.NotNull;

/**
 * Factory interface to instantiate {@link TrackRenderer}s. Implement this with a concrete enum of renderer types for a given domain (e.g.
 * profilers).
 *
 * @param <R> concrete renderer enum type for a specific domain.
 */
public interface TrackRendererFactory<R extends Enum> {
  /**
   * @param rendererType renderer type
   * @return renderer instance for the given renderer type.
   */
  @NotNull
  TrackRenderer<?, R> createRenderer(@NotNull R rendererType);
}
349
5,169
<filename>Specs/KMMSmileyStrength/1.0/KMMSmileyStrength.podspec.json
{
  "name": "KMMSmileyStrength",
  "version": "1.0",
  "summary": "A password strength display using smiley faces.",
  "description": "A password strength display that uses zxcvbn and custom fonts to display smiley faces that represent the strength of a password.",
  "homepage": "https://github.com/kerrmarin/KMMSmileyStrength",
  "license": "MIT",
  "authors": {
    "<NAME>": "<EMAIL>"
  },
  "social_media_url": "http://twitter.com/kerrmarin",
  "platforms": {
    "ios": "8.0"
  },
  "source": {
    "git": "https://github.com/kerrmarin/KMMSmileyStrength.git",
    "tag": "1.0"
  },
  "source_files": "KMMPasswordStrength/KMMPasswordStrengthView/*.{h,m}",
  "resources": "KMMPasswordStrength/KMMPasswordStrengthView/Assets/flaticon.ttf",
  "requires_arc": true,
  "dependencies": {
    "zxcvbn-ios": [
      "~> 1.0.1"
    ]
  }
}
362
619
<gh_stars>100-1000
/*
 * Author: <NAME> <<EMAIL>>
 * Copyright (c) 2017 Intel Corporation.
 *
 * This program and the accompanying materials are made available under the
 * terms of the MIT License which is available at
 * https://opensource.org/licenses/MIT.
 *
 * SPDX-License-Identifier: MIT
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include "abp.h"
#include "upm_utilities.h"

int main()
{
    abp_context dev = abp_init(0, ABP_DEFAULT_ADDRESS);
    if (dev == NULL) {
        printf("Unable to initialize sensor\n");
        return 0;
    }

    while (1) {
        abp_update(dev);
        printf("Retrieved pressure value: %f\n", abp_get_pressure(dev));
        printf("Retrieved temperature value: %f\n", abp_get_temperature(dev));
        upm_delay(1);
    }

    return 0;
}
347
1,338
/*
 * Copyright 1999, Be Incorporated. All Rights Reserved.
 * This file may be used under the terms of the Be Sample Code License.
 *
 */
#ifndef _CLOCK_WINDOW_H
#define _CLOCK_WINDOW_H

#include <Window.h>

class TOnscreenView;

class TClockWindow : public BWindow {
public:
	TClockWindow(BRect rect, const char* name);
	virtual ~TClockWindow();

	virtual bool QuitRequested();

private:
	void _InitWindow();

private:
	TOnscreenView *fOnScreenView;
};

#endif // _CLOCK_WINDOW_H
185
502
<reponame>vscoder-mas/wlplayer
//
// Created by ywl on 2017-12-17.
//

#ifndef WLPLAYER_WLSTATAUS_H
#define WLPLAYER_WLSTATAUS_H

#define WL_THREAD_MAIN 1
#define WL_THREAD_CHILD 2
#define WL_FFMPEG_CAN_NOT_OPEN_URL 3
#define WL_FFMPEG_CAN_NOT_FIND_STREAMS 4
// The original header defined WL_FFMPEG_CAN_NOT_OPEN_URL a second time with the
// value 5, a conflicting macro redefinition; the second constant is given a
// distinct, hypothetical name here so the two status codes stay usable.
#define WL_FFMPEG_CAN_NOT_OPEN_URL2 5

#endif // WLPLAYER_WLSTATAUS_H
166
403
package com.camunda.demo.webinar.cmmn.listener;

import java.util.List;

import org.camunda.bpm.engine.delegate.CaseExecutionListener;
import org.camunda.bpm.engine.delegate.DelegateCaseExecution;
import org.camunda.bpm.engine.impl.cmmn.entity.runtime.CaseExecutionEntity;
import org.camunda.bpm.engine.impl.cmmn.execution.CaseExecutionState;
import org.camunda.bpm.engine.runtime.CaseExecution;

public class DisableRemainingActivitiesListener implements CaseExecutionListener {

  @Override
  public void notify(DelegateCaseExecution caseExecution) throws Exception {
    System.out.println(" --- Disabling some stuff ---");

    String caseInstanceId = caseExecution.getCaseInstanceId();
    List<CaseExecution> caseExecutions = caseExecution.getProcessEngineServices().getCaseService()
        .createCaseExecutionQuery().caseInstanceId(caseInstanceId).list();

    for (CaseExecution otherCaseExecution : caseExecutions) {
      if (otherCaseExecution.isEnabled()) {
        caseExecution.getProcessEngineServices().getCaseService().disableCaseExecution(otherCaseExecution.getId());
      }
      System.out.println(otherCaseExecution.getActivityName() + " -> "
          + ((CaseExecutionEntity) otherCaseExecution).getCurrentState().toString());
    }
  }

}
390
706
<reponame>wujuguang/shellpy
import sys

from shellpython.importer import PreprocessorImporter

_importer = PreprocessorImporter()


def init():
    """Initialize shellpython by installing the import hook
    """
    if _importer not in sys.meta_path:
        sys.meta_path.insert(0, _importer)


def uninit():
    """Uninitialize shellpython by removing the import hook
    """
    sys.meta_path.remove(_importer)
142
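For context, the hook above is what makes shell-syntax `.spy` scripts importable as ordinary modules; a typical session looks like this (a sketch — `myscript` is a hypothetical module backed by a `myscript.spy` file on the import path):

import shellpython

shellpython.init()    # puts PreprocessorImporter at the front of sys.meta_path

import myscript       # resolved by the importer, which preprocesses myscript.spy

shellpython.uninit()  # removes the hook once shell-syntax imports are done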
8,194
package com.hippo.ehviewer.ui.fragment;

import android.os.Bundle;
import android.preference.Preference;
import android.preference.PreferenceFragment;
import android.text.TextUtils;

import com.hippo.ehviewer.Analytics;
import com.hippo.ehviewer.R;
import com.hippo.ehviewer.Settings;

/**
 * Created by Mo10 on 2018/2/10.
 */
public class PrivacyFragment extends PreferenceFragment
        implements Preference.OnPreferenceChangeListener {

    private static final String KEY_PATTERN_PROTECTION = "pattern_protection";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.privacy_settings);

        Preference enableAnalytics = findPreference(Settings.KEY_ENABLE_ANALYTICS);
        enableAnalytics.setOnPreferenceChangeListener(this);
    }

    @Override
    public void onResume() {
        super.onResume();

        Preference patternProtection = findPreference(KEY_PATTERN_PROTECTION);
        patternProtection.setSummary(TextUtils.isEmpty(Settings.getSecurity()) ?
                R.string.settings_privacy_pattern_protection_not_set :
                R.string.settings_privacy_pattern_protection_set);
    }

    @Override
    public boolean onPreferenceChange(Preference preference, Object newValue) {
        String key = preference.getKey();
        if (Settings.KEY_ENABLE_ANALYTICS.equals(key)) {
            if (newValue instanceof Boolean && (Boolean) newValue) {
                Analytics.start(getActivity());
            }
            return true;
        }
        return true;
    }
}
628
1,060
/* code for the "obj4" pd class. This adds a creation argument, of type "float". */ #include "m_pd.h" typedef struct obj4 { t_object x_ob; t_outlet *x_outlet; float x_value; } t_obj4; void obj4_float(t_obj4 *x, t_floatarg f) { outlet_float(x->x_outlet, x->x_value + f); } void obj4_ft1(t_obj4 *x, t_floatarg g) { x->x_value = g; } t_class *obj4_class; /* as requested by the new invocation of "class_new" below, the new routine will be called with a "float" argument. */ void *obj4_new(t_floatarg f) { t_obj4 *x = (t_obj4 *)pd_new(obj4_class); inlet_new(&x->x_ob, &x->x_ob.ob_pd, gensym("float"), gensym("ft1")); x->x_outlet = outlet_new(&x->x_ob, gensym("float")); /* just stick the argument in the object structure for later. */ x->x_value = f; return (void *)x; } void obj4_setup(void) { /* here we add "A_DEFFLOAT" to the (zero-terminated) list of arg types we declare for a new object. The value will be filled in as 0 if not given in the object box. */ obj4_class = class_new(gensym("obj4"), (t_newmethod)obj4_new, 0, sizeof(t_obj4), 0, A_DEFFLOAT, 0); class_addmethod(obj4_class, (t_method)obj4_ft1, gensym("ft1"), A_FLOAT, 0); class_addfloat(obj4_class, obj4_float); }
562
601
{ "$schema": "https://developer.microsoft.com/json-schemas/spfx-build/config.2.0.schema.json", "version": "2.0", "bundles": { "search-documents-web-part": { "components": [ { "entrypoint": "./lib/webparts/searchDocuments/SearchDocumentsWebPart.js", "manifest": "./src/webparts/searchDocuments/SearchDocumentsWebPart.manifest.json" } ] }, "library-documents-web-part": { "components": [ { "entrypoint": "./lib/webparts/libraryDocuments/LibraryDocumentsWebPart.js", "manifest": "./src/webparts/libraryDocuments/LibraryDocumentsWebPart.manifest.json" } ] } }, "externals": {}, "localizedResources": { "SearchDocumentsWebPartStrings": "lib/webparts/searchDocuments/loc/{locale}.js", "LibraryDocumentsWebPartStrings": "lib/webparts/libraryDocuments/loc/{locale}.js" } }
383
1,056
<reponame>timfel/netbeans
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.netbeans.modules.java.hints.bugs;

import com.sun.source.util.TreePath;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Types;
import org.netbeans.api.java.source.TreeUtilities;
import org.netbeans.spi.editor.hints.ErrorDescription;
import org.netbeans.spi.editor.hints.Fix;
import org.netbeans.spi.java.hints.HintContext;
import org.netbeans.spi.java.hints.JavaFix;
import org.netbeans.spi.java.hints.BooleanOption;
import org.netbeans.spi.java.hints.ConstraintVariableType;
import org.netbeans.spi.java.hints.Hint;
import org.netbeans.spi.java.hints.Hint.Options;
import org.netbeans.spi.java.hints.TriggerPattern;
import org.netbeans.spi.java.hints.TriggerPatterns;
import org.netbeans.spi.java.hints.UseOptions;
import org.netbeans.spi.java.hints.ErrorDescriptionFactory;
import org.netbeans.spi.java.hints.JavaFixUtilities;
import org.openide.util.NbBundle;

/**
 *
 * @author <NAME>
 */
public class EqualsHint {

    private static final boolean ERASURE_PREFS_DEFAULT = true; // NOI18N

    @BooleanOption(displayName = "#LBL_org.netbeans.modules.java.hints.bugs.EqualsHint.ERASURE_PREFS_KEY",
                   tooltip = "#TP_org.netbeans.modules.java.hints.bugs.EqualsHint.ERASURE_PREFS_KEY",
                   defaultValue=ERASURE_PREFS_DEFAULT)
    private static final String ERASURE_PREFS_KEY = "eguals-hint-erasure"; // NOI18N

    @Hint(displayName = "#DN_org.netbeans.modules.java.hints.bugs.EqualsHint.arrayEquals",
          description = "#DESC_org.netbeans.modules.java.hints.bugs.EqualsHint.arrayEquals",
          category="bugs",
          suppressWarnings="ArrayEquals")
    @TriggerPatterns({
        @TriggerPattern(value="$obj.equals($arr)",
                        constraints={
                            @ConstraintVariableType(variable="$obj", type="java.lang.Object"),
                            @ConstraintVariableType(variable="$arr", type="java.lang.Object[]")
                        }),
        @TriggerPattern(value="$obj.equals($arr)",
                        constraints={
                            @ConstraintVariableType(variable="$obj", type="java.lang.Object"),
                            @ConstraintVariableType(variable="$arr", type="boolean[]")
                        }),
        @TriggerPattern(value="$obj.equals($arr)",
                        constraints={
                            @ConstraintVariableType(variable="$obj", type="java.lang.Object"),
                            @ConstraintVariableType(variable="$arr", type="byte[]")
                        }),
        @TriggerPattern(value="$obj.equals($arr)",
                        constraints={
                            @ConstraintVariableType(variable="$obj", type="java.lang.Object"),
                            @ConstraintVariableType(variable="$arr", type="short[]")
                        }),
        @TriggerPattern(value="$obj.equals($arr)",
                        constraints={
                            @ConstraintVariableType(variable="$obj", type="java.lang.Object"),
                            @ConstraintVariableType(variable="$arr", type="char[]")
                        }),
        @TriggerPattern(value="$obj.equals($arr)",
                        constraints={
                            @ConstraintVariableType(variable="$obj", type="java.lang.Object"),
                            @ConstraintVariableType(variable="$arr", type="int[]")
                        }),
        @TriggerPattern(value="$obj.equals($arr)",
                        constraints={
                            @ConstraintVariableType(variable="$obj", type="java.lang.Object"),
                            @ConstraintVariableType(variable="$arr", type="long[]")
                        }),
        @TriggerPattern(value="$obj.equals($arr)",
                        constraints={
                            @ConstraintVariableType(variable="$obj", type="java.lang.Object"),
                            @ConstraintVariableType(variable="$arr", type="float[]")
                        }),
        @TriggerPattern(value="$obj.equals($arr)",
                        constraints={
                            @ConstraintVariableType(variable="$obj", type="java.lang.Object"),
                            @ConstraintVariableType(variable="$arr", type="double[]")
                        })
    })
    public static ErrorDescription arrayEquals(HintContext ctx) {
        //XXX: this check should not be needed:
        TreePath arr = ctx.getVariables().get("$arr");
        TypeMirror tm = ctx.getInfo().getTrees().getTypeMirror(arr);

        if (tm == null || tm.getKind() != TypeKind.ARRAY) {
            return null;
        }
        //XXX end

        String fixArraysDisplayName = NbBundle.getMessage(EqualsHint.class, "FIX_ReplaceWithArraysEquals");
        Fix arrays = JavaFixUtilities.rewriteFix(ctx, fixArraysDisplayName, ctx.getPath(), "java.util.Arrays.equals($obj, $arr)");
        String fixInstanceDisplayName = NbBundle.getMessage(EqualsHint.class, "FIX_ReplaceWithInstanceEquals");
        Fix instance = JavaFixUtilities.rewriteFix(ctx, fixInstanceDisplayName, ctx.getPath(), "$obj == $arr");
        String displayName = NbBundle.getMessage(EqualsHint.class, "ERR_ARRAY_EQUALS");

        return ErrorDescriptionFactory.forName(ctx, ctx.getPath(), displayName, arrays, instance);
    }

    @Hint(displayName = "#DN_org.netbeans.modules.java.hints.EqualsHint",
          description = "#DESC_org.netbeans.modules.java.hints.EqualsHint",
          id="org.netbeans.modules.java.hints.EqualsHint",
          category="bugs",
          suppressWarnings={"IncompatibleEquals", "", "EqualsBetweenInconvertibleTypes"},
          options=Options.QUERY)
    @UseOptions(ERASURE_PREFS_KEY)
    @TriggerPattern(value="$this.equals($par)",
                    constraints={
                        @ConstraintVariableType(variable="$this", type="java.lang.Object"),
                        @ConstraintVariableType(variable="$par", type="java.lang.Object")
                    })
    public static ErrorDescription incompatibleEquals(HintContext ctx) {
        TreePath ths = ctx.getVariables().get("$this");
        TreePath par = ctx.getVariables().get("$par");
        TypeMirror thsType;

        if (ths != null) {
            thsType = ctx.getInfo().getTrees().getTypeMirror(ths);
        } else {
            TreePath cls = ctx.getPath();

            while (cls != null && !TreeUtilities.CLASS_TREE_KINDS.contains(cls.getLeaf().getKind())) {
                cls = cls.getParentPath();
            }

            if (cls == null) {
                return null;
            }

            thsType = ctx.getInfo().getTrees().getTypeMirror(cls);
        }

        if (thsType == null || thsType.getKind() != TypeKind.DECLARED) {
            return null;
        }

        TypeMirror parType = ctx.getInfo().getTrees().getTypeMirror(par);

        if (parType == null || parType.getKind() != TypeKind.DECLARED) {
            return null;
        }

        if (ctx.getPreferences().getBoolean(ERASURE_PREFS_KEY, ERASURE_PREFS_DEFAULT)) {
            Types types = ctx.getInfo().getTypes();
            thsType = types.erasure(thsType);
            parType = types.erasure(parType);
        }

        boolean castable = ctx.getInfo().getTypeUtilities().isCastable(thsType, parType)
                || ctx.getInfo().getTypeUtilities().isCastable(parType, thsType);

        if (castable) {
            return null;
        }

        String displayName = NbBundle.getMessage(EqualsHint.class, "ERR_INCOMPATIBLE_EQUALS"); // NOI18N

        return ErrorDescriptionFactory.forName(ctx, ctx.getPath(), displayName);
    }
}
3,813
1,545
import doctest
import math
import pickle
import re
import warnings
from distutils.version import LooseVersion
from numbers import Number

import pytest

from pint import Quantity
from pint.compat import ndarray, np

from ..compat import (
    HAS_BABEL,
    HAS_NUMPY,
    HAS_NUMPY_ARRAY_FUNCTION,
    HAS_UNCERTAINTIES,
    NUMPY_VER,
)

_number_re = r"([-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)"
_q_re = re.compile(
    r"<Quantity\("
    + r"\s*"
    + r"(?P<magnitude>%s)" % _number_re
    + r"\s*,\s*"
    + r"'(?P<unit>.*)'"
    + r"\s*"
    + r"\)>"
)

_sq_re = re.compile(
    r"\s*" + r"(?P<magnitude>%s)" % _number_re + r"\s" + r"(?P<unit>.*)"
)

_unit_re = re.compile(r"<Unit\((.*)\)>")


class PintOutputChecker(doctest.OutputChecker):
    def check_output(self, want, got, optionflags):
        check = super().check_output(want, got, optionflags)
        if check:
            return check

        try:
            if eval(want) == eval(got):
                return True
        except Exception:
            pass

        for regex in (_q_re, _sq_re):
            try:
                parsed_got = regex.match(got.replace(r"\\", "")).groupdict()
                parsed_want = regex.match(want.replace(r"\\", "")).groupdict()

                v1 = float(parsed_got["magnitude"])
                v2 = float(parsed_want["magnitude"])
                if abs(v1 - v2) > abs(v1) / 1000:
                    return False

                if parsed_got["unit"] != parsed_want["unit"]:
                    return False

                return True
            except Exception:
                pass

        cnt = 0
        for regex in (_unit_re,):
            try:
                parsed_got, tmp = regex.subn("\1", got)
                cnt += tmp
                parsed_want, tmp = regex.subn("\1", want)
                cnt += tmp

                if parsed_got == parsed_want:
                    return True

            except Exception:
                pass

        if cnt:
            # If there was any replacement, we try again the previous methods.
            return self.check_output(parsed_want, parsed_got, optionflags)

        return False


def _get_comparable_magnitudes(first, second, msg):
    if isinstance(first, Quantity) and isinstance(second, Quantity):
        second = second.to(first)
        assert first.units == second.units, msg + " Units are not equal."
        m1, m2 = first.magnitude, second.magnitude
    elif isinstance(first, Quantity):
        assert first.dimensionless, msg + " The first is not dimensionless."
        first = first.to("")
        m1, m2 = first.magnitude, second
    elif isinstance(second, Quantity):
        assert second.dimensionless, msg + " The second is not dimensionless."
        second = second.to("")
        m1, m2 = first, second.magnitude
    else:
        m1, m2 = first, second

    return m1, m2


def assert_quantity_equal(first, second, msg=None):
    if msg is None:
        msg = "Comparing %r and %r. " % (first, second)

    m1, m2 = _get_comparable_magnitudes(first, second, msg)
    msg += " (Converted to %r and %r)" % (m1, m2)

    if isinstance(m1, ndarray) or isinstance(m2, ndarray):
        np.testing.assert_array_equal(m1, m2, err_msg=msg)
    elif not isinstance(m1, Number):
        warnings.warn(RuntimeWarning)
        return
    elif not isinstance(m2, Number):
        warnings.warn(RuntimeWarning)
        return
    elif math.isnan(m1):
        assert math.isnan(m2), msg
    elif math.isnan(m2):
        assert math.isnan(m1), msg
    else:
        assert m1 == m2, msg


def assert_quantity_almost_equal(first, second, rtol=1e-07, atol=0, msg=None):
    if msg is None:
        try:
            msg = "Comparing %r and %r. " % (first, second)
        except TypeError:
            try:
                msg = "Comparing %s and %s. " % (first, second)
            except Exception:
                msg = "Comparing"

    m1, m2 = _get_comparable_magnitudes(first, second, msg)
    msg += " (Converted to %r and %r)" % (m1, m2)

    if isinstance(m1, ndarray) or isinstance(m2, ndarray):
        np.testing.assert_allclose(m1, m2, rtol=rtol, atol=atol, err_msg=msg)
    elif not isinstance(m1, Number):
        warnings.warn(RuntimeWarning)
        return
    elif not isinstance(m2, Number):
        warnings.warn(RuntimeWarning)
        return
    elif math.isnan(m1):
        assert math.isnan(m2), msg
    elif math.isnan(m2):
        assert math.isnan(m1), msg
    elif math.isinf(m1):
        assert math.isinf(m2), msg
    elif math.isinf(m2):
        assert math.isinf(m1), msg
    else:
        # Numpy version (don't like because is not symmetric)
        # assert abs(m1 - m2) <= atol + rtol * abs(m2), msg
        assert abs(m1 - m2) <= max(rtol * max(abs(m1), abs(m2)), atol), msg


requires_numpy = pytest.mark.skipif(not HAS_NUMPY, reason="Requires NumPy")
requires_not_numpy = pytest.mark.skipif(
    HAS_NUMPY, reason="Requires NumPy not to be installed."
)


def requires_array_function_protocol():
    if not HAS_NUMPY:
        return pytest.mark.skip("Requires NumPy")
    return pytest.mark.skipif(
        not HAS_NUMPY_ARRAY_FUNCTION,
        reason="Requires __array_function__ protocol to be enabled",
    )


def requires_not_array_function_protocol():
    if not HAS_NUMPY:
        return pytest.mark.skip("Requires NumPy")
    return pytest.mark.skipif(
        HAS_NUMPY_ARRAY_FUNCTION,
        reason="Requires __array_function__ protocol to be unavailable or disabled",
    )


def requires_numpy_previous_than(version):
    if not HAS_NUMPY:
        return pytest.mark.skip("Requires NumPy")
    return pytest.mark.skipif(
        not LooseVersion(NUMPY_VER) < LooseVersion(version),
        reason="Requires NumPy < %s" % version,
    )


def requires_numpy_at_least(version):
    if not HAS_NUMPY:
        return pytest.mark.skip("Requires NumPy")
    return pytest.mark.skipif(
        not LooseVersion(NUMPY_VER) >= LooseVersion(version),
        reason="Requires NumPy >= %s" % version,
    )


requires_babel = pytest.mark.skipif(
    not HAS_BABEL, reason="Requires Babel with units support"
)
requires_not_babel = pytest.mark.skipif(
    HAS_BABEL, reason="Requires Babel not to be installed"
)
requires_uncertainties = pytest.mark.skipif(
    not HAS_UNCERTAINTIES, reason="Requires Uncertainties"
)
requires_not_uncertainties = pytest.mark.skipif(
    HAS_UNCERTAINTIES, reason="Requires Uncertainties not to be installed."
)

# Parametrization

allprotos = pytest.mark.parametrize(
    ("protocol",), [(p,) for p in range(pickle.HIGHEST_PROTOCOL + 1)]
)

check_all_bool = pytest.mark.parametrize("check_all", [False, True])
3,100
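A quick illustration of how the two assertion helpers above are meant to be used (a sketch against pint's public API; the registry construction here is an assumption and not part of the test-suite module):

import pint

ureg = pint.UnitRegistry()

a = (1.0 * ureg.meter).to(ureg.centimeter)  # 100.0 centimeter
b = 100.0000001 * ureg.centimeter

# Units are reconciled first, then magnitudes are compared within rtol.
assert_quantity_almost_equal(a, b, rtol=1e-6)

# Dimensionless quantities may be compared against plain numbers.
assert_quantity_almost_equal(1.0 * ureg.meter / ureg.meter, 1.0)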
335
{ "word": "Hub", "definitions": [ "The central part of a wheel, rotating on or with the axle, and from which the spokes radiate.", "The effective centre of an activity, region, or network.", "A central airport or other transport facility from which many services operate." ], "parts-of-speech": "Noun" }
116
2,151
// Copyright 2018 The Feed Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.android.libraries.feed.piet;

import android.content.Context;
import android.support.annotation.IntDef;
import android.support.annotation.VisibleForTesting;
import android.support.v4.widget.TextViewCompat;
import android.util.SparseArray;
import android.util.SparseIntArray;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.LinearLayout;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.List;

/** Utility class for collecting and displaying debug information. */
class DebugLogger {

  // Formatting parameters for report views:
  private static final int PADDING = 4;
  private static final int SIDE_PADDING = 16;
  private static final int DIVIDER_COLOR = 0x65000000;
  private static final int ERROR_BACKGROUND_COLOR = 0xFFEF9A9A;
  private static final int WARNING_BACKGROUND_COLOR = 0xFFFFFF66;

  @VisibleForTesting static final int ERROR_DIVIDER_WIDTH_DP = 1;

  /** What kind of error are we reporting when calling {@link #recordMessage(int, String)}. */
  @IntDef({MessageType.ERROR, MessageType.WARNING})
  @interface MessageType {
    int ERROR = 1;
    int WARNING = 2;
  }

  private final SparseArray<List<String>> messages;
  private final SparseIntArray backgroundColors;

  DebugLogger() {
    messages = new SparseArray<>();
    messages.put(MessageType.ERROR, new ArrayList<>());
    messages.put(MessageType.WARNING, new ArrayList<>());

    backgroundColors = new SparseIntArray();
    backgroundColors.put(MessageType.ERROR, ERROR_BACKGROUND_COLOR);
    backgroundColors.put(MessageType.WARNING, WARNING_BACKGROUND_COLOR);
  }

  void recordMessage(@MessageType int messageType, String error) {
    messages.get(messageType).add(error);
  }

  /** Create a {@code View} containing all the messages of a certain type; null for no messages. */
  /*@Nullable*/
  View getReportView(@MessageType int messageType, Context context) {
    List<String> messages = this.messages.get(messageType);
    if (messages.isEmpty()) {
      return null;
    }
    LinearLayout view = new LinearLayout(context);
    view.setOrientation(LinearLayout.VERTICAL);
    LayoutParams layoutParams =
        new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
    view.setLayoutParams(layoutParams);
    view.setBackgroundColor(backgroundColors.get(messageType));
    view.addView(getDivider(context));
    for (String message : messages) {
      view.addView(getMessageTextView(message, context));
    }
    return view;
  }

  @VisibleForTesting
  List<String> getMessages(@MessageType int messageType) {
    return messages.get(messageType);
  }

  private View getDivider(Context context) {
    View v = new View(context);
    LayoutParams layoutParams =
        new LinearLayout.LayoutParams(
            LayoutParams.MATCH_PARENT, (int) ViewUtils.dpToPx(ERROR_DIVIDER_WIDTH_DP, context));
    v.setLayoutParams(layoutParams);
    v.setBackgroundColor(DIVIDER_COLOR);
    return v;
  }

  private TextView getMessageTextView(String message, Context context) {
    TextView textView = new TextView(context);
    TextViewCompat.setTextAppearance(textView, R.style.gm_font_weight_regular);
    textView.setPadding(
        (int) ViewUtils.dpToPx(SIDE_PADDING, context),
        (int) ViewUtils.dpToPx(PADDING, context),
        (int) ViewUtils.dpToPx(SIDE_PADDING, context),
        (int) ViewUtils.dpToPx(PADDING, context));
    textView.setText(message);
    return textView;
  }
}
1,337
5,169
<filename>Specs/7/7/9/ProximiioMap/0.2.6/ProximiioMap.podspec.json
{
  "name": "ProximiioMap",
  "version": "0.2.6",
  "summary": "Proximi.io Integrated Map",
  "description": "Proximi.io Integrated Map Solution for IOS Platform",
  "homepage": "https://github.com/proximiio/proximiio-map-pod",
  "license": "Commercial",
  "authors": {
    "Proximi.io": "<EMAIL>"
  },
  "source": {
    "git": "https://github.com/proximiio/proximiio-map-pod.git",
    "tag": "0.2.6"
  },
  "platforms": {
    "ios": "8.0"
  },
  "vendored_frameworks": "Pod/ProximiioMap.framework",
  "frameworks": [
    "CoreLocation",
    "QuartzCore",
    "Proximiio"
  ],
  "dependencies": {
    "Proximiio": [

    ]
  }
}
314
1,781
<gh_stars>1000+
/*
 * HaoRan ImageFilter Classes v0.3
 * Copyright (C) 2012 <NAME>
 *
 * This library is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published by the
 * Free Software Foundation; either version 2.1 of the License, or (at your
 * option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this library; if not, write to the Free Software Foundation.
 */

package com.marshalchen.common.uimodule.ImageFilter;

import com.marshalchen.common.uimodule.ImageFilter.IImageFilter.Function;

public class PosterizeFilter extends LUTFilter {
    int _level;

    public int InitLUTtable(int LUTIndex) {
        double d = 255.0 / (_level - 1.0);
        int n = (int)(LUTIndex / d + 0.5); // round
        return Function.FClamp0255(d * n); // round
    }

    public PosterizeFilter(int nLevel) {
        _level = ((nLevel >= 2) ? nLevel : 2);
    }
}
417
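The LUT arithmetic in InitLUTtable is easier to see numerically: with `level` output tones, `d = 255 / (level - 1)` is the spacing between tones, and each input byte snaps to the nearest multiple of `d`. A small Python restatement of that rounding and clamping (illustrative only):

def posterize_lut(level):
    """Build the 256-entry posterize lookup table used by the filter above."""
    level = max(level, 2)
    d = 255.0 / (level - 1.0)
    # int(x + 0.5) mirrors the Java round; min/max mirrors FClamp0255.
    return [min(max(int(d * int(i / d + 0.5) + 0.5), 0), 255) for i in range(256)]

lut = posterize_lut(4)               # tones at 0, 85, 170, 255
assert sorted(set(lut)) == [0, 85, 170, 255]
assert lut[100] == 85                # 100 snaps to the nearest tone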
528
#ifndef CPARAMS_H
#define CPARAMS_H

#ifdef __cplusplus
extern "C" {
#endif

#include "lib_export.h"
#include <stdbool.h>
#include <stddef.h>

typedef struct _Device Device;

typedef enum _FirmwareMode {
    FirmwareModeApplication,
    FirmwareModeBootloader
} FirmwareMode;

typedef enum _DeviceState {
    DeviceStateDisconnected,
    DeviceStateConnected
} DeviceState;

typedef enum _SamplingFrequency {
    SamplingFrequencyHz125,
    SamplingFrequencyHz250,
    SamplingFrequencyHz500,
    SamplingFrequencyHz1000,
    SamplingFrequencyHz2000,
    SamplingFrequencyHz4000,
    SamplingFrequencyHz8000
} SamplingFrequency;

typedef enum _Gain {
    Gain1,
    Gain2,
    Gain3,
    Gain4,
    Gain6,
    Gain8,
    Gain12
} Gain;

typedef enum _ExternalSwitchInput {
    ExternalSwitchInputMioElectrodesRespUSB,
    ExternalSwitchInputMioElectrodes,
    ExternalSwitchInputMioUSB,
    ExternalSwitchInputRespUSB
} ExternalSwitchInput;

typedef enum _ADCInput {
    ADCInputElectrodes,
    ADCInputShort,
    ADCInputTest,
    ADCInputResistance
} ADCInput;

typedef enum _AccelerometerSensitivity {
    AccelerometerSens2g,
    AccelerometerSens4g,
    AccelerometerSens8g,
    AccelerometerSens16g
} AccelerometerSensitivity;

typedef enum _GyroscopeSensitivity {
    GyroscopeSens250Grad,
    GyroscopeSens500Grad,
    GyroscopeSens1000Grad,
    GyroscopeSens2000Grad
} GyroscopeSensitivity;

typedef enum _StimulationDeviceState {
    StateNoParams,
    StateDisabled,
    StateEnabled
} StimulationDeviceState;

typedef struct _StimulatorAndMAState {
    StimulationDeviceState StimulatorState;
    StimulationDeviceState MAState;
} StimulatorAndMaState;

typedef enum _MotionAssistantLimb {
    MotionAssistantLimbRightLeg,
    MotionAssistantLimbLeftLeg,
    MotionAssistantLimbRightArm,
    MotionAssistantLimbLeftArm
} MotionAssistantLimb;

typedef struct _MotionAssistantParams {
    int gyroStart;
    int gyroStop;
    MotionAssistantLimb limb;
    int minPause;
} MotionAssistantParams;

typedef struct _StimulationParams {
    int current;
    int pulse_width;
    int frequency;
    int stimulus_duration;
} StimulationParams;

typedef struct _FirmwareVersion {
    unsigned int version;
    unsigned int build;
} FirmwareVersion;

#ifdef __cplusplus
}
#endif

#endif // CPARAMS_H
1,190
1,338
<filename>src/apps/mediaplayer/support/Command.cpp
/*
 * Copyright 2006, Haiku.
 * Distributed under the terms of the MIT License.
 *
 * Authors:
 *		<NAME> <<EMAIL>>
 */

#include "Command.h"

#include <stdio.h>

#include <OS.h>

// constructor
Command::Command()
	: fTimeStamp(system_time())
{
}

// destructor
Command::~Command()
{
}

// InitCheck
status_t
Command::InitCheck()
{
	return B_NO_INIT;
}

// Perform
status_t
Command::Perform()
{
	return B_ERROR;
}

// Undo
status_t
Command::Undo()
{
	return B_ERROR;
}

// Redo
status_t
Command::Redo()
{
	return Perform();
}

// GetName
void
Command::GetName(BString& name)
{
	name << "Name of action goes here.";
}

// UndoesPrevious
bool
Command::UndoesPrevious(const Command* previous)
{
	return false;
}

// CombineWithNext
bool
Command::CombineWithNext(const Command* next)
{
	return false;
}

// CombineWithPrevious
bool
Command::CombineWithPrevious(const Command* previous)
{
	return false;
}

// _GetString
const char*
Command::_GetString(uint32 key, const char* defaultString) const
{
//	if (LanguageManager* manager = LanguageManager::Default())
//		return manager->GetString(key, defaultString);
//	else
		return defaultString;
}
425
14,668
<filename>components/network_session_configurator/common/network_switches.h
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef COMPONENTS_NETWORK_SESSION_CONFIGURATOR_COMMON_NETWORK_SWITCHES_H_
#define COMPONENTS_NETWORK_SESSION_CONFIGURATOR_COMMON_NETWORK_SWITCHES_H_

#include "network_session_configurator_export.h"

namespace base {
class CommandLine;
}

namespace switches {

#define NETWORK_SWITCH(name, value) \
  NETWORK_SESSION_CONFIGURATOR_EXPORT extern const char name[];
#include "components/network_session_configurator/common/network_switch_list.h"
#undef NETWORK_SWITCH

}  // namespace switches

namespace network_session_configurator {

// Copies all command line switches the configurator handles from the |src|
// CommandLine to the |dest| one.
NETWORK_SESSION_CONFIGURATOR_EXPORT void CopyNetworkSwitches(
    const base::CommandLine& src_command_line,
    base::CommandLine* dest_command_line);

}  // namespace network_session_configurator

#endif  // COMPONENTS_NETWORK_SESSION_CONFIGURATOR_COMMON_NETWORK_SWITCHES_H_
377
589
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.oozie.fluentjob.api.action;

import com.google.common.collect.ImmutableList;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.oozie.fluentjob.api.ModifyOnce;

import java.util.ArrayList;
import java.util.List;

/**
 * A builder class for {@link JavaAction}.
 *
 * The properties of the builder can only be set once, an attempt to set them a second time will trigger
 * an {@link IllegalStateException}. The properties that are lists are an exception to this rule, of course multiple
 * elements can be added / removed.
 *
 * Builder instances can be used to build several elements, although properties already set cannot be changed after
 * a call to {@link JavaActionBuilder#build} either.
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class JavaActionBuilder extends NodeBuilderBaseImpl<JavaActionBuilder> implements Builder<JavaAction> {
    private final ActionAttributesBuilder attributesBuilder;
    private final ModifyOnce<String> mainClass;
    private final ModifyOnce<String> javaOptsString;
    private final List<String> javaOpts;

    public static JavaActionBuilder create() {
        final ActionAttributesBuilder builder = ActionAttributesBuilder.create();
        final ModifyOnce<String> mainClass = new ModifyOnce<>();
        final ModifyOnce<String> javaOptsString = new ModifyOnce<>();
        final List<String> javaOpts = new ArrayList<>();

        return new JavaActionBuilder(
                null,
                builder,
                mainClass,
                javaOptsString,
                javaOpts);
    }

    public static JavaActionBuilder createFromExistingAction(final JavaAction action) {
        final ActionAttributesBuilder builder = ActionAttributesBuilder.createFromExisting(action.getAttributes());
        final ModifyOnce<String> mainClass = new ModifyOnce<>(action.getMainClass());
        final ModifyOnce<String> javaOptsString = new ModifyOnce<>(action.getJavaOptsString());
        final List<String> javaOpts = new ArrayList<>(action.getJavaOpts());

        return new JavaActionBuilder(action,
                builder,
                mainClass,
                javaOptsString,
                javaOpts);
    }

    public static JavaActionBuilder createFromExistingAction(final Node action) {
        final ActionAttributesBuilder builder = ActionAttributesBuilder.createFromAction(action);
        final ModifyOnce<String> mainClass = new ModifyOnce<>();
        final ModifyOnce<String> javaOptsString = new ModifyOnce<>();
        final List<String> javaOpts = new ArrayList<>();

        return new JavaActionBuilder(action,
                builder,
                mainClass,
                javaOptsString,
                javaOpts);
    }

    private JavaActionBuilder(final Node action,
                              final ActionAttributesBuilder attributesBuilder,
                              final ModifyOnce<String> mainClass,
                              final ModifyOnce<String> javaOptsString,
                              final List<String> javaOpts) {
        super(action);

        this.attributesBuilder = attributesBuilder;
        this.mainClass = mainClass;
        this.javaOptsString = javaOptsString;
        this.javaOpts = javaOpts;
    }

    public JavaActionBuilder withResourceManager(final String resourceManager) {
        this.attributesBuilder.withResourceManager(resourceManager);
        return this;
    }

    public JavaActionBuilder withNameNode(final String nameNode) {
        this.attributesBuilder.withNameNode(nameNode);
        return this;
    }

    public JavaActionBuilder withPrepare(final Prepare prepare) {
        this.attributesBuilder.withPrepare(prepare);
        return this;
    }

    public JavaActionBuilder withLauncher(final Launcher launcher) {
        this.attributesBuilder.withLauncher(launcher);
        return this;
    }

    public JavaActionBuilder withJobXml(final String jobXml) {
        this.attributesBuilder.withJobXml(jobXml);
        return this;
    }

    public JavaActionBuilder withoutJobXml(final String jobXml) {
        this.attributesBuilder.withoutJobXml(jobXml);
        return this;
    }

    public JavaActionBuilder clearJobXmls() {
        this.attributesBuilder.clearJobXmls();
        return this;
    }

    public JavaActionBuilder withConfigProperty(final String key, final String value) {
        this.attributesBuilder.withConfigProperty(key, value);
        return this;
    }

    public JavaActionBuilder withMainClass(final String mainClass) {
        this.mainClass.set(mainClass);
        return this;
    }

    public JavaActionBuilder withJavaOptsString(final String javaOptsString) {
        this.javaOptsString.set(javaOptsString);
        return this;
    }

    public JavaActionBuilder withJavaOpt(final String javaOpt) {
        this.javaOpts.add(javaOpt);
        return this;
    }

    public JavaActionBuilder withoutJavaOpt(final String javaOpt) {
        this.javaOpts.remove(javaOpt);
        return this;
    }

    public JavaActionBuilder clearJavaOpts() {
        this.javaOpts.clear();
        return this;
    }

    public JavaActionBuilder withArg(final String arg) {
        this.attributesBuilder.withArg(arg);
        return this;
    }

    public JavaActionBuilder withoutArg(final String arg) {
        this.attributesBuilder.withoutArg(arg);
        return this;
    }

    public JavaActionBuilder clearArgs() {
        this.attributesBuilder.clearArgs();
        return this;
    }

    public JavaActionBuilder withFile(final String file) {
        this.attributesBuilder.withFile(file);
        return this;
    }

    public JavaActionBuilder withoutFile(final String file) {
        this.attributesBuilder.withoutFile(file);
        return this;
    }

    public JavaActionBuilder clearFiles() {
        this.attributesBuilder.clearFiles();
        return this;
    }

    public JavaActionBuilder withArchive(final String archive) {
        this.attributesBuilder.withArchive(archive);
        return this;
    }

    public JavaActionBuilder withoutArchive(final String archive) {
        this.attributesBuilder.withoutArchive(archive);
        return this;
    }

    public JavaActionBuilder clearArchives() {
        this.attributesBuilder.clearArchives();
        return this;
    }

    public JavaActionBuilder withCaptureOutput(final Boolean captureOutput) {
        this.attributesBuilder.withCaptureOutput(captureOutput);
        return this;
    }

    @Override
    public JavaAction build() {
        final Node.ConstructionData constructionData = getConstructionData();

        final JavaAction instance = new JavaAction(
                constructionData,
                attributesBuilder.build(),
                mainClass.get(),
                javaOptsString.get(),
                ImmutableList.copyOf(javaOpts));

        addAsChildToAllParents(instance);

        return instance;
    }

    @Override
    protected JavaActionBuilder getRuntimeSelfReference() {
        return this;
    }
}
2,899
537
<filename>test/json_test/flatcc_golden.c /* * Flatcc generated monster test binary based on parsing Google flatc's * golden json file. */ static const unsigned char flatcc_golden_le[] = { 0x0c, 0x00, 0x00, 0x00, 0x4d, 0x4f, 0x4e, 0x53, 0x00, 0x00, 0x00, 0x00, 0x20, 0xff, 0xff, 0xff, 0x00, 0x00, 0x80, 0x3f, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x40, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x40, 0x02, 0x00, 0x05, 0x00, 0x06, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00, 0x9c, 0x00, 0x00, 0x00, 0x8c, 0x00, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x60, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x41, 0xc9, 0x79, 0xdd, 0x41, 0xc9, 0x79, 0xdd, 0x00, 0x00, 0x00, 0x00, 0x81, 0x91, 0x7b, 0xf2, 0xcd, 0x80, 0x0f, 0x6e, 0x81, 0x91, 0x7b, 0xf2, 0xcd, 0x80, 0x0f, 0x6e, 0x71, 0xa4, 0x81, 0x8e, 0x71, 0xa4, 0x81, 0x8e, 0xf1, 0xdd, 0x67, 0xc7, 0xdc, 0x48, 0xf9, 0x43, 0xf1, 0xdd, 0x67, 0xc7, 0xdc, 0x48, 0xf9, 0x43, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x74, 0x65, 0x73, 0x74, 0x32, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x74, 0x65, 0x73, 0x74, 0x31, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x14, 0x00, 0x1e, 0x00, 0x28, 0x00, 0xd0, 0xff, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x46, 0x72, 0x65, 0x64, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x4d, 0x79, 0x4d, 0x6f, 0x6e, 0x73, 0x74, 0x65, 0x72, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x34, 0x00, 0x74, 0x00, 0x04, 0x00, 0x00, 0x00, 0x24, 0x00, 0x28, 0x00, 0x00, 0x00, 0x2c, 0x00, 0x00, 0x00, 0x34, 0x00, 0x30, 0x00, 0x38, 0x00, 0x3c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x44, 0x00, 0x4c, 0x00, 0x54, 0x00, 0x5c, 0x00, 0x60, 0x00, 0x64, 0x00, 0x6c, 0x00, }; static const unsigned char flatcc_golden_be[] = { 0x00, 0x00, 0x00, 0x0c, 0x53, 0x4e, 0x4f, 0x4d, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x20, 0x3f, 0x80, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x40, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x05, 0x06, 0x00, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9c, 0x00, 0x00, 0x00, 0x8c, 0x00, 0x00, 0x00, 0x74, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60, 0x00, 0x00, 0x00, 0x38, 0xdd, 0x79, 0xc9, 0x41, 0xdd, 0x79, 0xc9, 0x41, 0x00, 0x00, 0x00, 0x00, 0x6e, 0x0f, 0x80, 0xcd, 0xf2, 0x7b, 0x91, 0x81, 0x6e, 0x0f, 0x80, 0xcd, 0xf2, 0x7b, 0x91, 0x81, 0x8e, 0x81, 0xa4, 0x71, 0x8e, 0x81, 0xa4, 0x71, 0x43, 0xf9, 0x48, 0xdc, 0xc7, 0x67, 0xdd, 0xf1, 0x43, 0xf9, 0x48, 0xdc, 0xc7, 0x67, 0xdd, 0xf1, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x05, 0x74, 0x65, 0x73, 0x74, 0x32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x74, 0x65, 0x73, 0x74, 0x31, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x0a, 0x14, 0x00, 0x00, 0x1e, 0x28, 0x00, 0xff, 0xff, 0xff, 0xd0, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x46, 0x72, 0x65, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x01, 0x02, 0x03, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x4d, 0x79, 0x4d, 0x6f, 0x6e, 0x73, 0x74, 0x65, 0x72, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x34, 0x00, 0x74, 0x00, 0x04, 0x00, 0x00, 0x00, 0x24, 0x00, 0x28, 0x00, 0x00, 0x00, 0x2c, 0x00, 0x00, 0x00, 0x34, 0x00, 0x30, 0x00, 0x38, 0x00, 0x3c, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x44, 0x00, 0x4c, 0x00, 0x54, 0x00, 0x5c, 0x00, 0x60, 0x00, 0x64, 0x00, 0x6c, };
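A quick sanity check on these buffers is to decode the FlatBuffers file header: bytes 0-4 hold the little-endian offset of the root table and bytes 4-8 the optional file identifier ("MONS" here; the big-endian buffer stores the swapped "SNOM"). A minimal Python sketch over the first eight bytes of flatcc_golden_le above:

import struct

header = bytes([0x0c, 0x00, 0x00, 0x00, 0x4d, 0x4f, 0x4e, 0x53])  # first 8 bytes of flatcc_golden_le

(root_offset,) = struct.unpack_from("<I", header, 0)  # uoffset_t is always little-endian on the wire
identifier = header[4:8].decode("ascii")

assert root_offset == 12     # the root table starts 12 bytes into the buffer
assert identifier == "MONS"  # file identifier of the monster test schema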
2,675
348
<reponame>chamberone/Leaflet.PixiOverlay {"nom":"Cramaille","circ":"5ème circonscription","dpt":"Aisne","inscrits":109,"abs":58,"votants":51,"blancs":5,"nuls":0,"exp":46,"res":[{"nuance":"FN","nom":"<NAME>","voix":27},{"nuance":"REM","nom":"M. <NAME>","voix":19}]}
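These per-commune result rows all share one schema (inscrits, abs, votants, blancs, nuls, exp, plus a res list of candidates). A small Python sketch, using the numbers from the Cramaille row above, that derives turnout and the leading candidate:

record = {"nom": "Cramaille", "inscrits": 109, "votants": 51,
          "res": [{"nuance": "FN", "voix": 27}, {"nuance": "REM", "voix": 19}]}

turnout = record["votants"] / record["inscrits"]      # 51 / 109
leader = max(record["res"], key=lambda c: c["voix"])  # candidate with the most votes
print(record["nom"], f"{100 * turnout:.1f}%", leader["nuance"])  # Cramaille 46.8% FN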
111
7,737
extern zend_class_entry *phalcon_forms_element_numeric_ce;

ZEPHIR_INIT_CLASS(Phalcon_Forms_Element_Numeric);

PHP_METHOD(Phalcon_Forms_Element_Numeric, render);

ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_forms_element_numeric_render, 0, 0, IS_STRING, 0)
#if PHP_VERSION_ID >= 80000
	ZEND_ARG_TYPE_INFO_WITH_DEFAULT_VALUE(0, attributes, IS_ARRAY, 0, "[]")
#else
	ZEND_ARG_ARRAY_INFO(0, attributes, 0)
#endif
ZEND_END_ARG_INFO()

ZEPHIR_INIT_FUNCS(phalcon_forms_element_numeric_method_entry) {
	PHP_ME(Phalcon_Forms_Element_Numeric, render, arginfo_phalcon_forms_element_numeric_render, ZEND_ACC_PUBLIC)
	PHP_FE_END
};
281
2,338
<reponame>medismailben/llvm-project<filename>compiler-rt/test/fuzzer/UseAfterDtor.cpp<gh_stars>1000+
#include <cstdint>
#include <cstdio>

struct Simple {
  int x_;
  Simple() { x_ = 5; }
  ~Simple() { x_ += 1; }
};

Simple *volatile SimpleSink;

extern "C" int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) {
  if (Size < 4) return 0;
  if (Data[0] == 'F' && Data[1] == 'U' && Data[2] == 'Z' && Data[3] == 'Z') {
    {
      Simple S;
      SimpleSink = &S;
    }
    if (SimpleSink->x_)
      fprintf(stderr, "Failed to catch use-after-dtor\n");
  }
  return 0;
}
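The target above only reaches the use-after-destructor read when the input starts with the magic bytes "FUZZ" and is at least four bytes long. A small Python sketch for writing such a reproducer input by hand (the file name is arbitrary):

with open("uad_repro", "wb") as f:
    f.write(b"FUZZ")  # exactly the 4-byte prefix the target checks, so the dtor path runs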
274
66,985
<filename>spring-boot-project/spring-boot-actuator-autoconfigure/src/main/java/org/springframework/boot/actuate/autoconfigure/neo4j/Neo4jHealthContributorConfigurations.java
/*
 * Copyright 2012-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.actuate.autoconfigure.neo4j;

import java.util.Map;

import org.neo4j.driver.Driver;
import reactor.core.publisher.Flux;

import org.springframework.boot.actuate.autoconfigure.health.CompositeHealthContributorConfiguration;
import org.springframework.boot.actuate.autoconfigure.health.CompositeReactiveHealthContributorConfiguration;
import org.springframework.boot.actuate.health.HealthContributor;
import org.springframework.boot.actuate.health.ReactiveHealthContributor;
import org.springframework.boot.actuate.neo4j.Neo4jHealthIndicator;
import org.springframework.boot.actuate.neo4j.Neo4jReactiveHealthIndicator;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Health contributor options for Neo4j.
 *
 * @author <NAME>
 * @author <NAME>
 */
class Neo4jHealthContributorConfigurations {

	@Configuration(proxyBeanMethods = false)
	static class Neo4jConfiguration extends CompositeHealthContributorConfiguration<Neo4jHealthIndicator, Driver> {

		@Bean
		@ConditionalOnMissingBean(name = { "neo4jHealthIndicator", "neo4jHealthContributor" })
		HealthContributor neo4jHealthContributor(Map<String, Driver> drivers) {
			return createContributor(drivers);
		}

	}

	@Configuration(proxyBeanMethods = false)
	@ConditionalOnClass(Flux.class)
	static class Neo4jReactiveConfiguration
			extends CompositeReactiveHealthContributorConfiguration<Neo4jReactiveHealthIndicator, Driver> {

		@Bean
		@ConditionalOnMissingBean(name = { "neo4jHealthIndicator", "neo4jHealthContributor" })
		ReactiveHealthContributor neo4jHealthContributor(Map<String, Driver> drivers) {
			return createContributor(drivers);
		}

	}

}
795
348
<filename>docs/data/leg-t2/057/05701751.json {"nom":"Woippy","circ":"1ère circonscription","dpt":"Moselle","inscrits":8303,"abs":5512,"votants":2791,"blancs":185,"nuls":57,"exp":2549,"res":[{"nuance":"REM","nom":"<NAME>","voix":1485},{"nuance":"FN","nom":"<NAME>","voix":1064}]}
114
839
<reponame>kimjand/cxf<gh_stars>100-1000
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.cxf.ws.policy.attachment;

import java.util.ResourceBundle;

import org.apache.cxf.Bus;
import org.apache.cxf.common.i18n.BundleUtils;
import org.apache.cxf.common.i18n.Message;
import org.apache.cxf.ws.policy.PolicyBuilder;
import org.apache.cxf.ws.policy.PolicyEngine;
import org.apache.cxf.ws.policy.PolicyEngineImpl;
import org.apache.cxf.ws.policy.PolicyException;
import org.apache.cxf.ws.policy.PolicyProvider;
import org.apache.cxf.ws.policy.attachment.reference.ReferenceResolver;
import org.apache.cxf.ws.policy.attachment.reference.RemoteReferenceResolver;
import org.apache.neethi.Policy;
import org.apache.neethi.PolicyReference;
import org.apache.neethi.PolicyRegistry;

/**
 *
 */
public abstract class AbstractPolicyProvider implements PolicyProvider {
    private static final ResourceBundle BUNDLE = BundleUtils.getBundle(AbstractPolicyProvider.class);

    protected PolicyBuilder builder;
    protected PolicyRegistry registry;
    protected Bus bus;

    protected AbstractPolicyProvider() {
        this(null);
    }

    protected AbstractPolicyProvider(Bus b) {
        setBus(b);
    }

    public final void setBus(Bus b) {
        bus = b;
        if (null != bus) {
            setBuilder(bus.getExtension(PolicyBuilder.class));
            PolicyEngine pe = bus.getExtension(PolicyEngine.class);
            if (pe != null) {
                setRegistry(pe.getRegistry());
                ((PolicyEngineImpl)pe).addPolicyProvider(this);
            }
        }
    }

    public final void setBuilder(PolicyBuilder b) {
        builder = b;
    }

    public final void setRegistry(PolicyRegistry r) {
        registry = r;
    }

    protected Policy resolveExternal(PolicyReference ref, String baseURI) {
        Policy resolved = registry.lookup(ref.getURI());
        if (null != resolved) {
            return resolved;
        }
        ReferenceResolver resolver = new RemoteReferenceResolver(baseURI, builder);
        return resolver.resolveReference(ref.getURI());
    }

    protected boolean isExternal(PolicyReference ref) {
        return !ref.getURI().startsWith("#");
    }

    protected void checkResolved(PolicyReference ref, Policy p) {
        if (null == p) {
            throw new PolicyException(new Message("UNRESOLVED_POLICY_REFERENCE_EXC", BUNDLE, ref.getURI()));
        }
    }
}
1,114
348
{"nom":"Sablonnières","circ":"4ème circonscription","dpt":"Seine-et-Marne","inscrits":517,"abs":335,"votants":182,"blancs":12,"nuls":5,"exp":165,"res":[{"nuance":"LR","nom":"<NAME>","voix":95},{"nuance":"REM","nom":"M. <NAME>","voix":70}]}
97
3,274
package com.ql.util.express.parse;

public class Word {
	public String word;
	public int line;
	public int col;
	public int index;

	public Word(String aWord, int aLine, int aCol) {
		this.word = aWord;
		this.line = aLine;
		this.col = aCol;
	}

	public String toString() {
		return this.word;// + "[" + this.line + "," + this.col + "]";
	}
}
140
396
<filename>samples/src/main/java/com/kunminx/samples/ui/operators/TakeUntilExampleFragment.java
package com.kunminx.samples.ui.operators;

import android.util.Log;

import com.kunminx.samples.utils.AppConstant;
import com.kunminx.samples.utils.ObserverAdapter;

import java.util.concurrent.TimeUnit;

import io.reactivex.rxjava3.android.schedulers.AndroidSchedulers;
import io.reactivex.rxjava3.core.Observable;
import io.reactivex.rxjava3.functions.BiFunction;

public class TakeUntilExampleFragment extends TakeOperatorBaseFragment {

  private static final String TAG = TakeUntilExampleFragment.class.getSimpleName();

  @Override
  protected void doSomeWork() {
    Observable<Long> timerObservable = Observable.timer(5, TimeUnit.SECONDS);
    timerObservable.subscribe(new ObserverAdapter<Long>() {
      @Override
      public void onComplete() {
        String print = " Timer completed";
        textView.append(print);
        textView.append(AppConstant.LINE_SEPARATOR);
        Log.d(TAG, print);
      }
    });

    getStringObservable()
            // Delay each item's emission by one second.
            .zipWith(Observable.interval(0, 1, TimeUnit.SECONDS), new BiFunction<String, Long, String>() {
              @Override
              public String apply(String s, Long aLong) throws Exception {
                return s;
              }
            })
            // Receive items from the Strings observable only until timerObservable starts emitting.
            .takeUntil(timerObservable)
            // Observe on the main thread, since delay runs on a background thread; this avoids blocking the UI.
            .observeOn(AndroidSchedulers.mainThread())
            .subscribe(getObserver());
  }
}
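The same take-until pattern exists outside RxJava. As a rough analogue, here is a sketch using the RxPY library; the API names follow RxPY 3 and should be treated as an assumption, not as part of this sample:

import time

import rx
from rx import operators as ops

# Emit 0, 1, 2, ... every second, but only until the 5-second timer fires.
rx.interval(1.0).pipe(
    ops.take_until(rx.timer(5.0)),
).subscribe(on_next=print, on_completed=lambda: print("done"))

time.sleep(6.0)  # keep the process alive; the timers run on background scheduler threads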
672
347
/*************************************************************************** * _ _ ____ _ * Project ___| | | | _ \| | * / __| | | | |_) | | * | (__| |_| | _ <| |___ * \___|\___/|_| \_\_____| * * Copyright (C) 1998 - 2015, <NAME>, <<EMAIL>>, et al. * * This software is licensed as described in the file COPYING, which * you should have received as part of this distribution. The terms * are also available at http://curl.haxx.se/docs/copyright.html. * * You may opt to use, copy, modify, merge, publish, distribute and/or sell * copies of the Software, and permit persons to whom the Software is * furnished to do so, under the terms of the COPYING file. * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY * KIND, either express or implied. * ***************************************************************************/ #include "curl_setup.h" #include <curl/curl.h> #ifndef CURL_DISABLE_HTTP #if defined(HAVE_LIBGEN_H) && defined(HAVE_BASENAME) #include <libgen.h> #endif #include "urldata.h" /* for struct SessionHandle */ #include "formdata.h" #include "vtls/vtls.h" #include "strequal.h" #include "sendf.h" #include "strdup.h" #include "curl_printf.h" /* The last #include files should be: */ #include "curl_memory.h" #include "memdebug.h" #ifndef HAVE_BASENAME static char *Curl_basename(char *path); #define basename(x) Curl_basename((x)) #endif static size_t readfromfile(struct Form *form, char *buffer, size_t size); static char *formboundary(struct SessionHandle *data); /* What kind of Content-Type to use on un-specified files with unrecognized extensions. */ #define HTTPPOST_CONTENTTYPE_DEFAULT "application/octet-stream" #define FORM_FILE_SEPARATOR ',' #define FORM_TYPE_SEPARATOR ';' /*************************************************************************** * * AddHttpPost() * * Adds a HttpPost structure to the list, if parent_post is given becomes * a subpost of parent_post instead of a direct list element. * * Returns newly allocated HttpPost on success and NULL if malloc failed. * ***************************************************************************/ static struct curl_httppost * AddHttpPost(char *name, size_t namelength, char *value, size_t contentslength, char *buffer, size_t bufferlength, char *contenttype, long flags, struct curl_slist* contentHeader, char *showfilename, char *userp, struct curl_httppost *parent_post, struct curl_httppost **httppost, struct curl_httppost **last_post) { struct curl_httppost *post; post = calloc(1, sizeof(struct curl_httppost)); if(post) { post->name = name; post->namelength = (long)(name?(namelength?namelength:strlen(name)):0); post->contents = value; post->contentslength = (long)contentslength; post->buffer = buffer; post->bufferlength = (long)bufferlength; post->contenttype = contenttype; post->contentheader = contentHeader; post->showfilename = showfilename; post->userp = userp, post->flags = flags; } else return NULL; if(parent_post) { /* now, point our 'more' to the original 'more' */ post->more = parent_post->more; /* then move the original 'more' to point to ourselves */ parent_post->more = post; } else { /* make the previous point to this */ if(*last_post) (*last_post)->next = post; else (*httppost) = post; (*last_post) = post; } return post; } /*************************************************************************** * * AddFormInfo() * * Adds a FormInfo structure to the list presented by parent_form_info. * * Returns newly allocated FormInfo on success and NULL if malloc failed/ * parent_form_info is NULL. 
* ***************************************************************************/ static FormInfo * AddFormInfo(char *value, char *contenttype, FormInfo *parent_form_info) { FormInfo *form_info; form_info = calloc(1, sizeof(struct FormInfo)); if(form_info) { if(value) form_info->value = value; if(contenttype) form_info->contenttype = contenttype; form_info->flags = HTTPPOST_FILENAME; } else return NULL; if(parent_form_info) { /* now, point our 'more' to the original 'more' */ form_info->more = parent_form_info->more; /* then move the original 'more' to point to ourselves */ parent_form_info->more = form_info; } return form_info; } /*************************************************************************** * * ContentTypeForFilename() * * Provides content type for filename if one of the known types (else * (either the prevtype or the default is returned). * * Returns some valid contenttype for filename. * ***************************************************************************/ static const char *ContentTypeForFilename(const char *filename, const char *prevtype) { const char *contenttype = NULL; unsigned int i; /* * No type was specified, we scan through a few well-known * extensions and pick the first we match! */ struct ContentType { const char *extension; const char *type; }; static const struct ContentType ctts[]={ {".gif", "image/gif"}, {".jpg", "image/jpeg"}, {".jpeg", "image/jpeg"}, {".txt", "text/plain"}, {".html", "text/html"}, {".xml", "application/xml"} }; if(prevtype) /* default to the previously set/used! */ contenttype = prevtype; else contenttype = HTTPPOST_CONTENTTYPE_DEFAULT; if(filename) { /* in case a NULL was passed in */ for(i=0; i<sizeof(ctts)/sizeof(ctts[0]); i++) { if(strlen(filename) >= strlen(ctts[i].extension)) { if(strequal(filename + strlen(filename) - strlen(ctts[i].extension), ctts[i].extension)) { contenttype = ctts[i].type; break; } } } } /* we have a contenttype by now */ return contenttype; } /*************************************************************************** * * FormAdd() * * Stores a formpost parameter and builds the appropriate linked list. * * Has two principal functionalities: using files and byte arrays as * post parts. Byte arrays are either copied or just the pointer is stored * (as the user requests) while for files only the filename and not the * content is stored. * * While you may have only one byte array for each name, multiple filenames * are allowed (and because of this feature CURLFORM_END is needed after * using CURLFORM_FILE). 
* * Examples: * * Simple name/value pair with copied contents: * curl_formadd (&post, &last, CURLFORM_COPYNAME, "name", * CURLFORM_COPYCONTENTS, "value", CURLFORM_END); * * name/value pair where only the content pointer is remembered: * curl_formadd (&post, &last, CURLFORM_COPYNAME, "name", * CURLFORM_PTRCONTENTS, ptr, CURLFORM_CONTENTSLENGTH, 10, CURLFORM_END); * (if CURLFORM_CONTENTSLENGTH is missing strlen () is used) * * storing a filename (CONTENTTYPE is optional!): * curl_formadd (&post, &last, CURLFORM_COPYNAME, "name", * CURLFORM_FILE, "filename1", CURLFORM_CONTENTTYPE, "plain/text", * CURLFORM_END); * * storing multiple filenames: * curl_formadd (&post, &last, CURLFORM_COPYNAME, "name", * CURLFORM_FILE, "filename1", CURLFORM_FILE, "filename2", CURLFORM_END); * * Returns: * CURL_FORMADD_OK on success * CURL_FORMADD_MEMORY if the FormInfo allocation fails * CURL_FORMADD_OPTION_TWICE if one option is given twice for one Form * CURL_FORMADD_NULL if a null pointer was given for a char * CURL_FORMADD_MEMORY if the allocation of a FormInfo struct failed * CURL_FORMADD_UNKNOWN_OPTION if an unknown option was used * CURL_FORMADD_INCOMPLETE if the some FormInfo is not complete (or error) * CURL_FORMADD_MEMORY if a HttpPost struct cannot be allocated * CURL_FORMADD_MEMORY if some allocation for string copying failed. * CURL_FORMADD_ILLEGAL_ARRAY if an illegal option is used in an array * ***************************************************************************/ static CURLFORMcode FormAdd(struct curl_httppost **httppost, struct curl_httppost **last_post, va_list params) { FormInfo *first_form, *current_form, *form = NULL; CURLFORMcode return_value = CURL_FORMADD_OK; const char *prevtype = NULL; struct curl_httppost *post = NULL; CURLformoption option; struct curl_forms *forms = NULL; char *array_value=NULL; /* value read from an array */ /* This is a state variable, that if TRUE means that we're parsing an array that we got passed to us. If FALSE we're parsing the input va_list arguments. */ bool array_state = FALSE; /* * We need to allocate the first struct to fill in. */ first_form = calloc(1, sizeof(struct FormInfo)); if(!first_form) return CURL_FORMADD_MEMORY; current_form = first_form; /* * Loop through all the options set. Break if we have an error to report. */ while(return_value == CURL_FORMADD_OK) { /* first see if we have more parts of the array param */ if(array_state && forms) { /* get the upcoming option from the given array */ option = forms->option; array_value = (char *)forms->value; forms++; /* advance this to next entry */ if(CURLFORM_END == option) { /* end of array state */ array_state = FALSE; continue; } } else { /* This is not array-state, get next option */ option = va_arg(params, CURLformoption); if(CURLFORM_END == option) break; } switch (option) { case CURLFORM_ARRAY: if(array_state) /* we don't support an array from within an array */ return_value = CURL_FORMADD_ILLEGAL_ARRAY; else { forms = va_arg(params, struct curl_forms *); if(forms) array_state = TRUE; else return_value = CURL_FORMADD_NULL; } break; /* * Set the Name property. */ case CURLFORM_PTRNAME: #ifdef CURL_DOES_CONVERSIONS /* Treat CURLFORM_PTR like CURLFORM_COPYNAME so that libcurl will copy * the data in all cases so that we'll have safe memory for the eventual * conversion. */ #else current_form->flags |= HTTPPOST_PTRNAME; /* fall through */ #endif case CURLFORM_COPYNAME: if(current_form->name) return_value = CURL_FORMADD_OPTION_TWICE; else { char *name = array_state? 
array_value:va_arg(params, char *); if(name) current_form->name = name; /* store for the moment */ else return_value = CURL_FORMADD_NULL; } break; case CURLFORM_NAMELENGTH: if(current_form->namelength) return_value = CURL_FORMADD_OPTION_TWICE; else current_form->namelength = array_state?(size_t)array_value:(size_t)va_arg(params, long); break; /* * Set the contents property. */ case CURLFORM_PTRCONTENTS: current_form->flags |= HTTPPOST_PTRCONTENTS; /* fall through */ case CURLFORM_COPYCONTENTS: if(current_form->value) return_value = CURL_FORMADD_OPTION_TWICE; else { char *value = array_state?array_value:va_arg(params, char *); if(value) current_form->value = value; /* store for the moment */ else return_value = CURL_FORMADD_NULL; } break; case CURLFORM_CONTENTSLENGTH: if(current_form->contentslength) return_value = CURL_FORMADD_OPTION_TWICE; else current_form->contentslength = array_state?(size_t)array_value:(size_t)va_arg(params, long); break; /* Get contents from a given file name */ case CURLFORM_FILECONTENT: if(current_form->flags & (HTTPPOST_PTRCONTENTS|HTTPPOST_READFILE)) return_value = CURL_FORMADD_OPTION_TWICE; else { const char *filename = array_state? array_value:va_arg(params, char *); if(filename) { current_form->value = strdup(filename); if(!current_form->value) return_value = CURL_FORMADD_MEMORY; else { current_form->flags |= HTTPPOST_READFILE; current_form->value_alloc = TRUE; } } else return_value = CURL_FORMADD_NULL; } break; /* We upload a file */ case CURLFORM_FILE: { const char *filename = array_state?array_value: va_arg(params, char *); if(current_form->value) { if(current_form->flags & HTTPPOST_FILENAME) { if(filename) { char *fname = strdup(filename); if(!fname) return_value = CURL_FORMADD_MEMORY; else { form = AddFormInfo(fname, NULL, current_form); if(!form) { free(fname); return_value = CURL_FORMADD_MEMORY; } else { form->value_alloc = TRUE; current_form = form; form = NULL; } } } else return_value = CURL_FORMADD_NULL; } else return_value = CURL_FORMADD_OPTION_TWICE; } else { if(filename) { current_form->value = strdup(filename); if(!current_form->value) return_value = CURL_FORMADD_MEMORY; else { current_form->flags |= HTTPPOST_FILENAME; current_form->value_alloc = TRUE; } } else return_value = CURL_FORMADD_NULL; } break; } case CURLFORM_BUFFERPTR: current_form->flags |= HTTPPOST_PTRBUFFER|HTTPPOST_BUFFER; if(current_form->buffer) return_value = CURL_FORMADD_OPTION_TWICE; else { char *buffer = array_state?array_value:va_arg(params, char *); if(buffer) { current_form->buffer = buffer; /* store for the moment */ current_form->value = buffer; /* make it non-NULL to be accepted as fine */ } else return_value = CURL_FORMADD_NULL; } break; case CURLFORM_BUFFERLENGTH: if(current_form->bufferlength) return_value = CURL_FORMADD_OPTION_TWICE; else current_form->bufferlength = array_state?(size_t)array_value:(size_t)va_arg(params, long); break; case CURLFORM_STREAM: current_form->flags |= HTTPPOST_CALLBACK; if(current_form->userp) return_value = CURL_FORMADD_OPTION_TWICE; else { char *userp = array_state?array_value:va_arg(params, char *); if(userp) { current_form->userp = userp; current_form->value = userp; /* this isn't strictly true but we derive a value from this later on and we need this non-NULL to be accepted as a fine form part */ } else return_value = CURL_FORMADD_NULL; } break; case CURLFORM_CONTENTTYPE: { const char *contenttype = array_state?array_value:va_arg(params, char *); if(current_form->contenttype) { if(current_form->flags & HTTPPOST_FILENAME) { 
if(contenttype) { char *type = strdup(contenttype); if(!type) return_value = CURL_FORMADD_MEMORY; else { form = AddFormInfo(NULL, type, current_form); if(!form) { free(type); return_value = CURL_FORMADD_MEMORY; } else { form->contenttype_alloc = TRUE; current_form = form; form = NULL; } } } else return_value = CURL_FORMADD_NULL; } else return_value = CURL_FORMADD_OPTION_TWICE; } else { if(contenttype) { current_form->contenttype = strdup(contenttype); if(!current_form->contenttype) return_value = CURL_FORMADD_MEMORY; else current_form->contenttype_alloc = TRUE; } else return_value = CURL_FORMADD_NULL; } break; } case CURLFORM_CONTENTHEADER: { /* this "cast increases required alignment of target type" but we consider it OK anyway */ struct curl_slist* list = array_state? (struct curl_slist*)array_value: va_arg(params, struct curl_slist*); if(current_form->contentheader) return_value = CURL_FORMADD_OPTION_TWICE; else current_form->contentheader = list; break; } case CURLFORM_FILENAME: case CURLFORM_BUFFER: { const char *filename = array_state?array_value: va_arg(params, char *); if(current_form->showfilename) return_value = CURL_FORMADD_OPTION_TWICE; else { current_form->showfilename = strdup(filename); if(!current_form->showfilename) return_value = CURL_FORMADD_MEMORY; else current_form->showfilename_alloc = TRUE; } break; } default: return_value = CURL_FORMADD_UNKNOWN_OPTION; break; } } if(CURL_FORMADD_OK != return_value) { /* On error, free allocated fields for all nodes of the FormInfo linked list without deallocating nodes. List nodes are deallocated later on */ FormInfo *ptr; for(ptr = first_form; ptr != NULL; ptr = ptr->more) { if(ptr->name_alloc) { Curl_safefree(ptr->name); ptr->name_alloc = FALSE; } if(ptr->value_alloc) { Curl_safefree(ptr->value); ptr->value_alloc = FALSE; } if(ptr->contenttype_alloc) { Curl_safefree(ptr->contenttype); ptr->contenttype_alloc = FALSE; } if(ptr->showfilename_alloc) { Curl_safefree(ptr->showfilename); ptr->showfilename_alloc = FALSE; } } } if(CURL_FORMADD_OK == return_value) { /* go through the list, check for completeness and if everything is * alright add the HttpPost item otherwise set return_value accordingly */ post = NULL; for(form = first_form; form != NULL; form = form->more) { if(((!form->name || !form->value) && !post) || ( (form->contentslength) && (form->flags & HTTPPOST_FILENAME) ) || ( (form->flags & HTTPPOST_FILENAME) && (form->flags & HTTPPOST_PTRCONTENTS) ) || ( (!form->buffer) && (form->flags & HTTPPOST_BUFFER) && (form->flags & HTTPPOST_PTRBUFFER) ) || ( (form->flags & HTTPPOST_READFILE) && (form->flags & HTTPPOST_PTRCONTENTS) ) ) { return_value = CURL_FORMADD_INCOMPLETE; break; } else { if(((form->flags & HTTPPOST_FILENAME) || (form->flags & HTTPPOST_BUFFER)) && !form->contenttype ) { char *f = form->flags & HTTPPOST_BUFFER? form->showfilename : form->value; /* our contenttype is missing */ form->contenttype = strdup(ContentTypeForFilename(f, prevtype)); if(!form->contenttype) { return_value = CURL_FORMADD_MEMORY; break; } form->contenttype_alloc = TRUE; } if(!(form->flags & HTTPPOST_PTRNAME) && (form == first_form) ) { /* Note that there's small risk that form->name is NULL here if the app passed in a bad combo, so we better check for that first. */ if(form->name) { /* copy name (without strdup; possibly contains null characters) */ form->name = Curl_memdup(form->name, form->namelength? 
form->namelength: strlen(form->name)+1); } if(!form->name) { return_value = CURL_FORMADD_MEMORY; break; } form->name_alloc = TRUE; } if(!(form->flags & (HTTPPOST_FILENAME | HTTPPOST_READFILE | HTTPPOST_PTRCONTENTS | HTTPPOST_PTRBUFFER | HTTPPOST_CALLBACK)) && form->value) { /* copy value (without strdup; possibly contains null characters) */ form->value = Curl_memdup(form->value, form->contentslength? form->contentslength: strlen(form->value)+1); if(!form->value) { return_value = CURL_FORMADD_MEMORY; break; } form->value_alloc = TRUE; } post = AddHttpPost(form->name, form->namelength, form->value, form->contentslength, form->buffer, form->bufferlength, form->contenttype, form->flags, form->contentheader, form->showfilename, form->userp, post, httppost, last_post); if(!post) { return_value = CURL_FORMADD_MEMORY; break; } if(form->contenttype) prevtype = form->contenttype; } } if(CURL_FORMADD_OK != return_value) { /* On error, free allocated fields for nodes of the FormInfo linked list which are not already owned by the httppost linked list without deallocating nodes. List nodes are deallocated later on */ FormInfo *ptr; for(ptr = form; ptr != NULL; ptr = ptr->more) { if(ptr->name_alloc) { Curl_safefree(ptr->name); ptr->name_alloc = FALSE; } if(ptr->value_alloc) { Curl_safefree(ptr->value); ptr->value_alloc = FALSE; } if(ptr->contenttype_alloc) { Curl_safefree(ptr->contenttype); ptr->contenttype_alloc = FALSE; } if(ptr->showfilename_alloc) { Curl_safefree(ptr->showfilename); ptr->showfilename_alloc = FALSE; } } } } /* Always deallocate FormInfo linked list nodes without touching node fields given that these have either been deallocated or are owned now by the httppost linked list */ while(first_form) { FormInfo *ptr = first_form->more; free(first_form); first_form = ptr; } return return_value; } /* * curl_formadd() is a public API to add a section to the multipart formpost. * * @unittest: 1308 */ CURLFORMcode curl_formadd(struct curl_httppost **httppost, struct curl_httppost **last_post, ...) { va_list arg; CURLFORMcode result; va_start(arg, last_post); result = FormAdd(httppost, last_post, arg); va_end(arg); return result; } #ifdef __VMS #include <fabdef.h> /* * get_vms_file_size does what it takes to get the real size of the file * * For fixed files, find out the size of the EOF block and adjust. * * For all others, have to read the entire file in, discarding the contents. * Most posted text files will be small, and binary files like zlib archives * and CD/DVD images should be either a STREAM_LF format or a fixed format. * */ curl_off_t VmsRealFileSize(const char * name, const struct_stat * stat_buf) { char buffer[8192]; curl_off_t count; int ret_stat; FILE * file; file = fopen(name, "r"); /* VMS */ if(file == NULL) return 0; count = 0; ret_stat = 1; while(ret_stat > 0) { ret_stat = fread(buffer, 1, sizeof(buffer), file); if(ret_stat != 0) count += ret_stat; } fclose(file); return count; } /* * * VmsSpecialSize checks to see if the stat st_size can be trusted and * if not to call a routine to get the correct size. 
* */ static curl_off_t VmsSpecialSize(const char * name, const struct_stat * stat_buf) { switch(stat_buf->st_fab_rfm) { case FAB$C_VAR: case FAB$C_VFC: return VmsRealFileSize(name, stat_buf); break; default: return stat_buf->st_size; } } #endif #ifndef __VMS #define filesize(name, stat_data) (stat_data.st_size) #else /* Getting the expected file size needs help on VMS */ #define filesize(name, stat_data) VmsSpecialSize(name, &stat_data) #endif /* * AddFormData() adds a chunk of data to the FormData linked list. * * size is incremented by the chunk length, unless it is NULL */ static CURLcode AddFormData(struct FormData **formp, enum formtype type, const void *line, size_t length, curl_off_t *size) { struct FormData *newform = malloc(sizeof(struct FormData)); if(!newform) return CURLE_OUT_OF_MEMORY; newform->next = NULL; if(type <= FORM_CONTENT) { /* we make it easier for plain strings: */ if(!length) length = strlen((char *)line); newform->line = malloc(length+1); if(!newform->line) { free(newform); return CURLE_OUT_OF_MEMORY; } memcpy(newform->line, line, length); newform->length = length; newform->line[length]=0; /* zero terminate for easier debugging */ } else /* For callbacks and files we don't have any actual data so we just keep a pointer to whatever this points to */ newform->line = (char *)line; newform->type = type; if(*formp) { (*formp)->next = newform; *formp = newform; } else *formp = newform; if(size) { if(type != FORM_FILE) /* for static content as well as callback data we add the size given as input argument */ *size += length; else { /* Since this is a file to be uploaded here, add the size of the actual file */ if(!strequal("-", newform->line)) { struct_stat file; if(!stat(newform->line, &file) && !S_ISDIR(file.st_mode)) *size += filesize(newform->line, file); else return CURLE_BAD_FUNCTION_ARGUMENT; } } } return CURLE_OK; } /* * AddFormDataf() adds printf()-style formatted data to the formdata chain. */ static CURLcode AddFormDataf(struct FormData **formp, curl_off_t *size, const char *fmt, ...) { char s[4096]; va_list ap; va_start(ap, fmt); vsnprintf(s, sizeof(s), fmt, ap); va_end(ap); return AddFormData(formp, FORM_DATA, s, 0, size); } /* * Curl_formclean() is used from http.c, this cleans a built FormData linked * list */ void Curl_formclean(struct FormData **form_ptr) { struct FormData *next, *form; form = *form_ptr; if(!form) return; do { next=form->next; /* the following form line */ if(form->type <= FORM_CONTENT) free(form->line); /* free the line */ free(form); /* free the struct */ } while((form = next) != NULL); /* continue */ *form_ptr = NULL; } /* * curl_formget() * Serialize a curl_httppost struct. * Returns 0 on success. 
* * @unittest: 1308 */ int curl_formget(struct curl_httppost *form, void *arg, curl_formget_callback append) { CURLcode result; curl_off_t size; struct FormData *data, *ptr; result = Curl_getformdata(NULL, &data, form, NULL, &size); if(result) return (int)result; for(ptr = data; ptr; ptr = ptr->next) { if((ptr->type == FORM_FILE) || (ptr->type == FORM_CALLBACK)) { char buffer[8192]; size_t nread; struct Form temp; Curl_FormInit(&temp, ptr); do { nread = readfromfile(&temp, buffer, sizeof(buffer)); if((nread == (size_t) -1) || (nread > sizeof(buffer)) || (nread != append(arg, buffer, nread))) { if(temp.fp) fclose(temp.fp); Curl_formclean(&data); return -1; } } while(nread); } else { if(ptr->length != append(arg, ptr->line, ptr->length)) { Curl_formclean(&data); return -1; } } } Curl_formclean(&data); return 0; } /* * curl_formfree() is an external function to free up a whole form post * chain */ void curl_formfree(struct curl_httppost *form) { struct curl_httppost *next; if(!form) /* no form to free, just get out of this */ return; do { next=form->next; /* the following form line */ /* recurse to sub-contents */ curl_formfree(form->more); if(!(form->flags & HTTPPOST_PTRNAME)) free(form->name); /* free the name */ if(!(form->flags & (HTTPPOST_PTRCONTENTS|HTTPPOST_BUFFER|HTTPPOST_CALLBACK)) ) free(form->contents); /* free the contents */ free(form->contenttype); /* free the content type */ free(form->showfilename); /* free the faked file name */ free(form); /* free the struct */ } while((form = next) != NULL); /* continue */ } #ifndef HAVE_BASENAME /* (Quote from The Open Group Base Specifications Issue 6 IEEE Std 1003.1, 2004 Edition) The basename() function shall take the pathname pointed to by path and return a pointer to the final component of the pathname, deleting any trailing '/' characters. If the string pointed to by path consists entirely of the '/' character, basename() shall return a pointer to the string "/". If the string pointed to by path is exactly "//", it is implementation-defined whether '/' or "//" is returned. If path is a null pointer or points to an empty string, basename() shall return a pointer to the string ".". The basename() function may modify the string pointed to by path, and may return a pointer to static storage that may then be overwritten by a subsequent call to basename(). The basename() function need not be reentrant. A function that is not required to be reentrant is not required to be thread-safe. */ static char *Curl_basename(char *path) { /* Ignore all the details above for now and make a quick and simple implementaion here */ char *s1; char *s2; s1=strrchr(path, '/'); s2=strrchr(path, '\\'); if(s1 && s2) { path = (s1 > s2? s1 : s2)+1; } else if(s1) path = s1 + 1; else if(s2) path = s2 + 1; return path; } #endif static char *strippath(const char *fullfile) { char *filename; char *base; filename = strdup(fullfile); /* duplicate since basename() may ruin the buffer it works on */ if(!filename) return NULL; base = strdup(basename(filename)); free(filename); /* free temporary buffer */ return base; /* returns an allocated string or NULL ! 
*/ } static CURLcode formdata_add_filename(const struct curl_httppost *file, struct FormData **form, curl_off_t *size) { CURLcode result = CURLE_OK; char *filename = file->showfilename; char *filebasename = NULL; char *filename_escaped = NULL; if(!filename) { filebasename = strippath(file->contents); if(!filebasename) return CURLE_OUT_OF_MEMORY; filename = filebasename; } if(strchr(filename, '\\') || strchr(filename, '"')) { char *p0, *p1; /* filename need be escaped */ filename_escaped = malloc(strlen(filename)*2+1); if(!filename_escaped) { free(filebasename); return CURLE_OUT_OF_MEMORY; } p0 = filename_escaped; p1 = filename; while(*p1) { if(*p1 == '\\' || *p1 == '"') *p0++ = '\\'; *p0++ = *p1++; } *p0 = '\0'; filename = filename_escaped; } result = AddFormDataf(form, size, "; filename=\"%s\"", filename); free(filename_escaped); free(filebasename); return result; } /* * Curl_getformdata() converts a linked list of "meta data" into a complete * (possibly huge) multipart formdata. The input list is in 'post', while the * output resulting linked lists gets stored in '*finalform'. *sizep will get * the total size of the whole POST. * A multipart/form_data content-type is built, unless a custom content-type * is passed in 'custom_content_type'. * * This function will not do a failf() for the potential memory failures but * should for all other errors it spots. Just note that this function MAY get * a NULL pointer in the 'data' argument. */ CURLcode Curl_getformdata(struct SessionHandle *data, struct FormData **finalform, struct curl_httppost *post, const char *custom_content_type, curl_off_t *sizep) { struct FormData *form = NULL; struct FormData *firstform; struct curl_httppost *file; CURLcode result = CURLE_OK; curl_off_t size = 0; /* support potentially ENORMOUS formposts */ char *boundary; char *fileboundary = NULL; struct curl_slist* curList; *finalform = NULL; /* default form is empty */ if(!post) return result; /* no input => no output! */ boundary = formboundary(data); if(!boundary) return CURLE_OUT_OF_MEMORY; /* Make the first line of the output */ result = AddFormDataf(&form, NULL, "%s; boundary=%s\r\n", custom_content_type?custom_content_type: "Content-Type: multipart/form-data", boundary); if(result) { free(boundary); return result; } /* we DO NOT include that line in the total size of the POST, since it'll be part of the header! */ firstform = form; do { if(size) { result = AddFormDataf(&form, &size, "\r\n"); if(result) break; } /* boundary */ result = AddFormDataf(&form, &size, "--%s\r\n", boundary); if(result) break; /* Maybe later this should be disabled when a custom_content_type is passed, since Content-Disposition is not meaningful for all multipart types. */ result = AddFormDataf(&form, &size, "Content-Disposition: form-data; name=\""); if(result) break; result = AddFormData(&form, FORM_DATA, post->name, post->namelength, &size); if(result) break; result = AddFormDataf(&form, &size, "\""); if(result) break; if(post->more) { /* If used, this is a link to more file names, we must then do the magic to include several files with the same field name */ free(fileboundary); fileboundary = formboundary(data); if(!fileboundary) { result = CURLE_OUT_OF_MEMORY; break; } result = AddFormDataf(&form, &size, "\r\nContent-Type: multipart/mixed;" " boundary=%s\r\n", fileboundary); if(result) break; } file = post; do { /* If 'showfilename' is set, that is a faked name passed on to us to use to in the formpost. If that is not set, the actually used local file name should be added. 
*/ if(post->more) { /* if multiple-file */ result = AddFormDataf(&form, &size, "\r\n--%s\r\nContent-Disposition: " "attachment", fileboundary); if(result) break; result = formdata_add_filename(file, &form, &size); if(result) break; } else if(post->flags & (HTTPPOST_FILENAME|HTTPPOST_BUFFER| HTTPPOST_CALLBACK)) { /* it should be noted that for the HTTPPOST_FILENAME and HTTPPOST_CALLBACK cases the ->showfilename struct member is always assigned at this point */ if(post->showfilename || (post->flags & HTTPPOST_FILENAME)) { result = formdata_add_filename(post, &form, &size); } if(result) break; } if(file->contenttype) { /* we have a specified type */ result = AddFormDataf(&form, &size, "\r\nContent-Type: %s", file->contenttype); if(result) break; } curList = file->contentheader; while(curList) { /* Process the additional headers specified for this form */ result = AddFormDataf( &form, &size, "\r\n%s", curList->data ); if(result) break; curList = curList->next; } if(result) break; result = AddFormDataf(&form, &size, "\r\n\r\n"); if(result) break; if((post->flags & HTTPPOST_FILENAME) || (post->flags & HTTPPOST_READFILE)) { /* we should include the contents from the specified file */ FILE *fileread; fileread = strequal("-", file->contents)? stdin:fopen(file->contents, "rb"); /* binary read for win32 */ /* * VMS: This only allows for stream files on VMS. Stream files are * OK, as are FIXED & VAR files WITHOUT implied CC For implied CC, * every record needs to have a \n appended & 1 added to SIZE */ if(fileread) { if(fileread != stdin) { /* close the file */ fclose(fileread); /* add the file name only - for later reading from this */ result = AddFormData(&form, FORM_FILE, file->contents, 0, &size); } else { /* When uploading from stdin, we can't know the size of the file, * thus must read the full file as before. We *could* use chunked * transfer-encoding, but that only works for HTTP 1.1 and we * can't be sure we work with such a server. */ size_t nread; char buffer[512]; while((nread = fread(buffer, 1, sizeof(buffer), fileread)) != 0) { result = AddFormData(&form, FORM_CONTENT, buffer, nread, &size); if(result) break; } } } else { if(data) failf(data, "couldn't open file \"%s\"", file->contents); *finalform = NULL; result = CURLE_READ_ERROR; } } else if(post->flags & HTTPPOST_BUFFER) /* include contents of buffer */ result = AddFormData(&form, FORM_CONTENT, post->buffer, post->bufferlength, &size); else if(post->flags & HTTPPOST_CALLBACK) /* the contents should be read with the callback and the size is set with the contentslength */ result = AddFormData(&form, FORM_CALLBACK, post->userp, post->contentslength, &size); else /* include the contents we got */ result = AddFormData(&form, FORM_CONTENT, post->contents, post->contentslength, &size); file = file->more; } while(file && !result); /* for each specified file for this field */ if(result) break; if(post->more) { /* this was a multiple-file inclusion, make a termination file boundary: */ result = AddFormDataf(&form, &size, "\r\n--%s--", fileboundary); if(result) break; } } while((post = post->next) != NULL); /* for each field */ /* end-boundary for everything */ if(!result) result = AddFormDataf(&form, &size, "\r\n--%s--\r\n", boundary); if(result) { Curl_formclean(&firstform); free(fileboundary); free(boundary); return result; } *sizep = size; free(fileboundary); free(boundary); *finalform = firstform; return result; } /* * Curl_FormInit() inits the struct 'form' points to with the 'formdata' * and resets the 'sent' counter. 
*/ int Curl_FormInit(struct Form *form, struct FormData *formdata ) { if(!formdata) return 1; /* error */ form->data = formdata; form->sent = 0; form->fp = NULL; form->fread_func = ZERO_NULL; return 0; } #ifndef __VMS # define fopen_read fopen #else /* * vmsfopenread * * For upload to work as expected on VMS, different optional * parameters must be added to the fopen command based on * record format of the file. * */ # define fopen_read vmsfopenread static FILE * vmsfopenread(const char *file, const char *mode) { struct_stat statbuf; int result; result = stat(file, &statbuf); switch (statbuf.st_fab_rfm) { case FAB$C_VAR: case FAB$C_VFC: case FAB$C_STMCR: return fopen(file, "r"); /* VMS */ break; default: return fopen(file, "r", "rfm=stmlf", "ctx=stm"); } } #endif /* * readfromfile() * * The read callback that this function may use can return a value larger than * 'size' (which then this function returns) that indicates a problem and it * must be properly dealt with */ static size_t readfromfile(struct Form *form, char *buffer, size_t size) { size_t nread; bool callback = (form->data->type == FORM_CALLBACK)?TRUE:FALSE; if(callback) { if(form->fread_func == ZERO_NULL) return 0; else nread = form->fread_func(buffer, 1, size, form->data->line); } else { if(!form->fp) { /* this file hasn't yet been opened */ form->fp = fopen_read(form->data->line, "rb"); /* b is for binary */ if(!form->fp) return (size_t)-1; /* failure */ } nread = fread(buffer, 1, size, form->fp); } if(!nread) { /* this is the last chunk from the file, move on */ if(form->fp) { fclose(form->fp); form->fp = NULL; } form->data = form->data->next; } return nread; } /* * Curl_FormReader() is the fread() emulation function that will be used to * deliver the formdata to the transfer loop and then sent away to the peer. */ size_t Curl_FormReader(char *buffer, size_t size, size_t nitems, FILE *mydata) { struct Form *form; size_t wantedsize; size_t gotsize = 0; form=(struct Form *)mydata; wantedsize = size * nitems; if(!form->data) return 0; /* nothing, error, empty */ if((form->data->type == FORM_FILE) || (form->data->type == FORM_CALLBACK)) { gotsize = readfromfile(form, buffer, wantedsize); if(gotsize) /* If positive or -1, return. If zero, continue! */ return gotsize; } do { if((form->data->length - form->sent ) > wantedsize - gotsize) { memcpy(buffer + gotsize , form->data->line + form->sent, wantedsize - gotsize); form->sent += wantedsize-gotsize; return wantedsize; } memcpy(buffer+gotsize, form->data->line + form->sent, (form->data->length - form->sent) ); gotsize += form->data->length - form->sent; form->sent = 0; form->data = form->data->next; /* advance */ } while(form->data && (form->data->type < FORM_CALLBACK)); /* If we got an empty line and we have more data, we proceed to the next line immediately to avoid returning zero before we've reached the end. */ return gotsize; } /* * Curl_formpostheader() returns the first line of the formpost, the * request-header part (which is not part of the request-body like the rest of * the post). */ char *Curl_formpostheader(void *formp, size_t *len) { char *header; struct Form *form=(struct Form *)formp; if(!form->data) return 0; /* nothing, ERROR! */ header = form->data->line; *len = form->data->length; form->data = form->data->next; /* advance */ return header; } /* * formboundary() creates a suitable boundary string and returns an allocated * one. 
*/ static char *formboundary(struct SessionHandle *data) { /* 24 dashes and 16 hexadecimal digits makes 64 bit (18446744073709551615) combinations */ return aprintf("------------------------%08x%08x", Curl_rand(data), Curl_rand(data)); } #else /* CURL_DISABLE_HTTP */ CURLFORMcode curl_formadd(struct curl_httppost **httppost, struct curl_httppost **last_post, ...) { (void)httppost; (void)last_post; return CURL_FORMADD_DISABLED; } int curl_formget(struct curl_httppost *form, void *arg, curl_formget_callback append) { (void) form; (void) arg; (void) append; return CURL_FORMADD_DISABLED; } void curl_formfree(struct curl_httppost *form) { (void)form; /* does nothing HTTP is disabled */ } #endif /* !defined(CURL_DISABLE_HTTP) */
19,932
10,320
<filename>aws-python-rest-api-with-pymongo/item/get.py<gh_stars>1000+
import json
import os

import pymongo

# Fetch mongo env vars
usr = os.environ['MONGO_DB_USER']
pwd = <PASSWORD>['<PASSWORD>']
mongo_db_name = os.environ['MONGO_DB_NAME']
mongo_collection_name = os.environ['MONGO_COLLECTION_NAME']
url = os.environ['MONGO_DB_URL']

# Connection String
client = pymongo.MongoClient("mongodb+srv://" + usr + ":" + pwd + "@" + url + "/test?retryWrites=true&w=majority")

db = client[mongo_db_name]
collection = db[mongo_collection_name]


def get(event, context):
    # get item_id to look up from the path parameter
    item_id = event['pathParameters']['id']

    # fetch the item from the database
    item = collection.find_one({"_id": item_id})

    # create a response
    response = {
        "statusCode": 200,
        "body": json.dumps(item)
    }

    # return response
    return response
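For a quick local smoke test, the handler can be driven with a hand-built API Gateway-style event, assuming the Mongo env vars point at a reachable cluster; the item id below is made up:

if __name__ == "__main__":
    fake_event = {"pathParameters": {"id": "42"}}  # hypothetical id, matching the path-parameter shape above
    print(get(fake_event, context=None))           # context is unused by the handler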
351
1,041
package io.ebeaninternal.server.type;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import io.ebean.core.type.DataBinder;
import io.ebean.core.type.DataReader;
import io.ebean.core.type.DocPropertyType;
import io.ebeaninternal.server.core.BasicTypeConverter;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.math.BigDecimal;
import java.sql.SQLException;
import java.sql.Types;

/**
 * ScalarType for BigDecimal.
 */
class ScalarTypeBigDecimal extends ScalarTypeBase<BigDecimal> {

  ScalarTypeBigDecimal() {
    super(BigDecimal.class, true, Types.DECIMAL);
  }

  @Override
  public void bind(DataBinder binder, BigDecimal value) throws SQLException {
    if (value == null) {
      binder.setNull(Types.DECIMAL);
    } else {
      binder.setBigDecimal(value);
    }
  }

  @Override
  public BigDecimal read(DataReader reader) throws SQLException {
    return reader.getBigDecimal();
  }

  @Override
  public Object toJdbcType(Object value) {
    return BasicTypeConverter.toBigDecimal(value);
  }

  @Override
  public BigDecimal toBeanType(Object value) {
    return BasicTypeConverter.toBigDecimal(value);
  }

  @Override
  public String formatValue(BigDecimal t) {
    return t.toPlainString();
  }

  @Override
  public BigDecimal parse(String value) {
    return new BigDecimal(value);
  }

  @Override
  public BigDecimal convertFromMillis(long systemTimeMillis) {
    return BigDecimal.valueOf(systemTimeMillis);
  }

  @Override
  public boolean isDateTimeCapable() {
    return true;
  }

  @Override
  public BigDecimal readData(DataInput dataInput) throws IOException {
    if (!dataInput.readBoolean()) {
      return null;
    } else {
      return new BigDecimal(dataInput.readDouble());
    }
  }

  @Override
  public void writeData(DataOutput dataOutput, BigDecimal b) throws IOException {
    if (b == null) {
      dataOutput.writeBoolean(false);
    } else {
      dataOutput.writeBoolean(true);
      dataOutput.writeDouble(b.doubleValue());
    }
  }

  @Override
  public BigDecimal jsonRead(JsonParser parser) throws IOException {
    return parser.getDecimalValue();
  }

  @Override
  public void jsonWrite(JsonGenerator writer, BigDecimal value) throws IOException {
    writer.writeNumber(value);
  }

  @Override
  public DocPropertyType getDocType() {
    return DocPropertyType.DOUBLE;
  }

}
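Note that writeData/readData above round-trip the BigDecimal through a 64-bit double, which silently drops precision for values with more significant digits than a double can carry. A small Python sketch of the same effect:

from decimal import Decimal

original = Decimal("1234567890.123456789")  # 19 significant digits
round_tripped = Decimal(float(original))    # mirrors the writeDouble -> readDouble round trip
print(original == round_tripped)            # False: a double cannot represent all 19 digits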
855
460
package org.wildfly.swarm.howto.hollowjar;

import javax.naming.NamingException;
import javax.ws.rs.GET;
import javax.ws.rs.Path;

@Path("/")
public class MyResource {

    @GET
    public String get() throws NamingException {
        return "Hello, Uberjar!";
    }
}
110
10,340
{ "comment": "as observed", "time": "2017-12-21T10:55:48.6238832Z", "resourceId": "/SUBSCRIPTIONS/F628BA4C-F07B-4AEB-86CB-C89784BBD9B3", "correlationId": "54ade416-3c64-42ba-994f-1f6309efc989", "operationName": "Microsoft.Security/tasks/write", "level": "Information", "resultType": "Succeeded", "category": "Action", "location": "global", "properties": {"eventCategory":"Recommendation","eventName":"New Recommendation","operationId":"e447c42f-af43-40ca-962b-c3740893f100"} }
201
5,169
{ "name": "YCTutorialBox", "version": "1.0.5", "license": { "type": "MIT", "file": "LICENSE" }, "homepage": "https://github.com/yuppiu/YCTutorialBox", "authors": { "<NAME>": "<EMAIL>" }, "summary": "A UIView subclass for Tutorial/Welcome/New Feature/Explanation flow. You can focus one element in the screen and show instructions about it.", "source": { "git": "https://github.com/yuppiu/YCTutorialBox.git", "tag": "1.0.5" }, "source_files": "YCTutorialBox.{h,m}", "resources": [ "YCTutorialBox.xib" ], "requires_arc": true, "dependencies": { "FXBlurView": [ ], "FXLabel": [ ] }, "platforms": { "ios": "6.0" } }
299
310
<gh_stars>100-1000
import torch
import torch.nn as nn
import torch.optim as optim
import torch.autograd as autograd
import torch.nn.functional as F

EPOCHS = 100

test_sentence = """n-gram models are widely used in statistical natural language processing . In speech
recognition , phonemes and sequences of phonemes are modeled using a n-gram distribution . For parsing ,
words are modeled such that each n-gram is composed of n words . For language identification , sequences
of characters / graphemes ( letters of the alphabet ) are modeled for different languages For sequences
of characters , the 3-grams ( sometimes referred to as " trigrams " ) that can be generated from
" good morning " are " goo " , " ood " , " od " , " dm ", " mo " , " mor " and so forth , counting the
space character as a gram ( sometimes the beginning and end of a text are modeled explicitly , adding
" __g " , " _go " , " ng_ " , and " g__ " ) . For sequences of words , the trigrams that can be generated
from " the dog smelled like a skunk " are " # the dog " , " the dog smelled " , " dog smelled like ",
" smelled like a " , " like a skunk " and " a skunk # " .""".split()

trigrams = [([test_sentence[i], test_sentence[i+1]], test_sentence[i+2])
            for i in range(len(test_sentence) - 2)]

vocab = set(test_sentence)
word2idx = {word: i for i, word in enumerate(vocab)}
idx2word = {i: word for word, i in word2idx.items()}


class NGram(nn.Module):

    def __init__(self, vocab_size, embedding_dim=16, context_size=2):
        super().__init__()
        self.embeddings = nn.Embedding(vocab_size, embedding_dim)
        self.l1 = nn.Linear(context_size * embedding_dim, 128)
        self.l2 = nn.Linear(128, vocab_size)
        self._init_weight()

    def forward(self, inputs):
        embeds = self.embeddings(inputs).view(1, -1)
        out = F.relu(self.l1(embeds))
        out = self.l2(out)
        # dim=1 makes the softmax axis (the vocabulary) explicit
        log_probs = F.log_softmax(out, dim=1)
        return log_probs

    def _init_weight(self, scope=0.1):
        self.embeddings.weight.data.uniform_(-scope, scope)
        self.l1.weight.data.uniform_(0, scope)
        self.l1.bias.data.fill_(0)
        self.l2.weight.data.uniform_(0, scope)
        self.l2.bias.data.fill_(0)


criterion = nn.NLLLoss()
model = NGram(len(vocab))
optimizer = optim.Adam(model.parameters(), lr=1e-3)

model.train()
for epoch in range(EPOCHS):
    total_loss = torch.Tensor([0])
    for context, target in trigrams:
        context_idxs = list(map(lambda w: word2idx[w], context))
        context_var = autograd.Variable(torch.LongTensor(context_idxs))

        model.zero_grad()
        log_probs = model(context_var)
        loss = criterion(log_probs, autograd.Variable(torch.LongTensor([word2idx[target]])))

        loss.backward()
        optimizer.step()
        total_loss += loss.data
    print(total_loss[0])

model.eval()


def predict(context):
    context_idxs = list(map(lambda w: word2idx[w], context))
    # torch.no_grad() replaces the deprecated volatile=True flag for inference
    with torch.no_grad():
        context_var = autograd.Variable(torch.LongTensor(context_idxs))
        predict = model(context_var)
    index = (torch.max(predict, 1)[1]).data.tolist()[0]
    return idx2word[index]


for context in [["widely", "used"], ["and", "so"], ["are", "modeled"]]:
    print("{} + {} = {}".format(context[0], context[1], predict(context)))
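The trigram windowing above is easiest to see on the example sentence the training text itself mentions:

sentence = "the dog smelled like a skunk".split()
trigrams = [([sentence[i], sentence[i + 1]], sentence[i + 2])
            for i in range(len(sentence) - 2)]
print(trigrams[0])  # (['the', 'dog'], 'smelled'): two context words predict the third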
1,304
589
<reponame>ClaudioWaldvogel/inspectIT
package rocks.inspectit.agent.java.tracing.core.adapter.http;

import java.util.Collections;
import java.util.Map;

import io.opentracing.tag.Tags;

import rocks.inspectit.agent.java.tracing.core.adapter.ResponseAdapter;
import rocks.inspectit.agent.java.tracing.core.adapter.http.data.HttpResponse;

/**
 * The base {@link ResponseAdapter} for all synchronous HTTP client responses.
 *
 * @author <NAME>
 */
public class HttpResponseAdapter implements ResponseAdapter {

	/**
	 * HTTP response to read data from.
	 */
	private HttpResponse httpResponse;

	/**
	 * Default constructor.
	 *
	 * @param httpResponse
	 *            HTTP response to read data from.
	 */
	public HttpResponseAdapter(HttpResponse httpResponse) {
		this.httpResponse = httpResponse;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Map<String, String> getTags() {
		int status = httpResponse.getStatus();
		if (status > 0) {
			return Collections.<String, String> singletonMap(Tags.HTTP_STATUS.getKey(), String.valueOf(status));
		} else {
			return Collections.emptyMap();
		}
	}

}
373
777
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Without any args, this simply loads the IDs out of a bunch of the Chrome GRD files, and then checks the subset of the code that loads the strings to try and figure out what isn't in use any more. You can give paths to GRD files and source directories to control what is check instead. """ import os import re import sys import xml.sax # Extra messages along the way # 1 - Print ids that are found in sources but not in the found id set # 2 - Files that aren't processes (don't match the source name regex) DEBUG = 0 class GrdIDExtractor(xml.sax.handler.ContentHandler): """Extracts the IDs from messages in GRIT files""" def __init__(self): self.id_set_ = set() def startElement(self, name, attrs): if name == 'message': self.id_set_.add(attrs['name']) def allIDs(self): """Return all the IDs found""" return self.id_set_.copy() def CheckForUnusedGrdIDsInSources(grd_files, src_dirs): """Will collect the message ids out of the given GRD files and then scan the source directories to try and figure out what ids are not currently being used by any source. grd_files: A list of GRD files to collect the ids from. src_dirs: A list of directories to walk looking for source files. """ # Collect all the ids into a large map all_ids = set() file_id_map = {} for y in grd_files: handler = GrdIDExtractor() xml.sax.parse(y, handler) files_ids = handler.allIDs() file_id_map[y] = files_ids all_ids |= files_ids # The regex that will be used to check sources id_regex = re.compile('IDS_[A-Z0-9_]+') # Make sure the regex matches every id found. got_err = False for x in all_ids: match = id_regex.search(x) if match is None: print 'ERROR: "%s" did not match our regex' % (x) got_err = True if not match.group(0) is x: print 'ERROR: "%s" did not fully match our regex' % (x) got_err = True if got_err: return 1 # The regex for deciding what is a source file src_regex = re.compile('\.(([chm])|(mm)|(cc)|(cp)|(cpp)|(xib)|(py))$') ids_left = all_ids.copy() # Scanning time. for src_dir in src_dirs: for root, dirs, files in os.walk(src_dir): # Remove svn directories from recursion if '.svn' in dirs: dirs.remove('.svn') for file in files: if src_regex.search(file.lower()): full_path = os.path.join(root, file) src_file_contents = open(full_path).read() for match in sorted(set(id_regex.findall(src_file_contents))): if match in ids_left: ids_left.remove(match) if DEBUG: if not match in all_ids: print '%s had "%s", which was not in the found IDs' % \ (full_path, match) elif DEBUG > 1: full_path = os.path.join(root, file) print 'Skipping %s.' % (full_path) # Anything left? 
if len(ids_left) > 0:
    print 'The following ids are in GRD files, but *appear* to be unused:'
    for file_path, file_ids in file_id_map.iteritems():
      missing = ids_left.intersection(file_ids)
      if len(missing) > 0:
        print '  %s:' % (file_path)
        print '\n'.join('    %s' % (x) for x in sorted(missing))

  return 0


def main():
  # script lives in src/tools
  tools_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
  src_dir = os.path.dirname(tools_dir)
  # Used by both the GRD and source-directory defaults below, so it must be
  # defined even when explicit GRD files are passed on the command line.
  chrome_dir = os.path.join(src_dir, 'chrome')

  # Collect the args into the right buckets
  src_dirs = []
  grd_files = []
  for arg in sys.argv[1:]:
    if arg.lower().endswith('.grd'):
      grd_files.append(arg)
    else:
      src_dirs.append(arg)

  # If no GRD files were given, default them:
  if len(grd_files) == 0:
    ash_base_dir = os.path.join(src_dir, 'ash')
    chrome_app_dir = os.path.join(chrome_dir, 'app')
    chrome_app_res_dir = os.path.join(chrome_app_dir, 'resources')
    device_base_dir = os.path.join(src_dir, 'device')
    ui_dir = os.path.join(src_dir, 'ui')
    ui_strings_dir = os.path.join(ui_dir, 'strings')
    ui_chromeos_dir = os.path.join(ui_dir, 'chromeos')
    grd_files = [
      os.path.join(ash_base_dir, 'ash_strings.grd'),
      os.path.join(ash_base_dir, 'resources', 'ash_resources.grd'),
      os.path.join(chrome_app_dir, 'chromium_strings.grd'),
      os.path.join(chrome_app_dir, 'generated_resources.grd'),
      os.path.join(chrome_app_dir, 'google_chrome_strings.grd'),
      os.path.join(chrome_app_res_dir, 'locale_settings.grd'),
      os.path.join(chrome_app_res_dir, 'locale_settings_chromiumos.grd'),
      os.path.join(chrome_app_res_dir, 'locale_settings_google_chromeos.grd'),
      os.path.join(chrome_app_res_dir, 'locale_settings_linux.grd'),
      os.path.join(chrome_app_res_dir, 'locale_settings_mac.grd'),
      os.path.join(chrome_app_res_dir, 'locale_settings_win.grd'),
      os.path.join(chrome_app_dir, 'theme', 'theme_resources.grd'),
      os.path.join(chrome_dir, 'browser', 'browser_resources.grd'),
      os.path.join(chrome_dir, 'common', 'common_resources.grd'),
      os.path.join(chrome_dir, 'renderer', 'resources', 'renderer_resources.grd'),
      os.path.join(device_base_dir, 'bluetooth', 'bluetooth_strings.grd'),
      os.path.join(src_dir, 'extensions', 'extensions_strings.grd'),
      os.path.join(src_dir, 'ui', 'resources', 'ui_resources.grd'),
      os.path.join(src_dir, 'ui', 'webui', 'resources', 'webui_resources.grd'),
      os.path.join(ui_strings_dir, 'app_locale_settings.grd'),
      os.path.join(ui_strings_dir, 'ui_strings.grd'),
      os.path.join(ui_chromeos_dir, 'ui_chromeos_strings.grd'),
    ]

  # If no source directories were given, default them:
  if len(src_dirs) == 0:
    src_dirs = [
      os.path.join(src_dir, 'app'),
      os.path.join(src_dir, 'ash'),
      os.path.join(src_dir, 'chrome'),
      os.path.join(src_dir, 'components'),
      os.path.join(src_dir, 'content'),
      os.path.join(src_dir, 'device'),
      os.path.join(src_dir, 'extensions'),
      os.path.join(src_dir, 'ui'),
      # nsNSSCertHelper.cpp has a bunch of ids
      os.path.join(src_dir, 'third_party', 'mozilla_security_manager'),
      os.path.join(chrome_dir, 'installer'),
    ]

  return CheckForUnusedGrdIDsInSources(grd_files, src_dirs)


if __name__ == '__main__':
  sys.exit(main())
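At its core the check above is just a set difference between the IDs declared in GRD files and the IDS_ tokens matched in source text. A standalone sketch of that idea (the IDs and source line below are made-up examples, not Chromium strings):

```python
import re

# Made-up IDs and source text, illustrating the set-difference check above.
id_regex = re.compile(r'IDS_[A-Z0-9_]+')
grd_ids = {'IDS_APP_NAME', 'IDS_OLD_UNUSED'}
source = 'label->SetText(l10n_util::GetStringUTF16(IDS_APP_NAME));'

used = set(id_regex.findall(source))
print(grd_ids - used)  # -> {'IDS_OLD_UNUSED'}
```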
from datahub.ingestion.source.file import GenericFileSource


def check_mce_file(filepath: str) -> str:
    mce_source = GenericFileSource.create({"filename": filepath}, None)
    for _ in mce_source.get_workunits():
        pass
    return f"{mce_source.get_report().workunits_produced} MCEs found - all valid"
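A minimal way to exercise this helper, assuming the `datahub` package is installed; the file path is a placeholder, not a file shipped with the project:

```python
# Hypothetical driver for check_mce_file; "mces.json" is a placeholder path.
if __name__ == "__main__":
    print(check_mce_file("mces.json"))
```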
""" This module contains the implementations of performance providers for multi-score anomaly metrics. """ # Copyright (C) 2021-2022 Intel Corporation # SPDX-License-Identifier: Apache-2.0 # from abc import ABC from typing import List, Optional from ote_sdk.entities.metrics import ( MetricsGroup, MultiScorePerformance, Performance, ScoreMetric, ) from ote_sdk.entities.resultset import ResultSetEntity from ote_sdk.usecases.evaluation.averaging import MetricAverageMethod from ote_sdk.usecases.evaluation.dice import DiceAverage from ote_sdk.usecases.evaluation.f_measure import FMeasure from ote_sdk.usecases.evaluation.performance_provider_interface import ( IPerformanceProvider, ) from ote_sdk.utils.dataset_utils import ( contains_anomalous_images, split_local_global_resultset, ) class AnomalyLocalizationPerformance(MultiScorePerformance): """ This class implements a special case of the MultiScorePerformance, specific for anomaly tasks that perform anomaly localization (detection/segmentation), in addition to anomaly classification. :param global_score: Image-level performance metric. :param local_score: Pixel- or bbox-level performance metric, depending on the task type. :param dashboard_metrics: (optional) additional statistics, containing charts, curves, and other additional info. """ def __init__( self, global_score: ScoreMetric, local_score: Optional[ScoreMetric], dashboard_metrics: Optional[List[MetricsGroup]], ): super().__init__( primary_score=local_score, additional_scores=[global_score], dashboard_metrics=dashboard_metrics, ) self._global_score = global_score self._local_score = local_score @property def global_score(self): """Return the global (image-level) score metric.""" return self._global_score @property def local_score(self): """Return the local (pixel-/bbox-level) score metric.""" return self._local_score class AnomalyLocalizationScores(IPerformanceProvider, ABC): """ This class provides the AnomalyLocalizationPerformance object for anomaly segmentation and anomaly detection tasks. Depending on the subclass, the `get_performance` method returns an AnomalyLocalizationPerformance object with the pixel- or bbox-level metric as the primary score. The global (image-level) performance metric is included as an additional metric. :param resultset: ResultSet that scores will be computed for """ def __init__(self, resultset: ResultSetEntity): self.local_score: Optional[ScoreMetric] = None self.dashboard_metrics: List[MetricsGroup] = [] global_resultset, local_resultset = split_local_global_resultset(resultset) global_metric = FMeasure(resultset=global_resultset) global_performance = global_metric.get_performance() self.global_score = global_performance.score self.dashboard_metrics += global_performance.dashboard_metrics if contains_anomalous_images(local_resultset.ground_truth_dataset): local_metric = self._get_local_metric(local_resultset) local_performance = local_metric.get_performance() self.local_score = local_performance.score self.dashboard_metrics += local_performance.dashboard_metrics @staticmethod def _get_local_metric(local_resultset: ResultSetEntity) -> IPerformanceProvider: raise NotImplementedError def get_performance(self) -> Performance: return AnomalyLocalizationPerformance( global_score=self.global_score, local_score=self.local_score, dashboard_metrics=self.dashboard_metrics, ) class AnomalySegmentationScores(AnomalyLocalizationScores): """ Performance provider for anomaly segmentation tasks. 
""" @staticmethod def _get_local_metric(local_resultset: ResultSetEntity) -> IPerformanceProvider: return DiceAverage(resultset=local_resultset, average=MetricAverageMethod.MICRO) class AnomalyDetectionScores(AnomalyLocalizationScores): """ Performance provider for anomaly detection tasks. """ @staticmethod def _get_local_metric(local_resultset: ResultSetEntity) -> IPerformanceProvider: return FMeasure(resultset=local_resultset)
package com.dropbox.core.v1;

import com.dropbox.core.json.JsonArrayReader;
import com.dropbox.core.json.JsonReadException;
import com.dropbox.core.json.JsonReader;
import com.dropbox.core.util.Collector;
import com.dropbox.core.util.DumpWriter;
import com.dropbox.core.util.Dumpable;
import com.fasterxml.jackson.core.JsonLocation;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

import java.io.IOException;

/*>>> import checkers.nullness.quals.Nullable; */

/**
 * Represents a single "page" of results from a delta-style API call.  This is the more
 * generic version of the {@link DbxDelta} object.
 *
 * @param <C>
 *     The type of value used to aggregate all the delta entries.  For example, if you
 *     process the delta entries as they come in and throw the rest away, this can be
 *     something as simple as a count of the entries processed.
 */
public class DbxDeltaC<C> extends Dumpable
{
    /**
     * If {@code true}, then you should reset your local state to be an empty
     * folder before processing the list of delta entries.
     *
     * <p>
     * This is always {@code true} for the first delta result, but for subsequent results
     * it is true only in rare situations.  For example, if Dropbox changes their cursor
     * format, or if a user asks Dropbox to completely reset his/her account, then the
     * next time you call a delta API it may send down a reset and start you from scratch.
     * </p>
     */
    public final boolean reset;

    /**
     * Apply these entries to your local state to catch up with the Dropbox server's state.
     */
    public final C entries;

    /**
     * A string that is used by the server to keep track of which entries have already been
     * returned to you.  This is what you pass in to the next API call to continue where you
     * left off.
     *
     * <p>
     * This cursor is valid for a long time.  You'd typically store this somewhere persistent
     * (such as a database) so you can resume where you left off.
     * </p>
     */
    public final String cursor;

    /**
     * If {@code true}, then there are more entries available.  You can retrieve
     * them immediately by making the call again (passing in {@link #cursor}).
     * If {@code false}, then wait at least 5 minutes before checking again.
     */
    public final boolean hasMore;

    /**
     * @param reset {@link #reset}
     * @param entries {@link #entries}
     * @param cursor {@link #cursor}
     * @param hasMore {@link #hasMore}
     */
    public DbxDeltaC(boolean reset, C entries, String cursor, boolean hasMore)
    {
        this.reset = reset;
        this.entries = entries;
        this.cursor = cursor;
        this.hasMore = hasMore;
    }

    protected void dumpFields(DumpWriter out)
    {
        out.f("reset").v(reset);
        out.f("cursor").v(cursor);
        out.f("hasMore").v(hasMore);
        // TODO: Figure out how to print 'entries'.  Might be too much to make it a Dumpable?
    }

    /**
     * For JSON parsing.
     */
    public static final class Reader<C, MD extends Dumpable> extends JsonReader<DbxDeltaC<C>>
    {
        public final JsonReader<MD> metadataReader;
        public final Collector<DbxDeltaC.Entry<MD>, C> entryCollector;

        public Reader(JsonReader<MD> metadataReader, Collector<DbxDeltaC.Entry<MD>, C> entryCollector)
        {
            this.metadataReader = metadataReader;
            this.entryCollector = entryCollector;
        }

        public DbxDeltaC<C> read(JsonParser parser)
            throws IOException, JsonReadException
        {
            return read(parser, metadataReader, entryCollector);
        }

        public static <C, MD extends Dumpable> DbxDeltaC<C> read(JsonParser parser,
                                                                 JsonReader<MD> metadataReader,
                                                                 Collector<DbxDeltaC.Entry<MD>, C> entryCollector)
            throws IOException, JsonReadException
        {
            JsonLocation top = JsonReader.expectObjectStart(parser);

            Boolean reset = null;
            C entries = null;
            String cursor = null;
            Boolean has_more = null;

            while (parser.getCurrentToken() == JsonToken.FIELD_NAME) {
                String fieldName = parser.getCurrentName();
                JsonReader.nextToken(parser);

                int fi = FM.get(fieldName);
                try {
                    if (fi == -1) {
                        // Unknown field.  Skip over it.
                        JsonReader.skipValue(parser);
                        continue;
                    }
                    switch (fi) {
                        case FM_reset:
                            reset = JsonReader.BooleanReader.readField(parser, fieldName, reset);
                            break;
                        case FM_entries:
                            JsonReader<Entry<MD>> entryReader = new Entry.Reader<MD>(metadataReader);
                            entries = JsonArrayReader.mk(entryReader, entryCollector).readField(parser, fieldName, entries);
                            break;
                        case FM_cursor:
                            cursor = JsonReader.StringReader.readField(parser, fieldName, cursor);
                            break;
                        case FM_has_more:
                            has_more = JsonReader.BooleanReader.readField(parser, fieldName, has_more);
                            break;
                        default:
                            throw new AssertionError("bad index: " + fi + ", field = \"" + fieldName + "\"");
                    }
                }
                catch (JsonReadException ex) {
                    throw ex.addFieldContext(fieldName);
                }
            }

            JsonReader.expectObjectEnd(parser);

            if (reset == null) throw new JsonReadException("missing field \"reset\"", top);
            if (entries == null) throw new JsonReadException("missing field \"entries\"", top);
            if (cursor == null) throw new JsonReadException("missing field \"cursor\"", top);
            if (has_more == null) throw new JsonReadException("missing field \"has_more\"", top);

            return new DbxDeltaC<C>(reset, entries, cursor, has_more);
        }

        private static final int FM_reset = 0;
        private static final int FM_entries = 1;
        private static final int FM_cursor = 2;
        private static final int FM_has_more = 3;

        private static final JsonReader.FieldMapping FM;
        static {
            JsonReader.FieldMapping.Builder b = new JsonReader.FieldMapping.Builder();
            b.add("reset", FM_reset);
            b.add("entries", FM_entries);
            b.add("cursor", FM_cursor);
            b.add("has_more", FM_has_more);
            FM = b.build();
        }
    }

    /**
     * A single "delta entry" in a {@link DbxDeltaC} page.
     *
     * @param <MD>
     *     The type of metadata being returned in the delta results.
     */
    public static final class Entry<MD extends Dumpable> extends Dumpable
    {
        /**
         * The lower-cased path of the entry.  Dropbox compares file paths in a
         * case-insensitive manner.  For example, an entry for {@code "/readme.txt"}
         * should overwrite the entry for {@code "/ReadMe.TXT"}.
         *
         * <p>
         * To get the original case-preserved path, look in the {@link #metadata metadata} field.
         * </p>
         */
        public final String lcPath;

        /**
         * If this is {@code null}, it means that this path doesn't exist on
         * Dropbox's copy of the file system.  To update your local state to
         * match, delete whatever is at that path, including any children.
         * If your local state doesn't have anything at this path, ignore this entry.
* * <p> * If this is not {@code null}, it means that Dropbox has a file/folder * at this path with the given metadata. To update your local state to match, * add the entry to your local state as well. * </p> * <ul> * <li> * If the path refers to parent folders that don't exist yet in your local * state, create those parent folders in your local state. * </li> * <li> * If the metadata is for a file, replace whatever your local state has at * that path with the new entry. * </li> * <li> * If the metadata is for a folder, check what your local state has at the * path. If it's a file, replace it with the new entry. If it's a folder, * apply the new metadata to the folder, but do not modify the folder's * children. * </li> * </ul> */ public final /*@Nullable*/MD metadata; /** * @param lcPath {@link #lcPath} * @param metadata {@link #metadata} */ public Entry(String lcPath, /*@Nullable*/MD metadata) { this.lcPath = lcPath; this.metadata = metadata; } protected void dumpFields(DumpWriter out) { out.f("lcPath").v(lcPath); out.f("metadata").v(metadata); } /** * For JSON parsing. */ public static final class Reader<MD extends Dumpable> extends JsonReader<Entry<MD>> { public final JsonReader<MD> metadataReader; public Reader(JsonReader<MD> metadataReader) { this.metadataReader = metadataReader; } public Entry<MD> read(JsonParser parser) throws IOException, JsonReadException { return read(parser, metadataReader); } public static <MD extends Dumpable> Entry<MD> read(JsonParser parser, JsonReader<MD> metadataReader) throws IOException, JsonReadException { JsonLocation arrayStart = JsonReader.expectArrayStart(parser); if (JsonReader.isArrayEnd(parser)) { throw new JsonReadException("expecting a two-element array of [path, metadata], found a zero-element array", arrayStart); } String lcPath; try { lcPath = JsonReader.StringReader.read(parser); } catch (JsonReadException ex) { throw ex.addArrayContext(0); } if (JsonReader.isArrayEnd(parser)) { throw new JsonReadException("expecting a two-element array of [path, metadata], found a one-element array", arrayStart); } /*@Nullable*/MD metadata; try { metadata = metadataReader.readOptional(parser); } catch (JsonReadException ex) { throw ex.addArrayContext(1); } if (!JsonReader.isArrayEnd(parser)) { throw new JsonReadException("expecting a two-element array of [path, metadata], found more than two elements", arrayStart); } parser.nextToken(); return new Entry<MD>(lcPath, metadata); } } } }
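The consumption pattern this class is designed for is a simple cursor loop. A hedged sketch in Python (`call_delta` is a hypothetical stand-in for the real client call; the field names mirror DbxDeltaC):

```python
# Hypothetical delta-sync loop; `call_delta` stands in for the real client
# call and returns an object shaped like DbxDeltaC.
def sync(call_delta, apply_entry, reset_state, save_cursor, cursor=None):
    while True:
        page = call_delta(cursor)
        if page.reset:
            reset_state()              # start again from an empty folder
        for entry in page.entries:
            apply_entry(entry)         # delete or upsert at entry.lc_path
        cursor = page.cursor
        save_cursor(cursor)            # persist so a crash can resume here
        if not page.has_more:
            return cursor              # poll again later (>= 5 minutes)
```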
#include <openssl/opensslconf.h>

#ifdef OPENSSL_NO_JPAKE

#ifdef OPENSSL_SYS_WINDOWS
#include <stdio.h>
#endif

int main(int argc, char *argv[])
{
    TINYCLR_SSL_PRINTF("No J-PAKE support\n");
    return(0);
}

#else

#include <openssl/jpake.h>
#include <openssl/err.h>

static void showbn(const char *name, const BIGNUM *bn)
{
    TINYCLR_SSL_FPUTS(name, OPENSSL_TYPE__FILE_STDOUT);
    TINYCLR_SSL_FPUTS(" = ", OPENSSL_TYPE__FILE_STDOUT);
    BN_print_fp(OPENSSL_TYPE__FILE_STDOUT, bn);
    putc('\n', OPENSSL_TYPE__FILE_STDOUT);
}

static int run_jpake(JPAKE_CTX *alice, JPAKE_CTX *bob)
{
    JPAKE_STEP1 alice_s1;
    JPAKE_STEP1 bob_s1;
    JPAKE_STEP2 alice_s2;
    JPAKE_STEP2 bob_s2;
    JPAKE_STEP3A alice_s3a;
    JPAKE_STEP3B bob_s3b;

    /* Alice -> Bob: step 1 */
    puts("A->B s1");
    JPAKE_STEP1_init(&alice_s1);
    JPAKE_STEP1_generate(&alice_s1, alice);
    if(!JPAKE_STEP1_process(bob, &alice_s1))
    {
        TINYCLR_SSL_PRINTF("Bob fails to process Alice's step 1\n");
        ERR_print_errors_fp(OPENSSL_TYPE__FILE_STDOUT);
        return 1;
    }
    JPAKE_STEP1_release(&alice_s1);

    /* Bob -> Alice: step 1 */
    puts("B->A s1");
    JPAKE_STEP1_init(&bob_s1);
    JPAKE_STEP1_generate(&bob_s1, bob);
    if(!JPAKE_STEP1_process(alice, &bob_s1))
    {
        TINYCLR_SSL_PRINTF("Alice fails to process Bob's step 1\n");
        ERR_print_errors_fp(OPENSSL_TYPE__FILE_STDOUT);
        return 2;
    }
    JPAKE_STEP1_release(&bob_s1);

    /* Alice -> Bob: step 2 */
    puts("A->B s2");
    JPAKE_STEP2_init(&alice_s2);
    JPAKE_STEP2_generate(&alice_s2, alice);
    if(!JPAKE_STEP2_process(bob, &alice_s2))
    {
        TINYCLR_SSL_PRINTF("Bob fails to process Alice's step 2\n");
        ERR_print_errors_fp(OPENSSL_TYPE__FILE_STDOUT);
        return 3;
    }
    JPAKE_STEP2_release(&alice_s2);

    /* Bob -> Alice: step 2 */
    puts("B->A s2");
    JPAKE_STEP2_init(&bob_s2);
    JPAKE_STEP2_generate(&bob_s2, bob);
    if(!JPAKE_STEP2_process(alice, &bob_s2))
    {
        TINYCLR_SSL_PRINTF("Alice fails to process Bob's step 2\n");
        ERR_print_errors_fp(OPENSSL_TYPE__FILE_STDOUT);
        return 4;
    }
    JPAKE_STEP2_release(&bob_s2);

    showbn("Alice's key", JPAKE_get_shared_key(alice));
    showbn("Bob's key  ", JPAKE_get_shared_key(bob));

    /* Alice -> Bob: step 3a */
    puts("A->B s3a");
    JPAKE_STEP3A_init(&alice_s3a);
    JPAKE_STEP3A_generate(&alice_s3a, alice);
    if(!JPAKE_STEP3A_process(bob, &alice_s3a))
    {
        TINYCLR_SSL_PRINTF("Bob fails to process Alice's step 3a\n");
        ERR_print_errors_fp(OPENSSL_TYPE__FILE_STDOUT);
        return 5;
    }
    JPAKE_STEP3A_release(&alice_s3a);

    /* Bob -> Alice: step 3b */
    puts("B->A s3b");
    JPAKE_STEP3B_init(&bob_s3b);
    JPAKE_STEP3B_generate(&bob_s3b, bob);
    if(!JPAKE_STEP3B_process(alice, &bob_s3b))
    {
        TINYCLR_SSL_PRINTF("Alice fails to process Bob's step 3b\n");
        ERR_print_errors_fp(OPENSSL_TYPE__FILE_STDOUT);
        return 6;
    }
    JPAKE_STEP3B_release(&bob_s3b);

    return 0;
}

int main(int argc, char **argv)
{
    JPAKE_CTX *alice;
    JPAKE_CTX *bob;
    BIGNUM *p = NULL;
    BIGNUM *g = NULL;
    BIGNUM *q = NULL;
    BIGNUM *secret = BN_new();
    BIO *bio_err;

    bio_err = BIO_new_fp(OPENSSL_TYPE__FILE_STDERR, BIO_NOCLOSE);

    CRYPTO_malloc_debug_init();
    CRYPTO_dbg_set_options(V_CRYPTO_MDEBUG_ALL);
    CRYPTO_mem_ctrl(CRYPTO_MEM_CHECK_ON);

    ERR_load_crypto_strings();

/*
    BN_hex2bn(&p, "<KEY>");
    BN_hex2bn(&g, "<KEY>");
    BN_hex2bn(&q, "9760508f15230bccb292b982a2eb840bf0581cf5");
*/
/*
    p = BN_new();
    BN_generate_prime(p, 1024, 1, NULL, NULL, NULL, NULL);
*/
    /* Use a safe prime for p (that we found earlier) */
    BN_hex2bn(&p,
"F9E5B365665EA7A05A9C534502780FEE6F1AB5BD4F49947FD036DBD7E905269AF46EF28B0FC07487EE4F5D20FB3C0AF8E700F3A2FA3414970CBED44FEDFF80CE78D800F184BB82435D137AADA2C6C16523247930A63B85661D1FC817A51ACD96168E95898A1F83A79FFB529368AA7833ABD1B0C3AEDDB14D2E1A2F71D99F763F"); showbn("p", p); g = BN_new(); BN_set_word(g, 2); showbn("g", g); q = BN_new(); BN_rshift1(q, p); showbn("q", q); BN_rand(secret, 32, -1, 0); /* A normal run, expect this to work... */ alice = JPAKE_CTX_new("Alice", "Bob", p, g, q, secret); bob = JPAKE_CTX_new("Bob", "Alice", p, g, q, secret); if(run_jpake(alice, bob) != 0) { TINYCLR_SSL_FPRINTF(OPENSSL_TYPE__FILE_STDERR, "Plain JPAKE run failed\n"); return 1; } JPAKE_CTX_free(bob); JPAKE_CTX_free(alice); /* Now give Alice and Bob different secrets */ alice = JPAKE_CTX_new("Alice", "Bob", p, g, q, secret); BN_add_word(secret, 1); bob = JPAKE_CTX_new("Bob", "Alice", p, g, q, secret); if(run_jpake(alice, bob) != 5) { TINYCLR_SSL_FPRINTF(OPENSSL_TYPE__FILE_STDERR, "Mismatched secret JPAKE run failed\n"); return 1; } JPAKE_CTX_free(bob); JPAKE_CTX_free(alice); BN_free(secret); BN_free(q); BN_free(g); BN_free(p); CRYPTO_cleanup_all_ex_data(); ERR_remove_thread_state(NULL); ERR_free_strings(); CRYPTO_mem_leaks(bio_err); return 0; } #endif
/* Editor Settings: expandtabs and use 4 spaces for indentation * ex: set softtabstop=4 tabstop=8 expandtab shiftwidth=4: * * -*- mode: c, c-basic-offset: 4 -*- */ /* * Copyright © BeyondTrust Software 2004 - 2019 * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * BEYONDTRUST MAKES THIS SOFTWARE AVAILABLE UNDER OTHER LICENSING TERMS AS * WELL. IF YOU HAVE ENTERED INTO A SEPARATE LICENSE AGREEMENT WITH * BEYONDTRUST, THEN YOU MAY ELECT TO USE THE SOFTWARE UNDER THE TERMS OF THAT * SOFTWARE LICENSE AGREEMENT INSTEAD OF THE TERMS OF THE APACHE LICENSE, * NOTWITHSTANDING THE ABOVE NOTICE. IF YOU HAVE QUESTIONS, OR WISH TO REQUEST * A COPY OF THE ALTERNATE LICENSING TERMS OFFERED BY BEYONDTRUST, PLEASE CONTACT * BEYONDTRUST AT beyondtrust.com/contact */ /* * Copyright (C) BeyondTrust Software. All rights reserved. * * Module Name: * * usermonitor.h * * Abstract: * * User monitor service for local users and groups * * Public header * * Authors: <NAME> <<EMAIL>> * */ #ifndef __USERMONITOR_H__ #define __USERMONITOR_H__ #if defined(_DCE_IDL_) || defined(__midl) cpp_quote("#include <usermonitor-encoding.h>") cpp_quote("#if 0") #endif #ifdef __cplusplus_cli #define STRUCT value struct #else #define STRUCT struct #endif /* * UNIX struct passwd and related AD RFC2307 attributes */ typedef STRUCT _USER_MONITOR_PASSWD { PSTR pw_name; PSTR pw_passwd; DWORD pw_uid; DWORD pw_gid; PSTR pw_gecos; PSTR pw_dir; PSTR pw_shell; PSTR pDisplayName; DWORD LastUpdated; } USER_MONITOR_PASSWD, *PUSER_MONITOR_PASSWD; /* used when processing to discriminate between format changes */ #define AD_USER_CHANGE_VERSION "(v2)" #define AD_USER_INFO_VERSION 0 /* * AD user attributes */ typedef STRUCT _AD_USER_INFO { // the version of this struct DWORD version; // This is a subset of LSA_SECURITY_OBJECT, // and sub LSA_SECURITY_OBJECT_USER_INFO // struct PSTR pszDN; PSTR pszObjectSid; BOOL enabled; BOOL bIsLocal; PSTR pszNetbiosDomainName; PSTR pszSamAccountName; // the LSA_SECURITY_OBJECT_USER_INFO PSTR pszPrimaryGroupSid; PSTR pszUPN; PSTR pszAliasName; // NT time values UINT64 qwPwdLastSet; UINT64 qwMaxPwdAge; UINT64 qwPwdExpires; UINT64 qwAccountExpires; BOOL bIsGeneratedUPN; BOOL bIsAccountInfoKnown; BOOL bPasswordExpired; BOOL bPasswordNeverExpires; BOOL bPromptPasswordChange; BOOL bUserCanChangePassword; BOOL bAccountDisabled; BOOL bAccountExpired; BOOL bAccountLocked; // the UNIX attributes DWORD pw_uid; DWORD pw_gid; PSTR pw_name; PSTR pw_passwd; PSTR pw_gecos; PSTR pw_shell; PSTR pw_dir; PSTR pDisplayName; PSTR pszWindowsHomeFolder; PSTR pszLocalWindowsHomeFolder; DWORD LastUpdated; } AD_USER_INFO, *PAD_USER_INFO; typedef STRUCT USER_MONITOR_GROUP { PSTR gr_name; PSTR gr_passwd; DWORD gr_gid; DWORD LastUpdated; } USER_MONITOR_GROUP, *PUSER_MONITOR_GROUP; /* new, changed and deleted local users */ typedef STRUCT _USER_CHANGE { // unix passwd like attributes USER_MONITOR_PASSWD OldValue; USER_MONITOR_PASSWD NewValue; } USER_CHANGE, *PUSER_CHANGE; typedef STRUCT _GROUP_CHANGE { USER_MONITOR_GROUP OldValue; 
USER_MONITOR_GROUP NewValue; } GROUP_CHANGE, *PGROUP_CHANGE; typedef STRUCT _GROUP_MEMBERSHIP_CHANGE { BOOL Added; BOOL OnlyGidChange; PSTR pUserName; DWORD Gid; PSTR pGroupName; } GROUP_MEMBERSHIP_CHANGE, *PGROUP_MEMBERSHIP_CHANGE; /* new, changed and deleted AD users */ typedef STRUCT _AD_USER_CHANGE { AD_USER_INFO OldValue; AD_USER_INFO ADNewValue; } AD_USER_CHANGE, *PAD_USER_CHANGE; #if !defined(_DCE_IDL_) && !defined(__midl) #ifdef _WIN32 #ifndef LW_USERMONITORLIB_API #define LW_USERMONITORLIB_API __declspec(dllimport) __stdcall #endif #else #define LW_USERMONITORLIB_API #endif #ifdef __cplusplus extern "C" { #endif DWORD LW_USERMONITORLIB_API DecodeUserChange( IN PVOID pBuffer, IN size_t sBufferLen, OUT PUSER_CHANGE* ppValue ); DWORD LW_USERMONITORLIB_API EncodeUserChange( IN PUSER_CHANGE pValue, OUT PDWORD pdwEncodedSize, OUT PVOID* ppEncodedBuffer ); VOID LW_USERMONITORLIB_API FreeUserChange( PUSER_CHANGE pValue ); DWORD LW_USERMONITORLIB_API DecodeGroupChange( IN PVOID pBuffer, IN size_t sBufferLen, OUT PGROUP_CHANGE* ppValue ); DWORD LW_USERMONITORLIB_API EncodeGroupChange( IN PGROUP_CHANGE pValue, OUT PDWORD pdwEncodedSize, OUT PVOID* ppEncodedBuffer ); VOID LW_USERMONITORLIB_API FreeGroupChange( PGROUP_CHANGE pValue ); DWORD LW_USERMONITORLIB_API DecodeGroupMembershipChange( IN PVOID pBuffer, IN size_t sBufferLen, OUT PGROUP_MEMBERSHIP_CHANGE* ppValue ); DWORD LW_USERMONITORLIB_API EncodeGroupMembershipChange( IN PGROUP_MEMBERSHIP_CHANGE pValue, OUT PDWORD pdwEncodedSize, OUT PVOID* ppEncodedBuffer ); VOID LW_USERMONITORLIB_API FreeGroupMembershipChange( PGROUP_MEMBERSHIP_CHANGE pValue ); VOID LW_USERMONITORLIB_API FreeADUserChange( PAD_USER_CHANGE pValue ); DWORD LW_USERMONITORLIB_API DecodeADUserChange( IN PVOID pBuffer, IN size_t sBufferLen, OUT PAD_USER_CHANGE* ppValue ); DWORD LW_USERMONITORLIB_API EncodeADUserChange( IN PAD_USER_CHANGE pValue, OUT PDWORD pdwEncodedSize, OUT PVOID* ppEncodedBuffer ); #ifdef __cplusplus } #endif #endif #if defined(_DCE_IDL_) || defined(__midl) cpp_quote("#endif") #endif #endif /* __USERMONITOR_H__ */
/* * Copyright 2021 4Paradigm * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Copyright (c) 2011 The LevelDB Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. See the AUTHORS file for names of contributors. // // Endian-neutral encoding: // * Fixed-length numbers are encoded with least-significant byte first // * In addition we support variable length "varint" encoding // * Strings are encoded prefixed by their length in varint format #ifndef SRC_LOG_CODING_H_ #define SRC_LOG_CODING_H_ #include <stdint.h> #include <string.h> #include <string> #include "base/port.h" namespace openmldb { namespace log { inline void EncodeFixed32(char* buf, uint32_t value) { if (openmldb::base::kLittleEndian) { memcpy(buf, &value, sizeof(value)); } else { buf[0] = value & 0xff; buf[1] = (value >> 8) & 0xff; buf[2] = (value >> 16) & 0xff; buf[3] = (value >> 24) & 0xff; } } inline void EncodeFixed64(char* buf, uint64_t value) { if (openmldb::base::kLittleEndian) { memcpy(buf, &value, sizeof(value)); } else { buf[0] = value & 0xff; buf[1] = (value >> 8) & 0xff; buf[2] = (value >> 16) & 0xff; buf[3] = (value >> 24) & 0xff; buf[4] = (value >> 32) & 0xff; buf[5] = (value >> 40) & 0xff; buf[6] = (value >> 48) & 0xff; buf[7] = (value >> 56) & 0xff; } } inline uint32_t DecodeFixed32(const char* ptr) { if (openmldb::base::kLittleEndian) { // Load the raw bytes uint32_t result; memcpy(&result, ptr, sizeof(result)); // gcc optimizes this to a plain load return result; } else { return ((static_cast<uint32_t>(static_cast<unsigned char>(ptr[0]))) | (static_cast<uint32_t>(static_cast<unsigned char>(ptr[1])) << 8) | (static_cast<uint32_t>(static_cast<unsigned char>(ptr[2])) << 16) | (static_cast<uint32_t>(static_cast<unsigned char>(ptr[3])) << 24)); } } inline uint64_t DecodeFixed64(const char* ptr) { if (openmldb::base::kLittleEndian) { // Load the raw bytes uint64_t result; memcpy(&result, ptr, sizeof(result)); // gcc optimizes this to a plain load return result; } else { uint64_t lo = DecodeFixed32(ptr); uint64_t hi = DecodeFixed32(ptr + 4); return (hi << 32) | lo; } } } // namespace log } // namespace openmldb #endif // SRC_LOG_CODING_H_
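The fixed-width helpers above pin the byte order to little-endian regardless of host. The same behavior in a few lines of Python's struct module makes a handy cross-check:

```python
import struct

def encode_fixed32(value: int) -> bytes:
    return struct.pack('<I', value)        # least-significant byte first

def decode_fixed32(buf: bytes) -> int:
    return struct.unpack('<I', buf[:4])[0]

assert encode_fixed32(0x01020304) == b'\x04\x03\x02\x01'
assert decode_fixed32(encode_fixed32(0x01020304)) == 0x01020304
```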
{
  "name": "KSYMediaPlayer_iOS",
  "version": "2.0.3",
  "license": {
    "type": "Proprietary",
    "text": " Copyright 2015 kingsoft Ltd. All rights reserved.\n"
  },
  "homepage": "http://v.ksyun.com/doc.html",
  "authors": {
    "FanpingZeng": "<EMAIL>"
  },
  "summary": "KSYMediaPlayer_iOS sdk manages the playback of a movie or live streaming.",
  "description": "KSYUN Live Streaming player SDK, supporting the RTMP, HTTP-FLV and HLS protocols, with a live delay of less than 2 to 3 seconds.\nKSYMediaPlayer_iOS.framework is a static framework.",
  "platforms": {
    "ios": "7.0"
  },
  "requires_arc": true,
  "frameworks": "VideoToolbox",
  "ios": {
    "libraries": [
      "z",
      "iconv",
      "stdc++.6"
    ]
  },
  "source": {
    "git": "https://github.com/ksvc/KSYMediaPlayer_iOS.git",
    "tag": "v2.0.3"
  },
  "vendored_frameworks": "framework/live/KSYMediaPlayer.framework"
}
/*
 *  Copyright 2016 Google Inc. All Rights Reserved.
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package com.google.android.apps.forscience.whistlepunk.devicemanager;

import androidx.annotation.NonNull;
import com.google.android.apps.forscience.whistlepunk.AppSingleton;
import com.google.android.apps.forscience.whistlepunk.SensorAppearance;
import com.google.android.apps.forscience.whistlepunk.SensorAppearanceProvider;
import com.google.android.apps.forscience.whistlepunk.SensorProvider;
import com.google.android.apps.forscience.whistlepunk.data.GoosciSensorSpec;
import com.google.android.apps.forscience.whistlepunk.metadata.ExperimentSensors;
import com.google.android.apps.forscience.whistlepunk.metadata.ExternalSensorSpec;
import com.google.common.base.Preconditions;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

public class ConnectableSensor {
  private GoosciSensorSpec.SensorSpec spec;
  private String connectedSensorId;
  private boolean included;
  private final Map<String, SensorProvider> providerMap;

  /**
   * Manages creating representations of connected and disconnected sensors from stored
   * configurations.
   */
  public static class Connector {
    private final Map<String, SensorProvider> providers;

    public static Connector fromDiscoverers(Map<String, SensorDiscoverer> discoverers) {
      return new Connector(AppSingleton.buildProviderMap(discoverers));
    }

    public Connector(Map<String, SensorProvider> providers) {
      this.providers = providers;
    }

    /** Create an entry for an external sensor we've connected to in the past */
    @NonNull
    public ConnectableSensor connected(
        GoosciSensorSpec.SensorSpec sensorSpec, String connectedSensorId) {
      return new ConnectableSensor(
          sensorSpec, connectedSensorId, connectedSensorId != null, providers);
    }

    /** Create an entry for an external sensor we've connected to in the past */
    @NonNull
    public ConnectableSensor connected(
        GoosciSensorSpec.SensorSpec sensorSpec, String connectedSensorId, boolean included) {
      return new ConnectableSensor(sensorSpec, connectedSensorId, included, providers);
    }

    /** Create an entry for an external sensor we've never connected to */
    public ConnectableSensor disconnected(GoosciSensorSpec.SensorSpec sensorSpec) {
      return new ConnectableSensor(sensorSpec, null, false, providers);
    }

    /**
     * Create an entry for an internal built-in sensor that we know how to retrieve from {@link
     * com.google.android.apps.forscience.whistlepunk.SensorRegistry}
     */
    public ConnectableSensor builtIn(String sensorId, boolean included) {
      return new ConnectableSensor(null, sensorId, included, providers);
    }

    /** @return a new ConnectableSensor that's like this one, but in a disconnected state. */
    public ConnectableSensor asDisconnected(ConnectableSensor sensor) {
      if (sensor.isBuiltIn()) {
        return builtIn(sensor.connectedSensorId, false);
      } else {
        return disconnected(sensor.spec);
      }
    }
  }

  /**
   * @param connectedSensorId non-null if we've already paired with this sensor, and so there's
   *     already a sensorId in the database for this sensor.
Otherwise, it's null; we could connect, but a * sensorId would need to be created if we did * @param spec specification of the sensor if external, null if built-in (see {@link * #builtIn(String, boolean, Map)}). * @param included true if the sensor is included in the current experiment */ private ConnectableSensor( GoosciSensorSpec.SensorSpec spec, String connectedSensorId, boolean included, Map<String, SensorProvider> providerMap) { // TODO: handle built-in sensors as SensorSpec, too. this.spec = spec; this.connectedSensorId = connectedSensorId; this.included = included; this.providerMap = Preconditions.checkNotNull(providerMap); } public static Map<String, ExternalSensorSpec> makeMap(List<ConnectableSensor> sensors) { Map<String, ExternalSensorSpec> map = new HashMap<>(); for (ConnectableSensor sensor : sensors) { map.put(sensor.getConnectedSensorId(), sensor.getSpec()); } return map; } public static Map<String, ExternalSensorSpec> makeMap(ExperimentSensors sensors) { return makeMap(sensors.getExternalSensors()); } public boolean isPaired() { return included; } public void setPaired(boolean paired) { included = paired; } public ExternalSensorSpec getSpec() { return ExternalSensorSpec.fromGoosciSpec(spec, providerMap); } /** * @return the appearance of this connectable sensor. If it is an external sensor discovered via * the API or remembered in the database, will directly retrieve the stored appearance, * otherwise, use {@link SensorAppearanceProvider} to look up the built-in sensor. */ public SensorAppearance getAppearance(SensorAppearanceProvider sap) { if (spec != null) { return getSpec().getSensorAppearance(); } else { return sap.getAppearance(connectedSensorId); } } public String getAddress() { return spec.getInfo().getAddress(); } public String getConnectedSensorId() { return connectedSensorId; } @Override public String toString() { return "ConnectableSensor{" + "mSpec=" + spec + ", mConnectedSensorId='" + connectedSensorId + '\'' + '}'; } public boolean shouldShowOptionsOnConnect() { return spec != null && getSpec().shouldShowOptionsOnConnect(); } // auto-generated by Android Studio (then hand-edited for proto equality) @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ConnectableSensor that = (ConnectableSensor) o; if (!spec.equals(that.spec)) { return false; } if (connectedSensorId != null ? !connectedSensorId.equals(that.connectedSensorId) : that.connectedSensorId != null) { return false; } return true; } // auto-generated by Android Studio @Override public int hashCode() { int result = spec.hashCode(); result = 31 * result + (connectedSensorId != null ? connectedSensorId.hashCode() : 0); return result; } public boolean isSameSensor(ConnectableSensor other) { if (spec == null) { return Objects.equals(other.connectedSensorId, connectedSensorId); } return getSpec().isSameSensor(other.getSpec()); } public boolean isBuiltIn() { return spec == null; } }
package com.vladmihalcea.book.hpjp.hibernate.logging.inspector; import org.hibernate.resource.jdbc.spi.StatementInspector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * @author <NAME> */ public class SqlCommentStatementInspector implements StatementInspector { private static final Logger LOGGER = LoggerFactory.getLogger(SqlCommentStatementInspector.class); private static final Pattern SQL_COMMENT_PATTERN = Pattern.compile("\\/\\*.*?\\*\\/\\s*"); @Override public String inspect(String sql) { LOGGER.debug( "Executing SQL query: {}", sql ); return SQL_COMMENT_PATTERN.matcher(sql).replaceAll(""); } }
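The inspector strips the leading block comment (plus any trailing whitespace) that Hibernate prepends when SQL comments are enabled, before the statement reaches the driver. The same pattern, checked in Python:

```python
import re

# Same pattern as the Java \/\*.*?\*\/\s* regex above.
SQL_COMMENT_PATTERN = re.compile(r'/\*.*?\*/\s*')

sql = '/* load Post */ select p.id from post p'
print(SQL_COMMENT_PATTERN.sub('', sql))  # -> 'select p.id from post p'
```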
// Copyright (c) 2017-2021, Mudita Sp. z o.o. All rights reserved.
// For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md

#include "SettingsSerializer.hpp"
#include "btstack_util.h"
#include <log/log.hpp>

namespace strings
{
    constexpr inline auto addr    = "addr";
    constexpr inline auto name    = "name";
    constexpr inline auto devices = "devices";
} // namespace strings

auto SettingsSerializer::toString(const std::vector<Devicei> &devices) -> std::string
{
    json11::Json::array devicesJson;
    for (auto &device : devices) {
        auto deviceEntry = json11::Json::object{{strings::addr, bd_addr_to_str(device.address)},
                                                {strings::name, std::string{device.name.data()}}};
        devicesJson.emplace_back(deviceEntry);
    }
    json11::Json finalJson = json11::Json::object{{strings::devices, devicesJson}};
    return finalJson.dump();
}

auto SettingsSerializer::fromString(const std::string &jsonStr) -> std::vector<Devicei>
{
    json11::Json devicesJson;
    std::string err;
    devicesJson = json11::Json::parse(jsonStr.c_str(), err);
    if (!err.empty()) {
        LOG_ERROR("Failed parsing device string!");
        return std::vector<Devicei>();
    }
    json11::Json::array devicesArray;
    devicesArray = std::move(devicesJson[strings::devices].array_items());
    std::vector<Devicei> devicesVector;
    for (auto &device : devicesArray) {
        Devicei temp;
        sscanf_bd_addr(device[strings::addr].string_value().c_str(), temp.address);
        strcpy(temp.name.data(), device[strings::name].string_value().c_str());
        temp.deviceState = DeviceState::Paired;
        devicesVector.emplace_back(temp);
    }
    return devicesVector;
}
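The wire format here is a flat {"devices": [{"addr": ..., "name": ...}]} document. A dependency-free sketch of the same round-trip, useful for reasoning about the shape without the json11/btstack types:

```python
import json

# Mirrors the {"devices": [...]} shape used by SettingsSerializer.
def to_string(devices):
    return json.dumps({"devices": [{"addr": a, "name": n} for a, n in devices]})

def from_string(payload):
    doc = json.loads(payload)
    return [(d["addr"], d["name"]) for d in doc.get("devices", [])]

print(from_string(to_string([("00:11:22:33:44:55", "headset")])))
```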
/* Copyright 2016 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "tensorflow_serving/util/threadpool_executor.h" #include <gtest/gtest.h> namespace tensorflow { namespace serving { namespace { constexpr int kNumThreads = 30; TEST(ThreadPoolExecutor, Empty) { for (int num_threads = 1; num_threads < kNumThreads; num_threads++) { LOG(INFO) << "Testing with " << num_threads << " threads"; ThreadPoolExecutor pool(Env::Default(), "test", num_threads); } } TEST(ThreadPoolExecutor, DoWork) { for (int num_threads = 1; num_threads < kNumThreads; num_threads++) { LOG(INFO) << "Testing with " << num_threads << " threads"; const int kWorkItems = 15; // Not using std::vector<bool> due to its unusual implementation and API - // http://en.cppreference.com/w/cpp/container/vector_bool bool work[kWorkItems]; for (int i = 0; i < kWorkItems; ++i) { work[i] = false; } { ThreadPoolExecutor executor(Env::Default(), "test", num_threads); for (int i = 0; i < kWorkItems; i++) { executor.Schedule([&work, i]() { ASSERT_FALSE(work[i]); work[i] = true; }); } } for (int i = 0; i < kWorkItems; i++) { ASSERT_TRUE(work[i]); } } } } // namespace } // namespace serving } // namespace tensorflow
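The test leans on the executor joining all scheduled work when it goes out of scope. Python's standard pool shows the same schedule-then-drain pattern, as a conceptual analogue:

```python
from concurrent.futures import ThreadPoolExecutor

# 15 work items, all marked done before the pool is torn down, like the
# destructor-join behavior the C++ test relies on.
work = [False] * 15
with ThreadPoolExecutor(max_workers=4) as pool:  # joins on exit
    for i in range(15):
        pool.submit(lambda i=i: work.__setitem__(i, True))
assert all(work)
```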
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.

// These includes are an assumption: the snippet uses FastOS_Runnable,
// FastOS_ThreadPool and FastOS_Application without visible includes, and
// these are the usual vespa locations for them.
#include <vespa/fastos/app.h>
#include <vespa/fastos/thread.h>
#include <cstdio>

static void bomb(void)
{
    char *p;

    p = nullptr;
    *p = 4;
}

class FastS_Bomber : public FastOS_Runnable
{
    void Run(FastOS_ThreadInterface *thread, void *arg)
    {
        (void) thread;
        (void) arg;
        bomb();
    }
};

static int bombMain(void)
{
    FastOS_ThreadPool *pool = new FastOS_ThreadPool(128*1024);
    FastS_Bomber bomber;
    FastOS_ThreadInterface *thread;

    thread = pool->NewThread(&bomber, nullptr);
    if (thread != nullptr)
        thread->Join();

    pool->Close();
    delete pool;
    return (0);
}

class FastS_CoreTestApp : public FastOS_Application
{
public:
    FastS_CoreTestApp(void) { }
    ~FastS_CoreTestApp(void) { }
    int Main(void);
};

int
FastS_CoreTestApp::Main(void)
{
    return bombMain();
}

int
main(int argc, char **argv)
{
    FastS_CoreTestApp app;
    setvbuf(stdout, nullptr, _IOLBF, 8192);

    if (argc == 1)
        return app.Entry(argc, argv);
    else
        return bombMain();
}
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkimageprocess.endpoint import endpoint_data

class RunCTRegistrationRequest(RpcRequest):

	def __init__(self):
		RpcRequest.__init__(self, 'imageprocess', '2020-03-20', 'RunCTRegistration','imageprocess')
		self.set_method('POST')
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

	def get_DataSourceType(self):
		return self.get_body_params().get('DataSourceType')

	def set_DataSourceType(self,DataSourceType):
		self.add_body_params('DataSourceType', DataSourceType)

	def get_OrgName(self):
		return self.get_body_params().get('OrgName')

	def set_OrgName(self,OrgName):
		self.add_body_params('OrgName', OrgName)

	def get_ReferenceLists(self):
		return self.get_body_params().get('ReferenceList')

	def set_ReferenceLists(self, ReferenceLists):
		for depth1 in range(len(ReferenceLists)):
			if ReferenceLists[depth1].get('ReferenceURL') is not None:
				self.add_body_params('ReferenceList.' + str(depth1 + 1) + '.ReferenceURL', ReferenceLists[depth1].get('ReferenceURL'))

	def get_DataFormat(self):
		return self.get_body_params().get('DataFormat')

	def set_DataFormat(self,DataFormat):
		self.add_body_params('DataFormat', DataFormat)

	def get_OrgId(self):
		return self.get_body_params().get('OrgId')

	def set_OrgId(self,OrgId):
		self.add_body_params('OrgId', OrgId)

	def get_FloatingLists(self):
		return self.get_body_params().get('FloatingList')

	def set_FloatingLists(self, FloatingLists):
		for depth1 in range(len(FloatingLists)):
			if FloatingLists[depth1].get('FloatingURL') is not None:
				self.add_body_params('FloatingList.' + str(depth1 + 1) + '.FloatingURL', FloatingLists[depth1].get('FloatingURL'))
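A hedged usage sketch for this request class, assuming the aliyun-python-sdk-core client is installed; the credentials, region, and URLs below are placeholders, not real values:

```python
# Hypothetical usage; AcsClient comes from aliyun-python-sdk-core, and all
# credential/URL values are placeholders.
from aliyunsdkcore.client import AcsClient

client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-shanghai')

request = RunCTRegistrationRequest()
request.set_DataFormat('DICOM')
request.set_DataSourceType('HTTP')
request.set_OrgId('<org-id>')
request.set_OrgName('<org-name>')
request.set_ReferenceLists([{'ReferenceURL': 'https://example.com/ref.dcm'}])
request.set_FloatingLists([{'FloatingURL': 'https://example.com/float.dcm'}])

response = client.do_action_with_exception(request)
print(response)
```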
// Copyright <NAME> 2004. Distributed under the Boost
// Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#include <boost/python/module.hpp>
#include <boost/python/def.hpp>
#include <boost/python/extract.hpp>
#include <boost/python/to_python_converter.hpp>
#include <boost/python/class.hpp>

using namespace boost::python;

struct A
{
};

struct B
{
    A a;
    B(const A& a_):a(a_){}
};

// Converter that exposes a B to Python as its wrapped A
struct BToPython
#ifndef BOOST_PYTHON_NO_PY_SIGNATURES
   : converter::to_python_target_type<A>  //inherits get_pytype
#endif
{
    static PyObject* convert(const B& b)
    {
        return boost::python::incref(boost::python::object(b.a).ptr());
    }
};

// Conversion from a Python A to a C++ B
struct BFromPython
{
    BFromPython()
    {
        boost::python::converter::registry::push_back(
            &convertible,
            &construct,
            boost::python::type_id< B >()
#ifndef BOOST_PYTHON_NO_PY_SIGNATURES
            , &converter::expected_from_python_type<A>::get_pytype//convertible to A can be converted to B
#endif
            );
    }

    static void* convertible(PyObject* obj_ptr)
    {
        extract<const A&> ex(obj_ptr);
        if (!ex.check()) return 0;
        return obj_ptr;
    }

    static void construct(
        PyObject* obj_ptr,
        boost::python::converter::rvalue_from_python_stage1_data* data)
    {
        void* storage = (
            (boost::python::converter::rvalue_from_python_storage< B >*)data)->
            storage.bytes;

        extract<const A&> ex(obj_ptr);
        new (storage) B(ex());
        data->convertible = storage;
    }
};


B func(const B& b) { return b ; }

BOOST_PYTHON_MODULE(pytype_function_ext)
{
    to_python_converter< B , BToPython,true >(); //has get_pytype
    BFromPython();

    class_<A>("A") ;

    def("func", &func);
}

#include "module_tail.cpp"
package com.tvd12.ezyfoxserver.testing.api; import static org.mockito.Mockito.*; import org.testng.annotations.Test; import com.tvd12.ezyfox.codec.EzyObjectToByteEncoder; import com.tvd12.ezyfox.codec.EzyObjectToStringEncoder; import com.tvd12.ezyfox.collect.Lists; import com.tvd12.ezyfox.constant.EzyConstant; import com.tvd12.ezyfoxserver.api.EzyProxyResponseApi; import com.tvd12.ezyfoxserver.codec.EzyCodecFactory; import com.tvd12.ezyfoxserver.constant.EzyConnectionType; import com.tvd12.ezyfoxserver.entity.EzyAbstractSession; import com.tvd12.ezyfoxserver.entity.EzyImmediateDeliver; import com.tvd12.ezyfoxserver.response.EzyPackage; import com.tvd12.test.base.BaseTest; public class EzyProxyResponseApiTest extends BaseTest { @Test public void test() throws Exception { EzyCodecFactory codecFactory = mock(EzyCodecFactory.class); EzyProxyResponseApi api = new EzyProxyResponseApi(codecFactory); EzyPackage pack = mock(EzyPackage.class); api.response(pack); EzyObjectToByteEncoder byteEncoder = mock(EzyObjectToByteEncoder.class); EzyObjectToStringEncoder stringEncoder = mock(EzyObjectToStringEncoder.class); when(codecFactory.newEncoder(EzyConnectionType.SOCKET)).thenReturn(byteEncoder); when(codecFactory.newEncoder(EzyConnectionType.WEBSOCKET)).thenReturn(stringEncoder); api = new EzyProxyResponseApi(codecFactory); api.response(pack); api.response(pack, true); EzyImmediateDeliver immediateDeliver = mock(EzyImmediateDeliver.class); EzyAbstractSession session = spy(EzyAbstractSession.class); session.setImmediateDeliver(immediateDeliver); when(pack.getRecipients(any(EzyConstant.class))).thenReturn(Lists.newArrayList(session)); api.response(pack); api.response(pack, true); } }
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ash/constants/ash_features.h" #include "ash/public/cpp/ash_pref_names.h" #include "ash/public/cpp/shelf_prefs.h" #include "chrome/browser/sync/test/integration/os_sync_test.h" #include "chrome/browser/sync/test/integration/preferences_helper.h" #include "chrome/browser/sync/test/integration/updated_progress_marker_checker.h" #include "components/prefs/pref_service.h" #include "components/sync/base/model_type.h" #include "components/sync/driver/sync_service.h" #include "components/sync/driver/sync_user_settings.h" #include "content/public/test/browser_test.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" namespace { using preferences_helper::ChangeStringPref; using preferences_helper::GetPrefs; using testing::Eq; class SingleClientOsPreferencesSyncTest : public OsSyncTest { public: SingleClientOsPreferencesSyncTest() : OsSyncTest(SINGLE_CLIENT) {} ~SingleClientOsPreferencesSyncTest() override = default; }; IN_PROC_BROWSER_TEST_F(SingleClientOsPreferencesSyncTest, Sanity) { ASSERT_TRUE(chromeos::features::IsSplitSettingsSyncEnabled()); ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; // Shelf alignment is a Chrome OS only preference. ChangeStringPref(/*profile_index=*/0, ash::prefs::kShelfAlignment, ash::kShelfAlignmentRight); EXPECT_TRUE(UpdatedProgressMarkerChecker(GetSyncService(0)).Wait()); EXPECT_THAT(GetPrefs(/*index=*/0)->GetString(ash::prefs::kShelfAlignment), Eq(ash::kShelfAlignmentRight)); } IN_PROC_BROWSER_TEST_F(SingleClientOsPreferencesSyncTest, DisablingOsSyncFeatureDisablesDataType) { ASSERT_TRUE(chromeos::features::IsSplitSettingsSyncEnabled()); ASSERT_TRUE(SetupSync()); syncer::SyncService* service = GetSyncService(0); syncer::SyncUserSettings* settings = service->GetUserSettings(); EXPECT_TRUE(settings->IsOsSyncFeatureEnabled()); EXPECT_TRUE(service->GetActiveDataTypes().Has(syncer::OS_PREFERENCES)); settings->SetOsSyncFeatureEnabled(false); EXPECT_FALSE(settings->IsOsSyncFeatureEnabled()); EXPECT_FALSE(service->GetActiveDataTypes().Has(syncer::OS_PREFERENCES)); } } // namespace
/** @file
  Provides the Simple Network functions.

  Copyright (c) 2011 - 2016, Intel Corporation. All rights reserved.<BR>
  This program and the accompanying materials
  are licensed and made available under the terms and conditions of the BSD License
  which accompanies this distribution.  The full text of the license may be found at
  http://opensource.org/licenses/bsd-license.php

  THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
  WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

**/

#include "Ax88772.h"

/**
  This function updates the filtering on the receiver.

  This support routine calls ::Ax88772MacAddressSet to update
  the MAC address.  This routine then rebuilds the multicast
  hash by calling ::Ax88772MulticastClear and ::Ax88772MulticastSet.
  Finally this routine enables the receiver by calling
  ::Ax88772RxControl.

  @param [in] pSimpleNetwork    Simple network mode pointer

  @retval EFI_SUCCESS           This operation was successful.
  @retval EFI_NOT_STARTED       The network interface was not started.
  @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid
                                EFI_SIMPLE_NETWORK_PROTOCOL structure.
  @retval EFI_DEVICE_ERROR      The command could not be sent to the network interface.
  @retval EFI_UNSUPPORTED       The increased buffer size feature is not supported.

**/
EFI_STATUS
ReceiveFilterUpdate (
  IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork
  )
{
  EFI_SIMPLE_NETWORK_MODE * pMode;
  NIC_DEVICE * pNicDevice;
  EFI_STATUS Status;
  UINT32 Index;

  DBG_ENTER ( );

  //
  //  Set the MAC address
  //
  pNicDevice = DEV_FROM_SIMPLE_NETWORK ( pSimpleNetwork );
  pMode = pSimpleNetwork->Mode;
  Status = Ax88772MacAddressSet ( pNicDevice,
                                  &pMode->CurrentAddress.Addr[0]);
  if ( !EFI_ERROR ( Status )) {
    //
    //  Clear the multicast hash table
    //
    Ax88772MulticastClear ( pNicDevice );

    //
    //  Load the multicast hash table
    //
    if ( 0 != ( pMode->ReceiveFilterSetting & EFI_SIMPLE_NETWORK_RECEIVE_MULTICAST )) {
      for ( Index = 0;
            ( !EFI_ERROR ( Status )) && ( Index < pMode->MCastFilterCount );
            Index++ ) {
        //
        //  Enable the next multicast address
        //
        Ax88772MulticastSet ( pNicDevice,
                              &pMode->MCastFilter[ Index ].Addr[0]);
      }
    }

    //
    //  Enable the receiver
    //
    if ( !EFI_ERROR ( Status )) {
      Status = Ax88772RxControl ( pNicDevice,
                                  pMode->ReceiveFilterSetting );
    }
  }

  //
  //  Return the operation status
  //
  DBG_EXIT_STATUS ( Status );
  return Status;
}

/**
  This function updates the SNP driver status.

  This function gets the current interrupt and recycled transmit
  buffer status from the network interface.  The interrupt status
  and the media status are returned as a bit mask in InterruptStatus.
  If InterruptStatus is NULL, the interrupt status will not be read.
  Upon successful return of the media status, the MediaPresent field
  of EFI_SIMPLE_NETWORK_MODE will be updated to reflect any change
  of media status.  If TxBuf is not NULL, a recycled transmit buffer
  address will be retrieved.  If a recycled transmit buffer address
  is returned in TxBuf, then the buffer has been successfully
  transmitted, and the status for that buffer is cleared.

  This function calls ::Ax88772Rx to update the media status and
  queue any receive packets.

  @param [in] pSimpleNetwork    Protocol instance pointer

  @param [in] pInterruptStatus  A pointer to the bit mask of the current active interrupts.
                                If this is NULL, the interrupt status will not be read from
                                the device.  If this is not NULL, the interrupt status will
                                be read from the device.  When the interrupt status is read,
                                it will also be cleared.
                                Clearing the transmit interrupt does not empty the recycled
                                transmit buffer array.

  @param [out] ppTxBuf          Recycled transmit buffer address.  The network interface will
                                not transmit if its internal recycled transmit buffer array is
                                full.  Reading the transmit buffer does not clear the transmit
                                interrupt.  If this is NULL, then the transmit buffer status
                                will not be read.  If there are no transmit buffers to recycle
                                and TxBuf is not NULL, *TxBuf will be set to NULL.

  @retval EFI_SUCCESS           This operation was successful.
  @retval EFI_NOT_STARTED       The network interface was not started.
  @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid
                                EFI_SIMPLE_NETWORK_PROTOCOL structure.
  @retval EFI_DEVICE_ERROR      The command could not be sent to the network interface.

**/
EFI_STATUS
EFIAPI
SN_GetStatus (
  IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork,
  OUT UINT32 * pInterruptStatus,
  OUT VOID ** ppTxBuf
  )
{
  BOOLEAN bLinkIdle;
  EFI_SIMPLE_NETWORK_MODE * pMode;
  NIC_DEVICE * pNicDevice;
  EFI_STATUS Status;
  EFI_TPL TplPrevious;

  DBG_ENTER ( );

  //
  //  Verify the parameters
  //
  if (( NULL != pSimpleNetwork ) && ( NULL != pSimpleNetwork->Mode )) {
    //
    //  Return the transmit buffer
    //
    pNicDevice = DEV_FROM_SIMPLE_NETWORK ( pSimpleNetwork );
    if (( NULL != ppTxBuf ) && ( NULL != pNicDevice->pTxBuffer )) {
      *ppTxBuf = pNicDevice->pTxBuffer;
      pNicDevice->pTxBuffer = NULL;
    }

    //
    //  Determine if interface is running
    //
    pMode = pSimpleNetwork->Mode;
    if ( EfiSimpleNetworkStopped != pMode->State ) {
      //
      //  Synchronize with Ax88772Timer
      //
      VERIFY_TPL ( TPL_AX88772 );
      TplPrevious = gBS->RaiseTPL ( TPL_AX88772 );

      //
      //  Update the link status
      //
      bLinkIdle = pNicDevice->bLinkIdle;
      pNicDevice->bLinkIdle = TRUE;
      Ax88772Rx ( pNicDevice, bLinkIdle );
      pMode->MediaPresent = pNicDevice->bLinkUp;

      //
      //  Release the synchronization with Ax88772Timer
      //
      gBS->RestoreTPL ( TplPrevious );

      //
      //  Return the interrupt status
      //
      if ( NULL != pInterruptStatus ) {
        *pInterruptStatus = 0;
      }
      Status = EFI_SUCCESS;
    }
    else {
      Status = EFI_NOT_STARTED;
    }
  }
  else {
    Status = EFI_INVALID_PARAMETER;
  }

  //
  //  Return the operation status
  //
  DBG_EXIT_STATUS ( Status );
  return Status;
}

/**
  Resets the network adapter and allocates the transmit and receive buffers
  required by the network interface; optionally, also requests allocation of
  additional transmit and receive buffers.

  This routine must be called before any other routine in the Simple Network
  protocol is called.

  @param [in] pSimpleNetwork    Protocol instance pointer
  @param [in] ExtraRxBufferSize Size in bytes to add to the receive buffer allocation
  @param [in] ExtraTxBufferSize Size in bytes to add to the transmit buffer allocation

  @retval EFI_SUCCESS           This operation was successful.
  @retval EFI_NOT_STARTED       The network interface was not started.
  @retval EFI_OUT_OF_RESOURCES  There was not enough memory for the transmit and receive buffers
  @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid
                                EFI_SIMPLE_NETWORK_PROTOCOL structure.
  @retval EFI_DEVICE_ERROR      The command could not be sent to the network interface.
  @retval EFI_UNSUPPORTED       The increased buffer size feature is not supported.
**/ EFI_STATUS EFIAPI SN_Initialize ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork, IN UINTN ExtraRxBufferSize, IN UINTN ExtraTxBufferSize ) { EFI_SIMPLE_NETWORK_MODE * pMode; EFI_STATUS Status; DBG_ENTER ( ); // // Verify the parameters // if (( NULL != pSimpleNetwork ) && ( NULL != pSimpleNetwork->Mode )) { // // Determine if the interface is already started // pMode = pSimpleNetwork->Mode; if ( EfiSimpleNetworkStarted == pMode->State ) { if (( 0 == ExtraRxBufferSize ) && ( 0 == ExtraTxBufferSize )) { // // Start the adapter // Status = SN_Reset ( pSimpleNetwork, FALSE ); if ( !EFI_ERROR ( Status )) { // // Update the network state // pMode->State = EfiSimpleNetworkInitialized; } } else { Status = EFI_UNSUPPORTED; } } else { Status = EFI_NOT_STARTED; } } else { Status = EFI_INVALID_PARAMETER; } // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; } /** This function converts a multicast IP address to a multicast HW MAC address for all packet transactions. @param [in] pSimpleNetwork Protocol instance pointer @param [in] bIPv6 Set to TRUE if the multicast IP address is IPv6 [RFC2460]. Set to FALSE if the multicast IP address is IPv4 [RFC 791]. @param [in] pIP The multicast IP address that is to be converted to a multicast HW MAC address. @param [in] pMAC The multicast HW MAC address that is to be generated from IP. @retval EFI_SUCCESS This operation was successful. @retval EFI_NOT_STARTED The network interface was not started. @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid EFI_SIMPLE_NETWORK_PROTOCOL structure. @retval EFI_DEVICE_ERROR The command could not be sent to the network interface. @retval EFI_UNSUPPORTED The increased buffer size feature is not supported. **/ EFI_STATUS EFIAPI SN_MCastIPtoMAC ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork, IN BOOLEAN bIPv6, IN EFI_IP_ADDRESS * pIP, IN EFI_MAC_ADDRESS * pMAC ) { EFI_STATUS Status; DBG_ENTER ( ); // // This is not currently supported // Status = EFI_UNSUPPORTED; // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; } /** This function performs read and write operations on the NVRAM device attached to a network interface. @param [in] pSimpleNetwork Protocol instance pointer @param [in] ReadWrite TRUE for read operations, FALSE for write operations. @param [in] Offset Byte offset in the NVRAM device at which to start the read or write operation. This must be a multiple of NvRamAccessSize and less than NvRamSize. @param [in] BufferSize The number of bytes to read or write from the NVRAM device. This must also be a multiple of NvramAccessSize. @param [in, out] pBuffer A pointer to the data buffer. @retval EFI_SUCCESS This operation was successful. @retval EFI_NOT_STARTED The network interface was not started. @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid EFI_SIMPLE_NETWORK_PROTOCOL structure. @retval EFI_DEVICE_ERROR The command could not be sent to the network interface. @retval EFI_UNSUPPORTED The increased buffer size feature is not supported. **/ EFI_STATUS EFIAPI SN_NvData ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork, IN BOOLEAN ReadWrite, IN UINTN Offset, IN UINTN BufferSize, IN OUT VOID * pBuffer ) { EFI_STATUS Status; DBG_ENTER ( ); // // This is not currently supported // Status = EFI_UNSUPPORTED; // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; } /** Attempt to receive a packet from the network adapter. 
This function retrieves one packet from the receive queue of the network interface. If there are no packets on the receive queue, then EFI_NOT_READY will be returned. If there is a packet on the receive queue, and the size of the packet is smaller than BufferSize, then the contents of the packet will be placed in Buffer, and BufferSize will be udpated with the actual size of the packet. In addition, if SrcAddr, DestAddr, and Protocol are not NULL, then these values will be extracted from the media header and returned. If BufferSize is smaller than the received packet, then the size of the receive packet will be placed in BufferSize and EFI_BUFFER_TOO_SMALL will be returned. This routine calls ::Ax88772Rx to update the media status and empty the network adapter of receive packets. @param [in] pSimpleNetwork Protocol instance pointer @param [out] pHeaderSize The size, in bytes, of the media header to be filled in by the Transmit() function. If HeaderSize is non-zero, then it must be equal to SimpleNetwork->Mode->MediaHeaderSize and DestAddr and Protocol parameters must not be NULL. @param [out] pBufferSize The size, in bytes, of the entire packet (media header and data) to be transmitted through the network interface. @param [out] pBuffer A pointer to the packet (media header followed by data) to to be transmitted. This parameter can not be NULL. If HeaderSize is zero, then the media header is Buffer must already be filled in by the caller. If HeaderSize is nonzero, then the media header will be filled in by the Transmit() function. @param [out] pSrcAddr The source HW MAC address. If HeaderSize is zero, then this parameter is ignored. If HeaderSize is nonzero and SrcAddr is NULL, then SimpleNetwork->Mode->CurrentAddress is used for the source HW MAC address. @param [out] pDestAddr The destination HW MAC address. If HeaderSize is zero, then this parameter is ignored. @param [out] pProtocol The type of header to build. If HeaderSize is zero, then this parameter is ignored. @retval EFI_SUCCESS This operation was successful. @retval EFI_NOT_STARTED The network interface was not started. @retval EFI_NOT_READY No packets have been received on the network interface. @retval EFI_BUFFER_TOO_SMALL The packet is larger than BufferSize bytes. @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid EFI_SIMPLE_NETWORK_PROTOCOL structure. @retval EFI_DEVICE_ERROR The command could not be sent to the network interface. 
**/ EFI_STATUS EFIAPI SN_Receive ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork, OUT UINTN * pHeaderSize, OUT UINTN * pBufferSize, OUT VOID * pBuffer, OUT EFI_MAC_ADDRESS * pSrcAddr, OUT EFI_MAC_ADDRESS * pDestAddr, OUT UINT16 * pProtocol ) { ETHERNET_HEADER * pHeader; EFI_SIMPLE_NETWORK_MODE * pMode; NIC_DEVICE * pNicDevice; RX_TX_PACKET * pRxPacket; EFI_STATUS Status; EFI_TPL TplPrevious; UINT16 Type; DBG_ENTER ( ); // // Verify the parameters // if (( NULL != pSimpleNetwork ) && ( NULL != pSimpleNetwork->Mode )) { // // The interface must be running // pMode = pSimpleNetwork->Mode; if ( EfiSimpleNetworkInitialized == pMode->State ) { // // Synchronize with Ax88772Timer // VERIFY_TPL ( TPL_AX88772 ); TplPrevious = gBS->RaiseTPL ( TPL_AX88772 ); // // Update the link status // pNicDevice = DEV_FROM_SIMPLE_NETWORK ( pSimpleNetwork ); Ax88772Rx ( pNicDevice, FALSE ); pMode->MediaPresent = pNicDevice->bLinkUp; if ( pMode->MediaPresent ) { // // Attempt to receive a packet // pRxPacket = pNicDevice->pRxHead; if ( NULL != pRxPacket ) { pNicDevice->pRxHead = pRxPacket->pNext; if ( NULL == pNicDevice->pRxHead ) { pNicDevice->pRxTail = NULL; } // // Copy the received packet into the receive buffer // *pBufferSize = pRxPacket->Length; CopyMem ( pBuffer, &pRxPacket->Data[0], pRxPacket->Length ); pHeader = (ETHERNET_HEADER *) &pRxPacket->Data[0]; if ( NULL != pHeaderSize ) { *pHeaderSize = sizeof ( *pHeader ); } if ( NULL != pDestAddr ) { CopyMem ( pDestAddr, &pHeader->dest_addr, PXE_HWADDR_LEN_ETHER ); } if ( NULL != pSrcAddr ) { CopyMem ( pSrcAddr, &pHeader->src_addr, PXE_HWADDR_LEN_ETHER ); } if ( NULL != pProtocol ) { Type = pHeader->type; Type = (UINT16)(( Type >> 8 ) | ( Type << 8 )); *pProtocol = Type; } Status = EFI_SUCCESS; } else { // // No receive packets available // Status = EFI_NOT_READY; } } else { // // Link no up // Status = EFI_NOT_READY; } // // Release the synchronization with Ax88772Timer // gBS->RestoreTPL ( TplPrevious ); } else { Status = EFI_NOT_STARTED; } } else { Status = EFI_INVALID_PARAMETER; } // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; } /** This function is used to enable and disable the hardware and software receive filters for the underlying network device. The receive filter change is broken down into three steps: 1. The filter mask bits that are set (ON) in the Enable parameter are added to the current receive filter settings. 2. The filter mask bits that are set (ON) in the Disable parameter are subtracted from the updated receive filter settins. 3. If the resulting filter settigns is not supported by the hardware a more liberal setting is selected. If the same bits are set in the Enable and Disable parameters, then the bits in the Disable parameter takes precedence. If the ResetMCastFilter parameter is TRUE, then the multicast address list filter is disabled (irregardless of what other multicast bits are set in the enable and Disable parameters). The SNP->Mode->MCastFilterCount field is set to zero. The SNP->Mode->MCastFilter contents are undefined. After enableing or disabling receive filter settings, software should verify the new settings by checking the SNP->Mode->ReceeiveFilterSettings, SNP->Mode->MCastFilterCount and SNP->Mode->MCastFilter fields. Note: Some network drivers and/or devices will automatically promote receive filter settings if the requested setting can not be honored. 
For example, if a request for four multicast addresses is made and the underlying hardware only supports two multicast addresses the driver might set the promiscuous or promiscuous multicast receive filters instead. The receiving software is responsible for discarding any extra packets that get through the hardware receive filters. If ResetMCastFilter is TRUE, then the multicast receive filter list on the network interface will be reset to the default multicast receive filter list. If ResetMCastFilter is FALSE, and this network interface allows the multicast receive filter list to be modified, then the MCastFilterCnt and MCastFilter are used to update the current multicast receive filter list. The modified receive filter list settings can be found in the MCastFilter field of EFI_SIMPLE_NETWORK_MODE. This routine calls ::ReceiveFilterUpdate to update the receive state in the network adapter. @param [in] pSimpleNetwork Protocol instance pointer @param [in] Enable A bit mask of receive filters to enable on the network interface. @param [in] Disable A bit mask of receive filters to disable on the network interface. For backward compatibility with EFI 1.1 platforms, the EFI_SIMPLE_NETWORK_RECEIVE_MULTICAST bit must be set when the ResetMCastFilter parameter is TRUE. @param [in] bResetMCastFilter Set to TRUE to reset the contents of the multicast receive filters on the network interface to their default values. @param [in] MCastFilterCnt Number of multicast HW MAC address in the new MCastFilter list. This value must be less than or equal to the MaxMCastFilterCnt field of EFI_SIMPLE_NETWORK_MODE. This field is optional if ResetMCastFilter is TRUE. @param [in] pMCastFilter A pointer to a list of new multicast receive filter HW MAC addresses. This list will replace any existing multicast HW MAC address list. This field is optional if ResetMCastFilter is TRUE. @retval EFI_SUCCESS This operation was successful. @retval EFI_NOT_STARTED The network interface was not started. @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid EFI_SIMPLE_NETWORK_PROTOCOL structure. @retval EFI_DEVICE_ERROR The command could not be sent to the network interface. @retval EFI_UNSUPPORTED The increased buffer size feature is not supported. **/ EFI_STATUS EFIAPI SN_ReceiveFilters ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork, IN UINT32 Enable, IN UINT32 Disable, IN BOOLEAN bResetMCastFilter, IN UINTN MCastFilterCnt, IN EFI_MAC_ADDRESS * pMCastFilter ) { EFI_SIMPLE_NETWORK_MODE * pMode; EFI_MAC_ADDRESS * pMulticastAddress; EFI_MAC_ADDRESS * pTableEnd; EFI_STATUS Status; DBG_ENTER ( ); // // Verify the parameters // Status = EFI_INVALID_PARAMETER; if (( NULL != pSimpleNetwork ) && ( NULL != pSimpleNetwork->Mode )) { pMode = pSimpleNetwork->Mode; // // Update the multicast list if necessary // if ( !bResetMCastFilter ) { if ( 0 != MCastFilterCnt ) { if (( MAX_MCAST_FILTER_CNT >= MCastFilterCnt ) && ( NULL != pMCastFilter )) { // // Verify the multicast addresses // pMulticastAddress = pMCastFilter; pTableEnd = pMulticastAddress + MCastFilterCnt; while ( pTableEnd > pMulticastAddress ) { // // The first digit of the multicast address must have the LSB set // if ( 0 == ( pMulticastAddress->Addr[0] & 1 )) { // // Invalid multicast address // break; } pMulticastAddress += 1; } if ( pTableEnd == pMulticastAddress ) { // // Update the multicast filter list. 
// CopyMem (&pMode->MCastFilter[0], pMCastFilter, MCastFilterCnt * sizeof ( *pMCastFilter )); Status = EFI_SUCCESS; } } } else { Status = EFI_SUCCESS; } } else { // // No multicast address list is specified // MCastFilterCnt = 0; Status = EFI_SUCCESS; } if ( !EFI_ERROR ( Status )) { // // The parameters are valid! // pMode->ReceiveFilterSetting |= Enable; pMode->ReceiveFilterSetting &= ~Disable; pMode->MCastFilterCount = (UINT32)MCastFilterCnt; // // Update the receive filters in the adapter // Status = ReceiveFilterUpdate ( pSimpleNetwork ); } } // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; } /** Reset the network adapter. Resets a network adapter and reinitializes it with the parameters that were provided in the previous call to Initialize (). The transmit and receive queues are cleared. Receive filters, the station address, the statistics, and the multicast-IP-to-HW MAC addresses are not reset by this call. This routine calls ::Ax88772Reset to perform the adapter specific reset operation. This routine also starts the link negotiation by calling ::Ax88772NegotiateLinkStart. @param [in] pSimpleNetwork Protocol instance pointer @param [in] bExtendedVerification Indicates that the driver may perform a more exhaustive verification operation of the device during reset. @retval EFI_SUCCESS This operation was successful. @retval EFI_NOT_STARTED The network interface was not started. @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid EFI_SIMPLE_NETWORK_PROTOCOL structure. @retval EFI_DEVICE_ERROR The command could not be sent to the network interface. @retval EFI_UNSUPPORTED The increased buffer size feature is not supported. **/ EFI_STATUS EFIAPI SN_Reset ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork, IN BOOLEAN bExtendedVerification ) { EFI_SIMPLE_NETWORK_MODE * pMode; NIC_DEVICE * pNicDevice; RX_TX_PACKET * pRxPacket; EFI_STATUS Status; EFI_TPL TplPrevious; DBG_ENTER ( ); // // Verify the parameters // if (( NULL != pSimpleNetwork ) && ( NULL != pSimpleNetwork->Mode )) { // // Synchronize with Ax88772Timer // VERIFY_TPL ( TPL_AX88772 ); TplPrevious = gBS->RaiseTPL ( TPL_AX88772 ); // // Update the device state // pNicDevice = DEV_FROM_SIMPLE_NETWORK ( pSimpleNetwork ); pNicDevice->bComplete = FALSE; pNicDevice->bLinkUp = FALSE; pMode = pSimpleNetwork->Mode; pMode->MediaPresent = FALSE; // // Discard any received packets // while ( NULL != pNicDevice->pRxHead ) { // // Remove the packet from the received packet list // pRxPacket = pNicDevice->pRxHead; pNicDevice->pRxHead = pRxPacket->pNext; // // Queue the packet to the free list // pRxPacket->pNext = pNicDevice->pRxFree; pNicDevice->pRxFree = pRxPacket; } pNicDevice->pRxTail = NULL; // // Reset the device // Status = Ax88772Reset ( pNicDevice ); if ( !EFI_ERROR ( Status )) { // // Update the receive filters in the adapter // Status = ReceiveFilterUpdate ( pSimpleNetwork ); // // Try to get a connection to the network // if ( !EFI_ERROR ( Status )) { // // Start the autonegotiation // Status = Ax88772NegotiateLinkStart ( pNicDevice ); } } // // Release the synchronization with Ax88772Timer // gBS->RestoreTPL ( TplPrevious ); } else { Status = EFI_INVALID_PARAMETER; } // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; } /** Initialize the simple network protocol. This routine calls ::Ax88772MacAddressGet to obtain the MAC address. 
@param [in] pNicDevice NIC_DEVICE_INSTANCE pointer @retval EFI_SUCCESS Setup was successful **/ EFI_STATUS SN_Setup ( IN NIC_DEVICE * pNicDevice ) { EFI_SIMPLE_NETWORK_MODE * pMode; EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork; EFI_STATUS Status; DBG_ENTER ( ); // // Initialize the simple network protocol // pSimpleNetwork = &pNicDevice->SimpleNetwork; pSimpleNetwork->Revision = EFI_SIMPLE_NETWORK_PROTOCOL_REVISION; pSimpleNetwork->Start = (EFI_SIMPLE_NETWORK_START)SN_Start; pSimpleNetwork->Stop = (EFI_SIMPLE_NETWORK_STOP)SN_Stop; pSimpleNetwork->Initialize = (EFI_SIMPLE_NETWORK_INITIALIZE)SN_Initialize; pSimpleNetwork->Reset = (EFI_SIMPLE_NETWORK_RESET)SN_Reset; pSimpleNetwork->Shutdown = (EFI_SIMPLE_NETWORK_SHUTDOWN)SN_Shutdown; pSimpleNetwork->ReceiveFilters = (EFI_SIMPLE_NETWORK_RECEIVE_FILTERS)SN_ReceiveFilters; pSimpleNetwork->StationAddress = (EFI_SIMPLE_NETWORK_STATION_ADDRESS)SN_StationAddress; pSimpleNetwork->Statistics = (EFI_SIMPLE_NETWORK_STATISTICS)SN_Statistics; pSimpleNetwork->MCastIpToMac = (EFI_SIMPLE_NETWORK_MCAST_IP_TO_MAC)SN_MCastIPtoMAC; pSimpleNetwork->NvData = (EFI_SIMPLE_NETWORK_NVDATA)SN_NvData; pSimpleNetwork->GetStatus = (EFI_SIMPLE_NETWORK_GET_STATUS)SN_GetStatus; pSimpleNetwork->Transmit = (EFI_SIMPLE_NETWORK_TRANSMIT)SN_Transmit; pSimpleNetwork->Receive = (EFI_SIMPLE_NETWORK_RECEIVE)SN_Receive; pSimpleNetwork->WaitForPacket = NULL; pMode = &pNicDevice->SimpleNetworkData; pSimpleNetwork->Mode = pMode; pMode->State = EfiSimpleNetworkStopped; pMode->HwAddressSize = PXE_HWADDR_LEN_ETHER; pMode->MediaHeaderSize = sizeof ( ETHERNET_HEADER ); pMode->MaxPacketSize = MAX_ETHERNET_PKT_SIZE; pMode->NvRamSize = 0; pMode->NvRamAccessSize = 0; pMode->ReceiveFilterMask = EFI_SIMPLE_NETWORK_RECEIVE_UNICAST | EFI_SIMPLE_NETWORK_RECEIVE_MULTICAST | EFI_SIMPLE_NETWORK_RECEIVE_BROADCAST | EFI_SIMPLE_NETWORK_RECEIVE_PROMISCUOUS | EFI_SIMPLE_NETWORK_RECEIVE_PROMISCUOUS_MULTICAST; pMode->ReceiveFilterSetting = EFI_SIMPLE_NETWORK_RECEIVE_UNICAST | EFI_SIMPLE_NETWORK_RECEIVE_BROADCAST; pMode->MaxMCastFilterCount = MAX_MCAST_FILTER_CNT; pMode->MCastFilterCount = 0; SetMem ( &pMode->BroadcastAddress, PXE_HWADDR_LEN_ETHER, 0xff ); pMode->IfType = EfiNetworkInterfaceUndi; pMode->MacAddressChangeable = TRUE; pMode->MultipleTxSupported = TRUE; pMode->MediaPresentSupported = TRUE; pMode->MediaPresent = FALSE; // // Read the MAC address // pNicDevice->PhyId = PHY_ID_INTERNAL; pNicDevice->b100Mbps = TRUE; pNicDevice->bFullDuplex = TRUE; Status = gBS->AllocatePool ( EfiRuntimeServicesData, MAX_BULKIN_SIZE, (VOID **) &pNicDevice->pBulkInBuff); if ( EFI_ERROR(Status)) { DEBUG (( EFI_D_ERROR, "Memory are not enough\n")); return Status; } Status = Ax88772MacAddressGet ( pNicDevice, &pMode->PermanentAddress.Addr[0]); if ( !EFI_ERROR ( Status )) { // // Display the MAC address // DEBUG (( DEBUG_MAC_ADDRESS | DEBUG_INFO, "MAC: %02x-%02x-%02x-%02x-%02x-%02x\n", pMode->PermanentAddress.Addr[0], pMode->PermanentAddress.Addr[1], pMode->PermanentAddress.Addr[2], pMode->PermanentAddress.Addr[3], pMode->PermanentAddress.Addr[4], pMode->PermanentAddress.Addr[5])); // // Use the hardware address as the current address // CopyMem ( &pMode->CurrentAddress, &pMode->PermanentAddress, PXE_HWADDR_LEN_ETHER ); } // // Return the setup status // DBG_EXIT_STATUS ( Status ); return Status; } /** This routine starts the network interface. @param [in] pSimpleNetwork Protocol instance pointer @retval EFI_SUCCESS This operation was successful. @retval EFI_ALREADY_STARTED The network interface was already started. 
@retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid EFI_SIMPLE_NETWORK_PROTOCOL structure. @retval EFI_DEVICE_ERROR The command could not be sent to the network interface. @retval EFI_UNSUPPORTED The increased buffer size feature is not supported. **/ EFI_STATUS EFIAPI SN_Start ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork ) { NIC_DEVICE * pNicDevice; EFI_SIMPLE_NETWORK_MODE * pMode; EFI_STATUS Status; DBG_ENTER ( ); // // Verify the parameters // Status = EFI_INVALID_PARAMETER; if (( NULL != pSimpleNetwork ) && ( NULL != pSimpleNetwork->Mode )) { pMode = pSimpleNetwork->Mode; if ( EfiSimpleNetworkStopped == pMode->State ) { // // Initialize the mode structure // NVRAM access is not supported // ZeroMem ( pMode, sizeof ( *pMode )); pMode->State = EfiSimpleNetworkStarted; pMode->HwAddressSize = PXE_HWADDR_LEN_ETHER; pMode->MediaHeaderSize = sizeof ( ETHERNET_HEADER ); pMode->MaxPacketSize = MAX_ETHERNET_PKT_SIZE; pMode->ReceiveFilterMask = EFI_SIMPLE_NETWORK_RECEIVE_UNICAST | EFI_SIMPLE_NETWORK_RECEIVE_MULTICAST | EFI_SIMPLE_NETWORK_RECEIVE_BROADCAST | EFI_SIMPLE_NETWORK_RECEIVE_PROMISCUOUS | EFI_SIMPLE_NETWORK_RECEIVE_PROMISCUOUS_MULTICAST; pMode->ReceiveFilterSetting = EFI_SIMPLE_NETWORK_RECEIVE_UNICAST; pMode->MaxMCastFilterCount = MAX_MCAST_FILTER_CNT; pNicDevice = DEV_FROM_SIMPLE_NETWORK ( pSimpleNetwork ); Status = Ax88772MacAddressGet ( pNicDevice, &pMode->PermanentAddress.Addr[0]); CopyMem ( &pMode->CurrentAddress, &pMode->PermanentAddress, sizeof ( pMode->CurrentAddress )); pMode->BroadcastAddress.Addr[0] = 0xff; pMode->BroadcastAddress.Addr[1] = 0xff; pMode->BroadcastAddress.Addr[2] = 0xff; pMode->BroadcastAddress.Addr[3] = 0xff; pMode->BroadcastAddress.Addr[4] = 0xff; pMode->BroadcastAddress.Addr[5] = 0xff; pMode->IfType = 1; pMode->MacAddressChangeable = TRUE; pMode->MultipleTxSupported = TRUE; pMode->MediaPresentSupported = TRUE; pMode->MediaPresent = FALSE; } else { Status = EFI_ALREADY_STARTED; } } // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; } /** Set the MAC address. This function modifies or resets the current station address of a network interface. If Reset is TRUE, then the current station address is set ot the network interface's permanent address. If Reset if FALSE then the current station address is changed to the address specified by pNew. This routine calls ::Ax88772MacAddressSet to update the MAC address in the network adapter. @param [in] pSimpleNetwork Protocol instance pointer @param [in] bReset Flag used to reset the station address to the network interface's permanent address. @param [in] pNew New station address to be used for the network interface. @retval EFI_SUCCESS This operation was successful. @retval EFI_NOT_STARTED The network interface was not started. @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid EFI_SIMPLE_NETWORK_PROTOCOL structure. @retval EFI_DEVICE_ERROR The command could not be sent to the network interface. @retval EFI_UNSUPPORTED The increased buffer size feature is not supported. 
**/ EFI_STATUS EFIAPI SN_StationAddress ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork, IN BOOLEAN bReset, IN EFI_MAC_ADDRESS * pNew ) { NIC_DEVICE * pNicDevice; EFI_SIMPLE_NETWORK_MODE * pMode; EFI_STATUS Status; DBG_ENTER ( ); // // Verify the parameters // if (( NULL != pSimpleNetwork ) && ( NULL != pSimpleNetwork->Mode ) && (( !bReset ) || ( bReset && ( NULL != pNew )))) { // // Verify that the adapter is already started // pNicDevice = DEV_FROM_SIMPLE_NETWORK ( pSimpleNetwork ); pMode = pSimpleNetwork->Mode; if ( EfiSimpleNetworkStarted == pMode->State ) { // // Determine the adapter MAC address // if ( bReset ) { // // Use the permanent address // CopyMem ( &pMode->CurrentAddress, &pMode->PermanentAddress, sizeof ( pMode->CurrentAddress )); } else { // // Use the specified address // CopyMem ( &pMode->CurrentAddress, pNew, sizeof ( pMode->CurrentAddress )); } // // Update the address on the adapter // Status = Ax88772MacAddressSet ( pNicDevice, &pMode->CurrentAddress.Addr[0]); } else { Status = EFI_NOT_STARTED; } } else { Status = EFI_INVALID_PARAMETER; } // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; } /** This function resets or collects the statistics on a network interface. If the size of the statistics table specified by StatisticsSize is not big enough for all of the statistics that are collected by the network interface, then a partial buffer of statistics is returned in StatisticsTable. @param [in] pSimpleNetwork Protocol instance pointer @param [in] bReset Set to TRUE to reset the statistics for the network interface. @param [in, out] pStatisticsSize On input the size, in bytes, of StatisticsTable. On output the size, in bytes, of the resulting table of statistics. @param [out] pStatisticsTable A pointer to the EFI_NETWORK_STATISTICS structure that conains the statistics. @retval EFI_SUCCESS This operation was successful. @retval EFI_NOT_STARTED The network interface was not started. @retval EFI_BUFFER_TOO_SMALL The pStatisticsTable is NULL or the buffer is too small. @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid EFI_SIMPLE_NETWORK_PROTOCOL structure. @retval EFI_DEVICE_ERROR The command could not be sent to the network interface. @retval EFI_UNSUPPORTED The increased buffer size feature is not supported. **/ EFI_STATUS EFIAPI SN_Statistics ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork, IN BOOLEAN bReset, IN OUT UINTN * pStatisticsSize, OUT EFI_NETWORK_STATISTICS * pStatisticsTable ) { EFI_STATUS Status; DBG_ENTER ( ); // // This is not currently supported // Status = EFI_UNSUPPORTED; // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; } /** This function stops a network interface. This call is only valid if the network interface is in the started state. @param [in] pSimpleNetwork Protocol instance pointer @retval EFI_SUCCESS This operation was successful. @retval EFI_NOT_STARTED The network interface was not started. @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid EFI_SIMPLE_NETWORK_PROTOCOL structure. @retval EFI_DEVICE_ERROR The command could not be sent to the network interface. @retval EFI_UNSUPPORTED The increased buffer size feature is not supported. 
**/ EFI_STATUS EFIAPI SN_Stop ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork ) { EFI_SIMPLE_NETWORK_MODE * pMode; EFI_STATUS Status; DBG_ENTER ( ); // // Verify the parameters // if (( NULL != pSimpleNetwork ) && ( NULL != pSimpleNetwork->Mode )) { // // Determine if the interface is started // pMode = pSimpleNetwork->Mode; if ( EfiSimpleNetworkStopped != pMode->State ) { if ( EfiSimpleNetworkStarted == pMode->State ) { // // Release the resources acquired in SN_Start // // // Mark the adapter as stopped // pMode->State = EfiSimpleNetworkStopped; Status = EFI_SUCCESS; } else { Status = EFI_UNSUPPORTED; } } else { Status = EFI_NOT_STARTED; } } else { Status = EFI_INVALID_PARAMETER; } // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; } /** This function releases the memory buffers assigned in the Initialize() call. Pending transmits and receives are lost, and interrupts are cleared and disabled. After this call, only Initialize() and Stop() calls may be used. @param [in] pSimpleNetwork Protocol instance pointer @retval EFI_SUCCESS This operation was successful. @retval EFI_NOT_STARTED The network interface was not started. @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid EFI_SIMPLE_NETWORK_PROTOCOL structure. @retval EFI_DEVICE_ERROR The command could not be sent to the network interface. @retval EFI_UNSUPPORTED The increased buffer size feature is not supported. **/ EFI_STATUS EFIAPI SN_Shutdown ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork ) { EFI_SIMPLE_NETWORK_MODE * pMode; UINT32 RxFilter; EFI_STATUS Status; DBG_ENTER ( ); // // Verify the parameters // if (( NULL != pSimpleNetwork ) && ( NULL != pSimpleNetwork->Mode )) { // // Determine if the interface is already started // pMode = pSimpleNetwork->Mode; if ( EfiSimpleNetworkInitialized == pMode->State ) { // // Stop the adapter // RxFilter = pMode->ReceiveFilterSetting; pMode->ReceiveFilterSetting = 0; Status = SN_Reset ( pSimpleNetwork, FALSE ); pMode->ReceiveFilterSetting = RxFilter; if ( !EFI_ERROR ( Status )) { // // Release the resources acquired by SN_Initialize // // // Update the network state // pMode->State = EfiSimpleNetworkStarted; } } else { Status = EFI_NOT_STARTED; } } else { Status = EFI_INVALID_PARAMETER; } // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; } /** Send a packet over the network. This function places the packet specified by Header and Buffer on the transmit queue. This function performs a non-blocking transmit operation. When the transmit is complete, the buffer is returned via the GetStatus() call. This routine calls ::Ax88772Rx to empty the network adapter of receive packets. The routine then passes the transmit packet to the network adapter. @param [in] pSimpleNetwork Protocol instance pointer @param [in] HeaderSize The size, in bytes, of the media header to be filled in by the Transmit() function. If HeaderSize is non-zero, then it must be equal to SimpleNetwork->Mode->MediaHeaderSize and DestAddr and Protocol parameters must not be NULL. @param [in] BufferSize The size, in bytes, of the entire packet (media header and data) to be transmitted through the network interface. @param [in] pBuffer A pointer to the packet (media header followed by data) to to be transmitted. This parameter can not be NULL. If HeaderSize is zero, then the media header is Buffer must already be filled in by the caller. If HeaderSize is nonzero, then the media header will be filled in by the Transmit() function. 
@param [in] pSrcAddr The source HW MAC address. If HeaderSize is zero, then this parameter is ignored. If HeaderSize is nonzero and SrcAddr is NULL, then SimpleNetwork->Mode->CurrentAddress is used for the source HW MAC address. @param [in] pDestAddr The destination HW MAC address. If HeaderSize is zero, then this parameter is ignored. @param [in] pProtocol The type of header to build. If HeaderSize is zero, then this parameter is ignored. @retval EFI_SUCCESS This operation was successful. @retval EFI_NOT_STARTED The network interface was not started. @retval EFI_NOT_READY The network interface is too busy to accept this transmit request. @retval EFI_BUFFER_TOO_SMALL The BufferSize parameter is too small. @retval EFI_INVALID_PARAMETER pSimpleNetwork parameter was NULL or did not point to a valid EFI_SIMPLE_NETWORK_PROTOCOL structure. @retval EFI_DEVICE_ERROR The command could not be sent to the network interface. **/ EFI_STATUS EFIAPI SN_Transmit ( IN EFI_SIMPLE_NETWORK_PROTOCOL * pSimpleNetwork, IN UINTN HeaderSize, IN UINTN BufferSize, IN VOID * pBuffer, IN EFI_MAC_ADDRESS * pSrcAddr, IN EFI_MAC_ADDRESS * pDestAddr, IN UINT16 * pProtocol ) { RX_TX_PACKET Packet; ETHERNET_HEADER * pHeader; EFI_SIMPLE_NETWORK_MODE * pMode; NIC_DEVICE * pNicDevice; EFI_USB_IO_PROTOCOL * pUsbIo; EFI_STATUS Status; EFI_TPL TplPrevious; UINTN TransferLength; UINT32 TransferStatus; UINT16 Type; DBG_ENTER ( ); // // Verify the parameters // if (( NULL != pSimpleNetwork ) && ( NULL != pSimpleNetwork->Mode )) { // // The interface must be running // pMode = pSimpleNetwork->Mode; if ( EfiSimpleNetworkInitialized == pMode->State ) { // // Synchronize with Ax88772Timer // VERIFY_TPL ( TPL_AX88772 ); TplPrevious = gBS->RaiseTPL ( TPL_AX88772 ); // // Update the link status // pNicDevice = DEV_FROM_SIMPLE_NETWORK ( pSimpleNetwork ); // //No need to call receive to receive packet // //Ax88772Rx ( pNicDevice, FALSE ); pMode->MediaPresent = pNicDevice->bLinkUp; // // Release the synchronization with Ax88772Timer // gBS->RestoreTPL ( TplPrevious ); if ( pMode->MediaPresent ) { // // Copy the packet into the USB buffer // CopyMem ( &Packet.Data[0], pBuffer, BufferSize ); Packet.Length = (UINT16) BufferSize; // // Transmit the packet // pHeader = (ETHERNET_HEADER *) &Packet.Data[0]; if ( 0 != HeaderSize ) { if ( NULL != pDestAddr ) { CopyMem ( &pHeader->dest_addr, pDestAddr, PXE_HWADDR_LEN_ETHER ); } if ( NULL != pSrcAddr ) { CopyMem ( &pHeader->src_addr, pSrcAddr, PXE_HWADDR_LEN_ETHER ); } else { CopyMem ( &pHeader->src_addr, &pMode->CurrentAddress.Addr[0], PXE_HWADDR_LEN_ETHER ); } if ( NULL != pProtocol ) { Type = *pProtocol; } else { Type = Packet.Length; } Type = (UINT16)(( Type >> 8 ) | ( Type << 8 )); pHeader->type = Type; } if ( Packet.Length < MIN_ETHERNET_PKT_SIZE ) { Packet.Length = MIN_ETHERNET_PKT_SIZE; ZeroMem ( &Packet.Data[ BufferSize ], Packet.Length - BufferSize ); } DEBUG (( DEBUG_TX | DEBUG_INFO, "TX: %02x-%02x-%02x-%02x-%02x-%02x %02x-%02x-%02x-%02x-%02x-%02x %02x-%02x %d bytes\r\n", Packet.Data[0], Packet.Data[1], Packet.Data[2], Packet.Data[3], Packet.Data[4], Packet.Data[5], Packet.Data[6], Packet.Data[7], Packet.Data[8], Packet.Data[9], Packet.Data[10], Packet.Data[11], Packet.Data[12], Packet.Data[13], Packet.Length )); Packet.LengthBar = ~Packet.Length; TransferLength = sizeof ( Packet.Length ) + sizeof ( Packet.LengthBar ) + Packet.Length; // // Work around USB bus driver bug where a timeout set by receive // succeeds but the timeout expires immediately after, causing the // transmit operation to 
timeout. // pUsbIo = pNicDevice->pUsbIo; Status = pUsbIo->UsbBulkTransfer ( pUsbIo, BULK_OUT_ENDPOINT, &Packet.Length, &TransferLength, 0xfffffffe, &TransferStatus ); if ( !EFI_ERROR ( Status )) { Status = TransferStatus; } if (( !EFI_ERROR ( Status )) && ( TransferLength != (UINTN)( Packet.Length + 4 ))) { Status = EFI_WARN_WRITE_FAILURE; } if ( EFI_SUCCESS == Status ) { pNicDevice->pTxBuffer = pBuffer; } else { DEBUG (( DEBUG_ERROR | DEBUG_INFO, "Ax88772 USB transmit error, TransferLength: %d, Status: %r\r\n", sizeof ( Packet.Length ) + Packet.Length, Status )); // // Reset the controller to fix the error // if ( EFI_DEVICE_ERROR == Status ) { SN_Reset ( pSimpleNetwork, FALSE ); } } } else { // // No packets available. // Status = EFI_NOT_READY; } } else { Status = EFI_NOT_STARTED; } } else { DEBUG (( DEBUG_ERROR | DEBUG_INFO, "Ax88772 invalid transmit parameter\r\n" " 0x%08x: HeaderSize\r\n" " 0x%08x: BufferSize\r\n" " 0x%08x: Buffer\r\n" " 0x%08x: SrcAddr\r\n" " 0x%08x: DestAddr\r\n" " 0x%04x: Protocol\r\n", HeaderSize, BufferSize, pBuffer, pSrcAddr, pDestAddr, pProtocol )); Status = EFI_INVALID_PARAMETER; } // // Return the operation status // DBG_EXIT_STATUS ( Status ); return Status; }
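/*
 * A hypothetical caller-side sketch (not part of the driver above) of the
 * Simple Network Protocol call sequence this file implements: Start() and
 * Initialize() bring the interface up, Transmit() queues one frame, and
 * GetStatus() is polled until the driver hands the transmit buffer back.
 * The pSnp pointer and the Frame buffer are assumed to come from the
 * caller's environment; error handling is trimmed for brevity.
 */
EFI_STATUS
SendOneFrame (
  IN EFI_SIMPLE_NETWORK_PROTOCOL * pSnp,
  IN VOID * Frame,
  IN UINTN FrameLength
  )
{
  EFI_STATUS Status;
  VOID * pTxBuf;

  Status = pSnp->Start ( pSnp );
  if ( !EFI_ERROR ( Status )) {
    Status = pSnp->Initialize ( pSnp, 0, 0 );
  }
  if ( !EFI_ERROR ( Status )) {
    //
    //  HeaderSize is zero: the frame already contains its media header
    //
    Status = pSnp->Transmit ( pSnp, 0, FrameLength, Frame, NULL, NULL, NULL );
  }
  while ( !EFI_ERROR ( Status )) {
    //
    //  Poll until the driver recycles the transmit buffer
    //
    pTxBuf = NULL;
    Status = pSnp->GetStatus ( pSnp, NULL, &pTxBuf );
    if ( pTxBuf == Frame ) {
      break;
    }
  }
  return Status;
}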
# Copyright 2019 - 2020 DarkPrinc3
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from datetime import datetime
from PIL import Image, ImageDraw, ImageFont
from pySmartDL import SmartDL
from telethon.tl import functions
import asyncio
import shutil

FONT_FILE_TO_USE = "/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf"


@command(pattern="^.autopic", outgoing=True)
async def autopic(event):
    downloaded_file_name = "userbot/original_pic.png"
    downloader = SmartDL(Var.DOWNLOAD_PFP_URL_CLOCK, downloaded_file_name, progress_bar=False)
    downloader.start(blocking=False)
    photo = "userbot/photo_pfp.png"
    # Busy-wait until the profile picture has finished downloading
    while not downloader.isFinished():
        place_holder = None
    counter = -30
    while True:
        shutil.copy(downloaded_file_name, photo)
        im = Image.open(photo)
        # Image.save() returns None, so the result is not kept
        im.rotate(counter, expand=False).save(photo, "PNG")
        current_time = datetime.now().strftime("⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡ \n Time: %H:%M \n Date: %d.%m.%y \n⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡⚡")
        img = Image.open(photo)
        drawn_text = ImageDraw.Draw(img)
        fnt = ImageFont.truetype(FONT_FILE_TO_USE, 30)
        drawn_text.text((95, 250), current_time, font=fnt, fill=(255, 255, 255))
        img.save(photo)
        file = await bot.upload_file(photo)  # pylint:disable=E0602
        try:
            await bot(functions.photos.UploadProfilePhotoRequest(  # pylint:disable=E0602
                file
            ))
            os.remove(photo)
            counter -= 30
            await asyncio.sleep(60)
        except Exception:
            return
/*
 * Copyright (c) 2002, Intel Corporation. All rights reserved.
 * This file is licensed under the GPL license.  For the full content
 * of this license, see the COPYING file at the top level of this
 * source tree.

 * Test pthread_spin_lock(pthread_spinlock_t *lock)
 *
 * The function shall lock the spin lock referenced by lock. The calling thread
 * shall acquire the lock if it is not held by another thread. Otherwise, the
 * thread shall spin (that is, shall not return from the pthread_spin_lock())
 * until the lock becomes available.
 *
 * Steps:
 * 1.  Initialize a pthread_spinlock_t object 'spinlock' with
 *     pthread_spin_init()
 * 2.  Main thread lock 'spinlock', should get the lock
 * 3.  Create a child thread. The thread lock 'spinlock', should spin.
 * 4.  After child thread spin for 2 seconds, send SIGALRM to it.
 * 5.  Child thread check its status in the signal handler.
 */

#define _XOPEN_SOURCE 600
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <signal.h>
#include "posixtest.h"

static pthread_spinlock_t spinlock;
volatile static int thread_state;

#define NOT_CREATED_THREAD 1
#define ENTERED_THREAD 2
#define EXITING_THREAD 3
#define GET_SPIN_LOCK 4

static void sig_handler()
{
	/* Just return */
	pthread_exit(0);
	return;
}

static void* fn_chld(void *arg)
{
	int rc = 0;
	struct sigaction act;
	struct timespec ts;
	thread_state = ENTERED_THREAD;
	int cnt = 0;

	/* Unblock the SIGALRM signal for the thread */
	sigemptyset (&act.sa_mask);
	sigaddset(&act.sa_mask, SIGALRM);
	if (pthread_sigmask (SIG_UNBLOCK, &act.sa_mask, NULL))
	{
		perror("thread: could not unblock SIGALRM\n");
		return (void *)PTS_UNRESOLVED;
	}

	/* Set up child thread to handle SIGALRM */
	act.sa_flags = 0;
	act.sa_handler = sig_handler;
	sigfillset(&act.sa_mask);
	sigaction(SIGALRM, &act, 0);

	printf("thread: send SIGALRM to me after 2 secs\n");
	alarm(2);

	printf("thread: attempt spin lock\n");
	rc = pthread_spin_lock(&spinlock);
	if(rc != 0)
	{
		printf("Test FAILED: thread failed to get spin lock,error code:%d\n" , rc);
		pthread_exit((void*)PTS_FAIL);
	}
	printf("thread: acquired spin lock\n");

	thread_state = GET_SPIN_LOCK;
	/* Wait 10 seconds for SIGALRM to be sent */
	while( cnt++ < 10)
	{
		ts.tv_sec = 1;
		ts.tv_nsec = 0;
		nanosleep(&ts, NULL);
	}

	/* Shouldn't get here.  If we do, it means that SIGALRM wasn't sent/received */
	printf("Error in thread: SIGALRM was not received/sent correctly, timedout after 10 secs of waiting.\n");
	pthread_exit((void*)PTS_UNRESOLVED);
	return NULL;
}

int main()
{
	pthread_t child_thread;
	void *value_ptr;
	struct sigaction sa;

	/* Block the SIGALRM signal for main thread */
	sigemptyset (&sa.sa_mask);
	sigaddset(&sa.sa_mask, SIGALRM);
	if (pthread_sigmask (SIG_BLOCK, &sa.sa_mask, NULL))
	{
		perror("main: could not block SIGALRM\n");
		return PTS_UNRESOLVED;
	}

	if(pthread_spin_init(&spinlock, PTHREAD_PROCESS_PRIVATE) != 0)
	{
		perror("main: Error at pthread_spin_init()\n");
		return PTS_UNRESOLVED;
	}

	printf("main: attempt spin lock\n");

	/* We should get the lock */
	if(pthread_spin_lock(&spinlock) != 0)
	{
		printf("Test FAILED: main cannot get spin lock when no one owns the lock\n");
		return PTS_FAIL;
	}
	printf("main: acquired spin lock\n");

	thread_state = NOT_CREATED_THREAD;
	printf("main: create thread\n");
	if(pthread_create(&child_thread, NULL, fn_chld, NULL) != 0)
	{
		printf("main: Error creating child thread\n");
		return PTS_UNRESOLVED;
	}

	/* Wait for thread to end execution */
	if(pthread_join(child_thread, &value_ptr) != 0)
	{
		perror("Error in pthread_join()\n");
		return PTS_UNRESOLVED;
	}

	/* Check the return value of the thread */
	if(thread_state == GET_SPIN_LOCK)
	{
		printf("Test FAILED: Child thread did not spin on spin lock when other thread holds the lock\n");
		exit(PTS_FAIL);
	}
	else if(thread_state == ENTERED_THREAD)
	{
		printf("thread: spins on spin lock\n");
		printf("Test PASSED\n");
		exit(PTS_PASS);
	}
	else
	{
		printf("Unexpected child thread state: %d\n", thread_state);
		exit(PTS_UNRESOLVED);
	}
}
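/*
 * A minimal sketch (not part of the POSIX test suite above) of the plain
 * pthread_spin_lock() usage pattern the test exercises: init, lock a short
 * critical section, unlock, destroy.  Error handling is trimmed for brevity.
 */
#include <pthread.h>
#include <stdio.h>

static pthread_spinlock_t lock;
static long counter;

static void *worker(void *arg)
{
	for (int i = 0; i < 100000; i++) {
		pthread_spin_lock(&lock);    /* spins until the lock is free */
		counter++;                   /* short critical section */
		pthread_spin_unlock(&lock);
	}
	return NULL;
}

int main(void)
{
	pthread_t t1, t2;

	pthread_spin_init(&lock, PTHREAD_PROCESS_PRIVATE);
	pthread_create(&t1, NULL, worker, NULL);
	pthread_create(&t2, NULL, worker, NULL);
	pthread_join(t1, NULL);
	pthread_join(t2, NULL);
	pthread_spin_destroy(&lock);
	printf("counter = %ld\n", counter);  /* expect 200000 */
	return 0;
}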
// We define the operation of splitting
// a binary number n into two numbers
// a(n), b(n) as follows. Let 0 ≤ i1 < i2 <
// . . . < ik be the indices of the bits (with
// the least significant bit having index 0) in
// n that are 1. Then the indices of the bits
// of a(n) that are 1 are i1, i3, i5, . . . and the
// indices of the bits of b(n) that are 1 are
// i2, i4, i6, . . .
// For example, if n is 110110101 in binary
// then, again in binary, we have a =
// 010010001 and b = 100100100.
// Input
// Each test case consists of a single integer
// n between 1 and 2^31 − 1 written in standard decimal (base 10) format on a single line. Input is
// terminated by a line containing '0' which should not be processed.
// Output
// The output for each test case consists of a single line, containing the integers a(n) and b(n) separated
// by a single space. Both a(n) and b(n) should be written in decimal format.
// Sample Input
// 6
// 7
// 13
// 0
// Sample Output
// 2 4
// 5 2
// 9 4

/**
 * Created by kdn251 on 2/10/17.
 */

import java.util.*;
import java.io.*;

public class SplittingNumbers {

    public static void main(String args[]) throws Exception {

        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        String line;

        while((line = br.readLine()) != null) {

            //read number
            int number = Integer.parseInt(line);

            //terminate if number is zero
            if(number == 0) break;

            //initialize variables
            int count = 0;
            int a = 0;
            int b = 0;

            while(number > 0) {

                //get lowest set bit
                int currentBit = number ^ (number & (number - 1));

                //if count is even, OR a with the current bit
                if(count % 2 == 0) {

                    a |= currentBit;

                }

                //if count is odd, OR b with the current bit
                else {

                    b |= currentBit;

                }

                //increment count
                count++;

                //clear lowest set bit for next iteration
                number &= (number - 1);

            }

            //print a and b
            System.out.println(a + " " + b);

        }

    }

}
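/*
 * A standalone sketch (not part of the original submission) of the two bit
 * tricks the solution above relies on: n & (n - 1) clears the lowest set
 * bit, and n ^ (n & (n - 1)) isolates it.  It walks the sample input
 * n = 13 (1101 in binary) and prints a = 9 and b = 4, matching the
 * expected output.
 */
#include <stdio.h>

int main(void)
{
    unsigned n = 13;   /* sample input from the problem statement */
    unsigned a = 0, b = 0;
    int count = 0;

    while (n > 0) {
        unsigned lowest = n ^ (n & (n - 1));  /* lowest set bit of n */
        if (count % 2 == 0) {
            a |= lowest;   /* set bits i1, i3, i5, ... go to a */
        } else {
            b |= lowest;   /* set bits i2, i4, i6, ... go to b */
        }
        count++;
        n &= (n - 1);      /* clear the lowest set bit */
    }
    printf("%u %u\n", a, b);  /* prints: 9 4 */
    return 0;
}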
// optional_last_value function object (documented as part of Boost.Signals2)

// Copyright Frank Mori Hess 2007-2008.
// Copyright Douglas Gregor 2001-2003.
// Distributed under the Boost Software License, Version
// 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

// See http://www.boost.org/libs/signals2 for library home page.

#ifndef BOOST_SIGNALS2_OPTIONAL_LAST_VALUE_HPP
#define BOOST_SIGNALS2_OPTIONAL_LAST_VALUE_HPP

#include <boost/core/no_exceptions_support.hpp>
#include <boost/optional.hpp>
#include <boost/signals2/expired_slot.hpp>

namespace boost {
  namespace signals2 {

    template<typename T>
    class optional_last_value
    {
    public:
      typedef optional<T> result_type;

      template<typename InputIterator>
      optional<T> operator()(InputIterator first, InputIterator last) const
      {
        optional<T> value;
        while (first != last)
        {
          BOOST_TRY
          {
            value = *first;
          }
          BOOST_CATCH(const expired_slot &)
          {}
          BOOST_CATCH_END
          ++first;
        }
        return value;
      }
    };

    template<>
    class optional_last_value<void>
    {
    public:
      typedef void result_type;

      template<typename InputIterator>
      result_type operator()(InputIterator first, InputIterator last) const
      {
        while (first != last)
        {
          BOOST_TRY
          {
            *first;
          }
          BOOST_CATCH(const expired_slot &)
          {}
          BOOST_CATCH_END
          ++first;
        }
        return;
      }
    };
  } // namespace signals2
} // namespace boost

#endif // BOOST_SIGNALS2_OPTIONAL_LAST_VALUE_HPP
// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

#include "wpigui.h"

int main() {
  wpi::gui::CreateContext();
  wpi::gui::Initialize("Hello World", 1024, 768);
  wpi::gui::Main();
}
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "device/fido/get_assertion_request_handler.h"

#include <utility>

#include "base/bind.h"
#include "device/fido/authenticator_get_assertion_response.h"
#include "device/fido/fido_authenticator.h"
#include "device/fido/fido_cable_discovery.h"
#include "device/fido/get_assertion_task.h"

namespace device {

GetAssertionRequestHandler::GetAssertionRequestHandler(
    service_manager::Connector* connector,
    const base::flat_set<FidoTransportProtocol>& protocols,
    CtapGetAssertionRequest request,
    SignResponseCallback completion_callback)
    : FidoRequestHandler(connector, protocols, std::move(completion_callback)),
      request_(std::move(request)),
      weak_factory_(this) {
  if (base::ContainsKey(
          protocols,
          FidoTransportProtocol::kCloudAssistedBluetoothLowEnergy) &&
      request_.cable_extension()) {
    auto discovery =
        std::make_unique<FidoCableDiscovery>(*request_.cable_extension());
    discovery->set_observer(this);
    discoveries().push_back(std::move(discovery));
  }

  Start();
}

GetAssertionRequestHandler::~GetAssertionRequestHandler() = default;

void GetAssertionRequestHandler::DispatchRequest(
    FidoAuthenticator* authenticator) {
  authenticator->GetAssertion(
      request_,
      base::BindOnce(&GetAssertionRequestHandler::OnAuthenticatorResponse,
                     weak_factory_.GetWeakPtr(), authenticator));
}

}  // namespace device
package com.alibaba.datax.core;

import com.alibaba.datax.common.element.ColumnCast;
import com.alibaba.datax.common.exception.DataXException;
import com.alibaba.datax.common.spi.ErrorCode;
import com.alibaba.datax.common.statistics.PerfTrace;
import com.alibaba.datax.common.statistics.VMInfo;
import com.alibaba.datax.common.util.Configuration;
import com.alibaba.datax.core.job.JobContainer;
import com.alibaba.datax.core.taskgroup.TaskGroupContainer;
import com.alibaba.datax.core.util.ConfigParser;
import com.alibaba.datax.core.util.ConfigurationValidate;
import com.alibaba.datax.core.util.ExceptionTracker;
import com.alibaba.datax.core.util.FrameworkErrorCode;
import com.alibaba.datax.core.util.container.CoreConstant;
import com.alibaba.datax.core.util.container.LoadUtil;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Engine is the DataX entry class. It initializes the run container for a Job
 * or Task and runs the plugin's Job or Task logic.
 */
public class Engine {
    private static final Logger LOG = LoggerFactory.getLogger(Engine.class);

    private static String RUNTIME_MODE;

    /* check job model (job/task) first */
    public void start(Configuration allConf) {

        // Bind the column conversion information
        ColumnCast.bind(allConf);

        /**
         * Initialize the PluginLoader so plugin configurations can be loaded
         */
        LoadUtil.bind(allConf);

        boolean isJob = !("taskGroup".equalsIgnoreCase(allConf
                .getString(CoreConstant.DATAX_CORE_CONTAINER_MODEL)));
        // JobContainer will set and adjust this value after scheduling
        int channelNumber = 0;
        AbstractContainer container;
        long instanceId;
        int taskGroupId = -1;
        if (isJob) {
            allConf.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_MODE, RUNTIME_MODE);
            container = new JobContainer(allConf);
            instanceId = allConf.getLong(
                    CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, 0);

        } else {
            container = new TaskGroupContainer(allConf);
            instanceId = allConf.getLong(
                    CoreConstant.DATAX_CORE_CONTAINER_JOB_ID);
            taskGroupId = allConf.getInt(
                    CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID);
            channelNumber = allConf.getInt(
                    CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL);
        }

        // perfTrace is enabled by default
        boolean traceEnable = allConf.getBool(CoreConstant.DATAX_CORE_CONTAINER_TRACE_ENABLE, true);
        boolean perfReportEnable = allConf.getBool(CoreConstant.DATAX_CORE_REPORT_DATAX_PERFLOG, true);

        // DataX shell jobs in standalone mode do not report
        if (instanceId == -1) {
            perfReportEnable = false;
        }

        int priority = 0;
        try {
            priority = Integer.parseInt(System.getenv("SKYNET_PRIORITY"));
        } catch (NumberFormatException e) {
            LOG.warn("priority set to 0 because of NumberFormatException, the value is: " + System.getenv("SKYNET_PRIORITY"));
        }

        Configuration jobInfoConfig = allConf.getConfiguration(CoreConstant.DATAX_JOB_JOBINFO);
        // Initialize PerfTrace
        PerfTrace perfTrace = PerfTrace.getInstance(isJob, instanceId, taskGroupId, priority, traceEnable);
        perfTrace.setJobInfo(jobInfoConfig, perfReportEnable, channelNumber);
        container.start();
    }

    // Note: mask sensitive information
    public static String filterJobConfiguration(final Configuration configuration) {
        Configuration jobConfWithSetting = configuration.getConfiguration("job").clone();

        Configuration jobContent = jobConfWithSetting.getConfiguration("content");

        filterSensitiveConfiguration(jobContent);

        jobConfWithSetting.set("content", jobContent);

        return jobConfWithSetting.beautify();
    }

    public static Configuration filterSensitiveConfiguration(Configuration configuration) {
        Set<String> keys = configuration.getKeys();
        for (final String key : keys) {
            boolean isSensitive = StringUtils.endsWithIgnoreCase(key, "password")
                    || StringUtils.endsWithIgnoreCase(key, "accessKey");
            if (isSensitive && configuration.get(key) instanceof String) {
                configuration.set(key, configuration.getString(key).replaceAll(".", "*"));
            }
        }
        return configuration;
    }

    public static void entry(final String[] args) throws Throwable {
        Options options = new Options();
        options.addOption("job", true, "Job config.");
        options.addOption("jobid", true, "Job unique id.");
        options.addOption("mode", true, "Job runtime mode.");

        BasicParser parser = new BasicParser();
        CommandLine cl = parser.parse(options, args);

        String jobPath = cl.getOptionValue("job");

        // If the user did not explicitly specify a jobid, datax.py assigns the default jobid of -1
        String jobIdString = cl.getOptionValue("jobid");
        RUNTIME_MODE = cl.getOptionValue("mode");

        Configuration configuration = ConfigParser.parse(jobPath);

        long jobId;
        if (!"-1".equalsIgnoreCase(jobIdString)) {
            jobId = Long.parseLong(jobIdString);
        } else {
            // only for dsc & ds & datax 3 update
            String dscJobUrlPatternString = "/instance/(\\d{1,})/config.xml";
            String dsJobUrlPatternString = "/inner/job/(\\d{1,})/config";
            String dsTaskGroupUrlPatternString = "/inner/job/(\\d{1,})/taskGroup/";
            List<String> patternStringList = Arrays.asList(dscJobUrlPatternString,
                    dsJobUrlPatternString, dsTaskGroupUrlPatternString);
            jobId = parseJobIdFromUrl(patternStringList, jobPath);
        }

        boolean isStandAloneMode = "standalone".equalsIgnoreCase(RUNTIME_MODE);
        if (!isStandAloneMode && jobId == -1) {
            // If not in standalone mode, jobId must not be -1
            throw DataXException.asDataXException(FrameworkErrorCode.CONFIG_ERROR,
                    "A valid jobId must be provided in the URL when not in standalone mode.");
        }
        configuration.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, jobId);

        // Print vmInfo
        VMInfo vmInfo = VMInfo.getVmInfo();
        if (vmInfo != null) {
            LOG.info(vmInfo.toString());
        }

        LOG.info("\n" + Engine.filterJobConfiguration(configuration) + "\n");

        LOG.debug(configuration.toJSON());

        ConfigurationValidate.doValidate(configuration);
        Engine engine = new Engine();
        engine.start(configuration);
    }

    /**
     * -1 means the jobId could not be parsed
     *
     * only for dsc & ds & datax 3 update
     */
    private static long parseJobIdFromUrl(List<String> patternStringList, String url) {
        long result = -1;
        for (String patternString : patternStringList) {
            result = doParseJobIdFromUrl(patternString, url);
            if (result != -1) {
                return result;
            }
        }
        return result;
    }

    private static long doParseJobIdFromUrl(String patternString, String url) {
        Pattern pattern = Pattern.compile(patternString);
        Matcher matcher = pattern.matcher(url);
        if (matcher.find()) {
            return Long.parseLong(matcher.group(1));
        }

        return -1;
    }

    public static void main(String[] args) throws Exception {
        int exitCode = 0;
        try {
            Engine.entry(args);
        } catch (Throwable e) {
            exitCode = 1;
            LOG.error("\n\nDataX analysis suggests the most likely cause of this job's failure is:\n" + ExceptionTracker.trace(e));

            if (e instanceof DataXException) {
                DataXException tempException = (DataXException) e;
                ErrorCode errorCode = tempException.getErrorCode();
                if (errorCode instanceof FrameworkErrorCode) {
                    FrameworkErrorCode tempErrorCode = (FrameworkErrorCode) errorCode;
                    exitCode = tempErrorCode.toExitValue();
                }
            }

            System.exit(exitCode);
        }
        System.exit(exitCode);
    }
}
"""Test Command.""" import pytest from pytradfri.command import Command def test_property_access(): """Test property access in Command.""" def ec(): pass command = Command( method="method", path="path", data="data", parse_json=True, observe=False, observe_duration=0, err_callback=ec, ) assert command.method == "method" assert command.path == "path" assert command.parse_json is True assert command.observe is False assert command.observe_duration == 0 assert command.err_callback == ec def test_result(): """Test callback process_result.""" def pr(value): return value + 1 command = Command("method", "path", {}, process_result=pr) assert command.result is None assert command.raw_result is None command.process_result(0) assert command.result == 1 assert command.raw_result == 0 def test_url(): """Test url is recognized.""" command = Command("method", ["path"], {}) url = command.url("host") assert url == "coaps://host:5684/path" command2 = Command("method", ["path1", "path2"], {}) url = command2.url("host") assert url == "coaps://host:5684/path1/path2" def test_add_unsupported(): """Test add unsupported causes error.""" command1 = Command("method", "path", {}) not_a_command = 0 with pytest.raises(TypeError): command1 + not_a_command
/*
 * Copyright 2017 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jbpm.bpmn2.core;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

public class SequenceFlow implements Serializable {

    private static final long serialVersionUID = 510L;

    private String id;
    private String sourceRef;
    private String targetRef;
    private String bendpoints;
    private String expression;
    private String type;
    private String language;
    private String name;
    private int priority;

    private Map<String, Object> metaData = new HashMap<String, Object>();

    public SequenceFlow(String id, String sourceRef, String targetRef) {
        this.id = id;
        this.sourceRef = sourceRef;
        this.targetRef = targetRef;
    }

    public String getId() {
        return id;
    }

    public String getSourceRef() {
        return sourceRef;
    }

    public String getTargetRef() {
        return targetRef;
    }

    public String getBendpoints() {
        return bendpoints;
    }

    public void setBendpoints(String bendpoints) {
        this.bendpoints = bendpoints;
    }

    public String getExpression() {
        return expression;
    }

    public void setExpression(String expression) {
        this.expression = expression;
    }

    public String getLanguage() {
        return language;
    }

    public void setLanguage(String language) {
        this.language = language;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getPriority() {
        return priority;
    }

    public void setPriority(int priority) {
        this.priority = priority;
    }

    public Map<String, Object> getMetaData() {
        return this.metaData;
    }

    public void setMetaData(String name, Object data) {
        this.metaData.put(name, data);
    }

    public String toString() {
        return "SequenceFlow (" + this.id + ") [" + this.sourceRef + " -> " + this.targetRef + "]";
    }
}
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CHROMEOS_COMPONENTS_PHONEHUB_UTIL_HISTOGRAM_UTIL_H_
#define CHROMEOS_COMPONENTS_PHONEHUB_UTIL_HISTOGRAM_UTIL_H_

#include "chromeos/components/phonehub/proto/phonehub_api.pb.h"

namespace chromeos {
namespace phonehub {
namespace util {

// Enumeration of possible opt-in entry points for Phone Hub feature. Keep in
// sync with corresponding enum in tools/metrics/histograms/enums.xml. These
// values are persisted to logs. Entries should not be renumbered and numeric
// values should never be reused.
enum class OptInEntryPoint {
  kSetupFlow = 0,
  kOnboardingFlow = 1,
  kSettings = 2,
  kMaxValue = kSettings,
};

// Enumeration of results of a tethering connection attempt.
enum class TetherConnectionResult {
  kAttemptConnection = 0,
  kSuccess = 1,
  kMaxValue = kSuccess,
};

// Keep in sync with corresponding enum in tools/metrics/histograms/enums.xml.
// These values are persisted to logs. Entries should not be renumbered and
// numeric values should never be reused.
enum class PhoneHubMessageResult {
  kRequestAttempted = 0,
  kResponseReceived = 1,
  kMaxValue = kResponseReceived,
};

// Logs a given opt-in |entry_point| for the PhoneHub feature.
void LogFeatureOptInEntryPoint(OptInEntryPoint entry_point);

// Logs a given |result| of a tethering connection attempt.
void LogTetherConnectionResult(TetherConnectionResult result);

// Logs a given |result| for a request message.
void LogMessageResult(proto::MessageType message, PhoneHubMessageResult result);

}  // namespace util
}  // namespace phonehub
}  // namespace chromeos

#endif  // CHROMEOS_COMPONENTS_PHONEHUB_UTIL_HISTOGRAM_UTIL_H_
/*
 * Copyright (c) 2016 IBM Corporation and others.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.brunel.util;

import org.brunel.build.VisualizationBuilder;

import java.awt.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.Arrays;
import java.util.List;

/**
 * Ensures JS files needed for local operations are in the right places
 */
public class LocalOutputFiles {

    private static final List<String> RESOURCES = Arrays.asList(
            "BrunelD3.js", "BrunelData.js", "BrunelBidi.js",
            "Brunel.css",
            "BrunelEventHandlers.js", "BrunelJQueryControlFactory.js",
            "sumoselect/jquery.sumoselect.min.js", "sumoselect/sumoselect.css");

    private final File home, out;        // Home and output resources directory

    public static void install() {
        for (String s : RESOURCES) INSTANCE.ensureResourceExists(s);
    }

    public static File makeDirectory(String dirName) {
        return INSTANCE.ensureWritable(new File(INSTANCE.home, dirName));
    }

    private static final LocalOutputFiles INSTANCE = new LocalOutputFiles();

    private LocalOutputFiles() {
        home = getHomeDirectory();                  // Top level brunel output directory
        out = ensureWritable(new File(home, "out"));    // for the standard parts
        new File(out, "/sumoselect").mkdirs();          // For the third-party JS items
    }

    public static Writer makeFileWriter(String fileName) {
        File f = new File(INSTANCE.home, fileName);
        try {
            f.getParentFile().mkdirs();
            return new OutputStreamWriter(new FileOutputStream(f), "utf-8");
        } catch (Exception e) {
            throw new RuntimeException("Error creating file to write to: " + f.getAbsolutePath());
        }
    }

    public static void showInBrowser(String location) {
        File file = new File(INSTANCE.home, location);
        try {
            Desktop.getDesktop().browse(file.toURI());
        } catch (Throwable ex) {
            throw new RuntimeException("Failed to show file in browser: " + file, ex);
        }
    }

    private File ensureWritable(File f) {
        f.mkdirs();
        if (!f.canWrite())
            throw new IllegalArgumentException("Cannot write to the directory: " + f.getAbsolutePath());
        return f;
    }

    private File getHomeDirectory() {
        // Try special directory location, but if that is not defined, add to user's home directory
        String brunelDir = System.getProperty("brunel.home");
        File home = brunelDir != null ? new File(brunelDir)
                : new File(System.getProperty("user.home"), "brunel");
        return ensureWritable(home);
    }

    private void ensureResourceExists(String resourceName) {
        try {
            // Either we are running from the IDE, in which case we find the file in the file system,
            // or we are in a jar, in which case it should be in the indicated directory
            InputStream is = VisualizationBuilder.class.getResourceAsStream("/readable/" + resourceName);
            if (is == null) {
                File file = new File("out/javascript/readable");
                if (!file.exists()) file = new File("../out/javascript/readable");
                is = new FileInputStream(new File(file, resourceName));
            }
            Files.copy(is, new File(out, resourceName).toPath(), StandardCopyOption.REPLACE_EXISTING);
        } catch (IOException e) {
            throw new RuntimeException("Could not copy required " + resourceName + " to output folder: " + out, e);
        }
    }
}
1,594
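The LocalOutputFiles utility above is driven entirely through its static entry points. As a minimal sketch of the intended call sequence — the output file name and HTML payload here are invented for illustration, assuming the Brunel jars are on the classpath:

```java
import org.brunel.util.LocalOutputFiles;

import java.io.IOException;
import java.io.Writer;

public class LocalOutputDemo {
    public static void main(String[] args) throws IOException {
        // Copy BrunelD3.js, Brunel.css, etc. into <home>/out (home is ~/brunel
        // unless the brunel.home system property is set)
        LocalOutputFiles.install();

        // Write a page next to the copied resources; the file name is hypothetical
        try (Writer w = LocalOutputFiles.makeFileWriter("visualization.html")) {
            w.write("<html><head><link rel='stylesheet' href='out/Brunel.css'></head>"
                    + "<body>generated chart goes here</body></html>");
        }

        // Open the default desktop browser on the generated page
        LocalOutputFiles.showInBrowser("visualization.html");
    }
}
```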
30,785
package jadx.core.dex.visitors.typeinference;

import jadx.core.dex.instructions.BaseInvokeNode;
import jadx.core.dex.instructions.args.ArgType;
import jadx.core.dex.instructions.args.RegisterArg;
import jadx.core.dex.nodes.RootNode;

/**
 * Special dynamic bound for an invoke with generics.
 * The argument's bound type is calculated using the instance's generic type.
 */
public final class TypeBoundInvokeUse implements ITypeBoundDynamic {
	private final RootNode root;
	private final BaseInvokeNode invokeNode;
	private final RegisterArg arg;
	private final ArgType genericArgType;

	public TypeBoundInvokeUse(RootNode root, BaseInvokeNode invokeNode, RegisterArg arg, ArgType genericArgType) {
		this.root = root;
		this.invokeNode = invokeNode;
		this.arg = arg;
		this.genericArgType = genericArgType;
	}

	@Override
	public BoundEnum getBound() {
		return BoundEnum.USE;
	}

	@Override
	public ArgType getType(TypeUpdateInfo updateInfo) {
		return getArgType(updateInfo.getType(invokeNode.getInstanceArg()), updateInfo.getType(arg));
	}

	@Override
	public ArgType getType() {
		return getArgType(invokeNode.getInstanceArg().getType(), arg.getType());
	}

	private ArgType getArgType(ArgType instanceType, ArgType argType) {
		ArgType resultGeneric = root.getTypeUtils().replaceClassGenerics(instanceType, genericArgType);
		if (resultGeneric != null) {
			return resultGeneric;
		}
		return argType;
	}

	@Override
	public RegisterArg getArg() {
		return arg;
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		TypeBoundInvokeUse that = (TypeBoundInvokeUse) o;
		return invokeNode.equals(that.invokeNode);
	}

	@Override
	public int hashCode() {
		return invokeNode.hashCode();
	}

	@Override
	public String toString() {
		return "InvokeUse{" + invokeNode.getCallMth().getShortId()
				+ ", argType=" + genericArgType
				+ ", currentType=" + getType()
				+ ", instanceArg=" + invokeNode.getInstanceArg()
				+ '}';
	}
}
694
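The substitution TypeBoundInvokeUse performs is easiest to see outside of jadx. The sketch below uses plain JDK reflection rather than jadx's TypeUtils (which the class above delegates to), and the field name is invented for the demo: it resolves the declared type variable E of List against an instance typed List<String>, which mirrors what replaceClassGenerics does for a decompiled invoke on a generic receiver.

```java
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.util.List;

public class GenericUseDemo {
    // The field's declaration carries the concrete instantiation List<String>
    static List<String> names;

    public static void main(String[] args) throws Exception {
        // Instance type: List<String>, recovered from the field declaration
        ParameterizedType instanceType =
                (ParameterizedType) GenericUseDemo.class.getDeclaredField("names").getGenericType();

        // Declared type variable of the class: E in List<E>
        TypeVariable<?> declared = List.class.getTypeParameters()[0];

        // Actual type argument bound to E for this instance: String
        Type actual = instanceType.getActualTypeArguments()[0];

        // Prints "E -> class java.lang.String"
        System.out.println(declared + " -> " + actual);
    }
}
```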
799
{
    "id": "Policy Optimizer",
    "version": -1,
    "vcShouldIgnore": false,
    "locked": false,
    "name": "Policy Optimizer",
    "prevName": "Policy Optimizer",
    "color": "#7D28A7",
    "playbookId": "Policy Optimizer - Generic",
    "hours": 0,
    "days": 0,
    "weeks": 0,
    "hoursR": 0,
    "daysR": 0,
    "weeksR": 0,
    "system": false,
    "readonly": false,
    "default": false,
    "autorun": true,
    "disabled": false,
    "reputationCalc": 0,
    "onChangeRepAlg": 0,
    "layout": "Policy Optimizer Layout",
    "detached": false,
    "extractSettings": {
        "mode": "Specific",
        "fieldCliNameToExtractSettings": {}
    },
    "fromVersion": "6.0.0"
}
233
3,348
<gh_stars>1000+
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.heron.grouping;

import java.util.ArrayList;
import java.util.List;

import org.apache.heron.api.grouping.CustomStreamGrouping;
import org.apache.heron.api.topology.TopologyBuilder;
import org.apache.heron.api.topology.TopologyContext;
import org.apache.heron.common.basics.SingletonRegistry;
import org.apache.heron.resource.TestBolt;

/**
 * Tests custom grouping by using round robin grouping from SPOUT to BOLT_A
 */
public class CustomGroupingTest extends AbstractTupleRoutingTest {

  @Override
  protected void initBoltA(TopologyBuilder topologyBuilder,
                           String boltId, String upstreamComponentId) {
    final CustomStreamGrouping myCustomGrouping =
        new MyRoundRobinCustomGrouping(getInitInfoKey(upstreamComponentId));

    topologyBuilder.setBolt(boltId, new TestBolt(), 1)
        .customGrouping(upstreamComponentId, myCustomGrouping);
  }

  @Override
  protected Component getComponentToVerify() {
    return Component.SPOUT;
  }

  @Override
  protected String getExpectedComponentInitInfo() {
    return "test-spout+test-spout+default+[1]";
  }

  private static final class MyRoundRobinCustomGrouping implements CustomStreamGrouping {
    private static final long serialVersionUID = -4141962710451507976L;
    private volatile int emitted = 0;
    private final String initInfoKey;

    private MyRoundRobinCustomGrouping(String initInfoKey) {
      super();
      this.initInfoKey = initInfoKey;
    }

    @Override
    public void prepare(TopologyContext context, String component,
                        String streamId, List<Integer> targetTasks) {
      ((StringBuilder) SingletonRegistry.INSTANCE.getSingleton(initInfoKey))
          .append(String.format("%s+%s+%s+%s",
              context.getThisComponentId(), component, streamId, targetTasks.toString()));
    }

    @Override
    public List<Integer> chooseTasks(List<Object> values) {
      List<Integer> res = new ArrayList<>();
      res.add(emitted);
      emitted++;
      return res;
    }
  }
}
926
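The test's grouping returns a monotonically increasing task index, which is only safe because the downstream bolt runs with parallelism 1. A modulo-wrapping variant of the same idea — a sketch against the same Heron interface, not part of the test suite above — stays valid for any task count:

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.heron.api.grouping.CustomStreamGrouping;
import org.apache.heron.api.topology.TopologyContext;

public class ModuloRoundRobinGrouping implements CustomStreamGrouping {
  private static final long serialVersionUID = 1L;

  private List<Integer> targetTasks;
  private int next = 0;

  @Override
  public void prepare(TopologyContext context, String component,
                      String streamId, List<Integer> targetTasks) {
    // Copy the task ids handed to us; these are the valid routing targets
    this.targetTasks = new ArrayList<>(targetTasks);
  }

  @Override
  public List<Integer> chooseTasks(List<Object> values) {
    // Cycle through the target tasks so every task id we return is valid
    int task = targetTasks.get(next % targetTasks.size());
    next++;
    return Collections.singletonList(task);
  }
}
```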
8,028
<reponame>dctelus/transformers
# coding=utf-8
# Copyright 2022 The REALM authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fast Tokenization classes for REALM."""

import json
from typing import List, Optional, Tuple

from tokenizers import normalizers

from ...tokenization_utils_base import BatchEncoding
from ...tokenization_utils_fast import PreTrainedTokenizerFast
from ...utils import PaddingStrategy, logging
from .tokenization_realm import RealmTokenizer


logger = logging.get_logger(__name__)

VOCAB_FILES_NAMES = {"vocab_file": "vocab.txt", "tokenizer_file": "tokenizer.json"}

PRETRAINED_VOCAB_FILES_MAP = {
    "vocab_file": {
        "google/realm-cc-news-pretrained-embedder": "https://huggingface.co/google/realm-cc-news-pretrained-embedder/resolve/main/vocab.txt",
        "google/realm-cc-news-pretrained-encoder": "https://huggingface.co/google/realm-cc-news-pretrained-encoder/resolve/main/vocab.txt",
        "google/realm-cc-news-pretrained-scorer": "https://huggingface.co/google/realm-cc-news-pretrained-scorer/resolve/main/vocab.txt",
        "google/realm-cc-news-pretrained-openqa": "https://huggingface.co/google/realm-cc-news-pretrained-openqa/resolve/main/vocab.txt",
        "google/realm-orqa-nq-openqa": "https://huggingface.co/google/realm-orqa-nq-openqa/resolve/main/vocab.txt",
        "google/realm-orqa-nq-reader": "https://huggingface.co/google/realm-orqa-nq-reader/resolve/main/vocab.txt",
        "google/realm-orqa-wq-openqa": "https://huggingface.co/google/realm-orqa-wq-openqa/resolve/main/vocab.txt",
        "google/realm-orqa-wq-reader": "https://huggingface.co/google/realm-orqa-wq-reader/resolve/main/vocab.txt",
    },
    "tokenizer_file": {
        "google/realm-cc-news-pretrained-embedder": "https://huggingface.co/google/realm-cc-news-pretrained-embedder/resolve/main/tokenizer.json",
        "google/realm-cc-news-pretrained-encoder": "https://huggingface.co/google/realm-cc-news-pretrained-encoder/resolve/main/tokenizer.json",
        "google/realm-cc-news-pretrained-scorer": "https://huggingface.co/google/realm-cc-news-pretrained-scorer/resolve/main/tokenizer.json",
        "google/realm-cc-news-pretrained-openqa": "https://huggingface.co/google/realm-cc-news-pretrained-openqa/resolve/main/tokenizer.json",
        "google/realm-orqa-nq-openqa": "https://huggingface.co/google/realm-orqa-nq-openqa/resolve/main/tokenizer.json",
        "google/realm-orqa-nq-reader": "https://huggingface.co/google/realm-orqa-nq-reader/resolve/main/tokenizer.json",
        "google/realm-orqa-wq-openqa": "https://huggingface.co/google/realm-orqa-wq-openqa/resolve/main/tokenizer.json",
        "google/realm-orqa-wq-reader": "https://huggingface.co/google/realm-orqa-wq-reader/resolve/main/tokenizer.json",
    },
}

PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
    "google/realm-cc-news-pretrained-embedder": 512,
    "google/realm-cc-news-pretrained-encoder": 512,
    "google/realm-cc-news-pretrained-scorer": 512,
    "google/realm-cc-news-pretrained-openqa": 512,
    "google/realm-orqa-nq-openqa": 512,
    "google/realm-orqa-nq-reader": 512,
    "google/realm-orqa-wq-openqa": 512,
    "google/realm-orqa-wq-reader": 512,
}

PRETRAINED_INIT_CONFIGURATION = {
    "google/realm-cc-news-pretrained-embedder": {"do_lower_case": True},
    "google/realm-cc-news-pretrained-encoder": {"do_lower_case": True},
    "google/realm-cc-news-pretrained-scorer": {"do_lower_case": True},
    "google/realm-cc-news-pretrained-openqa": {"do_lower_case": True},
    "google/realm-orqa-nq-openqa": {"do_lower_case": True},
    "google/realm-orqa-nq-reader": {"do_lower_case": True},
    "google/realm-orqa-wq-openqa": {"do_lower_case": True},
    "google/realm-orqa-wq-reader": {"do_lower_case": True},
}


class RealmTokenizerFast(PreTrainedTokenizerFast):
    r"""
    Construct a "fast" REALM tokenizer (backed by HuggingFace's *tokenizers* library). Based on WordPiece.

    [`RealmTokenizerFast`] is identical to [`BertTokenizerFast`] and runs end-to-end tokenization: punctuation
    splitting and wordpiece.

    This tokenizer inherits from [`PreTrainedTokenizerFast`] which contains most of the main methods. Users should
    refer to this superclass for more information regarding those methods.

    Args:
        vocab_file (`str`):
            File containing the vocabulary.
        do_lower_case (`bool`, *optional*, defaults to `True`):
            Whether or not to lowercase the input when tokenizing.
        unk_token (`str`, *optional*, defaults to `"[UNK]"`):
            The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be
            this token instead.
        sep_token (`str`, *optional*, defaults to `"[SEP]"`):
            The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences
            for sequence classification or for a text and a question for question answering. It is also used as the
            last token of a sequence built with special tokens.
        pad_token (`str`, *optional*, defaults to `"[PAD]"`):
            The token used for padding, for example when batching sequences of different lengths.
        cls_token (`str`, *optional*, defaults to `"[CLS]"`):
            The classifier token which is used when doing sequence classification (classification of the whole
            sequence instead of per-token classification). It is the first token of the sequence when built with
            special tokens.
        mask_token (`str`, *optional*, defaults to `"[MASK]"`):
            The token used for masking values. This is the token used when training this model with masked language
            modeling. This is the token which the model will try to predict.
        clean_text (`bool`, *optional*, defaults to `True`):
            Whether or not to clean the text before tokenization by removing any control characters and replacing all
            whitespaces by the classic one.
        tokenize_chinese_chars (`bool`, *optional*, defaults to `True`):
            Whether or not to tokenize Chinese characters. This should likely be deactivated for Japanese (see [this
            issue](https://github.com/huggingface/transformers/issues/328)).
        strip_accents (`bool`, *optional*):
            Whether or not to strip all accents. If this option is not specified, then it will be determined by the
            value for `lowercase` (as in the original BERT).
        wordpieces_prefix (`str`, *optional*, defaults to `"##"`):
            The prefix for subwords.
    """

    vocab_files_names = VOCAB_FILES_NAMES
    pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
    pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION
    max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
    slow_tokenizer_class = RealmTokenizer

    def __init__(
        self,
        vocab_file=None,
        tokenizer_file=None,
        do_lower_case=True,
        unk_token="[UNK]",
        sep_token="[SEP]",
        pad_token="[PAD]",
        cls_token="[CLS]",
        mask_token="[MASK]",
        tokenize_chinese_chars=True,
        strip_accents=None,
        **kwargs
    ):
        super().__init__(
            vocab_file,
            tokenizer_file=tokenizer_file,
            do_lower_case=do_lower_case,
            unk_token=unk_token,
            sep_token=sep_token,
            pad_token=pad_token,
            cls_token=cls_token,
            mask_token=mask_token,
            tokenize_chinese_chars=tokenize_chinese_chars,
            strip_accents=strip_accents,
            **kwargs,
        )

        normalizer_state = json.loads(self.backend_tokenizer.normalizer.__getstate__())
        if (
            normalizer_state.get("lowercase", do_lower_case) != do_lower_case
            or normalizer_state.get("strip_accents", strip_accents) != strip_accents
            or normalizer_state.get("handle_chinese_chars", tokenize_chinese_chars) != tokenize_chinese_chars
        ):
            normalizer_class = getattr(normalizers, normalizer_state.pop("type"))
            normalizer_state["lowercase"] = do_lower_case
            normalizer_state["strip_accents"] = strip_accents
            normalizer_state["handle_chinese_chars"] = tokenize_chinese_chars
            self.backend_tokenizer.normalizer = normalizer_class(**normalizer_state)

        self.do_lower_case = do_lower_case

    def batch_encode_candidates(self, text, **kwargs):
        r"""
        Encode a batch of text or text pair. This method is similar to the regular `__call__` method but has the
        following differences:

            1. Handle additional num_candidates axis. (batch_size, num_candidates, text)
            2. Always pad the sequences to *max_length*.
            3. Must specify *max_length* in order to stack packs of candidates into a batch.

        Args:
            text (`List[List[str]]`):
                The batch of sequences to be encoded. Each sequence must be in this format: (batch_size,
                num_candidates, text).
            text_pair (`List[List[str]]`, *optional*):
                The batch of sequences to be encoded. Each sequence must be in this format: (batch_size,
                num_candidates, text).
            **kwargs:
                Keyword arguments of the __call__ method.

        Returns:
            [`BatchEncoding`]: Encoded text or text pair.

        Example:

        ```python
        >>> from transformers import RealmTokenizerFast

        >>> # batch_size = 2, num_candidates = 2
        >>> text = [["Hello world!", "Nice to meet you!"], ["The cute cat.", "The adorable dog."]]

        >>> tokenizer = RealmTokenizerFast.from_pretrained("google/realm-cc-news-pretrained-encoder")
        >>> tokenized_text = tokenizer.batch_encode_candidates(text, max_length=10, return_tensors="pt")
        ```"""

        # Always using a fixed sequence length to encode in order to stack candidates into a batch.
        kwargs["padding"] = PaddingStrategy.MAX_LENGTH

        batch_text = text
        batch_text_pair = kwargs.pop("text_pair", None)
        return_tensors = kwargs.pop("return_tensors", None)

        output_data = {
            "input_ids": [],
            "attention_mask": [],
            "token_type_ids": [],
        }

        for idx, candidate_text in enumerate(batch_text):
            if batch_text_pair is not None:
                candidate_text_pair = batch_text_pair[idx]
            else:
                candidate_text_pair = None

            encoded_candidates = super().__call__(candidate_text, candidate_text_pair, return_tensors=None, **kwargs)

            encoded_input_ids = encoded_candidates.get("input_ids")
            encoded_attention_mask = encoded_candidates.get("attention_mask")
            encoded_token_type_ids = encoded_candidates.get("token_type_ids")

            if encoded_input_ids is not None:
                output_data["input_ids"].append(encoded_input_ids)
            if encoded_attention_mask is not None:
                output_data["attention_mask"].append(encoded_attention_mask)
            if encoded_token_type_ids is not None:
                output_data["token_type_ids"].append(encoded_token_type_ids)

        output_data = {key: item for key, item in output_data.items() if len(item) != 0}

        return BatchEncoding(output_data, tensor_type=return_tensors)

    def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
        """
        Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating
        and adding special tokens. A REALM sequence has the following format:

        - single sequence: `[CLS] X [SEP]`
        - pair of sequences: `[CLS] A [SEP] B [SEP]`

        Args:
            token_ids_0 (`List[int]`):
                List of IDs to which the special tokens will be added.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.

        Returns:
            `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens.
        """
        output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]

        if token_ids_1:
            output += token_ids_1 + [self.sep_token_id]

        return output

    def create_token_type_ids_from_sequences(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Create a mask from the two sequences passed to be used in a sequence-pair classification task. A REALM
        sequence pair mask has the following format:

        ```
        0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1
        | first sequence    | second sequence |
        ```

        If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s).

        Args:
            token_ids_0 (`List[int]`):
                List of IDs.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.

        Returns:
            `List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s).
        """
        sep = [self.sep_token_id]
        cls = [self.cls_token_id]
        if token_ids_1 is None:
            return len(cls + token_ids_0 + sep) * [0]
        return len(cls + token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1]

    def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]:
        files = self._tokenizer.model.save(save_directory, name=filename_prefix)
        return tuple(files)
5,978
866
<reponame>Miouge1/cli<filename>tests/functional/test_issue-126/plan.out.json {"format_version":"0.1","terraform_version":"0.12.3","planned_values":{"root_module":{"resources":[{"address":"aws_elb.bar","mode":"managed","type":"aws_elb","name":"bar","provider_name":"aws","schema_version":0,"values":{"access_logs":[{"bucket":"foo","bucket_prefix":"bar","enabled":true,"interval":60}],"availability_zones":["us-west-2a","us-west-2b","us-west-2c"],"connection_draining":true,"connection_draining_timeout":400,"cross_zone_load_balancing":true,"health_check":[{"healthy_threshold":2,"interval":30,"target":"HTTP:8000/","timeout":3,"unhealthy_threshold":2}],"idle_timeout":400,"instances":["some_id"],"listener":[{"instance_port":8000,"instance_protocol":"http","lb_port":443,"lb_protocol":"https","ssl_certificate_id":"arn:aws:iam::123456789012:server-certificate/certName"},{"instance_port":8000,"instance_protocol":"http","lb_port":80,"lb_protocol":"http","ssl_certificate_id":""}],"name":"foobar-terraform-elb","name_prefix":null,"tags":{"Name":"foobar-terraform-elb"}}}]}},"resource_changes":[{"address":"aws_elb.bar","mode":"managed","type":"aws_elb","name":"bar","provider_name":"aws","change":{"actions":["create"],"before":null,"after":{"access_logs":[{"bucket":"foo","bucket_prefix":"bar","enabled":true,"interval":60}],"availability_zones":["us-west-2a","us-west-2b","us-west-2c"],"connection_draining":true,"connection_draining_timeout":400,"cross_zone_load_balancing":true,"health_check":[{"healthy_threshold":2,"interval":30,"target":"HTTP:8000/","timeout":3,"unhealthy_threshold":2}],"idle_timeout":400,"instances":["some_id"],"listener":[{"instance_port":8000,"instance_protocol":"http","lb_port":443,"lb_protocol":"https","ssl_certificate_id":"arn:aws:iam::123456789012:server-certificate/certName"},{"instance_port":8000,"instance_protocol":"http","lb_port":80,"lb_protocol":"http","ssl_certificate_id":""}],"name":"foobar-terraform-elb","name_prefix":null,"tags":{"Name":"foobar-terraform-elb"}},"after_unknown":{"access_logs":[{}],"arn":true,"availability_zones":[false,false,false],"dns_name":true,"health_check":[{}],"id":true,"instances":[false],"internal":true,"listener":[{},{}],"security_groups":true,"source_security_group":true,"source_security_group_id":true,"subnets":true,"tags":{},"zone_id":true}}}],"configuration":{"root_module":{"resources":[{"address":"aws_elb.bar","mode":"managed","type":"aws_elb","name":"bar","provider_config_key":"aws","expressions":{"access_logs":[{"bucket":{"constant_value":"foo"},"bucket_prefix":{"constant_value":"bar"},"interval":{"constant_value":60}}],"availability_zones":{"constant_value":["us-west-2a","us-west-2b","us-west-2c"]},"connection_draining":{"constant_value":true},"connection_draining_timeout":{"constant_value":400},"cross_zone_load_balancing":{"constant_value":true},"health_check":[{"healthy_threshold":{"constant_value":2},"interval":{"constant_value":30},"target":{"constant_value":"HTTP:8000/"},"timeout":{"constant_value":3},"unhealthy_threshold":{"constant_value":2}}],"idle_timeout":{"constant_value":400},"instances":{"constant_value":["some_id"]},"listener":[{"instance_port":{"constant_value":8000},"instance_protocol":{"constant_value":"http"},"lb_port":{"constant_value":80},"lb_protocol":{"constant_value":"http"}},{"instance_port":{"constant_value":8000},"instance_protocol":{"constant_value":"http"},"lb_port":{"constant_value":443},"lb_protocol":{"constant_value":"https"},"ssl_certificate_id":{"constant_value":"arn:aws:iam::123456789012:server-certificate/cert
Name"}}],"name":{"constant_value":"foobar-terraform-elb"},"tags":{"constant_value":{"Name":"foobar-terraform-elb"}}},"schema_version":0}]}}}
1,179
699
<reponame>Oussama-Goumghar/e-banking-registery
package tech.jhipster.registry.service;

import static com.sun.nio.file.SensitivityWatchEventModifier.HIGH;
import static java.nio.file.FileVisitOption.FOLLOW_LINKS;
import static java.nio.file.FileVisitResult.CONTINUE;
import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY;
import static tech.jhipster.config.JHipsterConstants.SPRING_PROFILE_K8S;

import java.io.File;
import java.io.IOException;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.context.refresh.ContextRefresher;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Service;

/**
 * Kubernetes (K8s) cloud config refresher service
 */
@Service
@Profile(SPRING_PROFILE_K8S)
public class CloudConfigRefreshService {

    private final Logger log = LoggerFactory.getLogger(CloudConfigRefreshService.class);

    private final ContextRefresher refresher;

    private final String configPath;

    private ScheduledExecutorService taskExecutor;

    /**
     * Constructor
     *
     * @param refresher ContextRefresher
     * @param configPath String
     */
    public CloudConfigRefreshService(ContextRefresher refresher, @Value("${k8s.config.path}") String configPath) {
        this.refresher = refresher;
        this.configPath = configPath;
    }

    /**
     * Creates a daemon thread when {@link #getConfigPath configPath} is specified through the environment
     * variable {@code k8s.config.path}. The daemon thread will execute the watcher service asynchronously.
     */
    @PostConstruct
    public void configMapWatcher() {
        if (getConfigPath() != null && !getConfigPath().isEmpty()) {
            taskExecutor =
                Executors.newSingleThreadScheduledExecutor(job -> {
                    Thread thread = new Thread(job, "CloudConfigMapRefresher");
                    thread.setDaemon(true);
                    return thread;
                });
            taskExecutor.execute(() -> {
                try {
                    configMapRefreshContext();
                } catch (IOException | InterruptedException ex) {
                    log.error("Unable to refresh K8s ConfigMap", ex);
                }
            });
        } else {
            log.error("ConfigMap directory path not specified. Specify a value for the environment variable k8s.config.path");
        }
    }

    /**
     * {@code WatchService} object to monitor the K8s configMap path. The mounted configMap path will be recursively
     * registered with the {@code WatchService} instance to get notified of events of interest.
     *
     * @throws IOException
     * @throws InterruptedException
     */
    public void configMapRefreshContext() throws IOException, InterruptedException {
        List<File> fileList = new ArrayList<>();
        List<Integer> hashList = new ArrayList<>();
        WatchService watcherService = FileSystems.getDefault().newWatchService();
        Path dirPath = Paths.get(getConfigPath());

        Files.walkFileTree(
            dirPath,
            new HashSet<FileVisitOption>() {
                {
                    add(FOLLOW_LINKS);
                }
            },
            2,
            new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                    log.debug("Registering " + dir + " in watcher service");
                    dir.register(watcherService, new WatchEvent.Kind[] { ENTRY_MODIFY }, HIGH);
                    return CONTINUE;
                }

                @Override
                public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException {
                    File file = path.toFile();
                    if (isValidConfigFile(file.getName().toLowerCase())) {
                        log.debug("Adding file: " + file.getAbsolutePath());
                        fileList.add(file);
                        hashList.add(getHashValue(file));
                    }
                    return CONTINUE;
                }
            }
        );

        while (true) {
            WatchKey key = watcherService.take();
            List<WatchEvent<?>> events = key.pollEvents();
            if (!events.isEmpty()) {
                if (log.isDebugEnabled()) {
                    events.forEach(event -> log.debug("Event detected: " + event.kind().name() + ", Updated File: " + event.context()));
                }
                Collection<Integer> activeList = fileList.stream().map(entry -> getHashValue(entry)).collect(Collectors.toList());
                if (!hashList.containsAll(activeList)) {
                    log.debug("File system updated. Hashed content matching failed");
                    hashList.clear();
                    hashList.addAll(activeList);
                    refresher.refresh();
                    log.debug("@Refreshscope context refreshed for ConfigMap update");
                } else {
                    // do nothing
                    log.debug("Hashed content unchanged. Ignore and continue");
                }
                if (!key.reset()) {
                    log.error("Unable to reset the watcher service. Try restarting the running instance");
                    break;
                }
            } else {
                // do nothing
                log.debug("Event list is empty. Ignore and continue.");
            }
        }
    }

    @PreDestroy
    public void destroy() {
        if (taskExecutor != null) {
            taskExecutor.shutdown();
        }
    }

    /**
     * Generates a hash value
     *
     * @param file File
     * @return hashCode int
     */
    private int getHashValue(File file) {
        return (
            37 * 21 +
            (
                file.getAbsolutePath().hashCode() +
                (int) (file.length() ^ (file.length() >>> 32)) +
                (int) (file.lastModified() ^ (file.lastModified() >>> 32))
            )
        );
    }

    /**
     * Checks for a valid file extension
     *
     * @param name String - file name
     * @return boolean
     */
    private boolean isValidConfigFile(String name) {
        return name.endsWith(".yml") || name.endsWith(".yaml") || name.endsWith(".properties");
    }

    /**
     * Returns the config path
     *
     * @return configPath String
     */
    public String getConfigPath() {
        return configPath;
    }
}
3,220
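The refresh decision above boils down to comparing hashes built from each file's path, length and modification time. A self-contained sketch of just that detection loop — JDK only; the polling interval and file names are invented for the demo, and the real service reacts to WatchService events instead of sleeping:

```java
import java.io.File;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class ConfigHashDemo {
    // Same hash recipe as getHashValue above: path, length and mtime combined
    static int hashOf(File f) {
        return 37 * 21 + (f.getAbsolutePath().hashCode()
                + (int) (f.length() ^ (f.length() >>> 32))
                + (int) (f.lastModified() ^ (f.lastModified() >>> 32)));
    }

    public static void main(String[] args) throws InterruptedException {
        List<File> watched = Arrays.asList(new File("application.yml"), new File("registry.properties"));
        Set<Integer> baseline = watched.stream().map(ConfigHashDemo::hashOf).collect(Collectors.toSet());

        while (true) {
            Thread.sleep(5_000); // crude polling stand-in for WatchService.take()
            Set<Integer> current = watched.stream().map(ConfigHashDemo::hashOf).collect(Collectors.toSet());
            if (!baseline.equals(current)) {
                // The real service calls refresher.refresh() at this point
                System.out.println("Config content changed - refresh would fire here");
                baseline = current;
            }
        }
    }
}
```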
6,969
<gh_stars>1000+
public class Solution {
    public int findheight(TreeNode root) {
        if (root == null) {
            return 0;
        }
        return 1 + Math.max(findheight(root.left), findheight(root.right));
    }

    public int maxDepth(TreeNode A) {
        return findheight(A);
    }
}
124
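The recursive solution above is the usual one; the same depth can also be computed iteratively with a level-order traversal, which avoids deep recursion on skewed trees. A sketch with a minimal stand-in TreeNode — the judge normally supplies this class:

```java
import java.util.ArrayDeque;
import java.util.Queue;

public class MaxDepthIterative {
    // Minimal stand-in for the judge-provided TreeNode
    static class TreeNode {
        int val;
        TreeNode left, right;
        TreeNode(int val) { this.val = val; }
    }

    public int maxDepth(TreeNode root) {
        if (root == null) {
            return 0;
        }
        Queue<TreeNode> queue = new ArrayDeque<>();
        queue.add(root);
        int depth = 0;
        while (!queue.isEmpty()) {
            depth++; // one full pass over the queue == one tree level
            for (int levelSize = queue.size(); levelSize > 0; levelSize--) {
                TreeNode node = queue.poll();
                if (node.left != null) { queue.add(node.left); }
                if (node.right != null) { queue.add(node.right); }
            }
        }
        return depth;
    }
}
```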
1,056
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.netbeans.modules.css.lib.api.properties;

import java.util.LinkedList;
import java.util.List;

/**
 * An element of the parsed css property value grammar.
 *
 * Note: the object is effectively immutable since setMinimumOccurances/setMaximumOccurances
 * are called only just after its creation.
 *
 * @author <EMAIL>
 */
public abstract class GrammarElement {

    public static final char INVISIBLE_PROPERTY_PREFIX = '@';

    public static boolean isArtificialElementName(CharSequence name) {
        if (name.length() == 0) {
            return false;
        }
        return name.charAt(0) == INVISIBLE_PROPERTY_PREFIX;
    }

    private GroupGrammarElement parent;
    private String path;
    private String name;

    public GrammarElement(GroupGrammarElement parent, String elementName) {
        this.parent = parent;
        this.name = elementName;
    }

    /**
     * Returns the name of the element if it is named, null otherwise.
     */
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public abstract void accept(GrammarElementVisitor visitor);

    private int minimum_occurances = 1;
    private int maximum_occurances = 1;

    public void setMinimumOccurances(int i) {
        this.minimum_occurances = i;
    }

    public void setMaximumOccurances(int i) {
        maximum_occurances = i;
    }

    public int getMaximumOccurances() {
        return maximum_occurances;
    }

    public int getMinimumOccurances() {
        return minimum_occurances;
    }

    public boolean isOptional() {
        return getMinimumOccurances() == 0;
    }

    public GroupGrammarElement parent() {
        return parent;
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof GrammarElement)) {
            return false;
        }
        GrammarElement e = (GrammarElement) o;
        return path().equalsIgnoreCase(e.path());
    }

    @Override
    public int hashCode() {
        return path().hashCode();
    }

    /**
     * Returns the name of the property from which this element comes.
     */
    public String origin() {
        return origin(true);
    }

    public String getVisibleOrigin() {
        return origin(false);
    }

    private String origin(boolean allowNonVisibleElements) {
        GroupGrammarElement p = parent;
        while (p != null) {
            if (p.getName() != null) {
                boolean visible = !isArtificialElementName(p.getName());
                if (visible || allowNonVisibleElements) {
                    return p.getName();
                }
            }
            p = p.parent();
        }
        return null;
    }

    public synchronized String path() {
        if (path == null) {
            StringBuilder sb = new StringBuilder();
            if (parent() != null) {
                sb.append(parent().path());
                sb.append('/');
            }
            sb.append(toString());
            path = sb.toString();
        }
        return path;
    }

    public List<GrammarElement> elementsPath() {
        List<GrammarElement> elementsPath = new LinkedList<>();
        GrammarElement element = this;
        do {
            elementsPath.add(0, element);
        } while ((element = element.parent()) != null);
        return elementsPath;
    }

    @Override
    public String toString() {
        if (getMinimumOccurances() != 1 || getMaximumOccurances() != 1) {
            return "{" + getMinimumOccurances() + ","
                    + (getMaximumOccurances() == Integer.MAX_VALUE ? "inf" : getMaximumOccurances()) + "}"; //NOI18N
        } else {
            return ""; //NOI18N
        }
    }

    public String toString2(int level) {
        return indentString(level) + toString();
    }

    protected String indentString(int level) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < level; i++) {
            sb.append('\t');
        }
        return sb.toString();
    }
}
1,918
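GrammarElement.toString() only contributes the {min,max} multiplicity suffix; concrete subclasses are expected to prepend their own text before it feeds into path(). A hypothetical subclass — the class name, value field, and empty visitor hook are invented here, only the suffix contract comes from the class above:

```java
public class ValueGrammarElement extends GrammarElement {

    private final String value;

    public ValueGrammarElement(GroupGrammarElement parent, String name, String value) {
        super(parent, name);
        this.value = value;
    }

    @Override
    public void accept(GrammarElementVisitor visitor) {
        // visitor.visit(this) in the real hierarchy; left empty in this sketch
    }

    @Override
    public String toString() {
        // Combines the element's own text with the inherited multiplicity suffix,
        // e.g. "red{0,inf}" after setMinimumOccurances(0)/setMaximumOccurances(Integer.MAX_VALUE)
        return value + super.toString();
    }
}
```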