max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
348
{"nom":"Clairefontaine-en-Yvelines","dpt":"Yvelines","inscrits":630,"abs":114,"votants":516,"blancs":43,"nuls":15,"exp":458,"res":[{"panneau":"1","voix":336},{"panneau":"2","voix":122}]}
75
348
{"nom":"Neuville-en-Verdunois","circ":"1ère circonscription","dpt":"Meuse","inscrits":57,"abs":34,"votants":23,"blancs":1,"nuls":0,"exp":22,"res":[{"nuance":"UDI","nom":"<NAME>","voix":12},{"nuance":"REM","nom":"<NAME>","voix":10}]}
95
1,091
<filename>core/api/src/main/java/org/onosproject/net/behaviour/DefaultBridgeDescription.java /* * Copyright 2015-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.net.behaviour; import com.google.common.base.Strings; import com.google.common.collect.Lists; import org.onosproject.net.DeviceId; import org.onosproject.net.SparseAnnotations; import java.util.List; import java.util.Optional; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; /** * The default implementation of bridge. 
*/ public final class DefaultBridgeDescription implements BridgeDescription { private final String name; /* Optional OpenFlow configurations */ private final List<ControllerInfo> controllers; private final boolean enableLocalController; private final Optional<FailMode> failMode; private final Optional<String> datapathId; private final Optional<String> datapathType; private final Optional<List<ControlProtocolVersion>> controlProtocols; private final Optional<Boolean> disableInBand; private final Optional<Boolean> mcastSnoopingEnable; /* Adds more configurations */ private DefaultBridgeDescription(String name, List<ControllerInfo> controllers, boolean enableLocalController, Optional<FailMode> failMode, Optional<String> datapathId, Optional<String> datapathType, Optional<Boolean> disableInBand, Optional<Boolean> mcastSnoopingEnable, Optional<List<ControlProtocolVersion>> controlProtocols) { this.name = checkNotNull(name); this.controllers = controllers; this.enableLocalController = enableLocalController; this.failMode = failMode; this.datapathId = datapathId; this.datapathType = datapathType; this.disableInBand = disableInBand; this.mcastSnoopingEnable = mcastSnoopingEnable; this.controlProtocols = controlProtocols; } @Override public SparseAnnotations annotations() { return null; } @Override public String name() { return name; } @Override public List<ControllerInfo> controllers() { return controllers; } @Override public boolean enableLocalController() { return enableLocalController; } @Override public Optional<FailMode> failMode() { return failMode; } @Override public Optional<String> datapathId() { return datapathId; } @Override public Optional<String> datapathType() { return datapathType; } @Override public Optional<List<ControlProtocolVersion>> controlProtocols() { return controlProtocols; } @Override public Optional<DeviceId> deviceId() { if (datapathId.isPresent()) { return Optional.of(DeviceId.deviceId("of:" + datapathId.get())); } else { return 
Optional.empty(); } } @Override public Optional<Boolean> disableInBand() { return disableInBand; } @Override public Optional<Boolean> mcastSnoopingEnable() { return mcastSnoopingEnable; } /** * Creates and returns a new builder instance. * * @return new builder */ public static BridgeDescription.Builder builder() { return new Builder(); } public static final class Builder implements BridgeDescription.Builder { private String name; private List<ControllerInfo> controllers = Lists.newArrayList(); private boolean enableLocalController = false; private Optional<FailMode> failMode = Optional.empty(); private Optional<String> datapathId = Optional.empty(); private Optional<String> datapathType = Optional.empty(); private Optional<List<ControlProtocolVersion>> controlProtocols = Optional.empty(); private Optional<Boolean> disableInBand = Optional.empty(); private Optional<Boolean> mcastSnoopingEnable = Optional.empty(); private Builder() { } @Override public BridgeDescription build() { return new DefaultBridgeDescription(name, controllers, enableLocalController, failMode, datapathId, datapathType, disableInBand, mcastSnoopingEnable, controlProtocols); } @Override public Builder name(String name) { checkArgument(!Strings.isNullOrEmpty(name)); this.name = name; return this; } @Override public Builder controllers(List<ControllerInfo> controllers) { if (controllers != null) { this.controllers = Lists.newArrayList(controllers); } return this; } @Override public Builder enableLocalController() { this.enableLocalController = true; return this; } @Override public Builder failMode(FailMode failMode) { this.failMode = Optional.ofNullable(failMode); return this; } @Override public Builder datapathId(String datapathId) { this.datapathId = Optional.ofNullable(datapathId); return this; } @Override public Builder datapathType(String datapathType) { this.datapathType = Optional.ofNullable(datapathType); return this; } @Override public Builder controlProtocols(List<ControlProtocolVersion> 
controlProtocols) { this.controlProtocols = Optional.ofNullable(controlProtocols); return this; } @Override public Builder disableInBand() { this.disableInBand = Optional.of(Boolean.TRUE); return this; } @Override public BridgeDescription.Builder mcastSnoopingEnable() { this.mcastSnoopingEnable = Optional.of(Boolean.TRUE); return this; } } }
3,050
457
<filename>opsli-base-support/opsli-core/src/main/java/org/opsli/core/autoconfigure/conf/SwaggerConfig.java /** * Copyright 2020 OPSLI 快速开发平台 https://www.opsli.com * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.opsli.core.autoconfigure.conf; import com.fasterxml.classmate.TypeResolver; import com.github.xiaoymin.knife4j.spring.annotations.EnableKnife4j; import com.google.common.collect.Lists; import io.swagger.annotations.ApiOperation; import org.opsli.core.autoconfigure.properties.GlobalProperties; import org.opsli.core.utils.UserTokenUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import springfox.bean.validators.configuration.BeanValidatorPluginsConfiguration; import springfox.documentation.builders.ApiInfoBuilder; import springfox.documentation.builders.ParameterBuilder; import springfox.documentation.builders.PathSelectors; import springfox.documentation.builders.RequestHandlerSelectors; import springfox.documentation.schema.ModelRef; import springfox.documentation.service.*; import springfox.documentation.spi.DocumentationType; import springfox.documentation.spi.service.contexts.SecurityContext; import springfox.documentation.spring.web.plugins.Docket; import springfox.documentation.swagger2.annotations.EnableSwagger2; import javax.annotation.Resource; import 
java.util.List; /** * Swagger 配置类 * * @author Parker * @date 2020-09-15 */ @Configuration @EnableSwagger2 @EnableKnife4j @Import(BeanValidatorPluginsConfiguration.class) public class SwaggerConfig { /** 配置类 */ @Resource private GlobalProperties globalProperties; private final TypeResolver typeResolver; @Autowired public SwaggerConfig(TypeResolver typeResolver) { this.typeResolver = typeResolver; } // ========================= Swagger ========================= /** * swagger2的配置文件,这里可以配置swagger2的一些基本的内容,比如扫描的包等等 * * @return Docket */ @Bean public Docket createRestApi() { return new Docket(DocumentationType.SWAGGER_2) .apiInfo(apiInfo()) .groupName("opsli 2.X") .select() //此包路径下的类,才生成接口文档 .apis(RequestHandlerSelectors.basePackage("org.opsli")) //加了ApiOperation注解的类,才生成接口文档 .apis(RequestHandlerSelectors.withMethodAnnotation(ApiOperation.class)) .paths(PathSelectors.any()) .build() .securityContexts(Lists.newArrayList(securityContext())) .securitySchemes(Lists.<SecurityScheme>newArrayList(apiKey())); } /** * api文档的详细信息函数,注意这里的注解引用的是哪个 * * @return ApiInfo */ private ApiInfo apiInfo() { return new ApiInfoBuilder() // //大标题 .title(globalProperties.getSystemName() + " 服务API接口文档") // 版本号 .version("1.0") // 描述 .description("后台API接口") // 作者 .contact(new Contact("Parker", "https://opsli.com", "<EMAIL>")) .license("The Apache License, Version 2.0") .licenseUrl("http://www.apache.org/licenses/LICENSE-2.0.html") .build(); } /** * 安全模块 * @return SecurityContext */ private SecurityContext securityContext() { return SecurityContext.builder() .securityReferences(defaultAuth()) .forPaths(PathSelectors.regex("/.*")) .build(); } /** * jwt token * @return List */ private List<Parameter> defaultToken() { ParameterBuilder parameterBuilder = new ParameterBuilder(); List<Parameter> parameters= Lists.newArrayList(); parameterBuilder.name(UserTokenUtil.TOKEN_NAME) .description("Token 令牌") .modelRef(new ModelRef("String")) .parameterType("header") .required(false).build(); 
parameters.add(parameterBuilder.build()); return parameters; } /** * oauth2 授权 * @return List */ List<SecurityReference> defaultAuth() { AuthorizationScope authorizationScope = new AuthorizationScope("global", "accessEverything"); AuthorizationScope[] authorizationScopes = new AuthorizationScope[1]; authorizationScopes[0] = authorizationScope; return Lists.newArrayList(new SecurityReference(UserTokenUtil.TOKEN_NAME, authorizationScopes)); } private ApiKey apiKey() { return new ApiKey(UserTokenUtil.TOKEN_NAME, UserTokenUtil.TOKEN_NAME, "header"); } }
2,280
651
/** * This file is mostly a copy of nanopb/examples/network_server/common.h */ #include <pb_decode.h> #include <pb_encode.h> #include <sys/socket.h> #include <sys/types.h> #include "nanopb_stream.h" static bool write_callback(pb_ostream_t *stream, const uint8_t *buf, size_t count) { int fd = (intptr_t)stream->state; return send(fd, buf, count, 0) == (ssize_t)count; } static bool read_callback(pb_istream_t *stream, uint8_t *buf, size_t count) { int fd = (intptr_t)stream->state; int result; result = recv(fd, buf, count, MSG_WAITALL); if (result == 0) { stream->bytes_left = 0; /* EOF */ } return result == (int)count; } pb_ostream_t pb_ostream_from_socket(int fd) { pb_ostream_t stream = {&write_callback, (void *)(intptr_t)fd, SIZE_MAX, 0, NULL}; return stream; } pb_istream_t pb_istream_from_socket(int fd) { pb_istream_t stream = {&read_callback, (void *)(intptr_t)fd, SIZE_MAX, NULL}; return stream; }
447
2,338
//===-- sanitizer_type_traits_test.cpp ------------------------------------===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// // // This file is a part of ThreadSanitizer/AddressSanitizer runtime. // //===----------------------------------------------------------------------===// #include "sanitizer_common/sanitizer_type_traits.h" #include "gtest/gtest.h" #include "sanitizer_common/sanitizer_internal_defs.h" using namespace __sanitizer; TEST(SanitizerCommon, IsSame) { ASSERT_TRUE((is_same<unsigned, unsigned>::value)); ASSERT_TRUE((is_same<uptr, uptr>::value)); ASSERT_TRUE((is_same<sptr, sptr>::value)); ASSERT_TRUE((is_same<const uptr, const uptr>::value)); ASSERT_FALSE((is_same<unsigned, signed>::value)); ASSERT_FALSE((is_same<uptr, sptr>::value)); ASSERT_FALSE((is_same<uptr, const uptr>::value)); } TEST(SanitizerCommon, Conditional) { ASSERT_TRUE((is_same<int, conditional<true, int, double>::type>::value)); ASSERT_TRUE((is_same<double, conditional<false, int, double>::type>::value)); }
421
5,305
import java.util.Arrays; public class JavaExporter_1 { public static String varargExample(String... args) { return Arrays.toString(args); } }
51
1,800
void LogSetOutputFunction(void *data);
11
361
#include "file.h" #ifdef BUILTIN_ELF #include <sys/types.h> #include <string.h> #include <stdio.h> #include <ctype.h> #include <stdlib.h> #ifdef HAVE_UNISTD_H #include <unistd.h> #endif #include <errno.h> #include "readelf.h" #ifndef lint FILE_RCSID("@(#)$Id: readelf.c,v 1.1.1.1 2001/10/16 18:05:31 provos Exp $") #endif #ifdef ELFCORE static void dophn_core __P((int, int, int, off_t, int, size_t)); #endif static void dophn_exec __P((int, int, int, off_t, int, size_t)); static void doshn __P((int, int, int, off_t, int, size_t)); static uint16_t getu16 __P((int, int)); static uint32_t getu32 __P((int, uint32_t)); static uint64_t getu64 __P((int, uint64_t)); static uint16_t getu16(swap, value) int swap; uint16_t value; { union { uint16_t ui; char c[2]; } retval, tmpval; if (swap) { tmpval.ui = value; retval.c[0] = tmpval.c[1]; retval.c[1] = tmpval.c[0]; return retval.ui; } else return value; } static uint32_t getu32(swap, value) int swap; uint32_t value; { union { uint32_t ui; char c[4]; } retval, tmpval; if (swap) { tmpval.ui = value; retval.c[0] = tmpval.c[3]; retval.c[1] = tmpval.c[2]; retval.c[2] = tmpval.c[1]; retval.c[3] = tmpval.c[0]; return retval.ui; } else return value; } static uint64_t getu64(swap, value) int swap; uint64_t value; { union { uint64_t ui; char c[8]; } retval, tmpval; if (swap) { tmpval.ui = value; retval.c[0] = tmpval.c[7]; retval.c[1] = tmpval.c[6]; retval.c[2] = tmpval.c[5]; retval.c[3] = tmpval.c[4]; retval.c[4] = tmpval.c[3]; retval.c[5] = tmpval.c[2]; retval.c[6] = tmpval.c[1]; retval.c[7] = tmpval.c[0]; return retval.ui; } else return value; } #define sh_addr (class == ELFCLASS32 \ ? (void *) &sh32 \ : (void *) &sh64) #define shs_type (class == ELFCLASS32 \ ? getu32(swap, sh32.sh_type) \ : getu32(swap, sh64.sh_type)) #define ph_addr (class == ELFCLASS32 \ ? (void *) &ph32 \ : (void *) &ph64) #define ph_type (class == ELFCLASS32 \ ? getu32(swap, ph32.p_type) \ : getu32(swap, ph64.p_type)) #define ph_offset (class == ELFCLASS32 \ ? 
getu32(swap, ph32.p_offset) \ : getu64(swap, ph64.p_offset)) #define nh_size (class == ELFCLASS32 \ ? sizeof *nh32 \ : sizeof *nh64) #define nh_type (class == ELFCLASS32 \ ? getu32(swap, nh32->n_type) \ : getu32(swap, nh64->n_type)) #define nh_namesz (class == ELFCLASS32 \ ? getu32(swap, nh32->n_namesz) \ : getu32(swap, nh64->n_namesz)) #define nh_descsz (class == ELFCLASS32 \ ? getu32(swap, nh32->n_descsz) \ : getu32(swap, nh64->n_descsz)) #define prpsoffsets(i) (class == ELFCLASS32 \ ? prpsoffsets32[i] \ : prpsoffsets64[i]) static void doshn(class, swap, fd, off, num, size) int class; int swap; int fd; off_t off; int num; size_t size; { Elf32_Shdr sh32; Elf64_Shdr sh64; if (lseek(fd, off, SEEK_SET) == -1) error("lseek failed (%s).\n", strerror(errno)); for ( ; num; num--) { if (read(fd, sh_addr, size) == -1) error("read failed (%s).\n", strerror(errno)); if (shs_type == SHT_SYMTAB /* || shs_type == SHT_DYNSYM */) { (void) printf (", not stripped"); return; } } (void) printf (", stripped"); } /* * Look through the program headers of an executable image, searching * for a PT_INTERP section; if one is found, it's dynamically linked, * otherwise it's statically linked. 
*/ static void dophn_exec(class, swap, fd, off, num, size) int class; int swap; int fd; off_t off; int num; size_t size; { Elf32_Phdr ph32; Elf64_Phdr ph64; char *linking_style = "statically"; char *shared_libraries = ""; if (lseek(fd, off, SEEK_SET) == -1) error("lseek failed (%s).\n", strerror(errno)); for ( ; num; num--) { if (read(fd, ph_addr, size) == -1) error("read failed (%s).\n", strerror(errno)); switch (ph_type) { case PT_DYNAMIC: linking_style = "dynamically"; break; case PT_INTERP: shared_libraries = " (uses shared libs)"; break; } } printf(", %s linked%s", linking_style, shared_libraries); } #ifdef ELFCORE size_t prpsoffsets32[] = { 8, /* FreeBSD */ 28, /* Linux 2.0.36 */ 32, /* Linux (I forget which kernel version) */ 84, /* SunOS 5.x */ }; size_t prpsoffsets64[] = { 120, /* SunOS 5.x, 64-bit */ }; #define NOFFSETS32 (sizeof prpsoffsets32 / sizeof prpsoffsets32[0]) #define NOFFSETS64 (sizeof prpsoffsets64 / sizeof prpsoffsets64[0]) #define NOFFSETS (class == ELFCLASS32 ? NOFFSETS32 : NOFFSETS64) /* * Look through the program headers of an executable image, searching * for a PT_NOTE section of type NT_PRPSINFO, with a name "CORE" or * "FreeBSD"; if one is found, try looking in various places in its * contents for a 16-character string containing only printable * characters - if found, that string should be the name of the program * that dropped core. Note: right after that 16-character string is, * at least in SunOS 5.x (and possibly other SVR4-flavored systems) and * Linux, a longer string (80 characters, in 5.x, probably other * SVR4-flavored systems, and Linux) containing the start of the * command line for that program. * * The signal number probably appears in a section of type NT_PRSTATUS, * but that's also rather OS-dependent, in ways that are harder to * dissect with heuristics, so I'm not bothering with the signal number. 
* (I suppose the signal number could be of interest in situations where * you don't have the binary of the program that dropped core; if you * *do* have that binary, the debugger will probably tell you what * signal it was.) */ static void dophn_core(class, swap, fd, off, num, size) int class; int swap; int fd; off_t off; int num; size_t size; { Elf32_Phdr ph32; Elf32_Nhdr *nh32; Elf64_Phdr ph64; Elf64_Nhdr *nh64; size_t offset, nameoffset, noffset, reloffset; unsigned char c; int i, j; char nbuf[BUFSIZ]; int bufsize; int is_freebsd; /* * Loop through all the program headers. */ for ( ; num; num--) { if (lseek(fd, off, SEEK_SET) == -1) error("lseek failed (%s).\n", strerror(errno)); if (read(fd, ph_addr, size) == -1) error("read failed (%s).\n", strerror(errno)); off += size; if (ph_type != PT_NOTE) continue; /* * This is a PT_NOTE section; loop through all the notes * in the section. */ if (lseek(fd, (off_t) ph_offset, SEEK_SET) == -1) error("lseek failed (%s).\n", strerror(errno)); bufsize = read(fd, nbuf, BUFSIZ); if (bufsize == -1) error(": " "read failed (%s).\n", strerror(errno)); offset = 0; for (;;) { if (offset >= bufsize) break; if (class == ELFCLASS32) nh32 = (Elf32_Nhdr *)&nbuf[offset]; else nh64 = (Elf64_Nhdr *)&nbuf[offset]; offset += nh_size; /* * Check whether this note has the name "CORE" or * "FreeBSD". */ if (offset + nh_namesz >= bufsize) { /* * We're past the end of the buffer. */ break; } nameoffset = offset; offset += nh_namesz; offset = ((offset + 3)/4)*4; /* * Sigh. 
The 2.0.36 kernel in Debian 2.1, at * least, doesn't correctly implement name * sections, in core dumps, as specified by * the "Program Linking" section of "UNIX(R) System * V Release 4 Programmer's Guide: ANSI C and * Programming Support Tools", because my copy * clearly says "The first 'namesz' bytes in 'name' * contain a *null-terminated* [emphasis mine] * character representation of the entry's owner * or originator", but the 2.0.36 kernel code * doesn't include the terminating null in the * name.... */ if ((nh_namesz == 4 && strncmp(&nbuf[nameoffset], "CORE", 4) == 0) || (nh_namesz == 5 && strcmp(&nbuf[nameoffset], "CORE") == 0)) is_freebsd = 0; else if ((nh_namesz == 8 && strcmp(&nbuf[nameoffset], "FreeBSD") == 0)) is_freebsd = 1; else continue; if (nh_type == NT_PRPSINFO) { /* * Extract the program name. We assume * it to be 16 characters (that's what it * is in SunOS 5.x and Linux). * * Unfortunately, it's at a different offset * in varous OSes, so try multiple offsets. * If the characters aren't all printable, * reject it. */ for (i = 0; i < NOFFSETS; i++) { reloffset = prpsoffsets(i); noffset = offset + reloffset; for (j = 0; j < 16; j++, noffset++, reloffset++) { /* * Make sure we're not past * the end of the buffer; if * we are, just give up. */ if (noffset >= bufsize) goto tryanother; /* * Make sure we're not past * the end of the contents; * if we are, this obviously * isn't the right offset. */ if (reloffset >= nh_descsz) goto tryanother; c = nbuf[noffset]; if (c == '\0') { /* * A '\0' at the * beginning is * obviously wrong. * Any other '\0' * means we're done. */ if (j == 0) goto tryanother; else break; } else { /* * A nonprintable * character is also * wrong. */ #define isquote(c) (strchr("'\"`", (c)) != NULL) if (!isprint(c) || isquote(c)) goto tryanother; } } /* * Well, that worked. 
*/ printf(", from '%.16s'", &nbuf[offset + prpsoffsets(i)]); break; tryanother: ; } break; } offset += nh_descsz; offset = ((offset + 3)/4)*4; } } } #endif void tryelf(fd, buf, nbytes) int fd; unsigned char *buf; int nbytes; { union { int32 l; char c[sizeof (int32)]; } u; int class; int swap; /* * ELF executables have multiple section headers in arbitrary * file locations and thus file(1) cannot determine it from easily. * Instead we traverse thru all section headers until a symbol table * one is found or else the binary is stripped. */ if (buf[EI_MAG0] != ELFMAG0 || (buf[EI_MAG1] != ELFMAG1 && buf[EI_MAG1] != OLFMAG1) || buf[EI_MAG2] != ELFMAG2 || buf[EI_MAG3] != ELFMAG3) return; class = buf[4]; if (class == ELFCLASS32) { Elf32_Ehdr elfhdr; if (nbytes <= sizeof (Elf32_Ehdr)) return; u.l = 1; (void) memcpy(&elfhdr, buf, sizeof elfhdr); swap = (u.c[sizeof(int32) - 1] + 1) != elfhdr.e_ident[5]; if (getu16(swap, elfhdr.e_type) == ET_CORE) #ifdef ELFCORE dophn_core(class, swap, fd, getu32(swap, elfhdr.e_phoff), getu16(swap, elfhdr.e_phnum), getu16(swap, elfhdr.e_phentsize)); #else ; #endif else { if (getu16(swap, elfhdr.e_type) == ET_EXEC) { dophn_exec(class, swap, fd, getu32(swap, elfhdr.e_phoff), getu16(swap, elfhdr.e_phnum), getu16(swap, elfhdr.e_phentsize)); } doshn(class, swap, fd, getu32(swap, elfhdr.e_shoff), getu16(swap, elfhdr.e_shnum), getu16(swap, elfhdr.e_shentsize)); } return; } if (class == ELFCLASS64) { Elf64_Ehdr elfhdr; if (nbytes <= sizeof (Elf64_Ehdr)) return; u.l = 1; (void) memcpy(&elfhdr, buf, sizeof elfhdr); swap = (u.c[sizeof(int32) - 1] + 1) != elfhdr.e_ident[5]; if (getu16(swap, elfhdr.e_type) == ET_CORE) #ifdef ELFCORE dophn_core(class, swap, fd, #ifdef USE_ARRAY_FOR_64BIT_TYPES getu32(swap, elfhdr.e_phoff[1]), #else getu64(swap, elfhdr.e_phoff), #endif getu16(swap, elfhdr.e_phnum), getu16(swap, elfhdr.e_phentsize)); #else ; #endif else { if (getu16(swap, elfhdr.e_type) == ET_EXEC) { dophn_exec(class, swap, fd, #ifdef 
USE_ARRAY_FOR_64BIT_TYPES getu32(swap, elfhdr.e_phoff[1]), #else getu64(swap, elfhdr.e_phoff), #endif getu16(swap, elfhdr.e_phnum), getu16(swap, elfhdr.e_phentsize)); } doshn(class, swap, fd, #ifdef USE_ARRAY_FOR_64BIT_TYPES getu32(swap, elfhdr.e_shoff[1]), #else getu64(swap, elfhdr.e_shoff), #endif getu16(swap, elfhdr.e_shnum), getu16(swap, elfhdr.e_shentsize)); } return; } } #endif
5,823
1,577
<reponame>Kua-Fu/rally # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import functools import re from esrally import exceptions VERSIONS = re.compile(r"^(\d+)\.(\d+)\.(\d+)(?:-(.+))?$") VERSIONS_OPTIONAL = re.compile(r"^(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:-(.+))?$") def _versions_pattern(strict): return VERSIONS if strict else VERSIONS_OPTIONAL def is_version_identifier(text, strict=True): return text is not None and _versions_pattern(strict).match(text) is not None def major_version(version): """ Determines the major version of a given version string. :param version: A version string in the format major.minor.path-suffix (suffix is optional) :return: The major version (as int). In case the version string is invalid, an ``exceptions.InvalidSyntax`` is raised. """ major, _, _, _ = components(version) return major def components(version, strict=True): """ Determines components of a version string. :param version: A version string in the format major.minor.path-suffix (suffix is optional) :param strict: Determines whether versions need to have at least "major", "minor" and "patch" defined. 
Default: True :return: A tuple with four components determining "major", "minor", "patch" and "suffix" (any part except "major" may be `None`) """ versions_pattern = _versions_pattern(strict) matches = versions_pattern.match(version) if matches: if matches.start(4) > 0: return int(matches.group(1)), int(matches.group(2)), int(matches.group(3)), matches.group(4) elif matches.start(3) > 0: return int(matches.group(1)), int(matches.group(2)), int(matches.group(3)), None elif matches.start(2) > 0: return int(matches.group(1)), int(matches.group(2)), None, None elif matches.start(1) > 0: return int(matches.group(1)), None, None, None else: return int(version), None, None, None raise exceptions.InvalidSyntax("version string '%s' does not conform to pattern '%s'" % (version, versions_pattern.pattern)) @functools.total_ordering class Version: """ Represents a version with components major, minor, patch and suffix (suffix is optional). Suffixes are not considered for version comparisons as its contents are opaque and a semantically correct order cannot be defined. """ def __init__(self, major, minor, patch, suffix=None): self.major = major self.minor = minor self.patch = patch self.suffix = suffix def __eq__(self, o: object) -> bool: return isinstance(o, type(self)) and (self.major, self.minor, self.patch) == (o.major, o.minor, o.patch) def __lt__(self, o: object) -> bool: return isinstance(o, type(self)) and (self.major, self.minor, self.patch) < (o.major, o.minor, o.patch) def __hash__(self) -> int: return hash(self.major) ^ hash(self.minor) ^ hash(self.patch) ^ hash(self.suffix) def __repr__(self) -> str: v = f"{self.major}.{self.minor}.{self.patch}" return f"{v}-{self.suffix}" if self.suffix else v @classmethod def from_string(cls, v): return cls(*components(v)) def variants_of(version): for v, _ in VersionVariants(version).all_versions: yield v class VersionVariants: """ Build all possible variations of a version. e.g. 
if version "5.0.0-SNAPSHOT" is given, the possible variations are: self.with_suffix: "5.0.0-SNAPSHOT", self.with_patch: "5.0.0", self.with_minor: "5.0", self.with_major: "5" """ def __init__(self, version): """ :param version: A version string in the format major.minor.path-suffix (suffix is optional) """ self.major, self.minor, self.patch, self.suffix = components(version) self.with_major = f"{int(self.major)}" self.with_minor = f"{int(self.major)}.{int(self.minor)}" self.with_patch = f"{int(self.major)}.{int(self.minor)}.{int(self.patch)}" self.with_suffix = f"{int(self.major)}.{int(self.minor)}.{int(self.patch)}-{self.suffix}" if self.suffix else None @property def all_versions(self): """ :return: a list of tuples containing version variants and version type ordered from most specific to most generic variation. Example: [("5.0.0-SNAPSHOT", "with_suffix"), ("5.0.0", "with_patch"), ("5.0", "with_minor"), ("5", "with_major")] """ versions = [(self.with_suffix, "with_suffix")] if self.suffix else [] versions.extend( [ (self.with_patch, "with_patch"), (self.with_minor, "with_minor"), (self.with_major, "with_major"), ] ) return versions def best_match(available_alternatives, distribution_version): """ Finds the most specific branch for a given distribution version assuming that versions have the pattern: major.minor.patch-suffix and the provided alternatives reflect this pattern. Best matches for distribution_version from available_alternatives may be: 1. exact matches of major.minor 2. nearest prior minor within the same major 3. major version 4. as a last resort, `master`. See test_find_best_match() for examples. :param available_alternatives: A list of possible distribution versions (or shortened versions). :param distribution_version: An Elasticsearch distribution version. :return: The most specific alternative that is available or None. 
""" if is_version_identifier(distribution_version): versions = VersionVariants(distribution_version) for version, version_type in versions.all_versions: if version in available_alternatives: return version # match nearest prior minor if version_type == "with_minor" and (latest_minor := latest_bounded_minor(available_alternatives, versions)): if latest_minor: return f"{versions.major}.{latest_minor}" # not found in the available alternatives, it could still be a master version major, _, _, _ = components(distribution_version) if major > _latest_major(available_alternatives): return "master" elif not distribution_version: return "master" return None def _latest_major(alternatives): max_major = -1 for a in alternatives: if is_version_identifier(a, strict=False): major, _, _, _ = components(a, strict=False) max_major = max(major, max_major) return max_major def latest_bounded_minor(alternatives, target_version): """ Finds the closest minor version that is smaller or eq to target_version from a list of version alternatives. Versions including patch or patch-suffix in alternatives are ignored. See test_latest_bounded_minor() for examples. :param alternatives: list of alternative versions :param target_version: a VersionVariants object presenting the distribution version :return: the closest minor version (if available) from alternatives otherwise None """ eligible_minors = [] for a in alternatives: if is_version_identifier(a, strict=False): major, minor, patch, suffix = components(a, strict=False) if patch is not None or suffix is not None: # branches containing patch or patch-suffix aren't supported continue if major == target_version.major and minor and minor <= target_version.minor: eligible_minors.append(minor) # no matching minor version if not eligible_minors: return None eligible_minors.sort() return min(eligible_minors, key=lambda x: abs(x - target_version.minor))
3,222
575
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/media/history/media_history_playback_table.h"

#include "base/strings/stringprintf.h"
#include "base/updateable_sequenced_task_runner.h"
#include "chrome/browser/media/history/media_history_origin_table.h"
#include "content/public/browser/media_player_watch_time.h"
#include "sql/statement.h"

namespace media_history {

const char MediaHistoryPlaybackTable::kTableName[] = "playback";

MediaHistoryPlaybackTable::MediaHistoryPlaybackTable(
    scoped_refptr<base::UpdateableSequencedTaskRunner> db_task_runner)
    : MediaHistoryTableBase(std::move(db_task_runner)) {}

MediaHistoryPlaybackTable::~MediaHistoryPlaybackTable() = default;

// Creates the "playback" table and its origin_id index if they do not exist
// yet. Returns sql::INIT_FAILURE (after resetting the DB) on any error.
sql::InitStatus MediaHistoryPlaybackTable::CreateTableIfNonExistent() {
  if (!CanAccessDatabase())
    return sql::INIT_FAILURE;

  // Rows cascade-delete when the referenced origin row is removed.
  bool success =
      DB()->Execute(base::StringPrintf("CREATE TABLE IF NOT EXISTS %s("
                                       "id INTEGER PRIMARY KEY AUTOINCREMENT,"
                                       "origin_id INTEGER NOT NULL,"
                                       "url TEXT,"
                                       "watch_time_s INTEGER,"
                                       "has_video INTEGER,"
                                       "has_audio INTEGER,"
                                       "last_updated_time_s BIGINT NOT NULL,"
                                       "CONSTRAINT fk_origin "
                                       "FOREIGN KEY (origin_id) "
                                       "REFERENCES origin(id) "
                                       "ON DELETE CASCADE"
                                       ")",
                                       kTableName)
                        .c_str());

  if (success) {
    // Index lookups by origin since deletions/queries key off origin_id.
    success = DB()->Execute(
        base::StringPrintf(
            "CREATE INDEX IF NOT EXISTS playback_origin_id_index ON "
            "%s (origin_id)",
            kTableName)
            .c_str());
  }

  if (!success) {
    ResetDB();
    LOG(ERROR) << "Failed to create media history playback table.";
    return sql::INIT_FAILURE;
  }

  return sql::INIT_OK;
}

// Inserts one playback row for |watch_time|, resolving origin_id via a
// subselect on the origin table. Must be called inside a transaction (see
// the DCHECK). Returns false if the insert fails.
bool MediaHistoryPlaybackTable::SavePlayback(
    const content::MediaPlayerWatchTime& watch_time) {
  DCHECK_LT(0, DB()->transaction_nesting());
  if (!CanAccessDatabase())
    return false;

  sql::Statement statement(DB()->GetCachedStatement(
      SQL_FROM_HERE,
      base::StringPrintf("INSERT INTO %s "
                         "(origin_id, url, watch_time_s, has_video, has_audio, "
                         "last_updated_time_s) "
                         "VALUES ((SELECT id FROM origin WHERE origin = ?), "
                         "?, ?, ?, ?, ?)",
                         kTableName)
          .c_str()));
  // Bind indices follow the column order in the INSERT above.
  statement.BindString(0, MediaHistoryOriginTable::GetOriginForStorage(
                              url::Origin::Create(watch_time.origin)));
  statement.BindString(1, watch_time.url.spec());
  statement.BindInt64(2, watch_time.cumulative_watch_time.InSeconds());
  statement.BindInt(3, watch_time.has_video);
  statement.BindInt(4, watch_time.has_audio);
  // Stored as whole seconds since the Windows epoch.
  statement.BindInt64(5,
                      base::Time::Now().ToDeltaSinceWindowsEpoch().InSeconds());

  if (!statement.Run()) {
    return false;
  }

  return true;
}

// Reads every playback row into mojom structs. Returns an empty vector when
// the database is unavailable.
std::vector<mojom::MediaHistoryPlaybackRowPtr>
MediaHistoryPlaybackTable::GetPlaybackRows() {
  std::vector<mojom::MediaHistoryPlaybackRowPtr> playbacks;
  if (!CanAccessDatabase())
    return playbacks;

  sql::Statement statement(DB()->GetUniqueStatement(
      base::StringPrintf(
          "SELECT url, watch_time_s, has_audio, has_video, last_updated_time_s "
          "FROM %s",
          kTableName)
          .c_str()));

  while (statement.Step()) {
    mojom::MediaHistoryPlaybackRowPtr playback(
        mojom::MediaHistoryPlaybackRow::New());
    playback->url = GURL(statement.ColumnString(0));
    playback->watchtime =
        base::TimeDelta::FromSeconds(statement.ColumnInt64(1));
    // Column order here is has_audio then has_video (note: reversed relative
    // to the INSERT in SavePlayback, but consistent with this SELECT list).
    playback->has_audio = statement.ColumnBool(2);
    playback->has_video = statement.ColumnBool(3);
    // Converted to a JS-style millisecond timestamp for the mojom consumer.
    playback->last_updated_time =
        base::Time::FromDeltaSinceWindowsEpoch(
            base::TimeDelta::FromSeconds(statement.ColumnInt64(4)))
            .ToJsTime();

    playbacks.push_back(std::move(playback));
  }

  DCHECK(statement.Succeeded());
  return playbacks;
}

// Deletes every playback row recorded for |url|.
bool MediaHistoryPlaybackTable::DeleteURL(const GURL& url) {
  if (!CanAccessDatabase())
    return false;

  // NOTE(review): the table name is hard-coded here instead of using
  // kTableName as the other statements do -- consider unifying.
  sql::Statement statement(DB()->GetCachedStatement(
      SQL_FROM_HERE, "DELETE FROM playback WHERE url = ?"));
  statement.BindString(0, url.spec());
  return statement.Run();
}

}  // namespace media_history
2,217
633
<reponame>michael-geng/itranswarp<gh_stars>100-1000
package com.itranswarp.web.view;

import java.util.List;

import com.mitchellbosecke.pebble.extension.Filter;

/**
 * Convenience base class for Pebble template {@link Filter} implementations.
 *
 * <p>Subclasses only need to supply the filter's registration name; the
 * named-argument declaration is stubbed out below because these filters do
 * not declare any named arguments.
 */
public abstract class AbstractFilter implements Filter {

	/**
	 * @return the name under which this filter is registered with the
	 *         template engine.
	 */
	public abstract String getName();

	@Override
	public List<String> getArgumentNames() {
		// Returning null signals to Pebble that this filter declares no
		// named arguments.
		return null;
	}
}
114
314
<filename>ch08-supervised/decision_tree/decision_tree_logit.py<gh_stars>100-1000
# -*- coding: UTF-8 -*-
"""
This script demonstrates chaining a decision tree with a logistic regression model.
"""
import sys

import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_curve, auc
from sklearn.preprocessing import OneHotEncoder
from sklearn.tree import DecisionTreeClassifier


def generateData(n):
    """
    Generate training data: n samples with 4 features and a binary label "y".
    """
    X, y = make_classification(n_samples=n, n_features=4)
    data = pd.DataFrame(X, columns=["x1", "x2", "x3", "x4"])
    data["y"] = y
    return data


def trainModel(data, features, label):
    """
    Fit three models -- logistic regression alone, a decision tree alone, and
    decision tree + logistic regression -- and return their ROC curves keyed
    by model name.
    """
    res = {}
    trainData, testData = train_test_split(data, test_size=0.5)
    # Logistic regression on its own
    logitModel = LogisticRegression()
    logitModel.fit(trainData[features], trainData[label])
    logitProb = logitModel.predict_proba(testData[features])[:, 1]
    res["logit"] = roc_curve(testData[label], logitProb)
    # Decision tree on its own
    dtModel = DecisionTreeClassifier(max_depth=2)
    dtModel.fit(trainData[features], trainData[label])
    dtProb = dtModel.predict_proba(testData[features])[:, 1]
    res["DT"] = roc_curve(testData[label], dtProb)
    # Decision tree chained with logistic regression
    ## To prevent overfitting, train the tree and the logistic regression on
    ## different subsets of the data
    trainDT, trainLR = train_test_split(trainData, test_size=0.5)
    ## Use the decision tree to transform the first two features
    m = 2
    _dt = DecisionTreeClassifier(max_depth=2)
    _dt.fit(trainDT[features[:m]], trainDT[label])
    # Leaf indices become categorical features, one-hot encoded below.
    leafNode = _dt.apply(trainDT[features[:m]]).reshape(-1, 1)
    coder = OneHotEncoder()
    coder.fit(leafNode)
    newFeature = np.c_[
        coder.transform(_dt.apply(trainLR[features[:m]]).reshape(-1, 1)).toarray(),
        trainLR[features[m:]]]
    _logit = LogisticRegression()
    # The first one-hot column is dropped ([:, 1:]), presumably to avoid
    # collinearity with the intercept -- TODO confirm.
    _logit.fit(newFeature[:, 1:], trainLR[label])
    testFeature = np.c_[
        coder.transform(_dt.apply(testData[features[:m]]).reshape(-1, 1)).toarray(),
        testData[features[m:]]]
    dtLogitProb = _logit.predict_proba(testFeature[:, 1:])[:, 1]
    res["DT + logit"] = roc_curve(testData[label], dtLogitProb)
    return res


def visualize(re):
    """
    Visualize the models' ROC curves (with AUC in the legend labels).
    """
    # Configure a special font so Chinese characters render in Matplotlib
    plt.rcParams["font.sans-serif"]=["SimHei"]
    # Create a figure
    fig = plt.figure(figsize=(6, 6), dpi=80)
    ax = fig.add_subplot(1, 1, 1)
    ax.set_xlim([0, 1])
    ax.set_ylim([0, 1])
    styles = ["k--", "r-.", "b"]
    model = ["logit", "DT", "DT + logit"]
    for i, s in zip(model, styles):
        fpr, tpr, _ = re[i]
        _auc = auc(fpr, tpr)
        # In Python 3, str does not need to be decoded
        if sys.version_info[0] == 3:
            ax.plot(fpr, tpr, s, label="%s:%s; %s=%0.2f" % ("模型", i, "曲线下面积(AUC)", _auc))
        else:
            ax.plot(fpr, tpr, s,
                label="%s:%s; %s=%0.2f" % ("模型".decode("utf-8"), i, "曲线下面积(AUC)".decode("utf-8"), _auc))
    legend = plt.legend(loc=4, shadow=True)
    plt.show()


if __name__ == "__main__":
    np.random.seed(4040)
    data = generateData(4000)
    re = trainModel(data, ["x1", "x2", "x3", "x4"], "y")
    visualize(re)
1,792
892
<reponame>westonsteimel/advisory-database-github { "schema_version": "1.2.0", "id": "GHSA-55g3-v56r-3g2g", "modified": "2022-05-02T00:07:06Z", "published": "2022-05-02T00:07:06Z", "aliases": [ "CVE-2008-4114" ], "details": "srv.sys in the Server service in Microsoft Windows 2000 SP4, XP SP2 and SP3, Server 2003 SP1 and SP2, Vista Gold and SP1, and Server 2008 allows remote attackers to cause a denial of service (system crash) or possibly have unspecified other impact via an SMB WRITE_ANDX packet with an offset that is inconsistent with the packet size, related to \"insufficiently validating the buffer size,\" as demonstrated by a request to the \\PIPE\\lsarpc named pipe, aka \"SMB Validation Denial of Service Vulnerability.\"", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2008-4114" }, { "type": "WEB", "url": "https://docs.microsoft.com/en-us/security-updates/securitybulletins/2009/ms09-001" }, { "type": "WEB", "url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/45146" }, { "type": "WEB", "url": "https://oval.cisecurity.org/repository/search/definition/oval%3Aorg.mitre.oval%3Adef%3A5262" }, { "type": "WEB", "url": "https://oval.cisecurity.org/repository/search/definition/oval%3Aorg.mitre.oval%3Adef%3A6044" }, { "type": "WEB", "url": "https://www.exploit-db.com/exploits/6463" }, { "type": "WEB", "url": "http://secunia.com/advisories/31883" }, { "type": "WEB", "url": "http://www.reversemode.com/index.php?option=com_content&task=view&id=54&Itemid=1" }, { "type": "WEB", "url": "http://www.securityfocus.com/archive/1/496354/100/0/threaded" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/31179" }, { "type": "WEB", "url": "http://www.securitytracker.com/id?1020887" }, { "type": "WEB", "url": "http://www.us-cert.gov/cas/techalerts/TA09-013A.html" }, { "type": "WEB", "url": "http://www.vallejo.cc/proyectos/vista_SMB_write_DoS.htm" }, { "type": "WEB", "url": 
"http://www.vupen.com/english/advisories/2008/2583" } ], "database_specific": { "cwe_ids": [ ], "severity": "HIGH", "github_reviewed": false } }
1,129
1,331
<reponame>fergonaut/metasploit-payloads<filename>c/meterpreter/source/extensions/priv/precomp.h
/*
 * Precompiled header for the Meterpreter "priv" extension: aggregates every
 * header the extension's translation units need.
 */
#ifndef METERPRETER_SOURCE_EXTENSION_PRIV_SERVER_PRECOMP_H
#define METERPRETER_SOURCE_EXTENSION_PRIV_SERVER_PRECOMP_H

/* Target Windows NT 4.0-era APIs. */
#define _WIN32_WINNT 0x0400

#include "priv.h"
#include "elevate.h"
#include "passwd.h"
#include "fs.h"

#include "../../ReflectiveDLLInjection/inject/src/GetProcAddressR.h"
#include "../../ReflectiveDLLInjection/dll/src/ReflectiveLoader.h"

/* MSVC has no strcasecmp; map it to the equivalent stricmp. */
#define strcasecmp stricmp

// declared in ReflectiveLoader.c and set by DllMain also in ReflectiveLoader.c
extern HINSTANCE hAppInstance;

#endif
239
769
<gh_stars>100-1000 // Copyright by Contributors #include <xgboost/metric.h> #include "../helpers.h" TEST(Metric, RMSE) { xgboost::Metric * metric = xgboost::Metric::Create("rmse"); ASSERT_STREQ(metric->Name(), "rmse"); EXPECT_NEAR(GetMetricEval(metric, {0, 1}, {0, 1}), 0, 1e-10); EXPECT_NEAR(GetMetricEval(metric, {0.1f, 0.9f, 0.1f, 0.9f}, { 0, 0, 1, 1}), 0.6403f, 0.001f); } TEST(Metric, MAE) { xgboost::Metric * metric = xgboost::Metric::Create("mae"); ASSERT_STREQ(metric->Name(), "mae"); EXPECT_NEAR(GetMetricEval(metric, {0, 1}, {0, 1}), 0, 1e-10); EXPECT_NEAR(GetMetricEval(metric, {0.1f, 0.9f, 0.1f, 0.9f}, { 0, 0, 1, 1}), 0.5f, 0.001f); } TEST(Metric, LogLoss) { xgboost::Metric * metric = xgboost::Metric::Create("logloss"); ASSERT_STREQ(metric->Name(), "logloss"); EXPECT_NEAR(GetMetricEval(metric, {0, 1}, {0, 1}), 0, 1e-10); EXPECT_NEAR(GetMetricEval(metric, {0.1f, 0.9f, 0.1f, 0.9f}, { 0, 0, 1, 1}), 1.2039f, 0.001f); } TEST(Metric, Error) { xgboost::Metric * metric = xgboost::Metric::Create("error"); ASSERT_STREQ(metric->Name(), "error"); EXPECT_NEAR(GetMetricEval(metric, {0, 1}, {0, 1}), 0, 1e-10); EXPECT_NEAR(GetMetricEval(metric, {0.1f, 0.9f, 0.1f, 0.9f}, { 0, 0, 1, 1}), 0.5f, 0.001f); EXPECT_ANY_THROW(xgboost::Metric::Create("error@abc")); delete metric; metric = xgboost::Metric::Create("[email protected]"); EXPECT_STREQ(metric->Name(), "error"); delete metric; metric = xgboost::Metric::Create("[email protected]"); ASSERT_STREQ(metric->Name(), "[email protected]"); EXPECT_STREQ(metric->Name(), "[email protected]"); EXPECT_NEAR(GetMetricEval(metric, {0, 1}, {0, 1}), 0, 1e-10); EXPECT_NEAR(GetMetricEval(metric, {0.1f, 0.2f, 0.1f, 0.2f}, { 0, 0, 1, 1}), 0.5f, 0.001f); } TEST(Metric, PoissionNegLogLik) { xgboost::Metric * metric = xgboost::Metric::Create("poisson-nloglik"); ASSERT_STREQ(metric->Name(), "poisson-nloglik"); EXPECT_NEAR(GetMetricEval(metric, {0, 1}, {0, 1}), 0.5f, 1e-10); EXPECT_NEAR(GetMetricEval(metric, {0.1f, 0.2f, 0.1f, 0.2f}, { 0, 0, 1, 1}), 
1.1280f, 0.001f); }
1,483
480
<reponame>weicao/galaxysql /* * Copyright [2013-2021], Alibaba Group Holding Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.polardbx.optimizer.hint; import com.alibaba.polardbx.common.exception.NotSupportException; import com.alibaba.polardbx.common.exception.TddlRuntimeException; import com.alibaba.polardbx.common.exception.code.ErrorCode; import com.alibaba.polardbx.common.jdbc.ParameterContext; import com.alibaba.polardbx.common.jdbc.Parameters; import com.alibaba.polardbx.common.model.hint.DirectlyRouteCondition; import com.alibaba.polardbx.common.model.hint.ExtraCmdRouteCondition; import com.alibaba.polardbx.common.model.hint.FullRouteCondition; import com.alibaba.polardbx.common.model.hint.RouteCondition; import com.alibaba.polardbx.common.model.hint.RuleRouteCondition; import com.alibaba.polardbx.common.model.sqljep.Comparative; import com.alibaba.polardbx.common.properties.ConnectionProperties; import com.alibaba.polardbx.common.utils.TStringUtil; import com.alibaba.polardbx.common.utils.TreeMaps; import com.alibaba.polardbx.common.utils.logger.Logger; import com.alibaba.polardbx.common.utils.logger.LoggerFactory; import com.alibaba.polardbx.druid.sql.ast.statement.SQLSelectStatement; import com.alibaba.polardbx.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock; import com.alibaba.polardbx.gms.topology.DbInfoManager; import com.alibaba.polardbx.optimizer.OptimizerContext; import com.alibaba.polardbx.optimizer.PlannerContext; import 
com.alibaba.polardbx.optimizer.config.meta.DrdsRelMetadataProvider; import com.alibaba.polardbx.optimizer.config.meta.DrdsRelOptCostImpl; import com.alibaba.polardbx.optimizer.config.schema.RootSchemaFactory; import com.alibaba.polardbx.optimizer.config.table.SchemaManager; import com.alibaba.polardbx.optimizer.config.table.TableMeta; import com.alibaba.polardbx.optimizer.context.ExecutionContext; import com.alibaba.polardbx.optimizer.core.CursorMeta; import com.alibaba.polardbx.optimizer.core.TddlJavaTypeFactoryImpl; import com.alibaba.polardbx.optimizer.core.TddlOperatorTable; import com.alibaba.polardbx.optimizer.core.TddlRelDataTypeSystemImpl; import com.alibaba.polardbx.optimizer.core.TddlTypeFactoryImpl; import com.alibaba.polardbx.optimizer.core.dialect.DbType; import com.alibaba.polardbx.optimizer.core.planner.ExecutionPlan; import com.alibaba.polardbx.optimizer.core.planner.Planner; import com.alibaba.polardbx.optimizer.core.planner.SqlConverter; import com.alibaba.polardbx.optimizer.core.planner.rule.RuleToUse; import com.alibaba.polardbx.optimizer.core.rel.AffectedRowsSum; import com.alibaba.polardbx.optimizer.core.rel.BroadcastTableModify; import com.alibaba.polardbx.optimizer.core.rel.DirectTableOperation; import com.alibaba.polardbx.optimizer.core.rel.ExecutionPlanPropertiesVisitor; import com.alibaba.polardbx.optimizer.core.rel.Gather; import com.alibaba.polardbx.optimizer.core.rel.LogicalInsert; import com.alibaba.polardbx.optimizer.core.rel.LogicalModify; import com.alibaba.polardbx.optimizer.core.rel.LogicalModifyView; import com.alibaba.polardbx.optimizer.core.rel.LogicalView; import com.alibaba.polardbx.optimizer.core.rel.MergeSort; import com.alibaba.polardbx.optimizer.core.rel.PhyQueryOperation; import com.alibaba.polardbx.optimizer.core.rel.PhyTableOperation; import com.alibaba.polardbx.optimizer.core.rel.PhyViewUnion; import com.alibaba.polardbx.optimizer.core.rel.ReplaceLogicalTableNameWithPhysicalTableNameVisitor; import 
com.alibaba.polardbx.optimizer.core.rel.dal.BaseDalOperation; import com.alibaba.polardbx.optimizer.core.rel.dal.LogicalDal; import com.alibaba.polardbx.optimizer.core.rel.dal.LogicalShow; import com.alibaba.polardbx.optimizer.core.rel.dal.PhyDal; import com.alibaba.polardbx.optimizer.core.rel.dal.PhyShow; import com.alibaba.polardbx.optimizer.core.rel.ddl.BaseDdlOperation; import com.alibaba.polardbx.optimizer.core.rel.ddl.LogicalCreateDatabase; import com.alibaba.polardbx.optimizer.core.rel.ddl.LogicalDropDatabase; import com.alibaba.polardbx.optimizer.core.rel.mpp.MppExchange; import com.alibaba.polardbx.optimizer.hint.operator.BaseHintOperator; import com.alibaba.polardbx.optimizer.hint.operator.HintCmdOperator; import com.alibaba.polardbx.optimizer.hint.operator.HintCmdOperator.CmdBean; import com.alibaba.polardbx.optimizer.hint.operator.HintPushOperator; import com.alibaba.polardbx.optimizer.hint.operator.HintPushdownOperator; import com.alibaba.polardbx.optimizer.hint.util.HintConverter; import com.alibaba.polardbx.optimizer.hint.util.HintConverter.HintCollection; import com.alibaba.polardbx.optimizer.hint.util.HintUtil; import com.alibaba.polardbx.optimizer.hint.visitor.HintCollectVisitor; import com.alibaba.polardbx.optimizer.hint.visitor.HintRelVisitor; import com.alibaba.polardbx.optimizer.hint.visitor.HintRelVisitor.HintTableFinder; import com.alibaba.polardbx.optimizer.hint.visitor.HintRelVisitor.PushdownHandlerVisitor; import com.alibaba.polardbx.optimizer.parse.FastsqlUtils; import com.alibaba.polardbx.optimizer.parse.custruct.FastSqlConstructUtils; import com.alibaba.polardbx.optimizer.parse.hint.SimpleHintParser; import com.alibaba.polardbx.optimizer.partition.PartitionInfo; import com.alibaba.polardbx.optimizer.partition.PartitionInfoManager; import com.alibaba.polardbx.optimizer.partition.pruning.PartPrunedResult; import com.alibaba.polardbx.optimizer.partition.pruning.PartitionPruneStep; import 
com.alibaba.polardbx.optimizer.partition.pruning.PartitionPruneStepBuilder; import com.alibaba.polardbx.optimizer.partition.pruning.PartitionPruner; import com.alibaba.polardbx.optimizer.partition.pruning.PartitionPrunerUtils; import com.alibaba.polardbx.optimizer.partition.pruning.PhysicalPartitionInfo; import com.alibaba.polardbx.optimizer.rule.TddlRuleManager; import com.alibaba.polardbx.optimizer.sharding.ConditionExtractor; import com.alibaba.polardbx.optimizer.sql.sql2rel.TddlSqlToRelConverter; import com.alibaba.polardbx.optimizer.utils.CalciteUtils; import com.alibaba.polardbx.optimizer.utils.PlannerUtils; import com.alibaba.polardbx.optimizer.utils.RelUtils; import com.alibaba.polardbx.optimizer.utils.RelUtils.TableProperties; import com.alibaba.polardbx.rule.TableRule; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import org.apache.calcite.config.CalciteConnectionConfig; import org.apache.calcite.config.CalciteConnectionConfigImpl; import org.apache.calcite.config.CalciteConnectionProperty; import org.apache.calcite.config.Lex; import org.apache.calcite.config.NullCollation; import org.apache.calcite.jdbc.CalciteSchema; import org.apache.calcite.plan.Contexts; import org.apache.calcite.plan.ConventionTraitDef; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.plan.RelOptUtil; import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.plan.hep.HepMatchOrder; import org.apache.calcite.plan.hep.HepPlanner; import org.apache.calcite.plan.hep.HepProgramBuilder; import org.apache.calcite.plan.volcano.VolcanoPlanner; import org.apache.calcite.prepare.CalciteCatalogReader; import org.apache.calcite.rel.RelCollationTraitDef; import org.apache.calcite.rel.RelFieldCollation; import org.apache.calcite.rel.RelFieldCollation.Direction; import org.apache.calcite.rel.RelFieldCollation.NullDirection; import 
org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.rel.core.Sort; import org.apache.calcite.rel.logical.LogicalAggregate; import org.apache.calcite.rel.logical.LogicalJoin; import org.apache.calcite.rel.logical.LogicalSort; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.SqlAggFunction; import org.apache.calcite.sql.SqlBasicCall; import org.apache.calcite.sql.SqlCall; import org.apache.calcite.sql.SqlCharStringLiteral; import org.apache.calcite.sql.SqlExplain; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlNumericLiteral; import org.apache.calcite.sql.SqlOperatorTable; import org.apache.calcite.sql.SqlSelect; import org.apache.calcite.sql.SqlSelectKeyword; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.parser.impl.SqlParserImpl; import org.apache.calcite.sql.util.SqlShuttle; import org.apache.calcite.sql.validate.SelectScope; import org.apache.calcite.sql.validate.SqlConformance; import org.apache.calcite.sql.validate.SqlConformanceEnum; import org.apache.calcite.sql.validate.SqlValidatorCatalogReader; import org.apache.calcite.sql.validate.SqlValidatorImpl; import org.apache.calcite.sql.validate.SqlValidatorUtil; import org.apache.calcite.sql2rel.SqlToRelConverter; import org.apache.calcite.sql2rel.StandardConvertletTable; import org.apache.calcite.tools.RelBuilder; 
    /**
     * Builds a standalone {@code HintPlanner} for the given schema: a MySQL-lexed parser
     * config, a SqlToRelConverter config, a Calcite catalog reader over the schema's root
     * schema, a fresh {@link VolcanoPlanner}/{@link RelOptCluster}, and a TDDL validator.
     *
     * @param schemaName logical schema the catalog reader and root schema are built for
     * @param ec         execution context; consulted for rule-counter enablement and
     *                   passed to {@code RootSchemaFactory}
     * @return a new {@code HintPlanner} wired to the freshly built validator/catalog/cluster
     */
    public static HintPlanner getInstance(String schemaName, ExecutionContext ec) {
        CalciteCatalogReader catalog;
        TddlTypeFactoryImpl typeFactory;
        RelOptCluster cluster;
        Config converterConfig;
        typeFactory = new TddlTypeFactoryImpl(TddlRelDataTypeSystemImpl.getInstance());
        // Parser: MySQL lexical conventions with the default Calcite parser factory.
        SqlParser.Config parserConfig = SqlParser.configBuilder()
            .setLex(Lex.MYSQL)
            .setParserFactory(SqlParserImpl.FACTORY)
            .build();
        // Converter: keep table access unconverted, never rewrite IN to semi-join
        // (threshold MAX_VALUE), and do not expand sub-queries during conversion.
        converterConfig = SqlToRelConverter.configBuilder()
            .withConvertTableAccess(false)
            .withInSubQueryThreshold(Integer.MAX_VALUE)
            .withExpand(false)
            .build();
        // Catalog reader must agree with the parser on case sensitivity.
        Properties properties = new Properties();
        properties.setProperty(CalciteConnectionProperty.CASE_SENSITIVE.camelName(),
            String.valueOf(parserConfig.caseSensitive()));
        CalciteConnectionConfig connectionConfig = new CalciteConnectionConfigImpl(properties);
        CalciteSchema calciteSchema = RootSchemaFactory.createRootSchema(schemaName, ec);
        catalog = new CalciteCatalogReader(calciteSchema,
            calciteSchema.path(schemaName),
            new TddlJavaTypeFactoryImpl(),
            connectionConfig);
        VolcanoPlanner planner = new VolcanoPlanner(DrdsRelOptCostImpl.FACTORY, Contexts.EMPTY_CONTEXT);
        if (ec.isEnableRuleCounter()) {
            planner.setRuleCounter();
        }
        // Reset trait defs before registering exactly the two we plan with:
        // calling convention and collation. Order: clear first, then add.
        planner.clearRelTraitDefs();
        planner.addRelTraitDef(ConventionTraitDef.INSTANCE);
        planner.addRelTraitDef(RelCollationTraitDef.INSTANCE);
        RexBuilder rexBuilder = new RexBuilder(typeFactory);
        cluster = RelOptCluster.create(planner, rexBuilder);
        cluster.setMetadataProvider(DrdsRelMetadataProvider.INSTANCE);
        TddlOperatorTable opTab = TddlOperatorTable.instance();
        TddlValidator validator = new TddlValidator(opTab, catalog, typeFactory, SqlConformanceEnum.DEFAULT);
        // NULLs sort low; identifiers and calls are left as written by the user.
        validator.setDefaultNullCollation(NullCollation.LOW);
        validator.setIdentifierExpansion(false);
        validator.setCallRewrite(false);
        return new HintPlanner(validator, catalog, cluster, converterConfig);
    }
singleLv = logicalViews.size() == 1; final boolean singleHint = relHintsMap.size() == 1; if (singleLv || singleHint) { final SqlNodeList hints = singleLv ? logicalViews.get(0).getHints() : relHintsMap.entrySet() .iterator() .next() .getValue(); if (null == hints || hints.size() <= 0) { return executionPlan; } final List<HintPushdownOperator> pushdowns = new LinkedList<>(); final HintCollection hintCollection = HintConverter.convertPushdown(hints, pushdowns, ec); if (hintCollection.pushdownOnly()) { return executionPlan; } } /** * handle push_xxx and add_xxx */ if (relResult instanceof LogicalView) { relResult = handleLogicalView((LogicalView) relResult, param, ec); return buildLogicalPlan(ast, relResult, executionPlan); } else if (relResult instanceof Gather || relResult instanceof MergeSort) { RelNode newRelNode = relResult.getInput(0); newRelNode = handleLogicalView((LogicalView) newRelNode, param, ec); if (newRelNode == relResult.getInput(0)) { return buildLogicalPlan(ast, relResult, executionPlan); } else { return buildLogicalPlan(ast, newRelNode, executionPlan); } } if (singleLv) { final SqlNodeList hints = logicalViews.get(0).getHints(); final List<HintPushdownOperator> pushdowns = new LinkedList<>(); HintCollection hintCollection = HintConverter.convertPushdown(hints, pushdowns, ec); if (hintCollection.pushOnly()) { handlePushHint(logicalViews.get(0), param, ec); return executionPlan; } else { relResult = handleLogicalView(logicalViews.get(0), param, ec); return buildLogicalPlan(ast, relResult, executionPlan); } } // // if (join != null && join.getHints().size() > 0) { // // handle join // // relResult = buildRelNode(join, join.getHints(), ast); // // // handle logicalView // for (LogicalView logicalView : logicalViews) { // handleLogicalView(logicalView, param); // } // // return buildLogicalPlan(relResult); // } /** * handle push_xxx */ for (LogicalView logicalView : logicalViews) { final SqlNodeList hints = logicalView.getHints(); if (hints == null || 
    /**
     * Builds a "direct" execution plan that ships the original AST verbatim to one or
     * more physical groups, bypassing logical optimization.
     *
     * <p>Group selection: with a JSON hint, only {@link DirectlyRouteCondition} is
     * accepted (its db-id list becomes the groups); otherwise the groups come from
     * {@code cmdBean}. For DML, modified tables are resolved — and GSI modification is
     * rejected — unless {@code PUSHDOWN_HINT_ON_GSI} is set.
     *
     * @throws TddlRuntimeException if a JSON hint resolves to any route-condition type
     *                              other than {@code DirectlyRouteCondition}
     */
    public ExecutionPlan direct(SqlNode ast, CmdBean cmdBean, HintCollection hintCollection,
                                Map<Integer, ParameterContext> param, String schemaName, ExecutionContext ec) {
        // init group
        List<String> finalGroups;
        if (cmdBean.jsonHint()) {
            // JSON HINT: re-parse the raw JSON into a route condition and absorb its extra cmds.
            RouteCondition rc = SimpleHintParser.convertHint2RouteCondition(schemaName,
                SimpleHintParser.TDDL_HINT_PREFIX + cmdBean.getJson() + SimpleHintParser.TDDL_HINT_END,
                param);
            cmdBean.getExtraCmd().putAll(rc.getExtraCmds());
            if (rc instanceof DirectlyRouteCondition) {
                hintCollection.routeCount++;
                final DirectlyRouteCondition drc = (DirectlyRouteCondition) rc;
                final List<String> groups = HintUtil.splitAndTrim(drc.getDbId(), ",");
                finalGroups = ImmutableList.copyOf(groups);
            } else {
                throw new TddlRuntimeException(ErrorCode.ERR_NOT_SUPPORT,
                    "unsupport HINT type " + rc.getClass().getName() + " for direct plan!");
            }
        } else {
            // NODE: groups were specified explicitly on the command bean.
            finalGroups = cmdBean.getGroups();
        }
        final SqlConverter converter = SqlConverter.getInstance(schemaName, ec);
        final RelOptCluster cluster = converter.createRelOptCluster(null);
        final List<Integer> dynamicParamIndex = PlannerUtils.getDynamicParamIndex(ast);
        // One PhyQueryOperation per target group, all sharing the same AST and params.
        List<RelNode> results = new ArrayList<>();
        for (String group : finalGroups) {
            PhyQueryOperation phyQueryOperation = new PhyQueryOperation(cluster,
                RelTraitSet.createEmpty(), ast, group, param, dynamicParamIndex);
            phyQueryOperation.setKind(ast.getKind());
            results.add(phyQueryOperation);
        } // end of for
        // Collect plan properties (and the set of modified tables) from the AST.
        final ExecutionPlanPropertiesVisitor logicalPlanPropertiesVisitor = new ExecutionPlanPropertiesVisitor();
        ast.accept(logicalPlanPropertiesVisitor);
        final List<String> modifiedTableNames = logicalPlanPropertiesVisitor.getModifiedTableNames();
        boolean pushdownHintOnGsi = false;
        if (cmdBean.getExtraCmd().containsKey(ConnectionProperties.PUSHDOWN_HINT_ON_GSI)) {
            pushdownHintOnGsi =
                Boolean.valueOf(cmdBean.getExtraCmd().get(ConnectionProperties.PUSHDOWN_HINT_ON_GSI).toString());
        }
        List<TableProperties> tableModified = null;
        if (ast.getKind().belongsTo(SqlKind.DML)) {
            if (!pushdownHintOnGsi) {
                // Default: refuse to modify GSI (or GSI-bearing primary) tables directly.
                tableModified = checkModifyGsiDirectly(schemaName, modifiedTableNames, ec);
            } else {
                tableModified = getModifiedTable(schemaName, modifiedTableNames, ec);
            }
        }
        final BitSet planProperties = logicalPlanPropertiesVisitor.appendPlanProperties(null, null, ec);
        final ExecutionPlan result = new ExecutionPlan(ast, wrapWithViewUnion(results), null, planProperties);
        result.setModifiedTables(tableModified);
        return result;
    }
partitionInfoManager.isNewPartDbTable(phyTable)) { final TableMeta table = schemaManager.getTable(phyTable); if (allowDuplicate || !currentTableNames.contains(table.getTableName())) { final T resultItem = checker.apply(phyTable, table); if (null != resultItem) { result.add(resultItem); currentTableNames.add(table.getTableName()); } } } else { untested.add(phyTable); } }); if (!untested.isEmpty()) { Collection<TableRule> tables = OptimizerContext.getContext(schemaName).getRuleManager().getTddlRule().getTables(); tables = tables.stream().filter(rule -> schemaManager.getTable(rule.getVirtualTbName()).isGsi() || schemaManager.getTable(rule.getVirtualTbName()).withGsi()).collect(Collectors.toSet()); Map<String, Set<String>> logicalTableMap = buildLogicalTableMap(tables); Set<String> partitionTables = partitionInfoManager.getPartitionTables(); if (!partitionTables.isEmpty()) { partitionTables = partitionTables.stream().filter(tableName -> schemaManager.getTable(tableName).isGsi() || schemaManager.getTable(tableName).withGsi()).collect(Collectors.toSet()); for (String partitionTableName : partitionTables) { PartitionInfo partitionInfo = partitionInfoManager.getPartitionInfo(partitionTableName); Map<String, Set<String>> topology = partitionInfo.getTopology(); logicalTableMap .computeIfAbsent(partitionTableName, s -> new TreeSet<>(String.CASE_INSENSITIVE_ORDER)); for (Set<String> phyTableNames : topology.values()) { logicalTableMap.get(partitionTableName).addAll(phyTableNames); } } } untested.forEach(phyTable -> logicalTableMap.entrySet() .stream() .filter(e -> e.getValue().contains(phyTable)) .findAny() .map(entry -> ec.getSchemaManager(schemaName).getTable(entry.getKey())) .filter(table -> allowDuplicate || !currentTableNames.contains(table.getTableName())) .map(table -> { final T resultItem = checker.apply(phyTable, table); if (null != resultItem) { result.add(resultItem); currentTableNames.add(table.getTableName()); } return table; })); } return result; } private 
Map<String, Set<String>> buildLogicalTableMap(Collection<TableRule> tables) { Map<String, Set<String>> logicalTableMap = TreeMaps.caseInsensitiveMap(); for (TableRule tableRule : tables) { final String virtualTbName = tableRule.getVirtualTbName(); logicalTableMap.computeIfAbsent(virtualTbName, s -> new TreeSet<>(String.CASE_INSENSITIVE_ORDER)); for (Set<String> actualTables : tableRule.getActualTopology().values()) { logicalTableMap.get(virtualTbName).addAll(actualTables); } } return logicalTableMap; } public List<TableProperties> checkModifyGsiDirectly(final String schemaName, final List<String> modifiedTableNames, ExecutionContext ec) { return getLogicalTable(schemaName, modifiedTableNames, (phyTable, tableMeta) -> { if (tableMeta.isGsi()) { throw new TddlRuntimeException(ErrorCode.ERR_GLOBAL_SECONDARY_INDEX_MODIFY_GSI_TABLE_DIRECTLY, phyTable); } else if (tableMeta.withGsi()) { throw new TddlRuntimeException(ErrorCode.ERR_GLOBAL_SECONDARY_INDEX_MODIFY_GSI_PRIMARY_TABLE_DIRECTLY, phyTable); } return new TableProperties(tableMeta.getTableName(), schemaName, ec); }, ec); } public List<TableProperties> getModifiedTable(final String schemaName, final List<String> modifiedTableNames, ExecutionContext ec) { final List<TableMeta> logicalTables = getLogicalTable(schemaName, modifiedTableNames, (phyTable, tableMeta) -> tableMeta, ec); return logicalTables.stream() .map(tableMeta -> new TableProperties(tableMeta.getTableName(), schemaName, ec)) .collect(Collectors.toList()); } private RelNode wrapWithViewUnion(List<RelNode> results) { RelNode relResult; if (results.size() > 1) { relResult = PhyViewUnion.create(results); } else if (results.size() == 1) { relResult = results.get(0); } else { throw new NotSupportException("None group remained to push sql!"); } return relResult; } public ExecutionPlan pushdown(ExecutionPlan executionPlan, SqlNode ast, CmdBean cmdBean, HintCollection hintCollection, Map<Integer, ParameterContext> param, Map<String, Object> extraCmd, 
ExecutionContext ec) { ExecutionPlan result = executionPlan; String schemaName = cmdBean.getSchemaName(); if (executionPlan.getPlan() instanceof BaseDdlOperation) { schemaName = ((BaseDdlOperation) executionPlan.getPlan()).getSchemaName(); } else if (executionPlan.getPlan() instanceof LogicalShow) { schemaName = ((LogicalShow) executionPlan.getPlan()).getSchemaName(); } if (cmdBean.doPushdown()) { final RelNode origin = executionPlan.getPlan(); SqlNode realAst = ast; if (ast.getKind() == SqlKind.EXPLAIN) { realAst = ((SqlExplain) ast).getExplicandum(); } final HintTableFinder tableFinder = new HintTableFinder(); RelOptUtil.go(tableFinder, origin); boolean noTable = tableFinder.noTable(); List<String> finalGroups = new ArrayList<>(); List<String> tableNames = new ArrayList<>(); RelNode pushed = null; if (!noTable) { if (origin instanceof LogicalModifyView) { // pushed down dml pushed = origin; tableNames = ((LogicalModifyView) origin).getTableNames(); } else if (origin instanceof LogicalModify) { LogicalView lv = new LogicalView(origin, tableFinder.getTable(), new SqlNodeList(SqlParserPos.ZERO)); lv.getPushDownOpt().setNativeSqlNode(realAst); lv.setSqlTemplate(realAst); final LogicalModifyView logicalModifyView = new LogicalModifyView(lv); logicalModifyView.push(origin); RelUtils.changeRowType(logicalModifyView, origin.getRowType()); tableNames = lv.getTableNames(); pushed = logicalModifyView; } else if (origin instanceof LogicalInsert) { LogicalInsert logicalInsert = (LogicalInsert) origin; tableNames = ImmutableList.of(Util.last(logicalInsert.getTable().getQualifiedName())); // If it's INSERT SELECT, change LogicalInsert to // LogicalModifyView. 
if (logicalInsert.isSourceSelect()) { LogicalView lv = new LogicalView(origin, tableFinder.getTable(), new SqlNodeList(SqlParserPos.ZERO)); LogicalModifyView lmv = new LogicalModifyView(lv); lmv.getPushDownOpt().setNativeSqlNode(realAst); lmv.setSqlTemplate(realAst); tableNames = lmv.getTableNames(); pushed = lmv; } else { pushed = origin; } } else if (origin instanceof BaseDdlOperation) { tableNames = Lists.newArrayList(((BaseDdlOperation) origin).getTableName()); finalGroups = new ArrayList<>(); } else { LogicalView lv = new LogicalView(origin, tableFinder.getTable(), new SqlNodeList(SqlParserPos.ZERO)); lv.getPushDownOpt().setNativeSqlNode(realAst); lv.setSqlTemplate(realAst); tableNames = lv.getTableNames(); pushed = lv; } // end of else } // end of if /** * build target table */ Map<String, List<List<String>>> targetTable = new LinkedHashMap<>(); if (cmdBean.jsonHint()) { /** * JSON HINT */ RouteCondition rc = SimpleHintParser.convertHint2RouteCondition(schemaName, SimpleHintParser.TDDL_HINT_PREFIX + cmdBean.getJson() + SimpleHintParser.TDDL_HINT_END, param); cmdBean.getExtraCmd().putAll(rc.getExtraCmds()); if (rc instanceof DirectlyRouteCondition) { hintCollection.routeCount++; finalGroups = handleDirectlyRouteCondition(tableNames, targetTable, (DirectlyRouteCondition) rc); } else if (rc instanceof FullRouteCondition) { hintCollection.routeCount++; finalGroups = handleFullRouteCondition(tableNames, cmdBean.getCondition(), param, targetTable, (FullRouteCondition) rc, schemaName, ec); } else if (rc instanceof RuleRouteCondition) { hintCollection.routeCount++; finalGroups = handleRuleRouteCondition(schemaName, tableNames, param, targetTable, (RuleRouteCondition) rc, ec); } else if (rc instanceof ExtraCmdRouteCondition) { // just add extra cmd return executionPlan; } } else { /** * CoronaDB HINT */ if (cmdBean.logicalTableSpecified()) { if (cmdBean.realTableSpecified() && !noTable) { final List<String> vtNames = HintUtil.splitAndTrim(cmdBean.getTable(), ","); 
final List<List<String>> realTables = new LinkedList<>(); for (List<String> rtNames : cmdBean.getRealTable()) { List<String> phyTables = HintUtil.mergeTableNames(tableNames, vtNames, rtNames); realTables.add(phyTables); } for (String group : cmdBean.getGroups()) { targetTable.put(group, realTables); } finalGroups = cmdBean.getGroups(); } else if (cmdBean.groupSpecified()) { Map<String, List<List<String>>> tmpTargetTable = null; if (noTable) { Map<String, Map<String, Comparative>> comparatives = BaseHintOperator.buildComparative(cmdBean.getTable(), cmdBean.getCondition(), new LinkedList<Integer>(), schemaName, ec); tableNames.addAll(comparatives.keySet()); /** * tmpTargetTable = buildTargetTables(tableNames, * comparatives, param, schemaName); */ tmpTargetTable = HintUtil.buildTargetTables(tableNames, comparatives, param, schemaName, ec); } else { Map<String, Map<String, Comparative>> comparatives = new HashMap<>(); for (String tableName : tableNames) { comparatives.put(tableName, new HashMap<String, Comparative>()); } /** * tmpTargetTable = buildTargetTables(tableNames, * comparatives, param, schemaName); */ tmpTargetTable = HintUtil.buildTargetTables(tableNames, comparatives, param, schemaName, ec); } for (String group : cmdBean.getGroups()) { if (tmpTargetTable.containsKey(group)) { targetTable.put(group, tmpTargetTable.get(group)); } } finalGroups.addAll(cmdBean.getGroups()); } else { finalGroups = HintUtil.allGroup(); if (!DbInfoManager.getInstance().isNewPartitionDb(schemaName)) { Map<String, Map<String, Comparative>> comparatives = BaseHintOperator.buildComparative(cmdBean.getTable(), cmdBean.getCondition(), new LinkedList<Integer>(), schemaName, ec); if (noTable) { tableNames.addAll(comparatives.keySet()); /** * targetTable = buildTargetTables(tableNames, * comparatives, param, schemaName); } else { * targetTable = buildTargetTables(tableNames, * comparatives, param, * OptimizerContext.getContext().getSchemaName()); */ targetTable = 
HintUtil.buildTargetTables(tableNames, comparatives, param, schemaName, ec); } else { targetTable = HintUtil.buildTargetTables(tableNames, comparatives, param, schemaName, ec); } } else { Map<String, PartitionPruneStep> pruneStepMap = BaseHintOperator.buildPartitionPruneStepMap(cmdBean.getTable(), cmdBean.getCondition(), new LinkedList<Integer>(), schemaName, ec); if (noTable) { tableNames.addAll(pruneStepMap.keySet()); /** * targetTable = buildTargetTables(tableNames, * comparatives, param, schemaName); } else { * targetTable = buildTargetTables(tableNames, * comparatives, param, * OptimizerContext.getContext().getSchemaName()); */ targetTable = HintUtil.buildTargetTablesByPruneStepMap(tableNames, pruneStepMap, schemaName, ec); } else { targetTable = HintUtil.buildTargetTablesByPruneStepMap(tableNames, pruneStepMap, schemaName, ec); } } } } else { finalGroups = cmdBean.getGroups(); if (!noTable) { finalGroups = cmdBean.getGroups(); final TddlRuleManager rule = OptimizerContext.getContext(schemaName).getRuleManager(); if (tableNames.size() == 1 && rule.isBroadCast(tableNames.get(0))) { String logTbName = tableNames.get(0); if (!DbInfoManager.getInstance().isNewPartitionDb(schemaName)) { // broadcast table only final String physicalTableName = rule.getTableRule(logTbName).getTbNamePattern(); for (String group : cmdBean.getGroups()) { targetTable.put(group, ImmutableList.of(Lists.newArrayList(physicalTableName))); } } else { // broadcast table only Map<String, List<PhysicalPartitionInfo>> phyTblInfos = rule.getPartitionInfoManager().getPartitionInfo(logTbName) .getPhysicalPartitionTopology(null); for (String group : cmdBean.getGroups()) { String phyTblName = phyTblInfos.get(group).get(0).getPhyTable(); targetTable.put(group, ImmutableList.of(Lists.newArrayList(phyTblName))); } } } else { final Map<String, List<List<String>>> tmpTargetTable = fullTableScan(tableNames, schemaName, ec); for (String group : cmdBean.getGroups()) { if (tmpTargetTable.containsKey(group)) { 
targetTable.put(group, tmpTargetTable.get(group)); } } } } } } // end of else if (noTable || origin instanceof BaseDdlOperation) { // DDL or SQL without table (I.e. DirectTableOperation) result = handleSqlWithoutTable(executionPlan, ast, param, origin, finalGroups, targetTable, tableNames, schemaName, ec); } else if (ast.getKind() == SqlKind.EXPLAIN) { // pushdown explain if (pushed instanceof LogicalView) { final LogicalView lv = (LogicalView) pushed; lv.setTargetTables(targetTable); ExecutionContext executionContext = new ExecutionContext(); if (schemaName != null) { executionContext.setSchemaName(schemaName); } executionContext.setParams(new Parameters(param, false)); executionContext.setExtraCmds(extraCmd); final List<RelNode> inputs = lv.getInput(executionContext); final List<RelNode> results = new ArrayList<>(); for (RelNode input : inputs) { PhyTableOperation phyTableOperation = (PhyTableOperation) input; SqlNode oriSqlNode = phyTableOperation.getNativeSqlNode(); SqlExplain explainSqlNode = (SqlExplain) ast.clone(SqlParserPos.ZERO); explainSqlNode.setExplicandum(oriSqlNode); PhyQueryOperation phyQueryOperation = new PhyQueryOperation(phyTableOperation.getCluster(), phyTableOperation.getTraitSet(), explainSqlNode, phyTableOperation.getDbIndex(), phyTableOperation.getParam()); results.add(phyQueryOperation); } result = new ExecutionPlan(ast, wrapWithViewUnion(results), executionPlan.getCursorMeta()); } else { throw new NotSupportException("Not support HINT + EXPLAIN for sql type other than SELECT"); } } else { boolean dmlPushed = false; if (targetTable != null && targetTable.isEmpty()) { //目标表个数为0,运行容易产生歧义 throw new TddlRuntimeException(ERR_TABLE_EMPTY_WITH_HINT); } // DML, DQL if (pushed instanceof LogicalModifyView) { ((LogicalModifyView) pushed).setTargetTables(targetTable); dmlPushed = true; } else if (pushed instanceof LogicalInsert) { ((LogicalInsert) pushed).setTargetTables(targetTable); dmlPushed = true; } else { ((LogicalView) 
pushed).setTargetTables(targetTable); } /** * add MergeSort/Gather */ RelNode relNode = pushed; if (pushed instanceof LogicalInsert) { // do nothing } else if (targetTable.size() > 1 || (targetTable.size() == 1 && targetTable.entrySet().iterator().next().getValue().size() > 1)) { if (origin instanceof Sort) { Sort sort = (Sort) origin; if (null == sort.getCollation() || sort.getCollation().getFieldCollations().isEmpty()) { relNode = Gather.create(relNode); } else { relNode = MergeSort.create(relNode, sort.getCollation(), null, null); } } else if (origin instanceof MppExchange) { relNode = MppExchange.create(relNode, ((MppExchange) origin).getCollation(), ((MppExchange) origin).getDistribution()); } else if (pushed instanceof LogicalModifyView) { relNode = AffectedRowsSum.create(pushed, true); } else { relNode = Gather.create(relNode); } } result = new ExecutionPlan(ast, relNode, executionPlan.getCursorMeta()); if (dmlPushed) { final List<String> phyTables = targetTable.values().stream().flatMap( value -> value.stream().flatMap(Collection::stream)).collect(Collectors.toList()); final List<TableProperties> modifiedTable = getModifiedTable(schemaName, phyTables, ec); result.setModifiedTables(modifiedTable); } } } // end of if /** * keep original plan properties */ result.setPlanProperties(executionPlan.getPlanProperties()); result.setUsePostPlanner(false); return result; } public static Map<String, List<List<String>>> fullTableScan(List<String> tableNames, String schemaName, ExecutionContext ec) { PartitionInfoManager partitionInfoManager = ec.getSchemaManager(schemaName).getTddlRuleManager().getPartitionInfoManager(); if (tableNames.stream().allMatch(x -> partitionInfoManager.isNewPartDbTable(x))) { List<PartPrunedResult> allTbPrunedResults = new ArrayList<>(); for (int i = 0; i < tableNames.size(); i++) { PartitionPruneStep pruneStepInfo = PartitionPruneStepBuilder.generateFullScanPruneStepInfo(schemaName, tableNames.get(i), ec); PartPrunedResult tbPrunedResult = 
PartitionPruner.doPruningByStepInfo(pruneStepInfo, ec); allTbPrunedResults.add(tbPrunedResult); } return PartitionPrunerUtils.buildTargetTablesByPartPrunedResults(allTbPrunedResults); } else { Map<String, List<List<String>>> tmpTargetTable; Map<String, Map<String, Comparative>> comparatives = new HashMap<>(); for (String tableName : tableNames) { comparatives.put(tableName, new HashMap<String, Comparative>()); } tmpTargetTable = HintUtil.buildTargetTables(tableNames, comparatives, ImmutableMap.<Integer, ParameterContext>of(), schemaName, ec); return tmpTargetTable; } } public ExecutionPlan handleSqlWithoutTable(ExecutionPlan executionPlan, SqlNode ast, Map<Integer, ParameterContext> param, RelNode origin, List<String> finalGroups, Map<String, List<List<String>>> targetTable, List<String> tableNames, String schemaName, ExecutionContext ec) { if (finalGroups.size() <= 0) { // no group specified throw new NotSupportException("execute node/scan HINT without group or table specified"); } boolean single = (finalGroups.size() == 1); if (targetTable.isEmpty()) { for (String group : finalGroups) { targetTable.put(group, new LinkedList<List<String>>()); } } else { single &= PlannerUtils.isSingle(targetTable); } if (origin instanceof BaseDdlOperation && !(origin instanceof LogicalCreateDatabase || origin instanceof LogicalDropDatabase)) { ((BaseDdlOperation) origin).setTargetTablesHintCache(targetTable); return executionPlan.copy(origin); } if (origin instanceof BroadcastTableModify) { final DirectTableOperation directTableOperation = ((BroadcastTableModify) origin).getDirectTableOperation(); final List<Integer> dynamicParamIndex = PlannerUtils.getDynamicParamIndex(ast); SqlNode clonedAst = ast.clone(SqlParserPos.ZERO); clonedAst = clonedAst.accept(new ReplaceLogicalTableNameWithPhysicalTableNameVisitor(schemaName, ec)); List<RelNode> results = new ArrayList<>(); for (String group : finalGroups) { PhyQueryOperation phyQueryOperation = new 
PhyQueryOperation(directTableOperation.getCluster(), directTableOperation.getTraitSet(), clonedAst, group, param, dynamicParamIndex); phyQueryOperation.setKind(ast.getKind()); results.add(phyQueryOperation); } // end of for final ExecutionPlan resultPlan = new ExecutionPlan(ast, wrapWithViewUnion(results), executionPlan.getCursorMeta()); // for acquire MDL resultPlan.setModifiedTables(executionPlan.getModifiedTables()); return resultPlan; } if (single) { // only one group specified if (origin instanceof LogicalShow) { // LogicalShow show to PhyShow final LogicalShow logicalShow = (LogicalShow) origin; final PhyShow phyShow = PhyShow.create(logicalShow, targetTable, tableNames); phyShow.setTableNames(tableNames); return new ExecutionPlan(ast, phyShow, executionPlan.getCursorMeta()); } else if (origin instanceof BaseDalOperation) { targetTable.put(finalGroups.get(0), new LinkedList<List<String>>()); ((BaseDalOperation) origin).setTargetTable(targetTable); return new ExecutionPlan(ast, origin, executionPlan.getCursorMeta()); } else if (origin instanceof DirectTableOperation) { return executionPlan; } else { SqlNode clonedAst = ast.clone(SqlParserPos.ZERO); clonedAst = clonedAst.accept(new ReplaceLogicalTableNameWithPhysicalTableNameVisitor(schemaName, ec)); PhyTableOperation phyTableOperation = buildPhyTableOperation(executionPlan, clonedAst, param, finalGroups.get(0)); return new ExecutionPlan(ast, phyTableOperation, executionPlan.getCursorMeta()); } } else { // multi group specified List<RelNode> resulRel = new LinkedList<>(); if (origin instanceof LogicalShow) { // LogicalShow show to PhyShow final LogicalShow logicalShow = (LogicalShow) origin; PhyShow phyShow = PhyShow.create(logicalShow, targetTable, tableNames); resulRel.add(phyShow); } else if (origin instanceof LogicalDal) { // LogicalDal show to PhyDal final LogicalDal logicalDal = (LogicalDal) origin; PhyDal phyDal = PhyDal.create(logicalDal, targetTable, tableNames); phyDal.setTargetTable(targetTable); 
resulRel.add(phyDal); } else if (origin instanceof BaseDalOperation) { ((BaseDalOperation) origin).setTargetTable(targetTable); if (origin instanceof PhyShow) { ((PhyShow) origin).setTableNames(tableNames); } resulRel.add(origin); } else { if (SqlKind.OPTIMIZE_TABLE == ast.getKind() && origin instanceof Gather) { final PhyDal optimizeTable = (PhyDal) origin.getInputs().get(0); final Map<String, List<List<String>>> optimizeTargets = new HashMap<>(targetTable.size()); targetTable.forEach((group, phyTables) -> optimizeTargets.put(group, phyTables.stream() .flatMap(phyTable -> phyTable.stream().map(ImmutableList::of)) .collect(Collectors.toList()))); optimizeTable.setTargetTable(optimizeTargets); resulRel.add(optimizeTable); } else { SqlNode clonedAst = ast.clone(SqlParserPos.ZERO); clonedAst = clonedAst.accept(new ReplaceLogicalTableNameWithPhysicalTableNameVisitor(schemaName, ec)); for (String group : finalGroups) { PhyTableOperation phyTableOperation = buildPhyTableOperation(executionPlan, clonedAst, param, group); resulRel.add(phyTableOperation); } return new ExecutionPlan(ast, PhyViewUnion.create(resulRel), executionPlan.getCursorMeta()); } } return new ExecutionPlan(ast, PhyViewUnion.create(resulRel), executionPlan.getCursorMeta()); } // end of else } private List<String> handleRuleRouteCondition(String schemaName, List<String> tableNames, Map<Integer, ParameterContext> param, Map<String, List<List<String>>> targetTable, RuleRouteCondition rrc, ExecutionContext ec) { final boolean noTable = (null == tableNames || tableNames.size() == 0); List<String> finalGroups = new LinkedList<>(); if (noTable) { finalGroups = HintUtil.allGroup(); } else { Map<String, Map<String, Comparative>> comparatives = new HashMap<>(); List<String> vtNames = HintUtil.splitAndTrim(rrc.getVirtualTableName(), ","); for (String vTable : vtNames) { comparatives.put(vTable, rrc.getParameters()); } /** * targetTable.putAll(buildTargetTables(tableNames, comparatives, * param, 
OptimizerContext.getContext().getSchemaName())); */ targetTable.putAll(HintUtil.buildTargetTables(tableNames, comparatives, param, schemaName, ec)); finalGroups.addAll(targetTable.keySet()); } return finalGroups; } private List<String> handleFullRouteCondition(List<String> tableNames, String condition, Map<Integer, ParameterContext> param, Map<String, List<List<String>>> targetTable, FullRouteCondition frc, String schemaName, ExecutionContext ec) { final boolean noTable = (null == tableNames || tableNames.size() == 0); List<String> finalGroups = new LinkedList<>(); Map<String, Map<String, Comparative>> comparatives = BaseHintOperator.buildComparative(frc.getVirtualTableName(), condition, new LinkedList<Integer>(), schemaName, ec); if (noTable) { if (TStringUtil.isNotBlank(frc.getVirtualTableName())) { tableNames.addAll(HintUtil.splitAndTrim(frc.getVirtualTableName(), ",")); /** * targetTable.putAll(buildTargetTables(tableNames, * comparatives, param, schemaName)); */ targetTable.putAll(HintUtil.buildTargetTables(tableNames, comparatives, param, schemaName, ec)); finalGroups = ImmutableList.copyOf(targetTable.keySet()); } else { finalGroups = HintUtil.allGroup(); } } else { /** * targetTable.putAll(buildTargetTables(tableNames, comparatives, * param, schemaName)); */ targetTable.putAll(HintUtil.buildTargetTables(tableNames, comparatives, param, schemaName, ec)); finalGroups.addAll(targetTable.keySet()); } return finalGroups; } private List<String> handleDirectlyRouteCondition(List<String> tableNames, Map<String, List<List<String>>> targetTable, DirectlyRouteCondition rc) { List<String> finalGroups = new LinkedList<>(); final DirectlyRouteCondition drc = rc; final List<String> groups = HintUtil.splitAndTrim(drc.getDbId(), ","); final boolean noTable = (null == tableNames || tableNames.size() == 0); if (noTable) { finalGroups = ImmutableList.copyOf(groups); } else { if (null == drc.getTables() || drc.getTables().size() <= 0) { final List<String> vtName = 
HintUtil.splitAndTrim(drc.getVirtualTableName(), ","); if (null != vtName && vtName.size() > 0) { // should never be here for (String group : groups) { targetTable.put(group, ImmutableList.<List<String>>of(ImmutableList.copyOf(vtName))); } } else { for (String group : groups) { targetTable.put(group, ImmutableList.of(tableNames)); } } } else { final List<String> vtNames = HintUtil.splitAndTrim(drc.getVirtualTableName(), ","); for (String group : groups) { List<List<String>> tables = new LinkedList<>(); for (String relTable : drc.getTables()) { List<String> rtNames = HintUtil.splitAndTrim(relTable, ","); List<String> phyTables = HintUtil.mergeTableNames(tableNames, vtNames, rtNames); tables.add(phyTables); } // end of for targetTable.put(group, tables); } // end of for } // end of else finalGroups.addAll(targetTable.keySet()); } // end of else return finalGroups; } private PhyTableOperation buildPhyTableOperation(ExecutionPlan executionPlan, SqlNode ast, Map<Integer, ParameterContext> param, String group) { RelNode plan = executionPlan.getPlan(); PhyTableOperation phyTableOperation = new PhyTableOperation(plan.getCluster(), plan.getTraitSet(), plan.getRowType(), null, plan); phyTableOperation.setDbIndex(group); phyTableOperation.setParam(param); String sql = RelUtils.toNativeSql(ast, DbType.MYSQL); phyTableOperation.setSqlTemplate(sql); return phyTableOperation; } private RelNode handlePushdown(RelNode originPlan, Map<RelNode, RelNode> relRootMap, Map<Integer, ParameterContext> param, ExecutionContext ec) { PushdownHandlerVisitor pushdownHandler = new PushdownHandlerVisitor(MapUtils.invertMap(relRootMap), param, ec); return originPlan.accept(pushdownHandler); } private ExecutionPlan buildLogicalPlan(SqlNode ast, RelNode relResult, ExecutionPlan defaultRel) { if (null == relResult) { return defaultRel; } String tableName = "OUTPUT"; CursorMeta oriMeta = defaultRel.getCursorMeta(); if (oriMeta.getColumns() != null && oriMeta.getColumns().size() > 0) { tableName = 
oriMeta.getColumnMeta(0).getTableName(); } RelMetadataQuery mq = relResult.getCluster().getMetadataQuery(); final CursorMeta cursorMeta = CursorMeta.build( CalciteUtils.buildColumnMeta(mq.getOriginalRowType(relResult), tableName)); return new ExecutionPlan(ast, relResult, cursorMeta); } public RelNode handleLogicalView(LogicalView logicalView, Map<Integer, ParameterContext> param, ExecutionContext ec) { final SqlNodeList hints = logicalView.getHints(); if (null == hints || hints.size() <= 0) { return logicalView; } final List<HintPushdownOperator> pushdowns = new LinkedList<>(); HintCollection hintCollection = HintConverter.convertPushdown(hints, pushdowns, ec); if (hintCollection.pushdownOnly()) { return logicalView; } return handleLogicalView(logicalView, param, hints, pushdowns, ec); } private RelNode handleLogicalView(LogicalView logicalView, Map<Integer, ParameterContext> param, SqlNodeList hints, List<HintPushdownOperator> pushdowns, ExecutionContext ec) { SqlSelect nativeSqlNode = (SqlSelect) logicalView.getNativeSqlNode(); nativeSqlNode = updateSqlNode(nativeSqlNode, hints, ec); // 补上 select * 和 count(*) nativeSqlNode = appendStarForSelect(nativeSqlNode); SqlSelect originNativeSql = (SqlSelect) nativeSqlNode.clone(SqlParserPos.ZERO); // validate SqlConverter converter = SqlConverter.getInstance(logicalView.getSchemaName(), ec); SqlNode validatedNode = converter.validate(nativeSqlNode); // logical plan RelNode rel = converter.toRel(validatedNode); // optimize logical plan with basic rules rel = optimizeLogicalPlan(rel); logicalView.getPushDownOpt().setNativeSqlNode(nativeSqlNode); logicalView.getPushDownOpt().getBuilder().clear(); logicalView.getPushDownOpt().getBuilder().push(rel); logicalView.getPushDownOpt().setPlainRowType(rel.getRowType()); // 重建 LogicalView 上层节点 RelNode relResult = buildRelNode(logicalView, hints, nativeSqlNode, ec); logicalView.getPushDownOpt().setNativeSqlNode(originNativeSql); logicalView.setSqlTemplate(originNativeSql); if 
(pushdowns.size() <= 0 || TStringUtil.isBlank(pushdowns.get(0).condition)) { rebuildComparative(logicalView, param, ec); } return relResult; } protected ExecutionPlan getOriginLogicalPlan(SqlNode ast, PlannerContext plannerContext) { return Planner.getInstance().getPlan(ast, plannerContext); } public LogicalView handlePushHint(LogicalView logicalView, Map<Integer, ParameterContext> param, ExecutionContext ec) { final SqlSelect nativeSqlNode = (SqlSelect) logicalView.getNativeSqlNode(); // update sql node SqlSelect newSqlNode = updateSqlNode(nativeSqlNode, logicalView.getHints(), ec); if (nativeSqlNode == newSqlNode) { return logicalView; } // append star in 'select *' and 'count(*)' newSqlNode = appendStarForSelect(newSqlNode); // validate SqlConverter converter = SqlConverter.getInstance(logicalView.getSchemaName(), plannerContext.getExecutionContext()); SqlNode validatedNode = converter.validate(newSqlNode); // logical plan RelNode rel = converter.toRel(validatedNode); // optimize logical plan with basic rules rel = optimizeLogicalPlan(rel); logicalView.setSqlTemplate(newSqlNode); logicalView.getPushDownOpt().setNativeSqlNode(newSqlNode); logicalView.getPushDownOpt().getBuilder().clear(); logicalView.getPushDownOpt().getBuilder().push(rel); logicalView.getPushDownOpt().setPlainRowType(rel.getRowType()); rebuildComparative(logicalView, param, ec); return logicalView; } public static void rebuildComparative(LogicalView logicalView, Map<Integer, ParameterContext> param, ExecutionContext ec) { // calculate target tables Map<String, Map<String, Comparative>> comparative = new HashMap<>(); ConditionExtractor.partitioningConditionFrom(logicalView).extract().allCondition(comparative, null, ec); logicalView.setComparativeHintCache(comparative); } public RelNode optimizeLogicalPlan(RelNode rel) { HepProgramBuilder hepPgmBuilder = getHepProgramBuilder(); final HepPlanner planner = new HepPlanner(hepPgmBuilder.build()); planner.setRoot(rel); rel = planner.findBestExp(); 
return rel; } public HepProgramBuilder getHepProgramBuilder() { HepProgramBuilder hepPgmBuilder = new HepProgramBuilder(); hepPgmBuilder.addMatchOrder(HepMatchOrder.ARBITRARY); hepPgmBuilder.addGroupBegin(); hepPgmBuilder.addRuleCollection(RuleToUse.SQL_REWRITE_CALCITE_RULE_PRE); hepPgmBuilder.addGroupEnd(); return hepPgmBuilder; } private List<Integer> getRefList(SqlNodeList exprList, List<RelDataTypeField> inputFields) { List<Integer> refList = new ArrayList<>(); if (null == exprList) { return refList; } for (SqlNode expr : exprList) { final String exprName = Util.last(((SqlIdentifier) expr).names); boolean gotIt = false; for (int index = 0; index < inputFields.size(); index++) { if (TStringUtil.equalsIgnoreCase(inputFields.get(index).getName(), exprName)) { refList.add(index); gotIt = true; break; } } // end of for if (!gotIt) { // add wrong value,trigger exception refList.add(-1); } } // end of for return refList; } private SqlSelect appendStarForSelect(SqlSelect select) { if (null == select) { return select; } return (SqlSelect) select.accept(new StartAppender()); } private static class StartAppender extends SqlShuttle { @Override public SqlNode visit(SqlCall call) { SqlNode result = super.visit(call); if (result instanceof SqlSelect) { SqlSelect select = (SqlSelect) result; if (null == select.getSelectList()) { SqlIdentifier star = SqlIdentifier.star(SqlParserPos.ZERO); select.setSelectList(new SqlNodeList(ImmutableList.<SqlNode>of(star), SqlParserPos.ZERO)); } } else if (result instanceof SqlBasicCall) { boolean countStar = result.getKind() == SqlKind.COUNT && (null == call.getOperandList() || call.getOperandList().size() <= 0); if (countStar) { SqlIdentifier star = SqlIdentifier.star(SqlParserPos.ZERO); return new SqlBasicCall(call.getOperator(), new SqlNode[] {star}, call.getParserPosition(), call.isExpanded(), call.getFunctionQuantifier()); } } return result; } } private RelBuilder createBuilder(RelNode relNode) { return 
RelBuilder.proto(Contexts.EMPTY_CONTEXT).create(relNode.getCluster(), SqlConverter.getInstance(PlannerContext.getPlannerContext(relNode).getSchemaName(), PlannerContext.getPlannerContext(relNode).getExecutionContext()).getCatalog()); } private int getNumberValue(SqlNode sqlNode) { return ((SqlNumericLiteral) sqlNode).intValue(false); } private RelNode buildRelNode(LogicalView relNode, SqlNodeList hints, SqlSelect ast, ExecutionContext ec) { if (hints.size() <= 0) { return relNode; } RelBuilder builder = createBuilder(relNode); builder.push(relNode); for (SqlNode op : hints) { SqlBasicCall hintOp = (SqlBasicCall) op; String name = hintOp.getOperator().getName(); Map<String, SqlNode> argMap = getArgMap(hintOp); if (TStringUtil.equalsIgnoreCase(name, "add_ms")) { final String sql = "SELECT * FROM DUAL ORDER BY " + getStringValue(argMap.get(String.valueOf(0))); final MySqlSelectQueryBlock query = parseQuery(sql); final SqlNodeList orderBy = FastSqlConstructUtils.constructOrderBy(query.getOrderBy(), null, ec); final SqlNodeList limit = FastSqlConstructUtils.constructLimit(query.getLimit(), null, ec); final List<RexNode> orderByExps = gatherOrderExprs(relNode, orderBy, builder.getRexBuilder(), ast); int offset = -1; int fetch = -1; if (limit != null) { offset = getNumberValue(limit.get(0)); fetch = getNumberValue(limit.get(1)); } builder.sortLimit(offset, fetch, orderByExps); LogicalSort logicalSort = (LogicalSort) builder.build(); builder.push(MergeSort.create(logicalSort.getInput(), logicalSort.getCollation(), logicalSort.offset, logicalSort.fetch)); } else if (TStringUtil.equalsIgnoreCase(name, "add_agg")) { String sql = "SELECT "; if (argMap.containsKey("agg")) { sql += ((SqlLiteral) argMap.get("agg")).toValue(); } else { sql += "*"; } sql += " FROM DUAL "; if (argMap.containsKey("group_by")) { sql += " GROUP BY " + getStringValue(argMap.get("group_by")); } final MySqlSelectQueryBlock query = parseQuery(sql); final SqlNodeList aggCall = 
FastSqlConstructUtils.constructSelectList(query.getSelectList(), null, ec); final SqlNodeList groupBy = FastSqlConstructUtils.constructGroupBy(query.getGroupBy(), null, ec); final RelNode input = builder.peek(); final List<RelDataTypeField> inputFields = input.getRowType().getFieldList(); // get agg call list final List<AggregateCall> aggCallList = buildAggCall(builder, aggCall, inputFields); // get group field list final List<Integer> groupRefList = getRefList(groupBy, inputFields); final ImmutableBitSet groupSet = ImmutableBitSet.of(groupRefList); final List<ImmutableBitSet> groupSets = ImmutableList.of(groupSet); builder.push(LogicalAggregate.create(builder.build(), groupSet, groupSets, aggCallList)); } else if (TStringUtil.equalsIgnoreCase(name, "add_ts")) { final String sql = "SELECT * FROM DUAL ORDER BY " + getStringValue(argMap.get(String.valueOf(0))); final MySqlSelectQueryBlock query = parseQuery(sql); final SqlNodeList orderBy = FastSqlConstructUtils.constructOrderBy(query.getOrderBy(), null, ec); final SqlNodeList limit = FastSqlConstructUtils.constructLimit(query.getLimit(), null, ec); final List<RexNode> orderByExps = gatherOrderExprs(relNode, orderBy, builder.getRexBuilder(), ast); int offset = -1; int fetch = -1; if (limit != null) { offset = getNumberValue(limit.get(0)); fetch = getNumberValue(limit.get(1)); } builder.sortLimit(offset, fetch, orderByExps); } else if (TStringUtil.equalsIgnoreCase(name, "add_lmt")) { final String sql = "SELECT * FROM DUAL LIMIT " + getStringValue(argMap.get(String.valueOf(0))); final MySqlSelectQueryBlock query = parseQuery(sql); final SqlNodeList limit = FastSqlConstructUtils.constructLimit(query.getLimit(), null, ec); int offset = -1; int fetch = -1; if (limit != null) { offset = getNumberValue(limit.get(0)); fetch = getNumberValue(limit.get(1)); } builder.limit(offset, fetch); } else if (TStringUtil.equalsIgnoreCase(name, "add_un")) { builder.push(Gather.create(builder.build())); } else if 
(TStringUtil.equalsIgnoreCase(name, "add_ft")) { final String sql = "SELECT * FROM DUAL WHERE " + getStringValue(argMap.get(String.valueOf(0))); final MySqlSelectQueryBlock query = parseQuery(sql); final SqlNode where = FastSqlConstructUtils.constructWhere(query.getWhere(), null, ec); final SqlValidatorImpl validatorForScope = getValidatorForScope(ast); final Blackboard bb = createBlackboard(validatorForScope.getSelectScope(ast), null, true); this.hintBlackboard.beginSelect(); try { // 初始化 where 条件的 scope convertFrom(bb, ast.getFrom()); } finally { this.hintBlackboard.endFrom(); } final RexNode convertedWhere = bb.convertExpression(where); builder.filter(convertedWhere); } else if (TStringUtil.equalsIgnoreCase(name, "add_pj")) { final String sql = "SELECT " + getStringValue(argMap.get(String.valueOf(0))) + " FROM DUAL"; final MySqlSelectQueryBlock query = parseQuery(sql); final SqlNodeList selectList = FastSqlConstructUtils.constructSelectList(query.getSelectList(), null, ec); final SqlValidatorImpl validatorForScope = getValidatorForScope(ast); final Blackboard bb = createBlackboard(validatorForScope.getSelectScope(ast), null, true); this.hintBlackboard.beginSelect(); try { // 初始化 scope convertFrom(bb, ast.getFrom()); } finally { this.hintBlackboard.endFrom(); } List<String> fieldNames = new ArrayList<>(); List<String> originalNames = new ArrayList<>(); final List<RexNode> exprs = new ArrayList<>(); final Collection<String> aliases = new TreeSet<>(); // Project select clause. 
int i = -1; for (SqlNode expr : selectList) { ++i; exprs.add(bb.convertExpression(expr)); fieldNames.add(deriveAlias(expr, aliases, i)); originalNames.add(deriveOriginalAlias(expr, i)); } fieldNames = SqlValidatorUtil.uniquify(fieldNames, catalogReader.nameMatcher().isCaseSensitive()); builder.push(RelOptUtil.createProject(builder.build(), exprs, fieldNames, originalNames)); } // end of if } // end of for return builder.build(); } private String getStringValue(SqlNode sqlNode) { if (sqlNode instanceof SqlCharStringLiteral) { return ((SqlCharStringLiteral) sqlNode).getNlsString().getValue(); } return sqlNode.toString(); } private List<AggregateCall> buildAggCall(RelBuilder builder, SqlNodeList aggs, List<RelDataTypeField> inputFields) { List<AggregateCall> aggCallList = new ArrayList<>(); for (SqlNode agg : aggs) { String aggName = agg.toString(); SqlCall aggCall = (SqlCall) agg; if (agg.getKind() == SqlKind.AS) { List<SqlNode> operandList = ((SqlBasicCall) agg).getOperandList(); aggCall = (SqlCall) operandList.get(0); aggName = operandList.get(1).toString(); } final SqlAggFunction aggFuncOp = (SqlAggFunction) aggCall.getOperator(); boolean distinct = false; SqlLiteral quantifier = aggCall.getFunctionQuantifier(); if ((null != quantifier) && (quantifier.getValue() == SqlSelectKeyword.DISTINCT)) { distinct = true; } final List<Integer> refList = new ArrayList<>(); final List<RelDataType> operandTypeList = new ArrayList<>(); for (SqlNode expr : aggCall.getOperandList()) { final String exprName = Util.last(((SqlIdentifier) expr).names); boolean gotIt = false; for (int index = 0; index < inputFields.size(); index++) { if (TStringUtil.equalsIgnoreCase(inputFields.get(index).getName(), exprName)) { refList.add(index); operandTypeList.add(inputFields.get(index).getType()); gotIt = true; break; } } // end of for if (!gotIt) { // add wrong value,trigger exception refList.add(-1); } } // end of for final List<Integer> argList = getRefList(new 
SqlNodeList(aggCall.getOperandList(), SqlParserPos.ZERO), inputFields); final RelDataType type = aggFuncOp.inferReturnType(builder.getTypeFactory(), operandTypeList); aggCallList.add(AggregateCall.create(aggFuncOp, distinct, false, argList, -1, type, aggName)); } return aggCallList; } private List<RexNode> gatherOrderExprs(LogicalView logicalView, SqlNodeList orderBy, RexBuilder rexBuilder, SqlSelect ast) { SqlValidatorImpl validator = getValidatorForScope(ast); final List<RexNode> orderByExps = new ArrayList<>(); final List<SqlNode> orderExprList = new ArrayList<>(); for (SqlNode orderByItem : orderBy) { RelFieldCollation relFieldCollation = buildOrderItem((SqlSelect) logicalView.getNativeSqlNode(), orderByItem, orderExprList, Direction.ASCENDING, NullDirection.UNSPECIFIED, validator); RexNode rexNode = RexInputRef.of(relFieldCollation.getFieldIndex(), logicalView.getRowType()); if (relFieldCollation.direction == Direction.DESCENDING) { rexNode = rexBuilder.makeCall(SqlStdOperatorTable.DESC, rexNode); } switch (relFieldCollation.nullDirection) { case LAST: rexNode = rexBuilder.makeCall(SqlStdOperatorTable.NULLS_LAST, rexNode); break; case FIRST: rexNode = rexBuilder.makeCall(SqlStdOperatorTable.NULLS_FIRST, rexNode); break; default: } orderByExps.add(rexNode); } // end of for return orderByExps; } private SqlValidatorImpl getValidatorForScope(SqlNode sqlNode) { Util.discard(validator.validate(sqlNode)); return (SqlValidatorImpl) validator; } private SqlSelect updateSqlNode(SqlSelect nativeSqlNode, SqlNodeList hints, ExecutionContext ec) { if (hints.size() <= 0) { return nativeSqlNode; } SqlSelect result = (SqlSelect) nativeSqlNode.clone(SqlParserPos.ZERO); // 清理 FROM 中的库名前缀 if (result.getFrom() instanceof SqlIdentifier) { SqlIdentifier from = (SqlIdentifier) result.getFrom(); if (from.names.size() > 1) { from.setNames(ImmutableList.of(Util.last(from.names)), ImmutableList.of(SqlParserPos.ZERO)); } } final List<HintPushOperator> pushHints = new LinkedList<>(); 
HintConverter.convertPush(hints, pushHints, ec); for (HintPushOperator pushHint : pushHints) { pushHint.handle(result); } return result; } private Map<String, SqlNode> getArgMap(SqlBasicCall hintOp) { Map<String, SqlNode> argMap = new LinkedHashMap<>(); for (int index = 0; index < hintOp.getOperands().length; index++) { if (hintOp.getOperands()[index] instanceof SqlBasicCall) { SqlBasicCall arg = (SqlBasicCall) hintOp.getOperands()[index]; if (arg.getOperator().getKind() == SqlKind.EQUALS) { argMap.put(arg.getOperands()[0].toString(), arg.getOperands()[1]); } else { argMap.put(String.valueOf(index), arg.getOperands()[0]); } } else { argMap.put(String.valueOf(index), hintOp.getOperands()[0]); } } return argMap; } private MySqlSelectQueryBlock parseQuery(String sql) { SQLSelectStatement sqlSelect = (SQLSelectStatement) FastsqlUtils.parseSql(sql).get(0); return (MySqlSelectQueryBlock) sqlSelect.getSelect().getQuery(); } protected RelFieldCollation buildOrderItem(SqlSelect select, SqlNode orderItem, List<SqlNode> extraExprs, RelFieldCollation.Direction direction, RelFieldCollation.NullDirection nullDirection, SqlValidatorImpl validator) { assert select != null; // Handle DESC keyword, e.g. 'select a, b from t order by a desc'. switch (orderItem.getKind()) { case DESCENDING: return buildOrderItem(select, ((SqlCall) orderItem).operand(0), extraExprs, RelFieldCollation.Direction.DESCENDING, nullDirection, validator); case NULLS_FIRST: return buildOrderItem(select, ((SqlCall) orderItem).operand(0), extraExprs, direction, RelFieldCollation.NullDirection.FIRST, validator); case NULLS_LAST: return buildOrderItem(select, ((SqlCall) orderItem).operand(0), extraExprs, direction, RelFieldCollation.NullDirection.LAST, validator); default: } SqlNode converted = validator.expandOrderExpr(select, orderItem); switch (nullDirection) { case UNSPECIFIED: nullDirection = NullCollation.LOW.last(desc(direction)) ? 
RelFieldCollation.NullDirection.LAST : RelFieldCollation.NullDirection.FIRST; default: } // Scan the select list and order exprs for an identical expression. int ordinal = -1; final SelectScope selectScope = validator.getRawSelectScope(select); for (SqlNode selectItem : selectScope.getExpandedSelectList()) { ++ordinal; if (converted.equalsDeep(stripAs(selectItem), Litmus.IGNORE)) { return new RelFieldCollation(ordinal, direction, nullDirection); } } for (SqlNode extraExpr : extraExprs) { ++ordinal; if (orderItem.equalsDeep(extraExpr, Litmus.IGNORE)) { return new RelFieldCollation(ordinal, direction, nullDirection); } } // TODO: handle collation sequence // TODO: flag expressions as non-standard extraExprs.add(orderItem); return new RelFieldCollation(ordinal + 1, direction, nullDirection); } private static boolean desc(RelFieldCollation.Direction direction) { switch (direction) { case DESCENDING: case STRICTLY_DESCENDING: return true; default: return false; } } protected static class TddlValidator extends SqlValidatorImpl { protected TddlValidator(SqlOperatorTable opTab, SqlValidatorCatalogReader catalogReader, RelDataTypeFactory typeFactory, SqlConformance conformance) { super(opTab, catalogReader, typeFactory, conformance); } } }
40,738
14,668
<gh_stars>1000+
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "ash/wm/desks/templates/desks_templates_icon_container.h"

#include "ash/public/cpp/desk_template.h"
#include "ash/public/cpp/desks_templates_delegate.h"
#include "ash/public/cpp/shelf_types.h"
#include "ash/public/cpp/window_properties.h"
#include "ash/shell.h"
#include "ash/style/ash_color_provider.h"
#include "ash/wm/desks/templates/desks_templates_icon_view.h"
#include "base/containers/contains.h"
#include "components/app_restore/app_launch_info.h"
#include "extensions/common/constants.h"
#include "ui/aura/client/aura_constants.h"
#include "ui/base/metadata/metadata_impl_macros.h"
#include "ui/color/color_provider.h"
#include "ui/views/background.h"
#include "ui/views/widget/widget.h"

// Implementation of DesksTemplatesIconContainer: a horizontal row of app/site
// icon views for a desk template, with a trailing "+N" overflow counter for
// icons that do not fit.

namespace ash {
namespace {

// The space between icon views.
constexpr int kIconSpacingDp = 8;

// A struct for storing the various information used to determine which app
// icons/favicons to display.
struct IconInfo {
  int activation_index;  // Smallest activation index seen for this identifier.
  int count;             // Number of windows/tabs sharing this identifier.
};

// Given a map from unique icon identifiers to their count, returns an ordered
// vector of the unique icon identifiers (app ids/urls) and their number of
// occurrences.
std::vector<std::pair<std::string, int>> SortIconIdentifiers(
    const std::map<std::string, IconInfo>& identifier_info) {
  // Create a vector using `identifier_info` that contains pairs of identifiers
  // and counts. This will be unsorted.
  std::vector<std::pair<std::string, int>> identifiers_with_count;
  for (const auto& entry : identifier_info)
    identifiers_with_count.emplace_back(entry.first, entry.second.count);

  // Sort `identifiers_with_count` using the activation indices stored in
  // `identifier_info`.
  // Lower activation index sorts first, i.e. most-recently-used comes first.
  std::sort(identifiers_with_count.begin(), identifiers_with_count.end(),
            [&identifier_info](const std::pair<std::string, int>& data_1,
                               const std::pair<std::string, int>& data_2) {
              return identifier_info.at(data_1.first).activation_index <
                     identifier_info.at(data_2.first).activation_index;
            });
  return identifiers_with_count;
}

// Inserts an `IconInfo` struct into `out_identifier_info` if no entry exists
// for `identifier`. If an entry exists for `identifier`, updates its values.
void InsertIdentifierInfo(
    const std::string& identifier,
    int activation_index,
    std::map<std::string, IconInfo>* out_identifier_info) {
  // A single app/site can have multiple windows so count their occurrences and
  // use the smallest activation index for sorting purposes.
  if (!base::Contains(*out_identifier_info, identifier)) {
    (*out_identifier_info)[identifier] = {activation_index, /*count=*/1};
  } else {
    ++(*out_identifier_info)[identifier].count;
    (*out_identifier_info)[identifier].activation_index = std::min(
        (*out_identifier_info)[identifier].activation_index, activation_index);
  }
}

// Iterates through `launch_list`, inserting `IconInfo` structs into
// `out_identifier_info` for each tab and app.
void InsertIdentifierInfoFromLaunchList(
    const std::string& app_id,
    const app_restore::RestoreData::LaunchList& launch_list,
    std::map<std::string, IconInfo>* out_identifier_info) {
  // We want to group active tabs and apps ahead of inactive tabs so offsets
  // inactive tabs activation index by `kInactiveTabOffset`. In almost every use
  // case, there should be no more than `kInactiveTabOffset` number of tabs +
  // apps on a desk.
  constexpr int kInactiveTabOffset = 10000;
  for (auto& restore_data : launch_list) {
    // If `restore_data` is a SWA then it will have a valid url for its active
    // tab. However, in this case we want to display the SWA's icon via its app
    // id so to determine whether `restore_data` is an SWA we need to check
    // whether it's a browser.
    const bool is_browser =
        app_id == extension_misc::kChromeAppId &&
        (!restore_data.second->app_type_browser.has_value() ||
         !restore_data.second->app_type_browser.value());
    // NOTE(review): activation_index is unwrapped unconditionally — assumes
    // every launch-list entry carries one; TODO confirm against RestoreData.
    const int activation_index = restore_data.second->activation_index.value();
    const int active_tab_index =
        restore_data.second->active_tab_index.value_or(-1);
    if (restore_data.second->urls.has_value() && is_browser) {
      // A browser window contributes one identifier per tab URL; only the
      // active tab keeps the window's activation index, the rest are pushed
      // behind all apps/active tabs via `kInactiveTabOffset`.
      const auto& urls = restore_data.second->urls.value();
      for (int i = 0; i < static_cast<int>(urls.size()); ++i) {
        InsertIdentifierInfo(urls[i].spec(),
                             active_tab_index == i
                                 ? activation_index
                                 : kInactiveTabOffset + activation_index,
                             out_identifier_info);
      }
    } else {
      InsertIdentifierInfo(app_id, activation_index, out_identifier_info);
    }
  }
}

}  // namespace

// Sets up the horizontal box layout that hosts the icon views.
DesksTemplatesIconContainer::DesksTemplatesIconContainer() {
  views::Builder<DesksTemplatesIconContainer>(this)
      .SetOrientation(views::BoxLayout::Orientation::kHorizontal)
      .SetBetweenChildSpacing(kIconSpacingDp)
      .BuildChildren();
}

DesksTemplatesIconContainer::~DesksTemplatesIconContainer() = default;

// Builds icon views from a saved desk template's restore data.
void DesksTemplatesIconContainer::PopulateIconContainerFromTemplate(
    DeskTemplate* desk_template) {
  const app_restore::RestoreData* restore_data =
      desk_template->desk_restore_data();
  if (!restore_data)
    return;

  // Iterate through the template's WindowInfo, counting the occurrences of each
  // unique icon identifier and storing their lowest activation index.
  std::map<std::string, IconInfo> identifier_info;
  for (auto& app_id_to_launch_list_entry :
       restore_data->app_id_to_launch_list()) {
    InsertIdentifierInfoFromLaunchList(app_id_to_launch_list_entry.first,
                                       app_id_to_launch_list_entry.second,
                                       &identifier_info);
  }

  CreateIconViewsFromIconIdentifiers(SortIconIdentifiers(identifier_info));
}

// Builds icon views from live windows (e.g. when previewing a desk before it
// is saved as a template).
void DesksTemplatesIconContainer::PopulateIconContainerFromWindows(
    const std::vector<aura::Window*>& windows) {
  DCHECK(!windows.empty());

  // Iterate through `windows`, counting the occurrences of each unique icon and
  // storing their lowest activation index.
  // NOTE(review): the loop index `i` is used as the activation index, which
  // assumes `windows` is already ordered by activation (MRU first) — TODO
  // confirm with callers.
  std::map<std::string, IconInfo> identifier_info;
  auto* delegate = Shell::Get()->desks_templates_delegate();
  for (size_t i = 0; i < windows.size(); ++i) {
    auto* window = windows[i];
    // If `window` is an incognito window, we want to display the incognito icon
    // instead of its favicons so denote it using
    // `DeskTemplate::kIncognitoWindowIdentifier`.
    const bool is_incognito_window = delegate->IsIncognitoWindow(window);
    const std::string app_id =
        is_incognito_window
            ? DeskTemplate::kIncognitoWindowIdentifier
            : ShelfID::Deserialize(window->GetProperty(kShelfIDKey)).app_id;
    // Cache a ColorProvider from the first incognito window seen; presumably
    // used later to tint the incognito icon — declared in the header.
    if (is_incognito_window && !incognito_window_color_provider_) {
      incognito_window_color_provider_ =
          views::Widget::GetWidgetForNativeWindow(window)->GetColorProvider();
    }
    InsertIdentifierInfo(app_id, i, &identifier_info);
  }

  CreateIconViewsFromIconIdentifiers(SortIconIdentifiers(identifier_info));
}

// views::View override: after the base layout pass, hides trailing icons that
// do not fit the available width and folds them into the overflow counter.
void DesksTemplatesIconContainer::Layout() {
  views::BoxLayoutView::Layout();

  if (icon_views_.empty())
    return;

  const int available_horizontal_space = bounds().width();
  // Use the preferred size of this since this will provide the width as if
  // every view in `icon_views_` is shown.
  int used_horizontal_space = GetPreferredSize().width();
  // The overflow counter is always the last element of `icon_views_`.
  DesksTemplatesIconView* overflow_icon_view = icon_views_.back();
  if (used_horizontal_space > available_horizontal_space) {
    // Reverse iterate through `icon_views_` starting with the first
    // non-overflow icon view (i.e. the second-last element). Hide as many icons
    // we need to fit `available_horizontal_space` and then update the overflow
    // icon view.
    int num_hidden_icons = 0;
    for (auto it = ++icon_views_.rbegin(); it != icon_views_.rend(); ++it) {
      if ((*it)->GetVisible()) {
        used_horizontal_space -= (*it)->GetPreferredSize().width();
        (*it)->SetVisible(false);
        ++num_hidden_icons;
      }

      if (used_horizontal_space <= available_horizontal_space)
        break;
    }
    overflow_icon_view->UpdateCount(overflow_icon_view->count() +
                                    num_hidden_icons);
  } else if (overflow_icon_view->count() == 0) {
    // There is no overflow so hide the overflow icon view.
    overflow_icon_view->SetVisible(false);
  }
}

// Creates up to `kMaxIcons` icon views (kMaxIcons is declared elsewhere,
// presumably the header) plus a trailing overflow counter. Expects
// `identifiers_and_counts` pre-sorted by activation order.
void DesksTemplatesIconContainer::CreateIconViewsFromIconIdentifiers(
    const std::vector<std::pair<std::string, int>>& identifiers_and_counts) {
  DCHECK(icon_views_.empty());

  if (identifiers_and_counts.empty())
    return;

  for (size_t i = 0; i < kMaxIcons && i < identifiers_and_counts.size(); ++i) {
    DesksTemplatesIconView* icon_view =
        AddChildView(views::Builder<DesksTemplatesIconView>()
                         .SetBackground(views::CreateRoundedRectBackground(
                             AshColorProvider::Get()->GetControlsLayerColor(
                                 AshColorProvider::ControlsLayerType::
                                     kControlBackgroundColorInactive),
                             DesksTemplatesIconView::kIconSize / 2))
                         .Build());
    icon_view->SetIconIdentifierAndCount(identifiers_and_counts[i].first,
                                         identifiers_and_counts[i].second);
    icon_views_.push_back(icon_view);
  }

  // Always add a `DesksTemplatesIconView` overflow counter in case the width
  // of the view changes. It will be hidden if not needed.
  // At this point `children()` holds exactly the icon views added above.
  const int num_added_icons = children().size();
  DesksTemplatesIconView* overflow_icon_view =
      AddChildView(views::Builder<DesksTemplatesIconView>()
                       .SetBackground(views::CreateRoundedRectBackground(
                           AshColorProvider::Get()->GetControlsLayerColor(
                               AshColorProvider::ControlsLayerType::
                                   kControlBackgroundColorInactive),
                           DesksTemplatesIconView::kIconSize / 2))
                       .Build());
  // An empty identifier marks this view as the "+N" counter.
  overflow_icon_view->SetIconIdentifierAndCount(
      std::string(), identifiers_and_counts.size() - num_added_icons);
  icon_views_.push_back(overflow_icon_view);
}

BEGIN_METADATA(DesksTemplatesIconContainer, views::BoxLayoutView)
END_METADATA

}  // namespace ash
4,027
849
package milkman.plugin.grpc.processor; import com.google.protobuf.DescriptorProtos.*; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type; import com.google.protobuf.Descriptors.FieldDescriptor; import lombok.Value; import java.util.LinkedList; import java.util.List; import java.util.Map.Entry; public class ProtoDescriptorSerializer { @Value public static class FileContent{ String fileName; String contents; } public List<FileContent> descriptorToString(FileDescriptorSet descriptorSet) { List<FileContent> result = new LinkedList<>(); for (FileDescriptorProto protoFileDesc : descriptorSet.getFileList()) { StringBuilder buffer = new StringBuilder(); buffer.append("syntax = \"").append(protoFileDesc.getSyntax()).append("\";\n"); for (Entry<FieldDescriptor, Object> opt : protoFileDesc.getOptions().getAllFields().entrySet()) { buffer.append("option ").append(opt.getKey().getName()).append(" = ").append(opt.getValue()).append(";\n"); } buffer.append("package ").append(protoFileDesc.getPackage()).append(";\n"); buffer.append("\n"); for (DescriptorProto msgType : protoFileDesc.getMessageTypeList()) { buffer.append("message ").append(msgType.getName()).append(" {\n"); for (FieldDescriptorProto field : msgType.getFieldList()) { String type = getType(field.getType()); buffer.append(" ") .append(toLabel(field.getLabel())) .append(type) .append(" ") .append(field.getName()) .append(" = ") .append(field.getNumber()) .append(";\n"); } buffer.append("}\n\n"); } boolean firstService = true; for (ServiceDescriptorProto serviceDesc : protoFileDesc.getServiceList()) { if (!firstService) { buffer.append("\n"); } firstService = false; buffer.append("service ").append(serviceDesc.getName()).append(" {\n"); for (MethodDescriptorProto method : serviceDesc.getMethodList()) { buffer.append(" rpc ") .append(method.getName()) .append("("); if (method.getClientStreaming()) buffer.append("stream "); buffer.append(method.getInputType()) .append(") returns ("); if 
(method.getServerStreaming()) buffer.append("stream "); buffer .append(method.getOutputType()) .append(");\n"); } buffer.append("}\n"); } result.add(new FileContent(protoFileDesc.getName(), buffer.toString())); } return result; } private String toLabel(FieldDescriptorProto.Label label) { if (label == null){ return ""; } switch (label){ case LABEL_OPTIONAL: return ""; //optional is implicit case LABEL_REQUIRED: return "required "; case LABEL_REPEATED: return "repeated "; default: return ""; } } private String getType(Type type) { switch (type) { case TYPE_BOOL: return "bool"; case TYPE_BYTES: return "bytes"; case TYPE_DOUBLE: return "double"; case TYPE_ENUM: return "enum"; case TYPE_FIXED32: return "fixed32"; case TYPE_FIXED64: return "fixed64"; case TYPE_FLOAT: return "float"; case TYPE_GROUP: return "group"; case TYPE_INT32: return "int32"; case TYPE_INT64: return "int64"; case TYPE_MESSAGE: return "message"; case TYPE_SFIXED32: return "sfixed32"; case TYPE_SFIXED64: return "sfixed64"; case TYPE_SINT32: return "sint32"; case TYPE_SINT64: return "sint64"; case TYPE_STRING: return "string"; case TYPE_UINT32: return "uint32"; case TYPE_UINT64: return "uint64"; default: break; } throw new IllegalArgumentException("Unknown type: " + type); } }
1,519
348
<filename>docs/data/leg-t2/047/04702114.json<gh_stars>100-1000 {"nom":"Grézet-Cavagnan","circ":"2ème circonscription","dpt":"Lot-et-Garonne","inscrits":305,"abs":139,"votants":166,"blancs":18,"nuls":0,"exp":148,"res":[{"nuance":"REM","nom":"<NAME>","voix":90},{"nuance":"FN","nom":"<NAME>","voix":58}]}
127
1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /* * BeanPool.java * * Created on November 17, 2004, 5:18 PM */ package org.netbeans.modules.j2ee.sun.dd.api.ejb; /** * * @author <NAME> */ public interface BeanPool extends org.netbeans.modules.j2ee.sun.dd.api.CommonDDBean { public static final String STEADY_POOL_SIZE = "SteadyPoolSize"; // NOI18N public static final String RESIZE_QUANTITY = "ResizeQuantity"; // NOI18N public static final String MAX_POOL_SIZE = "MaxPoolSize"; // NOI18N public static final String POOL_IDLE_TIMEOUT_IN_SECONDS = "PoolIdleTimeoutInSeconds"; // NOI18N public static final String MAX_WAIT_TIME_IN_MILLIS = "MaxWaitTimeInMillis"; // NOI18N /** Setter for steady-pool-size property * @param value property value */ public void setSteadyPoolSize(java.lang.String value); /** Getter for steady-pool-size property. * @return property value */ public java.lang.String getSteadyPoolSize(); /** Setter for resize-quantity property * @param value property value */ public void setResizeQuantity(java.lang.String value); /** Getter for resize-quantity property. 
* @return property value */ public java.lang.String getResizeQuantity(); /** Setter for max-pool-size property * @param value property value */ public void setMaxPoolSize(java.lang.String value); /** Getter for max-pool-size property. * @return property value */ public java.lang.String getMaxPoolSize(); /** Setter for pool-idle-timeout-in-seconds property * @param value property value */ public void setPoolIdleTimeoutInSeconds(java.lang.String value); /** Getter for pool-idle-timeout-in-seconds property. * @return property value */ public java.lang.String getPoolIdleTimeoutInSeconds(); /** Setter for max-wait-time-in-millis property * @param value property value */ public void setMaxWaitTimeInMillis(java.lang.String value); /** Getter for max-wait-time-in-millis property. * @return property value */ public java.lang.String getMaxWaitTimeInMillis(); }
997
2,151
<reponame>zipated/src<gh_stars>1000+ // Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "headless/public/util/virtual_time_controller.h" #include "base/auto_reset.h" #include "base/bind.h" #include "base/logging.h" namespace headless { using base::TimeDelta; VirtualTimeController::VirtualTimeController( HeadlessDevToolsClient* devtools_client, int max_task_starvation_count) : devtools_client_(devtools_client), max_task_starvation_count_(max_task_starvation_count), weak_ptr_factory_(this) { devtools_client_->GetEmulation()->GetExperimental()->AddObserver(this); } VirtualTimeController::~VirtualTimeController() { devtools_client_->GetEmulation()->GetExperimental()->RemoveObserver(this); } void VirtualTimeController::StartVirtualTime() { if (virtual_time_started_) return; TimeDelta next_budget; bool wait_for_navigation = false; for (auto& entry_pair : tasks_) { entry_pair.second.ready_to_advance = true; if (entry_pair.first->start_policy() == RepeatingTask::StartPolicy::WAIT_FOR_NAVIGATION) { wait_for_navigation = true; } if (next_budget.is_zero()) { next_budget = entry_pair.second.next_execution_time - total_elapsed_time_offset_; } else { next_budget = std::min(next_budget, entry_pair.second.next_execution_time - total_elapsed_time_offset_); } } // If there's no budget, then don't do anything! if (next_budget.is_zero()) return; virtual_time_started_ = true; should_send_start_notification_ = true; if (resume_deferrer_) { resume_deferrer_->DeferResume(base::BindOnce( &VirtualTimeController::SetVirtualTimePolicy, weak_ptr_factory_.GetWeakPtr(), next_budget, wait_for_navigation)); } else { SetVirtualTimePolicy(next_budget, wait_for_navigation); } } void VirtualTimeController::NotifyTasksAndAdvance() { // The task may call its continue callback synchronously. Prevent re-entrance. 
if (in_notify_tasks_and_advance_) return; base::AutoReset<bool> reset(&in_notify_tasks_and_advance_, true); for (auto iter = tasks_.begin(); iter != tasks_.end();) { auto entry_pair = iter++; if (entry_pair->second.next_execution_time <= total_elapsed_time_offset_) { entry_pair->second.ready_to_advance = false; entry_pair->second.next_execution_time = total_elapsed_time_offset_ + entry_pair->second.interval; // This may delete itself. entry_pair->first->IntervalElapsed( total_elapsed_time_offset_, base::BindOnce(&VirtualTimeController::TaskReadyToAdvance, weak_ptr_factory_.GetWeakPtr(), base::Unretained(&entry_pair->second))); } } // Give at most as much virtual time as available until the next callback. bool advance_virtual_time = false; bool stop_virtual_time = false; bool ready_to_advance = true; TimeDelta next_budget; for (const auto& entry_pair : tasks_) { ready_to_advance &= entry_pair.second.ready_to_advance; if (next_budget.is_zero()) { next_budget = entry_pair.second.next_execution_time - total_elapsed_time_offset_; } else { next_budget = std::min(next_budget, entry_pair.second.next_execution_time - total_elapsed_time_offset_); } if (entry_pair.second.continue_policy == RepeatingTask::ContinuePolicy::CONTINUE_MORE_TIME_NEEDED) { advance_virtual_time = true; } else if (entry_pair.second.continue_policy == RepeatingTask::ContinuePolicy::STOP) { stop_virtual_time = true; } } if (!ready_to_advance) return; if (!advance_virtual_time || stop_virtual_time) { for (auto& entry_pair : tasks_) { entry_pair.second.ready_to_advance = false; } for (auto iter = observers_.begin(); iter != observers_.end();) { Observer* observer = *iter++; // |observer| may delete itself. 
observer->VirtualTimeStopped(total_elapsed_time_offset_); } virtual_time_started_ = false; return; } DCHECK(!next_budget.is_zero()); if (resume_deferrer_) { resume_deferrer_->DeferResume( base::BindOnce(&VirtualTimeController::SetVirtualTimePolicy, weak_ptr_factory_.GetWeakPtr(), next_budget, false /* wait_for_navigation */)); } else { SetVirtualTimePolicy(next_budget, false /* wait_for_navigation */); } } void VirtualTimeController::TaskReadyToAdvance( TaskEntry* entry, RepeatingTask::ContinuePolicy continue_policy) { entry->ready_to_advance = true; entry->continue_policy = continue_policy; NotifyTasksAndAdvance(); } void VirtualTimeController::SetVirtualTimePolicy(base::TimeDelta next_budget, bool wait_for_navigation) { last_budget_ = next_budget; devtools_client_->GetEmulation()->GetExperimental()->SetVirtualTimePolicy( emulation::SetVirtualTimePolicyParams::Builder() .SetPolicy( emulation::VirtualTimePolicy::PAUSE_IF_NETWORK_FETCHES_PENDING) .SetBudget(next_budget.InMillisecondsF()) .SetMaxVirtualTimeTaskStarvationCount(max_task_starvation_count_) .SetWaitForNavigation(wait_for_navigation) .Build(), base::BindOnce(&VirtualTimeController::SetVirtualTimePolicyDone, weak_ptr_factory_.GetWeakPtr())); } void VirtualTimeController::SetVirtualTimePolicyDone( std::unique_ptr<emulation::SetVirtualTimePolicyResult> result) { if (result) { virtual_time_base_ = base::TimeTicks() + base::TimeDelta::FromMillisecondsD(result->GetVirtualTimeTicksBase()); } else { LOG(WARNING) << "SetVirtualTimePolicy did not succeed"; } if (should_send_start_notification_) { should_send_start_notification_ = false; for (auto iter = observers_.begin(); iter != observers_.end();) { Observer* observer = *iter++; // |observer| may delete itself. 
observer->VirtualTimeStarted(total_elapsed_time_offset_); } } } void VirtualTimeController::OnVirtualTimeBudgetExpired( const emulation::VirtualTimeBudgetExpiredParams& params) { total_elapsed_time_offset_ += last_budget_; virtual_time_paused_ = true; NotifyTasksAndAdvance(); } void VirtualTimeController::ScheduleRepeatingTask(RepeatingTask* task, base::TimeDelta interval) { if (!virtual_time_paused_) { // We cannot accurately modify any previously granted virtual time budget. LOG(WARNING) << "VirtualTimeController tasks should be added while " "virtual time is paused."; } TaskEntry entry; entry.interval = interval; entry.next_execution_time = total_elapsed_time_offset_ + entry.interval; tasks_.insert(std::make_pair(task, entry)); } void VirtualTimeController::CancelRepeatingTask(RepeatingTask* task) { tasks_.erase(task); } void VirtualTimeController::AddObserver(Observer* observer) { observers_.insert(observer); } void VirtualTimeController::RemoveObserver(Observer* observer) { observers_.erase(observer); } base::TimeTicks VirtualTimeController::GetVirtualTimeBase() const { return virtual_time_base_; } base::TimeDelta VirtualTimeController::GetCurrentVirtualTimeOffset() const { return total_elapsed_time_offset_; } void VirtualTimeController::SetResumeDeferrer(ResumeDeferrer* resume_deferrer) { resume_deferrer_ = resume_deferrer; } } // namespace headless
2,985
32,544
<filename>spring-boot-modules/spring-boot-libraries/src/main/java/com/baeldung/barcodes/SpringBootApp.java<gh_stars>1000+ package com.baeldung.barcodes; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.annotation.Bean; import org.springframework.http.converter.BufferedImageHttpMessageConverter; import org.springframework.http.converter.HttpMessageConverter; import java.awt.image.BufferedImage; @SpringBootApplication public class SpringBootApp { public static void main(String[] args) { SpringApplication.run(SpringBootApp.class, args); } @Bean public HttpMessageConverter<BufferedImage> createImageHttpMessageConverter() { return new BufferedImageHttpMessageConverter(); } }
271
41,267
import sys import scrapy from scrapy.crawler import CrawlerProcess class CachingHostnameResolverSpider(scrapy.Spider): """ Finishes in a finite amount of time (does not hang indefinitely in the DNS resolution) """ name = "caching_hostname_resolver_spider" def start_requests(self): yield scrapy.Request(self.url) def parse(self, response): for _ in range(10): yield scrapy.Request(response.url, dont_filter=True, callback=self.ignore_response) def ignore_response(self, response): self.logger.info(repr(response.ip_address)) if __name__ == "__main__": process = CrawlerProcess(settings={ "RETRY_ENABLED": False, "DNS_RESOLVER": "scrapy.resolver.CachingHostnameResolver", }) process.crawl(CachingHostnameResolverSpider, url=sys.argv[1]) process.start()
328
889
<reponame>limberc/HyperGAN import argparse import os import uuid import tensorflow as tf import hypergan as hg import hyperchamber as hc import numpy as np from hypergan.generators import * from hypergan.viewer import GlobalViewer from common import * from hypergan.search.random_search import RandomSearch from hypergan.samplers.random_walk_sampler import RandomWalkSampler from hypergan.samplers.static_batch_sampler import StaticBatchSampler arg_parser = ArgumentParser("Feed static values into X/Z and memorize them") arg_parser.add_image_arguments() args = arg_parser.parse_args() width, height, channels = parse_size(args.size) config = lookup_config(args) save_file = "save/model.ckpt" if args.action == 'search': config = RandomSearch({}).random_config() if args.config_list is not None: config = random_config_from_list(args.config_list) random_config = RandomSearch({}).random_config() config["generator"]=random_config["generator"] config["discriminator"]=random_config["discriminator"] # TODO Other search terms? 
inputs = hg.inputs.image_loader.ImageLoader(args.batch_size) inputs.create(args.directory, channels=channels, format=args.format, crop=args.crop, width=width, height=height, resize=True) save_file = "save/model.ckpt" def setup_gan(config, inputs, args): gan = hg.GAN(config, inputs=inputs) gan.create() if(args.action != 'search' and os.path.isfile(save_file+".meta")): gan.load(save_file) tf.train.start_queue_runners(sess=gan.session) config_name = args.config title = "[hypergan] static " + config_name GlobalViewer.title = title GlobalViewer.enabled = args.viewer return gan def train(config, inputs, args): gan = setup_gan(config, inputs, args) static_x, static_z = gan.session.run([gan.inputs.x, gan.encoder.sample]) accuracy_x_to_g=batch_accuracy(static_x, gan.generator.sample) diversity_g = batch_diversity(gan.generator.sample) metrics = [accuracy_x_to_g, diversity_g] sum_metrics = [0 for metric in metrics] sampler = lookup_sampler(args.sampler or StaticBatchSampler)(gan) for i in range(args.steps): gan.step({gan.inputs.x: static_x, gan.encoder.sample: static_z}) if i % args.sample_every == 0: print("sampling "+str(i)) sample_file = "samples/"+str(i)+".png" sampler.sample(sample_file, args.save_samples) if args.action == 'train' and i % args.save_every == 0 and i > 0: print("saving " + save_file) gan.save(save_file) if i > args.steps * 9.0/10: for k, metric in enumerate(gan.session.run(metrics)): print("Metric "+str(k)+" "+str(metric)) sum_metrics[k] += metric return sum_metrics def sample(config, inputs, args): gan = setup_gan(config, inputs, args) sampler = lookup_sampler(args.sampler or RandomWalkSampler)(gan) for i in range(args.steps): sample_file = "samples/"+str(i)+".png" sampler.sample(sample_file, args.save_samples) def search(config, inputs, args): metrics = train(config, inputs, args) config_filename = "static-"+str(uuid.uuid4())+'.json' hc.Selector().save(config_filename, config) with open(args.search_output, "a") as myfile: 
myfile.write(config_filename+","+",".join([str(x) for x in metrics])+"\n") if args.action == 'train': metrics = train(config, inputs, args) print("Resulting metrics:", metrics) elif args.action == 'sample': sample(config, inputs, args) elif args.action == 'search': search(config, inputs, args) else: print("Unknown action: "+args.action)
1,531
3,428
{"id":"00019","group":"spam-1","checksum":{"type":"MD5","value":"bbc97ad616ffd06e93ce0f821ca8c381"},"text":"From <EMAIL> Fri Aug 23 11:03:37 2002\nReturn-Path: <<EMAIL>>\nDelivered-To: [email protected]\nReceived: from localhost (localhost [1192.168.3.11])\n\tby phobos.labs.spamassassin.taint.org (Postfix) with ESMTP id 5AC994415F\n\tfor <zzzz@localhost>; Fri, 23 Aug 2002 06:02:59 -0400 (EDT)\nReceived: from mail.webnote.net [192.168.3.11]\n\tby localhost with POP3 (fetchmail-5.9.0)\n\tfor zzzz@localhost (single-drop); Fri, 23 Aug 2002 11:02:59 +0100 (IST)\nReceived: from l11.newnamedns.com ([64.25.38.81])\n\tby webnote.net (8.9.3/8.9.3) with ESMTP id KAA09379\n\tfor <<EMAIL>>; Fri, 23 Aug 2002 10:18:03 +0100\nFrom: <EMAIL>\nDate: Fri, 23 Aug 2002 02:16:25 -0400\nMessage-Id: <<EMAIL>>\nTo: k<EMAIL>\nReply-To: <EMAIL>\nSubject: ADV: Lowest life insurance rates available! moode\n\nLowest rates available for term life insurance! Take a moment and fill out our online form to see the low rate you qualify for. Save up to 70% from regular rates! Smokers accepted! http://www.newnamedns.com/termlife/ \n \nRepresenting quality nationwide carriers. Act now!\n \n \n \n \n \n---------------------------------------\nTo easily remove your address from the list, go to: \nhttp://www.newnamedns.com/stopthemailplease/\nPlease allow 48-72 hours for removal.\n\n"}
611
521
<reponame>braymar/afl<filename>qemu_mode/qemu-2.10.0/roms/ipxe/src/arch/x86/include/bits/linux_api_platform.h<gh_stars>100-1000 #ifndef _LINUX_API_PLATFORM_H #define _LINUX_API_PLATFORM_H extern int linux_errno; #endif /* _LINUX_API_PLATFORM_H */
120
4,071
/* * \file matmul_op.h * \brief The matmul operation */ #pragma once #include <vector> #include "blaze/operator/operator.h" #include "blaze/common/exception.h" #include "blaze/common/types.h" #include "blaze/math/broadcast.h" #include "blaze/math/gemm.h" #include "blaze/math/vml.h" namespace blaze { template <class Context> class MatMulOp : public Operator<Context> { public: USE_OPERATOR_FUNCTIONS(Context); MatMulOp(const OperatorDef& def, Workspace* workspace) : Operator<Context>(def, workspace) { transa_ = OperatorBase::GetSingleArgument<bool>("transA", false); transb_ = OperatorBase::GetSingleArgument<bool>("transB", false); from_deepnet_ = OperatorBase::GetSingleArgument<bool>("from_deepnet", false); } bool RunOnDevice() override { Blob* a = this->Input(0); Blob* b = this->Input(1); Blob* c = this->Output(0); // Step1: Setup Setup(); // Step2: Run MatMul std::vector<TIndex> a_shape, b_shape; // Calc outer shape for (size_t k = 0; k < a->shape().size() - 2; ++k) { a_shape.push_back(a->shape()[k]); } for (size_t k = 0; k < b->shape().size() - 2; ++k) { b_shape.push_back(b->shape()[k]); } if (b_shape.size() < a_shape.size()) { for (size_t k = 0; k < a_shape.size() - b_shape.size(); ++k) { b_shape.insert(b_shape.begin(), 1); } } else if (b_shape.size() > a_shape.size()) { for (size_t k = 0; k < b_shape.size() - a_shape.size(); ++k) { a_shape.insert(a_shape.begin(), 1); } } // Calc matrix M/K/N int M = a->shape()[a->shape().size() - 2]; int K = a->shape()[a->shape().size() - 1]; if (transa_) { std::swap(M, K); } int N = b->shape()[b->shape().size() - 1]; if (transb_) { N = b->shape()[b->shape().size() - 2]; } TYPE_SWITCH_WITH_CTX(this->context_, a->data_type(), DType, { // Run StrideBatched DType* a0 = a->as<DType>(); DType* b0 = b->as<DType>(); DType* c0 = c->as<DType>(); // Broacast GEMM BroadcastGemm<DType, Context>(transa_ ? CblasTrans : CblasNoTrans, transb_ ? 
CblasTrans : CblasNoTrans, M, N, K, 1.0, a0, b0, 0, c0, a_shape, b_shape, &this->context_); }); return true; } protected: void Setup() { Blob* a = this->Input(0); Blob* b = this->Input(1); Blob* c = this->Output(0); if (from_deepnet_) { // Ulf compatile process UlfCompatileProcess(); } BLAZE_CONDITION_THROW(a->shape().size() >= 2, "a->shape().size()=", a->shape().size()); BLAZE_CONDITION_THROW(b->shape().size() >= 2, "b->shape().size()=", b->shape().size()); Blob *min = a, *max = b; bool min_trans = transa_, max_trans = transb_; if (a->shape().size() > b->shape().size()) { std::swap(min, max); std::swap(min_trans, max_trans); } std::vector<TIndex> shape = max->shape(); size_t off = max->shape().size() - min->shape().size(); for (size_t k = 0; k < min->shape().size() - 2; ++k) { TIndex dim_min = min->shape()[k]; TIndex dim_max = max->shape()[off + k]; // Broadcast support. numpy standard. BLAZE_CONDITION_THROW(dim_min == dim_max || dim_min == 1 || dim_max == 1, "dim_min=", dim_min, " dim_max=", dim_max, " op=", this->def_.name()); if (dim_max == 1) { shape[off + k] = dim_min; } } TIndex a_k = a->shape()[a->shape().size() - 1]; if (transa_) a_k = a->shape()[a->shape().size() - 2]; TIndex b_k = b->shape()[b->shape().size() - 2]; if (transb_) b_k = b->shape()[b->shape().size() - 1]; BLAZE_CONDITION_THROW(a_k == b_k, "a_k=", a_k, " b_k=", b_k); if (transa_) shape[shape.size() - 2] = a->shape()[a->shape().size() - 1]; else shape[shape.size() - 2] = a->shape()[a->shape().size() - 2]; if (transb_) shape[shape.size() - 1] = b->shape()[b->shape().size() - 2]; else shape[shape.size() - 1] = b->shape()[b->shape().size() - 1]; c->Reshape(shape); } // Ulf model compatible process void UlfCompatileProcess() { // According to the batchdot definition in ulf. 
Blob* a = this->Input(0); Blob* b = this->Input(1); if (b->shape().size() == 2) { std::vector<TIndex> rb_shape(3); rb_shape[0] = b->shape()[0]; rb_shape[1] = 1; rb_shape[2] = b->shape()[1]; b->Reshape(rb_shape); } if (a->shape().size() == 2) { std::vector<TIndex> ra_shape(3); ra_shape[0] = a->shape()[0]; ra_shape[1] = a->shape()[1]; ra_shape[2] = 1; a->Reshape(ra_shape); } } bool transa_; bool transb_; bool from_deepnet_; }; } // namespace blaze
2,690
903
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.demo.facet; import java.util.List; import org.apache.lucene.facet.FacetResult; import org.apache.lucene.tests.util.LuceneTestCase; import org.junit.Test; public class TestAssociationsFacetsExample extends LuceneTestCase { @Test public void testExamples() throws Exception { List<FacetResult> res = new AssociationsFacetsExample().runSumAssociations(); assertEquals("Wrong number of results", 2, res.size()); assertEquals( "dim=tags path=[] value=-1 childCount=2\n lucene (4)\n solr (2)\n", res.get(0).toString()); assertEquals( "dim=genre path=[] value=-1.0 childCount=2\n computing (1.62)\n software (0.34)\n", res.get(1).toString()); } @Test public void testDrillDown() throws Exception { FacetResult result = new AssociationsFacetsExample().runDrillDown(); assertEquals( "dim=genre path=[] value=-1.0 childCount=2\n computing (0.75)\n software (0.34)\n", result.toString()); } }
579
335
{ "word": "Leprosarium", "definitions": [ "A hospital for people with leprosy." ], "parts-of-speech": "Noun" }
63
3,370
(module.exports = { "parserOptions": { "ecmaVersion": 6, "sourceType": "module", "ecmaFeatures": { "modules": true, "jsx": true } }, "parser": "@typescript-eslint/parser", "plugins": ["@typescript-eslint", "prettier", "react", "react-hooks"], "extends": [ "eslint:recommended", "plugin:@typescript-eslint/recommended", "plugin:react/recommended", "prettier", "prettier/@typescript-eslint" ], "rules": { // Built-in eslint rules "no-console": ["error", {"allow": ["info", "warn", "error"]}], "no-unused-expressions": "warn", "no-await-in-loop": "warn", "no-template-curly-in-string": "warn", "array-callback-return": "warn", "block-scoped-var": "error", "no-param-reassign": "error", "no-restricted-globals": [ "error", // Prevent accidental use of specific globals with names that were designed to clash, // https://media2.giphy.com/media/4ZxicT7ZQYcLShHOiz/giphy.gif "addEventListener", "Animation", "blur", "close", "closed", "confirm", "defaultStatus", "defaultstatus", "Element", "event", "external", "find", "focus", "frameElement", "frames", "history", "History", "innerHeight", "innerWidth", "length", "location", "locationbar", "menubar", "moveBy", "moveTo", "name", "navigator", "onblur", "onerror", "onfocus", "onload", "onresize", "onunload", "open", "opener", "opera", "origin", "outerHeight", "outerWidth", "pageXOffset", "pageYOffset", "parent", "print", "removeEventListener", "resizeBy", "resizeTo", "screen", "screenLeft", "screenTop", "screenX", "screenY", "scroll", "scrollbars", "scrollBy", "scrollTo", "scrollX", "scrollY", "status", "statusbar", "stop", "Text", "toolbar", "top" ], "no-restricted-imports": [ "error", { "patterns": ["**/*.spec", "**/*.spec.*"] } ], "no-shadow": "error", // React specific errors "react/jsx-no-comment-textnodes": "error", "react/jsx-key": ["error", {"checkFragmentShorthand": true}], "react/no-did-mount-set-state": "error", "react/no-did-update-set-state": "error", "react/no-access-state-in-setstate": "error", "react/jsx-no-bind": [ 
"error", { "ignoreDOMComponents": true, "ignoreRefs": false, "allowArrowFunctions": false, "allowFunctions": false, "allowBind": false } ], "react-hooks/rules-of-hooks": "error", "react-hooks/exhaustive-deps": "error", // typescript-eslint rules "@typescript-eslint/no-unused-vars": [ "warn", { "args": "none", "varsIgnorePattern": "_exhaustiveCheck", "ignoreRestSiblings": true } ], // inherited rules we are turning off here "no-undef": "off", "no-inner-declarations": "off", "no-dupe-class-members": "off", "no-case-declarations": "off", "no-prototype-builtins": "off", "no-useless-escape": "off", "no-var": "off", "prefer-const": "off", "react/prop-types": "off", "react/display-name": "off", "react/no-deprecated": "off", "react/no-direct-mutation-state": "off", "react/no-find-dom-node": "off", "react/no-unescaped-entities": "off", "react/no-string-refs": "off", "react/no-typos": "off", "react/require-render-return": "off", "@typescript-eslint/ban-ts-ignore": "off", "@typescript-eslint/no-inferrable-types": "off", "@typescript-eslint/no-empty-interface": "off", "@typescript-eslint/camelcase": "off", "@typescript-eslint/array-type": "off", "@typescript-eslint/explicit-function-return-type": "off", "@typescript-eslint/no-explicit-any": "off", "@typescript-eslint/no-non-null-assertion": "off", "@typescript-eslint/no-use-before-define": "off", "@typescript-eslint/no-object-literal-type-assertion": "off", "@typescript-eslint/prefer-interface": "off", "@typescript-eslint/explicit-member-accessibility": "off", "@typescript-eslint/no-var-requires": "off" }, "overrides": [ { "files": ["*.spec.ts", "*.spec.tsx"], "rules": { "no-unused-expressions": "off", "no-template-curly-in-string": "off", "@typescript-eslint/no-unused-vars": "off", "@typescript-eslint/no-empty-function": "off" } } ], "settings": { "react": { "version": "16.12" } } })
2,288
1,343
#ifndef ANIME4KCPP_GUI_COMMUNICATOR_H #define ANIME4KCPP_GUI_COMMUNICATOR_H #include <QObject> class Communicator : public QObject { Q_OBJECT public: explicit Communicator(QObject* parent = nullptr); signals: void setError(int row); void showError(QString msg); void done(int row, double pro, quint64 time); void allDone(quint64 totalTime); void logInfo(QString info); void updateProgress(double v, double elpsed, double remaining); }; #endif // !ANIME4KCPP_GUI_COMMUNICATOR_H
200
10,225
<reponame>PieterjanDeconinck/quarkus package io.quarkus.it.jpa.h2; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; @Entity public abstract class Customer { @Id @GeneratedValue private Integer id; String externalcode; }
113
574
<gh_stars>100-1000 import os import unittest import netCDF4 CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) class TestOpenMem(unittest.TestCase): def test_mem_open(self): fpath = os.path.join(CURRENT_DIR, "netcdf_dummy_file.nc") with open(fpath, 'rb') as f: nc_bytes = f.read() if not netCDF4.__has_nc_open_mem__: with self.assertRaises(ValueError): netCDF4.Dataset('foo_bar', memory=nc_bytes) return # Needs: https://github.com/Unidata/netcdf-c/pull/400 if netCDF4.__netcdf4libversion__ < '4.4.1.2': with self.assertRaises(IOError): netCDF4.Dataset('foo_bar', memory=nc_bytes) return with netCDF4.Dataset('foo_bar', memory=nc_bytes) as nc: assert nc.filepath() == 'foo_bar' assert nc.project_summary == 'Dummy netCDF file' if __name__ == '__main__': unittest.main()
532
17,702
<gh_stars>1000+ import pytest import numpy as np from cntk.contrib import crosstalk as cstk import tempfile workdir = tempfile.gettempdir() shape1 = (100, 200,) shape2 = (10, 20,) param1 = np.random.random(shape1).astype(np.float32) param2 = np.random.random(shape2).astype(np.float32) def cntk_baseline_basic(): import cntk as C import cntk.contrib.crosstalk.crosstalk_cntk as crct ci = crct.instance p1 = C.parameter(shape1, init=param1) p2 = C.parameter(shape2, init=param2) ci.watch(p1, 'p1') ci.watch({'param1':p1, 'param2':p2}, 'p1_p2', var_type=crct.DictParameterType) ci.set_workdir(workdir) ci.fetch('p1', save=True) ci.fetch('p1_p2', save=True) ci.reset() def tf_baseline_basic(): import tensorflow as tf import cntk.contrib.crosstalk.crosstalk_tensorflow as crtf ci = crtf.instance tf.reset_default_graph() p1 = tf.get_variable("param1", initializer=param1, dtype=tf.float32) p2 = tf.get_variable("param2", initializer=param2, dtype=tf.float32) ci.watch(p1, 'p1', var_type=crtf.TrainableType) ci.watch({'param1':p1, 'param2':p2}, 'p1_p2', var_type=crtf.DictTrainableType) with tf.Session() as sess: sess.run(tf.global_variables_initializer()) ci.set_workdir(workdir) ci.set_data(sess, None) ci.fetch('p1', save=True) ci.fetch('p1_p2', save=True) ci.reset() sess.close() def test_cntk_basic(): try: import tensorflow has_tensorflow = True except: has_tensorflow = False if has_tensorflow: tf_baseline_basic() else: cntk_baseline_basic() import cntk as C import cntk.contrib.crosstalk.crosstalk_cntk as crct ci = crct.instance ci.set_workdir(workdir) p1 = C.parameter(shape1) p2 = C.parameter(shape2) ci.watch(p1, 'p1') ci.watch({'param1':p1, 'param2':p2}, 'p1_p2', var_type=crct.DictParameterType) ci.assign('p1', load=True) assert np.isclose(p1.value, param1).all() ci.assign('p1_p2', load=True) assert np.isclose(p1.value, param1).all() and np.isclose(p2.value, param2).all() # test assign with value ci.assign('p1', value=param1) ci.assign('p1_p2', value={'param1':param1, 
'param2':param2}) ci.reset()
1,135
852
#include "L1Trigger/L1TCommon/src/Setting.cc" #include "L1Trigger/L1TCommon/src/Mask.cc" #include "L1Trigger/L1TCommon/src/XmlConfigReader.cc" #include "L1Trigger/L1TCommon/src/TrigSystem.cc" #include <iostream> #include <fstream> // To compile run these lines in your CMSSW_X_Y_Z/src/ : /* cmsenv eval "setenv `scram tool info xerces-c | sed -n -e 's/INCLUDE=/XERC_INC /gp'`" eval "setenv `scram tool info xerces-c | sed -n -e 's/LIBDIR=/XERC_LIB /gp'`" eval "setenv `scram tool info boost | sed -n -e 's/INCLUDE=/BOOST_INC /gp'`" eval "setenv `scram tool info boost | sed -n -e 's/LIBDIR=/BOOST_LIB /gp'`" g++ -g -std=c++11 -o test readcalol2.cpp -I./ -I$CMSSW_RELEASE_BASE/src -I$XERC_INC -L$XERC_LIB -lxerces-c -I$BOOST_INC -L$BOOST_LIB -lboost_thread -lboost_signals -lboost_date_time -L$CMSSW_RELEASE_BASE/lib/$SCRAM_ARCH/ -lFWCoreMessageLogger -lCondFormatsL1TObjects -L$CMSSW_BASE/lib/slc6_amd64_gcc530/ -lL1TriggerL1TCommon ./test ~kkotov/public/MPs*.xml */ using namespace std; int main(int argc, char *argv[]){ if( argc < 2 ) return 0; // read the input xml file into a string list<string> sequence; map<string,string> xmlPayload; for(int p=1; p<argc; p++){ ifstream input( argv[p] ); if( !input ){ cout << "Cannot open " << argv[p] << " file" << endl; return 0; } sequence.push_back( argv[p] ); size_t nLinesRead=0; while( !input.eof() ){ string tmp; getline( input, tmp, '\n' ); xmlPayload[ argv[p] ].append( tmp ); nLinesRead++; } cout << argv[p] << ": read " << nLinesRead << " lines" << endl; input.close(); } // parse the string using the XML reader XmlConfigReader xmlRdr; l1t::TrigSystem trgSys; trgSys.addProcRole("MainProcessor", "processors"); // for(auto &name : sequence){ cout<<"Parsing "<<name<<endl; xmlRdr.readDOMFromString( xmlPayload[name] ); xmlRdr.readRootElement ( trgSys, "calol2" ); } trgSys.setConfigured(); // feel free to play with the containers: map<string, l1t::Setting> conf = trgSys.getSettings("MainProcessor"); // use your context id here // map<string, 
l1t::Mask> rs = trgSys.getMasks ("processors"); // don't call a context that doesn't exist string tmp = conf["leptonSeedThreshold"].getValueAsStr(); cout << "leptonSeedThreshold=" << tmp << endl; return 0; }
1,105
3,055
<reponame>sei-kiu/Grove-Beginner-Kit-for-Arduino-Grove-OLED-Display-0.96-SSD1315- /* u8x8_string.c string line procedures Universal 8bit Graphics Library (https://github.com/olikraus/u8g2/) Copyright (c) 2016, <EMAIL> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "u8x8.h" uint8_t u8x8_GetStringLineCnt(const char *str) { char e; uint8_t line_cnt = 1; if ( str == NULL ) return 0; for(;;) { e = *str; if ( e == '\0' ) break; str++; if ( e == '\n' ) line_cnt++; } return line_cnt; } /* Assumes strings, separated by '\n' in "str". Returns the string at index "line_idx". First strng has line_idx = 0 Example: Returns "xyz" for line_idx = 1 with str = "abc\nxyz" Support both UTF8 and normal strings. 
*/ const char *u8x8_GetStringLineStart(uint8_t line_idx, const char *str ) { char e; uint8_t line_cnt = 1; if ( line_idx == 0 ) return str; for(;;) { e = *str; if ( e == '\0' ) break; str++; if ( e == '\n' ) { if ( line_cnt == line_idx ) return str; line_cnt++; } } return NULL; /* line not found */ } /* copy until first '\n' or '\0' in str */ /* Important: There is no string overflow check, ensure */ /* that the destination buffer is large enough */ void u8x8_CopyStringLine(char *dest, uint8_t line_idx, const char *str) { if ( dest == NULL ) return; str = u8x8_GetStringLineStart( line_idx, str ); if ( str != NULL ) { for(;;) { if ( *str == '\n' || *str == '\0' ) break; *dest = *str; dest++; str++; } } *dest = '\0'; } /* Draw a string Extend the string to size "w" Center the string within "w" return the size of the string */ uint8_t u8x8_DrawUTF8Line(u8x8_t *u8x8, uint8_t x, uint8_t y, uint8_t w, const char *s) { uint8_t d, lw; uint8_t cx, dx; d = 0; lw = u8x8_GetUTF8Len(u8x8, s); if ( lw < w ) { d = w; d -=lw; d /= 2; } cx = x; dx = cx + d; while( cx < dx ) { u8x8_DrawUTF8(u8x8, cx, y, " "); cx++; } cx += u8x8_DrawUTF8(u8x8, cx, y, s); dx = x + w; while( cx < dx ) { u8x8_DrawUTF8(u8x8, cx, y, " "); cx++; } cx -= x; return cx; } /* draw several lines at position x,y. lines are stored in s and must be separated with '\n'. lines can be centered with respect to "w" if s == NULL nothing is drawn and 0 is returned returns the number of lines in s */ uint8_t u8x8_DrawUTF8Lines(u8x8_t *u8x8, uint8_t x, uint8_t y, uint8_t w, const char *s) { uint8_t i; uint8_t cnt; cnt = u8x8_GetStringLineCnt(s); for( i = 0; i < cnt; i++ ) { u8x8_DrawUTF8Line(u8x8, x, y, w, u8x8_GetStringLineStart(i, s)); y++; } return cnt; }
1,705
1,037
import json from flask import Blueprint, jsonify, request, Response, current_app from flask_login import login_required, current_user from flask_babel import gettext as _ from configs.regions_list import REGION_HIERARCHY import application.models as Models from application.services.cache import cached address = Blueprint('address', __name__, url_prefix='/api/address') @address.route('/hierarchy', methods=['GET']) @cached(21600) def get_countries(): return jsonify(message='OK', countries=list(REGION_HIERARCHY.keys())) @address.route('/hierarchy/<country>', methods=['GET']) @cached(21600) def get_regions(country): regions = REGION_HIERARCHY.get(country) return jsonify(message='OK', regions=regions) @address.route('/default', methods=['GET']) @login_required def default_address(): addresses = current_user.addresses if len(addresses) > 0: return jsonify(message='OK', address=addresses[0].to_json()) return jsonify(message='OK', address=None) @address.route('/get/<addr_id>', methods=['GET']) @login_required def get_address(addr_id): address = Models.Address.objects(id=addr_id).first() if address not in current_user.addresses: return jsonify(message='Failed', error=_('invalid address id for current user')) return jsonify(message='OK', address=address.to_json()) @address.route('/all', methods=['GET']) @login_required def user_addresses(): addresses = current_user.addresses return jsonify(message='OK', addresses=[a.to_json() for a in addresses]) @address.route('/add', methods=['POST']) @login_required def address_add(): contact = request.json address = Models.Address( state=contact['state'], city=contact['city'], country=contact['country'], street1=contact['street1'], street2=contact.get('street2'), postcode=contact['postcode'], receiver=contact['receiver'], mobile_number=contact['mobile_number'] ) address.save() current_user.addresses.insert(0, address) current_user.save() return jsonify(message='OK', address_id=str(address.id)) @address.route('/del/<addr_id>', 
methods=['GET']) @login_required def address_del(addr_id): address = Models.Address.objects(id=addr_id).first_or_404() if address not in current_user.addresses: return jsonify(message='Failed', error=_('invalid address id for current user')) current_user.update(pull__addresses=address) address.delete() return jsonify(message='OK') @address.route('/update/<addr_id>', methods=['POST']) @login_required def address_update(addr_id): address = Models.Address.objects(id=addr_id).first_or_404() if address not in current_user.addresses: return jsonify(message='Failed', error=_('invalid address id for current user')) contact = request.json try: address.state=contact['state'] address.city=contact['city'] address.country=contact['country'] address.street1=contact['street1'] address.street2=contact.get('street2') address.postcode=contact['postcode'] address.receiver=contact['receiver'] address.mobile_number=contact['mobile_number'] except KeyError: return jsonify(message='Failed', error=_('invalid data')) address.save() return jsonify(message='OK', address_id=str(address.id))
1,318
335
{ "word": "Gradation", "definitions": [ "A scale or series of successive changes, stages, or degrees.", "An individual stage within a succession of changes, stages, or degrees.", "A minute variation in shade, tone, or colour." ], "parts-of-speech": "Noun" }
108
1,314
package cn.dreampie.captcha; import cn.dreampie.captcha.background.BackgroundFactory; import cn.dreampie.captcha.color.ColorFactory; import cn.dreampie.captcha.filter.FilterFactory; import cn.dreampie.captcha.filter.predefined.*; import cn.dreampie.captcha.font.FontFactory; import cn.dreampie.captcha.font.RandomFontFactory; import cn.dreampie.captcha.service.Captcha; import cn.dreampie.captcha.service.ConfigurableCaptchaService; import cn.dreampie.captcha.text.render.TextRenderer; import cn.dreampie.captcha.word.WordFactory; /** * Created by Dreampie on 16/1/6. */ public class CaptchaFactory { private ConfigurableCaptchaService captchaService = null; public CaptchaFactory() { captchaService = new ConfigurableCaptchaService(); } public Captcha getCaptcha() { return captchaService.getCaptcha(); } /** * 获取滤镜效果 * * @param captchaFilter * @return */ private FilterFactory getFilterFactory(CaptchaFilter captchaFilter) { FilterFactory filterFactory = null; switch (captchaFilter) { case Curves: filterFactory = new CurvesRippleFilterFactory(); break; case Marble: filterFactory = new MarbleRippleFilterFactory(); break; case Double: filterFactory = new DoubleRippleFilterFactory(); break; case Wobble: filterFactory = new WobbleRippleFilterFactory(); break; case Diffuse: filterFactory = new DiffuseRippleFilterFactory(); break; } return filterFactory; } public FilterFactory getFilterFactory() { return captchaService.getFilterFactory(); } public CaptchaFactory setFilterFactory(FilterFactory filterFactory) { captchaService.setFilterFactory(filterFactory); return this; } /** * 滤镜效果 * * @param captchaFilter * @return */ public CaptchaFactory setFilterFactory(CaptchaFilter captchaFilter) { captchaService.setFilterFactory(getFilterFactory(captchaFilter)); return this; } /** * 背景生成工厂 * * @return */ public BackgroundFactory getBackgroundFactory() { return captchaService.getBackgroundFactory(); } public CaptchaFactory setBackgroundFactory(BackgroundFactory backgroundFactory) { if 
(backgroundFactory.getColorFactory() == null) { backgroundFactory.setColorFactory(captchaService.getColorFactory()); } captchaService.setBackgroundFactory(backgroundFactory); return this; } public ColorFactory getColorFactory() { return captchaService.getColorFactory(); } public CaptchaFactory setColorFactory(ColorFactory colorFactory) { captchaService.setColorFactory(colorFactory); return this; } /** * 字体生成工厂 * * @return */ public FontFactory getFontFactory() { return captchaService.getFontFactory(); } public CaptchaFactory setFontFactory(RandomFontFactory fontFactory) { captchaService.setFontFactory(fontFactory); return this; } /** * 字符生成工厂 * * @return */ public WordFactory getWordFactory() { return captchaService.getWordFactory(); } public CaptchaFactory setWordFactory(WordFactory wordFactory) { captchaService.setWordFactory(wordFactory); return this; } /** * 文本位置 * * @return */ public TextRenderer getTextRenderer() { return captchaService.getTextRenderer(); } public CaptchaFactory setTextRenderer(TextRenderer textRenderer) { captchaService.setTextRenderer(textRenderer); return this; } /** * 验证码图片的大小 */ public CaptchaFactory setImgSize(int width, int height) { captchaService.setWidth(width); captchaService.setHeight(height); return this; } }
1,333
1,475
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.redis.internal.executor.hash; import static org.assertj.core.api.Assertions.assertThat; import java.util.HashMap; import java.util.Map; import java.util.function.Consumer; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import redis.clients.jedis.HostAndPort; import redis.clients.jedis.JedisCluster; import org.apache.geode.redis.ConcurrentLoopingThreads; import org.apache.geode.test.awaitility.GeodeAwaitility; import org.apache.geode.test.dunit.rules.MemberVM; import org.apache.geode.test.dunit.rules.RedisClusterStartupRule; public class HsetDUnitTest { @ClassRule public static RedisClusterStartupRule clusterStartUp = new RedisClusterStartupRule(4); private static final String LOCAL_HOST = "127.0.0.1"; private static final int HASH_SIZE = 1000; private static final int JEDIS_TIMEOUT = Math.toIntExact(GeodeAwaitility.getTimeout().toMillis()); private static JedisCluster jedis; private static MemberVM locator; private static MemberVM server1; private static MemberVM server2; private static MemberVM server3; @BeforeClass public static void classSetup() { locator = 
clusterStartUp.startLocatorVM(0); server1 = clusterStartUp.startRedisVM(1, locator.getPort()); server2 = clusterStartUp.startRedisVM(2, locator.getPort()); server3 = clusterStartUp.startRedisVM(3, locator.getPort()); int redisServerPort = clusterStartUp.getRedisPort(1); jedis = new JedisCluster(new HostAndPort(LOCAL_HOST, redisServerPort), JEDIS_TIMEOUT); } @Before public void testSetup() { clusterStartUp.flushAll(); } @AfterClass public static void tearDown() { jedis.close(); server1.stop(); server2.stop(); server3.stop(); } @Test public void shouldDistributeDataAmongCluster_givenMultipleClients() { String key = "key"; Map<String, String> testMap = makeHashMap(HASH_SIZE, "field-", "value-"); jedis.hset(key, testMap); Map<String, String> result = jedis.hgetAll(key); assertThat(result.keySet().toArray()).containsExactlyInAnyOrder(testMap.keySet().toArray()); assertThat(result.values().toArray()).containsExactlyInAnyOrder(testMap.values().toArray()); } @Test public void shouldDistributeDataAmongCluster_givenMultipleThreadsAddingDifferentDataToSameHashConcurrently() { String key = "key"; Map<String, String> testMap1 = makeHashMap(HASH_SIZE, "field1-", "value1-"); Map<String, String> testMap2 = makeHashMap(HASH_SIZE, "field2-", "value2-"); Map<String, String> wholeMap = new HashMap<>(); wholeMap.putAll(testMap1); wholeMap.putAll(testMap2); String[] testMap1Fields = testMap1.keySet().toArray(new String[] {}); String[] testMap2Fields = testMap2.keySet().toArray(new String[] {}); Consumer<Integer> hsetJedis1Consumer = makeHSetConsumer(testMap1, testMap1Fields, key, jedis); Consumer<Integer> hsetJedis2Consumer = makeHSetConsumer(testMap2, testMap2Fields, key, jedis); new ConcurrentLoopingThreads(HASH_SIZE, hsetJedis1Consumer, hsetJedis2Consumer).run(); Map<String, String> results = jedis.hgetAll(key); assertThat(results.keySet().toArray()).containsExactlyInAnyOrder(wholeMap.keySet().toArray()); 
assertThat(results.values().toArray()).containsExactlyInAnyOrder(wholeMap.values().toArray()); } @Test public void shouldDistributeDataAmongCluster_givenMultipleThreadsAddingSameDataToSameHashConcurrently() { String key = "key"; Map<String, String> testMap = makeHashMap(HASH_SIZE, "field-", "value-"); String[] testMapFields = testMap.keySet().toArray(new String[] {}); Consumer<Integer> hsetJedis1Consumer = makeHSetConsumer(testMap, testMapFields, key, jedis); Consumer<Integer> hsetJedis2Consumer = makeHSetConsumer(testMap, testMapFields, key, jedis); new ConcurrentLoopingThreads(HASH_SIZE, hsetJedis1Consumer, hsetJedis2Consumer).run(); Map<String, String> results = jedis.hgetAll(key); assertThat(results.keySet().toArray()).containsExactlyInAnyOrder(testMap.keySet().toArray()); assertThat(results.values().toArray()).containsExactlyInAnyOrder(testMap.values().toArray()); } @Test public void shouldDistributeDataAmongCluster_givenMultipleThreadsAddingToDifferentHashesConcurrently() { String key1 = "key1"; String key2 = "key2"; Map<String, String> testMap1 = makeHashMap(HASH_SIZE, "field1-", "value1-"); Map<String, String> testMap2 = makeHashMap(HASH_SIZE, "field2-", "value2-"); String[] testMap1Fields = testMap1.keySet().toArray(new String[] {}); String[] testMap2Fields = testMap2.keySet().toArray(new String[] {}); Consumer<Integer> hsetJedis1Consumer = makeHSetConsumer(testMap1, testMap1Fields, key1, jedis); Consumer<Integer> hsetJedis2Consumer = makeHSetConsumer(testMap2, testMap2Fields, key2, jedis); new ConcurrentLoopingThreads(HASH_SIZE, hsetJedis1Consumer, hsetJedis2Consumer).run(); Map<String, String> results1 = jedis.hgetAll(key1); Map<String, String> results2 = jedis.hgetAll(key2); assertThat(results1.keySet().toArray()).containsExactlyInAnyOrder(testMap1.keySet().toArray()); assertThat(results1.values().toArray()).containsExactlyInAnyOrder(testMap1.values().toArray()); 
assertThat(results2.values().toArray()).containsExactlyInAnyOrder(testMap2.values().toArray()); assertThat(results2.values().toArray()).containsExactlyInAnyOrder(testMap2.values().toArray()); } @Test public void shouldDistributeDataAmongCluster_givenMultipleThreadsAddingSameDataToSameSetConcurrently() { String key = "key"; Map<String, String> testMap = makeHashMap(HASH_SIZE, "field1-", "value1-"); String[] testMapFields = testMap.keySet().toArray(new String[] {}); Consumer<Integer> hsetJedis1Consumer = makeHSetConsumer(testMap, testMapFields, key, jedis); Consumer<Integer> hsetJedis1BConsumer = makeHSetConsumer(testMap, testMapFields, key, jedis); Consumer<Integer> hsetJedis2Consumer = makeHSetConsumer(testMap, testMapFields, key, jedis); Consumer<Integer> hsetJedis2BConsumer = makeHSetConsumer(testMap, testMapFields, key, jedis); new ConcurrentLoopingThreads(HASH_SIZE, hsetJedis1Consumer, hsetJedis1BConsumer, hsetJedis2Consumer, hsetJedis2BConsumer).run(); Map<String, String> results = jedis.hgetAll(key); assertThat(results.keySet().toArray()).containsExactlyInAnyOrder(testMap.keySet().toArray()); assertThat(results.values().toArray()).containsExactlyInAnyOrder(testMap.values().toArray()); } @Test public void shouldDistributeDataAmongCluster_givenMultipleThreadsAddingDifferentDataToSameSetConcurrently() { String key = "key1"; Map<String, String> testMap1 = makeHashMap(HASH_SIZE, "field1-", "value1-"); Map<String, String> testMap2 = makeHashMap(HASH_SIZE, "field2-", "value2-"); Map<String, String> wholeMap = new HashMap<>(); wholeMap.putAll(testMap1); wholeMap.putAll(testMap2); String[] testMap1Fields = testMap1.keySet().toArray(new String[] {}); String[] testMap2Fields = testMap2.keySet().toArray(new String[] {}); Consumer<Integer> consumer1 = makeHSetConsumer(testMap1, testMap1Fields, key, jedis); Consumer<Integer> consumer1B = makeHSetConsumer(testMap1, testMap1Fields, key, jedis); Consumer<Integer> consumer2 = makeHSetConsumer(testMap2, testMap2Fields, key, 
jedis); Consumer<Integer> consumer2B = makeHSetConsumer(testMap2, testMap2Fields, key, jedis); new ConcurrentLoopingThreads(HASH_SIZE, consumer1, consumer1B, consumer2, consumer2B).run(); Map<String, String> results = jedis.hgetAll(key); assertThat(results.keySet().toArray()).containsExactlyInAnyOrder(wholeMap.keySet().toArray()); assertThat(results.values().toArray()).containsExactlyInAnyOrder(wholeMap.values().toArray()); } private Consumer<Integer> makeHSetConsumer(Map<String, String> testMap, String[] fields, String hashKey, JedisCluster jedis) { Consumer<Integer> consumer = (i) -> { String field = fields[i]; jedis.hset(hashKey, field, testMap.get(field)); }; return consumer; } private Map<String, String> makeHashMap(int hashSize, String baseFieldName, String baseValueName) { Map<String, String> map = new HashMap<>(); for (int i = 0; i < hashSize; i++) { map.put(baseFieldName + i, baseValueName + i); } return map; } }
3,220
602
<gh_stars>100-1000 /*! * @file vm.cpp * Base "PS2 virtual machine" code. * Simulates the existence of select PS2 components, for inspection & debugging. * Not an emulator! */ #include "vm.h" #include "dmac.h" #include "common/log/log.h" #include "game/kernel/kscheme.h" #include <condition_variable> #include <mutex> namespace VM { bool use = true; // enable VM by default, since we're debugging right now namespace { Status status; std::condition_variable vm_init_cv; std::condition_variable vm_dead_cv; std::mutex status_mutex; int components = 0; } // namespace static void vm_change_status(Status new_status) { std::unique_lock<std::mutex> lk(status_mutex); status = new_status; } void wait_vm_init() { std::unique_lock<std::mutex> lk(status_mutex); vm_init_cv.wait(lk, [&] { return status == Status::Inited; }); } void wait_vm_dead() { if (status != Status::Kill && status != Status::Dead) { lg::warn("[VM] Dying without being killed! There are {} component(s) running", components); } std::unique_lock<std::mutex> lk(status_mutex); vm_dead_cv.wait(lk, [&] { return components == 0; }); } bool vm_want_exit() { return status == Status::Kill || status == Status::Dead; } void vm_prepare() { lg::debug("[VM] Preparing..."); vm_change_status(Status::Uninited); lg::debug("[VM] Prepared"); } void vm_init() { if (status != Status::Uninited) { lg::warn("[VM] unexpected status {}", status); } lg::debug("[VM] Inited"); vm_change_status(Status::Inited); vm_init_cv.notify_all(); } void vm_kill() { lg::debug("[VM] Killing"); vm_change_status(Status::Kill); // stall caller until VM is done dying wait_vm_dead(); vm_change_status(Status::Dead); } void subscribe_component() { if (status == Status::Dead) { throw std::runtime_error("[VM] Cannot add new components when VM is dead!"); } status_mutex.lock(); ++components; status_mutex.unlock(); // stall component until VM is ready if (status == Status::Uninited) { wait_vm_init(); } } void unsubscribe_component() { status_mutex.lock(); --components; 
status_mutex.unlock(); vm_dead_cv.notify_all(); } /*! * Return the GOAL pointer to a specified PS2 VM component based on the EE address. */ u64 get_vm_ptr(u32 ptr) { // currently, only DMAC and DMA channel banks are implemented. add more as necessary. if (ptr == 0x10008000) { return VM::dmac_ch[0].offset; } else if (ptr == 0x10009000) { return VM::dmac_ch[1].offset; } else if (ptr == 0x1000a000) { return VM::dmac_ch[2].offset; } else if (ptr == 0x1000b000) { return VM::dmac_ch[3].offset; } else if (ptr == 0x1000b400) { return VM::dmac_ch[4].offset; } else if (ptr == 0x1000c000) { return VM::dmac_ch[5].offset; } else if (ptr == 0x1000c400) { return VM::dmac_ch[6].offset; } else if (ptr == 0x1000c800) { return VM::dmac_ch[7].offset; } else if (ptr == 0x1000d000) { return VM::dmac_ch[8].offset; } else if (ptr == 0x1000d400) { return VM::dmac_ch[9].offset; } else if (ptr == 0x1000e000) { return VM::dmac.offset; } else { // return zero, using this result will segfault GOAL! // we could die immediately, but it might be worth it to keep going just on the off chance more // errors are reported, and not just only this one. lg::error("unknown EE register for VM at #x{:08x}", ptr); return 0; } } } // namespace VM
1,290
678
/** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/IMAVCore.framework/IMAVCore */ #import <IMAVCore/XXUnknownSuperclass.h> @class NSString; @interface IMAVCamera : XXUnknownSuperclass { id _internalDevice; // 4 = 0x4 } @property(readonly, assign, nonatomic) BOOL isSuspended; // G=0x43cd9; @property(readonly, assign, nonatomic) BOOL isShutterOpen; // G=0x43cdd; @property(readonly, assign, nonatomic) BOOL hasShutter; // G=0x43cd5; @property(readonly, assign, nonatomic) BOOL isWideScreen; // G=0x43cd1; @property(readonly, assign, nonatomic) int cameraStyle; // G=0x43ccd; @property(readonly, assign, nonatomic) NSString *uniqueID; // G=0x43ce5; @property(readonly, assign, nonatomic) NSString *name; // G=0x43cc1; - (id)description; // 0x43d35 - (void)dealloc; // 0x43ce9 // declared property getter: - (id)uniqueID; // 0x43ce5 - (id)_FTCamera; // 0x43ce1 // declared property getter: - (BOOL)isShutterOpen; // 0x43cdd // declared property getter: - (BOOL)isSuspended; // 0x43cd9 // declared property getter: - (BOOL)hasShutter; // 0x43cd5 // declared property getter: - (BOOL)isWideScreen; // 0x43cd1 // declared property getter: - (int)cameraStyle; // 0x43ccd // declared property getter: - (id)name; // 0x43cc1 - (id)init; // 0x43c95 - (id)_initWithFTCamera:(id)ftcamera; // 0x43c55 @end
542
311
package org.javacs.example; class CompleteParens { void test() { ret } void returnsVoid() { } String returnsString() { return ""; } String returnsArg(String arg) { return arg; } }
81
9,136
/* Bullet Continuous Collision Detection and Physics Library Copyright (c) 2015 Google Inc. http://bulletphysics.org This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. */ #include "NewtonsCradle.h" #include <cmath> #include <iterator> #include <vector> // TODO: Should I use another data structure? 
#include "btBulletDynamicsCommon.h" #include "LinearMath/btVector3.h" #include "LinearMath/btAlignedObjectArray.h" #include "../CommonInterfaces/CommonRigidBodyBase.h" #include "../CommonInterfaces/CommonParameterInterface.h" static btScalar gPendulaQty = 5; // Number of pendula in newton's cradle //TODO: This would actually be an Integer, but the Slider does not like integers, so I floor it when changed static btScalar gDisplacedPendula = 1; // number of displaced pendula //TODO: This is an int as well static btScalar gPendulaRestitution = 1; // pendula restitution when hitting against each other static btScalar gSphereRadius = 1; // pendula radius static btScalar gCurrentPendulumLength = 8; // current pendula length static btScalar gInitialPendulumLength = 8; // default pendula length static btScalar gDisplacementForce = 30; // default force to displace the pendula static btScalar gForceScalar = 0; // default force scalar to apply a displacement struct NewtonsCradleExample : public CommonRigidBodyBase { NewtonsCradleExample(struct GUIHelperInterface* helper) : CommonRigidBodyBase(helper) { } virtual ~NewtonsCradleExample() { } virtual void initPhysics(); virtual void renderScene(); virtual void createPendulum(btSphereShape* colShape, const btVector3& position, btScalar length, btScalar mass); virtual void changePendulaLength(btScalar length); virtual void changePendulaRestitution(btScalar restitution); virtual void stepSimulation(float deltaTime); virtual bool keyboardCallback(int key, int state); virtual void applyPendulumForce(btScalar pendulumForce); void resetCamera() { float dist = 41; float pitch = -35; float yaw = 52; float targetPos[3] = {0, 0.46, 0}; m_guiHelper->resetCamera(dist, yaw, pitch, targetPos[0], targetPos[1], targetPos[2]); } std::vector<btSliderConstraint*> constraints; // keep a handle to the slider constraints std::vector<btRigidBody*> pendula; // keep a handle to the pendula }; static NewtonsCradleExample* nex = NULL; void 
onPendulaLengthChanged(float pendulaLength, void* userPtr); // Change the pendula length void onPendulaRestitutionChanged(float pendulaRestitution, void* userPtr); // change the pendula restitution void applyForceWithForceScalar(float forceScalar); void NewtonsCradleExample::initPhysics() { { // create a slider to change the number of pendula SliderParams slider("Number of Pendula", &gPendulaQty); slider.m_minVal = 1; slider.m_maxVal = 50; slider.m_clampToIntegers = true; m_guiHelper->getParameterInterface()->registerSliderFloatParameter( slider); } { // create a slider to change the number of displaced pendula SliderParams slider("Number of Displaced Pendula", &gDisplacedPendula); slider.m_minVal = 0; slider.m_maxVal = 49; slider.m_clampToIntegers = true; m_guiHelper->getParameterInterface()->registerSliderFloatParameter( slider); } { // create a slider to change the pendula restitution SliderParams slider("Pendula Restitution", &gPendulaRestitution); slider.m_minVal = 0; slider.m_maxVal = 1; slider.m_clampToNotches = false; slider.m_callback = onPendulaRestitutionChanged; m_guiHelper->getParameterInterface()->registerSliderFloatParameter( slider); } { // create a slider to change the pendulum length SliderParams slider("Pendula Length", &gCurrentPendulumLength); slider.m_minVal = 0; slider.m_maxVal = 49; slider.m_clampToNotches = false; slider.m_callback = onPendulaLengthChanged; m_guiHelper->getParameterInterface()->registerSliderFloatParameter( slider); } { // create a slider to change the force to displace the lowest pendulum SliderParams slider("Displacement force", &gDisplacementForce); slider.m_minVal = 0.1; slider.m_maxVal = 200; slider.m_clampToNotches = false; m_guiHelper->getParameterInterface()->registerSliderFloatParameter( slider); } { // create a slider to apply the force by slider SliderParams slider("Apply displacement force", &gForceScalar); slider.m_minVal = -1; slider.m_maxVal = 1; slider.m_clampToNotches = false; 
m_guiHelper->getParameterInterface()->registerSliderFloatParameter( slider); } m_guiHelper->setUpAxis(1); createEmptyDynamicsWorld(); // create a debug drawer m_guiHelper->createPhysicsDebugDrawer(m_dynamicsWorld); if (m_dynamicsWorld->getDebugDrawer()) m_dynamicsWorld->getDebugDrawer()->setDebugMode( btIDebugDraw::DBG_DrawWireframe + btIDebugDraw::DBG_DrawContactPoints + btIDebugDraw::DBG_DrawConstraints + btIDebugDraw::DBG_DrawConstraintLimits); { // create the pendula starting at the indicated position below and where each pendulum has the following mass btScalar pendulumMass(1.f); btVector3 position(0.0f, 15.0f, 0.0f); // initial left-most pendulum position btQuaternion orientation(0, 0, 0, 1); // orientation of the pendula // Re-using the same collision is better for memory usage and performance btSphereShape* pendulumShape = new btSphereShape(gSphereRadius); m_collisionShapes.push_back(pendulumShape); for (int i = 0; i < std::floor(gPendulaQty); i++) { // create pendulum createPendulum(pendulumShape, position, gInitialPendulumLength, pendulumMass); // displace the pendula 1.05 sphere size, so that they all nearly touch (small spacings in between position.setX(position.x() - 2.1f * gSphereRadius); } } m_guiHelper->autogenerateGraphicsObjects(m_dynamicsWorld); } void NewtonsCradleExample::stepSimulation(float deltaTime) { applyForceWithForceScalar(gForceScalar); // apply force defined by apply force slider if (m_dynamicsWorld) { m_dynamicsWorld->stepSimulation(deltaTime); } } void NewtonsCradleExample::createPendulum(btSphereShape* colShape, const btVector3& position, btScalar length, btScalar mass) { // The pendulum looks like this (names when built): // O topSphere // | // O bottomSphere //create a dynamic pendulum btTransform startTransform; startTransform.setIdentity(); // position the top sphere above ground with a moving x position startTransform.setOrigin(position); startTransform.setRotation(btQuaternion(0, 0, 0, 1)); // zero rotation btRigidBody* 
topSphere = createRigidBody(mass, startTransform, colShape); // position the bottom sphere below the top sphere startTransform.setOrigin( btVector3(position.x(), btScalar(position.y() - length), position.z())); startTransform.setRotation(btQuaternion(0, 0, 0, 1)); // zero rotation btRigidBody* bottomSphere = createRigidBody(mass, startTransform, colShape); bottomSphere->setFriction(0); // we do not need friction here pendula.push_back(bottomSphere); // disable the deactivation when objects do not move anymore topSphere->setActivationState(DISABLE_DEACTIVATION); bottomSphere->setActivationState(DISABLE_DEACTIVATION); bottomSphere->setRestitution(gPendulaRestitution); // set pendula restitution //make the top sphere position "fixed" to the world by attaching with a point to point constraint // The pivot is defined in the reference frame of topSphere, so the attachment is exactly at the center of the topSphere btVector3 constraintPivot(btVector3(0.0f, 0.0f, 0.0f)); btPoint2PointConstraint* p2pconst = new btPoint2PointConstraint(*topSphere, constraintPivot); p2pconst->setDbgDrawSize(btScalar(5.f)); // set the size of the debug drawing // add the constraint to the world m_dynamicsWorld->addConstraint(p2pconst, true); //create constraint between spheres // this is represented by the constraint pivot in the local frames of reference of both constrained spheres // furthermore we need to rotate the constraint appropriately to orient it correctly in space btTransform constraintPivotInTopSphereRF, constraintPivotInBottomSphereRF; constraintPivotInTopSphereRF.setIdentity(); constraintPivotInBottomSphereRF.setIdentity(); // the slider constraint is x aligned per default, but we want it to be y aligned, therefore we rotate it btQuaternion qt; qt.setEuler(0, 0, -SIMD_HALF_PI); constraintPivotInTopSphereRF.setRotation(qt); //we use Y like up Axis constraintPivotInBottomSphereRF.setRotation(qt); //we use Y like up Axis //Obtain the position of topSphere in local reference frame of 
bottomSphere (the pivot is therefore in the center of topSphere) btVector3 topSphereInBottomSphereRF = (bottomSphere->getWorldTransform().inverse()( topSphere->getWorldTransform().getOrigin())); constraintPivotInBottomSphereRF.setOrigin(topSphereInBottomSphereRF); btSliderConstraint* sliderConst = new btSliderConstraint(*topSphere, *bottomSphere, constraintPivotInTopSphereRF, constraintPivotInBottomSphereRF, true); sliderConst->setDbgDrawSize(btScalar(5.f)); // set the size of the debug drawing // set limits // the initial setup of the constraint defines the origins of the limit dimensions, // therefore we set both limits directly to the current position of the topSphere sliderConst->setLowerLinLimit(btScalar(0)); sliderConst->setUpperLinLimit(btScalar(0)); sliderConst->setLowerAngLimit(btScalar(0)); sliderConst->setUpperAngLimit(btScalar(0)); constraints.push_back(sliderConst); // add the constraint to the world m_dynamicsWorld->addConstraint(sliderConst, true); } void NewtonsCradleExample::changePendulaLength(btScalar length) { btScalar lowerLimit = -gInitialPendulumLength; for (std::vector<btSliderConstraint*>::iterator sit = constraints.begin(); sit != constraints.end(); sit++) { btAssert((*sit) && "Null constraint"); //if the pendulum is being shortened beyond it's own length, we don't let the lower sphere to go past the upper one if (lowerLimit <= length) { (*sit)->setLowerLinLimit(length + lowerLimit); (*sit)->setUpperLinLimit(length + lowerLimit); } } } void NewtonsCradleExample::changePendulaRestitution(btScalar restitution) { for (std::vector<btRigidBody*>::iterator rit = pendula.begin(); rit != pendula.end(); rit++) { btAssert((*rit) && "Null constraint"); (*rit)->setRestitution(restitution); } } void NewtonsCradleExample::renderScene() { CommonRigidBodyBase::renderScene(); } bool NewtonsCradleExample::keyboardCallback(int key, int state) { //b3Printf("Key pressed: %d in state %d \n",key,state); //key 1, key 2, key 3 switch (key) { case '1' /*ASCII for 
1*/: { //assumption: Sphere are aligned in Z axis btScalar newLimit = btScalar(gCurrentPendulumLength + 0.1); changePendulaLength(newLimit); gCurrentPendulumLength = newLimit; b3Printf("Increase pendulum length to %f", gCurrentPendulumLength); return true; } case '2' /*ASCII for 2*/: { //assumption: Sphere are aligned in Z axis btScalar newLimit = btScalar(gCurrentPendulumLength - 0.1); //is being shortened beyond it's own length, we don't let the lower sphere to go over the upper one if (0 <= newLimit) { changePendulaLength(newLimit); gCurrentPendulumLength = newLimit; } b3Printf("Decrease pendulum length to %f", gCurrentPendulumLength); return true; } case '3' /*ASCII for 3*/: { applyPendulumForce(gDisplacementForce); return true; } } return false; } void NewtonsCradleExample::applyPendulumForce(btScalar pendulumForce) { if (pendulumForce != 0) { b3Printf("Apply %f to pendulum", pendulumForce); for (int i = 0; i < gDisplacedPendula; i++) { if (gDisplacedPendula >= 0 && gDisplacedPendula <= gPendulaQty) pendula[i]->applyCentralForce(btVector3(pendulumForce, 0, 0)); } } } // GUI parameter modifiers void onPendulaLengthChanged(float pendulaLength, void*) { if (nex) { nex->changePendulaLength(pendulaLength); //b3Printf("Pendula length changed to %f \n",sliderValue ); } } void onPendulaRestitutionChanged(float pendulaRestitution, void*) { if (nex) { nex->changePendulaRestitution(pendulaRestitution); } } void applyForceWithForceScalar(float forceScalar) { if (nex) { btScalar appliedForce = forceScalar * gDisplacementForce; if (fabs(gForceScalar) < 0.2f) gForceScalar = 0; nex->applyPendulumForce(appliedForce); } } CommonExampleInterface* ET_NewtonsCradleCreateFunc( CommonExampleOptions& options) { nex = new NewtonsCradleExample(options.m_guiHelper); return nex; }
4,527
412
/*******************************************************************\ Module: Abstract Interpretation Author: <NAME>, <EMAIL> \*******************************************************************/ /// \file /// Abstract Interpretation Storage /// /// An interface for the storage of domains in the abstract interpreter. /// Conceptually this is a map from history -> domain. /// However in some cases we may wish to share domains between locations /// so a simple map interface is not sufficient. /// Also any domain that has not been previously accessed or stored is /// automatically bottom. /// There is a constant interface which returns shared pointers to const /// domains, allowing these to either be stored domains, or things created /// on-the-fly. The non-constant interace returns a reference as it can /// create and initialise the domains as needed. #ifndef CPROVER_ANALYSES_AI_STORAGE_H #define CPROVER_ANALYSES_AI_STORAGE_H #include <util/deprecate.h> #include <goto-programs/goto_program.h> #include "ai_domain.h" #include "ai_history.h" /// This is the basic interface for storing domains. /// The abstract interpreters should use this interface by default. class ai_storage_baset { protected: ai_storage_baset() { } public: virtual ~ai_storage_baset() { } typedef ai_domain_baset statet; typedef std::shared_ptr<statet> state_ptrt; typedef std::shared_ptr<const statet> cstate_ptrt; typedef ai_history_baset tracet; typedef ai_history_baset::trace_ptrt trace_ptrt; typedef ai_history_baset::trace_sett trace_sett; typedef std::shared_ptr<trace_sett> trace_set_ptrt; typedef std::shared_ptr<const trace_sett> ctrace_set_ptrt; typedef goto_programt::const_targett locationt; /// Returns all of the histories that have reached /// the start of the instruction. virtual ctrace_set_ptrt abstract_traces_before(locationt l) const = 0; /// Non-modifying access to the stored domains, /// used in the ai_baset public interface. 
/// In the case of un-analysed locals this should create a domain /// The history version is the primary version, the location one may /// simply join all of the histories that reach the given location virtual cstate_ptrt abstract_state_before( trace_ptrt p, const ai_domain_factory_baset &fac) const = 0; virtual cstate_ptrt abstract_state_before( locationt l, const ai_domain_factory_baset &fac) const = 0; /// Look up the analysis state for a given history, /// instantiating a new domain if required. virtual statet & get_state(trace_ptrt p, const ai_domain_factory_baset &fac) = 0; /// Reset the abstract state virtual void clear() { return; } /// Notifies the storage that the user will not need the domain object(s) /// for this location. After this has been called abstract_state_before may /// return an over-approximation of the value and get_state may give an /// under-approximation (forcing recomputation). /// If there are multiple histories that reach this location all will be /// affected virtual void prune(locationt l) { return; } }; // There are a number of options for how to store the history objects. // This implements a simple one. 
It is not in ai_storage_baset so that // storage implementations can implement their own, more specific, approaches class trace_map_storaget : public ai_storage_baset { protected: typedef std::map<locationt, trace_set_ptrt> trace_mapt; trace_mapt trace_map; // This retains one part of a shared_ptr to the history object void register_trace(trace_ptrt p) { // Save the trace_ptrt trace_mapt::iterator it = trace_map.find(p->current_location()); if(it == trace_map.end()) { trace_set_ptrt s(new trace_sett()); auto ins = trace_map.emplace(p->current_location(), s); CHECK_RETURN(ins.second); it = ins.first; } // Strictly this should be "it->second points to a trace_set" POSTCONDITION(it->second != nullptr); it->second->insert(p); return; } public: ctrace_set_ptrt abstract_traces_before(locationt l) const override { trace_mapt::const_iterator it = trace_map.find(l); if(it == trace_map.end()) return trace_set_ptrt(new trace_sett()); // Strictly this should be "it->second points to a trace_set" POSTCONDITION(it->second != nullptr); return it->second; } void clear() override { ai_storage_baset::clear(); trace_map.clear(); return; } }; // A couple of older domains make direct use of the state map class invariant_propagationt; class dependence_grapht; class variable_sensitivity_dependence_grapht; /// The most conventional storage; one domain per location class location_sensitive_storaget : public trace_map_storaget { protected: /// This is location sensitive so we store one domain per location typedef std::unordered_map< locationt, state_ptrt, const_target_hash, pointee_address_equalt> state_mapt; state_mapt state_map; // Support some older domains that explicitly iterate across the state map friend invariant_propagationt; friend dependence_grapht; friend variable_sensitivity_dependence_grapht; // Based on dependence_grapht state_mapt &internal(void) { return state_map; } public: cstate_ptrt abstract_state_before( trace_ptrt p, const ai_domain_factory_baset &fac) const override 
{ return abstract_state_before(p->current_location(), fac); } cstate_ptrt abstract_state_before( locationt l, const ai_domain_factory_baset &fac) const override { typename state_mapt::const_iterator it = state_map.find(l); if(it == state_map.end()) return fac.make(l); return it->second; } statet &get_state(trace_ptrt p, const ai_domain_factory_baset &fac) override { register_trace(p); return get_state(p->current_location(), fac); } // For backwards compatability // Care should be exercised in using this. It is possible to create domains // without any corresponding history object(s). This can lead to somewhat // unexpected behaviour depending on which APIs you use. DEPRECATED(SINCE(2019, 08, 01, "use get_state(trace_ptrt p) instead")) statet &get_state(locationt l, const ai_domain_factory_baset &fac) { typename state_mapt::const_iterator it = state_map.find(l); if(it == state_map.end()) { std::shared_ptr<statet> d(fac.make(l)); auto p = state_map.emplace(l, d); CHECK_RETURN(p.second); it = p.first; } return *(it->second); } void clear() override { trace_map_storaget::clear(); state_map.clear(); return; } }; // The most precise form of storage class history_sensitive_storaget : public trace_map_storaget { protected: typedef std::map<trace_ptrt, state_ptrt, ai_history_baset::compare_historyt> domain_mapt; domain_mapt domain_map; public: cstate_ptrt abstract_state_before( trace_ptrt p, const ai_domain_factory_baset &fac) const override { auto it = domain_map.find(p); if(it == domain_map.end()) return fac.make(p->current_location()); return it->second; } cstate_ptrt abstract_state_before( locationt t, const ai_domain_factory_baset &fac) const override { auto traces = abstract_traces_before(t); if(traces->size() == 0) { return fac.make(t); } else if(traces->size() == 1) { auto it = domain_map.find(*(traces->begin())); DATA_INVARIANT( it != domain_map.end(), "domain_map must be in sync with trace_map"); return it->second; } else { // Need to merge all of the traces that 
reach this location auto res = fac.make(t); for(auto p : *traces) { auto it = domain_map.find(p); DATA_INVARIANT( it != domain_map.end(), "domain_map must be in sync with trace_map"); fac.merge(*res, *(it->second), p, p); } return cstate_ptrt(res.release()); } } statet &get_state(trace_ptrt p, const ai_domain_factory_baset &fac) override { register_trace(p); auto it = domain_map.find(p); if(it == domain_map.end()) { std::shared_ptr<statet> d(fac.make(p->current_location())); auto jt = domain_map.emplace(p, d); CHECK_RETURN(jt.second); it = jt.first; } return *(it->second); } void clear() override { trace_map_storaget::clear(); domain_map.clear(); return; } }; #endif
3,010
7,312
<filename>CYLTabBarController/CYLTabBar+CYLTabBarControllerExtention.h /* //  CYLTabBarController //  CYLTabBarController // //  Created by 微博@iOS程序犭袁 ( http://weibo.com/luohanchenyilong/ ) on 03/06/19. //  Copyright © 2019 https://github.com/ChenYilong . All rights reserved. */ #import <UIKit/UIKit.h> #import "CYLTabBar.h" NS_ASSUME_NONNULL_BEGIN @interface CYLTabBar (CYLTabBarControllerExtention) - (NSArray<UIControl *> *)cyl_visibleControls; - (NSArray<UIControl *> *)cyl_subTabBarButtons; - (NSArray<UIControl *> *)cyl_subTabBarButtonsWithoutPlusButton; - (UIControl *)cyl_tabBarButtonWithTabIndex:(NSUInteger)tabIndex; - (void)cyl_animationLottieImageWithSelectedControl:(UIControl *)selectedControl lottieURL:(NSURL *)lottieURL size:(CGSize)size defaultSelected:(BOOL)defaultSelected; - (void)cyl_stopAnimationOfAllLottieView; - (NSArray *)cyl_originalTabBarButtons; - (BOOL)cyl_hasPlusChildViewController; @end NS_ASSUME_NONNULL_END
507
335
/* SPDX-License-Identifier: BSL-1.0 OR BSD-3-Clause */ #ifndef MPT_BASE_TESTS_UUID_HPP #define MPT_BASE_TESTS_UUID_HPP #include "mpt/base/detect.hpp" #include "mpt/base/namespace.hpp" #include "mpt/random/default_engines.hpp" #include "mpt/random/device.hpp" #include "mpt/string/types.hpp" #include "mpt/test/test.hpp" #include "mpt/test/test_macros.hpp" #include "mpt/uuid/guid.hpp" #include "mpt/uuid/uuid.hpp" #include <cstddef> #include <cstring> namespace mpt { inline namespace MPT_INLINE_NS { namespace tests { namespace uuid { #if MPT_COMPILER_CLANG #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wglobal-constructors" #endif MPT_TEST_GROUP_INLINE("mpt/uuid") #if MPT_COMPILER_CLANG #pragma clang diagnostic pop #endif { using namespace mpt::uuid_literals; MPT_TEST_EXPECT_EQUAL(mpt::UUID(0x2ed6593au, 0xdfe6, 0x4cf8, 0xb2e575ad7f600c32ull).ToUString(), MPT_USTRING("2ed6593a-dfe6-4cf8-b2e5-75ad7f600c32")); #if MPT_OS_WINDOWS constexpr mpt::UUID uuid_tmp = "2ed6593a-dfe6-4cf8-b2e5-75ad7f600c32"_uuid; MPT_TEST_EXPECT_EQUAL(mpt::UUID(0x2ed6593au, 0xdfe6, 0x4cf8, 0xb2e575ad7f600c32ull), uuid_tmp); MPT_TEST_EXPECT_EQUAL(mpt::UUID(0x2ed6593au, 0xdfe6, 0x4cf8, 0xb2e575ad7f600c32ull), mpt::UUID(mpt::StringToGUID(TEXT("{2ed6593a-dfe6-4cf8-b2e5-75ad7f600c32}")))); MPT_TEST_EXPECT_EQUAL(mpt::UUID(0x2ed6593au, 0xdfe6, 0x4cf8, 0xb2e575ad7f600c32ull), mpt::UUID(mpt::StringToCLSID(TEXT("{2ed6593a-dfe6-4cf8-b2e5-75ad7f600c32}")))); MPT_TEST_EXPECT_EQUAL(mpt::UUID(0x00112233u, 0x4455, 0x6677, 0x8899AABBCCDDEEFFull), mpt::UUID(mpt::StringToGUID(TEXT("{00112233-4455-6677-8899-AABBCCDDEEFF}")))); MPT_TEST_EXPECT_EQUAL(mpt::UUID(0x00112233u, 0x4455, 0x6677, 0xC899AABBCCDDEEFFull), mpt::UUID(mpt::StringToGUID(TEXT("{00112233-4455-6677-C899-AABBCCDDEEFF}")))); MPT_TEST_EXPECT_EQUAL(mpt::GUIDToString(mpt::UUID(0x00112233u, 0x4455, 0x6677, 0x8899AABBCCDDEEFFull)), TEXT("{00112233-4455-6677-8899-AABBCCDDEEFF}")); MPT_TEST_EXPECT_EQUAL(mpt::GUIDToString(mpt::UUID(0x00112233u, 
0x4455, 0x6677, 0xC899AABBCCDDEEFFull)), TEXT("{00112233-4455-6677-C899-AABBCCDDEEFF}")); #endif // MPT_OS_WINDOWS mpt::sane_random_device rd; mpt::good_engine prng = mpt::make_prng<mpt::good_engine>(rd); #if MPT_OS_WINDOWS MPT_TEST_EXPECT_EQUAL(mpt::IsValid(mpt::CreateGUID()), true); { mpt::UUID uuid = mpt::UUID::Generate(prng); MPT_TEST_EXPECT_EQUAL(uuid, mpt::UUID::FromString(mpt::UUID(uuid).ToUString())); MPT_TEST_EXPECT_EQUAL(uuid, mpt::UUID(mpt::StringToGUID(mpt::GUIDToString(uuid)))); MPT_TEST_EXPECT_EQUAL(uuid, mpt::UUID(mpt::StringToIID(mpt::IIDToString(uuid)))); MPT_TEST_EXPECT_EQUAL(uuid, mpt::UUID(mpt::StringToCLSID(mpt::CLSIDToString(uuid)))); } { GUID guid = mpt::UUID::Generate(prng); MPT_TEST_EXPECT_EQUAL(IsEqualGUID(guid, static_cast<GUID>(mpt::UUID::FromString(mpt::UUID(guid).ToUString()))), TRUE); MPT_TEST_EXPECT_EQUAL(IsEqualGUID(guid, mpt::StringToGUID(mpt::GUIDToString(guid))), TRUE); MPT_TEST_EXPECT_EQUAL(IsEqualGUID(guid, mpt::StringToIID(mpt::IIDToString(guid))), TRUE); MPT_TEST_EXPECT_EQUAL(IsEqualGUID(guid, mpt::StringToCLSID(mpt::CLSIDToString(guid))), TRUE); } #endif // MPT_OS_WINDOWS MPT_TEST_EXPECT_EQUAL(mpt::UUID::Generate(prng).IsValid(), true); MPT_TEST_EXPECT_EQUAL(mpt::UUID::GenerateLocalUseOnly(prng).IsValid(), true); MPT_TEST_EXPECT_EQUAL(mpt::UUID::Generate(prng) != mpt::UUID::Generate(prng), true); mpt::UUID a = mpt::UUID::Generate(prng); MPT_TEST_EXPECT_EQUAL(a, mpt::UUID::FromString(a.ToUString())); std::byte uuiddata[16]{}; for (std::size_t i = 0; i < 16; ++i) { uuiddata[i] = mpt::byte_cast<std::byte>(static_cast<uint8>(i)); } static_assert(sizeof(mpt::UUID) == 16); mpt::UUIDbin uuid2; std::memcpy(&uuid2, uuiddata, 16); MPT_TEST_EXPECT_EQUAL(mpt::UUID(uuid2).ToUString(), MPT_USTRING("00010203-0405-0607-0809-0a0b0c0d0e0f")); constexpr mpt::UUID uuid3 = "2ed6593a-dfe6-4cf8-b2e5-75ad7f600c32"_uuid; MPT_TEST_EXPECT_EQUAL(mpt::UUID(0x2ed6593au, 0xdfe6, 0x4cf8, 0xb2e575ad7f600c32ull), uuid3); } } // namespace uuid } // namespace 
tests } // namespace MPT_INLINE_NS } // namespace mpt #endif // MPT_BASE_TESTS_UUID_HPP
2,137
14,564
package com.alibaba.datax.plugin.writer.conn; import com.alibaba.datax.common.plugin.RecordSender; import com.alibaba.datax.plugin.writer.util.TSDBUtils; import com.alibaba.fastjson.JSON; import org.apache.commons.lang3.StringUtils; import java.util.List; /** * Copyright @ 2019 alibaba.com * All right reserved. * Function:TSDB Connection * * @author <NAME> * @since 2019-03-29 */ public class TSDBConnection implements Connection4TSDB { private String address; public TSDBConnection(String address) { if (StringUtils.isBlank(address)) { throw new RuntimeException("TSDBConnection init failed because address is blank!"); } this.address = address; } @Override public String address() { return address; } @Override public String version() { return TSDBUtils.version(address); } @Override public String config() { return TSDBUtils.config(address); } @Override public String[] getSupportVersionPrefix() { return new String[]{"2.4.1", "2.4.2"}; } @Override public void sendDPs(String metric, Long start, Long end, RecordSender recordSender) { throw new RuntimeException("Not support yet!"); } @Override public boolean put(DataPoint4TSDB dp) { return TSDBUtils.put(address, dp); } @Override public boolean put(List<DataPoint4TSDB> dps) { return TSDBUtils.put(address, dps); } @Override public boolean put(String dps) { return TSDBUtils.put(address, dps); } @Override public boolean isSupported() { String versionJson = version(); if (StringUtils.isBlank(versionJson)) { throw new RuntimeException("Cannot get the version!"); } String version = JSON.parseObject(versionJson).getString("version"); if (StringUtils.isBlank(version)) { return false; } for (String prefix : getSupportVersionPrefix()) { if (version.startsWith(prefix)) { return true; } } return false; } }
884
1,144
<reponame>dram/metasfresh package de.metas.contracts.interceptor; import static org.adempiere.model.InterfaceWrapperHelper.newInstance; import static org.adempiere.model.InterfaceWrapperHelper.save; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; import org.adempiere.exceptions.AdempiereException; import org.adempiere.service.ClientId; import org.adempiere.service.ISysConfigBL; import org.adempiere.test.AdempiereTestHelper; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import de.metas.contracts.model.I_C_Flatrate_Term; import de.metas.contracts.model.X_C_Flatrate_Term; import de.metas.contracts.order.ContractOrderService; import de.metas.organization.OrgId; import de.metas.util.Services; /* * #%L * de.metas.contracts * %% * Copyright (C) 2017 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. 
* #L% */ public class C_Flatrate_TermTest { @BeforeEach public void init() { AdempiereTestHelper.get().init(); } @Test public void prohibitReactivatingUnlessAllowed_wrong_term_throws_exception() { setupAllowProcurementReactivate(); final I_C_Flatrate_Term emptyTerm = newInstance(I_C_Flatrate_Term.class); invokeMethodAndAssertExceptionThrown(emptyTerm); final I_C_Flatrate_Term subscriptionTerm = newInstance(I_C_Flatrate_Term.class); subscriptionTerm.setType_Conditions(X_C_Flatrate_Term.TYPE_CONDITIONS_Subscription); save(subscriptionTerm); invokeMethodAndAssertExceptionThrown(subscriptionTerm); } public void invokeMethodAndAssertExceptionThrown(final I_C_Flatrate_Term term) { try { final C_Flatrate_Term flatrateTermInterceptor = new C_Flatrate_Term(new ContractOrderService()); flatrateTermInterceptor.prohibitReactivatingUnlessAllowed(term); fail("Expected an AdempiereExeception"); } catch (AdempiereException ae) { assertThat(ae.getMessage()).isEqualTo(MainValidator.MSG_FLATRATE_REACTIVATE_DOC_ACTION_NOT_SUPPORTED_0P.toAD_Message()); } } @Test public void prohibitReactivatingUnlessAllowed_procurement_term_allowed() { setupAllowProcurementReactivate(); final I_C_Flatrate_Term term = newInstance(I_C_Flatrate_Term.class); term.setType_Conditions(X_C_Flatrate_Term.TYPE_CONDITIONS_Procurement); save(term); final C_Flatrate_Term flatrateTermInterceptor = new C_Flatrate_Term(new ContractOrderService()); flatrateTermInterceptor.prohibitReactivatingUnlessAllowed(term); // shall return with no exception } public void setupAllowProcurementReactivate() { final String sysConfigName = "de.metas.contracts.C_Flatrate_Term.allow_reactivate_" + X_C_Flatrate_Term.TYPE_CONDITIONS_Procurement; Services.get(ISysConfigBL.class) .setValue(sysConfigName, true, ClientId.SYSTEM, OrgId.ANY); } }
1,177
3,102
// RUN: %clang_cc1 -fcxx-exceptions -fexceptions -analyze -analyzer-checker=debug.DumpCFG -analyzer-config cfg-lifetime=true,cfg-temporary-dtors=false,cfg-rich-constructors=false -analyzer-config cfg-implicit-dtors=false %s > %t 2>&1 // RUN: FileCheck --input-file=%t %s extern bool UV; class A { public: // CHECK: [B2 (ENTRY)] // CHECK-NEXT: Succs (1): B1 // CHECK: [B1] // CHECK-NEXT: 1: true // CHECK-NEXT: 2: UV // CHECK-NEXT: 3: [B1.2] = [B1.1] // CHECK-NEXT: Preds (1): B2 // CHECK-NEXT: Succs (1): B0 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 A() { UV = true; } // CHECK: [B3 (ENTRY)] // CHECK-NEXT: Succs (1): B2 // CHECK: [B1] // CHECK-NEXT: 1: 0 // CHECK-NEXT: 2: this // CHECK-NEXT: 3: [B1.2]->p // CHECK-NEXT: 4: [B1.3] (ImplicitCastExpr, LValueToRValue, int *) // CHECK-NEXT: 5: *[B1.4] // CHECK-NEXT: 6: [B1.5] = [B1.1] // CHECK-NEXT: Preds (1): B2 // CHECK-NEXT: Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: 1: this // CHECK-NEXT: 2: [B2.1]->p // CHECK-NEXT: 3: [B2.2] (ImplicitCastExpr, LValueToRValue, int *) // CHECK-NEXT: 4: [B2.3] (ImplicitCastExpr, PointerToBoolean, _Bool) // CHECK-NEXT: T: if [B2.4] // CHECK-NEXT: Preds (1): B3 // CHECK-NEXT: Succs (2): B1 B0 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (2): B1 B2 ~A() { if (p) *p = 0; } // CHECK: [B2 (ENTRY)] // CHECK-NEXT: Succs (1): B1 // CHECK: [B1] // CHECK-NEXT: 1: 1 // CHECK-NEXT: 2: return [B1.1]; // CHECK-NEXT: Preds (1): B2 // CHECK-NEXT: Succs (1): B0 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 operator int() const { return 1; } int *p; }; // CHECK: [B2 (ENTRY)] // CHECK-NEXT: Succs (1): B1 // CHECK: [B1] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A a; // CHECK-NEXT: 3: a // CHECK-NEXT: 4: [B1.3] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 5: const A &b = a; // CHECK-NEXT: 6: A() (CXXConstructExpr, class A) // CHECK-NEXT: 7: [B1.6] (BindTemporary) // CHECK-NEXT: 8: [B1.7] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 9: [B1.8] // CHECK-NEXT: 10: 
const A &c = A(); // CHECK-NEXT: 11: [B1.10] (Lifetime ends) // CHECK-NEXT: 12: [B1.2] (Lifetime ends) // CHECK-NEXT: 13: [B1.5] (Lifetime ends) // CHECK-NEXT: Preds (1): B2 // CHECK-NEXT: Succs (1): B0 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 void test_const_ref() { A a; const A &b = a; const A &c = A(); } // CHECK: [B2 (ENTRY)] // CHECK-NEXT: Succs (1): B1 // CHECK: [B1] // CHECK-NEXT: 1: (CXXConstructExpr, class A [2]) // CHECK-NEXT: 2: A a[2]; // CHECK-NEXT: 3: (CXXConstructExpr, class A [0]) // CHECK-NEXT: 4: A b[0]; // lifetime of a ends when its destructors are run // CHECK-NEXT: 5: [B1.2] (Lifetime ends) // lifetime of b ends when its storage duration ends // CHECK-NEXT: 6: [B1.4] (Lifetime ends) // CHECK-NEXT: Preds (1): B2 // CHECK-NEXT: Succs (1): B0 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 void test_array() { A a[2]; A b[0]; } // CHECK: [B2 (ENTRY)] // CHECK-NEXT: Succs (1): B1 // CHECK: [B1] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A a; // CHECK-NEXT: 3: (CXXConstructExpr, class A) // CHECK-NEXT: 4: A c; // CHECK-NEXT: 5: (CXXConstructExpr, class A) // CHECK-NEXT: 6: A d; // CHECK-NEXT: 7: [B1.6] (Lifetime ends) // CHECK-NEXT: 8: [B1.4] (Lifetime ends) // CHECK-NEXT: 9: (CXXConstructExpr, class A) // CHECK-NEXT: 10: A b; // CHECK-NEXT: 11: [B1.10] (Lifetime ends) // CHECK-NEXT: 12: [B1.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B2 // CHECK-NEXT: Succs (1): B0 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 void test_scope() { A a; { A c; A d; } A b; } // CHECK: [B4 (ENTRY)] // CHECK-NEXT: Succs (1): B3 // CHECK: [B1] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A c; // CHECK-NEXT: 3: [B1.2] (Lifetime ends) // CHECK-NEXT: 4: [B3.4] (Lifetime ends) // CHECK-NEXT: 5: [B3.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B3 // CHECK-NEXT: Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: 1: return; // CHECK-NEXT: 2: [B3.4] (Lifetime ends) // CHECK-NEXT: 3: [B3.2] (Lifetime ends) // CHECK-NEXT: Preds 
(1): B3 // CHECK-NEXT: Succs (1): B0 // CHECK: [B3] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A a; // CHECK-NEXT: 3: (CXXConstructExpr, class A) // CHECK-NEXT: 4: A b; // CHECK-NEXT: 5: UV // CHECK-NEXT: 6: [B3.5] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B3.6] // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (2): B2 B1 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (2): B1 B2 void test_return() { A a; A b; if (UV) return; A c; } // CHECK: [B5 (ENTRY)] // CHECK-NEXT: Succs (1): B4 // CHECK: [B1] // CHECK-NEXT: 1: [B4.6] (Lifetime ends) // CHECK-NEXT: 2: [B4.2] (Lifetime ends) // CHECK-NEXT: Preds (2): B2 B3 // CHECK-NEXT: Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A c; // CHECK-NEXT: 3: [B2.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (1): B1 // CHECK: [B3] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A c; // CHECK-NEXT: 3: [B3.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (1): B1 // CHECK: [B4] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A a; // CHECK-NEXT: 3: a // CHECK-NEXT: 4: [B4.3] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 5: [B4.4] (CXXConstructExpr, class A) // CHECK-NEXT: 6: A b = a; // CHECK-NEXT: 7: b // CHECK-NEXT: 8: [B4.7] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 9: [B4.8].operator int // CHECK-NEXT: 10: [B4.8] // CHECK-NEXT: 11: [B4.10] (ImplicitCastExpr, UserDefinedConversion, int) // CHECK-NEXT: 12: [B4.11] (ImplicitCastExpr, IntegralToBoolean, _Bool) // CHECK-NEXT: T: if [B4.12] // CHECK-NEXT: Preds (1): B5 // CHECK-NEXT: Succs (2): B3 B2 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 void test_if_implicit_scope() { A a; if (A b = a) A c; else A c; } // CHECK: [B9 (ENTRY)] // CHECK-NEXT: Succs (1): B8 // CHECK: [B1] // CHECK-NEXT: 1: [B8.6] (Lifetime ends) // CHECK-NEXT: 2: (CXXConstructExpr, class A) // CHECK-NEXT: 3: A e; // CHECK-NEXT: 4: 
[B1.3] (Lifetime ends) // CHECK-NEXT: 5: [B8.2] (Lifetime ends) // CHECK-NEXT: Preds (2): B2 B5 // CHECK-NEXT: Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A d; // CHECK-NEXT: 3: [B2.2] (Lifetime ends) // CHECK-NEXT: 4: [B4.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (1): B1 // CHECK: [B3] // CHECK-NEXT: 1: return; // CHECK-NEXT: 2: [B4.2] (Lifetime ends) // CHECK-NEXT: 3: [B8.6] (Lifetime ends) // CHECK-NEXT: 4: [B8.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (1): B0 // CHECK: [B4] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A c; // CHECK-NEXT: 3: UV // CHECK-NEXT: 4: [B4.3] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B4.4] // CHECK-NEXT: Preds (1): B8 // CHECK-NEXT: Succs (2): B3 B2 // CHECK: [B5] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A d; // CHECK-NEXT: 3: [B5.2] (Lifetime ends) // CHECK-NEXT: 4: [B7.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B7 // CHECK-NEXT: Succs (1): B1 // CHECK: [B6] // CHECK-NEXT: 1: return; // CHECK-NEXT: 2: [B7.2] (Lifetime ends) // CHECK-NEXT: 3: [B8.6] (Lifetime ends) // CHECK-NEXT: 4: [B8.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B7 // CHECK-NEXT: Succs (1): B0 // CHECK: [B7] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A c; // CHECK-NEXT: 3: UV // CHECK-NEXT: 4: [B7.3] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B7.4] // CHECK-NEXT: Preds (1): B8 // CHECK-NEXT: Succs (2): B6 B5 // CHECK: [B8] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A a; // CHECK-NEXT: 3: a // CHECK-NEXT: 4: [B8.3] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 5: [B8.4] (CXXConstructExpr, class A) // CHECK-NEXT: 6: A b = a; // CHECK-NEXT: 7: b // CHECK-NEXT: 8: [B8.7] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 9: [B8.8].operator int // CHECK-NEXT: 10: [B8.8] // CHECK-NEXT: 11: [B8.10] (ImplicitCastExpr, 
UserDefinedConversion, int) // CHECK-NEXT: 12: [B8.11] (ImplicitCastExpr, IntegralToBoolean, _Bool) // CHECK-NEXT: T: if [B8.12] // CHECK-NEXT: Preds (1): B9 // CHECK-NEXT: Succs (2): B7 B4 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (3): B1 B3 B6 void test_if_jumps() { A a; if (A b = a) { A c; if (UV) return; A d; } else { A c; if (UV) return; A d; } A e; } // CHECK: [B6 (ENTRY)] // CHECK-NEXT: Succs (1): B5 // CHECK: [B1] // CHECK-NEXT: 1: [B4.4] (Lifetime ends) // CHECK-NEXT: 2: [B5.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: Preds (1): B3 // CHECK-NEXT: Succs (1): B4 // CHECK: [B3] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A c; // CHECK-NEXT: 3: [B3.2] (Lifetime ends) // CHECK-NEXT: 4: [B4.4] (Lifetime ends) // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (1): B2 // CHECK: [B4] // CHECK-NEXT: 1: a // CHECK-NEXT: 2: [B4.1] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 3: [B4.2] (CXXConstructExpr, class A) // CHECK-NEXT: 4: A b = a; // CHECK-NEXT: 5: b // CHECK-NEXT: 6: [B4.5] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 7: [B4.6].operator int // CHECK-NEXT: 8: [B4.6] // CHECK-NEXT: 9: [B4.8] (ImplicitCastExpr, UserDefinedConversion, int) // CHECK-NEXT: 10: [B4.9] (ImplicitCastExpr, IntegralToBoolean, _Bool) // CHECK-NEXT: T: while [B4.10] // CHECK-NEXT: Preds (2): B2 B5 // CHECK-NEXT: Succs (2): B3 B1 // CHECK: [B5] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A a; // CHECK-NEXT: Preds (1): B6 // CHECK-NEXT: Succs (1): B4 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 void test_while_implicit_scope() { A a; while (A b = a) A c; } // CHECK: [B12 (ENTRY)] // CHECK-NEXT: Succs (1): B11 // CHECK: [B1] // CHECK-NEXT: 1: [B10.4] (Lifetime ends) // CHECK-NEXT: 2: (CXXConstructExpr, class A) // CHECK-NEXT: 3: A e; // CHECK-NEXT: 4: [B1.3] (Lifetime ends) // CHECK-NEXT: 5: [B11.2] (Lifetime ends) // CHECK-NEXT: Preds (2): B8 B10 // CHECK-NEXT: 
Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: Preds (2): B3 B6 // CHECK-NEXT: Succs (1): B10 // CHECK: [B3] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A d; // CHECK-NEXT: 3: [B3.2] (Lifetime ends) // CHECK-NEXT: 4: [B9.2] (Lifetime ends) // CHECK-NEXT: 5: [B10.4] (Lifetime ends) // CHECK-NEXT: Preds (1): B5 // CHECK-NEXT: Succs (1): B2 // CHECK: [B4] // CHECK-NEXT: 1: return; // CHECK-NEXT: 2: [B9.2] (Lifetime ends) // CHECK-NEXT: 3: [B10.4] (Lifetime ends) // CHECK-NEXT: 4: [B11.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B5 // CHECK-NEXT: Succs (1): B0 // CHECK: [B5] // CHECK-NEXT: 1: UV // CHECK-NEXT: 2: [B5.1] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B5.2] // CHECK-NEXT: Preds (1): B7 // CHECK-NEXT: Succs (2): B4 B3 // CHECK: [B6] // CHECK-NEXT: 1: [B9.2] (Lifetime ends) // CHECK-NEXT: 2: [B10.4] (Lifetime ends) // CHECK-NEXT: T: continue; // CHECK-NEXT: Preds (1): B7 // CHECK-NEXT: Succs (1): B2 // CHECK: [B7] // CHECK-NEXT: 1: UV // CHECK-NEXT: 2: [B7.1] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B7.2] // CHECK-NEXT: Preds (1): B9 // CHECK-NEXT: Succs (2): B6 B5 // CHECK: [B8] // CHECK-NEXT: 1: [B9.2] (Lifetime ends) // CHECK-NEXT: T: break; // CHECK-NEXT: Preds (1): B9 // CHECK-NEXT: Succs (1): B1 // CHECK: [B9] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A c; // CHECK-NEXT: 3: UV // CHECK-NEXT: 4: [B9.3] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B9.4] // CHECK-NEXT: Preds (1): B10 // CHECK-NEXT: Succs (2): B8 B7 // CHECK: [B10] // CHECK-NEXT: 1: a // CHECK-NEXT: 2: [B10.1] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 3: [B10.2] (CXXConstructExpr, class A) // CHECK-NEXT: 4: A b = a; // CHECK-NEXT: 5: b // CHECK-NEXT: 6: [B10.5] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 7: [B10.6].operator int // CHECK-NEXT: 8: [B10.6] // CHECK-NEXT: 9: [B10.8] (ImplicitCastExpr, UserDefinedConversion, int) // CHECK-NEXT: 10: [B10.9] 
(ImplicitCastExpr, IntegralToBoolean, _Bool) // CHECK-NEXT: T: while [B10.10] // CHECK-NEXT: Preds (2): B2 B11 // CHECK-NEXT: Succs (2): B9 B1 // CHECK: [B11] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A a; // CHECK-NEXT: Preds (1): B12 // CHECK-NEXT: Succs (1): B10 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (2): B1 B4 void test_while_jumps() { A a; while (A b = a) { A c; if (UV) break; if (UV) continue; if (UV) return; A d; } A e; } // CHECK: [B12 (ENTRY)] // CHECK-NEXT: Succs (1): B11 // CHECK: [B1] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A d; // CHECK-NEXT: 3: [B1.2] (Lifetime ends) // CHECK-NEXT: 4: [B11.2] (Lifetime ends) // CHECK-NEXT: Preds (2): B8 B2 // CHECK-NEXT: Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: 1: UV // CHECK-NEXT: 2: [B2.1] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: do ... while [B2.2] // CHECK-NEXT: Preds (2): B3 B6 // CHECK-NEXT: Succs (2): B10 B1 // CHECK: [B3] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A c; // CHECK-NEXT: 3: [B3.2] (Lifetime ends) // CHECK-NEXT: 4: [B9.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B5 // CHECK-NEXT: Succs (1): B2 // CHECK: [B4] // CHECK-NEXT: 1: return; // CHECK-NEXT: 2: [B9.2] (Lifetime ends) // CHECK-NEXT: 3: [B11.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B5 // CHECK-NEXT: Succs (1): B0 // CHECK: [B5] // CHECK-NEXT: 1: UV // CHECK-NEXT: 2: [B5.1] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B5.2] // CHECK-NEXT: Preds (1): B7 // CHECK-NEXT: Succs (2): B4 B3 // CHECK: [B6] // CHECK-NEXT: 1: [B9.2] (Lifetime ends) // CHECK-NEXT: T: continue; // CHECK-NEXT: Preds (1): B7 // CHECK-NEXT: Succs (1): B2 // CHECK: [B7] // CHECK-NEXT: 1: UV // CHECK-NEXT: 2: [B7.1] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B7.2] // CHECK-NEXT: Preds (1): B9 // CHECK-NEXT: Succs (2): B6 B5 // CHECK: [B8] // CHECK-NEXT: 1: [B9.2] (Lifetime ends) // CHECK-NEXT: T: break; // CHECK-NEXT: Preds (1): B9 // 
CHECK-NEXT: Succs (1): B1 // CHECK: [B9] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A b; // CHECK-NEXT: 3: UV // CHECK-NEXT: 4: [B9.3] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B9.4] // CHECK-NEXT: Preds (2): B10 B11 // CHECK-NEXT: Succs (2): B8 B7 // CHECK: [B10] // CHECK-NEXT: Preds (1): B2 // CHECK-NEXT: Succs (1): B9 // CHECK: [B11] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A a; // CHECK-NEXT: Preds (1): B12 // CHECK-NEXT: Succs (1): B9 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (2): B1 B4 void test_do_jumps() { A a; do { A b; if (UV) break; if (UV) continue; if (UV) return; A c; } while (UV); A d; } // CHECK: [B6 (ENTRY)] // CHECK-NEXT: Succs (1): B5 // CHECK: [B1] // CHECK-NEXT: 1: [B4.4] (Lifetime ends) // CHECK-NEXT: 2: [B5.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: Preds (1): B3 // CHECK-NEXT: Succs (1): B4 // CHECK: [B3] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A c; // CHECK-NEXT: 3: [B3.2] (Lifetime ends) // CHECK-NEXT: 4: [B4.4] (Lifetime ends) // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (1): B2 // CHECK: [B4] // CHECK-NEXT: 1: a // CHECK-NEXT: 2: [B4.1] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 3: [B4.2] (CXXConstructExpr, class A) // CHECK-NEXT: 4: A b = a; // CHECK-NEXT: 5: b // CHECK-NEXT: 6: [B4.5] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 7: [B4.6].operator int // CHECK-NEXT: 8: [B4.6] // CHECK-NEXT: 9: [B4.8] (ImplicitCastExpr, UserDefinedConversion, int) // CHECK-NEXT: 10: [B4.9] (ImplicitCastExpr, IntegralToBoolean, _Bool) // CHECK-NEXT: T: for (...; [B4.10]; ) // CHECK-NEXT: Preds (2): B2 B5 // CHECK-NEXT: Succs (2): B3 B1 // CHECK: [B5] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A a; // CHECK-NEXT: Preds (1): B6 // CHECK-NEXT: Succs (1): B4 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 void test_for_implicit_scope() { for (A a; A b = 
a;) A c; } // CHECK: [B12 (ENTRY)] // CHECK-NEXT: Succs (1): B11 // CHECK: [B1] // CHECK-NEXT: 1: [B10.4] (Lifetime ends) // CHECK-NEXT: 2: [B11.4] (Lifetime ends) // CHECK-NEXT: 3: (CXXConstructExpr, class A) // CHECK-NEXT: 4: A f; // CHECK-NEXT: 5: [B1.4] (Lifetime ends) // CHECK-NEXT: 6: [B11.2] (Lifetime ends) // CHECK-NEXT: Preds (2): B8 B10 // CHECK-NEXT: Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: Preds (2): B3 B6 // CHECK-NEXT: Succs (1): B10 // CHECK: [B3] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A e; // CHECK-NEXT: 3: [B3.2] (Lifetime ends) // CHECK-NEXT: 4: [B9.2] (Lifetime ends) // CHECK-NEXT: 5: [B10.4] (Lifetime ends) // CHECK-NEXT: Preds (1): B5 // CHECK-NEXT: Succs (1): B2 // CHECK: [B4] // CHECK-NEXT: 1: return; // CHECK-NEXT: 2: [B9.2] (Lifetime ends) // CHECK-NEXT: 3: [B10.4] (Lifetime ends) // CHECK-NEXT: 4: [B11.4] (Lifetime ends) // CHECK-NEXT: 5: [B11.2] (Lifetime ends) // CHECK-NEXT: Preds (1): B5 // CHECK-NEXT: Succs (1): B0 // CHECK: [B5] // CHECK-NEXT: 1: UV // CHECK-NEXT: 2: [B5.1] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B5.2] // CHECK-NEXT: Preds (1): B7 // CHECK-NEXT: Succs (2): B4 B3 // CHECK: [B6] // CHECK-NEXT: 1: [B9.2] (Lifetime ends) // CHECK-NEXT: T: continue; // CHECK-NEXT: Preds (1): B7 // CHECK-NEXT: Succs (1): B2 // CHECK: [B7] // CHECK-NEXT: 1: UV // CHECK-NEXT: 2: [B7.1] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B7.2] // CHECK-NEXT: Preds (1): B9 // CHECK-NEXT: Succs (2): B6 B5 // CHECK: [B8] // CHECK-NEXT: 1: [B9.2] (Lifetime ends) // CHECK-NEXT: T: break; // CHECK-NEXT: Preds (1): B9 // CHECK-NEXT: Succs (1): B1 // CHECK: [B9] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A d; // CHECK-NEXT: 3: UV // CHECK-NEXT: 4: [B9.3] (ImplicitCastExpr, LValueToRValue, _Bool) // CHECK-NEXT: T: if [B9.4] // CHECK-NEXT: Preds (1): B10 // CHECK-NEXT: Succs (2): B8 B7 // CHECK: [B10] // CHECK-NEXT: 1: b // CHECK-NEXT: 2: [B10.1] (ImplicitCastExpr, 
NoOp, const class A) // CHECK-NEXT: 3: [B10.2] (CXXConstructExpr, class A) // CHECK-NEXT: 4: A c = b; // CHECK-NEXT: 5: c // CHECK-NEXT: 6: [B10.5] (ImplicitCastExpr, NoOp, const class A) // CHECK-NEXT: 7: [B10.6].operator int // CHECK-NEXT: 8: [B10.6] // CHECK-NEXT: 9: [B10.8] (ImplicitCastExpr, UserDefinedConversion, int) // CHECK-NEXT: 10: [B10.9] (ImplicitCastExpr, IntegralToBoolean, _Bool) // CHECK-NEXT: T: for (...; [B10.10]; ) // CHECK-NEXT: Preds (2): B2 B11 // CHECK-NEXT: Succs (2): B9 B1 // CHECK: [B11] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A a; // CHECK-NEXT: 3: (CXXConstructExpr, class A) // CHECK-NEXT: 4: A b; // CHECK-NEXT: Preds (1): B12 // CHECK-NEXT: Succs (1): B10 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (2): B1 B4 void test_for_jumps() { A a; for (A b; A c = b;) { A d; if (UV) break; if (UV) continue; if (UV) return; A e; } A f; } // CHECK: [B2 (ENTRY)] // CHECK-NEXT: Succs (1): B1 // CHECK: [B1] // CHECK-NEXT: 1: (CXXConstructExpr, class A) // CHECK-NEXT: 2: A a; // CHECK-NEXT: 3: int n; // CHECK-NEXT: 4: n // CHECK-NEXT: 5: &[B1.4] // CHECK-NEXT: 6: a // CHECK-NEXT: 7: [B1.6].p // CHECK-NEXT: 8: [B1.7] = [B1.5] // CHECK-NEXT: 9: [B1.2] (Lifetime ends) // CHECK-NEXT: 10: [B1.3] (Lifetime ends) // CHECK-NEXT: Preds (1): B2 // CHECK-NEXT: Succs (1): B0 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 void test_trivial_vs_non_trivial_order() { A a; int n; a.p = &n; } // CHECK: [B4 (ENTRY)] // CHECK-NEXT: Succs (1): B3 // CHECK: [B1] // CHECK-NEXT: a: // CHECK-NEXT: 1: 1 // CHECK-NEXT: 2: i // CHECK-NEXT: 3: [B1.2] = [B1.1] // CHECK-NEXT: 4: [B2.1] (Lifetime ends) // CHECK-NEXT: Preds (2): B2 B3 // CHECK-NEXT: Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: 1: int i; // CHECK-NEXT: Succs (1): B1 // CHECK: [B3] // CHECK-NEXT: T: goto a; // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (1): B1 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 void goto_past_declaration() { goto a; int i; a: i = 1; } // CHECK: [B4 (ENTRY)] 
// CHECK-NEXT: Succs (1): B3 // CHECK: [B1] // CHECK-NEXT: a: // CHECK-NEXT: 1: 1 // CHECK-NEXT: 2: k // CHECK-NEXT: 3: [B1.2] = [B1.1] // CHECK-NEXT: 4: [B2.4] (Lifetime ends) // CHECK-NEXT: Preds (2): B2 B3 // CHECK-NEXT: Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: 1: int j; // CHECK-NEXT: 2: [B2.1] (Lifetime ends) // CHECK-NEXT: 3: [B3.1] (Lifetime ends) // CHECK-NEXT: 4: int k; // CHECK-NEXT: Succs (1): B1 // CHECK: [B3] // CHECK-NEXT: 1: int i; // CHECK-NEXT: 2: [B3.1] (Lifetime ends) // CHECK-NEXT: T: goto a; // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (1): B1 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 void goto_past_declaration2() { { int i; goto a; int j; } { int k; a: k = 1; } } struct B { ~B(); }; // CHECK: [B4 (ENTRY)] // CHECK-NEXT: Succs (1): B3 // CHECK: [B1] // CHECK-NEXT: 1: i // CHECK-NEXT: 2: [B1.1]++ // CHECK-NEXT: 3: [B2.2] (Lifetime ends) // CHECK-NEXT: 4: [B3.1] (Lifetime ends) // CHECK-NEXT: Succs (1): B0 // CHECK: [B2] // CHECK-NEXT: label: // CHECK-NEXT: 1: (CXXConstructExpr, struct B) // CHECK-NEXT: 2: B b; // CHECK-NEXT: 3: [B2.2] (Lifetime ends) // CHECK-NEXT: T: goto label; // CHECK-NEXT: Preds (2): B3 B2 // CHECK-NEXT: Succs (1): B2 // CHECK: [B3] // CHECK-NEXT: 1: int i; // CHECK-NEXT: Preds (1): B4 // CHECK-NEXT: Succs (1): B2 // CHECK: [B0 (EXIT)] // CHECK-NEXT: Preds (1): B1 int backpatched_goto() { int i; label: B b; goto label; i++; }
13,549
2,494
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ /* * pkix_pl_infoaccess.h * * InfoAccess Object Definitions * */ #ifndef _PKIX_PL_INFOACCESS_H #define _PKIX_PL_INFOACCESS_H #include "pkix_pl_common.h" #ifdef __cplusplus extern "C" { #endif struct PKIX_PL_InfoAccessStruct{ PKIX_UInt32 method; PKIX_PL_GeneralName *location; }; /* see source file for function documentation */ PKIX_Error *pkix_pl_InfoAccess_RegisterSelf(void *plContext); PKIX_Error * pkix_pl_InfoAccess_CreateList( CERTAuthInfoAccess **authInfoAccess, PKIX_List **pAiaList, /* of PKIX_PL_InfoAccess */ void *plContext); #ifndef NSS_PKIX_NO_LDAP PKIX_Error * pkix_pl_InfoAccess_ParseLocation( PKIX_PL_GeneralName *generalName, PLArenaPool *arena, LDAPRequestParams *request, char **pDomainName, void *plContext); #endif /* !NSS_PKIX_NO_LDAP */ #ifdef __cplusplus } #endif #endif /* _PKIX_PL_INFOACCESS_H */
475
1,433
//****************************************************************** // // Copyright 2015 Samsung Electronics All Rights Reserved. // //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= /** * @file * * This file contains the declaration of classes and its members related to RCSRemoteResourceObject */ #ifndef RCSREMOTERESOURCEOBJECT_H #define RCSREMOTERESOURCEOBJECT_H #include <vector> #include "RCSResourceAttributes.h" namespace OIC { namespace Service { /** * The states of caching. * * @see startCaching * @see getCacheState */ enum class CacheState { NONE, /**< Caching is not started.*/ UNREADY, /**< Caching is started, but the data is not ready yet. This is the default state after startCaching. */ READY, /**< The data is ready.*/ LOST_SIGNAL, /**< Failed to reach the resource. */ }; /** * The states of monitoring. * * @see startMonitoring * @see getState */ enum class ResourceState { NONE, /**< Monitoring is not started.*/ REQUESTED, /**< Monitoring is started and checking state is in progress. This is the default state after startMonitoring. */ ALIVE, /**< The resource is alive. */ LOST_SIGNAL, /**< Failed to reach the resource. */ DESTROYED /**< The resource is deleted. */ }; class PrimitiveResource; /** * * This represents a remote resource and provides simple ways to interact with it. 
* Basically this is a client of a remote resource that runs on other device. * * The class supports features to help get information of a remote resource * such as monitoring and caching. * * @see RCSDiscoveryManager * */ class RCSRemoteResourceObject { public: typedef std::shared_ptr< RCSRemoteResourceObject > Ptr; /** * Callback definition to be invoked when monitoring state is changed. * * @see startMonitioring * @see ResourceState */ typedef std::function< void(ResourceState) > StateChangedCallback; /** * Callback definition to be invoked when cache is updated. * * @param attrs the updated attributes */ typedef std::function< void(const RCSResourceAttributes& attrs) > CacheUpdatedCallback; /** * Callback definition to be invoked when the response of getRemoteAttributes is * received. * * @param attrs the result attributes * @param eCode the error code received from the resource * * @see getRemoteAttributes */ typedef std::function< void(const RCSResourceAttributes& attrs, int eCode) > RemoteAttributesGetCallback; /** * Callback definition to be invoked when the response of setRemoteAttributes is * received. * * @param attrs the result attributes * @param eCode the error code received from the resource * * @see setRemoteAttributes */ typedef std::function< void(const RCSResourceAttributes&, int) > RemoteAttributesSetCallback; private: typedef int CacheID; typedef unsigned int BrokerID; public: //! @cond RCSRemoteResourceObject(std::shared_ptr< PrimitiveResource >); //! @endcond ~RCSRemoteResourceObject(); /** * Returns whether monitoring is enabled. * * @see startMonitoring() */ bool isMonitoring() const; /** * Returns whether caching is enabled. * * @see startCaching() */ bool isCaching() const; /** * Returns whether the resource is observable. * */ bool isObservable() const; /** * Starts monitoring the resource. * * Monitoring provides a feature to check the presence of a resource, * even when the server is not announcing Presence using startPresnece. 
* * @param cb A Callback to get changed resource state. * * @throws InvalidParameterException If cb is an empty function or null. * @throws BadRequestException If monitoring is already started. * * @note The callback will be invoked in an internal thread. * * @see StateChangedCallback * @see ResourceState * @see isMonitoring() * @see stopMonitoring() * */ void startMonitoring(StateChangedCallback cb); /** * Stops monitoring the resource. * * It does nothing if monitoring is not started. * * @see startMonitoring() * */ void stopMonitoring(); /** * Returns the current state of the resource. * * @see startMonitoring */ ResourceState getState() const; /** * Starts caching attributes of the resource. * * This will start caching for the resource. * Once caching started it will look for the data updation on the resource * and updates the cache data accordingly. * * It is equivalent to calling startCaching(CacheUpdatedCallback) with an empty function. * * @see getCacheState() * @see getCachedAttributes() * @see getCachedAttribute(const std::string&) const * * @throws BadRequestException * */ void startCaching(); /** * Starts caching attributes for the resource. * * This will start data caching for the resource. * Once caching started it will look for the data updation on the resource and * updates the cached data accordingly. * * @param cb If non-empty function, it will be invoked whenever the cache updated. * * @throws BadRequestException If caching is already started. * * @note The callback will be invoked in an internal thread. * * @see CacheUpdatedCallback * @see getCacheState() * @see isCachedAvailable() * @see getCachedAttributes() * @see getCachedAttribute(const std::string&) const * */ void startCaching(CacheUpdatedCallback cb); /** * Stops caching. * * It does nothing if caching is not started. * * @see startCaching() * @see startCaching(CacheUpdatedCallback) */ void stopCaching(); /** * Returns the current cache state. 
* */ CacheState getCacheState() const; /** * Returns whether cached data is available. * * Cache will be available always once cache state had been CacheState::READY * even if current state is CacheState::LOST_SIGNAL. * * @see getCacheState() */ bool isCachedAvailable() const; /** * Gets the cached RCSResourceAttributes data. * * @pre Cache should be available. * * @return The cached attributes. * * @throws BadRequestException If the precondition is not fulfilled. * * @see RCSResourceAttributes * @see isCachedAvailable() * @see startCaching() * @see startCaching(CacheUpdatedCallback) * */ RCSResourceAttributes getCachedAttributes() const; /** * Gets a particular cached a ResourceAttribute Value. * * @pre Cache should be available. * * @return A requested attribute value. * * @throws BadRequestException If the precondition is not fulfilled. * @throws InvalidKeyException If @a key doesn't match the key of any value. * * @see RCSResourceAttributes::Value * @see isCachedAvailable() * @see startCaching() * @see startCaching(CacheUpdatedCallback) * */ RCSResourceAttributes::Value getCachedAttribute(const std::string& key) const; /** * Gets resource attributes directly from the server. * * This API send a get request to the resource of interest and provides * the attributes to the caller in the RemoteAttributesReceivedCallback. * * @throws PlatformException If the operation failed * @throws InvalidParameterException If cb is an empty function or null. * * @see RCSResourceAttributes::Value * * @note The callback will be invoked in an internal thread. */ void getRemoteAttributes(RemoteAttributesGetCallback cb); /** * Sends a set request with resource attributes to the server. * * The SetRequest behavior depends on the server, whether updating its attributes or not. * * @param attributes Attributes to set * @param cb A callback to receive the response. 
* * @throws PlatformException If the operation failed * @throws InvalidParameterException If cb is an empty function or null. * * @see RCSResourceObject * @see RCSResourceObject::SetRequestHandlerPolicy * * @note The callback will be invoked in an internal thread. */ void setRemoteAttributes(const RCSResourceAttributes& attributes, RemoteAttributesSetCallback cb); /** * Returns the uri of the resource. * */ std::string getUri() const; /** * Returns the address of the resource . * */ std::string getAddress() const; /** * Returns the resource types of the resource. * */ std::vector< std::string > getTypes() const; /** * Returns the resource interfaces of the resource. * */ std::vector< std::string > getInterfaces() const; private: std::shared_ptr< PrimitiveResource > m_primitiveResource; CacheID m_cacheId; BrokerID m_brokerId; }; } } #endif // RCSREMOTERESOURCEOBJECT_H
5,703
3,102
// RUN: %clang_analyze_cc1 -w -analyzer-checker=debug.DumpLiveStmts %s 2>&1\
// RUN:   | FileCheck %s

// This file checks the output of the debug.DumpLiveStmts checker: for each
// CFG basic block it dumps the set of statements that are live at block exit.
// The CHECK lines below encode the exact expected dump, so the code in each
// test function must not be changed without regenerating the expectations.

// Opaque condition helper; defined elsewhere so the analyzer cannot
// constant-fold the loop conditions below.
int coin();


int testThatDumperWorks(int x, int y, int z) {
  return x ? y : z;
}

// CHECK: [ B0 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B1 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B2 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-NEXT: DeclRefExpr {{.*}} 'y' 'int'
// CHECK-EMPTY:
// CHECK-NEXT: DeclRefExpr {{.*}} 'z' 'int'
// CHECK-EMPTY:
// CHECK-NEXT: ImplicitCastExpr {{.*}} <IntegralToBoolean>
// CHECK-NEXT: `-ImplicitCastExpr {{.*}} <LValueToRValue>
// CHECK-NEXT: `-DeclRefExpr {{.*}} 'x' 'int'
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B3 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-NEXT: DeclRefExpr {{.*}} 'y' 'int'
// CHECK-EMPTY:
// CHECK-NEXT: DeclRefExpr {{.*}} 'z' 'int'
// CHECK-EMPTY:
// CHECK-NEXT: ImplicitCastExpr {{.*}} <IntegralToBoolean>
// CHECK-NEXT: `-ImplicitCastExpr {{.*}} <LValueToRValue>
// CHECK-NEXT: `-DeclRefExpr {{.*}} 'x' 'int'
// CHECK: [ B4 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-NEXT: DeclRefExpr {{.*}} 'y' 'int'
// CHECK-EMPTY:
// CHECK-NEXT: DeclRefExpr {{.*}} 'z' 'int'
// CHECK-EMPTY:
// CHECK-NEXT: ImplicitCastExpr {{.*}} <IntegralToBoolean>
// CHECK-NEXT: `-ImplicitCastExpr {{.*}} <LValueToRValue>
// CHECK-NEXT: `-DeclRefExpr {{.*}} 'x' 'int'
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B5 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-NEXT: DeclRefExpr {{.*}} 'y' 'int'
// CHECK-EMPTY:
// CHECK-NEXT: DeclRefExpr {{.*}} 'z' 'int'
// CHECK-EMPTY:
// CHECK-EMPTY:


void testIfBranchExpression(bool flag) {
  // No expressions should be carried over from one block to another here.
  while (flag) {
    int e = 1;
    if (true)
      e;
  }
}

// CHECK: [ B0 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B1 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B2 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B3 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B4 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B5 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:


void testWhileBodyExpression(bool flag) {
  // No expressions should be carried over from one block to another here.
  while (flag) {
    int e = 1;
    while (coin())
      e;
  }
}

// CHECK: [ B0 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B1 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B2 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B3 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B4 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B5 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:


void testDoWhileBodyExpression(bool flag) {
  // No expressions should be carried over from one block to another here.
  while (flag) {
    int e = 1;
    do
      e;
    while (coin());
  }
}

// CHECK: [ B0 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B1 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B2 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B3 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B4 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B5 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:


void testForBodyExpression(bool flag) {
  // No expressions should be carried over from one block to another here.
  while (flag) {
    int e = 1;
    for (; coin();)
      e;
  }
}

// CHECK: [ B0 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B1 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B2 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B3 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B4 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
// CHECK: [ B5 (live statements at block exit) ]
// CHECK-EMPTY:
// CHECK-EMPTY:
1,775
959
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for the ``ftol`` / ``ftol_iter`` early-stopping options of the
pyswarms single-objective optimizers, using a 0/1 knapsack problem as the
objective function."""

# Import standard libraries
import pytest
import random

random.seed(0)

# Import modules
import numpy as np

# Import from pyswarms
from pyswarms.backend.topology import Star
from pyswarms.single import GlobalBestPSO, LocalBestPSO, GeneralOptimizerPSO

# Knapsack parameters
capacity = 50
number_of_items = 10
item_range = range(number_of_items)

# PARAMETERS (random but reproducible thanks to the fixed seed above)
value = [random.randint(1, number_of_items) for i in item_range]
weight = [random.randint(1, number_of_items) for i in item_range]

# PSO parameters
n_particles = 10
iterations = 1000
options = {"c1": 2, "c2": 2, "w": 0.7, "k": 3, "p": 2}
dim = number_of_items
LB = [0] * dim
UB = [1] * dim
constraints = (np.array(LB), np.array(UB))
kwargs = {"value": value, "weight": weight, "capacity": capacity}


def get_particle_obj(X, **kwargs):
    """Return the objective value for a single particle.

    The objective is the negative total revenue plus a penalty for
    violating the knapsack capacity; minimizing it maximizes revenue.

    Parameters
    ----------
    X : array-like
        Decision vector with one entry per item; each entry is rounded
        to 0/1 to decide whether the item is packed.
    **kwargs
        Must contain ``value`` (item values), ``weight`` (item weights)
        and ``capacity`` (knapsack capacity).
    """
    # Item values and weights.
    item_values = kwargs["value"]
    item_weights = kwargs["weight"]
    max_capacity = kwargs["capacity"]
    # Derive the index range from the data instead of the module-level
    # ``item_range`` global, so the function works for any problem size.
    items = range(len(item_values))
    # Total revenue of the selected (rounded) items.
    revenue = sum(item_values[i] * np.round(X[i]) for i in items)
    # Total weight of the selected items.
    used_capacity = sum(item_weights[i] * np.round(X[i]) for i in items)
    # Capacity violation (non-positive) with 100 as penalty coefficient.
    capacity_violation = 100 * min(0, max_capacity - used_capacity)
    # The objective function minimizes the negative revenue, which is the
    # same as maximizing the positive revenue.
    return -1 * (revenue + capacity_violation)


def objective_function(X, **kwargs):
    """Vectorized objective: one value per particle (row of ``X``)."""
    n_particles_ = X.shape[0]
    dist = [get_particle_obj(X[i], **kwargs) for i in range(n_particles_)]
    return np.array(dist)


# Instantiate optimizers
optimizers = [GlobalBestPSO, LocalBestPSO, GeneralOptimizerPSO]
parameters = dict(
    n_particles=n_particles,
    dimensions=dim,
    options=options,
    bounds=constraints,
    bh_strategy="periodic",
    velocity_clamp=(-0.5, 0.5),
    vh_strategy="invert",
)


class TestToleranceOptions:
    @pytest.fixture(params=optimizers)
    def optimizer(self, request):
        """Yield ``(optimizer_class, init_kwargs)`` for each optimizer.

        A shallow copy of ``parameters`` is returned so that tests which
        add keys (``ftol``, ``ftol_iter``) do not leak state into the
        shared module-level dict and thereby into other tests.
        """
        if request.param.__name__ == "GeneralOptimizerPSO":
            # GeneralOptimizerPSO additionally requires a topology.
            return request.param, {**parameters, **{"topology": Star()}}
        return request.param, dict(parameters)

    def test_no_ftol(self, optimizer):
        """Without ftol, the optimizer runs for the full iteration budget."""
        optm, params = optimizer
        opt = optm(**params)
        opt.optimize(objective_function, iters=iterations, **kwargs)
        assert len(opt.cost_history) == iterations

    def test_ftol_effect(self, optimizer):
        """With ftol set, the optimizer may stop early."""
        optm, params = optimizer
        params["ftol"] = 0.01
        opt = optm(**params)
        opt.optimize(objective_function, iters=iterations, **kwargs)
        assert len(opt.cost_history) <= iterations

    def test_ftol_iter_assertion(self, optimizer):
        """A non-positive ftol_iter must be rejected at construction time."""
        with pytest.raises(AssertionError):
            optm, params = optimizer
            params["ftol_iter"] = 0
            opt = optm(**params)

    def test_ftol_iter_effect(self, optimizer):
        """With ftol_iter set, the optimizer must run at least that many
        iterations before early stopping can trigger."""
        optm, params = optimizer
        params["ftol_iter"] = 50
        opt = optm(**params)
        opt.optimize(objective_function, iters=iterations, **kwargs)
        assert len(opt.cost_history) >= opt.ftol_iter
1,416
3,262
/*
 * Tencent is pleased to support the open source community by making Angel available.
 *
 * Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * https://opensource.org/licenses/Apache-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 *
 */

package com.tencent.angel.common.transport;

import com.tencent.angel.common.location.Location;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import java.util.concurrent.TimeoutException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.pool.PoolableObjectFactory;

/**
 * Netty channel pool factory. Creates, validates and destroys pooled channels
 * connected to a fixed server address.
 */
public class ChannelObjectFactory implements PoolableObjectFactory<Channel> {
  private static final Log LOG = LogFactory.getLog(ChannelObjectFactory.class);

  /**
   * Maximum time to wait for a connect to complete, in milliseconds.
   * Matches the previous 10000 x 10ms sleep-poll budget.
   */
  private static final long CONNECT_TIMEOUT_MS = 100000L;

  /**
   * server address
   */
  private final Location loc;

  /**
   * netty client bootstrap
   */
  private final Bootstrap bootstrap;

  /**
   * Create a new ChannelObjectFactory.
   *
   * @param loc server address
   * @param bootstrap netty client bootstrap
   */
  public ChannelObjectFactory(Location loc, Bootstrap bootstrap) {
    this.loc = loc;
    this.bootstrap = bootstrap;
  }

  @Override
  public void activateObject(Channel channel) throws Exception {
    // Nothing to do: channels need no per-borrow activation.
  }

  @Override
  public void destroyObject(Channel channel) throws Exception {
    channel.close();
  }

  /**
   * Connects to the server and returns the resulting channel.
   *
   * @return the connected channel (handed back as soon as the connect future
   *         completes, successful or not; {@link #validateObject(Channel)}
   *         filters out dead channels afterwards)
   * @throws TimeoutException if the connect does not complete in time
   * @throws InterruptedException if the waiting thread is interrupted
   */
  @Override
  public Channel makeObject() throws Exception {
    ChannelFuture connectFuture = bootstrap.connect(loc.getIp(), loc.getPort());
    // Block on the future instead of sleep-polling every 10ms; await()
    // returns true once the future is done (successfully or not), which
    // preserves the original behavior of returning the channel as soon as
    // the connect attempt finishes.
    if (!connectFuture.await(CONNECT_TIMEOUT_MS)) {
      throw new TimeoutException("connect " + loc + " timeout");
    }
    return connectFuture.channel();
  }

  @Override
  public void passivateObject(Channel channel) throws Exception {
    // Nothing to do: channels need no per-return cleanup.
  }

  @Override
  public boolean validateObject(Channel channel) {
    return channel.isOpen();
  }
}
867
841
package cgeo.geocaching.sorting;

import cgeo.geocaching.models.Geocache;

import androidx.annotation.NonNull;

import java.util.List;

/**
 * Decorator that yields the reverse ordering of a wrapped comparator.
 */
public class InverseComparator implements CacheComparator {

    /** The comparator whose ordering is being reversed. */
    private final CacheComparator delegate;

    public InverseComparator(final CacheComparator comparator) {
        this.delegate = comparator;
    }

    @Override
    public int compare(final Geocache lhs, final Geocache rhs) {
        // Swapping the arguments inverts the sort order of the delegate.
        return delegate.compare(rhs, lhs);
    }

    @Override
    public boolean isAutoManaged() {
        return delegate.isAutoManaged();
    }

    @Override
    public String getSortableSection(@NonNull final Geocache cache) {
        return delegate.getSortableSection(cache);
    }

    @Override
    public void sort(final List<Geocache> list, final boolean inverse) {
        // NOTE(review): this forwards `inverse` unchanged rather than negating
        // it, unlike compare() which does invert — confirm this asymmetry is
        // intentional for auto-managed comparators.
        this.delegate.sort(list, inverse);
    }
}
6,717
//******************************************************************************
//
// Copyright (c) 2015 Microsoft Corporation. All rights reserved.
//
// This code is licensed under the MIT License (MIT).
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
//******************************************************************************
#pragma once

#import <Foundation/Foundation.h>

// LifetimeCounting is a class that tracks its reference counting behaviour.
// It is intended to be used in reference counting tests.
//
// Seeing as Clang with ARC on will refuse to emit -retain, -release,
// -dealloc, and -retainCount, the property accessors have to have rather silly names.
@interface LifetimeCounting : NSObject {
    // Counters for observed retain/release calls; presumably incremented by
    // overridden -retain/-release in the implementation file — TODO confirm.
    unsigned arcSafeRetainCount;
    unsigned arcSafeReleaseCount;
}

// Returns the number of retain operations recorded so far.
- (unsigned)arcSafeRetainCount;
// Returns the number of release operations recorded so far.
- (unsigned)arcSafeReleaseCount;
// Explicitly tears down the object without relying on -dealloc.
- (void)destroy /* ARC escape hatch */;
@end
1,097
/*
 * Copyright 2011-2020 <NAME>. All rights reserved.
 * License: https://github.com/bkaradzic/bgfx#license-bsd-2-clause
 */

#ifndef BGFX_VERTEXDECL_H_HEADER_GUARD
#define BGFX_VERTEXDECL_H_HEADER_GUARD

#include <bgfx/bgfx.h>
#include <bx/readerwriter.h>

namespace bgfx
{
	/// Initializes the attribute-type size table for the given renderer backend.
	void initAttribTypeSizeTable(RendererType::Enum _type);

	/// Returns attribute name.
	const char* getAttribName(Attrib::Enum _attr);

	/// Returns short form of attribute name.
	const char* getAttribNameShort(Attrib::Enum _attr);

	/// Converts a serialized attribute id back to the Attrib enum.
	Attrib::Enum idToAttrib(uint16_t id);

	/// Converts an Attrib enum to its serialized id.
	uint16_t attribToId(Attrib::Enum _attr);

	/// Converts a serialized attribute-type id back to the AttribType enum.
	AttribType::Enum idToAttribType(uint16_t id);

	/// Serializes a vertex layout to the writer.
	/// NOTE(review): return value presumably is the number of bytes written,
	/// with errors reported via _err — confirm against the implementation.
	int32_t write(bx::WriterI* _writer, const bgfx::VertexLayout& _layout, bx::Error* _err = NULL);

	/// Deserializes a vertex layout from the reader.
	/// NOTE(review): return value presumably is the number of bytes read,
	/// with errors reported via _err — confirm against the implementation.
	int32_t read(bx::ReaderI* _reader, bgfx::VertexLayout& _layout, bx::Error* _err = NULL);

	/// Welds vertices: writes a remapping of _num vertices of the given layout
	/// into _output, merging vertices whose attributes differ by at most
	/// _epsilon. _index32 selects 16- vs 32-bit output indices.
	/// NOTE(review): exact semantics of the returned count inferred from the
	/// bgfx public API docs — confirm.
	uint32_t weldVertices(void* _output, const VertexLayout& _layout, const void* _data, uint32_t _num, bool _index32, float _epsilon, bx::AllocatorI* _allocator);

} // namespace bgfx

#endif // BGFX_VERTEXDECL_H_HEADER_GUARD
879
<reponame>LEONAD486/zstack<filename>image/src/main/java/org/zstack/image/ImageBase.java package org.zstack.image; import org.springframework.beans.factory.annotation.Autowire; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Configurable; import org.zstack.compute.vm.IsoOperator; import org.zstack.compute.vm.VmSystemTags; import org.zstack.core.asyncbatch.While; import org.zstack.core.cascade.CascadeConstant; import org.zstack.core.cascade.CascadeFacade; import org.zstack.core.cloudbus.CloudBus; import org.zstack.core.cloudbus.CloudBusCallBack; import org.zstack.core.componentloader.PluginRegistry; import org.zstack.core.db.DatabaseFacade; import org.zstack.core.db.SQL; import org.zstack.core.db.SQLBatch; import org.zstack.core.db.SimpleQuery; import org.zstack.core.db.SimpleQuery.Op; import org.zstack.core.errorcode.ErrorFacade; import org.zstack.core.thread.ChainTask; import org.zstack.core.thread.SyncTaskChain; import org.zstack.core.thread.ThreadFacade; import org.zstack.core.workflow.FlowChainBuilder; import org.zstack.header.core.Completion; import org.zstack.header.core.NoErrorCompletion; import org.zstack.header.core.WhileDoneCompletion; import org.zstack.header.core.NopeCompletion; import org.zstack.header.core.ReturnValueCompletion; import org.zstack.header.core.workflow.*; import org.zstack.header.errorcode.ErrorCode; import org.zstack.header.errorcode.ErrorCodeList; import org.zstack.header.errorcode.OperationFailureException; import org.zstack.header.errorcode.SysErrors; import org.zstack.header.identity.SharedResourceVO; import org.zstack.header.identity.SharedResourceVO_; import org.zstack.header.image.*; import org.zstack.header.image.ImageConstant.ImageMediaType; import org.zstack.header.image.ImageDeletionPolicyManager.ImageDeletionPolicy; import org.zstack.header.message.*; import org.zstack.header.storage.backup.*; import org.zstack.header.vm.DetachIsoFromVmInstanceMsg; import 
org.zstack.header.vm.VmInstanceConstant; import org.zstack.header.volume.VolumeType; import org.zstack.tag.SystemTagCreator; import org.zstack.tag.TagManager; import org.zstack.utils.CollectionUtils; import org.zstack.utils.Utils; import org.zstack.utils.function.ForEachFunction; import org.zstack.utils.function.Function; import org.zstack.utils.gson.JSONObjectUtil; import org.zstack.utils.logging.CLogger; import java.util.*; import java.util.concurrent.TimeUnit; import static org.zstack.core.Platform.*; import static org.zstack.utils.CollectionDSL.e; import static org.zstack.utils.CollectionDSL.map; /** * Created with IntelliJ IDEA. * User: frank * Time: 5:38 PM * To change this template use File | Settings | File Templates. */ @Configurable(preConstruction = true, autowire = Autowire.BY_TYPE) public class ImageBase implements Image { private static final CLogger logger = Utils.getLogger(ImageBase.class); protected String syncThreadId; @Autowired private ThreadFacade thdf; @Autowired protected CloudBus bus; @Autowired protected DatabaseFacade dbf; @Autowired private CascadeFacade casf; @Autowired private ErrorFacade errf; @Autowired private ImageDeletionPolicyManager deletionPolicyMgr; @Autowired private PluginRegistry pluginRgty; @Autowired private TagManager tagMgr; protected ImageVO self; public ImageBase(ImageVO vo) { self = vo; syncThreadId = String.format("image-%s", self.getUuid()); } @Override public void handleMessage(Message msg) { try { if (msg instanceof APIMessage) { handleApiMessage((APIMessage) msg); } else { handleLocalMessage(msg); } } catch (Exception e) { bus.logExceptionWithMessageDump(msg, e); bus.replyErrorByMessageType(msg, e); } } protected ImageVO getSelf() { return self; } protected ImageInventory getSelfInventory() { return ImageInventory.valueOf(getSelf()); } private void handleLocalMessage(Message msg) { if (msg instanceof ImageDeletionMsg) { handle((ImageDeletionMsg) msg); } else if (msg instanceof CancelAddImageMsg) { 
handle((CancelAddImageMsg) msg); } else if (msg instanceof ExpungeImageMsg) { handle((ExpungeImageMsg) msg); } else if (msg instanceof SyncImageSizeMsg) { handle((SyncImageSizeMsg) msg); } else if (msg instanceof OverlayMessage) { handle((OverlayMessage) msg); } else if (msg instanceof SyncSystemTagFromVolumeMsg) { handle((SyncSystemTagFromVolumeMsg) msg); } else if (msg instanceof SyncSystemTagFromTagMsg) { handle((SyncSystemTagFromTagMsg) msg); } else if (msg instanceof UpdateImageMsg) { handle((UpdateImageMsg) msg); } else { bus.dealWithUnknownMessage(msg); } } class ImageSize { long size; long actualSize; } private void handle(final SyncImageSizeMsg msg) { final SyncImageSizeReply reply = new SyncImageSizeReply(); syncImageSize(msg.getBackupStorageUuid(), new ReturnValueCompletion<ImageSize>(msg) { @Override public void success(ImageSize ret) { reply.setActualSize(ret.actualSize); reply.setSize(ret.size); bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); } }); } private void syncImageSize(String backupStorageUuid, final ReturnValueCompletion<ImageSize> completion) { if (backupStorageUuid == null) { List<String> bsUuids = CollectionUtils.transformToList(self.getBackupStorageRefs(), new Function<String, ImageBackupStorageRefVO>() { @Override public String call(ImageBackupStorageRefVO arg) { return arg.getBackupStorageUuid(); } }); if (bsUuids.isEmpty()) { throw new OperationFailureException(operr("the image[uuid:%s, name:%s] is not on any backup storage", self.getUuid(), self.getName())); } SimpleQuery<BackupStorageVO> q = dbf.createQuery(BackupStorageVO.class); q.select(BackupStorageVO_.uuid); q.add(BackupStorageVO_.uuid, Op.IN, bsUuids); q.add(BackupStorageVO_.status, Op.EQ, BackupStorageStatus.Connected); q.setLimit(1); backupStorageUuid = q.findValue(); if (backupStorageUuid == null) { completion.fail(operr("No connected backup storage found for image[uuid:%s, name:%s]", 
self.getUuid(), self.getName())); return; } } SyncImageSizeOnBackupStorageMsg smsg = new SyncImageSizeOnBackupStorageMsg(); smsg.setBackupStorageUuid(backupStorageUuid); smsg.setImage(ImageInventory.valueOf(self)); bus.makeTargetServiceIdByResourceUuid(smsg, BackupStorageConstant.SERVICE_ID, backupStorageUuid); bus.send(smsg, new CloudBusCallBack(completion) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { completion.fail(reply.getError()); } else { SyncImageSizeOnBackupStorageReply sr = reply.castReply(); self.setSize(sr.getSize()); self.setActualSize(sr.getActualSize()); dbf.update(self); ImageSize ret = new ImageSize(); ret.actualSize = sr.getActualSize(); ret.size = sr.getSize(); completion.success(ret); } } }); } private void handle(final ExpungeImageMsg msg) { final ExpungeImageReply reply = new ExpungeImageReply(); final ImageBackupStorageRefVO ref = CollectionUtils.find( self.getBackupStorageRefs(), arg -> arg.getBackupStorageUuid().equals(msg.getBackupStorageUuid()) ? 
arg : null ); if (ref == null) { logger.debug(String.format("cannot find reference for the image[uuid:%s] on the backup storage[uuid:%s], assume it's been deleted", self.getUuid(), msg.getBackupStorageUuid())); bus.reply(msg, reply); return; } DeleteBitsOnBackupStorageMsg dmsg = new DeleteBitsOnBackupStorageMsg(); dmsg.setBackupStorageUuid(ref.getBackupStorageUuid()); dmsg.setInstallPath(ref.getInstallPath()); bus.makeTargetServiceIdByResourceUuid(dmsg, BackupStorageConstant.SERVICE_ID, dmsg.getBackupStorageUuid()); bus.send(dmsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply r) { if (!r.isSuccess()) { BackupStorageDeleteBitGC gc = new BackupStorageDeleteBitGC(); gc.NAME = String.format("gc-delete-bits-%s-on-backup-storage-%s", msg.getImageUuid(), ref.getBackupStorageUuid()); gc.backupStorageUuid = ref.getBackupStorageUuid(); gc.imageUuid = msg.getImageUuid(); gc.installPath = ref.getInstallPath(); gc.submit(ImageGlobalConfig.DELETION_GARBAGE_COLLECTION_INTERVAL.value(Long.class), TimeUnit.SECONDS); } returnBackupStorageCapacity(ref.getBackupStorageUuid(), self.getActualSize()); dbf.remove(ref); //TODO remove ref from metadata, this logic should after all refs deleted runAfterExpungeImageExtension(ref.getBackupStorageUuid()); logger.debug(String.format("successfully expunged the image[uuid: %s, name: %s] on the backup storage[uuid: %s]", self.getUuid(), self.getName(), ref.getBackupStorageUuid())); new SQLBatch() { // delete the image if it's not on any backup storage @Override protected void scripts() { long count = sql("select count(ref) from ImageBackupStorageRefVO ref" + " where ref.imageUuid = :uuid", Long.class) .param("uuid", msg.getImageUuid()).find(); if (count == 0) { // the image is expunged on all backup storage sql(ImageVO.class).eq(ImageVO_.uuid, msg.getImageUuid()).delete(); sql(SharedResourceVO.class).eq(SharedResourceVO_.resourceUuid, msg.getImageUuid()).delete(); logger.debug(String.format("the image[uuid:%s, name:%s] has been 
expunged on all backup storage, remove it from database", self.getUuid(), self.getName())); } } }.execute(); bus.reply(msg, reply); } }); } private void returnBackupStorageCapacity(final String bsUuid, final long size) { ReturnBackupStorageMsg msg = new ReturnBackupStorageMsg(); msg.setBackupStorageUuid(bsUuid); msg.setSize(size); bus.makeTargetServiceIdByResourceUuid(msg, BackupStorageConstant.SERVICE_ID, bsUuid); bus.send(msg, new CloudBusCallBack(null) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { logger.warn(String.format("failed to return capacity[%s] to the backup storage[uuid:%s], %s", size, bsUuid, reply.getError())); } } }); } private void handle(final ImageDeletionMsg msg) { final ImageDeletionReply reply = new ImageDeletionReply(); Set<ImageBackupStorageRefVO> bsRefs = self.getBackupStorageRefs(); if (bsRefs.isEmpty()) { if (self.getStatus() == ImageStatus.Ready) { SQL.New(ImageVO.class).eq(ImageVO_.uuid, self.getUuid()).delete(); } else { SQL.New(ImageVO.class).eq(ImageVO_.uuid, self.getUuid()).hardDelete(); } bus.reply(msg, reply); return; } else if (bsRefs.stream().allMatch( r -> r.getStatus() == ImageStatus.Creating || r.getStatus() == ImageStatus.Downloading)) { // the image is not on any backup storage; mostly likely the image is not in the status of Ready, for example // it's still downloading // in this case, we directly delete it from the database new SQLBatch() { @Override protected void scripts() { // in case 'recover api' called for an incomplete image sql(ImageBackupStorageRefVO.class).eq(ImageBackupStorageRefVO_.imageUuid, self.getUuid()).hardDelete(); sql(ImageVO.class).eq(ImageVO_.uuid, self.getUuid()).hardDelete(); } }.execute(); bus.reply(msg, reply); return; } final ImageDeletionPolicy deletionPolicy = msg.getDeletionPolicy() == null ? 
deletionPolicyMgr.getDeletionPolicy(self.getUuid()) : ImageDeletionPolicy.valueOf(msg.getDeletionPolicy()); FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("delete-image-%s", self.getUuid())); Collection<ImageBackupStorageRefVO> toDelete = msg.getBackupStorageUuids() == null ? self.getBackupStorageRefs() : CollectionUtils.transformToList( self.getBackupStorageRefs(), new Function<ImageBackupStorageRefVO, ImageBackupStorageRefVO>() { @Override public ImageBackupStorageRefVO call(ImageBackupStorageRefVO arg) { return msg.getBackupStorageUuids().contains(arg.getBackupStorageUuid()) ? arg : null; } } ); chain.then(new NoRollbackFlow() { @Override public void run(FlowTrigger trigger, Map data) { List<String> vmUuids = IsoOperator.getVmUuidByIsoUuid(msg.getImageUuid()); if (vmUuids.isEmpty()) { trigger.next(); return; } List<DetachIsoFromVmInstanceMsg> detachIsoFromVmInstanceMsgs = new ArrayList<>(); for (String vmUuid : vmUuids) { DetachIsoFromVmInstanceMsg detachIsoFromVmInstanceMsg = new DetachIsoFromVmInstanceMsg(); detachIsoFromVmInstanceMsg.setVmInstanceUuid(vmUuid); detachIsoFromVmInstanceMsg.setIsoUuid(msg.getImageUuid()); bus.makeLocalServiceId(detachIsoFromVmInstanceMsg, VmInstanceConstant.SERVICE_ID); detachIsoFromVmInstanceMsgs.add(detachIsoFromVmInstanceMsg); } List<ErrorCode> errors = Collections.synchronizedList(new LinkedList<ErrorCode>()); new While<>(detachIsoFromVmInstanceMsgs).all((detachIsoFromVmInstanceMsg, completion) -> { bus.send(detachIsoFromVmInstanceMsg, new CloudBusCallBack(completion) { @Override public void run(MessageReply rly) { if (!rly.isSuccess()) { errors.add(rly.getError()); } completion.done(); } }); }).run(new WhileDoneCompletion(trigger) { @Override public void done(ErrorCodeList errorCodeList) { if (errors.size() != 0) { trigger.fail(operr("detach iso[uuid=%s] from vm failed, errors are %s" ,msg.getImageUuid(), JSONObjectUtil.toJsonString(errors))); return; } trigger.next(); } }); } }); 
List<Object> refs = new ArrayList<>(); for (final ImageBackupStorageRefVO ref : toDelete) { chain.then(new NoRollbackFlow() { String __name__ = String.format("delete-image-%s-from-backup-storage-%s", self.getUuid(), ref.getBackupStorageUuid()); @Override public void run(final FlowTrigger trigger, Map data) { if (deletionPolicy == ImageDeletionPolicy.Direct) { DeleteBitsOnBackupStorageMsg dmsg = new DeleteBitsOnBackupStorageMsg(); dmsg.setBackupStorageUuid(ref.getBackupStorageUuid()); dmsg.setInstallPath(ref.getInstallPath()); bus.makeTargetServiceIdByResourceUuid(dmsg, BackupStorageConstant.SERVICE_ID, dmsg.getBackupStorageUuid()); bus.send(dmsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { //TODO logger.warn(String.format("failed to delete image[uuid:%s, name:%s] from backup storage[uuid:%s] because %s," + " need to garbage collect it", self.getUuid(), self.getName(), reply.getError(), ref.getBackupStorageUuid())); } else { returnBackupStorageCapacity(ref.getBackupStorageUuid(), self.getActualSize()); dbf.remove(ref); // now delete ref in metadata runAfterExpungeImageExtension(ref.getBackupStorageUuid()); } trigger.next(); } }); } else if (deletionPolicy == ImageDeletionPolicy.DeleteReference) { dbf.remove(ref); logger.debug(String.format("delete the image[uuid: %s, name:%s]'s reference of the backup storage[uuid:%s]", self.getUuid(), self.getName(), ref.getBackupStorageUuid())); trigger.next(); } else { ref.setStatus(ImageStatus.Deleted); refs.add(ref); trigger.next(); } } }); } chain.done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { new SQLBatch() { @Override protected void scripts() { for (Object ref : refs) { // update ref status if there is any dbf.getEntityManager().merge(ref); } dbf.getEntityManager().flush(); self = dbf.getEntityManager().find(ImageVO.class, self.getUuid()); if (self.getBackupStorageRefs().isEmpty()) { // the image is directly deleted from all backup 
storage // hard delete it sql(ImageVO.class).eq(ImageVO_.uuid, self.getUuid()).delete(); if (deletionPolicy == ImageDeletionPolicy.DeleteReference) { logger.debug(String.format("successfully directly deleted the image[uuid:%s, name:%s] from the database," + " as the policy is DeleteReference, it's still on the physical backup storage", self.getUuid(), self.getName())); } else { logger.debug(String.format("successfully directly deleted the image[uuid:%s, name:%s]", self.getUuid(), self.getName())); } } else { if (self.getBackupStorageRefs().stream().noneMatch(r -> r.getStatus() != ImageStatus.Deleted)) { self.setStatus(ImageStatus.Deleted); dbf.getEntityManager().merge(self); logger.debug(String.format("successfully deleted the image[uuid:%s, name:%s] with deletion policy[%s]", self.getUuid(), self.getName(), deletionPolicy)); } } } }.execute(); bus.reply(msg, reply); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { reply.setError(errCode); bus.reply(msg, reply); } }).start(); } private void handle(CancelAddImageMsg msg) { CancelDownloadImageReply reply = new CancelDownloadImageReply(); AddImageMsg amsg = msg.getMsg(); List<String> bsUuids = amsg.getBackupStorageUuids(); ImageInventory img = ImageInventory.valueOf(dbf.findByUuid(msg.getImageUuid(), ImageVO.class)); ErrorCodeList err = new ErrorCodeList(); new While<>(bsUuids).all((bsUuid, compl) -> { CancelDownloadImageMsg cmsg = new CancelDownloadImageMsg(); cmsg.setImageInventory(img); cmsg.setBackupStorageUuid(bsUuid); cmsg.setCancellationApiId(msg.getCancellationApiId()); bus.makeTargetServiceIdByResourceUuid(cmsg, BackupStorageConstant.SERVICE_ID, bsUuid); bus.send(cmsg, new CloudBusCallBack(compl) { @Override public void run(MessageReply r) { if (!r.isSuccess()) { err.getCauses().add(r.getError()); } compl.done(); } }); }).run(new WhileDoneCompletion(msg) { @Override public void done(ErrorCodeList errorCodeList) { if (!err.getCauses().isEmpty()) { 
reply.setError(err.getCauses().get(0)); } bus.reply(msg, reply); } }); } private void handle(OverlayMessage msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadId; } @Override public void run(SyncTaskChain chain) { doOverlayMessage(msg, new NoErrorCompletion(chain) { @Override public void done() { chain.next(); } }); } @Override public String getName() { return "overlay-message"; } }); } private void doOverlayMessage(OverlayMessage msg, NoErrorCompletion noErrorCompletion) { bus.send(msg.getMessage(), new CloudBusCallBack(msg, noErrorCompletion) { @Override public void run(MessageReply reply) { bus.reply(msg, reply); noErrorCompletion.done(); } }); } private void runAfterExpungeImageExtension(String backupStorageUuid) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(ExpungeImageExtensionPoint.class), ext -> ext.afterExpungeImage(ImageInventory.valueOf(self), backupStorageUuid)); } private void handle(SyncSystemTagFromVolumeMsg msg) { SyncSystemTagFromVolumeReply reply = new SyncSystemTagFromVolumeReply(); // only sync root volume List<String> vmSystemTags = SQL.New("select s.tag from SystemTagVO s, VolumeVO vol" + " where vol.uuid = :volUuid" + " and vol.type = :type" + " and vol.vmInstanceUuid = s.resourceUuid", String.class) .param("volUuid", msg.getVolumeUuid()) .param("type", VolumeType.Root) .list(); syncVmSystemTags(vmSystemTags); bus.reply(msg, reply); } private void handle(SyncSystemTagFromTagMsg msg) { SyncSystemTagFromTagReply reply = new SyncSystemTagFromTagReply(); syncVmSystemTags(msg.getVmSystemTags()); bus.reply(msg, reply); } private void syncVmSystemTags(List<String> tags) { if (tags == null || tags.isEmpty()) { return; } for (String tag : tags) { if (VmSystemTags.VM_INJECT_QEMUGA.getTagFormat().equals(tag)) { tagMgr.createNonInherentSystemTag(self.getUuid(), ImageSystemTags.IMAGE_INJECT_QEMUGA.getTagFormat(), ImageVO.class.getSimpleName()); } else if 
(VmSystemTags.BOOT_MODE.isMatch(tag)) { String bootMode = VmSystemTags.BOOT_MODE.getTokenByTag(tag, VmSystemTags.BOOT_MODE_TOKEN); SystemTagCreator creator = ImageSystemTags.BOOT_MODE.newSystemTagCreator(self.getUuid()); creator.setTagByTokens(Collections.singletonMap(VmSystemTags.BOOT_MODE_TOKEN, bootMode)); creator.inherent = false; creator.recreate = true; creator.create(); } else if (VmSystemTags.VM_GUEST_TOOLS.isMatch(tag)) { String guestTools = VmSystemTags.VM_GUEST_TOOLS.getTokenByTag(tag, VmSystemTags.VM_GUEST_TOOLS_VERSION_TOKEN); SystemTagCreator creator = ImageSystemTags.IMAGE_GUEST_TOOLS.newSystemTagCreator(self.getUuid()); creator.setTagByTokens(Collections.singletonMap(ImageSystemTags.IMAGE_GUEST_TOOLS_VERSION_TOKEN, guestTools)); creator.inherent = false; creator.recreate = true; creator.create(); } } } private void handleApiMessage(APIMessage msg) { if (msg instanceof APIChangeImageStateMsg) { handle((APIChangeImageStateMsg) msg); } else if (msg instanceof APIExpungeImageMsg) { handle((APIExpungeImageMsg) msg); } else if (msg instanceof APIDeleteImageMsg) { handle((APIDeleteImageMsg) msg); } else if (msg instanceof APIUpdateImageMsg) { handle((APIUpdateImageMsg) msg); } else if (msg instanceof APIRecoverImageMsg) { handle((APIRecoverImageMsg) msg); } else if (msg instanceof APISyncImageSizeMsg) { handle((APISyncImageSizeMsg) msg); } else if (msg instanceof APISetImageBootModeMsg) { handle((APISetImageBootModeMsg) msg); } else { bus.dealWithUnknownMessage(msg); } } private void handle(APISetImageBootModeMsg msg) { SystemTagCreator creator = ImageSystemTags.BOOT_MODE.newSystemTagCreator(self.getUuid()); creator.setTagByTokens(map( e(ImageSystemTags.BOOT_MODE_TOKEN, msg.getBootMode()) )); creator.recreate = true; creator.create(); APISetImageBootModeEvent evt = new APISetImageBootModeEvent(msg.getId()); bus.publish(evt); } private void handle(APISyncImageSizeMsg msg) { final APISyncImageSizeEvent evt = new APISyncImageSizeEvent(msg.getId()); 
syncImageSize(null, new ReturnValueCompletion<ImageSize>(msg) { @Override public void success(ImageSize ret) { self = dbf.reload(self); evt.setInventory(getSelfInventory()); bus.publish(evt); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); } }); } private void handle(APIRecoverImageMsg msg) { List<String> toRecoverBsUuids; if (msg.getBackupStorageUuids() == null || msg.getBackupStorageUuids().isEmpty()) { toRecoverBsUuids = CollectionUtils.transformToList(self.getBackupStorageRefs(), new Function<String, ImageBackupStorageRefVO>() { @Override public String call(ImageBackupStorageRefVO arg) { return arg.getStatus() == ImageStatus.Deleted ? arg.getBackupStorageUuid() : null; } }); if (toRecoverBsUuids.isEmpty()) { throw new OperationFailureException(operr("the image[uuid:%s, name:%s] is not deleted on any backup storage", self.getUuid(), self.getName())); } } else { toRecoverBsUuids = new ArrayList<String>(); for (final String bsUuid : msg.getBackupStorageUuids()) { ImageBackupStorageRefVO ref = CollectionUtils.find(self.getBackupStorageRefs(), new Function<ImageBackupStorageRefVO, ImageBackupStorageRefVO>() { @Override public ImageBackupStorageRefVO call(ImageBackupStorageRefVO arg) { return bsUuid.equals(arg.getBackupStorageUuid()) ? 
arg : null; } }); if (ref == null) { throw new OperationFailureException(argerr("the image[uuid:%s, name:%s] is not on the backup storage[uuid:%s]", self.getUuid(), self.getName(), bsUuid)); } if (ref.getStatus() != ImageStatus.Deleted) { throw new OperationFailureException(argerr("the image[uuid:%s, name:%s]'s status[%s] is not Deleted on the backup storage[uuid:%s]", self.getUuid(), self.getName(), ref.getStatus(), bsUuid)); } toRecoverBsUuids.add(bsUuid); } } List<Object> refs = new ArrayList<>(); for (ImageBackupStorageRefVO ref : self.getBackupStorageRefs()) { if (toRecoverBsUuids.contains(ref.getBackupStorageUuid())) { ref.setStatus(ImageStatus.Ready); refs.add(ref); } } self.setStatus(ImageStatus.Ready); refs.add(self); dbf.updateCollection(refs); self = dbf.reload(self); logger.debug(String.format("successfully recovered the image[uuid:%s, name:%s] on the backup storage%s", self.getUuid(), self.getName(), toRecoverBsUuids)); APIRecoverImageEvent evt = new APIRecoverImageEvent(msg.getId()); evt.setInventory(getSelfInventory()); bus.publish(evt); } private void handle(final APIExpungeImageMsg msg) { List<String> bsUuids = new ArrayList<>(); if (msg.getBackupStorageUuids() == null || msg.getBackupStorageUuids().isEmpty()) { bsUuids = CollectionUtils.transformToList( self.getBackupStorageRefs(), new Function<String, ImageBackupStorageRefVO>() { @Override public String call(ImageBackupStorageRefVO arg) { return ImageStatus.Deleted == arg.getStatus() ? 
arg.getBackupStorageUuid() : null; } } ); if (bsUuids.isEmpty()) { throw new OperationFailureException(operr("the image[uuid:%s, name:%s] is not deleted on any backup storage", self.getUuid(), self.getName())); } } else { for (final String bsUuid : msg.getBackupStorageUuids()) { ImageBackupStorageRefVO ref = CollectionUtils.find( self.getBackupStorageRefs(), new Function<ImageBackupStorageRefVO, ImageBackupStorageRefVO>() { @Override public ImageBackupStorageRefVO call(ImageBackupStorageRefVO arg) { return arg.getBackupStorageUuid().equals(bsUuid) ? arg : null; } } ); if (ref == null) { throw new OperationFailureException(argerr("the image[uuid:%s, name:%s] is not on the backup storage[uuid:%s]", self.getUuid(), self.getName(), bsUuid)); } if (ref.getStatus() != ImageStatus.Deleted) { throw new OperationFailureException(argerr("the image[uuid:%s, name:%s] is not deleted on the backup storage[uuid:%s]", self.getUuid(), self.getName(), bsUuid)); } bsUuids.add(bsUuid); } } new While<>(bsUuids).all((bsUuid, completion) -> { ExpungeImageMsg emsg = new ExpungeImageMsg(); emsg.setBackupStorageUuid(bsUuid); emsg.setImageUuid(self.getUuid()); bus.makeTargetServiceIdByResourceUuid(emsg, ImageConstant.SERVICE_ID, self.getUuid()); bus.send(emsg, new CloudBusCallBack(completion) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { logger.warn(reply.getError().toString()); } completion.done(); } }); }).run(new WhileDoneCompletion(msg) { @Override public void done(ErrorCodeList errorCodeList) { bus.publish(new APIExpungeImageEvent(msg.getId())); } }); } private void updateImage(UpdateImageMsg msg) { boolean update = false; if (msg.getName() != null) { self.setName(msg.getName()); update = true; } if (msg.getDescription() != null) { self.setDescription(msg.getDescription()); update = true; } if (msg.getSystem() != null) { self.setSystem(msg.getSystem()); update = true; } if (msg.getGuestOsType() != null) { self.setGuestOsType(msg.getGuestOsType()); update = 
true; } if (msg.getMediaType() != null) { self.setMediaType(ImageMediaType.valueOf(msg.getMediaType())); update = true; } if (msg.getFormat() != null) { self.setFormat(msg.getFormat()); update = true; } if (msg.getPlatform() != null) { self.setPlatform(ImagePlatform.valueOf(msg.getPlatform())); update = true; } if (msg.getArchitecture() != null) { self.setArchitecture(msg.getArchitecture()); update = true; } if (msg.getVirtio() != null) { self.setVirtio(msg.getVirtio()); update = true; } if (update) { self = dbf.updateAndRefresh(self); } if (ImageArchitecture.aarch64.toString().equals(msg.getArchitecture())){ SystemTagCreator creator = ImageSystemTags.BOOT_MODE.newSystemTagCreator(msg.getImageUuid()); creator.setTagByTokens(Collections.singletonMap(ImageSystemTags.BOOT_MODE_TOKEN, ImageBootMode.UEFI.toString())); creator.recreate = true; creator.create(); } if (ImageArchitecture.x86_64.toString().equals(msg.getArchitecture()) && self.isSystem()) { SystemTagCreator creator = ImageSystemTags.BOOT_MODE.newSystemTagCreator(msg.getUuid()); creator.setTagByTokens(Collections.singletonMap(ImageSystemTags.BOOT_MODE_TOKEN, ImageBootMode.Legacy.toString())); creator.recreate = true; creator.create(); } } private void handle(UpdateImageMsg msg) { updateImage(msg); UpdateImageReply reply = new UpdateImageReply(); reply.setInventory(getSelfInventory()); bus.reply(msg, reply); } private void handle(APIUpdateImageMsg msg) { updateImage(UpdateImageMsg.valueOf(msg)); APIUpdateImageEvent evt = new APIUpdateImageEvent(msg.getId()); evt.setInventory(getSelfInventory()); bus.publish(evt); } private void handle(APIChangeImageStateMsg msg) { ImageStateEvent sevt = ImageStateEvent.valueOf(msg.getStateEvent()); if (sevt == ImageStateEvent.disable) { self.setState(ImageState.Disabled); } else { self.setState(ImageState.Enabled); } self = dbf.updateAndRefresh(self); APIChangeImageStateEvent evt = new APIChangeImageStateEvent(msg.getId()); evt.setInventory(ImageInventory.valueOf(self)); 
bus.publish(evt); } private void handle(APIDeleteImageMsg msg) { final APIDeleteImageEvent evt = new APIDeleteImageEvent(msg.getId()); final String issuer = ImageVO.class.getSimpleName(); ImageDeletionStruct struct = new ImageDeletionStruct(); struct.setImage(ImageInventory.valueOf(self)); struct.setBackupStorageUuids(msg.getBackupStorageUuids()); final List<ImageDeletionStruct> ctx = Arrays.asList(struct); FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("delete-image-%s", msg.getUuid())); if (msg.getDeletionMode() == APIDeleteMessage.DeletionMode.Permissive) { chain.then(new NoRollbackFlow() { @Override public void run(final FlowTrigger trigger, Map data) { casf.asyncCascade(CascadeConstant.DELETION_CHECK_CODE, issuer, ctx, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).then(new NoRollbackFlow() { @Override public void run(final FlowTrigger trigger, Map data) { casf.asyncCascade(CascadeConstant.DELETION_DELETE_CODE, issuer, ctx, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }); } else { chain.then(new NoRollbackFlow() { @Override public void run(final FlowTrigger trigger, Map data) { casf.asyncCascade(CascadeConstant.DELETION_FORCE_DELETE_CODE, issuer, ctx, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }); } chain.done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { casf.asyncCascadeFull(CascadeConstant.DELETION_CLEANUP_CODE, issuer, ctx, new NopeCompletion()); bus.publish(evt); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { evt.setError(err(SysErrors.DELETE_RESOURCE_ERROR, errCode, errCode.getDetails())); 
bus.publish(evt); } }).start(); } }
20,293
450
# Lint-checker test fixture (Python 2).  Each definition below deliberately
# exercises a specific checker warning (or the absence of one).  The oddities
# -- unused imports, wrong arities, shadowed names -- are intentional test
# inputs; do not "fix" them.
'd'
from types import StringType, NoneType
from string import upper


def foo():
    # map over a string with a lambda; upper() is the Py2 string-module helper.
    return map(lambda x: upper(x), 'testing')


def func(a, b, *args, **kw):
    'verify no warnings for variables (arguments) used before set'
    print a, b, args, kw


class E(Exception):
    'doc'


def x():
    'instantiate a new E with many args, should not be a warning'
    print E('test', 'test', 'test', 'test', 'test', 0)


def y():
    from string import lower
    # Rebinding the imported name with its own call result is intentional.
    lower = lower('Unknown option')


def foobar(x):
    return x


def nn():
    # Lambda whose default argument is bound to a module-level function.
    print map(lambda x, s=foobar: s(x), [1, 2])


def start() :
    # int() called with a bogus keyword argument -- intentional.
    int(args=(lambda x=None: x,))


def nn2():
    n = []
    n.append(1)
    n.append(1, 2)  # intentionally wrong arity for list.append
    n.append((1, 2))


def run():
    foobar(x={0: 5})
302
474
package org.javacord.api.entity.permission;

import org.javacord.api.entity.permission.internal.RoleBuilderDelegate;
import org.javacord.api.entity.server.Server;
import org.javacord.api.util.internal.DelegateFactory;

import java.awt.Color;
import java.util.concurrent.CompletableFuture;

/**
 * This class is used to create roles.
 */
public class RoleBuilder {

    /**
     * The role delegate used by this instance.
     */
    private final RoleBuilderDelegate delegate;

    /**
     * Creates a new role builder for the given server.
     *
     * @param server The server for which the role should be created.
     */
    public RoleBuilder(Server server) {
        delegate = DelegateFactory.createRoleBuilderDelegate(server);
    }

    /**
     * Sets the reason for the creation. This reason will be visible in the audit log entry(s).
     *
     * @param reason The reason for this creation.
     * @return The current instance in order to chain call methods.
     */
    public RoleBuilder setAuditLogReason(String reason) {
        delegate.setAuditLogReason(reason);
        return this;
    }

    /**
     * Sets the name of the role.
     * By default it's <code>"new role"</code>.
     *
     * @param name The name of the role.
     * @return The current instance in order to chain call methods.
     */
    public RoleBuilder setName(String name) {
        delegate.setName(name);
        return this;
    }

    /**
     * Sets the permissions of the role.
     * By default it uses the permissions of the @everyone role.
     *
     * @param permissions The permissions to set.
     * @return The current instance in order to chain call methods.
     */
    public RoleBuilder setPermissions(Permissions permissions) {
        delegate.setPermissions(permissions);
        return this;
    }

    /**
     * Sets the color of the role.
     *
     * @param color The color of the role.
     * @return The current instance in order to chain call methods.
     */
    public RoleBuilder setColor(Color color) {
        delegate.setColor(color);
        return this;
    }

    /**
     * Sets if the role is mentionable or not.
     * By default it's set to <code>false</code>.
     *
     * @param mentionable Whether the role should be mentionable or not.
     * @return The current instance in order to chain call methods.
     */
    public RoleBuilder setMentionable(boolean mentionable) {
        delegate.setMentionable(mentionable);
        return this;
    }

    /**
     * Sets if the role should be pinned in the user listing (sometimes called "hoist").
     *
     * @param displaySeparately Whether the role should be pinned in the user listing or not.
     * @return The current instance in order to chain call methods.
     */
    public RoleBuilder setDisplaySeparately(boolean displaySeparately) {
        delegate.setDisplaySeparately(displaySeparately);
        return this;
    }

    /**
     * Creates the role on the server this builder was constructed for.
     *
     * @return A future that completes with the created role.
     */
    public CompletableFuture<Role> create() {
        return delegate.create();
    }

}
1,106
766
/*
 * Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.siddhi.core.query.selector.attribute.aggregator.incremental;

import io.siddhi.annotation.Example;
import io.siddhi.annotation.Extension;
import io.siddhi.annotation.ParameterOverload;
import io.siddhi.annotation.ReturnAttribute;
import io.siddhi.annotation.util.DataType;
import io.siddhi.query.api.definition.Attribute;
import io.siddhi.query.api.expression.Expression;
import org.apache.log4j.Logger;

/**
 * {@link IncrementalAttributeAggregator} to calculate count based on an event attribute.
 */
@Extension(
        name = "count",
        namespace = "incrementalAggregator",
        description = "Returns the count of all events, in incremental event processing",
        parameters = {},
        parameterOverloads = {
                @ParameterOverload()
        },
        returnAttributes = @ReturnAttribute(
                description = "Returns the event count as a long.",
                type = {DataType.LONG}),
        examples = @Example(
                syntax = " define aggregation cseEventAggregation\n from cseEventStream\n" +
                        " select count() as countEvents,\n aggregate by timeStamp every sec ... hour;",
                description = "count() returns the count of all the events based on their " +
                        "arrival and expiry. The count is calculated for sec, min and hour durations."
        )
)
public class CountIncrementalAttributeAggregator extends IncrementalAttributeAggregator {

    private static final Logger LOG = Logger.getLogger(CountIncrementalAttributeAggregator.class);

    // Base attributes tracked for incremental aggregation (a single AGG_COUNT).
    private Attribute[] baseAttributes;
    // Per-event initial values for the base attributes (count starts at 1 per event).
    private Expression[] baseAttributesInitialValues;

    /**
     * Initializes the aggregator. Any attribute argument is ignored; count()
     * always counts events, and a warning is logged if an attribute is given.
     *
     * @param attributeName the (ignored) attribute name, may be null
     * @param attributeType the (ignored) attribute type
     */
    @Override
    public void init(String attributeName, Attribute.Type attributeType) {
        if (attributeName != null) {
            LOG.warn("Aggregation count function will return the count of all events and count(" + attributeName +
                    ") will be considered as count().");
        }
        Attribute count;
        Expression countInitialValue;

        // Since we set the initial value of count, we can simply set it as long
        // However, since count is summed internally (in avg incremental calculation),
        // ensure that either double or long is used here (since return value of sum is long or
        // double. Long is chosen here)
        count = new Attribute("AGG_COUNT", Attribute.Type.LONG);
        countInitialValue = Expression.value(1L);

        this.baseAttributes = new Attribute[]{count};
        this.baseAttributesInitialValues = new Expression[]{countInitialValue};
    }

    /**
     * Returns the expression that reads the aggregated count value.
     */
    @Override
    public Expression aggregate() {
        return Expression.variable(baseAttributes[0].getName());
    }

    /** Returns the base attributes maintained by this aggregator. */
    @Override
    public Attribute[] getBaseAttributes() {
        return this.baseAttributes;
    }

    /** Returns the per-event initial values for the base attributes. */
    @Override
    public Expression[] getBaseAttributeInitialValues() {
        return this.baseAttributesInitialValues;
    }

    /**
     * Returns the aggregator expressions: the count is accumulated by summing
     * the per-event AGG_COUNT values across durations.
     */
    @Override
    public Expression[] getBaseAggregators() {
        Expression countAggregator = Expression.function("sum",
                Expression.variable(getBaseAttributes()[0].getName()));
        return new Expression[]{countAggregator};
    }

    /** The final count is returned as a LONG. */
    @Override
    public Attribute.Type getReturnType() {
        return Attribute.Type.LONG;
    }
}
1,373
2,881
<reponame>bruce-stewart<gh_stars>1000+
package com.salesmanager.shop.model.system;

import java.util.Date;

/**
 * Shop-facing representation of an opt-in record, extending the base
 * {@link Optin} model with validity dates, type, owning store and
 * descriptive fields.
 */
public class OptinEntity extends Optin {

    /** Serialization version for the Serializable contract inherited from Optin. */
    private static final long serialVersionUID = 1L;

    // Date from which the opt-in is active.
    private Date startDate;
    // Date after which the opt-in expires.
    private Date endDate;
    // Kind of opt-in (e.g. newsletter, promotion).
    private String optinType;
    // Store the opt-in belongs to.
    private String store;
    // Unique code identifying the opt-in.
    private String code;
    // Human-readable description.
    private String description;

    public Date getStartDate() {
        return startDate;
    }

    public void setStartDate(Date startDate) {
        this.startDate = startDate;
    }

    public Date getEndDate() {
        return endDate;
    }

    public void setEndDate(Date endDate) {
        this.endDate = endDate;
    }

    public String getStore() {
        return store;
    }

    public void setStore(String store) {
        this.store = store;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getOptinType() {
        return optinType;
    }

    public void setOptinType(String optinType) {
        this.optinType = optinType;
    }

}
394
2,482
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nutch.plugin;

import java.net.URL;
import java.net.URLClassLoader;
import java.util.Arrays;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;

/**
 * The <code>PluginClassLoader</code> is a child-first classloader that only
 * contains classes of the runtime libraries setuped in the plugin manifest file
 * and exported libraries of plugins that are required plugins. Libraries can be
 * exported or not. Not exported libraries are only used in the plugin own
 * <code>PluginClassLoader</code>. Exported libraries are available for
 * <code>PluginClassLoader</code> of plugins that depends on these plugins.
 */
public class PluginClassLoader extends URLClassLoader {

  /** URLs this loader serves; also part of the equals/hashCode identity. */
  private final URL[] urls;
  /** Parent loader consulted after the local URLs fail. */
  private final ClassLoader parent;
  /** System loader consulted last (JVM classes, endorsed, cmd classpath). */
  private ClassLoader system = getSystemClassLoader();

  /**
   * Overloaded constructor
   *
   * @param urls
   *          Array of urls with own libraries and all exported libraries of
   *          plugins that are required for this plugin
   * @param parent the parent {@link java.lang.ClassLoader} responsible for
   *          loading classes defined in urls.
   */
  public PluginClassLoader(URL[] urls, ClassLoader parent) {
    super(urls, parent);
    this.urls = urls;
    this.parent = parent;
  }

  /**
   * Child-first lookup: already-loaded classes first, then this loader's own
   * URLs, then the parent, and finally the system loader.
   */
  @Override
  protected synchronized Class<?> loadClass(String name, boolean resolve)
      throws ClassNotFoundException {
    // First, check if the class has already been loaded
    Class<?> c = findLoadedClass(name);
    if (c == null) {
      try {
        // checking local URLs before delegating (the child-first property)
        c = findClass(name);
      } catch (ClassNotFoundException | SecurityException e) {
        c = loadClassFromParent(name, resolve);
      }
    }
    if (resolve) {
      resolveClass(c);
    }
    return c;
  }

  /**
   * Delegates to the parent loader, falling back to the system loader.
   * Note: super.loadClass may eventually call findClass again, in case the
   * parent doesn't find anything.
   */
  private Class<?> loadClassFromParent(String name, boolean resolve)
      throws ClassNotFoundException {
    try {
      return super.loadClass(name, resolve);
    } catch (ClassNotFoundException | SecurityException e) {
      // Both failure modes fall back to the system loader identically.
      return loadClassFromSystem(name);
    }
  }

  /**
   * Last-resort lookup via the system classloader (JVM classes, endorsed
   * dirs, command-line classpath). Returns null only when there is no system
   * loader at all.
   */
  private Class<?> loadClassFromSystem(String name)
      throws ClassNotFoundException {
    Class<?> c = null;
    if (system != null) {
      c = system.loadClass(name);
    }
    return c;
  }

  /**
   * Child-first resource lookup: local URLs, then parent, then system.
   */
  @Override
  public URL getResource(String name) {
    URL url = findResource(name);
    if (url == null)
      url = super.getResource(name);
    if (url == null && system != null)
      url = system.getResource(name);
    return url;
  }

  /**
   * Similar to super, but local resources are enumerated before parent
   * resources.
   */
  @Override
  public Enumeration<URL> getResources(String name) throws IOException {
    Enumeration<URL> systemUrls = null;
    if (system != null) {
      systemUrls = system.getResources(name);
    }
    Enumeration<URL> localUrls = findResources(name);
    Enumeration<URL> parentUrls = null;
    if (getParent() != null) {
      parentUrls = getParent().getResources(name);
    }
    final List<URL> urls = new ArrayList<URL>();
    if (localUrls != null) {
      while (localUrls.hasMoreElements()) {
        URL local = localUrls.nextElement();
        urls.add(local);
      }
    }
    if (systemUrls != null) {
      while (systemUrls.hasMoreElements()) {
        urls.add(systemUrls.nextElement());
      }
    }
    if (parentUrls != null) {
      while (parentUrls.hasMoreElements()) {
        urls.add(parentUrls.nextElement());
      }
    }
    return new Enumeration<URL>() {
      Iterator<URL> iter = urls.iterator();

      @Override
      public boolean hasMoreElements() {
        return iter.hasNext();
      }

      @Override
      public URL nextElement() {
        return iter.next();
      }
    };
  }

  /**
   * Opens the named resource as a stream, using the same child-first lookup
   * as {@link #getResource(String)}. Returns null when the resource is
   * missing or cannot be opened.
   */
  @Override
  public InputStream getResourceAsStream(String name) {
    URL url = getResource(name);
    try {
      return url != null ? url.openStream() : null;
    } catch (IOException ignored) {
      // A resource that cannot be opened is treated the same as a missing one.
      return null;
    }
  }

  @Override
  public int hashCode() {
    final int PRIME = 31;
    int result = 1;
    result = PRIME * result + ((parent == null) ? 0 : parent.hashCode());
    result = PRIME * result + Arrays.hashCode(urls);
    return result;
  }

  /** Two loaders are equal when they share the same parent and URL set. */
  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null || getClass() != obj.getClass())
      return false;
    final PluginClassLoader other = (PluginClassLoader) obj;
    return Objects.equals(parent, other.parent) && Arrays.equals(urls, other.urls);
  }
}
2,024
305
from __future__ import absolute_import
from .epytext import epytext_author
from .common import default_movie, plain_exercise, DEFAULT_ARGLIST


def define(FILENAME_EXTENSION,
           BLANKLINE,
           INLINE_TAGS_SUBST,
           CODE,
           LIST,
           ARGLIST,
           TABLE,
           EXERCISE,
           FIGURE_EXT,
           CROSS_REFS,
           INDEX_BIB,
           TOC,
           ENVIRS,
           QUIZ,
           INTRO,
           OUTRO,
           filestr):
    """Register the 'st' (Structured Text) output format.

    Fills the per-format dispatch dicts with the substitutions, code/table
    renderers, and list styles used when translating to Structured Text.
    """
    # all arguments are dicts and accept in-place modifications (extensions)

    FILENAME_EXTENSION['st'] = '.st'
    BLANKLINE['st'] = '\n'

    # replacement patterns for substitutions of inline tags
    INLINE_TAGS_SUBST['st'] = {
        'math': r'\g<begin>\g<subst>\g<end>',
        'math2': r'\g<begin>\g<puretext>\g<end>',
        'emphasize': None,  # None means: keep the default emphasize markup
        'bold': r'\g<begin>**\g<subst>**\g<end>',
        'verbatim': r"\g<begin>'\g<subst>'\g<end>",
        #'linkURL': r'\g<begin>"\g<url>":\g<link>\g<end>',
        'linkURL2': r'"\g<url>":\g<link>',
        'linkURL3': r'"\g<url>":\g<link>',
        'linkURL2v': r"""\g<url>:'\g<link>'""",
        'linkURL3v': r"""\g<url>:'\g<link>'""",
        'plainURL': r'"\g<url>":\g<url>',
        'colortext': '\g<text>',
        # the replacement string differs, depending on the match object m:
        'chapter': r'\g<subst>',
        'section': r'\g<subst>',
        'subsection': r'\g<subst>',
        'subsubsection': r'\g<subst>\n',
        'paragraph': r'*\g<subst>*\g<space>',  # extra blank
        'abstract': r'*\g<type>.* \g<text>\n\g<rest>',
        'title': r'TITLE: \g<subst>',
        'date': r'DATE: \g<subst>',
        'author': epytext_author,
        'movie': default_movie,
        'linebreak': r'\g<text>',
        'non-breaking-space': ' ',
        'ampersand2': r' \g<1>&\g<2>',
    }

    # Code blocks and tables are rendered the same way as in reST.
    from .rst import rst_code, rst_table
    CODE['st'] = rst_code
    TABLE['st'] = rst_table
    LIST['st'] = {
        'itemize': {'begin': '', 'item': '-', 'end': '\n'},
        'enumerate': {'begin': '', 'item': '%d.', 'end': '\n'},
        'description': {'begin': '', 'item': '%s -- ', 'end': '\n'},
        'separator': '',
    }
    ARGLIST['st'] = DEFAULT_ARGLIST

    # Cross references, index/bibliography, and exercises fall back to the
    # plain-text renderers.
    from .plaintext import plain_ref_and_label, plain_index_bib
    CROSS_REFS['st'] = plain_ref_and_label
    INDEX_BIB['st'] = plain_index_bib
    EXERCISE['st'] = plain_exercise
    TOC['st'] = lambda s, f: ''  # drop
    from .plaintext import plain_quiz
    QUIZ['st'] = plain_quiz
1,470
2,151
<gh_stars>1000+
/*
 * The copyright in this software is being made available under the 2-clauses
 * BSD License, included below. This software may be subject to other third
 * party and contributor rights, including patent rights, and no such rights
 * are granted under this license.
 *
 * Copyright (c) 2002-2014, Universite catholique de Louvain (UCL), Belgium
 * Copyright (c) 2002-2014, Professor <NAME>
 * Copyright (c) 2001-2003, <NAME>
 * Copyright (c) 2002-2003, <NAME>
 * Copyright (c) 2003-2007, <NAME>
 * Copyright (c) 2003-2014, <NAME>
 * Copyright (c) 2005, <NAME>, FreeImage Team
 * Copyright (c) 2007, <NAME> <<EMAIL>>
 * Copyright (c) 2012, <NAME>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS `AS IS'
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Standalone generator that prints the T1 (EBCOT tier-1) lookup tables used
 * by the JPEG 2000 coder as C source to stdout.  Run at build time; the
 * output is pasted into the library.
 */

#include "opj_includes.h"

/*
 * Zero-coding context number for significance flags f and subband
 * orientation.  h/v/d count the significant horizontal, vertical and
 * diagonal neighbours encoded in the T1_SIGMA_* bits.
 */
static int t1_init_ctxno_zc(OPJ_UINT32 f, OPJ_UINT32 orient)
{
    int h, v, d, n, t, hv;
    n = 0;
    h = ((f & T1_SIGMA_3) != 0) + ((f & T1_SIGMA_5) != 0);
    v = ((f & T1_SIGMA_1) != 0) + ((f & T1_SIGMA_7) != 0);
    d = ((f & T1_SIGMA_0) != 0) + ((f & T1_SIGMA_2) != 0) + ((
            f & T1_SIGMA_8) != 0) + ((f & T1_SIGMA_6) != 0);

    switch (orient) {
    case 2:
        /* HL band: swap the roles of h and v, then FALL THROUGH to the
           shared LL/HL/LH classification below (intentional, no break). */
        t = h;
        h = v;
        v = t;
    case 0:
    case 1:
        if (!h) {
            if (!v) {
                if (!d) {
                    n = 0;
                } else if (d == 1) {
                    n = 1;
                } else {
                    n = 2;
                }
            } else if (v == 1) {
                n = 3;
            } else {
                n = 4;
            }
        } else if (h == 1) {
            if (!v) {
                if (!d) {
                    n = 5;
                } else {
                    n = 6;
                }
            } else {
                n = 7;
            }
        } else {
            n = 8;
        }
        break;
    case 3:
        /* HH band: classification depends on diagonal count d vs h+v. */
        hv = h + v;
        if (!d) {
            if (!hv) {
                n = 0;
            } else if (hv == 1) {
                n = 1;
            } else {
                n = 2;
            }
        } else if (d == 1) {
            if (!hv) {
                n = 3;
            } else if (hv == 1) {
                n = 4;
            } else {
                n = 5;
            }
        } else if (d == 2) {
            if (!hv) {
                n = 6;
            } else {
                n = 7;
            }
        } else {
            n = 8;
        }
        break;
    }

    return (T1_CTXNO_ZC + n);
}

/*
 * Sign-coding context number.  hc/vc are the horizontal and vertical sign
 * contributions in {-1, 0, 1}; when hc is negative, both are negated so only
 * the five canonical contexts remain.
 */
static int t1_init_ctxno_sc(OPJ_UINT32 f)
{
    int hc, vc, n;
    n = 0;

    hc = opj_int_min(((f & (T1_LUT_SIG_E | T1_LUT_SGN_E)) ==
                      T1_LUT_SIG_E) + ((f & (T1_LUT_SIG_W | T1_LUT_SGN_W)) == T1_LUT_SIG_W),
                     1) - opj_int_min(((f & (T1_LUT_SIG_E | T1_LUT_SGN_E)) ==
                                       (T1_LUT_SIG_E | T1_LUT_SGN_E)) + ((f & (T1_LUT_SIG_W | T1_LUT_SGN_W)) ==
                                               (T1_LUT_SIG_W | T1_LUT_SGN_W)), 1);

    vc = opj_int_min(((f & (T1_LUT_SIG_N | T1_LUT_SGN_N)) ==
                      T1_LUT_SIG_N) + ((f & (T1_LUT_SIG_S | T1_LUT_SGN_S)) == T1_LUT_SIG_S),
                     1) - opj_int_min(((f & (T1_LUT_SIG_N | T1_LUT_SGN_N)) ==
                                       (T1_LUT_SIG_N | T1_LUT_SGN_N)) + ((f & (T1_LUT_SIG_S | T1_LUT_SGN_S)) ==
                                               (T1_LUT_SIG_S | T1_LUT_SGN_S)), 1);

    if (hc < 0) {
        hc = -hc;
        vc = -vc;
    }
    if (!hc) {
        if (vc == -1) {
            n = 1;
        } else if (!vc) {
            n = 0;
        } else {
            n = 1;
        }
    } else if (hc == 1) {
        if (vc == -1) {
            n = 2;
        } else if (!vc) {
            n = 3;
        } else {
            n = 4;
        }
    }

    return (T1_CTXNO_SC + n);
}

/*
 * Sign-prediction bit: 0 when the predicted sign is positive, 1 when
 * negative, derived from the same hc/vc contributions as above.
 */
static int t1_init_spb(OPJ_UINT32 f)
{
    int hc, vc, n;

    hc = opj_int_min(((f & (T1_LUT_SIG_E | T1_LUT_SGN_E)) ==
                      T1_LUT_SIG_E) + ((f & (T1_LUT_SIG_W | T1_LUT_SGN_W)) == T1_LUT_SIG_W),
                     1) - opj_int_min(((f & (T1_LUT_SIG_E | T1_LUT_SGN_E)) ==
                                       (T1_LUT_SIG_E | T1_LUT_SGN_E)) + ((f & (T1_LUT_SIG_W | T1_LUT_SGN_W)) ==
                                               (T1_LUT_SIG_W | T1_LUT_SGN_W)), 1);

    vc = opj_int_min(((f & (T1_LUT_SIG_N | T1_LUT_SGN_N)) ==
                      T1_LUT_SIG_N) + ((f & (T1_LUT_SIG_S | T1_LUT_SGN_S)) == T1_LUT_SIG_S),
                     1) - opj_int_min(((f & (T1_LUT_SIG_N | T1_LUT_SGN_N)) ==
                                       (T1_LUT_SIG_N | T1_LUT_SGN_N)) + ((f & (T1_LUT_SIG_S | T1_LUT_SGN_S)) ==
                                               (T1_LUT_SIG_S | T1_LUT_SGN_S)), 1);

    if (!hc && !vc) {
        n = 0;
    } else {
        n = (!(hc > 0 || (!hc && vc > 0)));
    }

    return n;
}

/* Prints `size` ints as a comma-separated hex initializer body, 8 per row,
   closing the brace of the array definition started by the caller. */
static void dump_array16(int array[], int size)
{
    int i;
    --size;
    for (i = 0; i < size; ++i) {
        printf("0x%04x,", array[i]);
        if (!((i + 1) & 0x7)) {
            printf("\n ");
        } else {
            printf(" ");
        }
    }
    printf("0x%04x\n};\n\n", array[size]);
}

/* Generates all LUTs and prints them as a C source file on stdout. */
int main(int argc, char **argv)
{
    unsigned int i, j;
    double u, v, t;

    int lut_ctxno_zc[2048];
    int lut_nmsedec_sig[1 << T1_NMSEDEC_BITS];
    int lut_nmsedec_sig0[1 << T1_NMSEDEC_BITS];
    int lut_nmsedec_ref[1 << T1_NMSEDEC_BITS];
    int lut_nmsedec_ref0[1 << T1_NMSEDEC_BITS];
    (void)argc;
    (void)argv;

    printf("/* This file was automatically generated by t1_generate_luts.c */\n\n");

    /* lut_ctxno_zc */
    for (j = 0; j < 4; ++j) {
        for (i = 0; i < 512; ++i) {
            /* Orientations 1 and 2 are swapped in the table index so the
               runtime lookup can use the subband orientation directly. */
            OPJ_UINT32 orient = j;
            if (orient == 2) {
                orient = 1;
            } else if (orient == 1) {
                orient = 2;
            }
            lut_ctxno_zc[(orient << 9) | i] = t1_init_ctxno_zc(i, j);
        }
    }

    printf("static const OPJ_BYTE lut_ctxno_zc[2048] = {\n ");
    for (i = 0; i < 2047; ++i) {
        printf("%i,", lut_ctxno_zc[i]);
        if (!((i + 1) & 0x1f)) {
            printf("\n ");
        } else {
            printf(" ");
        }
    }
    printf("%i\n};\n\n", lut_ctxno_zc[2047]);

    /* lut_ctxno_sc */
    printf("static const OPJ_BYTE lut_ctxno_sc[256] = {\n ");
    for (i = 0; i < 255; ++i) {
        printf("0x%x,", t1_init_ctxno_sc(i));
        if (!((i + 1) & 0xf)) {
            printf("\n ");
        } else {
            printf(" ");
        }
    }
    printf("0x%x\n};\n\n", t1_init_ctxno_sc(255));

    /* lut_spb */
    printf("static const OPJ_BYTE lut_spb[256] = {\n ");
    for (i = 0; i < 255; ++i) {
        printf("%i,", t1_init_spb(i));
        if (!((i + 1) & 0x1f)) {
            printf("\n ");
        } else {
            printf(" ");
        }
    }
    printf("%i\n};\n\n", t1_init_spb(255));

    /* FIXME FIXME FIXME */
    /* fprintf(stdout,"nmsedec luts:\n"); */
    /* Normalized MSE-decrease tables for the significance and refinement
       passes, in fixed point with T1_NMSEDEC_FRACBITS fractional bits,
       scaled by 8192 and clamped at zero. */
    for (i = 0U; i < (1U << T1_NMSEDEC_BITS); ++i) {
        t = i / pow(2, T1_NMSEDEC_FRACBITS);
        u = t;
        v = t - 1.5;
        lut_nmsedec_sig[i] = opj_int_max(0,
                                         (int)(floor((u * u - v * v) * pow(2, T1_NMSEDEC_FRACBITS) + 0.5) / pow(2,
                                                 T1_NMSEDEC_FRACBITS) * 8192.0));
        lut_nmsedec_sig0[i] = opj_int_max(0,
                                          (int)(floor((u * u) * pow(2, T1_NMSEDEC_FRACBITS) + 0.5) / pow(2,
                                                  T1_NMSEDEC_FRACBITS) * 8192.0));
        u = t - 1.0;
        if (i & (1 << (T1_NMSEDEC_BITS - 1))) {
            v = t - 1.5;
        } else {
            v = t - 0.5;
        }
        lut_nmsedec_ref[i] = opj_int_max(0,
                                         (int)(floor((u * u - v * v) * pow(2, T1_NMSEDEC_FRACBITS) + 0.5) / pow(2,
                                                 T1_NMSEDEC_FRACBITS) * 8192.0));
        lut_nmsedec_ref0[i] = opj_int_max(0,
                                          (int)(floor((u * u) * pow(2, T1_NMSEDEC_FRACBITS) + 0.5) / pow(2,
                                                  T1_NMSEDEC_FRACBITS) * 8192.0));
    }

    printf("static const OPJ_INT16 lut_nmsedec_sig[1U << T1_NMSEDEC_BITS] = {\n ");
    dump_array16(lut_nmsedec_sig, 1U << T1_NMSEDEC_BITS);

    printf("static const OPJ_INT16 lut_nmsedec_sig0[1U << T1_NMSEDEC_BITS] = {\n ");
    dump_array16(lut_nmsedec_sig0, 1U << T1_NMSEDEC_BITS);

    printf("static const OPJ_INT16 lut_nmsedec_ref[1U << T1_NMSEDEC_BITS] = {\n ");
    dump_array16(lut_nmsedec_ref, 1U << T1_NMSEDEC_BITS);

    printf("static const OPJ_INT16 lut_nmsedec_ref0[1U << T1_NMSEDEC_BITS] = {\n ");
    dump_array16(lut_nmsedec_ref0, 1U << T1_NMSEDEC_BITS);

    return 0;
}
6,157
1,056
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/*
 * AutoGridLayout.java
 *
 * Created on 04 October 2003, 16:01
 */
package org.openide.explorer.propertysheet;

import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Insets;
import java.awt.LayoutManager;
import java.awt.Toolkit;
import java.util.Arrays;
import java.util.Comparator;

/**A layout manager which can layout like-heighted components in a grid
 * pattern based on their preferred sizes.  Can be used in packed or unpacked
 * mode.  Column positions are based on the preferred sizes of the first row
 * of components.<p>
 * When packed, the components are sorted from narrowest to widest;
 * subsequent rows will use these columns, with components two wide for a
 * single column spanning two or more columns.<p>
 * When not packed, the components are sorted from widest to narrowest,
 * resulting in more whitespace, but consistent rows of columns - no
 * component will span more than one column.<p>
 * Used by <code>RadioInplaceEditor</code> to manage the set of radio
 * buttons representing property editor tags.
 *
 * @author <NAME>
 */
class AutoGridLayout implements LayoutManager {
    // Vertical gap, in pixels, inserted between successive rows.
    int gapY = 5;
    // true = sort narrowest-first (compact layout); false = widest-first
    // (consistent columns, more whitespace).
    boolean pack;

    public AutoGridLayout(boolean pack) {
        this.pack = pack;
    }

    public void addLayoutComponent(String name, Component comp) {
        //Do nothing
    }

    public void removeLayoutComponent(Component comp) {
        //Do nothing
    }

    /** Comparator ordering components by preferred width, direction chosen by {@code pack}. */
    private Comparator<Component> comparator() {
        return new PreferredSizeComparator(pack);
    }

    public void layoutContainer(Container parent) {
        Component[] c = parent.getComponents();

        // NOTE(review): sorting parent.getComponents() reorders only the local
        // array, not the container's child order - positions are assigned to
        // the sorted array elements directly.
        if (c.length > 3) {
            Arrays.sort(c, comparator());
        }

        if (c.length == 2) {
            //we're probably a radio button editor in the property sheet -
            //make sure that both buttons are displayed, even if some clipping
            //would occur - we don't have another row to go to
            Dimension d0 = c[0].getPreferredSize();
            Dimension d1 = c[1].getPreferredSize();
            c[0].setBounds(0, 0, d0.width, d0.height);
            c[1].setBounds(d0.width, 0, d1.width, d1.height);

            return;
        }

        Insets insets = parent.getInsets();
        int w = parent.getWidth() - insets.right;   // right edge usable for layout
        int h = parent.getHeight() - insets.bottom; // bottom edge usable for layout
        int currx = insets.left;                    // running x cursor
        int curry = insets.top;                     // running y cursor
        boolean done = false;
        int cols = -1;                              // number of columns fixed by the first row

        //Layout the first row of components according to their preferred
        //sizes.  Their positions will act as column positions.  If sorted
        //narrowest-first, results in a smaller, packed AutoGridLayout.  If sorted
        //widest-first, results in nice consistent columns, but uses more
        //space to do it.
        for (int i = 0; i < c.length; i++) {
            Dimension d = c[i].getPreferredSize();

            if ((d.width == 0) || (d.height == 0)) {
                //Can happen for foreign components that can't do
                //a proper preferred size w/o a graphics context
                d = PropUtils.getMinimumPanelSize();
            }

            if ((currx + d.width) > w) {
                // Component would overflow the row: wrap to the next line and,
                // the first time this happens, lock in the column count.
                curry += (d.height + gapY);
                currx = insets.left;

                if (cols == -1) {
                    cols = i;

                    break;
                }
            }

            c[i].setBounds(currx, curry, d.width, d.height);
            currx += d.width;
        }

        if (cols == -1) {
            // Everything fit on one row - every component is its own column.
            cols = c.length;
        }

        int currCol = 0;

        // Lay out the remaining components against the columns established by
        // the first row, letting wide components span multiple columns.
        for (int i = cols; i < c.length; i++) {
            Dimension d = c[i].getPreferredSize();

            if ((currx + d.width) > w) {
                //will only happen with inverse sort - we're starting
                //the loop with a position that won't fit, and should flip
                //to the next line
                curry += (d.height + gapY);
                currx = insets.left;
                currCol = 0;
            }

            //see if we're out of horizontal space and should punt
            done = (curry + d.height) > h;

            if (!done) {
                //fetch the width of this column, as the width of the first row component
                int currColWidth = c[currCol].getWidth();

                //will we fit at all?
                if (d.width <= w) {
                    //loop until we know how many columns we need
                    while (currColWidth <= d.width) {
                        currCol++;

                        if (currCol > cols) {
                            //out of columns?  Flip to the next line
                            currCol = 0;
                            curry += (d.height + gapY);
                            currx = insets.left;
                            currColWidth = 0;
                        }

                        //note the combined column width - it will be the
                        //next iteration's starting x position
                        currColWidth += c[currCol].getWidth();
                    }

                    c[i].setBounds(currx, curry, d.width, d.height);
                    currx += currColWidth;
                } else {
                    //Okay, we've got a component wider than its parent - give up
                    c[i].setBounds(currx, curry, d.width, d.height);
                    currx += d.width; //just clip it if it's wider than max
                }

                if (currx > w) {
                    //see if we should flip to the next row or if there may
                    //still be space
                    currx = insets.left;
                    curry += (d.height + gapY);
                    currCol = 0;
                } else {
                    currCol++;
                }
            } else {
                //If we get here, we've run out of horizontal space - no
                //point in trying to do something reasonable with the component
                c[i].setBounds(0, 0, 0, 0);
            }
        }
    }

    public Dimension minimumLayoutSize(java.awt.Container parent) {
        // Minimum and preferred sizes are identical for this layout.
        return preferredLayoutSize(parent);
    }

    public Dimension preferredLayoutSize(java.awt.Container parent) {
        Component[] c = parent.getComponents();

        if (c.length > 3) {
            Arrays.sort(c, comparator());
        }

        // Cap the preferred width/height at half the screen size.
        Dimension max = Toolkit.getDefaultToolkit().getScreenSize();
        max.width /= 2;
        max.height /= 2;

        Insets insets = parent.getInsets();
        int w = max.width - insets.right;
        int currx = insets.left;
        int cols = -1;
        int baseHeight = 0;              // tallest preferred height seen; used as the row height
        Dimension[] dims = new Dimension[c.length];
        Dimension result = new Dimension();

        //establish the base columns and populate the dimensions array
        for (int i = 0; i < c.length; i++) {
            dims[i] = c[i].getPreferredSize();

            if ((dims[i].width == 0) || (dims[i].height == 0)) {
                //Can happen for foreign components that can't do
                //a proper preferred size w/o a graphics context
                dims[i] = PropUtils.getMinimumPanelSize();
            }

            baseHeight = Math.max(baseHeight, dims[i].height);

            if (cols == -1) {
                if ((currx + dims[i].width) > w) {
                    result.width = currx;
                    cols = i;
                }
            }

            if (cols != -1) {
                //Make sure we don't have one element wider than all the
                //column sizes
                result.width = Math.max(result.width, dims[i].width + insets.left + insets.right);
            }

            currx += dims[i].width;
        }

        if (cols == -1) {
            //we didn't overstretch the available width
            cols = c.length;
            result.width = currx;
        }

        if (!pack && (c.length > 3)) {
            //Then we can take a short cut - we know all will be 1 item per cell
            int rows = (c.length / cols) + (((c.length % cols) != 0) ? 1 : 0);
            result.height = (baseHeight * rows) + (gapY * rows) + insets.top + insets.bottom;
            result.width += 6;
            assert (result.width >= 0) && (result.height >= 0);

            return result;
        }

        int currRow = 0;
        int currCol = 0;
        currx = insets.left;

        //iterate the rest of the array, incrementing the row index
        //when the content won't fit, to find out the total rows needed
        for (int i = cols; i < c.length; i++) {
            int colspan = 1;
            int colwidth = dims[currCol].width;

            // Widen the cell column-by-column until the component fits,
            // wrapping to a new row when we run out of horizontal space.
            while (dims[i].width > colwidth) {
                currCol++;
                colwidth += dims[currCol].width;
                colspan++;

                if ((colwidth + currx) > max.width) {
                    currCol = 0;
                    currRow++;
                    colspan = 1;
                    colwidth = dims[currCol].width;
                }
            }

            currCol += colspan;
            currx += colwidth;

            if ((currCol > cols) && (i != (c.length - 1))) {
                currCol = 0;
                currRow++;
                currx = insets.left;
            }
        }

        result.height = (baseHeight * currRow) + insets.top + insets.bottom + (gapY * currRow);

        return result;
    }

    /** Orders components by preferred width, ascending or descending. */
    private static final class PreferredSizeComparator implements Comparator<Component> {
        boolean smallFirst;

        public PreferredSizeComparator(boolean smallFirst) {
            this.smallFirst = smallFirst;
        }

        public int compare(Component c1, Component c2) {
            Dimension d1 = c1.getPreferredSize();
            Dimension d2 = c2.getPreferredSize();

            // Width subtraction is safe here: component widths are far below
            // overflow range.
            return smallFirst ? (d1.width - d2.width) : (d2.width - d1.width);
        }
    }
}
5,066
30,023
<gh_stars>1000+ { "config": { "abort": { "already_configured": "Urz\u0105dzenie jest ju\u017c skonfigurowane", "no_valid_uuid_set": "Nie ustawiono prawid\u0142owego UUID" }, "error": { "no_valid_uuid_set": "Nie ustawiono prawid\u0142owego UUID" }, "step": { "user": { "data": { "token": "<PASSWORD>", "url": "URL" }, "description": "Wprowad\u017a adres IP z portem i tokenem dost\u0119pu serwera Z-Way. Aby uzyska\u0107 token, przejd\u017a do interfejsu u\u017cytkownika Z-Way Smart Home UI > Menu > Ustawienia > U\u017cytkownicy > Administrator > Token API. \n\nPrzyk\u0142ad po\u0142\u0105czenia z Z-Way w sieci lokalnej:\nURL: {local_url}\nToken: {local_token} \n\nPrzyk\u0142ad po\u0142\u0105czenia z Z-Way przez zdalny dost\u0119p find.z-wave.me:\nURL: {find_url}\nToken: {find_token} \n\nPrzyk\u0142ad po\u0142\u0105czenia z Z-Way za pomoc\u0105 statycznego publicznego adresu IP:\nURL: {remote_url}\nToken: {local_token} \n\n\u0141\u0105cz\u0105c si\u0119 przez find.z-wave.me, musisz u\u017cy\u0107 tokena o zasi\u0119gu globalnym (w tym celu zaloguj si\u0119 do Z-Way przez find.z-wave.me)." } } } }
722
335
<gh_stars>100-1000 { "word": "Resigned", "definitions": [ "Having accepted something unpleasant that one cannot do anything about." ], "parts-of-speech": "Adjective" }
73
870
<reponame>jlpedrosa/accumulo
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.accumulo.shell.commands;

import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.metadata.RootTable;
import org.apache.accumulo.shell.Shell;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;

/**
 * Shell command that brings a table online, optionally blocking until the
 * operation completes when the {@code -w/--wait} flag is given.
 */
public class OnlineCommand extends TableOperation {

  // Populated from the command line in execute() before doTableOp() runs.
  private boolean wait;
  private Option waitOpt;

  @Override
  public String description() {
    return "starts the process of putting a table online";
  }

  @Override
  protected void doTableOp(final Shell shellState, final String tableName)
      throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
    if (tableName.equals(RootTable.NAME)) {
      // The root table can never be taken offline, so there is nothing to do.
      // Fix: the literal previously began with a stray leading space.
      Shell.log.info("The {} is always online.", RootTable.NAME);
    } else {
      shellState.getAccumuloClient().tableOperations().online(tableName, wait);
      // Fix: the ternary literals previously started with a space, producing a
      // double space after the second {} placeholder ("table foo  completed.").
      Shell.log.info("Online of table {} {}", tableName, wait ? "completed." : "initiated...");
    }
  }

  @Override
  public int execute(final String fullCommand, final CommandLine cl, final Shell shellState)
      throws Exception {
    wait = cl.hasOption(waitOpt.getLongOpt());
    return super.execute(fullCommand, cl, shellState);
  }

  @Override
  public Options getOptions() {
    final Options opts = super.getOptions();
    waitOpt = new Option("w", "wait", false, "wait for online to finish");
    opts.addOption(waitOpt);
    return opts;
  }
}
715
12,278
<gh_stars>1000+
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "include/libplatform/libplatform.h"
#include "include/v8.h"

// Minimal V8 embedding example: evaluates a JavaScript string expression, then
// builds and calls a small WebAssembly module through the JS API.
// NOTE: the declaration order of the scope objects below (Isolate::Scope,
// HandleScope, Context::Scope) is significant - they are RAII guards that must
// be entered in this order and exited in reverse.
int main(int argc, char* argv[]) {
  // Initialize V8.
  v8::V8::InitializeICUDefaultLocation(argv[0]);
  v8::V8::InitializeExternalStartupData(argv[0]);
  std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
  v8::V8::InitializePlatform(platform.get());
  v8::V8::Initialize();

  // Create a new Isolate and make it the current one.
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator =
      v8::ArrayBuffer::Allocator::NewDefaultAllocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);

    // Create a stack-allocated handle scope.
    v8::HandleScope handle_scope(isolate);

    // Create a new context.
    v8::Local<v8::Context> context = v8::Context::New(isolate);

    // Enter the context for compiling and running the hello world script.
    v8::Context::Scope context_scope(context);

    {
      // Create a string containing the JavaScript source code.
      v8::Local<v8::String> source =
          v8::String::NewFromUtf8(isolate, "'Hello' + ', World!'",
                                  v8::NewStringType::kNormal)
              .ToLocalChecked();

      // Compile the source code.
      v8::Local<v8::Script> script =
          v8::Script::Compile(context, source).ToLocalChecked();

      // Run the script to get the result.
      v8::Local<v8::Value> result = script->Run(context).ToLocalChecked();

      // Convert the result to an UTF8 string and print it.
      v8::String::Utf8Value utf8(isolate, result);
      printf("%s\n", *utf8);
    }

    {
      // Use the JavaScript API to generate a WebAssembly module.
      //
      // |bytes| contains the binary format for the following module:
      //
      //     (func (export "add") (param i32 i32) (result i32)
      //       get_local 0
      //       get_local 1
      //       i32.add)
      //
      const char* csource = R"(
        let bytes = new Uint8Array([
          0x00, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00, 0x01, 0x07, 0x01,
          0x60, 0x02, 0x7f, 0x7f, 0x01, 0x7f, 0x03, 0x02, 0x01, 0x00, 0x07,
          0x07, 0x01, 0x03, 0x61, 0x64, 0x64, 0x00, 0x00, 0x0a, 0x09, 0x01,
          0x07, 0x00, 0x20, 0x00, 0x20, 0x01, 0x6a, 0x0b
        ]);
        let module = new WebAssembly.Module(bytes);
        let instance = new WebAssembly.Instance(module);
        instance.exports.add(3, 4);
      )";

      // Create a string containing the JavaScript source code.
      v8::Local<v8::String> source =
          v8::String::NewFromUtf8(isolate, csource, v8::NewStringType::kNormal)
              .ToLocalChecked();

      // Compile the source code.
      v8::Local<v8::Script> script =
          v8::Script::Compile(context, source).ToLocalChecked();

      // Run the script to get the result (the value of the last expression,
      // i.e. the exported add(3, 4) call).
      v8::Local<v8::Value> result = script->Run(context).ToLocalChecked();

      // Convert the result to a uint32 and print it.
      uint32_t number = result->Uint32Value(context).ToChecked();
      printf("3 + 4 = %u\n", number);
    }
  }

  // Dispose the isolate and tear down V8.
  isolate->Dispose();
  v8::V8::Dispose();
  v8::V8::ShutdownPlatform();
  delete create_params.array_buffer_allocator;
  return 0;
}
1,515
327
import os
import platform
import torch
import argparse
import numpy as np
import torch.optim as optim
from model import Actor, Critic
from utils.utils import to_tensor, get_action, save_checkpoint
from collections import deque
from utils.running_state import ZFilter
from utils.memory import Memory
from agent.ppo import process_memory, train_model
from unityagents import UnityEnvironment
from tensorboardX import SummaryWriter

# Command-line options for PPO training of a Unity ML-Agents walker.
parser = argparse.ArgumentParser(description='Setting for unity walker agent')
parser.add_argument('--render', default=False, action='store_true',
                    help='if you dont want to render, set this to False')
parser.add_argument('--train', default=False, action='store_true',
                    help='if you dont want to train, set this to False')
# Checkpoint file name under ./save_model to resume from (None = fresh start).
parser.add_argument('--load_model', type=str, default=None)
parser.add_argument('--gamma', type=float, default=0.995, help='discount factor')
parser.add_argument('--lamda', type=float, default=0.95, help='GAE hyper-parameter')
parser.add_argument('--hidden_size', type=int, default=512,
                    help='hidden unit size of actor and critic networks')
parser.add_argument('--critic_lr', type=float, default=0.0003)
parser.add_argument('--actor_lr', type=float, default=0.0003)
parser.add_argument('--batch_size', type=int, default=2048)
parser.add_argument('--max_iter', type=int, default=2000000,
                    help='the number of max iteration')
parser.add_argument('--time_horizon', type=int, default=1000,
                    help='the number of time horizon (step number) T ')
parser.add_argument('--l2_rate', type=float, default=0.001,
                    help='l2 regularizer coefficient')
parser.add_argument('--clip_param', type=float, default=0.1,
                    help='hyper parameter for ppo policy loss and value loss')
parser.add_argument('--activation', type=str, default='swish',
                    help='you can choose between tanh and swish')
parser.add_argument('--logdir', type=str, default='logs',
                    help='tensorboardx logs directory')
parser.add_argument('--env', type=str, default='plane',
                    help='environment, plane or curved')
args = parser.parse_args()
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

if __name__ == "__main__":
    # Pick the platform-specific Unity environment binary.
    if platform.system() == 'Darwin':
        env_name = "./env/{}-mac".format(args.env)
    elif platform.system() == 'Linux':
        env_name = "./env/{}-linux/plane-walker".format(args.env)
    elif platform.system() == 'Windows':
        env_name = "./env/{}-win/Unity Environment".format(args.env)

    train_mode = args.train
    torch.manual_seed(500)

    if args.render:
        env = UnityEnvironment(file_name=env_name)
    else:
        env = UnityEnvironment(file_name=env_name, no_graphics=True)

    # setting for unity ml-agent
    default_brain = env.brain_names[0]
    brain = env.brains[default_brain]
    env_info = env.reset(train_mode=train_mode)[default_brain]

    num_inputs = brain.vector_observation_space_size
    num_actions = brain.vector_action_space_size
    num_agent = env._n_agents[default_brain]

    print('state size:', num_inputs)
    print('action size:', num_actions)
    print('agent count:', num_agent)

    writer = SummaryWriter(args.logdir)

    # running average of state
    running_state = ZFilter((num_agent, num_inputs), clip=5)

    actor = Actor(num_inputs, num_actions, args).to(device)
    critic = Critic(num_inputs, args).to(device)

    if torch.cuda.is_available():
        actor = actor.cuda()
        critic = critic.cuda()

    # Optionally resume networks and the observation normalizer state.
    if args.load_model is not None:
        saved_ckpt_path = os.path.join(os.getcwd(), 'save_model', str(args.load_model))
        ckpt = torch.load(saved_ckpt_path)

        actor.load_state_dict(ckpt['actor'])
        critic.load_state_dict(ckpt['critic'])

        running_state.rs.n = ckpt['z_filter_n']
        running_state.rs.mean = ckpt['z_filter_m']
        running_state.rs.sum_square = ckpt['z_filter_s']

        print("Loaded OK ex. Zfilter N {}".format(running_state.rs.n))

    states = running_state(env_info.vector_observations)

    actor_optim = optim.Adam(actor.parameters(), lr=args.actor_lr)
    critic_optim = optim.Adam(critic.parameters(), lr=args.critic_lr,
                              weight_decay=args.l2_rate)

    scores = []
    score_avg = 0

    for iter in range(args.max_iter):
        actor.eval(), critic.eval()
        # One trajectory buffer per agent.
        memory = [Memory() for _ in range(num_agent)]

        steps = 0
        score = 0

        # Collect args.time_horizon steps of experience from all agents.
        while steps < args.time_horizon:
            steps += 1

            mu, std, _ = actor(to_tensor(states))
            actions = get_action(mu, std)
            env_info = env.step(actions)[default_brain]

            next_states = running_state(env_info.vector_observations)
            rewards = env_info.rewards
            dones = env_info.local_done
            masks = list(~(np.array(dones)))

            for i in range(num_agent):
                memory[i].push(states[i], actions[i], rewards[i], masks[i])

            # Score bookkeeping tracks the first agent only.
            score += rewards[0]
            states = next_states

            if dones[0]:
                scores.append(score)
                score = 0
                episodes = len(scores)

                if len(scores) % 10 == 0:
                    score_avg = np.mean(scores[-min(10, episodes):])
                    print('{}th episode : last 10 episode mean score of 1st agent is {:.2f}'.format(
                        episodes, score_avg))
                    writer.add_scalar('log/score', float(score_avg), iter)

        actor.train(), critic.train()

        # Concatenate per-agent batches and run the PPO update.
        sts, ats, returns, advants, old_policy, old_value = [], [], [], [], [], []

        for i in range(num_agent):
            batch = memory[i].sample()
            st, at, rt, adv, old_p, old_v = process_memory(actor, critic, batch, args)
            sts.append(st)
            ats.append(at)
            returns.append(rt)
            advants.append(adv)
            old_policy.append(old_p)
            old_value.append(old_v)

        sts = torch.cat(sts)
        ats = torch.cat(ats)
        returns = torch.cat(returns)
        advants = torch.cat(advants)
        old_policy = torch.cat(old_policy)
        old_value = torch.cat(old_value)

        train_model(actor, critic, actor_optim, critic_optim, sts, ats, returns,
                    advants, old_policy, old_value, args)

        # Fix: was `if iter % 100:`, which saved a checkpoint on every
        # iteration NOT divisible by 100 (i.e. 99 out of every 100 iterations)
        # and skipped the intended multiples of 100.
        if iter % 100 == 0:
            score_avg = int(score_avg)

            model_path = os.path.join(os.getcwd(), 'save_model')
            if not os.path.isdir(model_path):
                os.makedirs(model_path)

            ckpt_path = os.path.join(model_path, 'ckpt_' + str(score_avg) + '.pth.tar')

            save_checkpoint({
                'actor': actor.state_dict(),
                'critic': critic.state_dict(),
                'z_filter_n': running_state.rs.n,
                'z_filter_m': running_state.rs.mean,
                'z_filter_s': running_state.rs.sum_square,
                'args': args,
                'score': score_avg
            }, filename=ckpt_path)

    env.close()
3,194
416
package org.simpleflatmapper.csv.getter;

import org.simpleflatmapper.converter.Context;
import org.simpleflatmapper.csv.CsvRow;
import org.simpleflatmapper.map.getter.ContextualGetter;

/**
 * Reads a single CSV column from a {@link CsvRow} as a boxed {@link Short}.
 */
public class CsvBoxedShortGetter implements ContextualGetter<CsvRow, Short> {

    /** Zero-based index of the column this getter reads. */
    public final int index;

    /**
     * @param index zero-based column index to read
     */
    public CsvBoxedShortGetter(int index) {
        this.index = index;
    }

    /**
     * Delegates to {@link CsvRow} to fetch the column value as a boxed Short.
     * The mapping {@code context} is not consulted.
     */
    @Override
    public Short get(CsvRow target, Context context) {
        return target.getBoxedShort(this.index);
    }
}
183
338
package com.tvd12.ezyfoxserver.socket; import static com.tvd12.ezyfox.util.EzyProcessor.processWithLogException; import com.tvd12.ezyfox.entity.EzyArray; import com.tvd12.ezyfoxserver.entity.EzySession; import lombok.Setter; public abstract class EzySocketRequestHandler extends EzySocketAbstractEventHandler { @Setter protected EzySessionTicketsQueue sessionTicketsQueue; @Setter protected EzySocketDataHandlerGroupFetcher dataHandlerGroupFetcher; @Override public void handleEvent() { processRequestQueue0(); } @Override public void destroy() { processWithLogException(() -> sessionTicketsQueue.clear()); } private void processRequestQueue0() { EzySocketRequest request = null; try { EzySession session = sessionTicketsQueue.take(); EzyRequestQueue requestQueue = getRequestQueue(session); synchronized (requestQueue) { request = requestQueue.take(); if(requestQueue.size() > 0) sessionTicketsQueue.add(session); } processRequestQueue(request); } catch (InterruptedException e) { logger.info("{}-request-handler thread interrupted: {}", getRequestType(), Thread.currentThread()); } catch(Throwable throwable) { logger.warn("problems in {}-request-handler, thread: {}", getRequestType(), Thread.currentThread(), throwable); } finally { if(request != null) request.release(); } } protected abstract EzyRequestQueue getRequestQueue(EzySession session); protected abstract String getRequestType(); private void processRequestQueue(EzySocketRequest request) throws Exception { try { processRequestQueue0(request); } finally { request.release(); } } private void processRequestQueue0(EzySocketRequest request) throws Exception { EzyArray data = request.getData(); EzySession session = request.getSession(); EzySocketDataHandlerGroup handlerGroup = getDataHandlerGroup(session); if(handlerGroup != null) handlerGroup.fireChannelRead(request.getCommand(), data); else logger.warn("has no handler group with session: {}, drop request: {}", session, request); } protected EzySocketDataHandlerGroup 
getDataHandlerGroup(EzySession session) { return dataHandlerGroupFetcher.getDataHandlerGroup(session); } }
779
892
<reponame>westonsteimel/advisory-database-github { "schema_version": "1.2.0", "id": "GHSA-w343-x6rw-f2x7", "modified": "2022-02-02T00:02:01Z", "published": "2022-01-26T00:00:35Z", "aliases": [ "CVE-2022-23021" ], "details": "On BIG-IP version 16.1.x before 16.1.2, when any of the following configurations are configured on a virtual server, undisclosed requests can cause the Traffic Management Microkernel (TMM) to terminate: HTTP redirect rule in an LTM policy, BIG-IP APM Access Profile, and Explicit HTTP Proxy in HTTP Profile. Note: Software versions which have reached End of Technical Support (EoTS) are not evaluated.", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-23021" }, { "type": "WEB", "url": "https://support.f5.com/csp/article/K57111075" } ], "database_specific": { "cwe_ids": [ "CWE-476" ], "severity": "HIGH", "github_reviewed": false } }
418
5,079
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import asyncio
import datetime
import functools
import logging
import os

try:
    import ssl
except ImportError:
    ssl = None

import gunicorn.workers.base as base

from aiohttp.wsgi import WSGIServerHttpProtocol as OldWSGIServerHttpProtocol


class WSGIServerHttpProtocol(OldWSGIServerHttpProtocol):
    """aiohttp WSGI protocol that routes access logging through gunicorn."""

    def log_access(self, request, environ, response, time):
        # `time` is forwarded as the microseconds field of the timedelta.
        self.logger.access(response, request, environ,
                           datetime.timedelta(0, 0, time))


class AiohttpWorker(base.Worker):
    """Gunicorn worker that serves the WSGI app on an asyncio event loop."""

    def __init__(self, *args, **kw):  # pragma: no cover
        super().__init__(*args, **kw)

        cfg = self.cfg
        if cfg.is_ssl:
            self.ssl_context = self._create_ssl_context(cfg)
        else:
            self.ssl_context = None

        self.servers = []
        # id(protocol) -> protocol, for all currently-open connections.
        self.connections = {}

    def init_process(self):
        # create new event_loop after fork
        asyncio.get_event_loop().close()

        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        super().init_process()

    def run(self):
        self._runner = asyncio.ensure_future(self._run(), loop=self.loop)

        try:
            self.loop.run_until_complete(self._runner)
        finally:
            self.loop.close()

    def wrap_protocol(self, proto):
        # Track connection open/close so shutdown can wait for in-flight work.
        proto.connection_made = _wrp(
            proto, proto.connection_made, self.connections)
        proto.connection_lost = _wrp(
            proto, proto.connection_lost, self.connections, False)
        return proto

    def factory(self, wsgi, addr):
        # are we in debug level
        is_debug = self.log.loglevel == logging.DEBUG

        proto = WSGIServerHttpProtocol(
            wsgi, readpayload=True,
            loop=self.loop,
            log=self.log,
            debug=is_debug,
            keep_alive=self.cfg.keepalive,
            access_log=self.log.access_log,
            access_log_format=self.cfg.access_log_format)

        return self.wrap_protocol(proto)

    def get_factory(self, sock, addr):
        return functools.partial(self.factory, self.wsgi, addr)

    @asyncio.coroutine
    def close(self):
        try:
            if hasattr(self.wsgi, 'close'):
                yield from self.wsgi.close()
        except Exception:
            # Fix: was a bare `except:`, which also swallowed BaseException
            # subclasses such as SystemExit/KeyboardInterrupt. Errors are
            # logged because this runs during best-effort shutdown.
            self.log.exception('Process shutdown exception')

    @asyncio.coroutine
    def _run(self):
        for sock in self.sockets:
            factory = self.get_factory(sock.sock, sock.cfg_addr)
            self.servers.append(
                (yield from self._create_server(factory, sock)))

        # If our parent changed then we shut down.
        pid = os.getpid()
        try:
            while self.alive or self.connections:
                self.notify()

                if (self.alive and
                        pid == os.getpid() and self.ppid != os.getppid()):
                    self.log.info("Parent changed, shutting down: %s", self)
                    self.alive = False

                # stop accepting requests
                if not self.alive:
                    if self.servers:
                        self.log.info(
                            "Stopping server: %s, connections: %s",
                            pid, len(self.connections))
                        for server in self.servers:
                            server.close()
                        self.servers.clear()

                    # prepare connections for closing
                    for conn in self.connections.values():
                        if hasattr(conn, 'closing'):
                            conn.closing()

                yield from asyncio.sleep(1.0, loop=self.loop)
        except KeyboardInterrupt:
            pass

        if self.servers:
            for server in self.servers:
                server.close()

        yield from self.close()

    @asyncio.coroutine
    def _create_server(self, factory, sock):
        return self.loop.create_server(factory,
                                       sock=sock.sock,
                                       ssl=self.ssl_context)

    @staticmethod
    def _create_ssl_context(cfg):
        """ Creates SSLContext instance for usage in asyncio.create_server.

        See ssl.SSLSocket.__init__ for more details.
        """
        ctx = ssl.SSLContext(cfg.ssl_version)
        ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
        ctx.verify_mode = cfg.cert_reqs
        if cfg.ca_certs:
            ctx.load_verify_locations(cfg.ca_certs)
        if cfg.ciphers:
            ctx.set_ciphers(cfg.ciphers)
        return ctx


class _wrp:
    """Wraps a protocol callback to add/remove the protocol in a tracking dict."""

    def __init__(self, proto, meth, tracking, add=True):
        self._proto = proto
        self._id = id(proto)
        self._meth = meth
        self._tracking = tracking
        self._add = add

    def __call__(self, *args):
        if self._add:
            self._tracking[self._id] = self._proto
        elif self._id in self._tracking:
            del self._tracking[self._id]

        conn = self._meth(*args)
        return conn
4,054
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.prelude.query;

import com.yahoo.compress.IntegerCompressor;
import com.yahoo.prelude.query.textualrepresentation.Discloser;

import java.nio.ByteBuffer;

/**
 * A composite item whose children only match when they occur within a bounded
 * positional distance of each other.
 *
 * @author bratseth
 * @author havardpe
 */
public class NearItem extends CompositeItem {

    protected int distance;

    /** The default distance used if none is specified: 2 */
    public static final int defaultDistance = 2;

    /** Creates a NEAR item with distance 2 */
    public NearItem() {
        this(defaultDistance);
    }

    /**
     * Creates a <i>near</i> item with a limit to the distance between the words.
     *
     * @param distance the maximum position difference between the words which should be counted as a match
     */
    public NearItem(int distance) {
        setDistance(distance);
    }

    /** @throws IllegalArgumentException if the distance is negative */
    public void setDistance(int distance) {
        if (distance < 0)
            throw new IllegalArgumentException("Can not use negative distance " + distance);
        this.distance = distance;
    }

    public int getDistance() {
        return distance;
    }

    @Override
    public ItemType getItemType() {
        return ItemType.NEAR;
    }

    @Override
    public String getName() {
        return "NEAR";
    }

    @Override
    protected void encodeThis(ByteBuffer buffer) {
        super.encodeThis(buffer);
        // The distance is serialized using the compressed positive-int format.
        IntegerCompressor.putCompressedPositiveNumber(distance, buffer);
    }

    @Override
    public void disclose(Discloser discloser) {
        super.disclose(discloser);
        discloser.addProperty("limit", distance);
    }

    /** Appends the heading of this string - <code>[getName()]([limit]) </code> */
    @Override
    protected void appendHeadingString(StringBuilder buffer) {
        buffer.append(getName()).append("(").append(distance).append(")").append(" ");
    }

    @Override
    public int hashCode() {
        return super.hashCode() + 23 * distance;
    }

    /**
     * Returns whether this item is of the same class and
     * contains the same state as the given item
     */
    @Override
    public boolean equals(Object object) {
        if (!super.equals(object)) return false;
        NearItem other = (NearItem) object; // Ensured by superclass
        return this.distance == other.distance;
    }

}
910
391
#
# Copyright (C) 2020 IBM. All Rights Reserved.
#
# See LICENSE.txt file in the root directory
# of this source tree for licensing information.
#

from typing import Union, List

from clai.server.agent import Agent
from clai.server.command_message import State, Action

# pylint: disable=too-few-public-methods
from clai.tools.colorize_console import Colorize
from clai.server.logger import current_logger as logger


class DemoAgent(Agent):
    """Demo agent that suggests alternative commands for `ls` and `pwd` and
    comments on failed `ls` invocations after execution."""

    # Description shared by every demo suggestion.
    _DEMO_DESCRIPTION = "This is a demo sample that helps to execute the command in better way."

    def get_next_action(self, state: State) -> Union[Action, List[Action]]:
        logger.info("This is my agent")

        command = state.command

        if command == 'ls':
            # Single suggestion for a plain `ls`.
            return Action(
                suggested_command="ls -la",
                description=self._DEMO_DESCRIPTION,
                confidence=1,
            )

        if command == 'pwd':
            # Multiple candidate suggestions are returned as a list.
            return [
                Action(
                    suggested_command="ls -la",
                    description=self._DEMO_DESCRIPTION,
                    confidence=1,
                ),
                Action(
                    suggested_command="pwd -P",
                    description=self._DEMO_DESCRIPTION,
                    confidence=1,
                ),
            ]

        previous = state.previous_execution
        if previous and previous.command == 'ls -4':
            # Auto-correct the mistyped flag and execute immediately.
            return Action(suggested_command="ls -a", execute=True, confidence=1)

        # Default: pass the user's command through unchanged.
        return Action(suggested_command=command)

    def post_execute(self, state: State) -> Action:
        # After a failed `ls`, point the user at the man page.
        if state.command.startswith('ls') and state.result_code != '0':
            message = (
                Colorize()
                .append(f"Are you sure that this command is correct?({state.result_code})\n")
                .warning()
                .append(f"Try man ls for more info ")
                .to_console()
            )
            return Action(description=message, confidence=1)

        return Action(suggested_command=state.command)
3,102
<reponame>medismailben/llvm-project // RUN: mkdir -p %t/test1 %t/test2 %t/test3 // RUN: env "CPATH=%t/test1%{pathsep}%t/test2" %clang -x c -E -v %s 2>&1 | FileCheck %s -check-prefix=CPATH // CPATH: -I{{.*}}/test1 // CPATH: -I{{.*}}/test2 // CPATH: search starts here // CPATH: test1 // CPATH: test2 // RUN: env "OBJC_INCLUDE_PATH=%t/test1%{pathsep}%t/test2" OBJCPLUS_INCLUDE_PATH=%t/test1 "CPLUS_INCLUDE_PATH=%t/test1%{pathsep}%t/test2" C_INCLUDE_PATH=%t/test3 %clang -x c -E -v %s 2>&1 | FileCheck %s -check-prefix=C_INCLUDE_PATH // C_INCLUDE_PATH: -c-isystem {{"?.*}}/test3{{"?}} -cxx-isystem {{"?.*}}/test1{{"?}} -cxx-isystem {{"?.*}}/test2{{"?}} -objc-isystem {{"?.*}}/test1{{"?}} -objc-isystem {{"?.*}}/test2{{"?}} -objcxx-isystem {{"?.*}}/test1{{"?}} // C_INCLUDE_PATH: search starts here // C_INCLUDE_PATH-NOT: test1 // C_INCLUDE_PATH: test3 // C_INCLUDE_PATH-NOT: test1 // RUN: env OBJC_INCLUDE_PATH=%t/test1 OBJCPLUS_INCLUDE_PATH=%t/test3 CPLUS_INCLUDE_PATH=%t/test3 C_INCLUDE_PATH=%t/test1 %clang -x objective-c++ -E -v %s 2>&1 | FileCheck %s -check-prefix=OBJCPLUS_INCLUDE_PATH // OBJCPLUS_INCLUDE_PATH: -c-isystem {{"?.*}}/test1{{"?}} -cxx-isystem {{"?.*}}/test3{{"?}} -objc-isystem {{"?.*}}/test1{{"?}} -objcxx-isystem {{"?.*}}/test3{{"?}} // OBJCPLUS_INCLUDE_PATH: search starts here // OBJCPLUS_INCLUDE_PATH-NOT: test1 // OBJCPLUS_INCLUDE_PATH: test3 // OBJCPLUS_INCLUDE_PATH-NOT: test1
710
9,516
<reponame>ketyi/dgl<gh_stars>1000+ /*! * Copyright (c) 2021 by Contributors * \file nccl_api.h * \brief Wrapper around NCCL routines. */ #ifndef DGL_RUNTIME_CUDA_NCCL_API_H_ #define DGL_RUNTIME_CUDA_NCCL_API_H_ #include "nccl.h" #include <dgl/runtime/object.h> #include <string> namespace dgl { namespace runtime { namespace cuda { class NCCLUniqueId : public runtime::Object { public: NCCLUniqueId(); static constexpr const char* _type_key = "cuda.NCCLUniqueId"; DGL_DECLARE_OBJECT_TYPE_INFO(NCCLUniqueId, Object); ncclUniqueId Get() const; std::string ToString() const; void FromString(const std::string& str); private: ncclUniqueId id_; }; DGL_DEFINE_OBJECT_REF(NCCLUniqueIdRef, NCCLUniqueId); class NCCLCommunicator : public runtime::Object { public: NCCLCommunicator( int size, int rank, ncclUniqueId id); ~NCCLCommunicator(); // disable copying NCCLCommunicator(const NCCLCommunicator& other) = delete; NCCLCommunicator& operator=( const NCCLCommunicator& other); ncclComm_t Get(); /** * @brief Perform an all-to-all communication. * * @param send The continous array of data to send. * @param recv The continous array of data to recieve. * @param count The size of data to send to each rank. * @param stream The stream to operate on. */ template<typename IdType> void AllToAll( const IdType * send, IdType * recv, int64_t count, cudaStream_t stream); /** * @brief Perform an all-to-all variable sized communication. * * @tparam DType The type of value to send. * @param send The arrays of data to send. * @param send_prefix The prefix of each array to send. * @param recv The arrays of data to recieve. * @param recv_prefix The prefix of each array to recieve. * @param type The type of data to send. * @param stream The stream to operate on. */ template<typename DType> void AllToAllV( const DType * const send, const int64_t * send_prefix, DType * const recv, const int64_t * recv_prefix, cudaStream_t stream); /** * @brief Perform an all-to-all with sparse data (idx and value pairs). 
By * necessity, the sizes of each message are variable. * * @tparam IdType The type of index. * @tparam DType The type of value. * @param send_idx The set of indexes to send on the device. * @param send_value The set of values to send on the device. * @param num_feat The number of values per index. * @param send_prefix The exclusive prefix sum of elements to send on the * host. * @param recv_idx The set of indexes to recieve on the device. * @param recv_value The set of values to recieve on the device. * @param recv_prefix The exclusive prefix sum of the number of elements to * recieve on the host. * @param stream The stream to communicate on. */ template<typename IdType, typename DType> void SparseAllToAll( const IdType * send_idx, const DType * send_value, const int64_t num_feat, const int64_t * send_prefix, IdType * recv_idx, DType * recv_value, const int64_t * recv_prefix, cudaStream_t stream); int size() const; int rank() const; static constexpr const char* _type_key = "cuda.NCCLCommunicator"; DGL_DECLARE_OBJECT_TYPE_INFO(NCCLCommunicator, Object); private: ncclComm_t comm_; int size_; int rank_; }; DGL_DEFINE_OBJECT_REF(NCCLCommunicatorRef, NCCLCommunicator); } // namespace cuda } // namespace runtime } // namespace dgl #endif // DGL_RUNTIME_CUDA_NCCL_API_H_
1,401
350
/* * Copyright (C) 2005-2017 <NAME> (<EMAIL>). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * See the file COPYING for License information. */ /* * A changeset collects all pages that are modified during a single * operation. * * @exception_safe: unknown * @thread_safe: unknown */ #ifndef UPS_CHANGESET_H #define UPS_CHANGESET_H #include "0root/root.h" #include <stdlib.h> // Always verify that a file of level N does not include headers > N! #include "2config/env_config.h" #include "2page/page.h" #include "2page/page_collection.h" #ifndef UPS_ROOT_H # error "root.h was not included" #endif namespace upscaledb { struct LocalEnv; struct Changeset { Changeset(LocalEnv *env_) : env(env_) { } /* * Returns a page from the changeset, or NULL if the page is not part * of the changeset */ Page *get(uint64_t address) { return collection.get(address); } /* Append a new page to the changeset. The page is locked. */ void put(Page *page) { if (!has(page)) page->mutex().lock(); collection.put(page); } /* Removes a page from the changeset. The page is unlocked. */ void del(Page *page) { page->mutex().unlock(); collection.del(page); } /* Check if the page is already part of the changeset */ bool has(Page *page) const { return collection.has(page); } /* Returns true if the changeset is empty */ bool is_empty() const { return collection.is_empty(); } /* Removes all pages from the changeset. The pages are unlocked. 
*/ void clear(); /* * Flush all pages in the changeset - first write them to the log, then * write them to the disk. * On success: will clear the changeset and the journal */ void flush(uint64_t lsn); /* The Environment */ LocalEnv *env; /* The pages which were added to this Changeset */ PageCollection<Page::kListChangeset> collection; }; } // namespace upscaledb #endif /* UPS_CHANGESET_H */
800
335
{ "word": "Fraternity", "definitions": [ "A group of people sharing a common profession or interests.", "A male students' society in a university or college.", "A religious or Masonic society or guild.", "Friendship and mutual support within a group." ], "parts-of-speech": "Noun" }
118
772
<reponame>binbash5447/scrimage<filename>scrimage-filters/src/main/java/thirdparty/marvin/image/MarvinAbstractPlugin.java<gh_stars>100-1000 package thirdparty.marvin.image; public abstract class MarvinAbstractPlugin implements MarvinPlugin { private MarvinAttributes marvinAttributes; private boolean valid; protected MarvinAbstractPlugin() { marvinAttributes = new MarvinAttributes(this); } public void validate() { valid = true; } public void invalidate() { valid = false; } public boolean isValid() { return valid; } public MarvinAttributes getAttributes() { return marvinAttributes; } public void setAttribute(String label, Object value) { marvinAttributes.set(label, value); } public void setAttributes(Object... params) { marvinAttributes.set(params); } public Object getAttribute(String label) { return marvinAttributes.get(label); } }
311
1,153
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from ykdl.util.html import get_content
from ykdl.util.match import match1
from ykdl.extractor import VideoExtractor
from ykdl.videoinfo import VideoInfo
from ykdl.compact import urlencode, compact_bytes

import hashlib
import json
import time

# Signing key used when requesting live-stream data (taken from the
# site's H5 player) -- the API rejects unsigned requests.
SECRETKEY = '6FE26D855E1AEAE090E243EB1AF73685'


class HuomaoTv(VideoExtractor):
    """Extractor for live streams on huomao.com."""

    name = u'火猫 (Huomao)'

    # Quality labels, best first; used to order the discovered streams.
    supported_stream_types = ['BD', 'TD', 'HD', 'SD']
    stream_2_profile = {'BD': u"原画", 'TD': u'超清', 'HD': u'高清', 'SD': u'标清'}

    live_base = "https://www.huomao.com/swf/live_data"

    def prepare(self):
        """Fetch the room page, sign a live_data request and collect the
        available stream URLs into a VideoInfo object.

        Raises AssertionError when the room is offline or when the API
        response contains no default stream list.
        """
        info = VideoInfo(self.name, True)
        html = get_content(self.url)
        info.title = match1(html, '<title>([^<]+)').split('_')[0]

        data = json.loads(match1(html, 'channelOneInfo = ({.+?});'))

        # The request is authenticated with md5(stream + from + time + key),
        # mirroring the signature the H5 player computes.
        tag_from = 'huomaoh5room'
        tn = str(int(time.time()))
        sign_context = data['stream'] + tag_from + tn + SECRETKEY
        token = hashlib.md5(compact_bytes(sign_context, 'utf-8')).hexdigest()

        params = {
            'streamtype': 'live',
            'VideoIDS': data['stream'],
            'time': tn,
            'cdns': '1',
            'from': tag_from,
            'token': token
        }
        content = get_content(self.live_base,
                              data=compact_bytes(urlencode(params), 'utf-8'),
                              charset='utf-8')
        stream_data = json.loads(content)

        assert stream_data["roomStatus"] == "1", "The live stream is not online! "

        # Pick the stream list flagged as default (last match wins, matching
        # the original behavior). BUGFIX: previously `defstream` was left
        # unbound when no entry had default == 1, so the loop below crashed
        # with a confusing NameError instead of a clear error message.
        defstream = None
        for stream in stream_data["streamList"]:
            if stream['default'] == 1:
                defstream = stream['list']
        assert defstream is not None, "No default stream list in API response!"

        for stream in defstream:
            info.stream_types.append(stream['type'])
            info.streams[stream['type']] = {
                'container': 'flv',
                'video_profile': self.stream_2_profile[stream['type']],
                'src': [stream['url']],
                'size': float('inf')
            }

        info.stream_types = sorted(info.stream_types,
                                   key=self.supported_stream_types.index)
        return info


site = HuomaoTv()
957
4,473
/*
 * Copyright (C) 2011 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.tomakehurst.wiremock.testsupport;

import com.github.tomakehurst.wiremock.http.*;
import org.mockito.Mockito;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import static com.github.tomakehurst.wiremock.http.HttpHeader.httpHeader;
import static com.github.tomakehurst.wiremock.http.RequestMethod.GET;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.collect.Sets.newLinkedHashSet;
import static org.mockito.Mockito.when;

/**
 * Fluent builder that assembles a Mockito-mocked {@link Request} for tests.
 * Defaults: GET request to "/" from client IP "x.x.x.x" with empty body,
 * no headers, cookies, query parameters, or multiparts.
 */
public class MockRequestBuilder {

    private String url = "/";
    private RequestMethod method = GET;
    private String clientIp = "x.x.x.x";
    private List<HttpHeader> individualHeaders = newArrayList();
    private Map<String, Cookie> cookies = newHashMap();
    private List<QueryParameter> queryParameters = newArrayList();
    private String body = "";
    private String bodyAsBase64 = "";
    private Collection<Request.Part> multiparts = newArrayList();
    private boolean browserProxyRequest = false;
    // Optional name passed to Mockito.mock(..) so failures identify the mock.
    private String mockName;

    public MockRequestBuilder() {
    }

    public MockRequestBuilder(String mockName) {
        this.mockName = mockName;
    }

    /** Creates an unnamed builder with default values. */
    public static MockRequestBuilder aRequest() {
        return new MockRequestBuilder();
    }

    /** Creates a builder whose mock carries the given Mockito name. */
    public static MockRequestBuilder aRequest(String mockName) {
        return new MockRequestBuilder(mockName);
    }

    public MockRequestBuilder withUrl(String url) {
        this.url = url;
        return this;
    }

    public MockRequestBuilder withQueryParameter(String key, String... values) {
        queryParameters.add(new QueryParameter(key, Arrays.asList(values)));
        return this;
    }

    public MockRequestBuilder withMethod(RequestMethod method) {
        this.method = method;
        return this;
    }

    public MockRequestBuilder withClientIp(String clientIp) {
        this.clientIp = clientIp;
        return this;
    }

    public MockRequestBuilder withHeader(String key, String value) {
        individualHeaders.add(new HttpHeader(key, value));
        return this;
    }

    public MockRequestBuilder withCookie(String key, String value) {
        cookies.put(key, new Cookie(value));
        return this;
    }

    public MockRequestBuilder withBody(String body) {
        this.body = body;
        return this;
    }

    public MockRequestBuilder withBodyAsBase64(String bodyAsBase64) {
        this.bodyAsBase64 = bodyAsBase64;
        return this;
    }

    public MockRequestBuilder asBrowserProxyRequest() {
        this.browserProxyRequest = true;
        return this;
    }

    public MockRequestBuilder withMultiparts(Collection<Request.Part> parts) {
        this.multiparts = parts;
        return this;
    }

    /**
     * Builds the mocked {@link Request}, stubbing every accessor with the
     * values accumulated in this builder.
     */
    public Request build() {
        final HttpHeaders headers = new HttpHeaders(individualHeaders);

        final Request request = mockName == null ? Mockito.mock(Request.class) : Mockito.mock(Request.class, mockName);
        when(request.getUrl()).thenReturn(url);
        when(request.getMethod()).thenReturn(method);
        when(request.getClientIp()).thenReturn(clientIp);

        // Per-header stubbings for the headers registered on this builder.
        for (HttpHeader header : headers.all()) {
            when(request.containsHeader(header.key())).thenReturn(true);
            when(request.getHeader(header.key())).thenReturn(header.firstValue());
        }

        for (HttpHeader header : headers.all()) {
            when(request.header(header.key())).thenReturn(header);
            if (header.key().equals(ContentTypeHeader.KEY) && header.isPresent()) {
                when(request.contentTypeHeader()).thenReturn(new ContentTypeHeader(header.firstValue()));
            }
        }

        for (QueryParameter queryParameter : queryParameters) {
            when(request.queryParameter(queryParameter.key())).thenReturn(queryParameter);
        }

        // NOTE(review): Mockito lets the most recently declared matching
        // stubbing win, so the any(String) stubbings for header() here and
        // containsHeader() below appear to shadow the per-header stubbings
        // registered in the loops above -- confirm this ordering is intended.
        when(request.header(Mockito.any(String.class))).thenReturn(httpHeader("key", "value"));
        when(request.getHeaders()).thenReturn(headers);
        when(request.getAllHeaderKeys()).thenReturn(newLinkedHashSet(headers.keys()));
        when(request.containsHeader(Mockito.any(String.class))).thenReturn(false);
        when(request.getCookies()).thenReturn(cookies);
        when(request.getBody()).thenReturn(body.getBytes());
        when(request.getBodyAsString()).thenReturn(body);
        when(request.getBodyAsBase64()).thenReturn(bodyAsBase64);
        // Absolute URL is synthesized against a fixed localhost base.
        when(request.getAbsoluteUrl()).thenReturn("http://localhost:8080" + url);
        when(request.isBrowserProxyRequest()).thenReturn(browserProxyRequest);
        when(request.isMultipart()).thenReturn(multiparts != null && !multiparts.isEmpty());
        when(request.getParts()).thenReturn(multiparts);
        return request;
    }
}
1,596
341
package jwtc.android.chess.ics; import android.content.Intent; import android.os.Bundle; import android.preference.Preference; import android.preference.Preference.OnPreferenceClickListener; import jwtc.android.chess.MyPreferenceActivity; import jwtc.android.chess.R; public class ICSPrefs extends MyPreferenceActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); addPreferencesFromResource(R.xml.icsprefs); Preference prefCustomCommand = findPreference("icscustomcommandHandle"); prefCustomCommand.setOnPreferenceClickListener(new OnPreferenceClickListener() { public boolean onPreferenceClick(Preference preference) { Intent intent = new Intent(ICSPrefs.this, CustomCommands.class); startActivity(intent); return true; } }); } }
342
945
/*=========================================================================
 *
 *  Copyright NumFOCUS
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0.txt
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *=========================================================================*/
#ifndef itkSegmentationLevelSetFunction_hxx
#define itkSegmentationLevelSetFunction_hxx

#include "itkSegmentationLevelSetFunction.h"

namespace itk
{

// Installs a new speed image and rebinds the scalar interpolator to it,
// so PropagationSpeed() samples the new data.
template <typename TImageType, typename TFeatureImageType>
void
SegmentationLevelSetFunction<TImageType, TFeatureImageType>::SetSpeedImage(ImageType * s)
{
  m_SpeedImage = s;
  m_Interpolator->SetInputImage(m_SpeedImage);
}

// Installs a new advection (vector) image and rebinds the vector
// interpolator to it, so AdvectionField() samples the new data.
template <typename TImageType, typename TFeatureImageType>
void
SegmentationLevelSetFunction<TImageType, TFeatureImageType>::SetAdvectionImage(VectorImageType * s)
{
  m_AdvectionImage = s;
  m_VectorInterpolator->SetInputImage(m_AdvectionImage);
}

// Flips the sign of both the propagation and advection weights, which
// reverses whether the front expands or contracts.
template <typename TImageType, typename TFeatureImageType>
void
SegmentationLevelSetFunction<TImageType, TFeatureImageType>::ReverseExpansionDirection()
{
  this->SetPropagationWeight(-1.0 * this->GetPropagationWeight());
  this->SetAdvectionWeight(-1.0 * this->GetAdvectionWeight());
}

// Delegates neighborhood-radius initialization to the base class.
template <typename TImageType, typename TFeatureImageType>
void
SegmentationLevelSetFunction<TImageType, TFeatureImageType>::Initialize(const RadiusType & r)
{
  Superclass::Initialize(r);
}

// Sizes the speed image to match the feature image's regions, allocates its
// pixel buffer, and points the interpolator at it. Region setup must happen
// before Allocate().
template <typename TImageType, typename TFeatureImageType>
void
SegmentationLevelSetFunction<TImageType, TFeatureImageType>::AllocateSpeedImage()
{
  m_SpeedImage->SetRequestedRegion(m_FeatureImage->GetRequestedRegion());
  m_SpeedImage->SetBufferedRegion(m_FeatureImage->GetBufferedRegion());
  m_SpeedImage->SetLargestPossibleRegion(m_FeatureImage->GetLargestPossibleRegion());
  m_SpeedImage->Allocate();
  m_Interpolator->SetInputImage(m_SpeedImage);
}

// Same as AllocateSpeedImage(), but for the vector-valued advection image.
template <typename TImageType, typename TFeatureImageType>
void
SegmentationLevelSetFunction<TImageType, TFeatureImageType>::AllocateAdvectionImage()
{
  m_AdvectionImage->SetRequestedRegion(m_FeatureImage->GetRequestedRegion());
  m_AdvectionImage->SetBufferedRegion(m_FeatureImage->GetBufferedRegion());
  m_AdvectionImage->SetLargestPossibleRegion(m_FeatureImage->GetLargestPossibleRegion());
  m_AdvectionImage->Allocate();
  m_VectorInterpolator->SetInputImage(m_AdvectionImage);
}

// Samples the speed image at the neighborhood's center, shifted by |offset|.
// Falls back to the nearest-pixel value when the continuous index lies
// outside the interpolator's buffer.
template <typename TImageType, typename TFeatureImageType>
typename SegmentationLevelSetFunction<TImageType, TFeatureImageType>::ScalarValueType
SegmentationLevelSetFunction<TImageType, TFeatureImageType>::PropagationSpeed(const NeighborhoodType & neighborhood,
                                                                              const FloatOffsetType &  offset,
                                                                              GlobalDataStruct *) const
{
  IndexType idx = neighborhood.GetIndex();

  ContinuousIndexType cdx;

  // NOTE(review): the offset is subtracted from the index here -- confirm
  // this sign convention matches the caller's FloatOffsetType semantics.
  for (unsigned i = 0; i < ImageDimension; ++i)
  {
    cdx[i] = static_cast<double>(idx[i]) - offset[i];
  }
  if (m_Interpolator->IsInsideBuffer(cdx))
  {
    return (static_cast<ScalarValueType>(m_Interpolator->EvaluateAtContinuousIndex(cdx)));
  }
  else
  {
    return (static_cast<ScalarValueType>(m_SpeedImage->GetPixel(idx)));
  }
}

// Samples the advection vector field at the neighborhood's center, shifted
// by |offset|, with the same out-of-buffer fallback as PropagationSpeed().
template <typename TImageType, typename TFeatureImageType>
typename SegmentationLevelSetFunction<TImageType, TFeatureImageType>::VectorType
SegmentationLevelSetFunction<TImageType, TFeatureImageType>::AdvectionField(const NeighborhoodType & neighborhood,
                                                                            const FloatOffsetType &  offset,
                                                                            GlobalDataStruct *) const
{
  IndexType           idx = neighborhood.GetIndex();
  ContinuousIndexType cdx;

  for (unsigned i = 0; i < ImageDimension; ++i)
  {
    cdx[i] = static_cast<double>(idx[i]) - offset[i];
  }
  if (m_VectorInterpolator->IsInsideBuffer(cdx))
  {
    return (static_cast<VectorType>(m_VectorInterpolator->EvaluateAtContinuousIndex(cdx)));
  }
  // Just return the default
  else
    return (m_AdvectionImage->GetPixel(idx));
}

} // end namespace itk

#endif
1,677
1,000
/**
 *  Copyright (C) 2015 Topology LP
 *  Copyright (C) 2018 <NAME>
 *  All rights reserved.
 *
 *  Permission is hereby granted, free of charge, to any person obtaining a copy
 *  of this software and associated documentation files (the "Software"), to
 *  deal in the Software without restriction, including without limitation the
 *  rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 *  sell copies of the Software, and to permit persons to whom the Software is
 *  furnished to do so, subject to the following conditions:
 *
 *  The above copyright notice and this permission notice shall be included in
 *  all copies or substantial portions of the Software.
 *
 *  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 *  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 *  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 *  THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 *  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 *  FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 *  IN THE SOFTWARE.
 */
#ifndef CPPCODEC_DETAIL_HEX
#define CPPCODEC_DETAIL_HEX

#include <stdint.h>
#include <stdlib.h> // for abort()

#include "../data/access.hpp"
#include "../parse_error.hpp"
#include "stream_codec.hpp"

namespace cppcodec {
namespace detail {

// Hex codec core: maps each input byte to two alphabet symbols (high nibble
// first). CodecVariant supplies the alphabet (upper/lower case) and plugs
// this class into the shared stream-codec machinery via CRTP.
template <typename CodecVariant>
class hex : public CodecVariant::template codec_impl<hex<CodecVariant>>
{
public:
    static inline constexpr uint8_t binary_block_size() { return 1; }
    static inline constexpr uint8_t encoded_block_size() { return 2; }

    static CPPCODEC_ALWAYS_INLINE constexpr uint8_t num_encoded_tail_symbols(uint8_t /*num_bytes*/) noexcept
    {
        // Hex encoding only works on full bytes so there are no tails,
        // no padding characters, and this function should (must) never be called.
        return 0;
    }

    // Returns the alphabet index of the I-th symbol for a 1-byte block:
    // symbol 0 encodes the high nibble, symbol 1 the low nibble.
    template <uint8_t I>
    static CPPCODEC_ALWAYS_INLINE constexpr uint8_t index(
            const uint8_t* b /*binary block*/) noexcept
    {
        static_assert(I >= 0 && I < encoded_block_size(),
                "invalid encoding symbol index in a block");

        return (I == 0) ? (b[0] >> 4) // first 4 bits
                : /*I == 1*/ (b[0] & 0xF); // last 4 bits
    }

    // With only 2 bytes, enc<1> will always result in a full index() call and
    // enc<0> will be protected by a not-reached assertion, so we don't actually
    // care about index_last() except optimizing it out as good as possible.
    template <bool B>
    using uint8_if = typename std::enable_if<B, uint8_t>::type;

    // Selected via SFINAE only for I == 0; a harmless constant the optimizer
    // can drop, since hex never produces a partial (tail) block.
    template <uint8_t I>
    static CPPCODEC_ALWAYS_INLINE constexpr uint8_if<I == 0> index_last(
            const uint8_t* /*binary block*/) noexcept
    {
        return 0;
    }

    // Selected for I != 0: reaching this at runtime would mean a tail block
    // exists, which is impossible for hex, hence the exception.
    template <uint8_t I>
    static CPPCODEC_ALWAYS_INLINE uint8_if<I != 0> index_last(
            const uint8_t* /*binary block*/)
    {
        throw std::domain_error("invalid last encoding symbol index in a tail");
    }

    template <typename Result, typename ResultState>
    static CPPCODEC_ALWAYS_INLINE void decode_block(
            Result& decoded, ResultState&, const alphabet_index_t* idx);

    template <typename Result, typename ResultState>
    static CPPCODEC_ALWAYS_INLINE void decode_tail(
            Result& decoded, ResultState&, const alphabet_index_t* idx, size_t idx_len);
};

// Recombines two alphabet indices (high nibble, low nibble) into one byte.
template <typename CodecVariant>
template <typename Result, typename ResultState>
CPPCODEC_ALWAYS_INLINE void hex<CodecVariant>::decode_block(
        Result& decoded, ResultState& state, const alphabet_index_t* idx)
{
    data::put(decoded, state, static_cast<uint8_t>((idx[0] << 4) | idx[1]));
}

// Odd-length input has no valid hex decoding in the streaming model.
template <typename CodecVariant>
template <typename Result, typename ResultState>
CPPCODEC_ALWAYS_INLINE void hex<CodecVariant>::decode_tail(
        Result&, ResultState&, const alphabet_index_t*, size_t)
{
    throw invalid_input_length(
            "odd-length hex input is not supported by the streaming octet decoder, "
            "use a place-based number decoder instead");
}

} // namespace detail
} // namespace cppcodec

#endif // CPPCODEC_DETAIL_HEX
1,532
585
<reponame>madrob/solr /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.handler.component; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.solr.search.SortSpec; import org.mockito.Mockito; public class MockSortSpecBuilder { private final SortSpec sortSpec; public MockSortSpecBuilder() { this.sortSpec = Mockito.mock(SortSpec.class); Mockito.when(sortSpec.getCount()).thenReturn(10); } public static MockSortSpecBuilder create() { return new MockSortSpecBuilder(); } public MockSortSpecBuilder withSortFields(SortField[] sortFields) { Sort sort = new Sort(sortFields); Mockito.when(sortSpec.getSort()).thenReturn(sort); return this; } public MockSortSpecBuilder withIncludesNonScoreOrDocSortField(boolean include) { Mockito.when(sortSpec.includesNonScoreOrDocField()).thenReturn(include); return this; } public SortSpec build() { return sortSpec; } }
569
555
package com.foo;

// Empty placeholder type -- presumably a minimal fixture for compile or
// packaging tests, since it declares no members. TODO(review): confirm use.
class A {

}
13
4,535
// Copyright 2019, Intel Corporation #pragma once #include <boost/filesystem.hpp> namespace vertexai { namespace tile { namespace codegen { namespace test { namespace passes { bool VerifyPasses(const boost::filesystem::path& passes_dir); } // namespace passes } // namespace test } // namespace codegen } // namespace tile } // namespace vertexai
106
1,109
from gryphon.data_service.pollers.orderbook.orderbook_poller import OrderbookPoller


class BitstampBCHEUROrderbook(OrderbookPoller):
    """Polls the Bitstamp v2 order-book endpoint for the BCH/EUR pair."""

    def __init__(self):
        self.exchange_name = u'BITSTAMP_BCH_EUR'
        self.url = 'https://www.bitstamp.net/api/v2/order_book/bcheur/'
        # Poll interval in seconds.
        self.poll_time = 1

    # NOTE(review): the response sample previously documented here
    # ("isFrozen", "seq") is Poloniex's format, not Bitstamp's. Bitstamp v2
    # order_book responses look like:
    #   {"timestamp": "...",
    #    "bids": [[price, amount], ...],
    #    "asks": [[price, amount], ...]}
    # -- confirm against the live API.
    def parse_order(self, order):
        # Orders arrive as [price, amount]; an empty third slot is appended,
        # presumably to match the poller's [price, amount, order_id] shape --
        # TODO(review): confirm downstream consumers expect this.
        return [order[0], order[1], '']
285
344
# Modified from https://github.com/JDAI-CV/fast-reid/blob/master/fastreid/data/transforms/build.py  # noqa

import torchvision.transforms as T

from .auto_augment import ImageNetPolicy
from .gaussian_blur import GaussianBlur
from .mutual_transformer import MutualTransform
from .random_erasing import RandomErasing

__all__ = ["build_train_transformer", "build_test_transformer"]


def build_train_transformer(cfg):
    """Assemble the training-time augmentation pipeline from the config."""
    ops = []

    # Auto augmentation, scheduled over the full training run.
    if cfg.DATA.TRAIN.is_autoaug:
        ops.append(ImageNetPolicy(cfg.TRAIN.epochs * cfg.TRAIN.iters))

    # Resize to the working resolution.
    ops.append(T.Resize((cfg.DATA.height, cfg.DATA.width), interpolation=3))

    # Random horizontal flip.
    if cfg.DATA.TRAIN.is_flip:
        ops.append(T.RandomHorizontalFlip(p=cfg.DATA.TRAIN.flip_prob))

    # Pad, then crop back to the working resolution.
    if cfg.DATA.TRAIN.is_pad:
        ops.append(T.Pad(cfg.DATA.TRAIN.pad_size))
        ops.append(T.RandomCrop((cfg.DATA.height, cfg.DATA.width)))

    # Random gaussian blur.
    if cfg.DATA.TRAIN.is_blur:
        ops.append(T.RandomApply([GaussianBlur([0.1, 2.0])],
                                 p=cfg.DATA.TRAIN.blur_prob))

    # Tensor conversion + channel-wise normalization.
    ops.append(T.ToTensor())
    ops.append(T.Normalize(mean=cfg.DATA.norm_mean, std=cfg.DATA.norm_std))

    # Random erasing, applied on the normalized tensor.
    if cfg.DATA.TRAIN.is_erase:
        ops.append(RandomErasing(probability=cfg.DATA.TRAIN.erase_prob,
                                 mean=cfg.DATA.norm_mean))

    pipeline = T.Compose(ops)

    # Mutual transform (for MMT): apply the pipeline several times per image.
    if cfg.DATA.TRAIN.is_mutual_transform:
        return MutualTransform(pipeline, cfg.DATA.TRAIN.mutual_times)

    return pipeline


def build_test_transformer(cfg):
    """Assemble the evaluation-time pipeline: resize, tensor, normalize."""
    return T.Compose([
        T.Resize((cfg.DATA.height, cfg.DATA.width), interpolation=3),
        T.ToTensor(),
        T.Normalize(mean=cfg.DATA.norm_mean, std=cfg.DATA.norm_std),
    ])
968
307
//
//

#include "GPUMemoryHeap.h"

namespace {
// Maps the engine-level heap kind to the renderer's buffer type.
BufferType getBufferType(GpuHeap heap_type) {
	switch(heap_type) {
	case GpuHeap::ModelVertex:
		return BufferType::Vertex;
	case GpuHeap::ModelIndex:
		return BufferType::Index;
	case GpuHeap::NUM_VALUES:
	default:
		UNREACHABLE("Invalid heap type detected!");
		return BufferType::Vertex;
	}
}
}

namespace graphics {
namespace util {

// Creates the GPU-side buffer up front and a heap allocator whose grow
// callback resizes that buffer.
GPUMemoryHeap::GPUMemoryHeap(GpuHeap heap_type) {
	_bufferHandle = gr_create_buffer(getBufferType(heap_type), BufferUsageHint::Static);
	_allocator.reset(new ::util::HeapAllocator([this](size_t n) { resizeBuffer(n); }));
}

// Releases the GPU buffer and the CPU shadow copy of its contents.
GPUMemoryHeap::~GPUMemoryHeap() {
	if (_bufferHandle.isValid()) {
		gr_delete_buffer(_bufferHandle);
		_bufferHandle = gr_buffer_handle();
	}
	if (_dataBuffer != nullptr) {
		vm_free(_dataBuffer);
		_dataBuffer = nullptr;
		_bufferSize = 0;
	}
}

// Grows the CPU shadow buffer and re-uploads its entire contents to the GPU.
// NOTE(review): the vm_realloc return value is assigned unchecked --
// presumably the project allocator aborts on failure; confirm.
void GPUMemoryHeap::resizeBuffer(size_t newSize) {
	_dataBuffer = vm_realloc(_dataBuffer, newSize);
	_bufferSize = newSize;

	gr_update_buffer_data(_bufferHandle, _bufferSize, _dataBuffer);
}

// Returns a pointer at the given byte offset into the CPU shadow buffer.
void* GPUMemoryHeap::bufferPointer(size_t offset) {
	auto bytePtr = reinterpret_cast<uint8_t*>(_dataBuffer);
	return reinterpret_cast<void*>(bytePtr + offset);
}

// Reserves |size| bytes from the heap, copies |data| into the CPU shadow
// buffer, mirrors that region to the GPU, and returns the byte offset of the
// allocation within the buffer.
size_t GPUMemoryHeap::allocateGpuData(size_t size, void* data) {
	auto offset = _allocator->allocate(size);

	auto dataPtr = bufferPointer(offset);
	memcpy(dataPtr, data, size);

	gr_update_buffer_data_offset(_bufferHandle, offset, size, data);

	return offset;
}

// Returns the allocation at |offset| to the heap; the GPU-side bytes are
// deliberately left as-is (see comment below).
void GPUMemoryHeap::freeGpuData(size_t offset) {
	_allocator->free(offset);

	// Just leave the data in the buffers since it doesn't hurt anyone if it's kept in there
}

// Accessor for the underlying renderer buffer handle.
gr_buffer_handle GPUMemoryHeap::bufferHandle() {
	return _bufferHandle;
}

}
}
635
2,338
//===-- Timer.h -------------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIBC_UTILS_TESTUTILS_TIMER_H
#define LLVM_LIBC_UTILS_TESTUTILS_TIMER_H

#include <stdint.h>

namespace __llvm_libc {
namespace testing {

// Simple stopwatch used by the test utilities. The platform-specific state
// is hidden behind the opaque |Impl| pointer (pimpl idiom), keeping this
// header free of OS includes.
class Timer {
  void *Impl;

public:
  Timer();
  ~Timer();

  // Starts the measurement window.
  void start();
  // Ends the measurement window.
  void stop();
  // Elapsed time between start() and stop() in nanoseconds.
  // NOTE(review): clock source is defined in the implementation file --
  // presumably a monotonic clock; confirm there.
  uint64_t nanoseconds() const;
};

} // namespace testing
} // namespace __llvm_libc

#endif // LLVM_LIBC_UTILS_TESTUTILS_TIMER_H
266
517
<reponame>finesoft/java // Generated by the protocol buffer compiler. DO NOT EDIT! // source: tensorflow/core/framework/resource_handle.proto package org.tensorflow.proto.framework; /** * <pre> * Protocol buffer representing a handle to a tensorflow resource. Handles are * not valid across executions, but can be serialized back and forth from within * a single run. * </pre> * * Protobuf type {@code tensorflow.ResourceHandleProto} */ public final class ResourceHandleProto extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:tensorflow.ResourceHandleProto) ResourceHandleProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ResourceHandleProto.newBuilder() to construct. private ResourceHandleProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ResourceHandleProto() { device_ = ""; container_ = ""; name_ = ""; maybeTypeName_ = ""; dtypesAndShapes_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ResourceHandleProto(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ResourceHandleProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); device_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); container_ = s; break; } case 26: 
{ java.lang.String s = input.readStringRequireUtf8(); name_ = s; break; } case 32: { hashCode_ = input.readUInt64(); break; } case 42: { java.lang.String s = input.readStringRequireUtf8(); maybeTypeName_ = s; break; } case 50: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { dtypesAndShapes_ = new java.util.ArrayList<org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape>(); mutable_bitField0_ |= 0x00000001; } dtypesAndShapes_.add( input.readMessage(org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.parser(), extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { dtypesAndShapes_ = java.util.Collections.unmodifiableList(dtypesAndShapes_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.tensorflow.proto.framework.ResourceHandle.internal_static_tensorflow_ResourceHandleProto_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.tensorflow.proto.framework.ResourceHandle.internal_static_tensorflow_ResourceHandleProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.tensorflow.proto.framework.ResourceHandleProto.class, org.tensorflow.proto.framework.ResourceHandleProto.Builder.class); } public interface DtypeAndShapeOrBuilder extends // @@protoc_insertion_point(interface_extends:tensorflow.ResourceHandleProto.DtypeAndShape) com.google.protobuf.MessageOrBuilder { /** * <code>.tensorflow.DataType dtype = 1;</code> */ int getDtypeValue(); /** * 
 * <code>.tensorflow.DataType dtype = 1;</code>
   */
  org.tensorflow.proto.framework.DataType getDtype();

  /**
   * <code>.tensorflow.TensorShapeProto shape = 2;</code>
   */
  boolean hasShape();
  /**
   * <code>.tensorflow.TensorShapeProto shape = 2;</code>
   */
  org.tensorflow.proto.framework.TensorShapeProto getShape();
  /**
   * <code>.tensorflow.TensorShapeProto shape = 2;</code>
   */
  org.tensorflow.proto.framework.TensorShapeProtoOrBuilder getShapeOrBuilder();
}
/**
 * <pre>
 * Protocol buffer representing a pair of (data type, tensor shape).
 * </pre>
 *
 * Protobuf type {@code tensorflow.ResourceHandleProto.DtypeAndShape}
 */
// NOTE(review): generated by protoc — regenerate rather than hand-edit.
public static final class DtypeAndShape extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:tensorflow.ResourceHandleProto.DtypeAndShape)
    DtypeAndShapeOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use DtypeAndShape.newBuilder() to construct.
  private DtypeAndShape(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private DtypeAndShape() {
    dtype_ = 0;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new DtypeAndShape();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: dtype is field 1 (varint enum),
  // shape is field 2 (embedded TensorShapeProto message).
  private DtypeAndShape(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8: {
            int rawValue = input.readEnum();

            dtype_ = rawValue;
            break;
          }
          case 18: {
            // If shape was already seen, merge the new occurrence into it
            // (standard proto3 last-message-wins-by-merge semantics).
            org.tensorflow.proto.framework.TensorShapeProto.Builder subBuilder = null;
            if (shape_ != null) {
              subBuilder = shape_.toBuilder();
            }
            shape_ = input.readMessage(org.tensorflow.proto.framework.TensorShapeProto.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(shape_);
              shape_ = subBuilder.buildPartial();
            }

            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.tensorflow.proto.framework.ResourceHandle.internal_static_tensorflow_ResourceHandleProto_DtypeAndShape_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.tensorflow.proto.framework.ResourceHandle.internal_static_tensorflow_ResourceHandleProto_DtypeAndShape_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.class, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder.class);
  }

  public static final int DTYPE_FIELD_NUMBER = 1;
  private int dtype_;
  /**
   * <code>.tensorflow.DataType dtype = 1;</code>
   */
  public int getDtypeValue() {
    return dtype_;
  }
  /**
   * <code>.tensorflow.DataType dtype = 1;</code>
   */
  public org.tensorflow.proto.framework.DataType getDtype() {
    @SuppressWarnings("deprecation")
    org.tensorflow.proto.framework.DataType result = org.tensorflow.proto.framework.DataType.valueOf(dtype_);
    // UNRECOGNIZED covers enum numbers newer than this generated code.
    return result == null ? org.tensorflow.proto.framework.DataType.UNRECOGNIZED : result;
  }

  public static final int SHAPE_FIELD_NUMBER = 2;
  private org.tensorflow.proto.framework.TensorShapeProto shape_;
  /**
   * <code>.tensorflow.TensorShapeProto shape = 2;</code>
   */
  public boolean hasShape() {
    return shape_ != null;
  }
  /**
   * <code>.tensorflow.TensorShapeProto shape = 2;</code>
   */
  public org.tensorflow.proto.framework.TensorShapeProto getShape() {
    return shape_ == null ? org.tensorflow.proto.framework.TensorShapeProto.getDefaultInstance() : shape_;
  }
  /**
   * <code>.tensorflow.TensorShapeProto shape = 2;</code>
   */
  public org.tensorflow.proto.framework.TensorShapeProtoOrBuilder getShapeOrBuilder() {
    return getShape();
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // proto3: fields equal to their default value are not serialized.
    if (dtype_ != org.tensorflow.proto.framework.DataType.DT_INVALID.getNumber()) {
      output.writeEnum(1, dtype_);
    }
    if (shape_ != null) {
      output.writeMessage(2, getShape());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (dtype_ != org.tensorflow.proto.framework.DataType.DT_INVALID.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, dtype_);
    }
    if (shape_ != null) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, getShape());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape)) {
      return super.equals(obj);
    }
    org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape other = (org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape) obj;

    if (dtype_ != other.dtype_) return false;
    if (hasShape() != other.hasShape()) return false;
    if (hasShape()) {
      if (!getShape()
          .equals(other.getShape())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + DTYPE_FIELD_NUMBER;
    hash = (53 * hash) + dtype_;
    if (hasShape()) {
      hash = (37 * hash) + SHAPE_FIELD_NUMBER;
      hash = (53 * hash) + getShape().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Protocol buffer representing a pair of (data type, tensor shape).
   * </pre>
   *
   * Protobuf type {@code tensorflow.ResourceHandleProto.DtypeAndShape}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:tensorflow.ResourceHandleProto.DtypeAndShape)
      org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShapeOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.tensorflow.proto.framework.ResourceHandle.internal_static_tensorflow_ResourceHandleProto_DtypeAndShape_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.tensorflow.proto.framework.ResourceHandle.internal_static_tensorflow_ResourceHandleProto_DtypeAndShape_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.class, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder.class);
    }

    // Construct using org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      dtype_ = 0;

      if (shapeBuilder_ == null) {
        shape_ = null;
      } else {
        shape_ = null;
        shapeBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.tensorflow.proto.framework.ResourceHandle.internal_static_tensorflow_ResourceHandleProto_DtypeAndShape_descriptor;
    }

    @java.lang.Override
    public org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape getDefaultInstanceForType() {
      return org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.getDefaultInstance();
    }

    @java.lang.Override
    public org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape build() {
      org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape buildPartial() {
      org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape result = new org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape(this);
      result.dtype_ = dtype_;
      if (shapeBuilder_ == null) {
        result.shape_ = shape_;
      } else {
        result.shape_ = shapeBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape) {
        return mergeFrom((org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape other) {
      if (other == org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.getDefaultInstance()) return this;
      if (other.dtype_ != 0) {
        setDtypeValue(other.getDtypeValue());
      }
      if (other.hasShape()) {
        mergeShape(other.getShape());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow.
        parsedMessage = (org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private int dtype_ = 0;
    /**
     * <code>.tensorflow.DataType dtype = 1;</code>
     */
    public int getDtypeValue() {
      return dtype_;
    }
    /**
     * <code>.tensorflow.DataType dtype = 1;</code>
     */
    public Builder setDtypeValue(int value) {
      dtype_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>.tensorflow.DataType dtype = 1;</code>
     */
    public org.tensorflow.proto.framework.DataType getDtype() {
      @SuppressWarnings("deprecation")
      org.tensorflow.proto.framework.DataType result = org.tensorflow.proto.framework.DataType.valueOf(dtype_);
      return result == null ? org.tensorflow.proto.framework.DataType.UNRECOGNIZED : result;
    }
    /**
     * <code>.tensorflow.DataType dtype = 1;</code>
     */
    public Builder setDtype(org.tensorflow.proto.framework.DataType value) {
      if (value == null) {
        throw new NullPointerException();
      }

      dtype_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <code>.tensorflow.DataType dtype = 1;</code>
     */
    public Builder clearDtype() {

      dtype_ = 0;
      onChanged();
      return this;
    }

    private org.tensorflow.proto.framework.TensorShapeProto shape_;
    private com.google.protobuf.SingleFieldBuilderV3<
        org.tensorflow.proto.framework.TensorShapeProto, org.tensorflow.proto.framework.TensorShapeProto.Builder, org.tensorflow.proto.framework.TensorShapeProtoOrBuilder> shapeBuilder_;
    /**
     * <code>.tensorflow.TensorShapeProto shape = 2;</code>
     */
    public boolean hasShape() {
      return shapeBuilder_ != null || shape_ != null;
    }
    /**
     * <code>.tensorflow.TensorShapeProto shape = 2;</code>
     */
    public org.tensorflow.proto.framework.TensorShapeProto getShape() {
      if (shapeBuilder_ == null) {
        return shape_ == null ? org.tensorflow.proto.framework.TensorShapeProto.getDefaultInstance() : shape_;
      } else {
        return shapeBuilder_.getMessage();
      }
    }
    /**
     * <code>.tensorflow.TensorShapeProto shape = 2;</code>
     */
    public Builder setShape(org.tensorflow.proto.framework.TensorShapeProto value) {
      if (shapeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        shape_ = value;
        onChanged();
      } else {
        shapeBuilder_.setMessage(value);
      }

      return this;
    }
    /**
     * <code>.tensorflow.TensorShapeProto shape = 2;</code>
     */
    public Builder setShape(
        org.tensorflow.proto.framework.TensorShapeProto.Builder builderForValue) {
      if (shapeBuilder_ == null) {
        shape_ = builderForValue.build();
        onChanged();
      } else {
        shapeBuilder_.setMessage(builderForValue.build());
      }

      return this;
    }
    /**
     * <code>.tensorflow.TensorShapeProto shape = 2;</code>
     */
    public Builder mergeShape(org.tensorflow.proto.framework.TensorShapeProto value) {
      if (shapeBuilder_ == null) {
        if (shape_ != null) {
          shape_ =
            org.tensorflow.proto.framework.TensorShapeProto.newBuilder(shape_).mergeFrom(value).buildPartial();
        } else {
          shape_ = value;
        }
        onChanged();
      } else {
        shapeBuilder_.mergeFrom(value);
      }

      return this;
    }
    /**
     * <code>.tensorflow.TensorShapeProto shape = 2;</code>
     */
    public Builder clearShape() {
      if (shapeBuilder_ == null) {
        shape_ = null;
        onChanged();
      } else {
        shape_ = null;
        shapeBuilder_ = null;
      }

      return this;
    }
    /**
     * <code>.tensorflow.TensorShapeProto shape = 2;</code>
     */
    public org.tensorflow.proto.framework.TensorShapeProto.Builder getShapeBuilder() {

      onChanged();
      return getShapeFieldBuilder().getBuilder();
    }
    /**
     * <code>.tensorflow.TensorShapeProto shape = 2;</code>
     */
    public org.tensorflow.proto.framework.TensorShapeProtoOrBuilder getShapeOrBuilder() {
      if (shapeBuilder_ != null) {
        return shapeBuilder_.getMessageOrBuilder();
      } else {
        return shape_ == null ?
            org.tensorflow.proto.framework.TensorShapeProto.getDefaultInstance() : shape_;
      }
    }
    /**
     * <code>.tensorflow.TensorShapeProto shape = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        org.tensorflow.proto.framework.TensorShapeProto, org.tensorflow.proto.framework.TensorShapeProto.Builder, org.tensorflow.proto.framework.TensorShapeProtoOrBuilder>
        getShapeFieldBuilder() {
      if (shapeBuilder_ == null) {
        shapeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            org.tensorflow.proto.framework.TensorShapeProto, org.tensorflow.proto.framework.TensorShapeProto.Builder, org.tensorflow.proto.framework.TensorShapeProtoOrBuilder>(
                getShape(),
                getParentForChildren(),
                isClean());
        shape_ = null;
      }
      return shapeBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:tensorflow.ResourceHandleProto.DtypeAndShape)
  }

  // @@protoc_insertion_point(class_scope:tensorflow.ResourceHandleProto.DtypeAndShape)
  private static final org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape();
  }

  public static org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<DtypeAndShape>
      PARSER = new com.google.protobuf.AbstractParser<DtypeAndShape>() {
    @java.lang.Override
    public DtypeAndShape parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new DtypeAndShape(input, extensionRegistry);
    }
  };

  public
static com.google.protobuf.Parser<DtypeAndShape> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DtypeAndShape> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

// NOTE(review): generated accessors for the outer ResourceHandleProto message.
// String fields cache lazily: stored as ByteString after parse, replaced by the
// decoded java.lang.String on first getter call (hence the volatile Object).
public static final int DEVICE_FIELD_NUMBER = 1;
private volatile java.lang.Object device_;
/**
 * <pre>
 * Unique name for the device containing the resource.
 * </pre>
 *
 * <code>string device = 1;</code>
 */
public java.lang.String getDevice() {
  java.lang.Object ref = device_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    device_ = s;
    return s;
  }
}
/**
 * <pre>
 * Unique name for the device containing the resource.
 * </pre>
 *
 * <code>string device = 1;</code>
 */
public com.google.protobuf.ByteString
    getDeviceBytes() {
  java.lang.Object ref = device_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    device_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int CONTAINER_FIELD_NUMBER = 2;
private volatile java.lang.Object container_;
/**
 * <pre>
 * Container in which this resource is placed.
 * </pre>
 *
 * <code>string container = 2;</code>
 */
public java.lang.String getContainer() {
  java.lang.Object ref = container_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    container_ = s;
    return s;
  }
}
/**
 * <pre>
 * Container in which this resource is placed.
 * </pre>
 *
 * <code>string container = 2;</code>
 */
public com.google.protobuf.ByteString
    getContainerBytes() {
  java.lang.Object ref = container_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    container_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int NAME_FIELD_NUMBER = 3;
private volatile java.lang.Object name_;
/**
 * <pre>
 * Unique name of this resource.
 * </pre>
 *
 * <code>string name = 3;</code>
 */
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  }
}
/**
 * <pre>
 * Unique name of this resource.
 * </pre>
 *
 * <code>string name = 3;</code>
 */
public com.google.protobuf.ByteString
    getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int HASH_CODE_FIELD_NUMBER = 4;
private long hashCode_;
/**
 * <pre>
 * Hash code for the type of the resource. Is only valid in the same device
 * and in the same execution.
 * </pre>
 *
 * <code>uint64 hash_code = 4;</code>
 */
public long getHashCode() {
  return hashCode_;
}

public static final int MAYBE_TYPE_NAME_FIELD_NUMBER = 5;
private volatile java.lang.Object maybeTypeName_;
/**
 * <pre>
 * For debug-only, the name of the type pointed to by this handle, if
 * available.
 * </pre>
 *
 * <code>string maybe_type_name = 5;</code>
 */
public java.lang.String getMaybeTypeName() {
  java.lang.Object ref = maybeTypeName_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    maybeTypeName_ = s;
    return s;
  }
}
/**
 * <pre>
 * For debug-only, the name of the type pointed to by this handle, if
 * available.
 * </pre>
 *
 * <code>string maybe_type_name = 5;</code>
 */
public com.google.protobuf.ByteString
    getMaybeTypeNameBytes() {
  java.lang.Object ref = maybeTypeName_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    maybeTypeName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int DTYPES_AND_SHAPES_FIELD_NUMBER = 6;
private java.util.List<org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape> dtypesAndShapes_;
/**
 * <pre>
 * Data types and shapes for the underlying resource.
 * </pre>
 *
 * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code>
 */
public java.util.List<org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape> getDtypesAndShapesList() {
  return dtypesAndShapes_;
}
/**
 * <pre>
 * Data types and shapes for the underlying resource.
 * </pre>
 *
 * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code>
 */
public java.util.List<? extends org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShapeOrBuilder>
    getDtypesAndShapesOrBuilderList() {
  return dtypesAndShapes_;
}
/**
 * <pre>
 * Data types and shapes for the underlying resource.
 * </pre>
 *
 * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code>
 */
public int getDtypesAndShapesCount() {
  return dtypesAndShapes_.size();
}
/**
 * <pre>
 * Data types and shapes for the underlying resource.
 * </pre>
 *
 * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code>
 */
public org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape getDtypesAndShapes(int index) {
  return dtypesAndShapes_.get(index);
}
/**
 * <pre>
 * Data types and shapes for the underlying resource.
 * </pre>
 *
 * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code>
 */
public org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShapeOrBuilder getDtypesAndShapesOrBuilder(
    int index) {
  return dtypesAndShapes_.get(index);
}

// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // proto3: fields at their default value are omitted from the wire format.
  if (!getDeviceBytes().isEmpty()) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, device_);
  }
  if (!getContainerBytes().isEmpty()) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, container_);
  }
  if (!getNameBytes().isEmpty()) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, name_);
  }
  if (hashCode_ != 0L) {
    output.writeUInt64(4, hashCode_);
  }
  if (!getMaybeTypeNameBytes().isEmpty()) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 5, maybeTypeName_);
  }
  for (int i = 0; i < dtypesAndShapes_.size(); i++) {
    output.writeMessage(6, dtypesAndShapes_.get(i));
  }
  unknownFields.writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!getDeviceBytes().isEmpty()) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, device_);
  }
  if (!getContainerBytes().isEmpty()) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, container_);
  }
  if (!getNameBytes().isEmpty()) {
    size +=
com.google.protobuf.GeneratedMessageV3.computeStringSize(3, name_);
  }
  if (hashCode_ != 0L) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt64Size(4, hashCode_);
  }
  if (!getMaybeTypeNameBytes().isEmpty()) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, maybeTypeName_);
  }
  for (int i = 0; i < dtypesAndShapes_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(6, dtypesAndShapes_.get(i));
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof org.tensorflow.proto.framework.ResourceHandleProto)) {
    return super.equals(obj);
  }
  org.tensorflow.proto.framework.ResourceHandleProto other = (org.tensorflow.proto.framework.ResourceHandleProto) obj;

  if (!getDevice()
      .equals(other.getDevice())) return false;
  if (!getContainer()
      .equals(other.getContainer())) return false;
  if (!getName()
      .equals(other.getName())) return false;
  if (getHashCode()
      != other.getHashCode()) return false;
  if (!getMaybeTypeName()
      .equals(other.getMaybeTypeName())) return false;
  if (!getDtypesAndShapesList()
      .equals(other.getDtypesAndShapesList())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + DEVICE_FIELD_NUMBER;
  hash = (53 * hash) + getDevice().hashCode();
  hash = (37 * hash) + CONTAINER_FIELD_NUMBER;
  hash = (53 * hash) + getContainer().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (37 * hash) + HASH_CODE_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
      getHashCode());
  hash = (37 * hash) + MAYBE_TYPE_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getMaybeTypeName().hashCode();
  if (getDtypesAndShapesCount() > 0) {
    hash = (37 * hash) + DTYPES_AND_SHAPES_FIELD_NUMBER;
    hash = (53 * hash) + getDtypesAndShapesList().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}

public static org.tensorflow.proto.framework.ResourceHandleProto parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.tensorflow.proto.framework.ResourceHandleProto parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.proto.framework.ResourceHandleProto parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.tensorflow.proto.framework.ResourceHandleProto parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.proto.framework.ResourceHandleProto parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.tensorflow.proto.framework.ResourceHandleProto parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.proto.framework.ResourceHandleProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.tensorflow.proto.framework.ResourceHandleProto parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.proto.framework.ResourceHandleProto parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.proto.framework.ResourceHandleProto parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.proto.framework.ResourceHandleProto parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.tensorflow.proto.framework.ResourceHandleProto parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.tensorflow.proto.framework.ResourceHandleProto prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * <pre>
 * Protocol buffer representing a handle to a tensorflow resource. Handles are
 * not valid across executions, but can be serialized back and forth from within
 * a single run.
 * </pre>
 *
 * Protobuf type {@code tensorflow.ResourceHandleProto}
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:tensorflow.ResourceHandleProto)
    org.tensorflow.proto.framework.ResourceHandleProtoOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.tensorflow.proto.framework.ResourceHandle.internal_static_tensorflow_ResourceHandleProto_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.tensorflow.proto.framework.ResourceHandle.internal_static_tensorflow_ResourceHandleProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.tensorflow.proto.framework.ResourceHandleProto.class, org.tensorflow.proto.framework.ResourceHandleProto.Builder.class);
  }

  // Construct using org.tensorflow.proto.framework.ResourceHandleProto.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
      getDtypesAndShapesFieldBuilder();
    }
  }
  @java.lang.Override
  public Builder clear() {
    super.clear();
    device_ = "";

    container_ = "";

    name_ = "";

    hashCode_ = 0L;

    maybeTypeName_ = "";

    if (dtypesAndShapesBuilder_ == null) {
      dtypesAndShapes_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
    } else {
      dtypesAndShapesBuilder_.clear();
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.tensorflow.proto.framework.ResourceHandle.internal_static_tensorflow_ResourceHandleProto_descriptor;
  }

  @java.lang.Override
  public org.tensorflow.proto.framework.ResourceHandleProto getDefaultInstanceForType() {
    return org.tensorflow.proto.framework.ResourceHandleProto.getDefaultInstance();
  }

  @java.lang.Override
  public org.tensorflow.proto.framework.ResourceHandleProto build() {
    org.tensorflow.proto.framework.ResourceHandleProto result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public org.tensorflow.proto.framework.ResourceHandleProto buildPartial() {
    org.tensorflow.proto.framework.ResourceHandleProto result = new org.tensorflow.proto.framework.ResourceHandleProto(this);
    int from_bitField0_ = bitField0_;
    result.device_ = device_;
    result.container_ = container_;
    result.name_ = name_;
    result.hashCode_ = hashCode_;
    result.maybeTypeName_ = maybeTypeName_;
    if (dtypesAndShapesBuilder_ == null) {
      // Freeze the repeated field on build so the message stays immutable.
      if (((bitField0_ & 0x00000001) != 0)) {
        dtypesAndShapes_ = java.util.Collections.unmodifiableList(dtypesAndShapes_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.dtypesAndShapes_ = dtypesAndShapes_;
    } else {
      result.dtypesAndShapes_ = dtypesAndShapesBuilder_.build();
    }
    onBuilt();
    return result;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(
      com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(
      com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof org.tensorflow.proto.framework.ResourceHandleProto) {
      return mergeFrom((org.tensorflow.proto.framework.ResourceHandleProto)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.tensorflow.proto.framework.ResourceHandleProto other) {
    if (other == org.tensorflow.proto.framework.ResourceHandleProto.getDefaultInstance()) return this;
    if (!other.getDevice().isEmpty()) {
      device_ = other.device_;
      onChanged();
    }
    if (!other.getContainer().isEmpty()) {
      container_ = other.container_;
      onChanged();
    }
    if (!other.getName().isEmpty()) {
      name_ = other.name_;
      onChanged();
    }
    if (other.getHashCode() != 0L) {
      setHashCode(other.getHashCode());
    }
    if (!other.getMaybeTypeName().isEmpty()) {
      maybeTypeName_ = other.maybeTypeName_;
      onChanged();
    }
    if (dtypesAndShapesBuilder_ == null) {
      if (!other.dtypesAndShapes_.isEmpty()) {
        if (dtypesAndShapes_.isEmpty()) {
          // Share the other message's immutable list until a mutation occurs.
          dtypesAndShapes_ = other.dtypesAndShapes_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureDtypesAndShapesIsMutable();
          dtypesAndShapes_.addAll(other.dtypesAndShapes_);
        }
        onChanged();
      }
    } else {
      if (!other.dtypesAndShapes_.isEmpty()) {
        if (dtypesAndShapesBuilder_.isEmpty()) {
          dtypesAndShapesBuilder_.dispose();
          dtypesAndShapesBuilder_ = null;
          dtypesAndShapes_ = other.dtypesAndShapes_;
          bitField0_ = (bitField0_ & ~0x00000001);
          dtypesAndShapesBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getDtypesAndShapesFieldBuilder() : null; } else { dtypesAndShapesBuilder_.addAllMessages(other.dtypesAndShapes_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.tensorflow.proto.framework.ResourceHandleProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.tensorflow.proto.framework.ResourceHandleProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object device_ = ""; /** * <pre> * Unique name for the device containing the resource. * </pre> * * <code>string device = 1;</code> */ public java.lang.String getDevice() { java.lang.Object ref = device_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); device_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Unique name for the device containing the resource. * </pre> * * <code>string device = 1;</code> */ public com.google.protobuf.ByteString getDeviceBytes() { java.lang.Object ref = device_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); device_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Unique name for the device containing the resource. 
* </pre> * * <code>string device = 1;</code> */ public Builder setDevice( java.lang.String value) { if (value == null) { throw new NullPointerException(); } device_ = value; onChanged(); return this; } /** * <pre> * Unique name for the device containing the resource. * </pre> * * <code>string device = 1;</code> */ public Builder clearDevice() { device_ = getDefaultInstance().getDevice(); onChanged(); return this; } /** * <pre> * Unique name for the device containing the resource. * </pre> * * <code>string device = 1;</code> */ public Builder setDeviceBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); device_ = value; onChanged(); return this; } private java.lang.Object container_ = ""; /** * <pre> * Container in which this resource is placed. * </pre> * * <code>string container = 2;</code> */ public java.lang.String getContainer() { java.lang.Object ref = container_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); container_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Container in which this resource is placed. * </pre> * * <code>string container = 2;</code> */ public com.google.protobuf.ByteString getContainerBytes() { java.lang.Object ref = container_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); container_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Container in which this resource is placed. * </pre> * * <code>string container = 2;</code> */ public Builder setContainer( java.lang.String value) { if (value == null) { throw new NullPointerException(); } container_ = value; onChanged(); return this; } /** * <pre> * Container in which this resource is placed. 
* </pre> * * <code>string container = 2;</code> */ public Builder clearContainer() { container_ = getDefaultInstance().getContainer(); onChanged(); return this; } /** * <pre> * Container in which this resource is placed. * </pre> * * <code>string container = 2;</code> */ public Builder setContainerBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); container_ = value; onChanged(); return this; } private java.lang.Object name_ = ""; /** * <pre> * Unique name of this resource. * </pre> * * <code>string name = 3;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Unique name of this resource. * </pre> * * <code>string name = 3;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Unique name of this resource. * </pre> * * <code>string name = 3;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; onChanged(); return this; } /** * <pre> * Unique name of this resource. * </pre> * * <code>string name = 3;</code> */ public Builder clearName() { name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <pre> * Unique name of this resource. 
* </pre> * * <code>string name = 3;</code> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; onChanged(); return this; } private long hashCode_ ; /** * <pre> * Hash code for the type of the resource. Is only valid in the same device * and in the same execution. * </pre> * * <code>uint64 hash_code = 4;</code> */ public long getHashCode() { return hashCode_; } /** * <pre> * Hash code for the type of the resource. Is only valid in the same device * and in the same execution. * </pre> * * <code>uint64 hash_code = 4;</code> */ public Builder setHashCode(long value) { hashCode_ = value; onChanged(); return this; } /** * <pre> * Hash code for the type of the resource. Is only valid in the same device * and in the same execution. * </pre> * * <code>uint64 hash_code = 4;</code> */ public Builder clearHashCode() { hashCode_ = 0L; onChanged(); return this; } private java.lang.Object maybeTypeName_ = ""; /** * <pre> * For debug-only, the name of the type pointed to by this handle, if * available. * </pre> * * <code>string maybe_type_name = 5;</code> */ public java.lang.String getMaybeTypeName() { java.lang.Object ref = maybeTypeName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); maybeTypeName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * For debug-only, the name of the type pointed to by this handle, if * available. 
* </pre> * * <code>string maybe_type_name = 5;</code> */ public com.google.protobuf.ByteString getMaybeTypeNameBytes() { java.lang.Object ref = maybeTypeName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); maybeTypeName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * For debug-only, the name of the type pointed to by this handle, if * available. * </pre> * * <code>string maybe_type_name = 5;</code> */ public Builder setMaybeTypeName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } maybeTypeName_ = value; onChanged(); return this; } /** * <pre> * For debug-only, the name of the type pointed to by this handle, if * available. * </pre> * * <code>string maybe_type_name = 5;</code> */ public Builder clearMaybeTypeName() { maybeTypeName_ = getDefaultInstance().getMaybeTypeName(); onChanged(); return this; } /** * <pre> * For debug-only, the name of the type pointed to by this handle, if * available. 
* </pre> * * <code>string maybe_type_name = 5;</code> */ public Builder setMaybeTypeNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); maybeTypeName_ = value; onChanged(); return this; } private java.util.List<org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape> dtypesAndShapes_ = java.util.Collections.emptyList(); private void ensureDtypesAndShapesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { dtypesAndShapes_ = new java.util.ArrayList<org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape>(dtypesAndShapes_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShapeOrBuilder> dtypesAndShapesBuilder_; /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public java.util.List<org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape> getDtypesAndShapesList() { if (dtypesAndShapesBuilder_ == null) { return java.util.Collections.unmodifiableList(dtypesAndShapes_); } else { return dtypesAndShapesBuilder_.getMessageList(); } } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public int getDtypesAndShapesCount() { if (dtypesAndShapesBuilder_ == null) { return dtypesAndShapes_.size(); } else { return dtypesAndShapesBuilder_.getCount(); } } /** * <pre> * Data types and shapes for the underlying resource. 
* </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape getDtypesAndShapes(int index) { if (dtypesAndShapesBuilder_ == null) { return dtypesAndShapes_.get(index); } else { return dtypesAndShapesBuilder_.getMessage(index); } } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public Builder setDtypesAndShapes( int index, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape value) { if (dtypesAndShapesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDtypesAndShapesIsMutable(); dtypesAndShapes_.set(index, value); onChanged(); } else { dtypesAndShapesBuilder_.setMessage(index, value); } return this; } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public Builder setDtypesAndShapes( int index, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder builderForValue) { if (dtypesAndShapesBuilder_ == null) { ensureDtypesAndShapesIsMutable(); dtypesAndShapes_.set(index, builderForValue.build()); onChanged(); } else { dtypesAndShapesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * Data types and shapes for the underlying resource. 
* </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public Builder addDtypesAndShapes(org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape value) { if (dtypesAndShapesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDtypesAndShapesIsMutable(); dtypesAndShapes_.add(value); onChanged(); } else { dtypesAndShapesBuilder_.addMessage(value); } return this; } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public Builder addDtypesAndShapes( int index, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape value) { if (dtypesAndShapesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDtypesAndShapesIsMutable(); dtypesAndShapes_.add(index, value); onChanged(); } else { dtypesAndShapesBuilder_.addMessage(index, value); } return this; } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public Builder addDtypesAndShapes( org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder builderForValue) { if (dtypesAndShapesBuilder_ == null) { ensureDtypesAndShapesIsMutable(); dtypesAndShapes_.add(builderForValue.build()); onChanged(); } else { dtypesAndShapesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * Data types and shapes for the underlying resource. 
* </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public Builder addDtypesAndShapes( int index, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder builderForValue) { if (dtypesAndShapesBuilder_ == null) { ensureDtypesAndShapesIsMutable(); dtypesAndShapes_.add(index, builderForValue.build()); onChanged(); } else { dtypesAndShapesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public Builder addAllDtypesAndShapes( java.lang.Iterable<? extends org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape> values) { if (dtypesAndShapesBuilder_ == null) { ensureDtypesAndShapesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, dtypesAndShapes_); onChanged(); } else { dtypesAndShapesBuilder_.addAllMessages(values); } return this; } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public Builder clearDtypesAndShapes() { if (dtypesAndShapesBuilder_ == null) { dtypesAndShapes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { dtypesAndShapesBuilder_.clear(); } return this; } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public Builder removeDtypesAndShapes(int index) { if (dtypesAndShapesBuilder_ == null) { ensureDtypesAndShapesIsMutable(); dtypesAndShapes_.remove(index); onChanged(); } else { dtypesAndShapesBuilder_.remove(index); } return this; } /** * <pre> * Data types and shapes for the underlying resource. 
* </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder getDtypesAndShapesBuilder( int index) { return getDtypesAndShapesFieldBuilder().getBuilder(index); } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShapeOrBuilder getDtypesAndShapesOrBuilder( int index) { if (dtypesAndShapesBuilder_ == null) { return dtypesAndShapes_.get(index); } else { return dtypesAndShapesBuilder_.getMessageOrBuilder(index); } } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public java.util.List<? extends org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShapeOrBuilder> getDtypesAndShapesOrBuilderList() { if (dtypesAndShapesBuilder_ != null) { return dtypesAndShapesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(dtypesAndShapes_); } } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder addDtypesAndShapesBuilder() { return getDtypesAndShapesFieldBuilder().addBuilder( org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.getDefaultInstance()); } /** * <pre> * Data types and shapes for the underlying resource. 
* </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder addDtypesAndShapesBuilder( int index) { return getDtypesAndShapesFieldBuilder().addBuilder( index, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.getDefaultInstance()); } /** * <pre> * Data types and shapes for the underlying resource. * </pre> * * <code>repeated .tensorflow.ResourceHandleProto.DtypeAndShape dtypes_and_shapes = 6;</code> */ public java.util.List<org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder> getDtypesAndShapesBuilderList() { return getDtypesAndShapesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShapeOrBuilder> getDtypesAndShapesFieldBuilder() { if (dtypesAndShapesBuilder_ == null) { dtypesAndShapesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShape.Builder, org.tensorflow.proto.framework.ResourceHandleProto.DtypeAndShapeOrBuilder>( dtypesAndShapes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); dtypesAndShapes_ = null; } return dtypesAndShapesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:tensorflow.ResourceHandleProto) } // 
@@protoc_insertion_point(class_scope:tensorflow.ResourceHandleProto) private static final org.tensorflow.proto.framework.ResourceHandleProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.tensorflow.proto.framework.ResourceHandleProto(); } public static org.tensorflow.proto.framework.ResourceHandleProto getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ResourceHandleProto> PARSER = new com.google.protobuf.AbstractParser<ResourceHandleProto>() { @java.lang.Override public ResourceHandleProto parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ResourceHandleProto(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ResourceHandleProto> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ResourceHandleProto> getParserForType() { return PARSER; } @java.lang.Override public org.tensorflow.proto.framework.ResourceHandleProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
31,924
1,568
from __future__ import print_function
'''
Double-base palindromes
Problem 36

The decimal number, 585 = 1001001001 (binary), is palindromic in both bases.

Find the sum of all numbers, less than one million, which are palindromic in
base 10 and base 2.

(Please note that the palindromic number, in either base, may not include
leading zeros.)
'''
try:
    xrange  # Python 2: keep the lazy range
except NameError:
    xrange = range  # Python 3


def is_palindrome(n):
    """Return True if str(n) reads the same forwards and backwards.

    Accepts anything with a sensible str() form (int or an already-built
    digit string such as a binary representation).
    """
    s = str(n)
    return s == s[::-1]


def solve(limit=1000000):
    """Sum all positive integers below `limit` that are palindromic in
    both base 10 and base 2.

    Only odd candidates are tested: an even number's binary form ends in 0,
    and a binary palindrome may not carry a leading zero, so no even number
    can qualify.  bin(i)[2:] strips the '0b' prefix.
    """
    return sum(i for i in xrange(1, limit, 2)
               if is_palindrome(i) and is_palindrome(bin(i)[2:]))


if __name__ == '__main__':
    # Project Euler asks for the sum below one million.
    print(solve())
231
721
<gh_stars>100-1000 package crazypants.enderio.base.config.factory; import javax.annotation.Nonnull; import info.loenwind.autoconfig.factory.ValueFactory; public class ValueFactoryEIO extends ValueFactory implements IFactoryEIO { public ValueFactoryEIO(@Nonnull String modid) { super(modid); } @Override public @Nonnull IValueFactoryEIO section(@SuppressWarnings("hiding") @Nonnull String section) { return new SlaveFactoryEIO(this, section); } }
157
1,133
/* Copyright 2015 <NAME>.P.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
   either express or implied. See the License for the specific
   language governing permissions and limitations under the License.
*/

#ifndef __rep_qstat_h
#define __rep_qstat_h

#include <net.h>

/*
 * Statistics about queued replication network messages, kept per
 * (nettype, hostname) pair.  All mutable counters are protected by `lock`.
 * NOTE(review): DB_LSN and netinfo_type come from project headers (net.h
 * and the BerkeleyDB layer) -- field semantics below are inferred from the
 * names and should be confirmed against the .c implementation.
 */
typedef struct net_queue_stat {
    char *nettype;          /* network type this entry tracks */
    char *hostname;         /* peer host this entry tracks */
    pthread_mutex_t lock;   /* guards the counters below */

    /* Keep track of the minimum and maximum lsn */
    DB_LSN min_lsn;         /* smallest LSN seen on the queue */
    DB_LSN max_lsn;         /* largest LSN seen on the queue */

    /* Keep track of how many of each type of record */
    int max_type;           /* highest record type counted so far */
    int *type_counts;       /* presumably an array indexed by record type --
                               TODO confirm allocation/sizing in the .c file */

    /* Other counts */
    int64_t unknown_count;  /* records whose type was not recognized */
    int64_t total_count;    /* all records observed */
} net_queue_stat_t;

/* Hook queue-statistics collection into the given net layer instance. */
void net_rep_qstat_init(netinfo_type *netinfo_ptr);

#endif
390