max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
3,269
<reponame>Sourav692/FAANG-Interview-Preparation<filename>Algo and DSA/LeetCode-Solutions-master/Python/the-skyline-problem.py # Time: O(nlogn) # Space: O(n) start, end, height = 0, 1, 2 class Solution(object): # @param {integer[][]} buildings # @return {integer[][]} def getSkyline(self, buildings): intervals = self.ComputeSkylineInInterval(buildings, 0, len(buildings)) res = [] last_end = -1 for interval in intervals: if last_end != -1 and last_end < interval[start]: res.append([last_end, 0]) res.append([interval[start], interval[height]]) last_end = interval[end] if last_end != -1: res.append([last_end, 0]) return res # Divide and Conquer. def ComputeSkylineInInterval(self, buildings, left_endpoint, right_endpoint): if right_endpoint - left_endpoint <= 1: return buildings[left_endpoint:right_endpoint] mid = left_endpoint + ((right_endpoint - left_endpoint) / 2) left_skyline = self.ComputeSkylineInInterval(buildings, left_endpoint, mid) right_skyline = self.ComputeSkylineInInterval(buildings, mid, right_endpoint) return self.MergeSkylines(left_skyline, right_skyline) # Merge Sort. def MergeSkylines(self, left_skyline, right_skyline): i, j = 0, 0 merged = [] while i < len(left_skyline) and j < len(right_skyline): if left_skyline[i][end] < right_skyline[j][start]: merged.append(left_skyline[i]) i += 1 elif right_skyline[j][end] < left_skyline[i][start]: merged.append(right_skyline[j]) j += 1 elif left_skyline[i][start] <= right_skyline[j][start]: i, j = self.MergeIntersectSkylines(merged, left_skyline[i], i,\ right_skyline[j], j) else: # left_skyline[i][start] > right_skyline[j][start]. j, i = self.MergeIntersectSkylines(merged, right_skyline[j], j, \ left_skyline[i], i) # Insert the remaining skylines. merged += left_skyline[i:] merged += right_skyline[j:] return merged # a[start] <= b[start] def MergeIntersectSkylines(self, merged, a, a_idx, b, b_idx): if a[end] <= b[end]: if a[height] > b[height]: # |aaa| if b[end] != a[end]: # |abb|b b[start] = a[end] merged.append(a) a_idx += 1 else: # aaa b_idx += 1 # abb elif a[height] == b[height]: # abb b[start] = a[start] # abb a_idx += 1 else: # a[height] < b[height]. if a[start] != b[start]: # bb merged.append([a[start], b[start], a[height]]) # |a|bb a_idx += 1 else: # a[end] > b[end]. if a[height] >= b[height]: # aaaa b_idx += 1 # abba else: # |bb| # |a||bb|a if a[start] != b[start]: merged.append([a[start], b[start], a[height]]) a[start] = b[end] merged.append(b) b_idx += 1 return a_idx, b_idx
1,908
6,478
/* * Copyright 2013-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.cloud.nacos; import com.alibaba.cloud.nacos.client.NacosPropertySourceLocator; import com.alibaba.cloud.nacos.endpoint.NacosConfigEndpointAutoConfiguration; import com.alibaba.nacos.api.exception.NacosException; import com.alibaba.nacos.client.config.NacosConfigService; import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.context.annotation.Configuration; import org.springframework.core.env.Environment; import org.springframework.test.util.ReflectionTestUtils; import static com.alibaba.cloud.nacos.NacosConfigurationExtConfigTests.TestConfig; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.when; import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.NONE; /** * * @author xiaojing * @author freeman */ @SpringBootTest(classes = TestConfig.class, webEnvironment = NONE, properties = { "spring.application.name=myTestService1", "spring.profiles.active=dev,test", "spring.cloud.nacos.config.server-addr=127.0.0.1:8848", "spring.cloud.nacos.config.encode=utf-8", "spring.cloud.nacos.config.timeout=1000", "spring.cloud.nacos.config.file-extension=properties", "spring.cloud.nacos.config.ext-config[0].data-id=ext-config-common01.properties", "spring.cloud.nacos.config.ext-config[1].data-id=ext-config-common02.properties", "spring.cloud.nacos.config.ext-config[1].group=GLOBAL_GROUP", "spring.cloud.nacos.config.shared-dataids=common1.properties,common2.properties", "spring.cloud.nacos.config.accessKey=test-accessKey", "spring.cloud.nacos.config.secretKey=test-secretKey", "spring.cloud.bootstrap.enabled=true" }) public class NacosConfigurationExtConfigTests { @Autowired private Environment environment; @Autowired private NacosPropertySourceLocator locator; @Autowired private NacosConfigProperties properties; static { try { NacosConfigService mockedNacosConfigService = Mockito .mock(NacosConfigService.class); when(mockedNacosConfigService.getConfig(any(), any(), anyLong())) .thenAnswer(new Answer<String>() { @Override public String answer(InvocationOnMock invocationOnMock) throws Throwable { String dataId = invocationOnMock.getArgument(0, String.class); String group = invocationOnMock.getArgument(1, String.class); if ("test-name.properties".equals(dataId) && "DEFAULT_GROUP".equals(group)) { return "user.name=hello\nuser.age=12"; } if ("test-name-dev.properties".equals(dataId) && "DEFAULT_GROUP".equals(group)) { return "user.name=dev"; } if 
("ext-config-common01.properties".equals(dataId) && "DEFAULT_GROUP".equals(group)) { return "test-ext-config1=config1\ntest-ext-config2=config1"; } if ("ext-config-common02.properties".equals(dataId) && "GLOBAL_GROUP".equals(group)) { return "test-ext-config2=config2"; } if ("common1.properties".equals(dataId) && "DEFAULT_GROUP".equals(group)) { return "test-common1=common1\ntest-common2=common1"; } if ("common2.properties".equals(dataId) && "DEFAULT_GROUP".equals(group)) { return "test-common2=common2"; } return ""; } }); ReflectionTestUtils.setField(NacosConfigManager.class, "service", mockedNacosConfigService); } catch (NacosException ignored) { ignored.printStackTrace(); } } @Test public void contextLoads() throws Exception { assertThat(locator).isNotNull(); assertThat(properties).isNotNull(); assertThat(environment.getProperty("test-ext-config1")).isEqualTo("config1"); assertThat(environment.getProperty("test-ext-config2")).isEqualTo("config2"); assertThat(environment.getProperty("test-common1")).isEqualTo("common1"); assertThat(environment.getProperty("test-common2")).isEqualTo("common2"); } @Configuration @EnableAutoConfiguration @ImportAutoConfiguration({ NacosConfigEndpointAutoConfiguration.class, NacosConfigAutoConfiguration.class, NacosConfigBootstrapConfiguration.class }) public static class TestConfig { } }
1,923
11,396
# Generated by Django 2.2.16 on 2021-05-12 20:08 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('main', '0141_remove_isolated_instances'), ] operations = [ migrations.AlterField( model_name='executionenvironment', name='image', field=models.CharField( help_text='The full image location, including the container registry, image name, and version tag.', max_length=1024, verbose_name='image location', ), ), ]
267
2,151
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/tracing/background_memory_tracing_observer.h" #include "base/trace_event/heap_profiler_allocation_context_tracker.h" #include "base/trace_event/heap_profiler_event_filter.h" #include "base/trace_event/memory_dump_request_args.h" #include "base/trace_event/trace_log.h" #include "content/browser/tracing/background_tracing_rule.h" #include "services/resource_coordinator/public/cpp/memory_instrumentation/memory_instrumentation.h" using base::trace_event::AllocationContextTracker; using base::trace_event::TraceConfig; using base::trace_event::TraceLog; namespace content { namespace { const char kHeapProfilerCategoryFilter[] = "heap_profiler_category_filter"; } // namespace // static BackgroundMemoryTracingObserver* BackgroundMemoryTracingObserver::GetInstance() { static auto* instance = new BackgroundMemoryTracingObserver(); return instance; } BackgroundMemoryTracingObserver::BackgroundMemoryTracingObserver() {} BackgroundMemoryTracingObserver::~BackgroundMemoryTracingObserver() {} void BackgroundMemoryTracingObserver::OnScenarioActivated( const BackgroundTracingConfigImpl* config) { if (!config) { DCHECK(!enabled_); return; } const BackgroundTracingRule* heap_profiling_rule = nullptr; for (const auto& rule : config->rules()) { if (rule->category_preset() == BackgroundTracingConfigImpl::CategoryPreset:: BENCHMARK_MEMORY_LIGHT && rule->args()) { heap_profiling_rule = rule.get(); break; } } if (!heap_profiling_rule) return; enabled_ = true; // TODO(ssid): Add ability to enable profiling on all processes, // crbug.com/700245. AllocationContextTracker::SetCaptureMode( AllocationContextTracker::CaptureMode::MIXED_STACK); std::string filter_string; if ((TraceLog::GetInstance()->enabled_modes() & TraceLog::FILTERING_MODE) || !heap_profiling_rule->args()->GetString(kHeapProfilerCategoryFilter, &filter_string)) { return; } base::trace_event::TraceConfigCategoryFilter category_filter; category_filter.InitializeFromString(filter_string); TraceConfig::EventFilterConfig heap_profiler_filter_config( base::trace_event::HeapProfilerEventFilter::kName); heap_profiler_filter_config.SetCategoryFilter(category_filter); TraceConfig::EventFilters filters; filters.push_back(heap_profiler_filter_config); TraceConfig filtering_trace_config; filtering_trace_config.SetEventFilters(filters); TraceLog::GetInstance()->SetEnabled(filtering_trace_config, TraceLog::FILTERING_MODE); } void BackgroundMemoryTracingObserver::OnScenarioAborted() { if (!enabled_) return; enabled_ = false; base::trace_event::AllocationContextTracker::SetCaptureMode( AllocationContextTracker::CaptureMode::DISABLED); TraceLog::GetInstance()->SetDisabled(TraceLog::FILTERING_MODE); } void BackgroundMemoryTracingObserver::OnTracingEnabled( BackgroundTracingConfigImpl::CategoryPreset preset) { if (preset != BackgroundTracingConfigImpl::CategoryPreset::BENCHMARK_MEMORY_LIGHT) return; memory_instrumentation::MemoryInstrumentation::GetInstance() ->RequestGlobalDumpAndAppendToTrace( base::trace_event::MemoryDumpType::EXPLICITLY_TRIGGERED, base::trace_event::MemoryDumpLevelOfDetail::BACKGROUND, memory_instrumentation::MemoryInstrumentation:: RequestGlobalMemoryDumpAndAppendToTraceCallback()); } } // namespace content
1,309
541
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.app.rest.link.process; import java.util.LinkedList; import java.util.Map; import org.dspace.app.rest.RestResourceController; import org.dspace.app.rest.link.HalLinkFactory; import org.dspace.app.rest.model.SubmissionCCLicenseUrlRest; import org.dspace.app.rest.model.hateoas.SubmissionCCLicenseUrlResource; import org.dspace.services.RequestService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Pageable; import org.springframework.hateoas.Link; import org.springframework.stereotype.Component; import org.springframework.util.LinkedMultiValueMap; import org.springframework.web.util.UriComponentsBuilder; /** * This class will provide the SubmissionCCLicenseUrlResource with links */ @Component public class SubmissionCCLicenseUrlResourceHalLinkFactory extends HalLinkFactory<SubmissionCCLicenseUrlResource, RestResourceController> { @Autowired RequestService requestService; /** * Add a self link based on the search parameters * * @param halResource - The halResource * @param pageable - The page information * @param list - The list of present links * @throws Exception */ @Override protected void addLinks(SubmissionCCLicenseUrlResource halResource, final Pageable pageable, LinkedList<Link> list) throws Exception { halResource.removeLinks(); Map<String, String[]> parameterMap = requestService.getCurrentRequest().getHttpServletRequest() .getParameterMap(); UriComponentsBuilder uriComponentsBuilder = uriBuilder(getMethodOn().executeSearchMethods( SubmissionCCLicenseUrlRest.CATEGORY, SubmissionCCLicenseUrlRest.PLURAL, "rightsByQuestions", null, null, null, null, new LinkedMultiValueMap<>())); for (String key : parameterMap.keySet()) { uriComponentsBuilder.queryParam(key, parameterMap.get(key)); } list.add(buildLink("self", uriComponentsBuilder.build().toUriString())); } @Override protected Class<RestResourceController> getControllerClass() { return RestResourceController.class; } @Override protected Class<SubmissionCCLicenseUrlResource> getResourceClass() { return SubmissionCCLicenseUrlResource.class; } }
924
372
<reponame>kbore/pbis-open /* * * (c) Copyright 1992 OPEN SOFTWARE FOUNDATION, INC. * (c) Copyright 1992 HEWLETT-PACKARD COMPANY * (c) Copyright 1992 DIGITAL EQUIPMENT CORPORATION * To anyone who acknowledges that this file is provided "AS IS" * without any express or implied warranty: * permission to use, copy, modify, and distribute this * file for any purpose is hereby granted without fee, provided that * the above copyright notices and this notice appears in all source * code copies, and that none of the names of Open Software * Foundation, Inc., Hewlett-Packard Company, or Digital Equipment * Corporation be used in advertising or publicity pertaining to * distribution of the software without specific, written prior * permission. Neither Open Software Foundation, Inc., Hewlett- * Packard Company, nor Digital Equipment Corporation makes any * representations about the suitability of this software for any * purpose. * */ /* ** ** NAME: ** ** mtspipes.c ** ** FACILITY: ** ** Interface Definition Language (IDL) Compiler ** ** ABSTRACT: ** ** Pipes for MTS compiler ** ** VERSION: DCE 1.0 ** */ #include <nidl.h> #include <ast.h> #include <bedeck.h> #include <cspell.h> #include <ddbe.h> #include <dutils.h> #include <nametbl.h> #include <mtspipes.h> /******************************************************************************/ /* */ /* Find index of next [in] or [out] pipe */ /* */ /******************************************************************************/ static void BE_get_next_pipe_index ( AST_parameter_n_t *p_parameter, unsigned long ast_in_or_out, /* AST_IN or AST_OUT */ long curr_pipe_index, long *p_next_pipe_index /* 0 if no more pipes inrequested direction */ ) { for ( p_parameter = p_parameter->next; p_parameter != NULL; p_parameter = p_parameter->next ) { if ( (p_parameter->type->kind == AST_pipe_k) || ((p_parameter->type->kind == AST_pointer_k) && (p_parameter->type->type_structure.pointer->pointee_type ->kind == AST_pipe_k)) ) { curr_pipe_index++; if (ast_in_or_out & (p_parameter->flags)) { *p_next_pipe_index = curr_pipe_index; return; } } } *p_next_pipe_index = 0; } /******************************************************************************/ /* */ /* Get pipe type name for parameter */ /* */ /******************************************************************************/ static void BE_get_pipe_type_name ( AST_parameter_n_t *p_parameter, char const **p_p_name ) { if (p_parameter->type->kind == AST_pipe_k) { NAMETABLE_id_to_string( p_parameter->type->name, p_p_name ); } else /* parameter is reference pointer to pipe */ { NAMETABLE_id_to_string( p_parameter->type->type_structure.pointer ->pointee_type->name, p_p_name ); } } /******************************************************************************/ /* */ /* Initialization of server pipes */ /* */ /******************************************************************************/ void DDBE_init_server_pipes ( FILE *fid, AST_operation_n_t *p_operation, long *p_first_pipe /* ptr to index and direction of first pipe */ ) { long first_in_pipe; /* index of first [in] pipe */ long first_out_pipe; /* index of first [out] pipe */ long curr_pipe_index; long next_in_pipe_index; long next_out_pipe_index; AST_parameter_n_t *p_parameter; char const *p_pipe_type_name; /* Establish indices of first pipes */ first_in_pipe = 0; first_out_pipe = 0; curr_pipe_index = 0; for ( p_parameter = p_operation->parameters; p_parameter != NULL; p_parameter = p_parameter->next ) { if ( (p_parameter->type->kind == AST_pipe_k) || 
((p_parameter->type->kind == AST_pointer_k) && (p_parameter->type->type_structure.pointer->pointee_type ->kind == AST_pipe_k)) ) { curr_pipe_index++; if ( AST_IN_SET(p_parameter) ) { if (first_in_pipe == 0) first_in_pipe = curr_pipe_index; } if ( AST_OUT_SET(p_parameter) ) { if (first_out_pipe == 0) first_out_pipe = curr_pipe_index; } } } if ( first_in_pipe != 0 ) *p_first_pipe = first_in_pipe; else *p_first_pipe = -first_out_pipe; /* Emit initialization code */ curr_pipe_index = 0; for ( p_parameter = p_operation->parameters; p_parameter != NULL; p_parameter = p_parameter->next ) { if ( (p_parameter->type->kind == AST_pipe_k) || ((p_parameter->type->kind == AST_pointer_k) && (p_parameter->type->type_structure.pointer->pointee_type ->kind == AST_pipe_k)) ) { AST_type_n_t *pipe_t = p_parameter->type; /* Find pipe type, if passed by reference */ if (pipe_t->kind == AST_pointer_k) pipe_t = pipe_t->type_structure.pointer->pointee_type; curr_pipe_index++; BE_get_pipe_type_name( p_parameter, &p_pipe_type_name ); /* Hook the push and pull routines */ fprintf( fid, "%s.push=(",BE_get_name(p_parameter->name) ); CSPELL_pipe_struct_routine_decl(fid, pipe_t, BE_pipe_push_k, TRUE); fprintf( fid, ")rpc_ss_ndr_ee_marsh_pipe_chunk;\n"); fprintf( fid, "%s.pull=(",BE_get_name(p_parameter->name) ); CSPELL_pipe_struct_routine_decl(fid, pipe_t, BE_pipe_pull_k, TRUE); fprintf( fid, ")rpc_ss_ndr_ee_unmar_pipe_chunk;\n"); /* Initialize the state block */ next_in_pipe_index = 0; next_out_pipe_index = 0; if ( AST_IN_SET(p_parameter) ) { BE_get_next_pipe_index( p_parameter, AST_IN, curr_pipe_index, &next_in_pipe_index ); if (next_in_pipe_index == 0) { /* Next pipe is [out] */ if (first_out_pipe != 0) next_in_pipe_index = -first_out_pipe; else next_in_pipe_index = BE_FINISHED_WITH_PIPES; } } if ( AST_OUT_SET(p_parameter) ) { BE_get_next_pipe_index( p_parameter, AST_OUT, curr_pipe_index, &next_out_pipe_index ); if (next_out_pipe_index == 0 ) next_out_pipe_index = BE_FINISHED_WITH_PIPES; else next_out_pipe_index = -next_out_pipe_index; } fprintf( fid, "rpc_ss_mts_init_callee_pipe(%ld,%ld,%ld,&IDL_current_pipe,&IDL_ms,\n", curr_pipe_index, next_in_pipe_index, next_out_pipe_index ); fprintf( fid, "%" PRId_ddbe32 ",(rpc_ss_mts_ee_pipe_state_t**)&%s.state);\n", (p_parameter->type->kind == AST_pipe_k) ? p_parameter->type->be_info.dd_type->type_vec_p->index : p_parameter->type->type_structure.pointer->pointee_type ->be_info.dd_type->type_vec_p->index, BE_get_name(p_parameter->name) ); } } }
4,144
704
<reponame>klen/muffin """Muffin Handlers.""" import inspect import typing as t from http_router import Router from http_router.typing import TYPE_METHODS from asgi_tools import Request from asgi_tools.app import HTTPView, HTTP_METHODS from asgi_tools.utils import is_awaitable class HandlerMeta(type): """Prepare handlers.""" def __new__(mcs, name, bases, params): """Prepare a Handler Class.""" cls = super().__new__(mcs, name, bases, params) # Ensure that the class methods are exist and iterable if not cls.methods: cls.methods = set(method for method in HTTP_METHODS if method.lower() in cls.__dict__) elif isinstance(cls.methods, str): cls.methods = [cls.methods] cls.methods = set(method.upper() for method in cls.methods) for m in cls.methods: method = getattr(cls, m.lower(), None) if method and not is_awaitable(method): raise TypeError(f"The method '{method.__qualname__}' has to be awaitable") return cls def route_method(*paths: str, **params) -> t.Callable: """Mark a method as a route.""" def wrapper(method): """Wrap a method.""" method.__route__ = paths, params return method return wrapper class Handler(HTTPView, metaclass=HandlerMeta): """Class-based view pattern for handling HTTP method dispatching. .. code-block:: python @app.route('/hello', '/hello/{name}') class HelloHandler(Handler): async def get(self, request): name = request.patch_params.get('name') or 'all' return "GET: Hello f{name}" async def post(self, request): name = request.patch_params.get('name') or 'all' return "POST: Hello f{name}" @Handler.route('/hello/custom') async def custom(self, request): return 'Custom HELLO' # ... async def test_my_endpoint(client): response = await client.get('/hello') assert await response.text() == 'GET: Hello all' response = await client.get('/hello/john') assert await response.text() == 'GET: Hello john' response = await client.post('/hello') assert await response.text() == 'POST: Hello all' response = await client.get('/hello/custom') assert await response.text() == 'Custom HELLO' response = await client.delete('/hello') assert response.status_code == 405 """ methods: t.Optional[t.Sequence[str]] = None @classmethod def __route__(cls, router: Router, *paths: str, methods: TYPE_METHODS = None, **params): """Check for registered methods.""" router.bind(cls, *paths, methods=methods or cls.methods, **params) for _, method in inspect.getmembers(cls, lambda m: hasattr(m, '__route__')): cpaths, cparams = method.__route__ router.bind(cls, *cpaths, __meth__=method.__name__, **cparams) return cls def __call__(self, request: Request, *args, **opts) -> t.Awaitable: """Dispatch the given request by HTTP method.""" method = getattr(self, opts.get('__meth__') or request.method.lower()) return method(request) route = route_method
1,411
398
<gh_stars>100-1000 package io.joyrpc.transport.netty4.http2; /*- * #%L * joyrpc * %% * Copyright (C) 2019 joyrpc.io * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import io.joyrpc.transport.channel.Channel; import io.joyrpc.transport.codec.Http2Codec; import io.joyrpc.transport.http2.DefaultHttp2RequestMessage; import io.joyrpc.transport.http2.Http2ResponseMessage; import io.joyrpc.transport.netty4.buffer.NettyChannelBuffer; import io.joyrpc.transport.netty4.transport.NettyServer; import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; import io.netty.handler.codec.http2.*; import io.netty.handler.codec.http2.Http2Connection.PropertyKey; import io.netty.handler.logging.LogLevel; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static io.netty.buffer.Unpooled.wrappedBuffer; /** * http2 server端 编解码器 */ public class Http2ServerCodecHandler extends Http2ConnectionHandler { private static final Logger logger = LoggerFactory.getLogger(NettyServer.class); protected final Http2ConnectionDecoder decoder; protected final Http2ConnectionEncoder encoder; /** * 消息头键 */ protected PropertyKey headerKey; /** * 编解码 */ protected Http2Codec codec; /** * 通道 */ protected Channel channel; public Http2ServerCodecHandler(Http2ConnectionDecoder decoder, Http2ConnectionEncoder encoder, Http2Settings initialSettings, Channel channel, Http2Codec codec) { super(decoder, encoder, initialSettings); this.decoder = decoder; this.encoder = encoder; this.channel = channel; this.codec = codec; this.headerKey = encoder().connection().newKey(); decoder.frameListener(new FrameListener(encoder.connection(), headerKey, codec, channel)); } @Override public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) throws Exception { if (msg instanceof Http2ResponseMessage) { Http2ResponseMessage response = (Http2ResponseMessage) msg; Http2Headers headers = response.headers() == null ? null : new Http2NettyHeaders(response.headers()); Http2Headers endHeaders = response.endHeaders() == null ? null : new Http2NettyHeaders(response.endHeaders()); byte[] content = response.content(); if (headers != null) { //开始头 encoder.writeHeaders(ctx, response.getStreamId(), headers, 0, false, endHeaders == null && content == null ? 
promise : ctx.voidPromise()); } if (endHeaders == null) { //没有结束头 if (content != null) { //有内容 encoder.writeData(ctx, response.getStreamId(), wrappedBuffer(content), 0, response.isEnd(), promise); } } else { //有结束头 if (content != null) { //有内容 encoder.writeData(ctx, response.getStreamId(), wrappedBuffer(content), 0, false, ctx.voidPromise()); } //结束头 encoder.writeHeaders(ctx, response.getStreamId(), endHeaders, 0, true, promise); } } else { super.write(ctx, msg, promise); } } /** * 创建http2服务端编解码处理器 * * @param channel 通道 * @param http2Codec http2编解码 * @return http2服务端编解码处理器 */ public static Http2ServerCodecHandler create(final Channel channel, final Http2Codec http2Codec) { Http2FrameLogger frameLogger = new Http2FrameLogger(LogLevel.DEBUG, Http2ServerCodecHandler.class); int payload = channel.getPayloadSize(); Http2HeadersDecoder headersDecoder = new DefaultHttp2HeadersDecoder(true, payload); Http2FrameReader frameReader = new Http2InboundFrameLogger(new DefaultHttp2FrameReader(headersDecoder), frameLogger); Http2FrameWriter frameWriter = new Http2OutboundFrameLogger(new DefaultHttp2FrameWriter(), frameLogger); Http2Connection connection = new DefaultHttp2Connection(true); Http2ConnectionEncoder encoder = new DefaultHttp2ConnectionEncoder(connection, frameWriter); Http2ConnectionDecoder decoder = new DefaultHttp2ConnectionDecoder(connection, encoder, frameReader); Http2Settings settings = new Http2Settings(); settings.initialWindowSize(1048576); settings.maxConcurrentStreams(Integer.MAX_VALUE); settings.maxHeaderListSize(8192); return new Http2ServerCodecHandler(decoder, encoder, settings, channel, http2Codec); } /** * 框架监听器 */ protected static class FrameListener extends Http2FrameAdapter { /** * 连接 */ protected final Http2Connection connection; /** * 头部Key */ protected final PropertyKey headerKey; /** * 编解码 */ protected final Http2Codec codec; /** * 通道 */ protected final Channel channel; public FrameListener(Http2Connection connection, PropertyKey headerKey, Http2Codec codec, Channel channel) { this.connection = connection; this.headerKey = headerKey; this.codec = codec; this.channel = channel; } @Override public int onDataRead(final ChannelHandlerContext ctx, final int streamId, final ByteBuf data, final int padding, final boolean endOfStream) throws Http2Exception { int processed = data.readableBytes() + padding; Http2Stream http2Stream = connection.stream(streamId); Http2Headers headers = http2Stream.getProperty(headerKey); dispatch(ctx, streamId, headers, data, null, endOfStream); return processed; } @Override public void onHeadersRead(final ChannelHandlerContext ctx, final int streamId, final Http2Headers headers, final int padding, final boolean endStream) throws Http2Exception { if (streamId > 0) { // 正常的请求(streamId==1 的是settings请求) if (endStream) { // 没有DATA帧的请求,可能是DATA dispatch(ctx, streamId, null, null, headers, true); } else { // 缓存起来 Http2Stream stream = connection.stream(streamId); if (stream != null) { stream.setProperty(headerKey, headers); } } } } @Override public void onHeadersRead(final ChannelHandlerContext ctx, final int streamId, final Http2Headers headers, final int streamDependency, final short weight, final boolean exclusive, final int padding, final boolean endStream) throws Http2Exception { onHeadersRead(ctx, streamId, headers, padding, endStream); } @Override public void onRstStreamRead(final ChannelHandlerContext ctx, final int streamId, final long errorCode) { logger.error("onRstStreamRead streamId:" + streamId + " errorCode:" + errorCode); } @Override public 
void onPingRead(final ChannelHandlerContext ctx, final long data) throws Http2Exception { logger.warn("onPingRead data:" + data); } @Override public void onPingAckRead(final ChannelHandlerContext ctx, final long data) throws Http2Exception { logger.warn("onPingAckRead data:" + data); } /** * 派发请求 * * @param ctx 上下文 * @param streamId 流ID * @param headers 开始头 * @param body 数据 * @param endHeaders 结束头 * @param endStream 结束标识 * @throws Http2Exception */ protected void dispatch(final ChannelHandlerContext ctx, final int streamId, final Http2Headers headers, final ByteBuf body, final Http2Headers endHeaders, final boolean endStream) throws Http2Exception { try { //获取请求body byte[] content = body != null ? (byte[]) codec.decode(new Http2DecodeContext(channel), new NettyChannelBuffer(body)) : null; //server端收到消息,没有bizId,这里用streamId充当bizId ctx.fireChannelRead(new DefaultHttp2RequestMessage(streamId, streamId, headers == null ? null : new io.joyrpc.transport.http2.DefaultHttp2Headers(headers), content, endHeaders == null ? null : new io.joyrpc.transport.http2.DefaultHttp2Headers(endHeaders), endStream)); } catch (Exception e) { throw Http2Exception.streamError(streamId, Http2Error.PROTOCOL_ERROR, e, "has error when codec"); } } } }
4,560
806
package com.alibaba.sdk.android.oss.model; public class PutBucketLoggingResult extends OSSResult { }
33
348
{"nom":"Grandvelle-et-le-Perrenot","circ":"1ère circonscription","dpt":"Haute-Saône","inscrits":274,"abs":121,"votants":153,"blancs":13,"nuls":4,"exp":136,"res":[{"nuance":"REM","nom":"Mme <NAME>","voix":71},{"nuance":"FN","nom":"Mme <NAME>","voix":65}]}
103
8,027
package com.test; public class ExportedProvidedLibraryClass {}
17
588
<reponame>fmilano/CppMicroServices #ifndef _SERVICE_IMPL_HPP_ #define _SERVICE_IMPL_HPP_ #include "TestInterfaces/Interfaces.hpp" #include "cppmicroservices/servicecomponent/ComponentContext.hpp" using ComponentContext = cppmicroservices::service::component::ComponentContext; namespace dependent { class TestBundleDSDependentImpl : public test::TestBundleDSDependent { public: TestBundleDSDependentImpl( const std::shared_ptr<test::TestBundleDSUpstreamDependency>&); ~TestBundleDSDependentImpl() override; void Activate(const std::shared_ptr<ComponentContext>&) {} void Deactivate(const std::shared_ptr<ComponentContext>&) {} private: std::shared_ptr<test::TestBundleDSUpstreamDependency> ref; }; } #endif // _SERVICE_IMPL_HPP_
252
17,085
<gh_stars>1000+ /* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #pragma once #include <algorithm> #include <vector> #include "paddle/fluid/framework/eigen.h" #include "paddle/fluid/framework/op_registry.h" #include "paddle/fluid/framework/operator.h" #include "paddle/fluid/operators/eigen/eigen_function.h" #define MAX_RANK_SUPPORTED 6 namespace paddle { namespace operators { inline std::vector<int> get_repeat_times( const framework::ExecutionContext& ctx) { if (ctx.HasInput("RepeatTimes")) { auto* repeat_tensor = ctx.Input<framework::LoDTensor>("RepeatTimes"); auto* repeat_data = repeat_tensor->data<int>(); framework::Tensor cpu_repeat_tensor; if (platform::is_gpu_place(repeat_tensor->place()) || platform::is_npu_place(repeat_tensor->place())) { TensorCopySync(*repeat_tensor, platform::CPUPlace(), &cpu_repeat_tensor); repeat_data = cpu_repeat_tensor.data<int>(); } auto vec_repeat_times = std::vector<int>(repeat_data, repeat_data + repeat_tensor->numel()); return vec_repeat_times; } auto list_repeat_times_tensor = ctx.MultiInput<framework::Tensor>("repeat_times_tensor"); if (list_repeat_times_tensor.size() > 0) { // get tensor from std::vector<int> vec_repeat_times; for (size_t i = 0; i < list_repeat_times_tensor.size(); ++i) { auto tensor = list_repeat_times_tensor[i]; if (platform::is_gpu_place(tensor->place()) || platform::is_npu_place(tensor->place())) { framework::Tensor temp; TensorCopySync(*tensor, platform::CPUPlace(), &temp); vec_repeat_times.push_back(*temp.data<int32_t>()); } else { vec_repeat_times.push_back(*tensor->data<int32_t>()); } } return vec_repeat_times; } else { return ctx.Attr<std::vector<int>>("repeat_times"); } } using Tensor = framework::Tensor; template <typename T, int MajorType = Eigen::RowMajor, typename IndexType = Eigen::DenseIndex> using EigenVector = framework::EigenVector<T, MajorType, IndexType>; template <typename T, size_t D, int MajorType = Eigen::RowMajor, typename IndexType = Eigen::DenseIndex> using EigenTensor = framework::EigenTensor<T, D, MajorType, IndexType>; using framework::To32BitIndex; template <typename DeviceContext, typename T> class TileKernel : public framework::OpKernel<T> { public: void Compute(const framework::ExecutionContext& context) const override { auto rank = context.Input<Tensor>("X")->dims().size(); PADDLE_ENFORCE_GE( rank, 1, platform::errors::InvalidArgument( "The rank of the input 'x' for tile op must be a positive " "integer, but the value received is %d.", rank)); PADDLE_ENFORCE_LE( rank, MAX_RANK_SUPPORTED, platform::errors::InvalidArgument( "The rank of the input 'x' for tile op " "must be less than or equal to %d, but the value received is %d.", MAX_RANK_SUPPORTED, rank)); auto repeat_times = get_repeat_times(context); int repeat_times_size = repeat_times.size(); PADDLE_ENFORCE_GE( repeat_times_size, 1, platform::errors::InvalidArgument( "The number of elements of the input 'repeat_times' for tile " "op must be positive, but the value received is %d.", repeat_times_size)); 
PADDLE_ENFORCE_LE( repeat_times_size, MAX_RANK_SUPPORTED, platform::errors::InvalidArgument( "The number of elements of the input 'repeat_times' for tile op " "must be less than or equal to %d, but the value received is %d.", MAX_RANK_SUPPORTED, repeat_times_size)); rank = std::max(rank, repeat_times_size); switch (rank) { case 1: Tile<1>(context); break; case 2: Tile<2>(context); break; case 3: Tile<3>(context); break; case 4: Tile<4>(context); break; case 5: Tile<5>(context); break; case 6: Tile<6>(context); break; } } protected: template <int Rank> void Tile(const framework::ExecutionContext& context) const { auto* in0 = context.Input<Tensor>("X"); auto in_dims = in0->dims(); auto repeat_times = get_repeat_times(context); for (size_t i = 0; i < repeat_times.size(); ++i) { PADDLE_ENFORCE_GT( repeat_times[i], 0, platform::errors::InvalidArgument( "All elements of the input 'repeat_times' for tile op must " "be positive integers, but the value received is %d.", repeat_times[i])); } auto vec_in_dims = framework::vectorize<int>(in_dims); if (repeat_times.size() < vec_in_dims.size()) { int diff = vec_in_dims.size() - repeat_times.size(); repeat_times.insert(repeat_times.begin(), diff, 1); } else { int diff = repeat_times.size() - vec_in_dims.size(); vec_in_dims.insert(vec_in_dims.begin(), diff, 1); } PADDLE_ENFORCE_EQ( repeat_times.size(), vec_in_dims.size(), platform::errors::InvalidArgument( "The rank (%d) of the input 'x' and the rank (%d) of the input " "'repeat_times' for tile op must match after promotion.", vec_in_dims.size(), repeat_times.size())); auto* out0 = context.Output<Tensor>("Out"); Eigen::DSizes<Eigen::DenseIndex, Rank> bcast_dims; for (size_t i = 0; i < repeat_times.size(); ++i) { bcast_dims[i] = repeat_times[i]; } framework::DDim new_in_dims = framework::make_ddim(vec_in_dims); framework::DDim out_dims(new_in_dims); for (size_t i = 0; i < repeat_times.size(); ++i) { out_dims[i] *= repeat_times[i]; } out0->Resize(out_dims); auto x = EigenTensor<T, Rank>::From(*in0, new_in_dims); out0->mutable_data<T>(context.GetPlace()); auto y = EigenTensor<T, Rank>::From(*out0, out_dims); auto& place = *context.template device_context<DeviceContext>().eigen_device(); // use 32-bit index to speed up bool use_32bit_index = y.size() < Eigen::NumTraits<int>::highest(); if (use_32bit_index) { EigenBroadcast<std::decay_t<decltype(place)>, T, Rank>::Eval( place, To32BitIndex(y), To32BitIndex(x), bcast_dims); } else { EigenBroadcast<std::decay_t<decltype(place)>, T, Rank>::Eval(place, y, x, bcast_dims); } } }; template <typename DeviceContext, typename T> class TileGradKernel : public framework::OpKernel<T> { public: void Compute(const framework::ExecutionContext& context) const override { auto* x = context.Input<Tensor>("X"); auto repeat_times = get_repeat_times(context); auto x_dims = x->dims(); auto vec_in_dims = framework::vectorize<int>(x_dims); if (repeat_times.size() < vec_in_dims.size()) { int diff = vec_in_dims.size() - repeat_times.size(); repeat_times.insert(repeat_times.begin(), diff, 1); } else { int diff = repeat_times.size() - vec_in_dims.size(); vec_in_dims.insert(vec_in_dims.begin(), diff, 1); } // 1. reshape_dims_vec is the broadcast parameter. // 2. reduce_dims_vec is the dimension parameter to compute gradients. For // each dimension expanded, the gradients should be summed to original // size. 
std::vector<int> reshape_dims_vec; std::vector<int> reduce_dims_vec; for (size_t i = 0; i < repeat_times.size(); ++i) { reduce_dims_vec.push_back(reshape_dims_vec.size()); reshape_dims_vec.push_back(repeat_times[i]); reshape_dims_vec.push_back(vec_in_dims[i]); } int dims = reduce_dims_vec.size(); bool just_copy = true; for (size_t i = 0; i < repeat_times.size(); i++) { if (repeat_times[i] != 1) { just_copy = false; break; } } // no need reduce, just copy if (just_copy) { auto* dout = context.Input<Tensor>(framework::GradVarName("Out")); auto* dx = context.Output<Tensor>(framework::GradVarName("X")); dx->mutable_data<T>(context.GetPlace()); framework::TensorCopy(*dout, context.GetPlace(), context.device_context(), dx); // TensorCopy may change the dims of dx dx->Resize(x_dims); } else { PADDLE_ENFORCE_GE(dims, 1, platform::errors::InvalidArgument( "Th rank of the input 'Out@GRAD' for tile_grad op " " must be greater than or equal to 1, but " "the value received is %d.", dims)); PADDLE_ENFORCE_LE(dims, MAX_RANK_SUPPORTED, platform::errors::InvalidArgument( "The rank of the input 'Out@GRAD' for tile_grad op " "must be less than or equal " "to %d, but the value received is %d.", MAX_RANK_SUPPORTED, dims)); switch (dims) { case 1: TileBackward<1>(context, reshape_dims_vec, reduce_dims_vec); break; case 2: TileBackward<2>(context, reshape_dims_vec, reduce_dims_vec); break; case 3: TileBackward<3>(context, reshape_dims_vec, reduce_dims_vec); break; case 4: TileBackward<4>(context, reshape_dims_vec, reduce_dims_vec); break; case 5: TileBackward<5>(context, reshape_dims_vec, reduce_dims_vec); break; case 6: TileBackward<6>(context, reshape_dims_vec, reduce_dims_vec); break; default: PADDLE_THROW(platform::errors::InvalidArgument( "Only support tensor with rank being between 1 and 6. But " "received tensor's rank = %d.", dims)); } } } protected: template <int Dims> void TileBackward(const framework::ExecutionContext& context, const std::vector<int>& reshape_dims_vec, const std::vector<int>& reduce_dims_vec) const { size_t reshape_size = reshape_dims_vec.size(); size_t reduce_size = reduce_dims_vec.size(); auto* in0 = context.Input<Tensor>(framework::GradVarName("Out")); auto* out0 = context.Output<Tensor>(framework::GradVarName("X")); out0->mutable_data<T>(context.GetPlace()); auto x_grad = EigenVector<T>::Flatten(*out0); Eigen::DSizes<Eigen::DenseIndex, Dims * 2> reshape_dims; for (size_t i = 0; i < reshape_size; ++i) { reshape_dims[i] = reshape_dims_vec[i]; } Eigen::DSizes<Eigen::DenseIndex, Dims> reduce_dims; for (size_t i = 0; i < reduce_size; ++i) { reduce_dims[i] = reduce_dims_vec[i]; } auto out_grad = EigenVector<T>::Flatten(*in0); auto& place = *context.template device_context<DeviceContext>().eigen_device(); EigenBroadcastGrad<std::decay_t<decltype(place)>, T, Dims>::Eval( place, x_grad, out_grad, reduce_dims, reshape_dims); } }; } // namespace operators } // namespace paddle
5,186
345
/* * Copyright 2016 HuntBugs contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package one.util.huntbugs.flow.etype; import java.util.HashSet; import java.util.Set; import com.strobel.assembler.metadata.TypeReference; import one.util.huntbugs.flow.etype.SingleType.What; import one.util.huntbugs.util.Types; import one.util.huntbugs.util.YesNoMaybe; /** * @author shustkost * */ public class AndType extends ComplexType { AndType(Set<SingleType> types) { super(types); } static EType of(Set<SingleType> types) { if(types.isEmpty()) return UNKNOWN; if(types.size() == 1) return types.iterator().next(); return new AndType(types); } @Override public YesNoMaybe is(TypeReference tr, boolean exact) { boolean hasYes = false, hasNo = false; for (EType type : types) { switch (type.is(tr, exact)) { case YES: hasYes = true; break; case NO: hasNo = true; break; default: } } if (hasYes && hasNo) return YesNoMaybe.MAYBE; if (hasYes) return YesNoMaybe.YES; if (hasNo) return YesNoMaybe.NO; return YesNoMaybe.MAYBE; } @Override public YesNoMaybe isArray() { boolean hasYes = false, hasNo = false; for (EType type : types) { switch (type.isArray()) { case YES: hasYes = true; break; case NO: hasNo = true; break; default: } } if (hasYes && hasNo) return YesNoMaybe.MAYBE; if (hasYes) return YesNoMaybe.YES; if (hasNo) return YesNoMaybe.NO; return YesNoMaybe.MAYBE; } @Override public EType shrinkConstraint(TypeReference tr, boolean exact) { Set<SingleType> yes = new HashSet<>(), no = new HashSet<>(); for (SingleType type : types) { switch (type.is(tr, exact)) { case YES: yes.add(type); break; case NO: no.add(type); break; default: } } if (!yes.isEmpty() && !no.isEmpty()) return this; if (!yes.isEmpty()) return of(yes); if (!no.isEmpty()) return of(no); return this; } @Override public EType negate() { Set<SingleType> newTypes = new HashSet<>(); for (SingleType type : types) { EType neg = type.negate(); if (neg instanceof SingleType) newTypes.add((SingleType) neg); else if (neg == UNKNOWN) { return UNKNOWN; } else throw new IllegalStateException("Unexpected type: " + type); } return new OrType(newTypes); } @Override EType reduce() { SingleType result = null; for(SingleType type : types) { if(result == null) result = type; else { if(result.what == What.EXACT) { if(type.what == What.EXACT) return UNKNOWN; continue; } if(type.what == What.EXACT) { result = type; continue; } if(result.what == What.SUBTYPE) { if(type.what == What.SUBTYPE && Types.isInstance(type.tr, result.tr)) result = type; continue; } if(type.what == What.SUBTYPE) { result = type; continue; } return UNKNOWN; } } return result; } @Override EType append(SingleType st) { if(types.contains(st)) return types.size() == 1 ? types.iterator().next() : this; if(types.contains(st.negate())) return UNKNOWN; if(st.what == What.EXACT) { if(types.stream().anyMatch(t -> t.is(st.tr, true) == YesNoMaybe.NO)) return UNKNOWN; return st; } if(types.size() == 1) { SingleType cur = types.iterator().next(); if(cur.what == What.EXACT) { return st.is(cur.tr, true) == YesNoMaybe.NO ? 
UNKNOWN : cur; } if(cur.what == What.SUBTYPE && st.what == What.SUBTYPE) { if(cur.is(st.tr, false) == YesNoMaybe.YES) return cur; if(st.is(cur.tr, false) == YesNoMaybe.YES) return st; } } Set<SingleType> newTypes = new HashSet<>(types); newTypes.add(st); return new AndType(newTypes); } @Override EType appendAny(EType type) { if(type == UNKNOWN) return types.size() == 1 ? types.iterator().next() : this; if(type instanceof SingleType) return append((SingleType) type); if(type instanceof AndType) { AndType result = (AndType) type; for(SingleType t : types) { EType newResult = result.append(t); if(newResult == UNKNOWN) return UNKNOWN; result = (AndType)newResult; } return result; } if(type instanceof ComplexType) { return appendAny(((ComplexType)type).reduce()); } return UNKNOWN; } @Override public String toString() { return toString(" and "); } }
3,295
18,621
<gh_stars>1000+ # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # isort:skip_file from datetime import datetime from importlib.util import find_spec import math from typing import Any, List, Optional import numpy as np from pandas import DataFrame, Series, Timestamp, to_datetime import pytest from superset.exceptions import QueryObjectValidationError from superset.utils import pandas_postprocessing as proc from superset.utils.core import ( DTTM_ALIAS, PostProcessingContributionOrientation, PostProcessingBoxplotWhiskerType, ) from .base_tests import SupersetTestCase from .fixtures.dataframes import ( categories_df, single_metric_df, multiple_metrics_df, lonlat_df, names_df, timeseries_df, prophet_df, timeseries_df2, ) AGGREGATES_SINGLE = {"idx_nulls": {"operator": "sum"}} AGGREGATES_MULTIPLE = { "idx_nulls": {"operator": "sum"}, "asc_idx": {"operator": "mean"}, } def series_to_list(series: Series) -> List[Any]: """ Converts a `Series` to a regular list, and replaces non-numeric values to Nones. :param series: Series to convert :return: list without nan or inf """ return [ None if not isinstance(val, str) and (math.isnan(val) or math.isinf(val)) else val for val in series.tolist() ] def round_floats( floats: List[Optional[float]], precision: int ) -> List[Optional[float]]: """ Round list of floats to certain precision :param floats: floats to round :param precision: intended decimal precision :return: rounded floats """ return [round(val, precision) if val else None for val in floats] class TestPostProcessing(SupersetTestCase): def test_flatten_column_after_pivot(self): """ Test pivot column flattening function """ # single aggregate cases self.assertEqual( proc._flatten_column_after_pivot( aggregates=AGGREGATES_SINGLE, column="idx_nulls", ), "idx_nulls", ) self.assertEqual( proc._flatten_column_after_pivot( aggregates=AGGREGATES_SINGLE, column=1234, ), "1234", ) self.assertEqual( proc._flatten_column_after_pivot( aggregates=AGGREGATES_SINGLE, column=Timestamp("2020-09-29T00:00:00"), ), "2020-09-29 00:00:00", ) self.assertEqual( proc._flatten_column_after_pivot( aggregates=AGGREGATES_SINGLE, column="idx_nulls", ), "idx_nulls", ) self.assertEqual( proc._flatten_column_after_pivot( aggregates=AGGREGATES_SINGLE, column=("idx_nulls", "col1"), ), "col1", ) self.assertEqual( proc._flatten_column_after_pivot( aggregates=AGGREGATES_SINGLE, column=("idx_nulls", "col1", 1234), ), "col1, 1234", ) # Multiple aggregate cases self.assertEqual( proc._flatten_column_after_pivot( aggregates=AGGREGATES_MULTIPLE, column=("idx_nulls", "asc_idx", "col1"), ), "idx_nulls, asc_idx, col1", ) self.assertEqual( proc._flatten_column_after_pivot( aggregates=AGGREGATES_MULTIPLE, column=("idx_nulls", "asc_idx", "col1", 1234), ), "idx_nulls, asc_idx, col1, 1234", ) def test_pivot_without_columns(self): """ 
Make sure pivot without columns returns correct DataFrame """ df = proc.pivot(df=categories_df, index=["name"], aggregates=AGGREGATES_SINGLE,) self.assertListEqual( df.columns.tolist(), ["name", "idx_nulls"], ) self.assertEqual(len(df), 101) self.assertEqual(df.sum()[1], 1050) def test_pivot_with_single_column(self): """ Make sure pivot with single column returns correct DataFrame """ df = proc.pivot( df=categories_df, index=["name"], columns=["category"], aggregates=AGGREGATES_SINGLE, ) self.assertListEqual( df.columns.tolist(), ["name", "cat0", "cat1", "cat2"], ) self.assertEqual(len(df), 101) self.assertEqual(df.sum()[1], 315) df = proc.pivot( df=categories_df, index=["dept"], columns=["category"], aggregates=AGGREGATES_SINGLE, ) self.assertListEqual( df.columns.tolist(), ["dept", "cat0", "cat1", "cat2"], ) self.assertEqual(len(df), 5) def test_pivot_with_multiple_columns(self): """ Make sure pivot with multiple columns returns correct DataFrame """ df = proc.pivot( df=categories_df, index=["name"], columns=["category", "dept"], aggregates=AGGREGATES_SINGLE, ) self.assertEqual(len(df.columns), 1 + 3 * 5) # index + possible permutations def test_pivot_fill_values(self): """ Make sure pivot with fill values returns correct DataFrame """ df = proc.pivot( df=categories_df, index=["name"], columns=["category"], metric_fill_value=1, aggregates={"idx_nulls": {"operator": "sum"}}, ) self.assertEqual(df.sum()[1], 382) def test_pivot_fill_column_values(self): """ Make sure pivot witn null column names returns correct DataFrame """ df_copy = categories_df.copy() df_copy["category"] = None df = proc.pivot( df=df_copy, index=["name"], columns=["category"], aggregates={"idx_nulls": {"operator": "sum"}}, ) assert len(df) == 101 assert df.columns.tolist() == ["name", "<NULL>"] def test_pivot_exceptions(self): """ Make sure pivot raises correct Exceptions """ # Missing index self.assertRaises( TypeError, proc.pivot, df=categories_df, columns=["dept"], aggregates=AGGREGATES_SINGLE, ) # invalid index reference self.assertRaises( QueryObjectValidationError, proc.pivot, df=categories_df, index=["abc"], columns=["dept"], aggregates=AGGREGATES_SINGLE, ) # invalid column reference self.assertRaises( QueryObjectValidationError, proc.pivot, df=categories_df, index=["dept"], columns=["abc"], aggregates=AGGREGATES_SINGLE, ) # invalid aggregate options self.assertRaises( QueryObjectValidationError, proc.pivot, df=categories_df, index=["name"], columns=["category"], aggregates={"idx_nulls": {}}, ) def test_pivot_eliminate_cartesian_product_columns(self): # single metric mock_df = DataFrame( { "dttm": to_datetime(["2019-01-01", "2019-01-01"]), "a": [0, 1], "b": [0, 1], "metric": [9, np.NAN], } ) df = proc.pivot( df=mock_df, index=["dttm"], columns=["a", "b"], aggregates={"metric": {"operator": "mean"}}, drop_missing_columns=False, ) self.assertEqual(list(df.columns), ["dttm", "0, 0", "1, 1"]) self.assertTrue(np.isnan(df["1, 1"][0])) # multiple metrics mock_df = DataFrame( { "dttm": to_datetime(["2019-01-01", "2019-01-01"]), "a": [0, 1], "b": [0, 1], "metric": [9, np.NAN], "metric2": [10, 11], } ) df = proc.pivot( df=mock_df, index=["dttm"], columns=["a", "b"], aggregates={ "metric": {"operator": "mean"}, "metric2": {"operator": "mean"}, }, drop_missing_columns=False, ) self.assertEqual( list(df.columns), ["dttm", "metric, 0, 0", "metric, 1, 1", "metric2, 0, 0", "metric2, 1, 1"], ) self.assertTrue(np.isnan(df["metric, 1, 1"][0])) def test_pivot_without_flatten_columns_and_reset_index(self): df = proc.pivot( 
df=single_metric_df, index=["dttm"], columns=["country"], aggregates={"sum_metric": {"operator": "sum"}}, flatten_columns=False, reset_index=False, ) # metric # country UK US # dttm # 2019-01-01 5 6 # 2019-01-02 7 8 assert df.columns.to_list() == [("sum_metric", "UK"), ("sum_metric", "US")] assert df.index.to_list() == to_datetime(["2019-01-01", "2019-01-02"]).to_list() def test_aggregate(self): aggregates = { "asc sum": {"column": "asc_idx", "operator": "sum"}, "asc q2": { "column": "asc_idx", "operator": "percentile", "options": {"q": 75}, }, "desc q1": { "column": "desc_idx", "operator": "percentile", "options": {"q": 25}, }, } df = proc.aggregate( df=categories_df, groupby=["constant"], aggregates=aggregates ) self.assertListEqual( df.columns.tolist(), ["constant", "asc sum", "asc q2", "desc q1"] ) self.assertEqual(series_to_list(df["asc sum"])[0], 5050) self.assertEqual(series_to_list(df["asc q2"])[0], 75) self.assertEqual(series_to_list(df["desc q1"])[0], 25) def test_sort(self): df = proc.sort(df=categories_df, columns={"category": True, "asc_idx": False}) self.assertEqual(96, series_to_list(df["asc_idx"])[1]) self.assertRaises( QueryObjectValidationError, proc.sort, df=df, columns={"abc": True} ) def test_rolling(self): # sum rolling type post_df = proc.rolling( df=timeseries_df, columns={"y": "y"}, rolling_type="sum", window=2, min_periods=0, ) self.assertListEqual(post_df.columns.tolist(), ["label", "y"]) self.assertListEqual(series_to_list(post_df["y"]), [1.0, 3.0, 5.0, 7.0]) # mean rolling type with alias post_df = proc.rolling( df=timeseries_df, rolling_type="mean", columns={"y": "y_mean"}, window=10, min_periods=0, ) self.assertListEqual(post_df.columns.tolist(), ["label", "y", "y_mean"]) self.assertListEqual(series_to_list(post_df["y_mean"]), [1.0, 1.5, 2.0, 2.5]) # count rolling type post_df = proc.rolling( df=timeseries_df, rolling_type="count", columns={"y": "y"}, window=10, min_periods=0, ) self.assertListEqual(post_df.columns.tolist(), ["label", "y"]) self.assertListEqual(series_to_list(post_df["y"]), [1.0, 2.0, 3.0, 4.0]) # quantile rolling type post_df = proc.rolling( df=timeseries_df, columns={"y": "q1"}, rolling_type="quantile", rolling_type_options={"quantile": 0.25}, window=10, min_periods=0, ) self.assertListEqual(post_df.columns.tolist(), ["label", "y", "q1"]) self.assertListEqual(series_to_list(post_df["q1"]), [1.0, 1.25, 1.5, 1.75]) # incorrect rolling type self.assertRaises( QueryObjectValidationError, proc.rolling, df=timeseries_df, columns={"y": "y"}, rolling_type="abc", window=2, ) # incorrect rolling type options self.assertRaises( QueryObjectValidationError, proc.rolling, df=timeseries_df, columns={"y": "y"}, rolling_type="quantile", rolling_type_options={"abc": 123}, window=2, ) def test_rolling_with_pivot_df_and_single_metric(self): pivot_df = proc.pivot( df=single_metric_df, index=["dttm"], columns=["country"], aggregates={"sum_metric": {"operator": "sum"}}, flatten_columns=False, reset_index=False, ) rolling_df = proc.rolling( df=pivot_df, rolling_type="sum", window=2, min_periods=0, is_pivot_df=True, ) # dttm UK US # 0 2019-01-01 5 6 # 1 2019-01-02 12 14 assert rolling_df["UK"].to_list() == [5.0, 12.0] assert rolling_df["US"].to_list() == [6.0, 14.0] assert ( rolling_df["dttm"].to_list() == to_datetime(["2019-01-01", "2019-01-02",]).to_list() ) rolling_df = proc.rolling( df=pivot_df, rolling_type="sum", window=2, min_periods=2, is_pivot_df=True, ) assert rolling_df.empty is True def test_rolling_with_pivot_df_and_multiple_metrics(self): pivot_df = 
proc.pivot( df=multiple_metrics_df, index=["dttm"], columns=["country"], aggregates={ "sum_metric": {"operator": "sum"}, "count_metric": {"operator": "sum"}, }, flatten_columns=False, reset_index=False, ) rolling_df = proc.rolling( df=pivot_df, rolling_type="sum", window=2, min_periods=0, is_pivot_df=True, ) # dttm count_metric, UK count_metric, US sum_metric, UK sum_metric, US # 0 2019-01-01 1.0 2.0 5.0 6.0 # 1 2019-01-02 4.0 6.0 12.0 14.0 assert rolling_df["count_metric, UK"].to_list() == [1.0, 4.0] assert rolling_df["count_metric, US"].to_list() == [2.0, 6.0] assert rolling_df["sum_metric, UK"].to_list() == [5.0, 12.0] assert rolling_df["sum_metric, US"].to_list() == [6.0, 14.0] assert ( rolling_df["dttm"].to_list() == to_datetime(["2019-01-01", "2019-01-02",]).to_list() ) def test_select(self): # reorder columns post_df = proc.select(df=timeseries_df, columns=["y", "label"]) self.assertListEqual(post_df.columns.tolist(), ["y", "label"]) # one column post_df = proc.select(df=timeseries_df, columns=["label"]) self.assertListEqual(post_df.columns.tolist(), ["label"]) # rename and select one column post_df = proc.select(df=timeseries_df, columns=["y"], rename={"y": "y1"}) self.assertListEqual(post_df.columns.tolist(), ["y1"]) # rename one and leave one unchanged post_df = proc.select(df=timeseries_df, rename={"y": "y1"}) self.assertListEqual(post_df.columns.tolist(), ["label", "y1"]) # drop one column post_df = proc.select(df=timeseries_df, exclude=["label"]) self.assertListEqual(post_df.columns.tolist(), ["y"]) # rename and drop one column post_df = proc.select(df=timeseries_df, rename={"y": "y1"}, exclude=["label"]) self.assertListEqual(post_df.columns.tolist(), ["y1"]) # invalid columns self.assertRaises( QueryObjectValidationError, proc.select, df=timeseries_df, columns=["abc"], rename={"abc": "qwerty"}, ) # select renamed column by new name self.assertRaises( QueryObjectValidationError, proc.select, df=timeseries_df, columns=["label_new"], rename={"label": "label_new"}, ) def test_diff(self): # overwrite column post_df = proc.diff(df=timeseries_df, columns={"y": "y"}) self.assertListEqual(post_df.columns.tolist(), ["label", "y"]) self.assertListEqual(series_to_list(post_df["y"]), [None, 1.0, 1.0, 1.0]) # add column post_df = proc.diff(df=timeseries_df, columns={"y": "y1"}) self.assertListEqual(post_df.columns.tolist(), ["label", "y", "y1"]) self.assertListEqual(series_to_list(post_df["y"]), [1.0, 2.0, 3.0, 4.0]) self.assertListEqual(series_to_list(post_df["y1"]), [None, 1.0, 1.0, 1.0]) # look ahead post_df = proc.diff(df=timeseries_df, columns={"y": "y1"}, periods=-1) self.assertListEqual(series_to_list(post_df["y1"]), [-1.0, -1.0, -1.0, None]) # invalid column reference self.assertRaises( QueryObjectValidationError, proc.diff, df=timeseries_df, columns={"abc": "abc"}, ) # diff by columns post_df = proc.diff(df=timeseries_df2, columns={"y": "y", "z": "z"}, axis=1) self.assertListEqual(post_df.columns.tolist(), ["label", "y", "z"]) self.assertListEqual(series_to_list(post_df["z"]), [0.0, 2.0, 8.0, 6.0]) def test_compare(self): # `difference` comparison post_df = proc.compare( df=timeseries_df2, source_columns=["y"], compare_columns=["z"], compare_type="difference", ) self.assertListEqual( post_df.columns.tolist(), ["label", "y", "z", "difference__y__z",] ) self.assertListEqual( series_to_list(post_df["difference__y__z"]), [0.0, -2.0, -8.0, -6.0], ) # drop original columns post_df = proc.compare( df=timeseries_df2, source_columns=["y"], compare_columns=["z"], compare_type="difference", 
drop_original_columns=True, ) self.assertListEqual(post_df.columns.tolist(), ["label", "difference__y__z",]) # `percentage` comparison post_df = proc.compare( df=timeseries_df2, source_columns=["y"], compare_columns=["z"], compare_type="percentage", ) self.assertListEqual( post_df.columns.tolist(), ["label", "y", "z", "percentage__y__z",] ) self.assertListEqual( series_to_list(post_df["percentage__y__z"]), [0.0, -0.5, -0.8, -0.75], ) # `ratio` comparison post_df = proc.compare( df=timeseries_df2, source_columns=["y"], compare_columns=["z"], compare_type="ratio", ) self.assertListEqual( post_df.columns.tolist(), ["label", "y", "z", "ratio__y__z",] ) self.assertListEqual( series_to_list(post_df["ratio__y__z"]), [1.0, 0.5, 0.2, 0.25], ) def test_cum(self): # create new column (cumsum) post_df = proc.cum(df=timeseries_df, columns={"y": "y2"}, operator="sum",) self.assertListEqual(post_df.columns.tolist(), ["label", "y", "y2"]) self.assertListEqual(series_to_list(post_df["label"]), ["x", "y", "z", "q"]) self.assertListEqual(series_to_list(post_df["y"]), [1.0, 2.0, 3.0, 4.0]) self.assertListEqual(series_to_list(post_df["y2"]), [1.0, 3.0, 6.0, 10.0]) # overwrite column (cumprod) post_df = proc.cum(df=timeseries_df, columns={"y": "y"}, operator="prod",) self.assertListEqual(post_df.columns.tolist(), ["label", "y"]) self.assertListEqual(series_to_list(post_df["y"]), [1.0, 2.0, 6.0, 24.0]) # overwrite column (cummin) post_df = proc.cum(df=timeseries_df, columns={"y": "y"}, operator="min",) self.assertListEqual(post_df.columns.tolist(), ["label", "y"]) self.assertListEqual(series_to_list(post_df["y"]), [1.0, 1.0, 1.0, 1.0]) # invalid operator self.assertRaises( QueryObjectValidationError, proc.cum, df=timeseries_df, columns={"y": "y"}, operator="abc", ) def test_cum_with_pivot_df_and_single_metric(self): pivot_df = proc.pivot( df=single_metric_df, index=["dttm"], columns=["country"], aggregates={"sum_metric": {"operator": "sum"}}, flatten_columns=False, reset_index=False, ) cum_df = proc.cum(df=pivot_df, operator="sum", is_pivot_df=True,) # dttm UK US # 0 2019-01-01 5 6 # 1 2019-01-02 12 14 assert cum_df["UK"].to_list() == [5.0, 12.0] assert cum_df["US"].to_list() == [6.0, 14.0] assert ( cum_df["dttm"].to_list() == to_datetime(["2019-01-01", "2019-01-02",]).to_list() ) def test_cum_with_pivot_df_and_multiple_metrics(self): pivot_df = proc.pivot( df=multiple_metrics_df, index=["dttm"], columns=["country"], aggregates={ "sum_metric": {"operator": "sum"}, "count_metric": {"operator": "sum"}, }, flatten_columns=False, reset_index=False, ) cum_df = proc.cum(df=pivot_df, operator="sum", is_pivot_df=True,) # dttm count_metric, UK count_metric, US sum_metric, UK sum_metric, US # 0 2019-01-01 1 2 5 6 # 1 2019-01-02 4 6 12 14 assert cum_df["count_metric, UK"].to_list() == [1.0, 4.0] assert cum_df["count_metric, US"].to_list() == [2.0, 6.0] assert cum_df["sum_metric, UK"].to_list() == [5.0, 12.0] assert cum_df["sum_metric, US"].to_list() == [6.0, 14.0] assert ( cum_df["dttm"].to_list() == to_datetime(["2019-01-01", "2019-01-02",]).to_list() ) def test_geohash_decode(self): # decode lon/lat from geohash post_df = proc.geohash_decode( df=lonlat_df[["city", "geohash"]], geohash="geohash", latitude="latitude", longitude="longitude", ) self.assertListEqual( sorted(post_df.columns.tolist()), sorted(["city", "geohash", "latitude", "longitude"]), ) self.assertListEqual( round_floats(series_to_list(post_df["longitude"]), 6), round_floats(series_to_list(lonlat_df["longitude"]), 6), ) self.assertListEqual( 
round_floats(series_to_list(post_df["latitude"]), 6), round_floats(series_to_list(lonlat_df["latitude"]), 6), ) def test_geohash_encode(self): # encode lon/lat into geohash post_df = proc.geohash_encode( df=lonlat_df[["city", "latitude", "longitude"]], latitude="latitude", longitude="longitude", geohash="geohash", ) self.assertListEqual( sorted(post_df.columns.tolist()), sorted(["city", "geohash", "latitude", "longitude"]), ) self.assertListEqual( series_to_list(post_df["geohash"]), series_to_list(lonlat_df["geohash"]), ) def test_geodetic_parse(self): # parse geodetic string with altitude into lon/lat/altitude post_df = proc.geodetic_parse( df=lonlat_df[["city", "geodetic"]], geodetic="geodetic", latitude="latitude", longitude="longitude", altitude="altitude", ) self.assertListEqual( sorted(post_df.columns.tolist()), sorted(["city", "geodetic", "latitude", "longitude", "altitude"]), ) self.assertListEqual( series_to_list(post_df["longitude"]), series_to_list(lonlat_df["longitude"]), ) self.assertListEqual( series_to_list(post_df["latitude"]), series_to_list(lonlat_df["latitude"]), ) self.assertListEqual( series_to_list(post_df["altitude"]), series_to_list(lonlat_df["altitude"]), ) # parse geodetic string into lon/lat post_df = proc.geodetic_parse( df=lonlat_df[["city", "geodetic"]], geodetic="geodetic", latitude="latitude", longitude="longitude", ) self.assertListEqual( sorted(post_df.columns.tolist()), sorted(["city", "geodetic", "latitude", "longitude"]), ) self.assertListEqual( series_to_list(post_df["longitude"]), series_to_list(lonlat_df["longitude"]), ) self.assertListEqual( series_to_list(post_df["latitude"]), series_to_list(lonlat_df["latitude"]), ) def test_contribution(self): df = DataFrame( { DTTM_ALIAS: [ datetime(2020, 7, 16, 14, 49), datetime(2020, 7, 16, 14, 50), ], "a": [1, 3], "b": [1, 9], } ) with pytest.raises(QueryObjectValidationError, match="not numeric"): proc.contribution(df, columns=[DTTM_ALIAS]) with pytest.raises(QueryObjectValidationError, match="same length"): proc.contribution(df, columns=["a"], rename_columns=["aa", "bb"]) # cell contribution across row processed_df = proc.contribution( df, orientation=PostProcessingContributionOrientation.ROW, ) self.assertListEqual(processed_df.columns.tolist(), [DTTM_ALIAS, "a", "b"]) self.assertListEqual(processed_df["a"].tolist(), [0.5, 0.25]) self.assertListEqual(processed_df["b"].tolist(), [0.5, 0.75]) # cell contribution across column without temporal column df.pop(DTTM_ALIAS) processed_df = proc.contribution( df, orientation=PostProcessingContributionOrientation.COLUMN ) self.assertListEqual(processed_df.columns.tolist(), ["a", "b"]) self.assertListEqual(processed_df["a"].tolist(), [0.25, 0.75]) self.assertListEqual(processed_df["b"].tolist(), [0.1, 0.9]) # contribution only on selected columns processed_df = proc.contribution( df, orientation=PostProcessingContributionOrientation.COLUMN, columns=["a"], rename_columns=["pct_a"], ) self.assertListEqual(processed_df.columns.tolist(), ["a", "b", "pct_a"]) self.assertListEqual(processed_df["a"].tolist(), [1, 3]) self.assertListEqual(processed_df["b"].tolist(), [1, 9]) self.assertListEqual(processed_df["pct_a"].tolist(), [0.25, 0.75]) def test_prophet_valid(self): pytest.importorskip("prophet") df = proc.prophet( df=prophet_df, time_grain="P1M", periods=3, confidence_interval=0.9 ) columns = {column for column in df.columns} assert columns == { DTTM_ALIAS, "a__yhat", "a__yhat_upper", "a__yhat_lower", "a", "b__yhat", "b__yhat_upper", "b__yhat_lower", "b", } assert 
df[DTTM_ALIAS].iloc[0].to_pydatetime() == datetime(2018, 12, 31) assert df[DTTM_ALIAS].iloc[-1].to_pydatetime() == datetime(2022, 3, 31) assert len(df) == 7 df = proc.prophet( df=prophet_df, time_grain="P1M", periods=5, confidence_interval=0.9 ) assert df[DTTM_ALIAS].iloc[0].to_pydatetime() == datetime(2018, 12, 31) assert df[DTTM_ALIAS].iloc[-1].to_pydatetime() == datetime(2022, 5, 31) assert len(df) == 9 def test_prophet_valid_zero_periods(self): pytest.importorskip("prophet") df = proc.prophet( df=prophet_df, time_grain="P1M", periods=0, confidence_interval=0.9 ) columns = {column for column in df.columns} assert columns == { DTTM_ALIAS, "a__yhat", "a__yhat_upper", "a__yhat_lower", "a", "b__yhat", "b__yhat_upper", "b__yhat_lower", "b", } assert df[DTTM_ALIAS].iloc[0].to_pydatetime() == datetime(2018, 12, 31) assert df[DTTM_ALIAS].iloc[-1].to_pydatetime() == datetime(2021, 12, 31) assert len(df) == 4 def test_prophet_import(self): prophet = find_spec("prophet") if prophet is None: with pytest.raises(QueryObjectValidationError): proc.prophet( df=prophet_df, time_grain="P1M", periods=3, confidence_interval=0.9 ) def test_prophet_missing_temporal_column(self): df = prophet_df.drop(DTTM_ALIAS, axis=1) self.assertRaises( QueryObjectValidationError, proc.prophet, df=df, time_grain="P1M", periods=3, confidence_interval=0.9, ) def test_prophet_incorrect_confidence_interval(self): self.assertRaises( QueryObjectValidationError, proc.prophet, df=prophet_df, time_grain="P1M", periods=3, confidence_interval=0.0, ) self.assertRaises( QueryObjectValidationError, proc.prophet, df=prophet_df, time_grain="P1M", periods=3, confidence_interval=1.0, ) def test_prophet_incorrect_periods(self): self.assertRaises( QueryObjectValidationError, proc.prophet, df=prophet_df, time_grain="P1M", periods=-1, confidence_interval=0.8, ) def test_prophet_incorrect_time_grain(self): self.assertRaises( QueryObjectValidationError, proc.prophet, df=prophet_df, time_grain="yearly", periods=10, confidence_interval=0.8, ) def test_boxplot_tukey(self): df = proc.boxplot( df=names_df, groupby=["region"], whisker_type=PostProcessingBoxplotWhiskerType.TUKEY, metrics=["cars"], ) columns = {column for column in df.columns} assert columns == { "cars__mean", "cars__median", "cars__q1", "cars__q3", "cars__max", "cars__min", "cars__count", "cars__outliers", "region", } assert len(df) == 4 def test_boxplot_min_max(self): df = proc.boxplot( df=names_df, groupby=["region"], whisker_type=PostProcessingBoxplotWhiskerType.MINMAX, metrics=["cars"], ) columns = {column for column in df.columns} assert columns == { "cars__mean", "cars__median", "cars__q1", "cars__q3", "cars__max", "cars__min", "cars__count", "cars__outliers", "region", } assert len(df) == 4 def test_boxplot_percentile(self): df = proc.boxplot( df=names_df, groupby=["region"], whisker_type=PostProcessingBoxplotWhiskerType.PERCENTILE, metrics=["cars"], percentiles=[1, 99], ) columns = {column for column in df.columns} assert columns == { "cars__mean", "cars__median", "cars__q1", "cars__q3", "cars__max", "cars__min", "cars__count", "cars__outliers", "region", } assert len(df) == 4 def test_boxplot_percentile_incorrect_params(self): with pytest.raises(QueryObjectValidationError): proc.boxplot( df=names_df, groupby=["region"], whisker_type=PostProcessingBoxplotWhiskerType.PERCENTILE, metrics=["cars"], ) with pytest.raises(QueryObjectValidationError): proc.boxplot( df=names_df, groupby=["region"], whisker_type=PostProcessingBoxplotWhiskerType.PERCENTILE, metrics=["cars"], 
percentiles=[10], ) with pytest.raises(QueryObjectValidationError): proc.boxplot( df=names_df, groupby=["region"], whisker_type=PostProcessingBoxplotWhiskerType.PERCENTILE, metrics=["cars"], percentiles=[90, 10], ) with pytest.raises(QueryObjectValidationError): proc.boxplot( df=names_df, groupby=["region"], whisker_type=PostProcessingBoxplotWhiskerType.PERCENTILE, metrics=["cars"], percentiles=[10, 90, 10], ) def test_resample(self): df = timeseries_df.copy() df.index.name = "time_column" df.reset_index(inplace=True) post_df = proc.resample( df=df, rule="1D", method="ffill", time_column="time_column", ) self.assertListEqual( post_df["label"].tolist(), ["x", "y", "y", "y", "z", "z", "q"] ) self.assertListEqual(post_df["y"].tolist(), [1.0, 2.0, 2.0, 2.0, 3.0, 3.0, 4.0]) post_df = proc.resample( df=df, rule="1D", method="asfreq", time_column="time_column", fill_value=0, ) self.assertListEqual(post_df["label"].tolist(), ["x", "y", 0, 0, "z", 0, "q"]) self.assertListEqual(post_df["y"].tolist(), [1.0, 2.0, 0, 0, 3.0, 0, 4.0])
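

# ---------------------------------------------------------------------------
# A minimal standalone sketch, not part of the test suite above: the arithmetic
# behind the `difference`/`percentage`/`ratio` comparisons asserted in
# test_compare, reproduced with plain pandas. The frame below is a stand-in
# chosen to match the expected lists; the real `timeseries_df2` fixture is
# defined elsewhere in this module.
def _compare_arithmetic_sketch():
    import pandas as pd

    df = pd.DataFrame({"y": [1, 2, 2, 2], "z": [1, 4, 10, 8]})
    df["difference__y__z"] = df["y"] - df["z"]              # [0, -2, -8, -6]
    df["percentage__y__z"] = (df["y"] - df["z"]) / df["z"]  # [0.0, -0.5, -0.8, -0.75]
    df["ratio__y__z"] = df["y"] / df["z"]                   # [1.0, 0.5, 0.2, 0.25]
    return df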
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import os

import blink.utils as utils
import blink.ner as NER
import blink.candidate_generation as CG
import blink.candidate_data_fetcher as CDF
import blink.reranker as R

import argparse
import shutil


def main(parameters):
    print("Parameters:", parameters)

    # Read data
    sentences = utils.read_sentences_from_file(
        parameters["path_to_input_file"],
        one_sentence_per_line=parameters["one_sentence_per_line"],
    )

    # Identify mentions
    ner_model = NER.get_model(parameters)
    ner_output_data = ner_model.predict(sentences)
    sentences = ner_output_data["sentences"]
    mentions = ner_output_data["mentions"]

    output_folder_path = parameters["output_folder_path"]

    if (
        (output_folder_path is not None)
        and os.path.exists(output_folder_path)
        and os.listdir(output_folder_path)
    ):
        print(
            "The given output directory ({}) already exists and is not empty.".format(
                output_folder_path
            )
        )
        answer = input("Would you like to empty the existing directory? [Y/N]\n")
        if answer.strip() == "Y":
            print("Deleting {}...".format(output_folder_path))
            shutil.rmtree(output_folder_path)
        else:
            raise ValueError(
                "Output directory ({}) already exists and is not empty.".format(
                    output_folder_path
                )
            )

    if output_folder_path is not None:
        utils.write_dicts_as_json_per_line(
            sentences, utils.get_sentences_txt_file_path(output_folder_path)
        )
        utils.write_dicts_as_json_per_line(
            mentions, utils.get_mentions_txt_file_path(output_folder_path)
        )

    # Generate candidates and get the data that describes the candidates
    candidate_generator = CG.get_model(parameters)
    candidate_generator.process_mentions_for_candidate_generator(
        sentences=sentences, mentions=mentions
    )

    for mention in mentions:
        mention["candidates"] = candidate_generator.get_candidates(mention)
        if parameters["consider_additional_datafetcher"]:
            data_fetcher = CDF.get_model(parameters)
            for candidate in mention["candidates"]:
                data_fetcher.get_data_for_entity(candidate)

    if output_folder_path is not None:
        utils.write_dicts_as_json_per_line(
            mentions, utils.get_mentions_txt_file_path(output_folder_path)
        )

    # Reranking
    reranking_model = R.get_model(parameters)
    reranking_model.rerank(mentions, sentences)

    if output_folder_path is not None:
        utils.write_dicts_as_json_per_line(
            mentions, utils.get_mentions_txt_file_path(output_folder_path)
        )
        utils.write_end2end_pickle_output(sentences, mentions, output_folder_path)
        utils.present_annotated_sentences(
            sentences,
            mentions,
            utils.get_end2end_pretty_output_file_path(output_folder_path),
        )

    # Showcase results
    utils.present_annotated_sentences(sentences, mentions)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    # Input data
    parser.add_argument(
        "--path_to_input_file",
        "--i",
        dest="path_to_input_file",
        type=str,
        required=True,
    )
    parser.add_argument(
        "--one_sentence_per_line",
        action="store_true",
        help="Set if the input file has one sentence per line",
    )

    # Candidate generation
    parser.add_argument(
        "--solr_address",
        default="http://localhost:8983/solr/wikipedia",
        type=str,
        help="The address of the solr index.",
    )
    parser.add_argument(
        "--query",
        type=str,
        default='title:( {} ) OR aliases:" {} " OR sent_desc_1:( {} )^0.5',
        help="The query template, following the argument convention of str.format",
    )
    parser.add_argument(
        "--keys",
        type=str,
        default="text,text,context",
        help="The comma separated list of keys to be fed to str.format with the query as the formatting string.",
    )
    parser.add_argument(
        "--boosting",
        default="log(sum(num_incoming_links,1))",
        type=str,
        help="The boosting expression applied to the solr query.",
    )
    parser.add_argument(
        "--raw_solr_fields",
        action="store_true",
        help="Whether to skip escaping the special characters in the solr queries.",
    )

    # Candidate descriptions and additional data
    parser.add_argument(
        "--consider_additional_datafetcher",
        action="store_true",
        help="Whether to include some additional data for the candidates using a datafetcher.",
    )
    parser.add_argument(
        "--path_to_candidate_data_dict",
        default="data/KB_data/title2enriched_parsed_obj_plus.p",
        type=str,
        help="The path to the data used by the data fetcher (the default path points to the wikipedia data).",
    )

    # Reranking
    parser.add_argument(
        "--path_to_model",
        "--m",
        dest="path_to_model",
        type=str,
        required=True,
        help="The full path to the model.",
    )
    parser.add_argument(
        "--max_seq_length",
        default=512,
        type=int,
        help="The maximum total input sequence length after WordPiece tokenization. \n"
        "Sequences longer than this will be truncated, and sequences shorter \n"
        "than this will be padded.",
    )
    parser.add_argument(
        "--evaluation_batch_size",
        default=1,
        type=int,
        help="Total batch size for evaluation.",
    )
    parser.add_argument(
        "--top_k",
        type=int,
        default=80,
        help="The number of candidates retrieved by the candidate generator and considered by the reranker",
    )
    parser.add_argument(
        "--no_cuda", action="store_true", help="Whether not to use CUDA when available"
    )
    parser.add_argument(
        "--lowercase_flag",
        action="store_true",
        help="Whether to lower case the input text. True for uncased models, False for cased models.",
    )
    parser.add_argument(
        "--context_key",
        default="tagged_context",
        type=str,
        help="The field that contains the mention context.",
    )
    parser.add_argument(
        "--dataparallel_bert",
        action="store_true",
        help="Whether to distribute the candidate generation process.",
    )
    parser.add_argument(
        "--silent", action="store_true", help="Whether to print progress bars."
    )

    # Output
    parser.add_argument(
        "--output_folder_path",
        "--o",
        dest="output_folder_path",
        default=None,
        type=str,
        help="A path to the folder where the mentions and sentences are to be dumped. If it is not given, the results will not be saved.",
    )

    args = parser.parse_args()

    args.rows = args.top_k
    parameters = args.__dict__

    main(parameters)
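

# A standalone sketch, not part of the BLINK pipeline above: how the --query
# template and the comma-separated --keys list combine. Each key is assumed to
# select a field from a mention dict (the exact field names used by the real
# candidate generator may differ); the selected values are substituted into
# the query template via str.format.
def _solr_query_sketch():
    mention = {"text": "Obama", "context": "president of the United States"}
    template = 'title:( {} ) OR aliases:" {} " OR sent_desc_1:( {} )^0.5'
    keys = "text,text,context".split(",")
    return template.format(*(mention[k] for k in keys))
    # -> title:( Obama ) OR aliases:" Obama " OR sent_desc_1:( president of the United States )^0.5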
#include "inline-static-member-var.h"

int *WithInlineStaticMember::getStaticMemberAddress() { return &staticMember; }

int WithInlineStaticMember::getStaticMemberFromCxx() { return staticMember; }

void WithInlineStaticMember::setStaticMemberFromCxx(int newVal) {
  staticMember = newVal;
}
{
  "main": "dist/keystone-ui-icons-icons-BatteryChargingIcon.cjs.js",
  "module": "dist/keystone-ui-icons-icons-BatteryChargingIcon.esm.js"
}
################################################## # Copyright (c) <NAME> [GitHub D-X-Y], 2019 # ##################################################################################################### # modified from https://github.com/pytorch/examples/blob/master/reinforcement_learning/reinforce.py # ##################################################################################################### import os, sys, time, glob, random, argparse import numpy as np, collections from copy import deepcopy from pathlib import Path import torch import torch.nn as nn from torch.distributions import Categorical lib_dir = (Path(__file__).parent / '..' / '..' / 'lib').resolve() if str(lib_dir) not in sys.path: sys.path.insert(0, str(lib_dir)) from config_utils import load_config, dict2config, configure2str from datasets import get_datasets, SearchDataset from procedures import prepare_seed, prepare_logger, save_checkpoint, copy_checkpoint, get_optim_scheduler from utils import get_model_infos, obtain_accuracy from log_utils import AverageMeter, time_string, convert_secs2time from nas_102_api import NASBench102API as API from models import CellStructure, get_search_spaces from R_EA import train_and_eval class Policy(nn.Module): def __init__(self, max_nodes, search_space): super(Policy, self).__init__() self.max_nodes = max_nodes self.search_space = deepcopy(search_space) self.edge2index = {} for i in range(1, max_nodes): for j in range(i): node_str = '{:}<-{:}'.format(i, j) self.edge2index[ node_str ] = len(self.edge2index) self.arch_parameters = nn.Parameter( 1e-3*torch.randn(len(self.edge2index), len(search_space)) ) def generate_arch(self, actions): genotypes = [] for i in range(1, self.max_nodes): xlist = [] for j in range(i): node_str = '{:}<-{:}'.format(i, j) op_name = self.search_space[ actions[ self.edge2index[ node_str ] ] ] xlist.append((op_name, j)) genotypes.append( tuple(xlist) ) return CellStructure( genotypes ) def genotype(self): genotypes = [] for i in range(1, self.max_nodes): xlist = [] for j in range(i): node_str = '{:}<-{:}'.format(i, j) with torch.no_grad(): weights = self.arch_parameters[ self.edge2index[node_str] ] op_name = self.search_space[ weights.argmax().item() ] xlist.append((op_name, j)) genotypes.append( tuple(xlist) ) return CellStructure( genotypes ) def forward(self): alphas = nn.functional.softmax(self.arch_parameters, dim=-1) return alphas class ExponentialMovingAverage(object): """Class that maintains an exponential moving average.""" def __init__(self, momentum): self._numerator = 0 self._denominator = 0 self._momentum = momentum def update(self, value): self._numerator = self._momentum * self._numerator + (1 - self._momentum) * value self._denominator = self._momentum * self._denominator + (1 - self._momentum) def value(self): """Return the current value of the moving average""" return self._numerator / self._denominator def select_action(policy): probs = policy() m = Categorical(probs) action = m.sample() #policy.saved_log_probs.append(m.log_prob(action)) return m.log_prob(action), action.cpu().tolist() def main(xargs, nas_bench): assert torch.cuda.is_available(), 'CUDA is not available.' 
torch.backends.cudnn.enabled = True torch.backends.cudnn.benchmark = False torch.backends.cudnn.deterministic = True torch.set_num_threads( xargs.workers ) prepare_seed(xargs.rand_seed) logger = prepare_logger(args) assert xargs.dataset == 'cifar10', 'currently only support CIFAR-10' if xargs.data_path is not None: train_data, valid_data, xshape, class_num = get_datasets(xargs.dataset, xargs.data_path, -1) split_Fpath = 'configs/nas-benchmark/cifar-split.txt' cifar_split = load_config(split_Fpath, None, None) train_split, valid_split = cifar_split.train, cifar_split.valid logger.log('Load split file from {:}'.format(split_Fpath)) config_path = 'configs/nas-benchmark/algos/R-EA.config' config = load_config(config_path, {'class_num': class_num, 'xshape': xshape}, logger) # To split data train_data_v2 = deepcopy(train_data) train_data_v2.transform = valid_data.transform valid_data = train_data_v2 search_data = SearchDataset(xargs.dataset, train_data, train_split, valid_split) # data loader train_loader = torch.utils.data.DataLoader(train_data, batch_size=config.batch_size, sampler=torch.utils.data.sampler.SubsetRandomSampler(train_split) , num_workers=xargs.workers, pin_memory=True) valid_loader = torch.utils.data.DataLoader(valid_data, batch_size=config.batch_size, sampler=torch.utils.data.sampler.SubsetRandomSampler(valid_split), num_workers=xargs.workers, pin_memory=True) logger.log('||||||| {:10s} ||||||| Train-Loader-Num={:}, Valid-Loader-Num={:}, batch size={:}'.format(xargs.dataset, len(train_loader), len(valid_loader), config.batch_size)) logger.log('||||||| {:10s} ||||||| Config={:}'.format(xargs.dataset, config)) extra_info = {'config': config, 'train_loader': train_loader, 'valid_loader': valid_loader} else: config_path = 'configs/nas-benchmark/algos/R-EA.config' config = load_config(config_path, None, logger) extra_info = {'config': config, 'train_loader': None, 'valid_loader': None} logger.log('||||||| {:10s} ||||||| Config={:}'.format(xargs.dataset, config)) search_space = get_search_spaces('cell', xargs.search_space_name) policy = Policy(xargs.max_nodes, search_space) optimizer = torch.optim.Adam(policy.parameters(), lr=xargs.learning_rate) eps = np.finfo(np.float32).eps.item() baseline = ExponentialMovingAverage(xargs.EMA_momentum) logger.log('policy : {:}'.format(policy)) logger.log('optimizer : {:}'.format(optimizer)) logger.log('eps : {:}'.format(eps)) # nas dataset load logger.log('{:} use nas_bench : {:}'.format(time_string(), nas_bench)) # REINFORCE # attempts = 0 x_start_time = time.time() logger.log('Will start searching with time budget of {:} s.'.format(xargs.time_budget)) total_steps, total_costs = 0, 0 #for istep in range(xargs.RL_steps): while total_costs < xargs.time_budget: start_time = time.time() log_prob, action = select_action( policy ) arch = policy.generate_arch( action ) reward, cost_time = train_and_eval(arch, nas_bench, extra_info) # accumulate time if total_costs + cost_time < xargs.time_budget: total_costs += cost_time else: break baseline.update(reward) # calculate loss policy_loss = ( -log_prob * (reward - baseline.value()) ).sum() optimizer.zero_grad() policy_loss.backward() optimizer.step() # accumulate time total_costs += time.time() - start_time total_steps += 1 logger.log('step [{:3d}] : average-reward={:.3f} : policy_loss={:.4f} : {:}'.format(total_steps, baseline.value(), policy_loss.item(), policy.genotype())) #logger.log('----> {:}'.format(policy.arch_parameters)) #logger.log('') best_arch = policy.genotype() logger.log('REINFORCE finish 
with {:} steps and {:.1f} s (real cost={:.3f}).'.format(total_steps, total_costs, time.time()-x_start_time))
  info = nas_bench.query_by_arch( best_arch )
  if info is None: logger.log('Did not find this architecture : {:}.'.format(best_arch))
  else: logger.log('{:}'.format(info))
  logger.log('-'*100)
  logger.close()
  return logger.log_dir, nas_bench.query_index_by_arch( best_arch )


if __name__ == '__main__':
  parser = argparse.ArgumentParser("REINFORCE")
  parser.add_argument('--data_path', type=str, help='Path to dataset')
  parser.add_argument('--dataset', type=str, choices=['cifar10', 'cifar100', 'ImageNet16-120'], help='Choose between Cifar10/100 and ImageNet-16.')
  # channels and number-of-cells
  parser.add_argument('--search_space_name', type=str, help='The search space name.')
  parser.add_argument('--max_nodes', type=int, help='The maximum number of nodes.')
  parser.add_argument('--channel', type=int, help='The number of channels.')
  parser.add_argument('--num_cells', type=int, help='The number of cells in one stage.')
  parser.add_argument('--learning_rate', type=float, help='The learning rate for REINFORCE.')
  #parser.add_argument('--RL_steps', type=int, help='The steps for REINFORCE.')
  parser.add_argument('--EMA_momentum', type=float, help='The momentum value for EMA.')
  parser.add_argument('--time_budget', type=int, help='The total time budget for searching (in seconds).')
  # log
  parser.add_argument('--workers', type=int, default=2, help='number of data loading workers (default: 2)')
  parser.add_argument('--save_dir', type=str, help='Folder to save checkpoints and log.')
  parser.add_argument('--arch_nas_dataset', type=str, help='The path to load the architecture dataset (tiny-nas-benchmark).')
  parser.add_argument('--print_freq', type=int, help='print frequency (default: 200)')
  parser.add_argument('--rand_seed', type=int, default=-1, help='manual seed')
  args = parser.parse_args()
  #if args.rand_seed is None or args.rand_seed < 0: args.rand_seed = random.randint(1, 100000)
  if args.arch_nas_dataset is None or not os.path.isfile(args.arch_nas_dataset):
    nas_bench = None
  else:
    print ('{:} build NAS-Benchmark-API from {:}'.format(time_string(), args.arch_nas_dataset))
    nas_bench = API(args.arch_nas_dataset)
  if args.rand_seed < 0:
    save_dir, all_indexes, num = None, [], 500
    for i in range(num):
      print ('{:} : {:03d}/{:03d}'.format(time_string(), i, num))
      args.rand_seed = random.randint(1, 100000)
      save_dir, index = main(args, nas_bench)
      all_indexes.append( index )
    torch.save(all_indexes, save_dir / 'results.pth')
  else:
    main(args, nas_bench)
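

# A minimal standalone sketch, not from this repository: the REINFORCE update in
# main() above, reduced to a single categorical policy and a toy reward so the
# EMA-baseline/loss bookkeeping is visible in isolation. The reward function is
# a stand-in; the real reward comes from train_and_eval on the NAS benchmark.
def _reinforce_sketch(steps=3):
  import torch
  from torch.distributions import Categorical

  logits = torch.nn.Parameter(1e-3 * torch.randn(5))  # plays the role of Policy.arch_parameters
  optimizer = torch.optim.Adam([logits], lr=0.01)
  numerator, denominator, momentum = 0.0, 0.0, 0.9    # ExponentialMovingAverage state

  for _ in range(steps):
    dist = Categorical(logits=logits)
    action = dist.sample()
    reward = float(action.item()) / 4.0               # toy reward: prefer larger indices
    numerator = momentum * numerator + (1 - momentum) * reward
    denominator = momentum * denominator + (1 - momentum)
    baseline = numerator / denominator
    policy_loss = -dist.log_prob(action) * (reward - baseline)
    optimizer.zero_grad()
    policy_loss.backward()
    optimizer.step()
  return logits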
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.automanage.generated; import com.azure.core.util.Context; /** Samples for ConfigurationProfilesVersions Delete. */ public final class ConfigurationProfilesVersionsDeleteSamples { /* * x-ms-original-file: specification/automanage/resource-manager/Microsoft.Automanage/preview/2021-04-30-preview/examples/deleteConfigurationProfileVersion.json */ /** * Sample code: Delete a configuration profile version. * * @param manager Entry point to AutomanageManager. */ public static void deleteAConfigurationProfileVersion( com.azure.resourcemanager.automanage.AutomanageManager manager) { manager .configurationProfilesVersions() .deleteWithResponse("rg", "customConfigurationProfile", "version1", Context.NONE); } }
/* * * Copyright 2015-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * */ package springfox.documentation.swagger.readers.operation; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.Extension; import io.swagger.annotations.ExtensionProperty; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.core.annotation.Order; import org.springframework.stereotype.Component; import springfox.documentation.service.ObjectVendorExtension; import springfox.documentation.service.StringVendorExtension; import springfox.documentation.service.VendorExtension; import springfox.documentation.spi.DocumentationType; import springfox.documentation.spi.service.OperationBuilderPlugin; import springfox.documentation.spi.service.contexts.OperationContext; import springfox.documentation.swagger.common.SwaggerPluginSupport; import java.util.List; import java.util.Optional; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Stream; import static java.util.Optional.*; import static java.util.stream.Collectors.*; import static org.springframework.util.StringUtils.*; @Component @Order(SwaggerPluginSupport.SWAGGER_PLUGIN_ORDER) public class VendorExtensionsReader implements OperationBuilderPlugin { private static final Logger LOG = LoggerFactory.getLogger(VendorExtensionsReader.class); @Override public void apply(OperationContext context) { Optional<ApiOperation> apiOperation = context.findAnnotation(ApiOperation.class); if (apiOperation.isPresent()) { Extension[] extensionsAnnotations = apiOperation.get().extensions(); List<VendorExtension> extensions = readExtensions(extensionsAnnotations); LOG.debug("Extension count {} for method {}", extensions.size(), context.getName()); context.operationBuilder().extensions(extensions); } } private List<VendorExtension> readExtensions(Extension[] vendorAnnotations) { return Stream.of(vendorAnnotations) .map(toVendorExtension()).collect(toList()); } private Function<Extension, VendorExtension> toVendorExtension() { return input -> ofNullable(input.name()).filter(((Predicate<String>) String::isEmpty).negate()) .map(propertyExtension(input)) .orElse(objectExtension(input)); } private VendorExtension objectExtension(Extension each) { ObjectVendorExtension extension = new ObjectVendorExtension(ensurePrefixed(ofNullable(each.name()).orElse(""))); for (ExtensionProperty property : each.properties()) { if (!isEmpty(property.name()) && !isEmpty(property.value())) { extension.addProperty(new StringVendorExtension(property.name(), property.value())); } } return extension; } private Function<String, VendorExtension> propertyExtension(final Extension annotation) { return input -> { ObjectVendorExtension extension = new ObjectVendorExtension(ensurePrefixed(input)); for (ExtensionProperty each : annotation.properties()) { extension.addProperty(new StringVendorExtension(each.name(), each.value())); } return extension; }; } private String ensurePrefixed(String name) { if 
(!isEmpty(name) && !name.startsWith("x-")) { return "x-" + name; } return name; } @Override public boolean supports(DocumentationType delimiter) { return SwaggerPluginSupport.pluginDoesApply(delimiter); } }
#include "surf.h"
package test;

import java.lang.String;

public class Integer {
    public Integer(String s) {}

    public static Integer valueOf(String value) {
        return new Integer(value);
    }
}

class Test {
    public static void test() {
        Integer.valueOf("1");
        test.Integer.valueOf("1");
        java.lang.Integer.valueOf("1");
    }
}
package com.ctrip.framework.xpipe.redis.utils;

import com.alibaba.arthas.deps.org.slf4j.Logger;
import com.alibaba.arthas.deps.org.slf4j.LoggerFactory;
import com.ctrip.framework.xpipe.redis.proxy.ProxyInetSocketAddress;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

public class ConnectionUtil {

    private static final Logger logger = LoggerFactory.getLogger(ConnectionUtil.class);

    public static Map<SocketChannel, Lock> socketChannelMap = new ConcurrentHashMap<>();

    public static InetSocketAddress getAddress(Object o, InetSocketAddress socketAddress) {
        if (ProxyUtil.getInstance().needProxy(socketAddress)) {
            InetSocketAddress proxy = ProxyUtil.getInstance().getProxyAddress(o, socketAddress);
            logger.info("[Destination -> Proxy]: {} -> {}", socketAddress, proxy);
            return proxy;
        } else {
            return socketAddress;
        }
    }

    public static SocketAddress getAddress(Object o, SocketAddress socketAddress) {
        return getAddress(o, (InetSocketAddress) socketAddress);
    }

    public static SocketAddress removeAddress(Object o) {
        logger.info("[SocketAddress] removed for {}", o);
        return ProxyUtil.getInstance().removeProxyAddress(o);
    }

    public static void connectToProxy(Socket socket, InetSocketAddress address, int timeout) throws IOException {
        try {
            socket.connect(address, timeout);
            ((ProxyInetSocketAddress) address).sick = false;
        } catch (IOException e) {
            logger.info("address {} {}", address, e);
            ((ProxyInetSocketAddress) address).sick = true;
            throw e;
        }
        byte[] bytes = ProxyUtil.getInstance().getProxyConnectProtocol(socket);
        socket.getOutputStream().write(bytes);
        socket.getOutputStream().flush();
        logger.info("[Connect] to {} -> {} with protocol {}", socket.getLocalSocketAddress(), address, new String(bytes));
    }

    public static boolean connectToProxy(SocketChannel socketChannel, SocketAddress address) throws IOException {
        socketChannelMap.put(socketChannel, new ReentrantLock());
        logger.info("[Connect] to proxy {} through Netty SocketChannel", address);
        try {
            boolean result = socketChannel.connect(address);
            ((ProxyInetSocketAddress) address).sick = !result;
            return result;
        } catch (IOException exception) {
            logger.info("address {} {}", address, exception);
            ((ProxyInetSocketAddress) address).sick = true;
            throw exception;
        }
    }

    public static String getString(SocketAddress address) {
        return address.toString();
    }

    /**
     * send protocol in first write
     * @param socketChannel
     * @throws IOException
     */
    public static void sendProtocolToProxy(SocketChannel socketChannel) throws IOException {
        Lock lock = socketChannelMap.get(socketChannel);
        if (lock == null) {
            return;
        }
        try {
            lock.lock();
            Lock l = socketChannelMap.get(socketChannel);
            if (l != null) {
                socketChannelMap.remove(socketChannel);
                ByteBuffer byteBuffer = ByteBuffer.allocate(512);
                byte[] bytes = ProxyUtil.getInstance().getProxyConnectProtocol(socketChannel);
                byteBuffer.put(bytes);
                byteBuffer.flip();
                socketChannel.write(byteBuffer);
                byteBuffer.clear();
                logger.info("[Proxy] sends protocol {} to {} -> {}", new String(bytes), socketChannel.getLocalAddress(), socketChannel.getRemoteAddress());
            }
        } finally {
            lock.unlock();
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cxf.sts; import java.security.Principal; /** * This interface defines a pluggable way of mapping an identity from a source realm to a target * realm. */ public interface IdentityMapper { /** * Map a principal in the source realm to the target realm * @param sourceRealm the source realm of the Principal * @param sourcePrincipal the principal in the source realm * @param targetRealm the target realm of the Principal * @return the principal in the target realm */ Principal mapPrincipal(String sourceRealm, Principal sourcePrincipal, String targetRealm); }
/* $Id$ $Revision$ */ /* vim:set shiftwidth=4 ts=8: */ /************************************************************************* * Copyright (c) 2011 AT&T Intellectual Property * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: See CVS logs. Details at http://www.graphviz.org/ *************************************************************************/ /* * Written by <NAME> * Updated by <NAME> * Adapted to gvToolTred(g) by <NAME> */ /* * performs an inplace transitive reduction on a graph */ #include "config.h" #include <stdio.h> #include "cgraph.h" #include "gvc.h" typedef struct { Agrec_t h; int mark; } Agmarknodeinfo_t; #define MARK(n) (((Agmarknodeinfo_t*)(n->base.data))->mark) #define agrootof(n) ((n)->root) static int dfs(Agnode_t * n, Agedge_t * link, int warn) { Agedge_t *e; Agedge_t *f; Agraph_t *g = agrootof(n); MARK(n) = 1; for (e = agfstin(g, n); e; e = f) { f = agnxtin(g, e); if (e == link) continue; if (MARK(agtail(e))) agdelete(g, e); } for (e = agfstout(g, n); e; e = agnxtout(g, e)) { if (MARK(aghead(e))) { if (!warn) { warn++; fprintf(stderr, "warning: %s has cycle(s), transitive reduction not unique\n", agnameof(g)); fprintf(stderr, "cycle involves edge %s -> %s\n", agnameof(agtail(e)), agnameof(aghead(e))); } } else warn = dfs(aghead(e), AGOUT2IN(e), warn); } MARK(n) = 0; return warn; } int gvToolTred(Agraph_t * g) { Agnode_t *n; int warn = 0; if (agisdirected(g)) { aginit(g, AGNODE, "info", sizeof(Agmarknodeinfo_t), TRUE); for (n = agfstnode(g); n; n = agnxtnode(g, n)) { warn = dfs(n, NULL, warn); } agclean(g, AGNODE, "info"); } else { fprintf(stderr, "warning: %s is not a directed graph, not attempting tred\n", agnameof(g)); } return 0; // FIXME - return proper errors }
{"nom":"Saint-Pierre-dels-Forcats","dpt":"Pyrénées-Orientales","inscrits":221,"abs":35,"votants":186,"blancs":20,"nuls":18,"exp":148,"res":[{"panneau":"1","voix":81},{"panneau":"2","voix":67}]}
#using <System.Data.dll>
#using <System.Windows.Forms.dll>
#using <System.dll>
#using <System.Drawing.dll>

using namespace System;
using namespace System::Drawing;
using namespace System::Collections;
using namespace System::ComponentModel;
using namespace System::Windows::Forms;
using namespace System::Data;

namespace CursorStuff
{
   public ref class Form1: public System::Windows::Forms::Form
   {
   private:
      System::Windows::Forms::Button^ button1;
      System::Windows::Forms::Button^ button2;
      System::ComponentModel::Container^ components;

   public:
      Form1()
      {
         InitializeComponent();
      }

   protected:
      ~Form1()
      {
         if ( components != nullptr )
         {
            delete components;
         }
      }

   private:
      void InitializeComponent()
      {
         this->button1 = gcnew System::Windows::Forms::Button;
         this->button2 = gcnew System::Windows::Forms::Button;
         this->SuspendLayout();

         //
         // button1
         //
         this->button1->Cursor = System::Windows::Forms::Cursors::Default;
         this->button1->Location = System::Drawing::Point( 40, 184 );
         this->button1->Name = "button1";
         this->button1->TabIndex = 2;
         this->button1->Text = "button1";
         this->button1->Click += gcnew System::EventHandler( this, &Form1::button1_Click );

         //
         // button2
         //
         this->button2->Cursor = System::Windows::Forms::Cursors::Default;
         this->button2->Location = System::Drawing::Point( 56, 232 );
         this->button2->Name = "button2";
         this->button2->TabIndex = 3;
         this->button2->Text = "button2";
         this->button2->Click += gcnew System::EventHandler( this, &Form1::button2_Click );

         //
         // Form1
         //
         this->ClientSize = System::Drawing::Size( 292, 273 );
         array<System::Windows::Forms::Control^>^temp0 = {this->button2,this->button1};
         this->Controls->AddRange( temp0 );
         this->Cursor = System::Windows::Forms::Cursors::Hand;
         this->Name = "Form1";
         this->Text = "Form1";
         this->ResumeLayout( false );
      }

      void button1_Click( Object^ /*sender*/, System::EventArgs^ /*e*/ )
      {
         this->MoveCursor();
      }

      //<snippet1>
      void MoveCursor()
      {
         // Set the Current cursor, move the cursor's Position,
         // and set its clipping rectangle to the form.
         this->Cursor = gcnew System::Windows::Forms::Cursor( ::Cursor::Current->Handle );
         ::Cursor::Position = Point(::Cursor::Position.X - 50,::Cursor::Position.Y - 50);
         ::Cursor::Clip = Rectangle(this->Location,this->Size);
      }
      //</snippet1>

      //<snippet2>
      void DrawCursorsOnForm( System::Windows::Forms::Cursor^ cursor )
      {
         // If the form's cursor is not the Hand cursor and the
         // Current cursor is the Default, Draw the specified
         // cursor on the form in normal size and twice normal size.
         if ( this->Cursor != Cursors::Hand && System::Windows::Forms::Cursor::Current == Cursors::Default )
         {
            // Draw the cursor stretched.
            Graphics^ graphics = this->CreateGraphics();
            Rectangle rectangle = Rectangle(Point(10,10),System::Drawing::Size( cursor->Size.Width * 2, cursor->Size.Height * 2 ));
            cursor->DrawStretched( graphics, rectangle );

            // Draw the cursor in normal size.
            rectangle.Location = Point(rectangle.Width + rectangle.Location.X,rectangle.Height + rectangle.Location.Y);
            rectangle.Size = cursor->Size;
            cursor->Draw( graphics, rectangle );

            // Dispose of the cursor.
            delete cursor;
         }
      }
      //</snippet2>

      void button2_Click( Object^ /*sender*/, System::EventArgs^ /*e*/ )
      {
         this->DrawCursorsOnForm( gcnew System::Windows::Forms::Cursor( "c:\\MyCursor.cur" ) );
      }
   };
}

[STAThread]
int main()
{
   Application::Run( gcnew CursorStuff::Form1 );
}
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.streamnative.pulsar.handlers.kop.coordinator.transaction; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNull; import static org.testng.Assert.assertTrue; import com.google.common.collect.Sets; import io.streamnative.pulsar.handlers.kop.KafkaProtocolHandler; import io.streamnative.pulsar.handlers.kop.KopProtocolHandlerTestBase; import java.time.Duration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CountDownLatch; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.common.protocol.Errors; import org.apache.kafka.common.record.RecordBatch; import org.apache.pulsar.client.admin.PulsarAdminException; import org.apache.pulsar.common.policies.data.BundlesData; import org.apache.pulsar.common.policies.data.RetentionPolicies; import org.apache.pulsar.common.policies.data.TenantInfoImpl; import org.awaitility.Awaitility; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import org.testng.collections.Maps; /** * Transaction state manager test. */ @Slf4j public class TransactionStateManagerTest extends KopProtocolHandlerTestBase { private static final Short producerEpoch = 0; private static final Integer transactionTimeoutMs = 1000; @BeforeClass @Override protected void setup() throws Exception { this.conf.setEnableTransactionCoordinator(true); internalSetup(); TenantInfoImpl tenantInfo = new TenantInfoImpl(Sets.newHashSet("appid1", "appid2"), Sets.newHashSet("test")); if (!admin.tenants().getTenants().contains("public")) { admin.tenants().createTenant("public", tenantInfo); } else { admin.tenants().updateTenant("public", tenantInfo); } if (!admin.namespaces().getNamespaces("public").contains("public/default")) { admin.namespaces().createNamespace("public/default"); admin.namespaces().setNamespaceReplicationClusters("public/default", Sets.newHashSet("test")); admin.namespaces().setRetention("public/default", new RetentionPolicies(60, 1000)); } if (!admin.namespaces().getNamespaces("public").contains("public/__kafka")) { admin.namespaces().createNamespace("public/__kafka"); admin.namespaces().setNamespaceReplicationClusters("public/__kafka", Sets.newHashSet("test")); admin.namespaces().setRetention("public/__kafka", new RetentionPolicies(20, 100)); } log.info("txn topic partition {}", admin.topics().getPartitionedTopicMetadata( TransactionConfig.DefaultTransactionMetadataTopicName).partitions); } @AfterClass @Override protected void cleanup() throws Exception { internalCleanup(); } @Test(timeOut = 1000 * 10) public void txnLogStoreAndTCImmigrationTest() throws Exception { Map<String, Long> pidMappings = Maps.newHashMap(); pidMappings.put("zero", 0L); pidMappings.put("one", 1L); pidMappings.put("two", 2L); pidMappings.put("three", 3L); pidMappings.put("four", 4L); pidMappings.put("five", 5L); 
        Map<Long, TransactionState> transactionStates = Maps.newHashMap();
        transactionStates.put(0L, TransactionState.EMPTY);
        transactionStates.put(1L, TransactionState.ONGOING);
        transactionStates.put(2L, TransactionState.PREPARE_COMMIT);
        transactionStates.put(3L, TransactionState.COMPLETE_COMMIT);
        transactionStates.put(4L, TransactionState.PREPARE_ABORT);
        transactionStates.put(5L, TransactionState.COMPLETE_ABORT);

        TransactionStateManager transactionStateManager = getTxnManager();

        CountDownLatch countDownLatch = new CountDownLatch(pidMappings.size());
        pidMappings.forEach((transactionalId, producerId) -> {
            TransactionMetadata.TransactionMetadataBuilder txnMetadataBuilder = TransactionMetadata.builder()
                    .transactionalId(transactionalId)
                    .producerId(producerId)
                    .lastProducerId(RecordBatch.NO_PRODUCER_ID)
                    .producerEpoch(producerEpoch)
                    .lastProducerEpoch(RecordBatch.NO_PRODUCER_EPOCH)
                    .txnTimeoutMs(transactionTimeoutMs)
                    .state(transactionStates.get(producerId))
                    .pendingState(Optional.of(transactionStates.get(producerId)))
                    .topicPartitions(Sets.newHashSet())
                    .txnStartTimestamp(transactionStates.get(producerId) == TransactionState.EMPTY
                            ? -1 : System.currentTimeMillis());

            if (transactionStates.get(producerId).equals(TransactionState.COMPLETE_ABORT)
                    || transactionStates.get(producerId).equals(TransactionState.COMPLETE_COMMIT)) {
                txnMetadataBuilder.txnStartTimestamp(0);
            }
            TransactionMetadata txnMetadata = txnMetadataBuilder.build();

            transactionStateManager.putTransactionStateIfNotExists(txnMetadata);
            transactionStateManager.appendTransactionToLog(transactionalId, -1, txnMetadata.prepareNoTransit(),
                    new TransactionStateManager.ResponseCallback() {
                        @Override
                        public void complete() {
                            log.info("Successfully appended to the transaction log.");
                            countDownLatch.countDown();
                        }

                        @Override
                        public void fail(Errors errors) {
                            log.error("Failed to append to the transaction log.", errors.exception());
                            countDownLatch.countDown();
                            Assert.fail("Failed to append to the transaction log.");
                        }
                    }, errors -> false);
        });
        countDownLatch.await();
        Map<Integer, Map<String, TransactionMetadata>> txnMetadataCache =
                transactionStateManager.transactionMetadataCache;

        // retain a copy of the transaction metadata cache before the bundles are unloaded
        Map<Integer, Map<String, TransactionMetadata>> beforeTxnMetadataCache = new HashMap<>(txnMetadataCache);

        BundlesData bundles = pulsar.getAdminClient().namespaces().getBundles(
                conf.getKafkaTenant() + "/" + conf.getKafkaNamespace());
        List<String> boundaries = bundles.getBoundaries();
        for (int i = 0; i < boundaries.size() - 1; i++) {
            String bundle = String.format("%s_%s", boundaries.get(i), boundaries.get(i + 1));
            pulsar.getAdminClient().namespaces()
                    .unloadNamespaceBundle(conf.getKafkaTenant() + "/" + conf.getKafkaNamespace(), bundle);
        }

        waitTCImmigrationComplete();

        // verify the loaded transaction metadata
        verifyImmigration(transactionStateManager, beforeTxnMetadataCache);
    }

    private void verifyImmigration(TransactionStateManager transactionStateManager,
                                   Map<Integer, Map<String, TransactionMetadata>> beforeTxnMetadataCache) {
        Map<Integer, Map<String, TransactionMetadata>> loadedTxnMetadataCache =
                transactionStateManager.transactionMetadataCache;
        for (int i = 0; i < conf.getTxnLogTopicNumPartitions(); i++) {
            Map<String, TransactionMetadata> txnMetadataMap = beforeTxnMetadataCache.get(i);
            Map<String, TransactionMetadata> loadedTxnMetadataMap = loadedTxnMetadataCache.get(i);

            if (txnMetadataMap == null) {
                assertNull(loadedTxnMetadataMap);
                continue;
            }
            assertEquals(txnMetadataMap.size(), loadedTxnMetadataMap.size());
            txnMetadataMap.forEach((txnId, txnMetadata) -> {
                TransactionMetadata loadedTxnMetadata = loadedTxnMetadataMap.get(txnId);
                assertEquals(txnMetadata.getTransactionalId(), loadedTxnMetadata.getTransactionalId());
                assertEquals(txnMetadata.getProducerId(), loadedTxnMetadata.getProducerId());
                assertEquals(txnMetadata.getLastProducerId(), loadedTxnMetadata.getLastProducerId());
                assertEquals(txnMetadata.getProducerEpoch(), loadedTxnMetadata.getProducerEpoch());
                assertEquals(txnMetadata.getLastProducerEpoch(), loadedTxnMetadata.getLastProducerEpoch());
                assertEquals(txnMetadata.getTxnTimeoutMs(), loadedTxnMetadata.getTxnTimeoutMs());
                assertEquals(txnMetadata.getTopicPartitions(), loadedTxnMetadata.getTopicPartitions());
                assertEquals(txnMetadata.getTxnStartTimestamp(), loadedTxnMetadata.getTxnStartTimestamp());
                if (txnMetadata.getState().equals(TransactionState.PREPARE_ABORT)) {
                    // a transaction loaded in a prepare state completes, so PREPARE_ABORT becomes COMPLETE_ABORT
                    waitTxnComplete(loadedTxnMetadata, TransactionState.COMPLETE_ABORT);
                } else if (txnMetadata.getState().equals(TransactionState.PREPARE_COMMIT)) {
                    // a transaction loaded in a prepare state completes, so PREPARE_COMMIT becomes COMPLETE_COMMIT
                    waitTxnComplete(loadedTxnMetadata, TransactionState.COMPLETE_COMMIT);
                } else {
                    assertEquals(txnMetadata.getState(), loadedTxnMetadata.getState());
                    assertEquals(txnMetadata.getTxnLastUpdateTimestamp(),
                            loadedTxnMetadata.getTxnLastUpdateTimestamp());
                }
            });
        }
    }

    private void waitTxnComplete(TransactionMetadata loadedTxnMetadata, TransactionState expectedState) {
        Awaitility.await()
                .pollDelay(Duration.ofMillis(500))
                .untilAsserted(() -> assertEquals(loadedTxnMetadata.getState(), expectedState));
        assertEquals(expectedState, loadedTxnMetadata.getState());
        assertTrue(loadedTxnMetadata.getTxnLastUpdateTimestamp() > 0);
    }

    private void waitTCImmigrationComplete() throws PulsarAdminException {
        admin.lookups().lookupTopic("public/default/__transaction_state-partition-0");
        TransactionStateManager txnStateManager = getTxnManager();
        // The lookup request triggers a topic on-load operation. The TC partition log recovers
        // asynchronously when the namespace is loaded, so wait for the TC partition log to
        // finish loading.
        assertTrue(txnStateManager.isLoading());
        Awaitility.await()
                .pollDelay(Duration.ofMillis(500))
                .untilAsserted(() -> assertFalse(txnStateManager.isLoading()));
    }

    private TransactionStateManager getTxnManager() {
        return ((KafkaProtocolHandler) this.pulsar.getProtocolHandlers().protocol("kafka"))
                .getTransactionCoordinator("public").getTxnManager();
    }
}
# Copyright (c) LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license. # See LICENSE in the project root for license information. import datetime import time import calendar import oncall.scheduler.default from pytz import utc, timezone MIN = 60 HOUR = 60 * MIN DAY = 24 * HOUR WEEK = 7 * DAY MOCK_SCHEDULE = {'team_id': 1, 'role_id': 2, 'roster_id': 3} def test_find_new_user_as_least_active_user(mocker): scheduler = oncall.scheduler.default.Scheduler() mocker.patch('oncall.scheduler.default.Scheduler.find_new_user_in_roster').return_value = {123} mocker.patch('oncall.scheduler.default.Scheduler.get_roster_user_ids').return_value = {135, 123} mocker.patch('oncall.scheduler.default.Scheduler.get_busy_user_by_event_range') mocker.patch('oncall.scheduler.default.Scheduler.find_least_active_user_id_by_team') user_id = scheduler.find_next_user_id(MOCK_SCHEDULE, [{'start': 0, 'end': 5}], None) assert user_id == 123 def test_calculate_future_events_7_24_shifts(mocker): mocker.patch('oncall.scheduler.default.Scheduler.get_schedule_last_epoch').return_value = None mock_dt = datetime.datetime(year=2017, month=2, day=7, hour=10) mocker.patch('time.time').return_value = time.mktime(mock_dt.timetuple()) start = DAY + 10 * HOUR + 30 * MIN # Monday at 10:30 am schedule_foo = { 'timezone': 'US/Pacific', 'auto_populate_threshold': 21, 'events': [{ 'start': start, # 24hr weeklong shift starting Monday at 10:30 am 'duration': WEEK }] } scheduler = oncall.scheduler.default.Scheduler() future_events, last_epoch = scheduler.calculate_future_events(schedule_foo, None) assert len(future_events) == 4 mondays = (6, 13, 20, 27) for epoch, monday in zip(future_events, mondays): assert len(epoch) == 1 ev = epoch[0] start_dt = utc.localize(datetime.datetime.utcfromtimestamp(ev['start'])) start_dt = start_dt.astimezone(timezone('US/Pacific')) assert start_dt.timetuple().tm_year == mock_dt.timetuple().tm_year assert start_dt.timetuple().tm_mon == mock_dt.timetuple().tm_mon assert start_dt.timetuple().tm_mday == monday assert start_dt.timetuple().tm_wday == 0 # Monday assert start_dt.timetuple().tm_hour == 10 # 10: assert start_dt.timetuple().tm_min == 30 # 30 am assert start_dt.timetuple().tm_sec == 00 assert ev['end'] - ev['start'] == WEEK def test_calculate_future_events_7_12_shifts(mocker): mocker.patch('oncall.scheduler.default.Scheduler.get_schedule_last_epoch').return_value = None mock_dt = datetime.datetime(year=2016, month=9, day=9, hour=10) mocker.patch('time.time').return_value = time.mktime(mock_dt.timetuple()) start = 3 * DAY + 12 * HOUR # Wednesday at noon events = [] for i in range(7): events.append({'start': start + DAY * i, 'duration': 12 * HOUR}) schedule_foo = { 'timezone': 'US/Eastern', 'auto_populate_threshold': 7, 'events': events } scheduler = oncall.scheduler.default.Scheduler() future_events, last_epoch = scheduler.calculate_future_events(schedule_foo, None) assert len(future_events) == 2 assert len(future_events[0]) == 7 assert len(future_events[1]) == 7 days = range(14, 22) for ev, day in zip(future_events[1], days): start_dt = utc.localize(datetime.datetime.utcfromtimestamp(ev['start'])) start_dt = start_dt.astimezone(timezone('US/Eastern')) assert start_dt.timetuple().tm_year == mock_dt.timetuple().tm_year assert start_dt.timetuple().tm_mon == mock_dt.timetuple().tm_mon assert start_dt.timetuple().tm_mday == day assert start_dt.timetuple().tm_hour == 12 assert start_dt.timetuple().tm_min == 00 assert start_dt.timetuple().tm_sec == 00 def 
test_calculate_future_events_14_12_shifts(mocker): mocker.patch('oncall.scheduler.default.Scheduler.get_schedule_last_epoch').return_value = None mock_dt = datetime.datetime(year=2016, month=9, day=9, hour=10) mocker.patch('time.time').return_value = time.mktime(mock_dt.timetuple()) start = 3 * DAY + 12 * HOUR # Wednesday at noon events = [] for i in range(14): events.append({'start': start + DAY * i, 'duration': 12 * HOUR}) schedule_foo = { 'timezone': 'US/Central', 'auto_populate_threshold': 21, 'events': events } scheduler = oncall.scheduler.default.Scheduler() future_events, last_epoch = scheduler.calculate_future_events(schedule_foo, None) assert len(future_events) == 2 assert len(future_events[1]) == 14 days = list(range(21, 31)) + list(range(1, 6)) for ev, day in zip(future_events[1], days): start_dt = utc.localize(datetime.datetime.utcfromtimestamp(ev['start'])) start_dt = start_dt.astimezone(timezone('US/Central')) assert start_dt.timetuple().tm_year == mock_dt.timetuple().tm_year assert start_dt.timetuple().tm_mday == day assert start_dt.timetuple().tm_hour == 12 assert start_dt.timetuple().tm_min == 00 assert start_dt.timetuple().tm_sec == 00 def test_dst_ambiguous_schedule(mocker): mocker.patch('oncall.scheduler.default.Scheduler.get_schedule_last_epoch').return_value = None mock_dt = datetime.datetime(year=2016, month=10, day=29, hour=10) mocker.patch('time.time').return_value = time.mktime(mock_dt.timetuple()) start = HOUR + 30 * MIN # Sunday at 1:30 am schedule_foo = { 'timezone': 'US/Central', 'auto_populate_threshold': 14, 'events': [{ 'start': start, # 24hr weeklong shift starting Sunday at 1:30 am 'duration': WEEK }] } scheduler = oncall.scheduler.default.Scheduler() future_events, last_epoch = scheduler.calculate_future_events(schedule_foo, None) assert len(future_events) == 3 dst_events = future_events[1] + future_events[2] assert len(dst_events) == 2 # Make sure that events are consecutive (no gaps) assert dst_events[0]['end'] == dst_events[1]['start'] def test_dst_schedule(mocker): mocker.patch('oncall.scheduler.default.Scheduler.get_schedule_last_epoch').return_value = None mock_dt = datetime.datetime(year=2016, month=10, day=29, hour=10) mocker.patch('time.time').return_value = time.mktime(mock_dt.timetuple()) start = DAY + 11 * HOUR # Monday at 11:00 am schedule_foo = { 'timezone': 'US/Central', 'auto_populate_threshold': 14, 'events': [{ 'start': start, # 24hr weeklong shift starting Monday at 11:00 am 'duration': WEEK }] } scheduler = oncall.scheduler.default.Scheduler() future_events, last_epoch = scheduler.calculate_future_events(schedule_foo, None) assert len(future_events) == 3 dst_events = future_events[1] + future_events[2] assert len(dst_events) == 2 # Make sure that events are consecutive (no gaps) assert dst_events[0]['end'] == dst_events[1]['start'] for ev in dst_events: start_dt = utc.localize(datetime.datetime.utcfromtimestamp(ev['start'])) start_dt = start_dt.astimezone(timezone('US/Central')) assert start_dt.timetuple().tm_hour == 11 def test_existing_schedule(mocker): mock_dt = datetime.datetime(year=2017, month=2, day=5, hour=0, tzinfo=timezone('US/Pacific')) mocker.patch('oncall.scheduler.default.Scheduler.get_schedule_last_epoch').return_value = \ calendar.timegm(mock_dt.astimezone(utc).timetuple()) mocker.patch('time.time').return_value = time.mktime(datetime.datetime(year=2017, month=2, day=7).timetuple()) start = DAY + 10 * HOUR + 30 * MIN # Monday at 10:30 am schedule_foo = { 'timezone': 'US/Pacific', 'auto_populate_threshold': 21, 
'events': [{ 'start': start, # 24hr weeklong shift starting Monday at 10:30 am 'duration': WEEK }] } scheduler = oncall.scheduler.default.Scheduler() future_events, last_epoch = scheduler.calculate_future_events(schedule_foo, None) assert len(future_events) == 3 mondays = (13, 20, 27) for epoch, monday in zip(future_events, mondays): assert len(epoch) == 1 ev = epoch[0] start_dt = utc.localize(datetime.datetime.utcfromtimestamp(ev['start'])) start_dt = start_dt.astimezone(timezone('US/Pacific')) assert start_dt.timetuple().tm_year == mock_dt.timetuple().tm_year assert start_dt.timetuple().tm_mon == mock_dt.timetuple().tm_mon assert start_dt.timetuple().tm_mday == monday assert start_dt.timetuple().tm_wday == 0 # Monday assert start_dt.timetuple().tm_hour == 10 # 10: assert start_dt.timetuple().tm_min == 30 # 30 am assert start_dt.timetuple().tm_sec == 00 assert ev['end'] - ev['start'] == WEEK def test_existing_schedule_change_epoch(mocker): mock_dt = datetime.datetime(year=2017, month=2, day=5, hour=0, tzinfo=timezone('US/Eastern')) mocker.patch('oncall.scheduler.default.Scheduler.get_schedule_last_epoch').return_value = \ calendar.timegm(mock_dt.astimezone(utc).timetuple()) mocker.patch('time.time').return_value = time.mktime(datetime.datetime(year=2017, month=2, day=7).timetuple()) start = DAY + 10 * HOUR + 30 * MIN # Monday at 10:30 am schedule_foo = { 'timezone': 'US/Pacific', 'auto_populate_threshold': 21, 'events': [{ 'start': start, # 24hr weeklong shift starting Monday at 10:30 am 'duration': WEEK }] } scheduler = oncall.scheduler.default.Scheduler() future_events, last_epoch = scheduler.calculate_future_events(schedule_foo, None) assert len(future_events) == 3 mondays = (13, 20, 27) for epoch, monday in zip(future_events, mondays): assert len(epoch) == 1 ev = epoch[0] start_dt = utc.localize(datetime.datetime.utcfromtimestamp(ev['start'])) start_dt = start_dt.astimezone(timezone('US/Pacific')) assert start_dt.timetuple().tm_year == mock_dt.timetuple().tm_year assert start_dt.timetuple().tm_mon == mock_dt.timetuple().tm_mon assert start_dt.timetuple().tm_mday == monday assert start_dt.timetuple().tm_wday == 0 # Monday assert start_dt.timetuple().tm_hour == 10 # 10: assert start_dt.timetuple().tm_min == 30 # 30 am assert start_dt.timetuple().tm_sec == 00 assert ev['end'] - ev['start'] == WEEK def test_find_least_active_available_user(mocker): mock_user_ids = [123, 456, 789] mocker.patch('oncall.scheduler.default.Scheduler.find_new_user_in_roster').return_value = set() mocker.patch('oncall.scheduler.default.Scheduler.get_roster_user_ids').return_value = [i for i in mock_user_ids] mock_busy_user_by_range = mocker.patch('oncall.scheduler.default.Scheduler.get_busy_user_by_event_range') mock_active_user_by_team = mocker.patch('oncall.scheduler.default.Scheduler.find_least_active_user_id_by_team') def mock_busy_user_by_range_side_effect(user_ids, team_id, events, cursor, table_name='event'): assert user_ids == set(mock_user_ids) return [123] mock_busy_user_by_range.side_effect = mock_busy_user_by_range_side_effect future_events = [{'start': 440, 'end': 570}, {'start': 570, 'end': 588}, {'start': 600, 'end': 700}] scheduler = oncall.scheduler.default.Scheduler() scheduler.find_next_user_id(MOCK_SCHEDULE, future_events, None, 'event') mock_active_user_by_team.assert_called_with({456, 789}, 1, 440, 2, None, 'event') def test_find_least_active_available_user_conflicts(mocker): mock_user_ids = [123, 456, 789] 
mocker.patch('oncall.scheduler.default.Scheduler.find_new_user_in_roster').return_value = None mocker.patch('oncall.scheduler.default.Scheduler.get_roster_user_ids').return_value = [i for i in mock_user_ids] mock_busy_user_by_range = mocker.patch('oncall.scheduler.default.Scheduler.get_busy_user_by_event_range') mock_active_user_by_team = mocker.patch('oncall.scheduler.default.Scheduler.find_least_active_user_id_by_team') def mock_busy_user_by_range_side_effect(user_ids, team_id, events, cursor, table_name='event'): assert user_ids == set(mock_user_ids) return [123, 456, 789] mock_busy_user_by_range.side_effect = mock_busy_user_by_range_side_effect future_events = [{'start': 440, 'end': 570}] scheduler = oncall.scheduler.default.Scheduler() assert scheduler.find_next_user_id(MOCK_SCHEDULE, future_events, None, table_name='event') is None mock_active_user_by_team.assert_not_called()
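
# --- Illustrative sketch (not part of the original test suite) ---
# The assertions above repeatedly turn an event's epoch start into a
# zone-local datetime before checking its weekday and wall-clock time.
# A hypothetical helper capturing that recurring pattern:
def _to_local(epoch_seconds, tz_name):
    """Convert a UTC epoch to an aware datetime in the given timezone."""
    dt = utc.localize(datetime.datetime.utcfromtimestamp(epoch_seconds))
    return dt.astimezone(timezone(tz_name))

# e.g. _to_local(ev['start'], 'US/Pacific').timetuple().tm_wday == 0  # Monday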
5,418
572
import hou import qLibCameraZoomVertigo def createViewerStateTemplate(): state_name = "qLib::camera_zoom_vertigo_ql_dop" state_label = "Camera Zoom/Vertigo (dop) [qL]" template = hou.ViewerStateTemplate( state_name, state_label, hou.dopNodeTypeCategory(), #contexts = [ hou.sopNodeTypeCategory(), hou.dopNodeTypeCategory(), hou.lopNodeTypeCategory(), ], ) template.bindFactory(qLibCameraZoomVertigo.State) return template
219
1,502
<gh_stars>1000+ #ifndef BOOST_NETWORK_PROTOCOL_HTTP_MESSAGE_MODIFIERS_VERSION_HPP_20100608 #define BOOST_NETWORK_PROTOCOL_HTTP_MESSAGE_MODIFIERS_VERSION_HPP_20100608 // Copyright 2010 (c) <NAME> // Copyright 2010 (c) Sinefunc, Inc. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #include <boost/mpl/if.hpp> #include <boost/network/support/is_async.hpp> #include <boost/network/support/is_sync.hpp> namespace boost { namespace network { namespace http { template <class Tag> struct basic_response; namespace impl { template <class Tag, class T> void version(basic_response<Tag> &response, T const &value, mpl::false_ const & /*unused*/) { response << boost::network::http::version(value); } template <class Tag, class T> void version(basic_response<Tag> &response, T const &future, mpl::true_ const & /*unused*/) { response.version(future); } } // namespace impl template <class Tag, class T> void version(basic_response<Tag> &response, T const &value) { impl::version(response, value, is_async<Tag>()); } } // namespace http } // namespace network } // namespace boost #endif // BOOST_NETWORK_PROTOCOL_HTTP_MESSAGE_MODIFIERS_VERSION_HPP_20100608
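
// --- Illustrative usage sketch (hypothetical; not part of the original
// header). `sync_tag` stands in for any concrete synchronous Tag type:
//
//   boost::network::http::basic_response<sync_tag> response;
//   boost::network::http::version(response, std::string("HTTP/1.1"));
//
// For a synchronous tag, is_async<Tag>() evaluates to mpl::false_, so the
// call forwards to `response << version(value)`; for an asynchronous tag
// it stores the supplied future via `response.version(future)` instead.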
490
416
package org.simpleflatmapper.jdbc.impl.setter; import org.simpleflatmapper.converter.Context; import org.simpleflatmapper.map.setter.ContextualSetter; import org.simpleflatmapper.map.setter.DoubleContextualSetter; import org.simpleflatmapper.reflect.Setter; import org.simpleflatmapper.reflect.primitive.DoubleSetter; import java.sql.PreparedStatement; public class DoublePreparedStatementSetter implements ContextualSetter<PreparedStatement, Double>, DoubleContextualSetter<PreparedStatement> { private final int columnIndex; private final DoublePreparedStatementIndexSetter setter = new DoublePreparedStatementIndexSetter(); public DoublePreparedStatementSetter(int columnIndex) { this.columnIndex = columnIndex; } @Override public void setDouble(PreparedStatement target, double value, Context context) throws Exception { setter.setDouble(target, value, columnIndex, context); } @Override public void set(PreparedStatement target, Double value, Context context) throws Exception { setter.set(target, value, columnIndex, context); } }
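
// --- Illustrative usage sketch (hypothetical caller code, not part of
// this class). Binding a primitive double to parameter 1 of a prepared
// statement; the DoubleContextualSetter overload avoids boxing:
//
//   DoublePreparedStatementSetter setter = new DoublePreparedStatementSetter(1);
//   setter.setDouble(ps, 3.14d, context);
//   // presumably ends up in ps.setDouble(1, 3.14d) via the index setter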
336
319
/*! ****************************************************************************** * * \file * * \brief Header file containing RAJA reduction templates for * OpenMP execution. * * These methods should work on any platform that supports OpenMP. * ****************************************************************************** */ //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~// // Copyright (c) 2016-21, Lawrence Livermore National Security, LLC // and RAJA project contributors. See the RAJA/LICENSE file for details. // // SPDX-License-Identifier: (BSD-3-Clause) //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~// #ifndef RAJA_omp_reduce_HPP #define RAJA_omp_reduce_HPP #include "RAJA/config.hpp" #if defined(RAJA_ENABLE_OPENMP) #include <memory> #include <vector> #include <omp.h> #include "RAJA/util/types.hpp" #include "RAJA/pattern/detail/reduce.hpp" #include "RAJA/pattern/reduce.hpp" #include "RAJA/policy/openmp/policy.hpp" namespace RAJA { namespace detail { template <typename T, typename Reduce> class ReduceOMP : public reduce::detail::BaseCombinable<T, Reduce, ReduceOMP<T, Reduce>> { using Base = reduce::detail::BaseCombinable<T, Reduce, ReduceOMP>; public: using Base::Base; //! prohibit compiler-generated default ctor ReduceOMP() = delete; ~ReduceOMP() { if (Base::parent) { #pragma omp critical(ompReduceCritical) Reduce()(Base::parent->local(), Base::my_data); Base::my_data = Base::identity; } } }; } // namespace detail RAJA_DECLARE_ALL_REDUCERS(omp_reduce, detail::ReduceOMP) /////////////////////////////////////////////////////////////////////////////// // // Old ordered reductions are included below. // /////////////////////////////////////////////////////////////////////////////// namespace detail { template <typename T, typename Reduce> class ReduceOMPOrdered : public reduce::detail:: BaseCombinable<T, Reduce, ReduceOMPOrdered<T, Reduce>> { using Base = reduce::detail::BaseCombinable<T, Reduce, ReduceOMPOrdered>; std::shared_ptr<std::vector<T>> data; public: ReduceOMPOrdered() { reset(T(), T()); } //! constructor requires a default value for the reducer explicit ReduceOMPOrdered(T init_val, T identity_) { reset(init_val, identity_); } void reset(T init_val, T identity_) { Base::reset(init_val, identity_); data = std::shared_ptr<std::vector<T>>( std::make_shared<std::vector<T>>(omp_get_max_threads(), identity_)); } ~ReduceOMPOrdered() { Reduce{}((*data)[omp_get_thread_num()], Base::my_data); Base::my_data = Base::identity; } T get_combined() const { if (Base::my_data != Base::identity) { Reduce{}((*data)[omp_get_thread_num()], Base::my_data); Base::my_data = Base::identity; } T res = Base::identity; for (size_t i = 0; i < data->size(); ++i) { Reduce{}(res, (*data)[i]); } return res; } }; } // namespace detail RAJA_DECLARE_ALL_REDUCERS(omp_reduce_ordered, detail::ReduceOMPOrdered) } // namespace RAJA #endif // closing endif for RAJA_ENABLE_OPENMP guard #endif // closing endif for header file include guard
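
// --- Illustrative usage sketch (standard RAJA reduction idiom; `N` and
// `a` are hypothetical). Each OpenMP thread accumulates into its own
// copy, and the copies are combined through the omp_reduce machinery
// declared above:
//
//   RAJA::ReduceSum<RAJA::omp_reduce, double> sum(0.0);
//   RAJA::forall<RAJA::omp_parallel_for_exec>(
//       RAJA::RangeSegment(0, N), [=](RAJA::Index_type i) { sum += a[i]; });
//   double total = sum.get();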
1,112
1,921
from heapq import heappush as hp


def get_sort_range(arr):
    """Return the smallest (start, end) window that, when sorted, sorts arr."""
    if arr == sorted(arr):
        return ()

    options = list()
    for sort_start in range(len(arr) - 1):
        # sort_end must lie beyond sort_start; otherwise the slices below
        # would overlap and duplicate elements.
        for sort_end in range(sort_start + 1, len(arr) + 1):
            a1 = arr[:sort_start]
            a2 = arr[sort_start:sort_end]
            a3 = arr[sort_end:]
            new_arr = a1 + sorted(a2) + a3
            if new_arr == sorted(new_arr):
                # Push (window length, window) so the heap keeps the
                # shortest valid window at index 0.
                hp(options, (sort_end - sort_start, (sort_start, sort_end - 1)))

    return options[0][1]


# Test
assert get_sort_range([3, 5, 6, 7, 9]) == ()
assert get_sort_range([3, 7, 5, 6, 9]) == (1, 3)
assert get_sort_range([5, 4, 3, 2, 1]) == (0, 4)
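
# --- Illustrative alternative (not in the original) ---
# The brute-force search above tries every window and sorts it, roughly
# O(n^3 log n). The same minimal window falls out of a single comparison
# against the sorted copy in O(n log n):
def get_sort_range_fast(arr):
    mismatches = [i for i, (a, b) in enumerate(zip(arr, sorted(arr))) if a != b]
    return (mismatches[0], mismatches[-1]) if mismatches else ()

assert get_sort_range_fast([3, 5, 6, 7, 9]) == ()
assert get_sort_range_fast([3, 7, 5, 6, 9]) == (1, 3)
assert get_sort_range_fast([5, 4, 3, 2, 1]) == (0, 4)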
364
746
<gh_stars>100-1000 package org.protege.editor.core.update; /** * <NAME> * Stanford Center for Biomedical Informatics Research * 18/11/15 */ public class PluginDocumentParseException extends Exception { public PluginDocumentParseException(String message) { super(message); } public PluginDocumentParseException(String message, Throwable cause) { super(message, cause); } }
134
887
<reponame>trajchen/javers<filename>javers-spring/src/main/java/org/javers/spring/RegisterJsonTypeAdaptersPlugin.java package org.javers.spring; import org.javers.core.JaversBuilder; import org.javers.core.JaversBuilderPlugin; import org.javers.core.json.JsonAdvancedTypeAdapter; import org.javers.core.json.JsonTypeAdapter; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.util.Collections; import java.util.List; import java.util.Optional; @Service public class RegisterJsonTypeAdaptersPlugin implements JaversBuilderPlugin { private final List<JsonTypeAdapter<?>> typeAdapters; private final List<JsonAdvancedTypeAdapter<?>> advancedTypeAdapters; public RegisterJsonTypeAdaptersPlugin(Optional<List<JsonTypeAdapter<?>>> typeAdapters, Optional<List<JsonAdvancedTypeAdapter<?>>> advancedTypeAdapters) { this.typeAdapters = typeAdapters.orElse(Collections.emptyList()); this.advancedTypeAdapters = advancedTypeAdapters.orElse(Collections.emptyList()); } @Override public void beforeAssemble(JaversBuilder javersBuilder) { typeAdapters.forEach(javersBuilder::registerValueTypeAdapter); advancedTypeAdapters.forEach(javersBuilder::registerJsonAdvancedTypeAdapter); } }
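
// --- Illustrative sketch (hypothetical application configuration, not
// part of this class). Any JsonTypeAdapter bean in the Spring context is
// collected into the Optional<List<...>> constructor argument above and
// registered before the Javers instance is assembled:
//
//   @Bean
//   JsonTypeAdapter<MoneyValue> moneyAdapter() {  // MoneyValue is hypothetical
//       return new MoneyTypeAdapter();
//   }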
458
474
<filename>QNShortVideo-With-TuTu-iOS/tusdkfilterprocessormodule/tusdkfilterprocessormodule/TuSDKFramework/TuSDK.framework/Versions/A/Headers/TuSDKGPUTfmMixFilter.h
//
//  TuSDKGPUTfmMixFilter.h
//  TuSDK
//
//  Created by <NAME> on 2018/11/7.
//  Copyright © 2018 tusdk.com. All rights reserved.
//

#import "TuSDKFilterAdapter.h"

NS_ASSUME_NONNULL_BEGIN

// Comic (manga-style) special-effect blending
@interface TuSDKGPUTfmMixFilter : TuSDKThreeInputFilter

@property(nonatomic) CGFloat lightUp;

@end

NS_ASSUME_NONNULL_END
209
6,709
""" tests for telethon.helpers """ from base64 import b64decode import pytest from telethon import helpers def test_strip_text(): assert helpers.strip_text(" text ", []) == "text" # I can't interpret the rest of the code well enough yet class TestSyncifyAsyncContext: class NoopContextManager: def __init__(self, loop): self.count = 0 self.loop = loop async def __aenter__(self): self.count += 1 return self async def __aexit__(self, exc_type, *args): assert exc_type is None self.count -= 1 __enter__ = helpers._sync_enter __exit__ = helpers._sync_exit def test_sync_acontext(self, event_loop): contm = self.NoopContextManager(event_loop) assert contm.count == 0 with contm: assert contm.count == 1 assert contm.count == 0 @pytest.mark.asyncio async def test_async_acontext(self, event_loop): contm = self.NoopContextManager(event_loop) assert contm.count == 0 async with contm: assert contm.count == 1 assert contm.count == 0 def test_generate_key_data_from_nonce(): gkdfn = helpers.generate_key_data_from_nonce key_expect = b64decode(b'<KEY> nonce_expect = b64decode(b'1AgjhU9eDvJRjFik73bjR2zZEATzL/jLu9yodYfWEgA=') assert gkdfn(123456789, 1234567) == (key_expect, nonce_expect)
652
1,926
# todo make sure what they mean by desc undefined? None or empty? Answer: None :) it can never be empty but None is sometimes returned.

# I am implementing everything as dicts to speed up property creation
# Warning: value, get, set props of dest are PyJs types. Rest is Py!


def is_data_descriptor(desc):
    return desc and ('value' in desc or 'writable' in desc)


def is_accessor_descriptor(desc):
    return desc and ('get' in desc or 'set' in desc)


def is_generic_descriptor(desc):
    # generic means not the data and not the setter - therefore it must be
    # one that changes only enum and conf
    return desc and not (is_data_descriptor(desc)
                         or is_accessor_descriptor(desc))


def from_property_descriptor(desc, space):
    """Build a JS object describing desc (ES5 8.10.4 FromPropertyDescriptor)."""
    if not desc:
        return {}
    obj = space.NewObject()
    if is_data_descriptor(desc):
        obj.define_own_property(
            'value', {
                'value': desc['value'],
                'writable': True,
                'enumerable': True,
                'configurable': True
            }, False)
        obj.define_own_property(
            'writable', {
                'value': desc['writable'],
                'writable': True,
                'enumerable': True,
                'configurable': True
            }, False)
    else:
        # accessor descriptor: it carries 'get'/'set' and never 'writable'
        obj.define_own_property(
            'get', {
                'value': desc['get'],
                'writable': True,
                'enumerable': True,
                'configurable': True
            }, False)
        obj.define_own_property(
            'set', {
                'value': desc['set'],
                'writable': True,
                'enumerable': True,
                'configurable': True
            }, False)
    obj.define_own_property(
        'enumerable', {
            'value': desc['enumerable'],
            'writable': True,
            'enumerable': True,
            'configurable': True
        }, False)
    obj.define_own_property(
        'configurable', {
            'value': desc['configurable'],
            'writable': True,
            'enumerable': True,
            'configurable': True
        }, False)
    return obj


def to_property_descriptor(obj):
    """Convert a JS object to a descriptor dict (ES5 8.10.5 ToPropertyDescriptor)."""
    if obj._type() != 'Object':
        raise TypeError()
    desc = {}
    for e in ('enumerable', 'configurable', 'writable'):
        if obj.has_property(e):
            desc[e] = obj.get(e).to_boolean().value
    if obj.has_property('value'):
        desc['value'] = obj.get('value')
    for e in ('get', 'set'):
        if obj.has_property(e):
            cand = obj.get(e)
            if not (cand.is_callable() or cand.is_undefined()):
                raise TypeError()
            desc[e] = cand
    if ('get' in desc or 'set' in desc) and ('value' in desc
                                             or 'writable' in desc):
        raise TypeError()
    return desc
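
# --- Illustrative sketch (not part of the original module) ---
# Plain-dict classification, independent of any JS runtime object:
assert is_data_descriptor({'value': 1, 'writable': True})
assert is_accessor_descriptor({'get': None})
assert is_generic_descriptor({'enumerable': True, 'configurable': False})
# to_property_descriptor above rejects descriptors that mix the data
# fields ('value'/'writable') with the accessor fields ('get'/'set').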
1,408
1,091
<gh_stars>1000+ /* * Copyright 2018-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.simplefabric.api; import org.onosproject.event.AbstractEvent; import java.io.OutputStream; import java.io.PrintStream; /** * Describes an interface event. */ public class SimpleFabricEvent extends AbstractEvent<SimpleFabricEvent.Type, String> { public enum Type { SIMPLE_FABRIC_UPDATED, /* Indicates topology info has been updated. */ SIMPLE_FABRIC_FLUSH, /* Indicates flush triggered. */ SIMPLE_FABRIC_IDLE, /* Indicates idle check. */ SIMPLE_FABRIC_DUMP /* Indicates to dump info on the subject to output stream. */ } private PrintStream printStream; // output stream for SIMPLE_FABRIC_DUMP /** * Creates an interface event with type and subject. * * @param type event type * @param subject subject for dump event or dummy */ public SimpleFabricEvent(Type type, String subject) { super(type, subject); /* subject is dummy */ } /** * Creates an interface event with type, subject and output stream for dump. * * @param type event type * @param subject subject for dump event * @param out output stream to dump out */ public SimpleFabricEvent(Type type, String subject, OutputStream out) { super(type, subject); /* subject is dummy */ printStream = new PrintStream(out, true); } public PrintStream out() { return printStream; } }
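
// --- Illustrative usage sketch (hypothetical caller, not part of this
// class). For dump events the subject names what to dump and the event
// carries the stream to print to:
//
//   SimpleFabricEvent event = new SimpleFabricEvent(
//       SimpleFabricEvent.Type.SIMPLE_FABRIC_DUMP, "show", System.out);
//   event.out().println("dump requested for: " + event.subject());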
680
323
<reponame>jiangjiang66/SpringBoot3<gh_stars>100-1000
package cn.huanzi.qch.springbootsecurity.config;

import org.springframework.security.core.Authentication;
import org.springframework.security.web.authentication.AuthenticationSuccessHandler;
import org.springframework.stereotype.Component;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;

/**
 * Login success handler
 */
@Component
public class LoginSuccessHandlerConfig implements AuthenticationSuccessHandler {
    @Override
    public void onAuthenticationSuccess(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Authentication authentication) throws IOException, ServletException {
        System.out.println("Login successful");

        // respond with JSON
        httpServletResponse.setCharacterEncoding("UTF-8");
        httpServletResponse.setContentType("application/json; charset=utf-8");
        PrintWriter out = httpServletResponse.getWriter();
        out.print("{\"code\":\"300\",\"msg\":\"Login successful\",\"url\":\"/index\"}");
        out.flush();
        out.close();
    }
}
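
// --- Illustrative wiring sketch (hypothetical security configuration,
// not part of this class). Attaching the handler to form login makes the
// JSON body above replace the default redirect:
//
//   http.formLogin()
//       .successHandler(loginSuccessHandlerConfig);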
414
2,151
// Copyright 2011 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "cc/layers/layer_impl.h" #include "cc/layers/painted_scrollbar_layer_impl.h" #include "cc/layers/solid_color_scrollbar_layer_impl.h" #include "cc/paint/filter_operation.h" #include "cc/paint/filter_operations.h" #include "cc/test/animation_test_common.h" #include "cc/test/fake_impl_task_runner_provider.h" #include "cc/test/fake_layer_tree_frame_sink.h" #include "cc/test/fake_layer_tree_host_impl.h" #include "cc/test/geometry_test_utils.h" #include "cc/test/test_task_graph_runner.h" #include "cc/trees/layer_tree_impl.h" #include "cc/trees/single_thread_proxy.h" #include "cc/trees/tree_synchronizer.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" #include "third_party/skia/include/effects/SkBlurImageFilter.h" namespace cc { namespace { #define EXECUTE_AND_VERIFY_SUBTREE_DID_NOT_CHANGE(code_to_test) \ root->layer_tree_impl()->ResetAllChangeTracking(); \ code_to_test; \ EXPECT_FALSE( \ root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting(root)); \ EXPECT_FALSE( \ root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting(child)); \ EXPECT_FALSE(root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting( \ grand_child)); \ EXPECT_FALSE(root->LayerPropertyChanged()); \ EXPECT_FALSE(child->LayerPropertyChanged()); \ EXPECT_FALSE(grand_child->LayerPropertyChanged()); #define EXECUTE_AND_VERIFY_NEEDS_PUSH_PROPERTIES_AND_SUBTREE_DID_NOT_CHANGE( \ code_to_test) \ root->layer_tree_impl()->ResetAllChangeTracking(); \ code_to_test; \ EXPECT_TRUE( \ root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting(root)); \ EXPECT_FALSE( \ root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting(child)); \ EXPECT_FALSE(root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting( \ grand_child)); \ EXPECT_FALSE(root->LayerPropertyChanged()); \ EXPECT_FALSE(child->LayerPropertyChanged()); \ EXPECT_FALSE(grand_child->LayerPropertyChanged()); #define EXECUTE_AND_VERIFY_NO_NEED_TO_PUSH_PROPERTIES_AND_SUBTREE_CHANGED( \ code_to_test) \ root->layer_tree_impl()->ResetAllChangeTracking(); \ code_to_test; \ EXPECT_FALSE( \ root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting(root)); \ EXPECT_FALSE( \ root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting(child)); \ EXPECT_FALSE(root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting( \ grand_child)); \ EXPECT_TRUE(root->LayerPropertyChanged()); \ EXPECT_TRUE(root->LayerPropertyChangedFromPropertyTrees()); \ EXPECT_FALSE(root->LayerPropertyChangedNotFromPropertyTrees()); \ EXPECT_TRUE(child->LayerPropertyChanged()); \ EXPECT_TRUE(child->LayerPropertyChangedFromPropertyTrees()); \ EXPECT_FALSE(child->LayerPropertyChangedNotFromPropertyTrees()); \ EXPECT_TRUE(grand_child->LayerPropertyChanged()); \ EXPECT_TRUE(grand_child->LayerPropertyChangedFromPropertyTrees()); \ EXPECT_FALSE(grand_child->LayerPropertyChangedNotFromPropertyTrees()); #define EXECUTE_AND_VERIFY_ONLY_LAYER_CHANGED(code_to_test) \ root->layer_tree_impl()->ResetAllChangeTracking(); \ root->layer_tree_impl()->property_trees()->full_tree_damaged = false; \ code_to_test; \ EXPECT_TRUE( \ root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting(root)); \ EXPECT_FALSE( \ root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting(child)); \ EXPECT_FALSE(root->layer_tree_impl()->LayerNeedsPushPropertiesForTesting( \ grand_child)); \ 
EXPECT_TRUE(root->LayerPropertyChanged()); \ EXPECT_FALSE(root->LayerPropertyChangedFromPropertyTrees()); \ EXPECT_TRUE(root->LayerPropertyChangedNotFromPropertyTrees()); \ EXPECT_FALSE(child->LayerPropertyChanged()); \ EXPECT_FALSE(grand_child->LayerPropertyChanged()); #define VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(code_to_test) \ root->layer_tree_impl()->ResetAllChangeTracking(); \ host_impl.ForcePrepareToDraw(); \ EXPECT_FALSE(host_impl.active_tree()->needs_update_draw_properties()); \ code_to_test; \ EXPECT_TRUE(host_impl.active_tree()->needs_update_draw_properties()); #define VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES(code_to_test) \ root->layer_tree_impl()->ResetAllChangeTracking(); \ host_impl.ForcePrepareToDraw(); \ EXPECT_FALSE(host_impl.active_tree()->needs_update_draw_properties()); \ code_to_test; \ EXPECT_FALSE(host_impl.active_tree()->needs_update_draw_properties()); static gfx::Vector2dF ScrollDelta(LayerImpl* layer_impl) { gfx::ScrollOffset delta = layer_impl->layer_tree_impl() ->property_trees() ->scroll_tree.GetScrollOffsetDeltaForTesting( layer_impl->element_id()); return gfx::Vector2dF(delta.x(), delta.y()); } TEST(LayerImplTest, VerifyPendingLayerChangesAreTrackedProperly) { // // This test checks that LayerPropertyChanged() has the correct behavior. // // The constructor on this will fake that we are on the correct thread. // Create a simple LayerImpl tree: FakeImplTaskRunnerProvider task_runner_provider; TestTaskGraphRunner task_graph_runner; std::unique_ptr<LayerTreeFrameSink> layer_tree_frame_sink = FakeLayerTreeFrameSink::Create3d(); FakeLayerTreeHostImpl host_impl(&task_runner_provider, &task_graph_runner); host_impl.SetVisible(true); EXPECT_TRUE(host_impl.InitializeRenderer(layer_tree_frame_sink.get())); host_impl.CreatePendingTree(); std::unique_ptr<LayerImpl> root_ptr = LayerImpl::Create(host_impl.pending_tree(), 2); LayerImpl* root = root_ptr.get(); host_impl.pending_tree()->SetRootLayerForTesting(std::move(root_ptr)); root->test_properties()->force_render_surface = true; root->SetMasksToBounds(true); root->layer_tree_impl()->ResetAllChangeTracking(); root->test_properties()->AddChild( LayerImpl::Create(host_impl.pending_tree(), 7)); LayerImpl* child = root->test_properties()->children[0]; child->test_properties()->AddChild( LayerImpl::Create(host_impl.pending_tree(), 8)); LayerImpl* grand_child = child->test_properties()->children[0]; host_impl.pending_tree()->BuildLayerListAndPropertyTreesForTesting(); // Adding children is an internal operation and should not mark layers as // changed. EXPECT_FALSE(root->LayerPropertyChanged()); EXPECT_FALSE(child->LayerPropertyChanged()); EXPECT_FALSE(grand_child->LayerPropertyChanged()); gfx::PointF arbitrary_point_f = gfx::PointF(0.125f, 0.25f); float arbitrary_number = 0.352f; gfx::Size arbitrary_size = gfx::Size(111, 222); gfx::Point arbitrary_point = gfx::Point(333, 444); gfx::Rect arbitrary_rect = gfx::Rect(arbitrary_point, arbitrary_size); SkColor arbitrary_color = SkColorSetRGB(10, 20, 30); gfx::Transform arbitrary_transform; arbitrary_transform.Scale3d(0.1f, 0.2f, 0.3f); FilterOperations arbitrary_filters; arbitrary_filters.Append(FilterOperation::CreateOpacityFilter(0.5f)); // These properties are internal, and should not be considered "change" when // they are used. 
EXECUTE_AND_VERIFY_NEEDS_PUSH_PROPERTIES_AND_SUBTREE_DID_NOT_CHANGE( root->SetUpdateRect(arbitrary_rect)); EXECUTE_AND_VERIFY_ONLY_LAYER_CHANGED(root->SetBounds(arbitrary_size)); host_impl.pending_tree()->property_trees()->needs_rebuild = true; host_impl.pending_tree()->BuildLayerListAndPropertyTreesForTesting(); // Changing these properties affects the entire subtree of layers. EXECUTE_AND_VERIFY_NO_NEED_TO_PUSH_PROPERTIES_AND_SUBTREE_CHANGED( host_impl.pending_tree()->SetFilterMutated(root->element_id(), arbitrary_filters)); EXECUTE_AND_VERIFY_NO_NEED_TO_PUSH_PROPERTIES_AND_SUBTREE_CHANGED( host_impl.pending_tree()->SetFilterMutated(root->element_id(), FilterOperations())); EXECUTE_AND_VERIFY_NO_NEED_TO_PUSH_PROPERTIES_AND_SUBTREE_CHANGED( host_impl.pending_tree()->SetOpacityMutated(root->element_id(), arbitrary_number)); EXECUTE_AND_VERIFY_NO_NEED_TO_PUSH_PROPERTIES_AND_SUBTREE_CHANGED( host_impl.pending_tree()->SetTransformMutated(root->element_id(), arbitrary_transform)); // Changing these properties only affects the layer itself. EXECUTE_AND_VERIFY_ONLY_LAYER_CHANGED(root->SetDrawsContent(true)); EXECUTE_AND_VERIFY_ONLY_LAYER_CHANGED( root->SetBackgroundColor(arbitrary_color)); // Changing these properties does not cause the layer to be marked as changed // but does cause the layer to need to push properties. EXECUTE_AND_VERIFY_NEEDS_PUSH_PROPERTIES_AND_SUBTREE_DID_NOT_CHANGE( root->SetElementId(ElementId(2))); // After setting all these properties already, setting to the exact same // values again should not cause any change. EXECUTE_AND_VERIFY_SUBTREE_DID_NOT_CHANGE(root->SetMasksToBounds(true)); EXECUTE_AND_VERIFY_SUBTREE_DID_NOT_CHANGE( root->SetPosition(arbitrary_point_f)); EXECUTE_AND_VERIFY_SUBTREE_DID_NOT_CHANGE(root->SetContentsOpaque(true)); EXECUTE_AND_VERIFY_SUBTREE_DID_NOT_CHANGE(root->SetDrawsContent(true)); EXECUTE_AND_VERIFY_SUBTREE_DID_NOT_CHANGE(root->SetBounds(root->bounds())); } TEST(LayerImplTest, VerifyActiveLayerChangesAreTrackedProperly) { FakeImplTaskRunnerProvider task_runner_provider; TestTaskGraphRunner task_graph_runner; std::unique_ptr<LayerTreeFrameSink> layer_tree_frame_sink = FakeLayerTreeFrameSink::Create3d(); FakeLayerTreeHostImpl host_impl(&task_runner_provider, &task_graph_runner); host_impl.SetVisible(true); EXPECT_TRUE(host_impl.InitializeRenderer(layer_tree_frame_sink.get())); std::unique_ptr<LayerImpl> root_ptr = LayerImpl::Create(host_impl.active_tree(), 2); LayerImpl* root = root_ptr.get(); host_impl.active_tree()->SetRootLayerForTesting(std::move(root_ptr)); root->test_properties()->AddChild( LayerImpl::Create(host_impl.active_tree(), 7)); LayerImpl* child = root->test_properties()->children[0]; root->SetScrollable(gfx::Size(100, 100)); host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); // Make root the outer viewport container layer. This ensures the later call // to |SetViewportBoundsDelta| will be on a viewport layer. LayerTreeImpl::ViewportLayerIds viewport_ids; viewport_ids.outer_viewport_container = root->id(); host_impl.active_tree()->SetViewportLayersFromIds(viewport_ids); root->SetMasksToBounds(true); host_impl.active_tree()->property_trees()->needs_rebuild = true; host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); root->layer_tree_impl()->ResetAllChangeTracking(); // SetViewportBoundsDelta changes subtree only when masks_to_bounds is true. 
root->SetViewportBoundsDelta(gfx::Vector2d(222, 333)); EXPECT_TRUE(root->LayerPropertyChanged()); EXPECT_TRUE(root->LayerPropertyChangedFromPropertyTrees()); EXPECT_FALSE(root->LayerPropertyChangedNotFromPropertyTrees()); EXPECT_TRUE(host_impl.active_tree()->property_trees()->full_tree_damaged); root->SetMasksToBounds(false); host_impl.active_tree()->property_trees()->needs_rebuild = true; host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); root->layer_tree_impl()->ResetAllChangeTracking(); // SetViewportBoundsDelta does not change the subtree without masks_to_bounds. root->SetViewportBoundsDelta(gfx::Vector2d(333, 444)); EXPECT_TRUE(root->LayerPropertyChanged()); EXPECT_FALSE(root->LayerPropertyChangedFromPropertyTrees()); EXPECT_TRUE(root->LayerPropertyChangedNotFromPropertyTrees()); EXPECT_FALSE(host_impl.active_tree()->property_trees()->full_tree_damaged); host_impl.active_tree()->property_trees()->needs_rebuild = true; host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); root->layer_tree_impl()->ResetAllChangeTracking(); // Ensure some node is affected by the outer viewport bounds delta. This // ensures the later call to |SetViewportBoundsDelta| will require a // transform tree update. TransformTree& transform_tree = host_impl.active_tree()->property_trees()->transform_tree; transform_tree.AddNodeAffectedByOuterViewportBoundsDelta( child->transform_tree_index()); EXPECT_FALSE(transform_tree.needs_update()); root->SetViewportBoundsDelta(gfx::Vector2d(111, 222)); EXPECT_TRUE(transform_tree.needs_update()); host_impl.active_tree()->property_trees()->needs_rebuild = true; host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); root->layer_tree_impl()->ResetAllChangeTracking(); // Ensure scrolling changes the transform tree but does not damage all trees. 
root->ScrollBy(gfx::Vector2d(7, 9)); EXPECT_TRUE(transform_tree.needs_update()); EXPECT_TRUE(root->LayerPropertyChanged()); EXPECT_TRUE(root->LayerPropertyChangedFromPropertyTrees()); EXPECT_FALSE(root->LayerPropertyChangedNotFromPropertyTrees()); EXPECT_FALSE(host_impl.active_tree()->property_trees()->full_tree_damaged); } TEST(LayerImplTest, VerifyNeedsUpdateDrawProperties) { FakeImplTaskRunnerProvider task_runner_provider; TestTaskGraphRunner task_graph_runner; std::unique_ptr<LayerTreeFrameSink> layer_tree_frame_sink = FakeLayerTreeFrameSink::Create3d(); FakeLayerTreeHostImpl host_impl(&task_runner_provider, &task_graph_runner); host_impl.SetVisible(true); EXPECT_TRUE(host_impl.InitializeRenderer(layer_tree_frame_sink.get())); host_impl.active_tree()->SetRootLayerForTesting( LayerImpl::Create(host_impl.active_tree(), 1)); LayerImpl* root = host_impl.active_tree()->root_layer_for_testing(); std::unique_ptr<LayerImpl> layer_ptr = LayerImpl::Create(host_impl.active_tree(), 2); LayerImpl* layer = layer_ptr.get(); root->test_properties()->AddChild(std::move(layer_ptr)); layer->SetScrollable(gfx::Size(1, 1)); std::unique_ptr<LayerImpl> layer2_ptr = LayerImpl::Create(host_impl.active_tree(), 3); LayerImpl* layer2 = layer2_ptr.get(); root->test_properties()->AddChild(std::move(layer2_ptr)); host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); DCHECK(host_impl.CanDraw()); gfx::PointF arbitrary_point_f = gfx::PointF(0.125f, 0.25f); float arbitrary_number = 0.352f; gfx::Size arbitrary_size = gfx::Size(111, 222); gfx::Vector2d arbitrary_vector2d = gfx::Vector2d(111, 222); gfx::Size large_size = gfx::Size(1000, 1000); SkColor arbitrary_color = SkColorSetRGB(10, 20, 30); gfx::Transform arbitrary_transform; arbitrary_transform.Scale3d(0.1f, 0.2f, 0.3f); FilterOperations arbitrary_filters; arbitrary_filters.Append(FilterOperation::CreateOpacityFilter(0.5f)); // Set layer to draw content so that their draw property by property trees is // verified. VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetDrawsContent(true)); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(layer2->SetDrawsContent(true)); // Create a render surface, because we must have a render surface if we have // filters. layer->test_properties()->force_render_surface = true; host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); // Related filter functions. VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(host_impl.active_tree()->SetFilterMutated( root->element_id(), arbitrary_filters)); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES( host_impl.active_tree()->SetFilterMutated(root->element_id(), arbitrary_filters)); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(host_impl.active_tree()->SetFilterMutated( root->element_id(), FilterOperations())); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(host_impl.active_tree()->SetFilterMutated( root->element_id(), arbitrary_filters)); // Related scrolling functions. 
VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetBounds(large_size)); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetBounds(large_size)); host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); host_impl.active_tree()->set_needs_update_draw_properties(); host_impl.active_tree()->UpdateDrawProperties(); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(layer->ScrollBy(arbitrary_vector2d)); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES(layer->ScrollBy(gfx::Vector2d())); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES( layer->layer_tree_impl()->DidUpdateScrollOffset(layer->element_id())); layer->layer_tree_impl() ->property_trees() ->scroll_tree.SetScrollOffsetDeltaForTesting(layer->element_id(), gfx::Vector2dF()); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetCurrentScrollOffset( gfx::ScrollOffset(arbitrary_vector2d.x(), arbitrary_vector2d.y()))); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetCurrentScrollOffset( gfx::ScrollOffset(arbitrary_vector2d.x(), arbitrary_vector2d.y()))); // Unrelated functions, always set to new values, always set needs update. host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); host_impl.active_tree()->set_needs_update_draw_properties(); host_impl.active_tree()->UpdateDrawProperties(); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetMasksToBounds(true); layer->NoteLayerPropertyChanged()); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetContentsOpaque(true); layer->NoteLayerPropertyChanged()); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(layer2->SetPosition(arbitrary_point_f); layer->NoteLayerPropertyChanged()); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES( layer->SetBackgroundColor(arbitrary_color)); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES( host_impl.active_tree()->SetOpacityMutated(layer->element_id(), arbitrary_number)); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES( host_impl.active_tree()->SetTransformMutated(layer->element_id(), arbitrary_transform)); VERIFY_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetBounds(arbitrary_size); layer->NoteLayerPropertyChanged()); // Unrelated functions, set to the same values, no needs update. 
layer->test_properties()->filters = arbitrary_filters; host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES( host_impl.active_tree()->SetFilterMutated(layer->element_id(), arbitrary_filters)); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetMasksToBounds(true)); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetContentsOpaque(true)); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES( layer2->SetPosition(arbitrary_point_f)); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetDrawsContent(true)); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES( layer->SetBackgroundColor(arbitrary_color)); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetBounds(arbitrary_size)); VERIFY_NO_NEEDS_UPDATE_DRAW_PROPERTIES(layer->SetElementId(ElementId(2))); } TEST(LayerImplTest, SafeOpaqueBackgroundColor) { FakeImplTaskRunnerProvider task_runner_provider; TestTaskGraphRunner task_graph_runner; std::unique_ptr<LayerTreeFrameSink> layer_tree_frame_sink = FakeLayerTreeFrameSink::Create3d(); FakeLayerTreeHostImpl host_impl(&task_runner_provider, &task_graph_runner); host_impl.SetVisible(true); EXPECT_TRUE(host_impl.InitializeRenderer(layer_tree_frame_sink.get())); host_impl.active_tree()->SetRootLayerForTesting( LayerImpl::Create(host_impl.active_tree(), 1)); LayerImpl* layer = host_impl.active_tree()->root_layer_for_testing(); for (int contents_opaque = 0; contents_opaque < 2; ++contents_opaque) { for (int layer_opaque = 0; layer_opaque < 2; ++layer_opaque) { for (int host_opaque = 0; host_opaque < 2; ++host_opaque) { layer->SetContentsOpaque(!!contents_opaque); layer->SetBackgroundColor(layer_opaque ? SK_ColorRED : SK_ColorTRANSPARENT); host_impl.active_tree()->set_background_color( host_opaque ? SK_ColorRED : SK_ColorTRANSPARENT); host_impl.active_tree()->property_trees()->needs_rebuild = true; host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); SkColor safe_color = layer->SafeOpaqueBackgroundColor(); if (contents_opaque) { EXPECT_EQ(SkColorGetA(safe_color), 255u) << "Flags: " << contents_opaque << ", " << layer_opaque << ", " << host_opaque << "\n"; } else { EXPECT_NE(SkColorGetA(safe_color), 255u) << "Flags: " << contents_opaque << ", " << layer_opaque << ", " << host_opaque << "\n"; } } } } } TEST(LayerImplTest, PerspectiveTransformHasReasonableScale) { FakeImplTaskRunnerProvider task_runner_provider; TestTaskGraphRunner task_graph_runner; std::unique_ptr<LayerTreeFrameSink> layer_tree_frame_sink = FakeLayerTreeFrameSink::Create3d(); LayerTreeSettings settings; settings.layer_transforms_should_scale_layer_contents = true; FakeLayerTreeHostImpl host_impl(settings, &task_runner_provider, &task_graph_runner); auto owned_layer = LayerImpl::Create(host_impl.active_tree(), 1); LayerImpl* layer = owned_layer.get(); layer->SetBounds(gfx::Size(10, 10)); layer->set_contributes_to_drawn_render_surface(true); host_impl.active_tree()->SetRootLayerForTesting(std::move(owned_layer)); host_impl.active_tree()->BuildLayerListAndPropertyTreesForTesting(); // Ensure that we are close to the maximum scale for the matrix. { gfx::Transform transform; transform.Scale(10.2f, 15.1f); transform.ApplyPerspectiveDepth(10); layer->draw_properties().screen_space_transform = transform; ASSERT_TRUE(layer->ScreenSpaceTransform().HasPerspective()); EXPECT_FLOAT_EQ(15.f, layer->GetIdealContentsScale()); } // Ensure that we don't fall below the device scale factor. 
{ gfx::Transform transform; transform.Scale(0.1f, 0.2f); transform.ApplyPerspectiveDepth(10); layer->draw_properties().screen_space_transform = transform; ASSERT_TRUE(layer->ScreenSpaceTransform().HasPerspective()); EXPECT_FLOAT_EQ(1.f, layer->GetIdealContentsScale()); } // Ensure that large scales don't end up extremely large. { gfx::Transform transform; transform.Scale(10000.1f, 10000.2f); transform.ApplyPerspectiveDepth(10); layer->draw_properties().screen_space_transform = transform; ASSERT_TRUE(layer->ScreenSpaceTransform().HasPerspective()); EXPECT_FLOAT_EQ(127.f, layer->GetIdealContentsScale()); } // Test case from crbug.com/766021. { gfx::Transform transform(-0.9397f, -0.7019f, 0.2796f, 2383.4521f, // row 1 -0.0038f, 0.0785f, 1.0613f, 1876.4553f, // row 2 -0.0835f, 0.9081f, -0.4105f, -2208.3035f, // row 3 0.0001f, -0.0008f, 0.0003f, 2.8435f); // row 4 layer->draw_properties().screen_space_transform = transform; ASSERT_TRUE(layer->ScreenSpaceTransform().HasPerspective()); EXPECT_FLOAT_EQ(1.f, layer->GetIdealContentsScale()); } } class LayerImplScrollTest : public testing::Test { public: LayerImplScrollTest() : LayerImplScrollTest(LayerTreeSettings()) {} explicit LayerImplScrollTest(const LayerTreeSettings& settings) : host_impl_(settings, &task_runner_provider_, &task_graph_runner_), root_id_(7) { host_impl_.active_tree()->SetRootLayerForTesting( LayerImpl::Create(host_impl_.active_tree(), root_id_)); host_impl_.active_tree() ->root_layer_for_testing() ->test_properties() ->AddChild(LayerImpl::Create(host_impl_.active_tree(), root_id_ + 1)); // Set the max scroll offset by noting that the root layer has bounds (1,1), // thus whatever bounds are set for the layer will be the max scroll // offset plus 1 in each direction. host_impl_.active_tree()->root_layer_for_testing()->SetBounds( gfx::Size(1, 1)); layer()->SetScrollable(gfx::Size(1, 1)); gfx::Vector2d max_scroll_offset(51, 81); layer()->SetBounds(gfx::Size(max_scroll_offset.x(), max_scroll_offset.y())); host_impl_.active_tree()->BuildLayerListAndPropertyTreesForTesting(); } LayerImpl* layer() { return host_impl_.active_tree() ->root_layer_for_testing() ->test_properties() ->children[0]; } ScrollTree* scroll_tree(LayerImpl* layer_impl) { return &layer_impl->layer_tree_impl()->property_trees()->scroll_tree; } LayerTreeHostImpl& host_impl() { return host_impl_; } LayerTreeImpl* tree() { return host_impl_.active_tree(); } private: FakeImplTaskRunnerProvider task_runner_provider_; TestTaskGraphRunner task_graph_runner_; FakeLayerTreeHostImpl host_impl_; int root_id_; }; class CommitToPendingTreeLayerImplScrollTest : public LayerImplScrollTest { public: CommitToPendingTreeLayerImplScrollTest() : LayerImplScrollTest(settings()) {} LayerTreeSettings settings() { LayerTreeSettings tree_settings; tree_settings.commit_to_active_tree = false; return tree_settings; } }; TEST_F(LayerImplScrollTest, ScrollByWithZeroOffset) { // Test that LayerImpl::ScrollBy only affects ScrollDelta and total scroll // offset is bounded by the range [0, max scroll offset]. 
EXPECT_VECTOR_EQ(gfx::Vector2dF(), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ(gfx::Vector2dF(), scroll_tree(layer())->GetScrollOffsetBaseForTesting( layer()->element_id())); EXPECT_VECTOR_EQ(gfx::Vector2dF(), ScrollDelta(layer())); layer()->ScrollBy(gfx::Vector2dF(-100, 100)); EXPECT_VECTOR_EQ(gfx::Vector2dF(0, 80), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ(ScrollDelta(layer()), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ(gfx::Vector2dF(), scroll_tree(layer())->GetScrollOffsetBaseForTesting( layer()->element_id())); layer()->ScrollBy(gfx::Vector2dF(100, -100)); EXPECT_VECTOR_EQ(gfx::Vector2dF(50, 0), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ(ScrollDelta(layer()), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ(gfx::Vector2dF(), scroll_tree(layer())->GetScrollOffsetBaseForTesting( layer()->element_id())); } TEST_F(LayerImplScrollTest, ScrollByWithNonZeroOffset) { gfx::ScrollOffset scroll_offset(10, 5); scroll_tree(layer())->UpdateScrollOffsetBaseForTesting(layer()->element_id(), scroll_offset); EXPECT_VECTOR_EQ(scroll_offset, layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ(scroll_offset, scroll_tree(layer())->GetScrollOffsetBaseForTesting( layer()->element_id())); EXPECT_VECTOR_EQ(gfx::Vector2dF(), ScrollDelta(layer())); layer()->ScrollBy(gfx::Vector2dF(-100, 100)); EXPECT_VECTOR_EQ(gfx::Vector2dF(0, 80), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ( gfx::ScrollOffsetWithDelta(scroll_offset, ScrollDelta(layer())), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ(scroll_offset, scroll_tree(layer())->GetScrollOffsetBaseForTesting( layer()->element_id())); layer()->ScrollBy(gfx::Vector2dF(100, -100)); EXPECT_VECTOR_EQ(gfx::Vector2dF(50, 0), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ( gfx::ScrollOffsetWithDelta(scroll_offset, ScrollDelta(layer())), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ(scroll_offset, scroll_tree(layer())->GetScrollOffsetBaseForTesting( layer()->element_id())); } TEST_F(LayerImplScrollTest, ApplySentScrollsNoListener) { gfx::ScrollOffset scroll_offset(10, 5); gfx::Vector2dF scroll_delta(20.5f, 8.5f); gfx::Vector2d sent_scroll_delta(12, -3); scroll_tree(layer())->UpdateScrollOffsetBaseForTesting(layer()->element_id(), scroll_offset); layer()->ScrollBy(sent_scroll_delta); scroll_tree(layer())->CollectScrollDeltasForTesting(); layer()->SetCurrentScrollOffset(scroll_offset + gfx::ScrollOffset(scroll_delta)); EXPECT_VECTOR_EQ(gfx::ScrollOffsetWithDelta(scroll_offset, scroll_delta), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ(scroll_delta, ScrollDelta(layer())); EXPECT_VECTOR_EQ(scroll_offset, scroll_tree(layer())->GetScrollOffsetBaseForTesting( layer()->element_id())); scroll_tree(layer())->ApplySentScrollDeltasFromAbortedCommit(); EXPECT_VECTOR_EQ(gfx::ScrollOffsetWithDelta(scroll_offset, scroll_delta), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ(scroll_delta - sent_scroll_delta, ScrollDelta(layer())); EXPECT_VECTOR_EQ(gfx::ScrollOffsetWithDelta(scroll_offset, sent_scroll_delta), scroll_tree(layer())->GetScrollOffsetBaseForTesting( layer()->element_id())); } TEST_F(LayerImplScrollTest, ScrollUserUnscrollableLayer) { gfx::ScrollOffset scroll_offset(10, 5); gfx::Vector2dF scroll_delta(20.5f, 8.5f); layer()->test_properties()->user_scrollable_vertical = false; layer()->layer_tree_impl()->property_trees()->needs_rebuild = true; layer()->layer_tree_impl()->BuildLayerListAndPropertyTreesForTesting(); scroll_tree(layer())->UpdateScrollOffsetBaseForTesting(layer()->element_id(), scroll_offset); gfx::Vector2dF unscrolled = 
layer()->ScrollBy(scroll_delta); EXPECT_VECTOR_EQ(gfx::Vector2dF(0, 8.5f), unscrolled); EXPECT_VECTOR_EQ(gfx::Vector2dF(30.5f, 5), layer()->CurrentScrollOffset()); } TEST_F(CommitToPendingTreeLayerImplScrollTest, PushPropertiesToMirrorsCurrentScrollOffset) { gfx::ScrollOffset scroll_offset(10, 5); gfx::Vector2dF scroll_delta(12, 18); host_impl().CreatePendingTree(); scroll_tree(layer())->UpdateScrollOffsetBaseForTesting(layer()->element_id(), scroll_offset); gfx::Vector2dF unscrolled = layer()->ScrollBy(scroll_delta); EXPECT_VECTOR_EQ(gfx::Vector2dF(0, 0), unscrolled); EXPECT_VECTOR_EQ(gfx::Vector2dF(22, 23), layer()->CurrentScrollOffset()); scroll_tree(layer())->CollectScrollDeltasForTesting(); std::unique_ptr<LayerImpl> pending_layer = LayerImpl::Create(host_impl().sync_tree(), layer()->id()); pending_layer->SetElementId( LayerIdToElementIdForTesting(pending_layer->id())); scroll_tree(pending_layer.get()) ->UpdateScrollOffsetBaseForTesting(pending_layer->element_id(), layer()->CurrentScrollOffset()); pending_layer->PushPropertiesTo(layer()); EXPECT_VECTOR_EQ(gfx::Vector2dF(22, 23), layer()->CurrentScrollOffset()); EXPECT_VECTOR_EQ(layer()->CurrentScrollOffset(), pending_layer->CurrentScrollOffset()); } } // namespace } // namespace cc
14,205
11,356
<reponame>cookingcodewithme/turicreate /* Copyright © 2017 Apple Inc. All rights reserved. * * Use of this source code is governed by a BSD-3-clause license that can * be found in the LICENSE.txt file or at * https://opensource.org/licenses/BSD-3-Clause */ #ifndef TC_DISABLE_REMOTEFS #ifndef _WIN32 #include <arpa/inet.h> #else #include <ws2tcpip.h> #endif #include <boost/algorithm/string/classification.hpp> #include <boost/algorithm/string/predicate.hpp> #include <boost/algorithm/string/split.hpp> #include <boost/algorithm/string/trim.hpp> #include <boost/optional/optional_io.hpp> #include <boost/regex.hpp> #include <boost/tokenizer.hpp> #include <chrono> #include <core/logging/assertions.hpp> #include <core/logging/logger.hpp> #include <core/random/random.hpp> #include <core/storage/fileio/fs_utils.hpp> #include <core/storage/fileio/general_fstream.hpp> #include <core/storage/fileio/get_s3_endpoint.hpp> #include <core/storage/fileio/s3_api.hpp> #include <core/system/cppipc/server/cancel_ops.hpp> #include <fstream> #include <future> #include <memory> #include <regex> #include <string> #include <thread> /* aws */ #include <aws/core/Aws.h> #include <aws/core/auth/AWSCredentialsProvider.h> #include <aws/core/http/HttpResponse.h> #include <aws/s3/S3Client.h> #include <aws/s3/model/Delete.h> #include <aws/s3/model/DeleteObjectRequest.h> #include <aws/s3/model/DeleteObjectsRequest.h> #include <aws/s3/model/ListObjectsV2Request.h> #include <aws/s3/model/ListObjectsV2Result.h> using namespace Aws; using namespace Aws::S3; using namespace turi::fileio; namespace turi { namespace { /** * Check the string is a valid s3 bucket name using the following criteria from * http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html: * * 1 Bucket names must be at least 3 and no more than 63 characters long. * 2 Bucket names must be a series of one or more labels. * 3 Adjacent labels are separated by a single period (.). * 4 Bucket names can contain lowercase letters, numbers, and hyphens. * 5 Each label must start and end with a lowercase letter or a number. * 6 Bucket names must not be formatted as an IP address (e.g., 192.168.5.4). * * Amendment 1: * Uppercase letters are in fact fine... And it is in fact case sensitive. * Our test bucket Turi-Datasets breaks a couple of the rules above. * Tweaked to accept capital letters. * * Amendment 2: * underscores are fine too */ bool bucket_name_valid(const std::string& bucket_name) { // rule 1 if (bucket_name.size() < 3 || bucket_name.size() > 63) { return false; } // rule 2, 3 typedef boost::tokenizer<boost::char_separator<char> > tokenizer; boost::char_separator<char> sep("."); tokenizer labels(bucket_name, sep); tokenizer::iterator iter = labels.begin(); if (iter == labels.end()) { return false; } // rule 4, 5 auto label_valid = [](const std::string& label) { if (label.empty()) return false; using namespace std::regex_constants; auto alnum = [=](char x) { return (x <= 'Z' && x >= 'A') || (x <= 'z' && x >= 'a') || (x <= '9' && x >= '0'); }; auto alnum_underscore_or_hypen = [=](char x) { return x == '-' || x == '_' || alnum(x); }; // begin if (!alnum(*label.begin())) return false; // end if (!alnum(*(label.end() - 1))) return false; // everything in between for (size_t i = 1; i < label.size() - 1; ++i) { if (!alnum_underscore_or_hypen(label[i])) return false; } return true; }; while (iter != labels.end()) { if (!label_valid(*iter)) return false; ++iter; } // rule 6, to validate, let's try creating an ip address from the bucket name. 
  struct sockaddr_in sa;
  int result = inet_pton(AF_INET, bucket_name.c_str(), &(sa.sin_addr));
  if (result != 0) return false;

  return true;
}

}  // anonymous namespace

/*
 * @param: parsed_url output parameter, its state will be modified
 */
S3Client init_aws_sdk_with_turi_env(s3url& parsed_url) {
  // s3 client config
  // DefaultCredentialProviderChain
  Aws::Client::ClientConfiguration clientConfiguration;
  // a little bit too long, anyway
  clientConfiguration.requestTimeoutMs = 5 * 60000;
  clientConfiguration.connectTimeoutMs = 20000;

  if (turi::fileio::insecure_ssl_cert_checks()) {
    clientConfiguration.verifySSL = false;
  }

  if (!parsed_url.endpoint.empty()) {
    clientConfiguration.endpointOverride = parsed_url.endpoint.c_str();
  } else {
    auto env_var = get_endpoint_from_env();
    if (env_var) {
      clientConfiguration.endpointOverride = env_var->c_str();
      parsed_url.sdk_endpoint = env_var->c_str();
    }
  }

  // TODO: add proxy support
  // clientConfiguration.proxyHost = proxy.c_str();

  // set path or file for ssl certs
  if (!get_alternative_ssl_cert_file().empty()) {
    auto fstatus = get_file_status(get_alternative_ssl_cert_file());
    ASSERT_TRUE(fstatus.second.empty());
    if (fstatus.first == file_status::REGULAR_FILE) {
      clientConfiguration.caFile = get_alternative_ssl_cert_file().c_str();
    } else {
      std::stringstream ss;
      ss << "Invalid file for alternative SSL certificate. Value of "
            "TURI_FILEIO_ALTERNATIVE_SSL_CERT_FILE must be a regular file. "
         << get_alternative_ssl_cert_file() << " is not a regular file.";
      log_and_throw(ss.str());
    }
  }
  if (!get_alternative_ssl_cert_dir().empty()) {
    auto fstatus = get_file_status(get_alternative_ssl_cert_dir());
    ASSERT_TRUE(fstatus.second.empty());
    if (fstatus.first == file_status::DIRECTORY) {
      clientConfiguration.caPath = get_alternative_ssl_cert_dir().c_str();
    } else {
      std::stringstream ss;
      ss << "Invalid directory for alternative SSL certificates. Value of "
            "TURI_FILEIO_ALTERNATIVE_SSL_CERT_DIR must be a valid directory. "
         << get_alternative_ssl_cert_dir() << " is not a directory.";
      log_and_throw(ss.str());
    }
  }

  std::string region = fileio::get_region_name_from_endpoint(
      clientConfiguration.endpointOverride.c_str());
  if (!region.empty()) {
    clientConfiguration.region = region.c_str();
  } else {
    auto env_var = get_auth_region_from_env();
    if (env_var) {
      clientConfiguration.region = env_var->c_str();
      parsed_url.sdk_region = env_var->c_str();
    }
  }

  if (parsed_url.secret_key.empty()) {
    return S3Client(clientConfiguration,
                    /* default value */
                    Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never,
                    /* use virtual address */
                    false);
  } else {
    // credentials
    Aws::Auth::AWSCredentials credentials(parsed_url.access_key_id.c_str(),
                                          parsed_url.secret_key.c_str());
    return S3Client(credentials, clientConfiguration,
                    /* default value */
                    Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never,
                    /* use virtual address */
                    false);
  }
}

const std::vector<std::string> S3Operation::_enum_to_str = {
    "DeleteObjects", "ListObjects", "HeadObjects"};

/**
 * This splits a URL of the form
 * s3://[access_key_id]:[secret_key]:[endpoint/][bucket]/[object_name]
 * into several pieces.
 *
 * Returns true on success, false on failure.
*/ bool parse_s3url(const std::string& s3_url, s3url& ret, std::string& err_msg) { // must begin with s3:// auto url = s3_url; if (fileio::get_protocol(url) != "s3") { err_msg = url + " doesn't start with 's3://'"; return false; } // strip the s3:// url = url.substr(5); err_msg.clear(); // Extract the access key ID and secret key std::stringstream ss; size_t splitpos = url.find(':'); if (splitpos == std::string::npos) { ss << "Cannot find AWS_ACCESS_KEY_ID in the s3 url." << __FILE__ << " at " << __LINE__; err_msg = ss.str(); logstream(LOG_WARNING) << err_msg << std::endl; return false; } else { ret.access_key_id = url.substr(0, splitpos); url = url.substr(splitpos + 1); } // Extract the secret key splitpos = url.find(':'); if (splitpos == std::string::npos) { ss << "Cannot find SECRET_AWS_ACCESS_KEY in the s3 url." << __LINE__ << " at " << __FILE__; err_msg = ss.str(); logstream(LOG_WARNING) << err_msg << std::endl; return false; } else { ret.secret_key = url.substr(0, splitpos); url = url.substr(splitpos + 1); } boost::trim(url); if (url.empty()) { ss << "missing endpoint or bucket or object key in " << "s3://" << __FILE__ << " at" << __LINE__; err_msg = ss.str(); return false; } // this is a bad design auto original_url = sanitize_url(url); // The rest is parsed using boost::tokenizer typedef boost::tokenizer<boost::char_separator<char> > tokenizer; /* * keep extra token separators * since s3 is not like the UNIX director that redundant '/' will * be removed, e.g., * s3://key/gui///// is not same as s3://key/gui/. * However in linux, * dir//// is same as dir/ or dir * */ boost::char_separator<char> sep("/", 0, boost::keep_empty_tokens); tokenizer tokens(url, sep); tokenizer::iterator iter = tokens.begin(); if (iter == tokens.end()) { ss << "missing endpoint or bucket or object key in " << url << __FILE__ << "at" << __LINE__; err_msg = ss.str(); return false; } // Parse endpoints; since we support private cloud settings // url can be tricky; region (.*)com is not sufficient if (std::regex_match(*iter, std::regex("(.*)\\.(com|net)"))) { std::string endpoint = *iter; std::vector<std::string> subs; boost::algorithm::split(subs, endpoint, [](char c) { return c == '.'; }); bool is_valid = std::all_of( std::begin(subs), std::end(subs), [](const std::string& name) { return boost::regex_match(name, boost::regex("[[:w:]]+")); }); if (!is_valid) { ss << "endpoint name: " << endpoint << " contains invalid chars: " << url << " " << __FILE__ << " at" << __LINE__; err_msg = ss.str(); return false; } ret.endpoint = std::move(endpoint); ++iter; } // Parse bucket name if (iter == tokens.end()) { ss << "missing bucket name in " << '\'' << original_url << '\'' << " in " << __FILE__ << " at " << __LINE__; err_msg = ss.str(); return false; } if (!bucket_name_valid(*iter)) { ss << '\'' << original_url << '\'' << " has invalid bucket name: " << *iter; err_msg = ss.str(); logstream(LOG_WARNING) << err_msg << std::endl; return false; } ret.bucket = *iter; ++iter; // The rest part is the object key if (iter == tokens.end()) { // no object key return true; } ret.object_name = *iter; ++iter; while (iter != tokens.end()) { ret.object_name += "/" + *iter; ++iter; } // std::cout << "Access Key: " << ret.access_key_id << "\n" // << "Secret Key: " << ret.secret_key<< "\n" // << "Bucket: " << ret.bucket<< "\n" // << "Object: " << ret.object_name<< "\n" // << "Endpoint: " << ret.endpoint << "\n"; return true; } // The options we pass to aws cli for s3 commands // "us-east-1" is the us-standard and it works with buckets 
// from all regions
// "acl" grants the bucket owner full permission regardless of the uploader's
// account
static const std::string S3_COMMAND_OPTION =
    "--region us-east-1 --acl bucket-owner-full-control";

std::string validate_input_file(const std::string& local_file) {
  // Try to open the input file
  std::shared_ptr<turi::general_ifstream> fin(
      new turi::general_ifstream(local_file.c_str(),
                                 false));  // gzip_compressed.
                                           // We avoid decompressing the file
                                           // on transfer, i.e. if the file is
                                           // compressed/uncompressed to begin
                                           // with, let's keep it that way.

  // file cannot be opened
  if (!fin->good()) {
    return std::string("File ") + local_file + " cannot be opened.";
  }

  // get the file size. Return failure on failure.
  size_t file_size = fin->file_size();
  if (file_size == (size_t)(-1)) {
    return std::string("Size of file ") + local_file + " cannot be obtained.";
  }
  return "";
}

std::string validate_output_file(const std::string& local_file) {
  // Try to open the output file
  std::shared_ptr<turi::general_ofstream> fout(
      new turi::general_ofstream(local_file.c_str(),
                                 false));  // gzip_compressed.
                                           // We avoid recompressing the file
                                           // on transfer, i.e. if the file is
                                           // compressed/uncompressed to begin
                                           // with, let's keep it that way.
  // file cannot be opened
  if (!fout->good()) {
    // return a failure immediately.
    return std::string("File ") + local_file + " cannot be opened.";
  }
  return "";
}

/**
 * Wrap the path in double quotes, backslash-escaping any single quotes
 * inside it.
 */
std::string quote_and_escape_path(const std::string& path) {
  // s3 keys are at most 1024 bytes; three times that should comfortably
  // cover the length of the escaped path s3://bucket_name/key
  const size_t MAX_ESCAPED_LENGTH = 1024 * 3;
  std::string ret;
  ret.reserve(path.size() + 2);
  ret += '"';  // begin quote
  for (const auto& c : path) {
    if (c == '\'') {
      ret += '\\';  // escape single quote
    }
    ret += c;
    if (ret.size() >= MAX_ESCAPED_LENGTH) {
      throw("Invalid path: exceeds length limit");
    }
  }
  ret += '"';  // end quote
  return ret;
}

list_objects_response list_objects_impl(const s3url& parsed_url,
                                        std::string proxy,
                                        std::string endpoint) {
  // do not modify the parsed_url because string_from_s3url() will
  // be called on it to retrieve its original url (prefix) in is_directory.
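  //
  // Clarifying note (added comment, not in the original source): an endpoint
  // embedded in the URL takes precedence; the caller-supplied `endpoint`
  // argument is only a fallback, and the same fallback pattern applies to
  // `proxy` below.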
  // let init_aws_sdk_with_turi_env see the endpoint override
  auto temp_url = parsed_url;
  if (temp_url.endpoint.empty()) {
    temp_url.endpoint = endpoint;
  }
  if (!temp_url.sdk_proxy || temp_url.sdk_proxy->empty()) {
    temp_url.sdk_proxy = proxy;
  }

  S3Client client = init_aws_sdk_with_turi_env(temp_url);

  list_objects_response ret;
  Aws::S3::Model::ListObjectsV2Request request;
  request.WithBucket(parsed_url.bucket.c_str());
  request.WithPrefix(parsed_url.object_name.c_str());
  request.WithDelimiter("/");  // separate objects from directories

  bool moreResults = false;
  bool success = false;
  int backoff = 50;  // ms
  int n_retry = 0;

  do {
    n_retry = 0;
    do {
      auto outcome = client.ListObjectsV2(request);
      success = outcome.IsSuccess();
      if (success) {
        auto result = outcome.GetResult();
        // now iterate through found objects - these are files
        Aws::Vector<Aws::S3::Model::Object> objects;
        objects = result.GetContents();
        for (auto const& o : objects) {
          ret.objects.push_back(std::string(o.GetKey().c_str()));
          std::stringstream stream;
          stream << o.GetLastModified().Millis();
          ret.objects_last_modified.push_back(stream.str());
          ret.objects_size.push_back(o.GetSize());
        }

        // now iterate through common prefixes - these are directories
        Aws::Vector<Aws::S3::Model::CommonPrefix> prefixes;
        prefixes = result.GetCommonPrefixes();
        for (auto const& p : prefixes) {
          std::string key = std::string(p.GetPrefix().c_str());
          // strip the ending "/" on a directory
          if (boost::ends_with(key, "/")) key = key.substr(0, key.length() - 1);
          ret.directories.push_back(key);
        }

        // more results to retrieve
        moreResults = result.GetIsTruncated();
        if (moreResults) {
          // add to the request object with continuation token
          request.WithContinuationToken(result.GetContinuationToken());
        }
        // break out of the retry loop
        break;
      } else {
        auto error = outcome.GetError();
        /*
         * Unlike CoreErrors, the SDK never retries on S3Errors.
         * A retry decision can be based on the HTTP code or the HTTP body.
         *
         * 1. if the SDK uses the HTTP code, e.g., ShouldRetry returns true on
         * 429, we don't need to retry on our own since the SDK already decided
         * to retry based on the HTTP code.
         *
         * 2. if the SDK doesn't use the HTTP code but the HTTP body, we check
         * the HTTP response on our own when the SDK doesn't think it should
         * retry based on messages in the HTTP body.
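         *
         * Illustrative timing (added note, not from the original docs): with
         * `backoff` starting at 50 ms and doubling after each failed attempt,
         * the local retry loop sleeps roughly 50 ms and then 100 ms before
         * the third and final attempt gives up.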
         * Check https://guihao-liang.github.io/2020-04-12-aws-s3-retry/
         *
         */
        if (!error.ShouldRetry()) {
          // SDK didn't retry for us; check retry on our own,
          // especially for non-standard AWS error replies
          if (error.GetErrorType() == Aws::S3::S3Errors::UNKNOWN &&
              error.GetResponseCode() ==
                  Aws::Http::HttpResponseCode::TOO_MANY_REQUESTS) {
            n_retry++;
          } else {
            // it's a standard AWS error; stop retrying immediately
            // and report to the user accordingly
            n_retry = 3;
          }

          if (n_retry >= 3) {
            // amend the error msg on the last retry failure
            std::stringstream ss;
            reportS3ErrorDetailed(ss, temp_url, S3Operation::List, outcome)
                << std::endl;
            ret.error = ss.str();
            logstream(LOG_DEBUG)
                << "list_objects_impl failed:" << ret.error << std::endl;
          } else {
            // continue retrying
            std::this_thread::sleep_for(std::chrono::milliseconds(backoff));
            backoff *= 2;
          }
        } else {
          // error.ShouldRetry() == true
          // AWS SDK already retried 3 times
          std::stringstream ss;
          reportS3ErrorDetailed(ss, temp_url, S3Operation::List, outcome)
              << std::endl;
          ret.error = ss.str();
          logstream(LOG_DEBUG)
              << "list_objects_impl failed:" << ret.error << std::endl;
          // no need to retry; this break plus success == false
          // exits both loops
          break;
        }
      }
    } while (n_retry < 3);  // finished retrying
  } while (moreResults && success);

  for (auto& dir : ret.directories) {
    s3url dirurl = parsed_url;
    dirurl.object_name = dir;
    // not strictly necessary; this normalizes what s3 returned
    dir = dirurl.string_from_s3url();
  }

  for (auto& object : ret.objects) {
    s3url objurl = parsed_url;
    objurl.object_name = object;
    object = objurl.string_from_s3url();
  }

  return ret;
}

/// returns an error string on failure. Empty string on success
std::string delete_object_impl(const s3url& parsed_url, std::string proxy,
                               std::string endpoint) {
  // do not modify the parsed_url because string_from_s3url() will
  // be called on it to retrieve its original url (prefix) in is_directory.
  // let init_aws_sdk_with_turi_env see the endpoint override
  auto temp_url = parsed_url;
  if (temp_url.endpoint.empty()) {
    temp_url.endpoint = endpoint;
  }
  if (!temp_url.sdk_proxy || temp_url.sdk_proxy->empty()) {
    temp_url.sdk_proxy = proxy;
  }

  S3Client client = init_aws_sdk_with_turi_env(temp_url);

  std::string ret;
  Aws::S3::Model::DeleteObjectRequest request;
  request.WithBucket(parsed_url.bucket.c_str());
  request.WithKey(parsed_url.object_name.c_str());
  auto outcome = client.DeleteObject(request);
  if (!outcome.IsSuccess()) {
    std::stringstream ss;
    reportS3ErrorDetailed(ss, temp_url, S3Operation::Delete, outcome)
        << std::endl;
    ret = ss.str();
  }
  return ret;
}

/// returns an error string on failure. Empty string on success
std::string delete_prefix_impl(const s3url& parsed_url, std::string proxy,
                               std::string endpoint) {
  // do not modify the parsed_url because string_from_s3url() will
  // be called on it to retrieve its original url (prefix) in is_directory.
  // let init_aws_sdk_with_turi_env see the endpoint override
  auto temp_url = parsed_url;
  if (temp_url.endpoint.empty()) {
    temp_url.endpoint = endpoint;
  }
  if (!temp_url.sdk_proxy || temp_url.sdk_proxy->empty()) {
    temp_url.sdk_proxy = proxy;
  }

  S3Client client = init_aws_sdk_with_turi_env(temp_url);

  std::string ret;
  Aws::S3::Model::ListObjectsV2Request request;
  request.WithBucket(parsed_url.bucket.c_str());
  request.WithPrefix(parsed_url.object_name.c_str());

  // keep retrieving objects until no more objects match the query
  bool moreResults = false;
  Aws::S3::Model::Delete deleteObjects;
  do {
    auto outcome = client.ListObjectsV2(request);
    if (outcome.IsSuccess()) {
      auto result = outcome.GetResult();
      // now iterate through found objects and construct a DeleteObjects
      // request with them
      auto objects = result.GetContents();
      for (auto const& o : objects) {
        Aws::S3::Model::ObjectIdentifier key;
        deleteObjects.AddObjects(key.WithKey(o.GetKey()));
      }
      // more results to retrieve
      moreResults = result.GetIsTruncated();
      if (moreResults) {
        // add to the request object with continuation token
        request.WithContinuationToken(result.GetContinuationToken());
      }
    } else {
      std::stringstream ss;
      reportS3ErrorDetailed(ss, temp_url, S3Operation::List, outcome)
          << std::endl;
      ret = ss.str();
      // stop paging on error; otherwise a stale moreResults from a
      // previous page could loop forever
      break;
    }
  } while (moreResults);

  if (deleteObjects.GetObjects().size() > 0) {
    Aws::S3::Model::DeleteObjectsRequest delRequest;
    delRequest.WithBucket(parsed_url.bucket.c_str());
    delRequest.WithDelete(deleteObjects);
    auto outcome = client.DeleteObjects(delRequest);
    if (!outcome.IsSuccess()) {
      std::stringstream ss;
      reportS3ErrorDetailed(ss, parsed_url, S3Operation::Delete, outcome)
          << std::endl;
      ret = ss.str();
    }
  }

  return ret;
}

list_objects_response list_objects(std::string url, std::string proxy) {
  s3url parsed_url;
  list_objects_response ret;
  std::string err_msg;
  bool success = parse_s3url(url, parsed_url, err_msg);
  if (!success) {
    ret.error = err_msg;
    return ret;
  }

  logstream(LOG_DEBUG) << "list_objects: " << url << std::endl;

  size_t current_endpoint = 0;
  // try all endpoints one at a time (this also avoids hammering a single
  // endpoint's rate limit)
  auto endpoints = get_s3_endpoints();
  do {
    ret = list_objects_impl(parsed_url, proxy, endpoints[current_endpoint]);
    ++current_endpoint;
  } while (boost::algorithm::icontains(ret.error, "PermanentRedirect") &&
           current_endpoint < endpoints.size());

  return ret;
}

std::pair<file_status, list_objects_response> is_directory(std::string url,
                                                           std::string proxy) {
  s3url parsed_url;
  list_objects_response ret;
  std::string err_msg;
  bool success = parse_s3url(url, parsed_url, err_msg);
  if (!success) {
    ret.error = std::move(err_msg);
    return {file_status::MISSING, ret};
  }

  /* if there are no "/"s it is just a top-level bucket.
   * list_objects_impl will remove the ending '/',
   * e.g., dir/ -> dir.
   * In turicreate convention, dir should not have a trailing '/';
   * refer to dir_archive::init_for_read.
   */

  // remove credentials
  url = parsed_url.string_from_s3url();
  logstream(LOG_DEBUG) << "compare on url: " << url << std::endl;
  if (url.length() > 5 && url.back() == '/') url.pop_back();

  list_objects_response response = list_objects(url, proxy);

  // an error occurred
  if (!response.error.empty()) {
    return {file_status::MISSING, response};
  }

  // it's a top-level bucket name
  if (parsed_url.object_name.empty()) {
    return {file_status::DIRECTORY, response};
  }

  // is a directory
  for (const auto& dir : response.directories) {
    if (dir == url) {
      return {file_status::DIRECTORY, response};
    }
  }

  // is an object
  for (const auto& object : response.objects) {
    if (object == url) {
      return {file_status::REGULAR_FILE, response};
    }
  }
  // not found
  // s3 list-objects is silent when the prefix doesn't exist
  if (response.error.empty()) {
    std::stringstream ss;
    ss << sanitize_url(url)
       << " has no objects or directories. Consider creating the prefix and "
          "trying again.";
    response.error = ss.str();
  }
  return {file_status::MISSING, response};
}

list_objects_response list_directory(std::string url, std::string proxy) {
  s3url parsed_url;
  list_objects_response ret;
  std::string err_msg;
  bool success = parse_s3url(url, parsed_url, err_msg);
  if (!success) {
    ret.error = err_msg;
    return ret;
  }

  // normalize the URL so it doesn't matter if you put strange "/"s at the end
  url = parsed_url.string_from_s3url();
  auto isdir = is_directory(url, proxy);
  // if not found.
  if (isdir.first == file_status::MISSING) return isdir.second;

  // if it's a directory
  if (isdir.first == file_status::DIRECTORY) {
    // if there are no "/"s it is a top-level bucket and we don't need
    // to mess with prefixes to get the contents
    if (!parsed_url.object_name.empty()) {
      parsed_url.object_name = parsed_url.object_name + "/";
    }

    size_t current_endpoint = 0;
    // try all endpoints
    auto endpoints = get_s3_endpoints();
    do {
      ret = list_objects_impl(parsed_url, proxy, endpoints[current_endpoint]);
      ++current_endpoint;
    } while (boost::algorithm::icontains(ret.error, "PermanentRedirect") &&
             current_endpoint < endpoints.size());
  } else {
    ret.objects.push_back(url);
  }
  return ret;
}

std::string delete_object(std::string url, std::string proxy) {
  s3url parsed_url;
  std::string err_msg;
  bool success = parse_s3url(url, parsed_url, err_msg);
  if (!success) {
    return err_msg;
  }

  // try all endpoints
  size_t current_endpoint = 0;
  auto endpoints = get_s3_endpoints();
  do {
    err_msg =
        delete_object_impl(parsed_url, proxy, endpoints[current_endpoint]);
    ++current_endpoint;
  } while (boost::algorithm::icontains(err_msg, "PermanentRedirect") &&
           current_endpoint < endpoints.size());

  return err_msg;
}

std::string delete_prefix(std::string url, std::string proxy) {
  s3url parsed_url;
  std::string err_msg;
  bool success = parse_s3url(url, parsed_url, err_msg);
  if (!success) {
    return err_msg;
  }

  // try all endpoints
  size_t current_endpoint = 0;
  auto endpoints = get_s3_endpoints();
  do {
    err_msg =
        delete_prefix_impl(parsed_url, proxy, endpoints[current_endpoint]);
    ++current_endpoint;
  } while (boost::algorithm::icontains(err_msg, "PermanentRedirect") &&
           current_endpoint < endpoints.size());

  return err_msg;
}

std::string sanitize_s3_url_aggressive(std::string url) {
  // must begin with s3://
  if (fileio::get_protocol(url) != "s3") {
    return url;
  }
  // strip the s3://
  url = url.substr(5);

  // strip the secret key and the access key following the usual rules.
  size_t splitpos = url.find(':');
  if (splitpos != std::string::npos) url = url.substr(splitpos + 1);

  splitpos = url.find(':');
  if (splitpos != std::string::npos) url = url.substr(splitpos + 1);

  // now, a user error is possible where ":" shows up inside the
  // secret key / access key, thus leaking part of a key in the logs.
  // so we also perform a more aggressive truncation.
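  //
  // Illustrative example (added comment; the key material is made up): for
  // "s3://AK:se:cr:et:bucket/obj" the two strips above leave
  // "cr:et:bucket/obj", and the truncation below reduces it to
  // "bucket/obj" before "s3://" is re-attached.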
// find the first "/" and delete everything up to the last ":" // before the first "/" size_t bucketend = url.find('/'); if (bucketend == std::string::npos) bucketend = url.length(); size_t last_colon = url.find_last_of(':', bucketend); if (last_colon != std::string::npos) url = url.substr(last_colon + 1); return "s3://" + url; } std::string sanitize_s3_url(const std::string& url) { s3url parsed_url; std::string err_msg; auto ret = parse_s3url(url, parsed_url, err_msg); DASSERT_EQ(ret, err_msg.empty()); if (ret) { if (parsed_url.endpoint.empty()) return "s3://" + parsed_url.bucket + "/" + parsed_url.object_name; else return "s3://" + parsed_url.endpoint + "/" + parsed_url.bucket + "/" + parsed_url.object_name; } else { return sanitize_s3_url_aggressive(url); }; } std::string get_s3_file_last_modified(const std::string& url) { list_objects_response response = list_objects(url); if (response.error.empty() && response.objects_last_modified.size() == 1) { return response.objects_last_modified[0]; } else if (!response.error.empty()) { logstream(LOG_WARNING) << "List object error: " << response.error << " " << __FILE__ << " at " << __LINE__ << std::endl; throw(response.error); } return ""; } } // namespace turi #endif
import argparse
import json
import os
from collections import deque, namedtuple
from enum import Enum

import bddl
import printree
import pyinstrument

import igibson
from igibson import object_states
from igibson.activity.activity_base import iGBEHAVIORActivityInstance
from igibson.activity.bddl_backend import ObjectStateBinaryPredicate, ObjectStateUnaryPredicate
from igibson.examples.behavior import behavior_demo_replay
from igibson.object_states import ROOM_STATES, factory
from igibson.object_states.object_state_base import AbsoluteObjectState, BooleanState, RelativeObjectState
from igibson.robots.behavior_robot import BRBody

StateRecord = namedtuple("StateRecord", ["state_type", "objects", "value"])
StateEntry = namedtuple("StateEntry", ["frame_count", "state_records"])
Segment = namedtuple("Segment", ["start", "duration", "end", "state_records", "sub_segments"])


class SegmentationObjectSelection(Enum):
    ALL_OBJECTS = 1
    TASK_RELEVANT_OBJECTS = 2
    ROBOTS = 3


class SegmentationStateSelection(Enum):
    ALL_STATES = 1
    GOAL_CONDITION_RELEVANT_STATES = 2


class SegmentationStateDirection(Enum):
    BOTH_DIRECTIONS = 1
    FALSE_TO_TRUE = 2
    TRUE_TO_FALSE = 3


STATE_DIRECTIONS = {
    # Note that some of these states already only go False-to-True so they are left as BOTH_DIRECTIONS
    # so as not to add filtering work.
    object_states.Burnt: SegmentationStateDirection.BOTH_DIRECTIONS,
    object_states.Cooked: SegmentationStateDirection.BOTH_DIRECTIONS,
    object_states.Dusty: SegmentationStateDirection.BOTH_DIRECTIONS,
    object_states.Frozen: SegmentationStateDirection.BOTH_DIRECTIONS,
    object_states.InFOVOfRobot: SegmentationStateDirection.FALSE_TO_TRUE,
    object_states.InHandOfRobot: SegmentationStateDirection.FALSE_TO_TRUE,
    object_states.InReachOfRobot: SegmentationStateDirection.FALSE_TO_TRUE,
    object_states.InSameRoomAsRobot: SegmentationStateDirection.FALSE_TO_TRUE,
    object_states.Inside: SegmentationStateDirection.FALSE_TO_TRUE,
    object_states.NextTo: SegmentationStateDirection.FALSE_TO_TRUE,
    # OnFloor: SegmentationStateDirection.FALSE_TO_TRUE,
    object_states.OnTop: SegmentationStateDirection.FALSE_TO_TRUE,
    object_states.Open: SegmentationStateDirection.BOTH_DIRECTIONS,
    object_states.Sliced: SegmentationStateDirection.BOTH_DIRECTIONS,
    object_states.Soaked: SegmentationStateDirection.BOTH_DIRECTIONS,
    object_states.Stained: SegmentationStateDirection.BOTH_DIRECTIONS,
    object_states.ToggledOn: SegmentationStateDirection.BOTH_DIRECTIONS,
    # Touching: SegmentationStateDirection.BOTH_DIRECTIONS,
    object_states.Under: SegmentationStateDirection.FALSE_TO_TRUE,
}
STATE_DIRECTIONS.update({state: SegmentationStateDirection.FALSE_TO_TRUE for state in ROOM_STATES})

ALLOWED_SUB_SEGMENTS_BY_STATE = {
    object_states.Burnt: {object_states.OnTop, object_states.ToggledOn, object_states.Open, object_states.Inside},
    object_states.Cooked: {object_states.OnTop, object_states.ToggledOn, object_states.Open, object_states.Inside},
    object_states.Dusty: {object_states.InSameRoomAsRobot, object_states.InReachOfRobot, object_states.InHandOfRobot},
    object_states.Frozen: {
        object_states.InReachOfRobot,
        object_states.OnTop,
        object_states.ToggledOn,
        object_states.Open,
        object_states.Inside,
    },
    object_states.InFOVOfRobot: {},
    object_states.InHandOfRobot: {},
    object_states.InReachOfRobot: {},
    object_states.InSameRoomAsRobot: {},
    object_states.Inside: {
        object_states.Open,
        object_states.InSameRoomAsRobot,
        object_states.InReachOfRobot,
        object_states.InHandOfRobot,
    },
    object_states.NextTo: {object_states.InSameRoomAsRobot, object_states.InReachOfRobot,
                           object_states.InHandOfRobot},
    # OnFloor: {object_states.InSameRoomAsRobot, object_states.InReachOfRobot, object_states.InHandOfRobot},
    object_states.OnTop: {object_states.InSameRoomAsRobot, object_states.InReachOfRobot, object_states.InHandOfRobot},
    object_states.Open: {object_states.InSameRoomAsRobot, object_states.InReachOfRobot, object_states.InHandOfRobot},
    object_states.Sliced: {object_states.InSameRoomAsRobot, object_states.InReachOfRobot, object_states.InHandOfRobot},
    object_states.Soaked: {
        object_states.ToggledOn,
        object_states.InSameRoomAsRobot,
        object_states.InReachOfRobot,
        object_states.InHandOfRobot,
    },
    object_states.Stained: {
        object_states.Soaked,
        object_states.InSameRoomAsRobot,
        object_states.InReachOfRobot,
        object_states.InHandOfRobot,
    },
    object_states.ToggledOn: {object_states.InSameRoomAsRobot, object_states.InReachOfRobot},
    # Touching: {object_states.InSameRoomAsRobot, object_states.InReachOfRobot, object_states.InHandOfRobot},
    object_states.Under: {object_states.InSameRoomAsRobot, object_states.InReachOfRobot, object_states.InHandOfRobot},
}


def process_states(objects, state_types):
    """Collect the current boolean value of every tracked state on the given objects."""
    predicate_states = set()

    for obj in objects:
        for state_type in state_types:
            if state_type not in obj.states:
                continue

            assert issubclass(state_type, BooleanState)
            state = obj.states[state_type]
            if isinstance(state, AbsoluteObjectState):
                # Add only one instance of absolute state
                try:
                    value = bool(state.get_value())
                    record = StateRecord(state_type, (obj,), value)
                    predicate_states.add(record)
                except ValueError:
                    pass
            elif isinstance(state, RelativeObjectState):
                # Add one instance per state pair
                for other in objects:
                    try:
                        value = state.get_value(other)
                        record = StateRecord(state_type, (obj, other), value)
                        predicate_states.add(record)
                    except ValueError:
                        pass
            else:
                raise ValueError("Unusable state for segmentation.")

    return predicate_states


def _get_goal_condition_states(igbhvr_act_inst: iGBEHAVIORActivityInstance):
    state_types = set()

    q = deque()
    q.extend(igbhvr_act_inst.goal_conditions)

    while q:
        pred = q.popleft()
        if isinstance(pred, (ObjectStateUnaryPredicate, ObjectStateBinaryPredicate)):
            state_types.add(pred.STATE_CLASS)

        q.extend(pred.children)

    return state_types


class DemoSegmentationProcessor(object):
    def __init__(
        self,
        state_types=None,
        object_selection=SegmentationObjectSelection.TASK_RELEVANT_OBJECTS,
        label_by_instance=False,
        hierarchical=False,
        diff_initial=False,
        state_directions=STATE_DIRECTIONS,
        profiler=None,
    ):
        self.state_history = []
        self.last_state = None

        self.state_types_option = state_types
        self.state_types = None  # To be populated in start_callback().
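        # Clarifying note (added comment, not in the original source):
        # state_history accumulates one StateEntry per frame in which at least
        # one tracked predicate gained a value relative to the previous entry;
        # segments are later derived from diffs of consecutive entries.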
self.state_directions = state_directions self.object_selection = object_selection self.label_by_instance = label_by_instance self.hierarchical = hierarchical self.all_state_types = None if diff_initial: self.state_history.append(StateEntry(0, set())) self.last_state = set() self.profiler = profiler def start_callback(self, igbhvr_act_inst, _): self.all_state_types = [ state for state in factory.get_all_states() if ( issubclass(state, BooleanState) and (issubclass(state, AbsoluteObjectState) or issubclass(state, RelativeObjectState)) ) ] if isinstance(self.state_types_option, list) or isinstance(self.state_types_option, set): self.state_types = self.state_types_option elif self.state_types_option == SegmentationStateSelection.ALL_STATES: self.state_types = self.all_state_types elif self.state_types_option == SegmentationStateSelection.GOAL_CONDITION_RELEVANT_STATES: self.state_types = _get_goal_condition_states(igbhvr_act_inst) else: raise ValueError("Unknown segmentation state selection.") def step_callback(self, igbhvr_act_inst, _): if self.profiler: self.profiler.start() if self.object_selection == SegmentationObjectSelection.TASK_RELEVANT_OBJECTS: objects = [obj for obj in igbhvr_act_inst.object_scope.values() if not isinstance(obj, BRBody)] elif self.object_selection == SegmentationObjectSelection.ROBOTS: objects = [obj for obj in igbhvr_act_inst.object_scope.values() if isinstance(obj, BRBody)] elif self.object_selection == SegmentationObjectSelection.ALL_OBJECTS: objects = igbhvr_act_inst.simulator.scene.get_objects() else: raise ValueError("Incorrect SegmentationObjectSelection %r" % self.object_selection) # Get the processed state. state_types_to_use = self.state_types if not self.hierarchical else self.all_state_types processed_state = process_states(objects, state_types_to_use) if self.last_state is None or (processed_state - self.last_state): self.state_history.append(StateEntry(igbhvr_act_inst.simulator.frame_count, processed_state)) self.last_state = processed_state if self.profiler: self.profiler.stop() def obj2str(self, obj): return obj.name if self.label_by_instance else obj.category def _hierarchical_segments(self, state_entries, state_types): if not state_types: return [] segments = [] before_idx = 0 after_idx = 1 # Keep iterating until we reach the end of our state entries. while after_idx < len(state_entries): # Get the state entries at these keys. before = state_entries[before_idx] after = state_entries[after_idx] # Check if there is a valid diff at this range. diffs = self.filter_diffs(after.state_records - before.state_records, state_types) if diffs is not None: # If there is a diff, prepare to do sub-segmentation on the segment. sub_segment_states = set() if self.hierarchical: for state_record in diffs: corresponding_sub_states = ALLOWED_SUB_SEGMENTS_BY_STATE[state_record.state_type] sub_segment_states.update(corresponding_sub_states) sub_segments = self._hierarchical_segments( state_entries[before_idx : after_idx + 1], sub_segment_states ) segments.append( Segment( before.frame_count, after.frame_count - before.frame_count, after.frame_count, diffs, sub_segments, ) ) # Continue segmentation by moving the before_idx to start here. before_idx = after_idx # Increase the range of elements we're looking at by one. 
            after_idx += 1

        return segments

    def get_segments(self):
        segments = self._hierarchical_segments(self.state_history, self.state_types)
        if not segments:
            # No transitions were detected; return an empty root segment
            # rather than indexing into an empty list below.
            return Segment(0, 0, 0, [], [])
        return Segment(segments[0].start, segments[-1].end - segments[0].start, segments[-1].end, [], segments)

    def filter_diffs(self, state_records, state_types):
        """Keep only the state records whose type is tracked and whose
        transition direction matches the configured direction filter."""
        new_records = set()

        # Go through the records in the segment.
        for state_record in state_records:
            # Check if the state type is on our list
            if state_record.state_type not in state_types:
                continue

            # Check whether the transition direction is one we keep.
            mode = self.state_directions[state_record.state_type]
            accept = True
            if mode == SegmentationStateDirection.FALSE_TO_TRUE:
                accept = state_record.value
            elif mode == SegmentationStateDirection.TRUE_TO_FALSE:
                accept = not state_record.value

            # If the record matches the filter, keep it.
            if accept:
                new_records.add(state_record)

        # If we haven't kept any of this segment's records, drop the segment.
        if not new_records:
            return None

        return new_records

    def _serialize_segment(self, segment):
        stringified_entries = [
            {
                "name": state_record.state_type.__name__,
                "objects": [self.obj2str(obj) for obj in state_record.objects],
                "value": state_record.value,
            }
            for state_record in segment.state_records
        ]

        return {
            "start": segment.start,
            "end": segment.end,
            "duration": segment.duration,
            "state_records": stringified_entries,
            "sub_segments": [self._serialize_segment(sub_segment) for sub_segment in segment.sub_segments],
        }

    def _segment_to_dict_tree(self, segment, output_dict):
        stringified_entries = [
            (
                state_record.state_type.__name__,
                ", ".join(obj.category for obj in state_record.objects),
                state_record.value,
            )
            for state_record in segment.state_records
        ]

        entry_strs = ["%s(%r) = %r" % entry for entry in stringified_entries]
        key = "%d-%d: %s" % (segment.start, segment.end, ", ".join(entry_strs))
        sub_segments = {}
        for sub in segment.sub_segments:
            self._segment_to_dict_tree(sub, sub_segments)
        output_dict[key] = sub_segments

    def serialize_segments(self):
        # Make the root call to the recursive function.
        return self._serialize_segment(self.get_segments())

    def __str__(self):
        out = ""
        out += "---------------------------------------------------\n"
        out += "Segmentation of %s\n" % self.object_selection.name
        out += "Considered states: %s\n" % ", ".join(x.__name__ for x in self.state_types)
        out += "---------------------------------------------------\n"
        output = {}
        self._segment_to_dict_tree(self.get_segments(), output)
        out += printree.ftree(output) + "\n"
        out += "---------------------------------------------------\n"
        return out


def parse_args():
    parser = argparse.ArgumentParser(description="Run segmentation on an ATUS demo.")
    parser.add_argument(
        "--log_path", type=str, help="Path (and filename) of log to replay. If empty, test demo will be used."
    )
    parser.add_argument(
        "--out_dir", type=str, help="Directory to store results in. If empty, test directory will be used."
    )
    parser.add_argument(
        "--profile",
        action="store_true",
        help="Whether to profile the segmentation, outputting a profile HTML in the out path.",
    )
    return parser.parse_args()


def get_default_segmentation_processors(profiler=None):
    # This applies a "flat" segmentation (i.e., not hierarchical) using only the states supported by our magic motion
    # primitives.
flat_states = [ object_states.Open, object_states.OnTop, object_states.Inside, object_states.InHandOfRobot, object_states.InReachOfRobot, ] flat_object_segmentation = DemoSegmentationProcessor( flat_states, SegmentationObjectSelection.TASK_RELEVANT_OBJECTS, label_by_instance=True, profiler=profiler ) # This applies a hierarchical segmentation based on goal condition states. It's WIP and currently unused. goal_segmentation = DemoSegmentationProcessor( SegmentationStateSelection.GOAL_CONDITION_RELEVANT_STATES, SegmentationObjectSelection.TASK_RELEVANT_OBJECTS, hierarchical=True, label_by_instance=True, profiler=profiler, ) # This applies a flat segmentation that allows us to see what room the agent is in during which frames. room_presence_segmentation = DemoSegmentationProcessor( ROOM_STATES, SegmentationObjectSelection.ROBOTS, diff_initial=True, profiler=profiler ) return { # "goal": goal_segmentation, "flat": flat_object_segmentation, "room": room_presence_segmentation, } def main(): bddl.set_backend("iGibson") args = parse_args() # Select the demo to apply segmentation on. demo_file = os.path.join(igibson.ig_dataset_path, "tests", "cleaning_windows_0_Rs_int_2021-05-23_23-11-46.hdf5") if args.log_path: demo_file = args.log_path # Select the output directory. out_dir = os.path.join(igibson.ig_dataset_path, "tests", "segmentation_results") if args.out_dir: out_dir = args.out_dir # Create output directory if needed. if not os.path.exists(out_dir): os.mkdir(out_dir) # Set up the profiler profiler = None if args.profile: profiler = pyinstrument.Profiler() # Create default segmentation processors. segmentation_processors = get_default_segmentation_processors(profiler) # Run the segmentations. behavior_demo_replay.safe_replay_demo( demo_file, start_callbacks=[sp.start_callback for sp in segmentation_processors.values()], step_callbacks=[sp.step_callback for sp in segmentation_processors.values()], ) demo_basename = os.path.splitext(os.path.basename(demo_file))[0] for segmentation_name, segmentation_processor in segmentation_processors.items(): json_file = "%s_%s.json" % (demo_basename, segmentation_name) json_fullpath = os.path.join(out_dir, json_file) with open(json_fullpath, "w") as f: json.dump(segmentation_processor.serialize_segments(), f) # Print the segmentations. combined_output = "" for segmentation_processor in segmentation_processors.values(): combined_output += str(segmentation_processor) + "\n" print(combined_output) # Save profiling information. if args.profile: html = profiler.output_html() html_path = os.path.join(out_dir, "segmentation_profile.html") with open(html_path, "w") as f: f.write(html) if __name__ == "__main__": main()
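

# Example invocation (added note; the file name and paths are illustrative
# assumptions, not from the original docs):
#   python behavior_demo_segmentation.py --log_path my_demo.hdf5 \
#       --out_dir ./segmentation_results --profile
# With no arguments, the bundled test demo and default output directory are used.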
<reponame>beenfhb/AlphaGo # -*- coding: utf-8 -*- """Tests for sgf.py.""" import unittest from textwrap import dedent from betago import gosgf SAMPLE_SGF = b"""\ (;AP[testsuite:0]CA[utf-8]DT[2009-06-06]FF[4]GM[1]KM[7.5]PB[Black engine] PL[B]PW[White engine]RE[W+R]SZ[9]AB[ai][bh][ee]AW[fc][gc];B[dg];W[ef]C[comment on two lines];B[];W[tt]C[Final comment]) """ SAMPLE_SGF_VAR = b"""\ (;AP[testsuite:0]CA[utf-8]DT[2009-06-06]FF[4]GM[1]KM[7.5]PB[Black engine] PL[B]RE[W+R]SZ[9]AB[ai][bh][ee]AW[fd][gc]VW[] ;B[dg] ;W[ef]C[comment on two lines] ;B[] ;C[Nonfinal comment]VW[aa:bb] (;B[ia];W[ib];B[ic]) (;B[ib];W[ic] (;B[id]) (;B[ie]) )) """ class SgfTestCase(unittest.TestCase): def test_new_sgf_game(self): g1 = gosgf.Sgf_game(9) self.assertEqual(g1.get_size(), 9) root = g1.get_root() self.assertEqual(root.get_raw(b'FF'), b'4') self.assertEqual(root.get_raw(b'GM'), b'1') self.assertEqual(root.get_raw(b'SZ'), b'9') self.assertEqual(root.get_raw_property_map(), { b'FF': [b'4'], b'GM': [b'1'], b'SZ': [b'9'], b'CA': [b'UTF-8'], }); self.assertEqual(list(root), []) self.assertEqual(root.parent, None) self.assertIs(root.owner, g1) def test_sgf_game_from_coarse_game_tree(self): class Namespace(object): pass coarse_game = Namespace() coarse_game.sequence = [{b'SZ' : [b"9"]}, {b'B' : [b"aa"]}] coarse_game.children = [] g1 = gosgf.Sgf_game.from_coarse_game_tree(coarse_game) self.assertEqual(g1.get_size(), 9) root = g1.get_root() self.assertIs(root.get_raw_property_map(), coarse_game.sequence[0]) self.assertEqual(root.parent, None) self.assertIs(root.owner, g1) self.assertEqual(len(root), 1) coarse_game2 = Namespace() coarse_game2.sequence = [{b'SZ' : [b"0"]}, {b'B' : [b"aa"]}] coarse_game2.children = [] self.assertRaisesRegexp(ValueError, "size out of range: 0", gosgf.Sgf_game.from_coarse_game_tree, coarse_game2) def test_sgf_game_from_string(self): g1 = gosgf.Sgf_game.from_string(b"(;)") self.assertEqual(g1.get_size(), 19) self.assertRaisesRegexp(ValueError, "unexpected end of SGF data", gosgf.Sgf_game.from_string, b"(;SZ[9]") g2 = gosgf.Sgf_game.from_string(b"(;SZ[9])") self.assertEqual(g2.get_size(), 9) self.assertRaisesRegexp(ValueError, "bad SZ property", gosgf.Sgf_game.from_string, b"(;SZ[a])") self.assertRaisesRegexp(ValueError, "size out of range", gosgf.Sgf_game.from_string, b"(;SZ[27])") self.assertRaisesRegexp(ValueError, "unknown encoding", gosgf.Sgf_game.from_string, b"(;CA[])") def test_node(self): sgf_game = gosgf.Sgf_game.from_string( r"(;KM[6.5]C[sample\: comment]AB[ai][bh][ee]AE[];B[dg])".encode('ascii')) node0 = sgf_game.get_root() node1 = list(sgf_game.main_sequence_iter())[1] self.assertEqual(node0.get_size(), 19) self.assertEqual(node0.get_encoding(), "ISO-8859-1") self.assertIs(node0.has_property(b'KM'), True) self.assertIs(node0.has_property(b'XX'), False) self.assertIs(node1.has_property(b'KM'), False) self.assertEqual(set(node0.properties()), set([b"KM", b"C", b"AB", b"AE"])) self.assertEqual(set(node1.properties()), set([b"B"])) self.assertEqual(node0.get_raw(b'C'), r"sample\: comment".encode('ascii')) self.assertEqual(node0.get_raw(b'AB'), b"ai") self.assertEqual(node0.get_raw(b'AE'), b"") self.assertRaises(KeyError, node0.get_raw, b'XX') self.assertEqual(node0.get_raw_list(b'KM'), [b'6.5']) self.assertEqual(node0.get_raw_list(b'AB'), [b'ai', b'bh', b'ee']) self.assertEqual(node0.get_raw_list(b'AE'), [b'']) self.assertRaises(KeyError, node0.get_raw_list, b'XX') self.assertRaises(KeyError, node0.get_raw, b'XX') def test_property_combination(self): sgf_game = 
gosgf.Sgf_game.from_string(b"(;XX[1]YY[2]XX[3]YY[4])") node0 = sgf_game.get_root() self.assertEqual(node0.get_raw_list(b"XX"), [b"1", b"3"]) self.assertEqual(node0.get_raw_list(b"YY"), [b"2", b"4"]) def test_node_get(self): sgf_game = gosgf.Sgf_game.from_string(dedent(r""" (;AP[testsuite:0]CA[utf-8]DT[2009-06-06]FF[4]GM[1]KM[7.5]PB[Black engine] PL[B]PW[White engine][xs]RE[W+R]SZ[9]AB[ai][bh][ee]AW[fd][gc]AE[]BM[2]VW[] EV[Test event] C[123:\) abc] YY[none sense] ;B[dg]KO[]AR[ab:cd][de:fg]FG[515:first move] LB[ac:lbl][bc:lbl2]) """).encode('utf-8')) root = sgf_game.get_root() node1 = list(sgf_game.main_sequence_iter())[1] self.assertRaises(KeyError, root.get, b'XX') self.assertEqual(root.get(b'C'), b"123:)\nabc") # Text self.assertEqual(root.get(b'EV'), b"Test event") # Simpletext self.assertEqual(root.get(b'BM'), 2) # Double self.assertEqual(root.get(b'YY'), b"none\nsense") # unknown (Text) self.assertIs(node1.get(b'KO'), True) # None self.assertEqual(root.get(b'KM'), 7.5) # Real self.assertEqual(root.get(b'GM'), 1) # Number self.assertEqual(root.get(b'PL'), 'b') # Color self.assertEqual(node1.get(b'B'), (2, 3)) # Point self.assertEqual(root.get(b'AB'), set([(0, 0), (1, 1), (4, 4)])) # List of Point self.assertEqual(root.get(b'VW'), set()) # Empty elist self.assertEqual(root.get(b'AP'), (b"testsuite", b"0")) # Application self.assertEqual(node1.get(b'AR'), [((7, 0), (5, 2)), ((4, 3), (2, 5))]) # Arrow self.assertEqual(node1.get(b'FG'), (515, b"first move")) # Figure self.assertEqual(node1.get(b'LB'), [((6, 0), b"lbl"), ((6, 1), b"lbl2")]) # Label # Check we (leniently) treat lists like elists on read self.assertEqual(root.get(b'AE'), set()) self.assertRaisesRegexp(ValueError, "multiple values", root.get, b'PW') def test_text_values(self): def check(s): sgf_game = gosgf.Sgf_game.from_string(s.encode('ascii')) return sgf_game.get_root().get(b"C").decode('ascii') # Round-trip check of Text values through tokeniser, parser, and # text_value(). 
self.assertEqual(check(r"(;C[abc]KO[])"), r"abc") self.assertEqual(check(r"(;C[a\\bc]KO[])"), r"a\bc") self.assertEqual(check(r"(;C[a\\bc\]KO[])"), r"a\bc]KO[") self.assertEqual(check(r"(;C[abc\\]KO[])"), r"abc" + "\\") self.assertEqual(check(r"(;C[abc\\\]KO[])"), r"abc\]KO[") self.assertEqual(check(r"(;C[abc\\\\]KO[])"), r"abc" + "\\\\") self.assertEqual(check(r"(;C[abc\\\\\]KO[])"), r"abc\\]KO[") self.assertEqual(check(r"(;C[xxx :\) yyy]KO[])"), r"xxx :) yyy") self.assertEqual(check("(;C[ab\\\nc])"), "abc") self.assertEqual(check("(;C[ab\nc])"), "ab\nc") def test_node_string(self): sgf_game = gosgf.Sgf_game.from_string(SAMPLE_SGF) node = sgf_game.get_root() self.assertMultiLineEqual(str(node), dedent("""\ AB[ai][bh][ee] AP[testsuite:0] AW[fc][gc] CA[utf-8] DT[2009-06-06] FF[4] GM[1] KM[7.5] PB[Black engine] PL[B] PW[White engine] RE[W+R] SZ[9] """)) def test_node_get_move(self): sgf_game = gosgf.Sgf_game.from_string(SAMPLE_SGF) nodes = list(sgf_game.main_sequence_iter()) self.assertEqual(nodes[0].get_move(), (None, None)) self.assertEqual(nodes[1].get_move(), ('b', (2, 3))) self.assertEqual(nodes[2].get_move(), ('w', (3, 4))) self.assertEqual(nodes[3].get_move(), ('b', None)) self.assertEqual(nodes[4].get_move(), ('w', None)) def test_node_get_setup_stones(self): sgf_game = gosgf.Sgf_game.from_string( r"(;KM[6.5]SZ[9]C[sample\: comment]AB[ai][bh][ee]AE[bb];B[dg])".encode('utf-8')) node0 = sgf_game.get_root() node1 = list(sgf_game.main_sequence_iter())[1] self.assertIs(node0.has_setup_stones(), True) self.assertIs(node1.has_setup_stones(), False) self.assertEqual(node0.get_setup_stones(), (set([(0, 0), (1, 1), (4, 4)]), set(), set([(7, 1)]))) self.assertEqual(node1.get_setup_stones(), (set(), set(), set())) def test_sgf_game(self): sgf_game = gosgf.Sgf_game.from_string(SAMPLE_SGF_VAR) nodes = list(sgf_game.main_sequence_iter()) self.assertEqual(sgf_game.get_size(), 9) self.assertEqual(sgf_game.get_komi(), 7.5) self.assertIs(sgf_game.get_handicap(), None) self.assertEqual(sgf_game.get_player_name('b'), "Black engine") self.assertIs(sgf_game.get_player_name('w'), None) self.assertEqual(sgf_game.get_winner(), 'w') self.assertEqual(nodes[2].get(b'C'), b"comment\non two lines") self.assertEqual(nodes[4].get(b'C'), b"Nonfinal comment") g2 = gosgf.Sgf_game.from_string(b"(;)") self.assertEqual(g2.get_size(), 19) self.assertEqual(g2.get_komi(), 0.0) self.assertIs(g2.get_handicap(), None) self.assertIs(g2.get_player_name('b'), None) self.assertIs(g2.get_player_name('w'), None) self.assertEqual(g2.get_winner(), None) def test_tree_view(self): sgf_game = gosgf.Sgf_game.from_string(SAMPLE_SGF_VAR) root = sgf_game.get_root() self.assertIsInstance(root, gosgf.Tree_node) self.assertIs(root.parent, None) self.assertIs(root.owner, sgf_game) self.assertEqual(len(root), 1) self.assertEqual(root[0].get_raw(b'B'), b"dg") self.assertTrue(root) self.assertEqual(root.index(root[0]), 0) branchnode = root[0][0][0][0] self.assertIsInstance(branchnode, gosgf.Tree_node) self.assertIs(branchnode.parent, root[0][0][0]) self.assertIs(branchnode.owner, sgf_game) self.assertEqual(len(branchnode), 2) self.assertIs(branchnode[1], branchnode[-1]) self.assertEqual(branchnode[:1], [branchnode[0]]) self.assertEqual([node for node in branchnode], [branchnode[0], branchnode[1]]) with self.assertRaises(IndexError): branchnode[2] self.assertEqual(branchnode[0].get_raw(b'B'), b"ia") self.assertEqual(branchnode[1].get_raw(b'B'), b"ib") self.assertEqual(branchnode.index(branchnode[0]), 0) 
self.assertEqual(branchnode.index(branchnode[1]), 1) self.assertEqual(len(branchnode[1][0]), 2) leaf = branchnode[1][0][1] self.assertIs(leaf.parent, branchnode[1][0]) self.assertEqual(len(leaf), 0) self.assertFalse(leaf) self.assertIs(sgf_game.get_last_node(), root[0][0][0][0][0][0][0]) # check nothing breaks when first retrieval is by index game2 = gosgf.Sgf_game.from_string(SAMPLE_SGF) root2 = game2.get_root() self.assertEqual(root2[0].get_raw(b'B'), b"dg") def test_serialise(self): # Doesn't cover transcoding sgf_game = gosgf.Sgf_game.from_string(SAMPLE_SGF_VAR) serialised = sgf_game.serialise() self.assertEqual(serialised, dedent("""\ (;FF[4]AB[ai][bh][ee]AP[testsuite:0]AW[fd][gc]CA[utf-8]DT[2009-06-06]GM[1] KM[7.5]PB[Black engine]PL[B]RE[W+R]SZ[9]VW[];B[dg];C[comment on two lines]W[ef] ;B[];C[Nonfinal comment]VW[aa:bb](;B[ia];W[ib];B[ic])(;B[ib];W[ic](;B[id])(; B[ie]))) """).encode('utf-8')) sgf_game2 = gosgf.Sgf_game.from_string(serialised) self.assertEqual([str(x) for x in sgf_game.get_main_sequence()], [str(x) for x in sgf_game2.get_main_sequence()]) def test_serialise_wrap(self): sgf_game = gosgf.Sgf_game.from_string(SAMPLE_SGF_VAR) serialised = sgf_game.serialise(wrap=None) self.assertEqual(serialised, dedent("""\ (;FF[4]AB[ai][bh][ee]AP[testsuite:0]AW[fd][gc]CA[utf-8]DT[2009-06-06]GM[1]KM[7.5]PB[Black engine]PL[B]RE[W+R]SZ[9]VW[];B[dg];C[comment on two lines]W[ef];B[];C[Nonfinal comment]VW[aa:bb](;B[ia];W[ib];B[ic])(;B[ib];W[ic](;B[id])(;B[ie]))) """).encode('ascii')) sgf_game2 = gosgf.Sgf_game.from_string(serialised) seq1 = [str(x) for x in sgf_game.get_main_sequence()] seq2 = [str(x) for x in sgf_game2.get_main_sequence()] self.assertEqual(seq1, seq2) def test_encoding(self): g1 = gosgf.Sgf_game(19) self.assertEqual(g1.get_charset(), "UTF-8") root = g1.get_root() self.assertEqual(root.get_encoding(), "UTF-8") root.set(b"C", u"£".encode('utf-8')) self.assertEqual(root.get(b"C"), u"£".encode('utf-8')) self.assertEqual(root.get_raw(b"C"), u"£".encode('utf-8')) self.assertEqual(g1.serialise(), dedent(u"""\ (;FF[4]C[£]CA[UTF-8]GM[1]SZ[19]) """).encode('utf-8')) g2 = gosgf.Sgf_game(19, encoding="iso-8859-1") self.assertEqual(g2.get_charset(), "ISO-8859-1") root = g2.get_root() self.assertEqual(root.get_encoding(), "ISO-8859-1") root.set(b"C", u"£".encode('utf-8')) self.assertEqual(root.get(b"C"), u"£".encode('utf-8')) self.assertEqual(root.get_raw(b"C"), b"\xa3") self.assertEqual(g2.serialise(), b"(;FF[4]C[\xa3]CA[ISO-8859-1]GM[1]SZ[19])\n") self.assertRaisesRegexp(ValueError, "unknown encoding", gosgf.Sgf_game, 19, "unknownencoding") def test_loaded_sgf_game_encoding(self): g1 = gosgf.Sgf_game.from_string(u""" (;FF[4]C[£]CA[utf-8]GM[1]SZ[19]) """.encode('utf-8')) self.assertEqual(g1.get_charset(), "UTF-8") root = g1.get_root() self.assertEqual(root.get_encoding(), "UTF-8") self.assertEqual(root.get(b"C"), u"£".encode('utf-8')) self.assertEqual(root.get_raw(b"C"), u"£".encode('utf-8')) self.assertEqual(g1.serialise(), dedent(u"""\ (;FF[4]C[£]CA[utf-8]GM[1]SZ[19]) """).encode('utf-8')) g2 = gosgf.Sgf_game.from_string(b""" (;FF[4]C[\xa3]CA[iso-8859-1]GM[1]SZ[19]) """) self.assertEqual(g2.get_charset(), "ISO-8859-1") root = g2.get_root() self.assertEqual(root.get_encoding(), "ISO-8859-1") self.assertEqual(root.get(b"C"), u"£".encode('utf-8')) self.assertEqual(root.get_raw(b"C"), b"\xa3") self.assertEqual(g2.serialise(), dedent(u"""\ (;FF[4]C[£]CA[iso-8859-1]GM[1]SZ[19]) """).encode('iso-8859-1')) g3 = gosgf.Sgf_game.from_string(b""" (;FF[4]C[\xa3]GM[1]SZ[19]) """) 
self.assertEqual(g3.get_charset(), "ISO-8859-1") root = g3.get_root() self.assertEqual(root.get_encoding(), "ISO-8859-1") self.assertEqual(root.get(b"C"), u"£".encode('utf-8')) self.assertEqual(root.get_raw(b"C"), b"\xa3") self.assertEqual(g3.serialise(), dedent(u"""\ (;FF[4]C[£]GM[1]SZ[19]) """).encode('iso-8859-1')) # This is invalidly encoded. get() notices, but serialise() doesn't care. g4 = gosgf.Sgf_game.from_string(b""" (;FF[4]C[\xa3]CA[utf-8]GM[1]SZ[19]) """) self.assertEqual(g4.get_charset(), "UTF-8") root = g4.get_root() self.assertEqual(root.get_encoding(), "UTF-8") self.assertRaises(UnicodeDecodeError, root.get, b"C") self.assertEqual(root.get_raw(b"C"), b"\xa3") self.assertEqual(g4.serialise(), b"""(;FF[4]C[\xa3]CA[utf-8]GM[1]SZ[19])\n""") self.assertRaisesRegexp( ValueError, "unknown encoding", gosgf.Sgf_game.from_string, b""" (;FF[4]CA[unknownencoding]GM[1]SZ[19]) """) def test_override_encoding(self): g1 = gosgf.Sgf_game.from_string(u""" (;FF[4]C[£]CA[iso-8859-1]GM[1]SZ[19]) """.encode('utf-8'), override_encoding="utf-8") root = g1.get_root() self.assertEqual(root.get_encoding(), "UTF-8") self.assertEqual(root.get(b"C"), u"£".encode('utf-8')) self.assertEqual(root.get_raw(b"C"), u"£".encode('utf-8')) self.assertEqual(g1.serialise(), dedent(u"""\ (;FF[4]C[£]CA[UTF-8]GM[1]SZ[19]) """).encode('utf-8')) g2 = gosgf.Sgf_game.from_string(b""" (;FF[4]C[\xa3]CA[utf-8]GM[1]SZ[19]) """, override_encoding="iso-8859-1") root = g2.get_root() self.assertEqual(root.get_encoding(), "ISO-8859-1") self.assertEqual(root.get(b"C"), u"£".encode('utf-8')) self.assertEqual(root.get_raw(b"C"), b'\xa3') self.assertEqual(g2.serialise().strip(), b"""(;FF[4]C[\xa3]CA[ISO-8859-1]GM[1]SZ[19])""") def test_serialise_transcoding(self): g1 = gosgf.Sgf_game.from_string(u""" (;FF[4]C[£]CA[utf-8]GM[1]SZ[19]) """.encode('utf-8')) self.assertEqual(g1.serialise(), dedent(u"""\ (;FF[4]C[£]CA[utf-8]GM[1]SZ[19]) """).encode('utf-8')) g1.get_root().set(b"CA", b"latin-1") self.assertEqual(g1.serialise(), dedent(u"""\ (;FF[4]C[£]CA[latin-1]GM[1]SZ[19]) """).encode('latin-1')) g1.get_root().set(b"CA", b"unknown") self.assertRaisesRegexp(ValueError, "unsupported charset", g1.serialise) # improperly-encoded from the start g2 = gosgf.Sgf_game.from_string(u""" (;FF[4]C[£]CA[ascii]GM[1]SZ[19]) """.encode('utf-8')) self.assertEqual(g2.serialise(), dedent(u"""\ (;FF[4]C[£]CA[ascii]GM[1]SZ[19]) """).encode('utf-8')) g2.get_root().set(b"CA", b"utf-8") self.assertRaises(UnicodeDecodeError, g2.serialise) g3 = gosgf.Sgf_game.from_string(u""" (;FF[4]C[Δ]CA[utf-8]GM[1]SZ[19]) """.encode('utf-8')) g3.get_root().unset(b"CA") self.assertRaises(UnicodeEncodeError, g3.serialise) def test_tree_mutation(self): sgf_game = gosgf.Sgf_game(9) root = sgf_game.get_root() n1 = root.new_child() n1.set(b"N", b"n1") n2 = root.new_child() n2.set(b"N", b"n2") n3 = n1.new_child() n3.set(b"N", b"n3") n4 = root.new_child(1) n4.set(b"N", b"n4") self.assertEqual( sgf_game.serialise(), b"(;FF[4]CA[UTF-8]GM[1]SZ[9](;N[n1];N[n3])(;N[n4])(;N[n2]))\n") self.assertEqual( [node.get_raw_property_map() for node in sgf_game.main_sequence_iter()], [node.get_raw_property_map() for node in (root, root[0], n3)]) self.assertIs(sgf_game.get_last_node(), n3) n1.delete() self.assertEqual( sgf_game.serialise(), b"(;FF[4]CA[UTF-8]GM[1]SZ[9](;N[n4])(;N[n2]))\n") self.assertRaises(ValueError, root.delete) def test_tree_mutation_from_coarse_game(self): sgf_game = gosgf.Sgf_game.from_string(b"(;SZ[9](;N[n1];N[n3])(;N[n2]))") root = sgf_game.get_root() n4 = root.new_child() 
n4.set(b"N", b"n4") n3 = root[0][0] self.assertEqual(n3.get(b"N"), b"n3") n5 = n3.new_child() n5.set(b"N", b"n5") self.assertEqual(sgf_game.serialise(), b"(;SZ[9](;N[n1];N[n3];N[n5])(;N[n2])(;N[n4]))\n") self.assertEqual( [node.get_raw_property_map() for node in sgf_game.main_sequence_iter()], [node.get_raw_property_map() for node in (root, root[0], n3, n5)]) self.assertIs(sgf_game.get_last_node(), n5) n3.delete() self.assertEqual(sgf_game.serialise(), b"(;SZ[9](;N[n1])(;N[n2])(;N[n4]))\n") self.assertRaises(ValueError, root.delete) def test_tree_new_child_with_unexpanded_root_and_index(self): sgf_game = gosgf.Sgf_game.from_string(b"(;SZ[9](;N[n1];N[n3])(;N[n2]))") root = sgf_game.get_root() n4 = root.new_child(2) n4.set(b"N", b"n4") self.assertEqual(sgf_game.serialise(), b"(;SZ[9](;N[n1];N[n3])(;N[n2])(;N[n4]))\n") def test_reparent(self): g1 = gosgf.Sgf_game.from_string(b"(;SZ[9](;N[n1];N[n3])(;N[n2]))") root = g1.get_root() # Test with unexpanded root self.assertRaisesRegexp(ValueError, "would create a loop", root.reparent, root) n1 = root[0] n2 = root[1] n3 = root[0][0] self.assertEqual(n1.get(b"N"), b"n1") self.assertEqual(n2.get(b"N"), b"n2") self.assertEqual(n3.get(b"N"), b"n3") n3.reparent(n2) self.assertEqual(g1.serialise(), b"(;SZ[9](;N[n1])(;N[n2];N[n3]))\n") n3.reparent(n2) self.assertEqual(g1.serialise(), b"(;SZ[9](;N[n1])(;N[n2];N[n3]))\n") self.assertRaisesRegexp(ValueError, "would create a loop", root.reparent, n3) self.assertRaisesRegexp(ValueError, "would create a loop", n3.reparent, n3) g2 = gosgf.Sgf_game(9) self.assertRaisesRegexp( ValueError, "new parent doesn't belong to the same game", n3.reparent, g2.get_root()) def test_reparent_index(self): g1 = gosgf.Sgf_game.from_string(b"(;SZ[9](;N[n1];N[n3])(;N[n2]))") root = g1.get_root() n1 = root[0] n2 = root[1] n3 = root[0][0] self.assertEqual(n1.get(b"N"), b"n1") self.assertEqual(n2.get(b"N"), b"n2") self.assertEqual(n3.get(b"N"), b"n3") n3.reparent(root, index=1) self.assertEqual(g1.serialise(), b"(;SZ[9](;N[n1])(;N[n3])(;N[n2]))\n") n3.reparent(root, index=1) self.assertEqual(g1.serialise(), b"(;SZ[9](;N[n1])(;N[n3])(;N[n2]))\n") n3.reparent(root, index=2) self.assertEqual(g1.serialise(), b"(;SZ[9](;N[n1])(;N[n2])(;N[n3]))\n") def test_extend_main_sequence(self): g1 = gosgf.Sgf_game(9) for i in range(6): g1.extend_main_sequence().set(b"N", ("e%d" % i).encode('ascii')) self.assertEqual( g1.serialise(), b"(;FF[4]CA[UTF-8]GM[1]SZ[9];N[e0];N[e1];N[e2];N[e3];N[e4];N[e5])\n") g2 = gosgf.Sgf_game.from_string(b"(;SZ[9](;N[n1];N[n3])(;N[n2]))") for i in range(6): g2.extend_main_sequence().set(b"N", ("e%d" % i).encode('ascii')) self.assertEqual( g2.serialise(), b"(;SZ[9](;N[n1];N[n3];N[e0];N[e1];N[e2];N[e3];N[e4];N[e5])(;N[n2]))\n") def test_get_sequence_above(self): sgf_game = gosgf.Sgf_game.from_string(SAMPLE_SGF_VAR) root = sgf_game.get_root() branchnode = root[0][0][0][0] leaf = branchnode[1][0][1] self.assertEqual(sgf_game.get_sequence_above(root), []) self.assertEqual(sgf_game.get_sequence_above(branchnode), [root, root[0], root[0][0], root[0][0][0]]) self.assertEqual(sgf_game.get_sequence_above(leaf), [root, root[0], root[0][0], root[0][0][0], branchnode, branchnode[1], branchnode[1][0]]) sgf_game2 = gosgf.Sgf_game.from_string(SAMPLE_SGF_VAR) self.assertRaisesRegexp(ValueError, "node doesn't belong to this game", sgf_game2.get_sequence_above, leaf) def test_get_main_sequence_below(self): sgf_game = gosgf.Sgf_game.from_string(SAMPLE_SGF_VAR) root = sgf_game.get_root() branchnode = root[0][0][0][0] leaf = 
branchnode[1][0][1] self.assertEqual(sgf_game.get_main_sequence_below(leaf), []) self.assertEqual(sgf_game.get_main_sequence_below(branchnode), [branchnode[0], branchnode[0][0], branchnode[0][0][0]]) self.assertEqual(sgf_game.get_main_sequence_below(root), [root[0], root[0][0], root[0][0][0], branchnode, branchnode[0], branchnode[0][0], branchnode[0][0][0]]) sgf_game2 = gosgf.Sgf_game.from_string(SAMPLE_SGF_VAR) self.assertRaisesRegexp(ValueError, "node doesn't belong to this game", sgf_game2.get_main_sequence_below, branchnode) def test_main_sequence(self): sgf_game = gosgf.Sgf_game.from_string(SAMPLE_SGF_VAR) root = sgf_game.get_root() nodes = list(sgf_game.main_sequence_iter()) self.assertEqual(len(nodes), 8) self.assertIs(root.get_raw_property_map(), nodes[0].get_raw_property_map()) # Check that main_sequence_iter() optimisation has been used. # (Have to call this before making the tree expand.) with self.assertRaises(AttributeError): nodes[1].parent tree_nodes = sgf_game.get_main_sequence() self.assertEqual(len(tree_nodes), 8) self.assertIs(root.get_raw_property_map(), tree_nodes[0].get_raw_property_map()) self.assertIs(tree_nodes[0], root) self.assertIs(tree_nodes[2].parent, tree_nodes[1]) self.assertIs(sgf_game.get_last_node(), tree_nodes[-1]) tree_node = root for node in nodes: self.assertIs(tree_node.get_raw_property_map(), node.get_raw_property_map()) if tree_node: tree_node = tree_node[0] def test_find(self): sgf_game = gosgf.Sgf_game.from_string(SAMPLE_SGF_VAR) root = sgf_game.get_root() branchnode = root[0][0][0][0] leaf = branchnode[1][0][1] self.assertEqual(root.get(b"VW"), set()) self.assertIs(root.find(b"VW"), root) self.assertRaises(KeyError, root[0].get, b"VW") self.assertEqual(root[0].find_property(b"VW"), set()) self.assertIs(root[0].find(b"VW"), root) self.assertEqual(branchnode.get(b"VW"), set([(7, 0), (7, 1), (8, 0), (8, 1)])) self.assertIs(branchnode.find(b"VW"), branchnode) self.assertEqual(branchnode.find_property(b"VW"), set([(7, 0), (7, 1), (8, 0), (8, 1)])) self.assertRaises(KeyError, leaf.get, b"VW") self.assertIs(leaf.find(b"VW"), branchnode) self.assertEqual(leaf.find_property(b"VW"), set([(7, 0), (7, 1), (8, 0), (8, 1)])) self.assertIs(leaf.find(b"XX"), None) self.assertRaises(KeyError, leaf.find_property, b"XX") def test_node_set_raw(self): sgf_game = gosgf.Sgf_game.from_string(dedent(r""" (;AP[testsuite:0]CA[utf-8]DT[2009-06-06]FF[4]GM[1]KM[7.5] PB[Black engine]PW[White engine]RE[W+R]SZ[9] AB[ai][bh][ee]AW[fd][gc]BM[2]VW[] PL[B] C[123abc] ;B[dg]C[first move]) """).encode('utf-8')) root = sgf_game.get_root() self.assertEqual(root.get_raw(b'RE'), b"W+R") root.set_raw(b'RE', b"W+2.5") self.assertEqual(root.get_raw(b'RE'), b"W+2.5") self.assertRaises(KeyError, root.get_raw, b'XX') root.set_raw(b'XX', b"xyz") self.assertEqual(root.get_raw(b'XX'), b"xyz") root.set_raw_list(b'XX', (b"abc", b"def")) self.assertEqual(root.get_raw(b'XX'), b"abc") self.assertEqual(root.get_raw_list(b'XX'), [b"abc", b"def"]) self.assertRaisesRegexp(ValueError, "empty property list", root.set_raw_list, b'B', []) values = [b"123", b"456"] root.set_raw_list(b'YY', values) self.assertEqual(root.get_raw_list(b'YY'), [b"123", b"456"]) values.append(b"789") self.assertEqual(root.get_raw_list(b'YY'), [b"123", b"456"]) self.assertRaisesRegexp(ValueError, "ill-formed property identifier", root.set_raw, b'Black', b"aa") self.assertRaisesRegexp(ValueError, "ill-formed property identifier", root.set_raw_list, b'Black', [b"aa"]) root.set_raw(b'C', b"foo\\]bar") 
self.assertEqual(root.get_raw(b'C'), b"foo\\]bar") root.set_raw(b'C', b"abc\\\\") self.assertEqual(root.get_raw(b'C'), b"abc\\\\") self.assertRaisesRegexp(ValueError, "ill-formed raw property value", root.set_raw, b'C', b"foo]bar") self.assertRaisesRegexp(ValueError, "ill-formed raw property value", root.set_raw, b'C', b"abc\\") self.assertRaisesRegexp(ValueError, "ill-formed raw property value", root.set_raw_list, b'C', [b"abc", b"de]f"]) root.set_raw(b'C', b"foo\\]bar\\\nbaz") self.assertEqual(root.get(b'C'), b"foo]barbaz") def test_node_aliasing(self): # Check that node objects retrieved by different means use the same # property map. sgf_game = gosgf.Sgf_game.from_string(dedent(r""" (;C[root];C[node 1]) """).encode('utf-8')) root = sgf_game.get_root() plain_node = list(sgf_game.main_sequence_iter())[1] tree_node = root[0] # Check the main_sequence_iter() optimisation was used, otherwise this test # isn't checking what it's supposed to. self.assertIsNot(tree_node, plain_node) self.assertIs(tree_node.__class__, gosgf.Tree_node) self.assertIs(plain_node.__class__, gosgf.Node) self.assertEqual(tree_node.get_raw(b'C'), b"node 1") tree_node.set_raw(b'C', r"test\value".encode('ascii')) self.assertEqual(tree_node.get_raw(b'C'), r"test\value".encode('ascii')) self.assertEqual(plain_node.get_raw(b'C'), r"test\value".encode('ascii')) plain_node.set_raw_list(b'XX', [b"1", b"2", b"3"]) self.assertEqual(tree_node.get_raw_list(b'XX'), [b"1", b"2", b"3"]) def test_node_set(self): sgf_game = gosgf.Sgf_game.from_string(b"(;FF[4]GM[1]SZ[9])") root = sgf_game.get_root() root.set(b"KO", True) root.set(b"KM", 0.5) root.set(b'DD', [(3, 4), (5, 6)]) root.set(b'AB', set([(0, 0), (1, 1), (4, 4)])) root.set(b'TW', set()) root.set(b'XX', b"nonsense [none]sense more n\\onsens\\e") self.assertEqual(sgf_game.serialise(), dedent("""\ (;FF[4]AB[ai][bh][ee]DD[ef][gd]GM[1]KM[0.5]KO[]SZ[9]TW[] XX[nonsense [none\\]sense more n\\\\onsens\\\\e]) """).encode('utf-8')) def test_node_unset(self): sgf_game = gosgf.Sgf_game.from_string(b"(;FF[4]GM[1]SZ[9]HA[3])") root = sgf_game.get_root() self.assertEqual(root.get(b'HA'), 3) root.unset(b'HA') self.assertRaises(KeyError, root.unset, b'PL') self.assertEqual(sgf_game.serialise(), b"(;FF[4]GM[1]SZ[9])\n") def test_set_and_unset_size(self): g1 = gosgf.Sgf_game.from_string(b"(;FF[4]GM[1]SZ[9]HA[3])") root1 = g1.get_root() self.assertRaisesRegexp(ValueError, "changing size is not permitted", root1.set, b"SZ", 19) root1.set(b"SZ", 9) self.assertRaisesRegexp(ValueError, "changing size is not permitted", root1.unset, b"SZ") g2 = gosgf.Sgf_game.from_string(b"(;FF[4]GM[1]SZ[19]HA[3])") root2 = g2.get_root() root2.unset(b"SZ") root2.set(b"SZ", 19) def test_set_and_unset_charset(self): g1 = gosgf.Sgf_game.from_string(b"(;FF[4]CA[utf-8]GM[1]SZ[9]HA[3])") self.assertEqual(g1.get_charset(), "UTF-8") root1 = g1.get_root() root1.unset(b"CA") self.assertEqual(g1.get_charset(), "ISO-8859-1") root1.set(b"CA", b"iso-8859-1") self.assertEqual(g1.get_charset(), "ISO-8859-1") root1.set(b"CA", b"ascii") self.assertEqual(g1.get_charset(), "ASCII") root1.set(b"CA", b"unknownencoding") self.assertRaisesRegexp(ValueError, "no codec available for CA", g1.get_charset) def test_node_set_move(self): sgf_game = gosgf.Sgf_game.from_string(b"(;FF[4]GM[1]SZ[9];B[aa];B[bb])") root, n1, n2 = sgf_game.get_main_sequence() self.assertEqual(root.get_move(), (None, None)) root.set_move('b', (1, 1)) n1.set_move('w', (1, 2)) n2.set_move('b', None) self.assertEqual(root.get(b'B'), (1, 1)) self.assertRaises(KeyError, root.get, 
b'W') self.assertEqual(n1.get(b'W'), (1, 2)) self.assertRaises(KeyError, n1.get, b'B') self.assertEqual(n2.get(b'B'), None) self.assertRaises(KeyError, n2.get, b'W') def test_node_setup_stones(self): sgf_game = gosgf.Sgf_game.from_string(b"(;FF[4]GM[1]SZ[9]AW[aa:bb])") root = sgf_game.get_root() root.set_setup_stones( [(1, 2), (3, 4)], set(), [(1, 3), (4, 5)], ) self.assertEqual(root.get(b'AB'), set([(1, 2), (3, 4)])) self.assertRaises(KeyError, root.get, b'AW') self.assertEqual(root.get(b'AE'), set([(1, 3), (4, 5)])) def test_add_comment_text(self): sgf_game = gosgf.Sgf_game(9) root = sgf_game.get_root() root.add_comment_text(b"hello\nworld") self.assertEqual(root.get(b'C'), b"hello\nworld") root.add_comment_text(b"hello\naga]in") self.assertEqual(root.get(b'C'), b"hello\nworld\n\nhello\naga]in")
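# Illustrative usage sketch (not part of the test suite above): a minimal
# round-trip with the gosgf API exercised by these tests -- parse a game,
# mutate the root node, and serialise it back to bytes. Only calls already
# demonstrated in the tests above are used.
def example_gosgf_roundtrip():
    sgf_game = gosgf.Sgf_game.from_string(b"(;FF[4]GM[1]SZ[9])")
    root = sgf_game.get_root()
    root.set(b"KM", 6.5)            # komi
    root.set(b"C", b"an example")   # comment property
    return sgf_game.serialise()     # bytes, ready to write to a .sgf file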
19,077
310
{ "name": "Bullet", "description": "A space pen.", "url": "https://www.spacepen.com/bullet.aspx" }
44
1,056
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.netbeans.qa.form.refactoring;

import org.netbeans.qa.form.*;
import org.netbeans.jellytools.nodes.Node;
import org.netbeans.jemmy.operators.JButtonOperator;
import org.netbeans.jemmy.operators.JComboBoxOperator;
import org.netbeans.jemmy.operators.JDialogOperator;
import java.io.File;
import junit.framework.Test;
import org.netbeans.jellytools.NbDialogOperator;
import org.netbeans.jellytools.ProjectsTabOperator;
import org.netbeans.jellytools.actions.OpenAction;
import org.netbeans.jellytools.modules.form.FormDesignerOperator;
import org.netbeans.jellytools.nodes.ProjectRootNode;
import org.netbeans.jemmy.TimeoutExpiredException;
import org.netbeans.jemmy.operators.JTextFieldOperator;
import org.netbeans.jemmy.operators.Operator;
import org.netbeans.junit.NbModuleSuite;

/**
 * Tests form refactoring, 3rd scenario: moving a form class into a different package.
 *
 * @author <NAME>
 *
 * <b><NAME></b>
 * 26 APRIL 2011 WORKS
 */
public class MoveFormClassTest extends ExtJellyTestCase {
    private String CLASS_NAME = "FrameWithBundleToMove"; // NOI18N
    // private String CLASS_NAME = "ClassToMove"; // NOI18N
    private String NEW_PACKAGE_NAME = "subdata";
    private String PACKAGE_NAME = "." + NEW_PACKAGE_NAME; // NOI18N

    /**
     * Constructor required by JUnit
     * @param testName
     */
    public MoveFormClassTest(String testName) {
        super(testName);
    }

    public static Test suite() {
        return NbModuleSuite.create(NbModuleSuite.createConfiguration(MoveFormClassTest.class).addTest(
                "testCreatePackage",
                "testRefactoring",
                "testChangesInJavaFile",
                "testChangesInPropertiesFile"
                ).clusters(".*").enableModules(".*").gui(true));
    }

    /** Creates the subdata package. */
    public void testCreatePackage() {
        ProjectsTabOperator pto = new ProjectsTabOperator();
        ProjectRootNode prn = pto.getProjectRootNode(getTestProjectName());
        prn.select();

        Node formnode = new Node(prn, "Source Packages"); // NOI18N
        formnode.setComparator(new Operator.DefaultStringComparator(true, false));
        formnode.select();

        runNoBlockPopupOverNode("New|Java Package...", formnode); // NOI18N

        NbDialogOperator dialog = new NbDialogOperator("New Java Package");
        new JTextFieldOperator(dialog, 0).typeText(getTestPackageName() + PACKAGE_NAME);
        new JButtonOperator(dialog, "Finish").push();
    }

    /** Runs the refactoring. */
    public void testRefactoring() throws Exception {
        Node node = openFile(CLASS_NAME);
        /* The task manager takes a long time for scanning, so the file may not
           be opened in time. Workaround: sleep for a while. */
        waitNoEvent(1000);
        runNoBlockPopupOverNode("Refactor|Move...", node); // NOI18N
        waitNoEvent(3000);

        JDialogOperator dialog = new JDialogOperator("Move"); // NOI18N
        JComboBoxOperator combo = new JComboBoxOperator(dialog, 2);
        combo.selectItem(getTestPackageName() + PACKAGE_NAME);
        new JButtonOperator(dialog, "Refactor").clickMouse();

        // this refactoring case sometimes takes a very long time,
        // which is why the following retry loop exists
        boolean isClosed = false;
        TimeoutExpiredException lastExc = null;
        for (int i = 0; i < 3; i++) {
            try {
                dialog.waitClosed();
                isClosed = true;
            } catch (TimeoutExpiredException e) {
                lastExc = e;
            } catch (Exception e) {
                throw e;
            }
        }
        if (!isClosed) {
            throw (lastExc != null) ? lastExc : new Exception("Something strange happened while waiting in waitClosed()");
        }
    }

    /** Tests the content of the java file. */
    public void testChangesInJavaFile() {
        ProjectsTabOperator pto = new ProjectsTabOperator();
        ProjectRootNode prn = pto.getProjectRootNode(getTestProjectName());
        prn.select();
        String path = "Source Packages|" + getTestPackageName() + PACKAGE_NAME + "|" + CLASS_NAME + ".java"; // NOI18N
        //p(path);
        Node formnode = new Node(prn, path); // NOI18N
        formnode.setComparator(new Operator.DefaultStringComparator(true, false));
        // formnode.select();

        OpenAction openAction = new OpenAction();
        openAction.perform(formnode);

        FormDesignerOperator designer = new FormDesignerOperator(CLASS_NAME);

        // new class package
        findInCode("package data.subdata;", designer);
    }

    /** Tests changes in the property bundle file. */
    public void testChangesInPropertiesFile() {
        String sourceFilePath = getFilePathFromDataPackage(NEW_PACKAGE_NAME + File.separator + "Bundle.properties");
        String key = "FrameWithBundleToMove.lanButton.text";
        assertTrue("Key \"" + key + "\" not found in Bundle.properties file.", findInFile(key, sourceFilePath)); // NOI18N
    }
}
2,292
1,144
// SPDX-License-Identifier: GPL-2.0+ /* * Keystone2: DDR3 SPD configuration * * (C) Copyright 2015-2016 Texas Instruments Incorporated, <www.ti.com> */ #include <common.h> #include <i2c.h> #include <ddr_spd.h> #include <asm/arch/ddr3.h> #include <asm/arch/hardware.h> #define DUMP_DDR_CONFIG 0 /* set to 1 to debug */ #define debug_ddr_cfg(fmt, args...) \ debug_cond(DUMP_DDR_CONFIG, fmt, ##args) static void dump_phy_config(struct ddr3_phy_config *ptr) { debug_ddr_cfg("\npllcr 0x%08X\n", ptr->pllcr); debug_ddr_cfg("pgcr1_mask 0x%08X\n", ptr->pgcr1_mask); debug_ddr_cfg("pgcr1_val 0x%08X\n", ptr->pgcr1_val); debug_ddr_cfg("ptr0 0x%08X\n", ptr->ptr0); debug_ddr_cfg("ptr1 0x%08X\n", ptr->ptr1); debug_ddr_cfg("ptr2 0x%08X\n", ptr->ptr2); debug_ddr_cfg("ptr3 0x%08X\n", ptr->ptr3); debug_ddr_cfg("ptr4 0x%08X\n", ptr->ptr4); debug_ddr_cfg("dcr_mask 0x%08X\n", ptr->dcr_mask); debug_ddr_cfg("dcr_val 0x%08X\n", ptr->dcr_val); debug_ddr_cfg("dtpr0 0x%08X\n", ptr->dtpr0); debug_ddr_cfg("dtpr1 0x%08X\n", ptr->dtpr1); debug_ddr_cfg("dtpr2 0x%08X\n", ptr->dtpr2); debug_ddr_cfg("mr0 0x%08X\n", ptr->mr0); debug_ddr_cfg("mr1 0x%08X\n", ptr->mr1); debug_ddr_cfg("mr2 0x%08X\n", ptr->mr2); debug_ddr_cfg("dtcr 0x%08X\n", ptr->dtcr); debug_ddr_cfg("pgcr2 0x%08X\n", ptr->pgcr2); debug_ddr_cfg("zq0cr1 0x%08X\n", ptr->zq0cr1); debug_ddr_cfg("zq1cr1 0x%08X\n", ptr->zq1cr1); debug_ddr_cfg("zq2cr1 0x%08X\n", ptr->zq2cr1); debug_ddr_cfg("pir_v1 0x%08X\n", ptr->pir_v1); debug_ddr_cfg("pir_v2 0x%08X\n\n", ptr->pir_v2); }; static void dump_emif_config(struct ddr3_emif_config *ptr) { debug_ddr_cfg("\nsdcfg 0x%08X\n", ptr->sdcfg); debug_ddr_cfg("sdtim1 0x%08X\n", ptr->sdtim1); debug_ddr_cfg("sdtim2 0x%08X\n", ptr->sdtim2); debug_ddr_cfg("sdtim3 0x%08X\n", ptr->sdtim3); debug_ddr_cfg("sdtim4 0x%08X\n", ptr->sdtim4); debug_ddr_cfg("zqcfg 0x%08X\n", ptr->zqcfg); debug_ddr_cfg("sdrfc 0x%08X\n\n", ptr->sdrfc); }; #define TEMP NORMAL_TEMP #define VBUS_CLKPERIOD 1.875 /* Corresponds to vbus=533MHz, */ #define PLLGS_VAL (4000.0 / VBUS_CLKPERIOD) /* 4 us */ #define PLLPD_VAL (1000.0 / VBUS_CLKPERIOD) /* 1 us */ #define PLLLOCK_VAL (100000.0 / VBUS_CLKPERIOD) /* 100 us */ #define PLLRST_VAL (9000.0 / VBUS_CLKPERIOD) /* 9 us */ #define PHYRST_VAL 0x10 #define DDR_TERM RZQ_4_TERM #define SDRAM_DRIVE RZQ_7_IMP #define DYN_ODT ODT_DISABLE enum srt { NORMAL_TEMP, EXTENDED_TEMP }; enum out_impedance { RZQ_6_IMP = 0, RZQ_7_IMP }; enum die_term { ODT_DISABLE = 0, RZQ_4_TERM, RZQ_2_TERM, RZQ_6_TERM, RZQ_12_TERM, RZQ_8_TERM }; struct ddr3_sodimm { u32 t_ck; u32 freqsel; u32 t_xp; u32 t_cke; u32 t_pllpd; u32 t_pllgs; u32 t_phyrst; u32 t_plllock; u32 t_pllrst; u32 t_rfc; u32 t_xs; u32 t_dinit0; u32 t_dinit1; u32 t_dinit2; u32 t_dinit3; u32 t_rtp; u32 t_wtr; u32 t_rp; u32 t_rcd; u32 t_ras; u32 t_rrd; u32 t_rc; u32 t_faw; u32 t_mrd; u32 t_mod; u32 t_wlo; u32 t_wlmrd; u32 t_xsdll; u32 t_xpdll; u32 t_ckesr; u32 t_dllk; u32 t_wr; u32 t_wr_bin; u32 cas; u32 cwl; u32 asr; u32 pasr; u32 t_refprd; u8 sdram_type; u8 ibank; u8 pagesize; u8 t_rrd2; u8 t_ras_max; u8 t_zqcs; u32 refresh_rate; u8 t_csta; u8 rank; u8 mirrored; u8 buswidth; }; static u8 cas_latancy(u16 temp) { int loop; u8 cas_bin = 0; for (loop = 0; loop < 32; loop += 2, temp >>= 1) { if (temp & 0x0001) cas_bin = (loop > 15) ? 
loop - 15 : loop; } return cas_bin; } static int ddr3_get_size_in_mb(ddr3_spd_eeprom_t *buf) { return (((buf->organization & 0x38) >> 3) + 1) * (256 << (buf->density_banks & 0xf)); } static int ddrtimingcalculation(ddr3_spd_eeprom_t *buf, struct ddr3_sodimm *spd, struct ddr3_spd_cb *spd_cb) { u32 mtb, clk_freq; if ((buf->mem_type != 0x0b) || ((buf->density_banks & 0x70) != 0x00)) return 1; spd->sdram_type = 0x03; spd->ibank = 0x03; mtb = buf->mtb_dividend * 1000 / buf->mtb_divisor; spd->t_ck = buf->tck_min * mtb; spd_cb->ddrspdclock = 2000000 / spd->t_ck; clk_freq = spd_cb->ddrspdclock / 2; spd->rank = ((buf->organization & 0x38) >> 3) + 1; if (spd->rank > 2) return 1; spd->pagesize = (buf->addressing & 0x07) + 1; if (spd->pagesize > 3) return 1; spd->buswidth = 8 << (buf->bus_width & 0x7); if ((spd->buswidth < 16) || (spd->buswidth > 64)) return 1; spd->mirrored = buf->mod_section.unbuffered.addr_mapping & 1; printf("DDR3A Speed will be configured for %d Operation.\n", spd_cb->ddrspdclock); if (spd_cb->ddrspdclock == 1333) { spd->t_xp = ((3 * spd->t_ck) > 6000) ? 3 : ((5999 / spd->t_ck) + 1); spd->t_cke = ((3 * spd->t_ck) > 5625) ? 3 : ((5624 / spd->t_ck) + 1); } else if (spd_cb->ddrspdclock == 1600) { spd->t_xp = ((3 * spd->t_ck) > 6000) ? 3 : ((5999 / spd->t_ck) + 1); spd->t_cke = ((3 * spd->t_ck) > 5000) ? 3 : ((4999 / spd->t_ck) + 1); } else { printf("Unsupported DDR3 speed %d\n", spd_cb->ddrspdclock); return 1; } spd->t_xpdll = (spd->t_ck > 2400) ? 10 : 24000 / spd->t_ck; spd->t_ckesr = spd->t_cke + 1; /* SPD Calculated Values */ spd->cas = cas_latancy((buf->caslat_msb << 8) | buf->caslat_lsb); spd->t_wr = (buf->twr_min * mtb) / spd->t_ck; spd->t_wr_bin = (spd->t_wr / 2) & 0x07; spd->t_rcd = ((buf->trcd_min * mtb) - 1) / spd->t_ck + 1; spd->t_rrd = ((buf->trrd_min * mtb) - 1) / spd->t_ck + 1; spd->t_rp = (((buf->trp_min * mtb) - 1) / spd->t_ck) + 1; spd->t_ras = (((buf->tras_trc_ext & 0x0f) << 8 | buf->tras_min_lsb) * mtb) / spd->t_ck; spd->t_rc = (((((buf->tras_trc_ext & 0xf0) << 4) | buf->trc_min_lsb) * mtb) - 1) / spd->t_ck + 1; spd->t_rfc = (buf->trfc_min_lsb | (buf->trfc_min_msb << 8)) * mtb / 1000; spd->t_wtr = (buf->twtr_min * mtb) / spd->t_ck; spd->t_rtp = (buf->trtp_min * mtb) / spd->t_ck; spd->t_xs = (((spd->t_rfc + 10) * 1000) / spd->t_ck); spd->t_rfc = ((spd->t_rfc * 1000) - 1) / spd->t_ck + 1; spd->t_faw = (((buf->tfaw_msb << 8) | buf->tfaw_min) * mtb) / spd->t_ck; spd->t_rrd2 = ((((buf->tfaw_msb << 8) | buf->tfaw_min) * mtb) / (4 * spd->t_ck)) - 1; /* Hard-coded values */ spd->t_mrd = 0x00; spd->t_mod = 0x00; spd->t_wlo = 0x0C; spd->t_wlmrd = 0x28; spd->t_xsdll = 0x200; spd->t_ras_max = 0x0F; spd->t_csta = 0x05; spd->t_dllk = 0x200; /* CAS Write Latency */ if (spd->t_ck >= 2500) spd->cwl = 0; else if (spd->t_ck >= 1875) spd->cwl = 1; else if (spd->t_ck >= 1500) spd->cwl = 2; else if (spd->t_ck >= 1250) spd->cwl = 3; else if (spd->t_ck >= 1071) spd->cwl = 4; else spd->cwl = 5; /* SD:RAM Thermal and Refresh Options */ spd->asr = (buf->therm_ref_opt & 0x04) >> 2; spd->pasr = (buf->therm_ref_opt & 0x80) >> 7; spd->t_zqcs = 64; spd->t_refprd = (TEMP == NORMAL_TEMP) ? 
7812500 : 3906250; spd->t_refprd = spd->t_refprd / spd->t_ck; spd->refresh_rate = spd->t_refprd; spd->t_refprd = spd->t_refprd * 5; /* Set MISC PHY space registers fields */ if ((clk_freq / 2) >= 166 && (clk_freq / 2 < 275)) spd->freqsel = 0x03; else if ((clk_freq / 2) > 225 && (clk_freq / 2 < 385)) spd->freqsel = 0x01; else if ((clk_freq / 2) > 335 && (clk_freq / 2 < 534)) spd->freqsel = 0x00; spd->t_dinit0 = 500000000 / spd->t_ck; /* CKE low time 500 us */ spd->t_dinit1 = spd->t_xs; spd->t_dinit2 = 200000000 / spd->t_ck; /* Reset low time 200 us */ /* Time from ZQ initialization command to first command (1 us) */ spd->t_dinit3 = 1000000 / spd->t_ck; spd->t_pllgs = PLLGS_VAL + 1; spd->t_pllpd = PLLPD_VAL + 1; spd->t_plllock = PLLLOCK_VAL + 1; spd->t_pllrst = PLLRST_VAL; spd->t_phyrst = PHYRST_VAL; spd_cb->ddr_size_gbyte = ddr3_get_size_in_mb(buf) / 1024; return 0; } static void init_ddr3param(struct ddr3_spd_cb *spd_cb, struct ddr3_sodimm *spd) { spd_cb->phy_cfg.pllcr = (spd->freqsel & 3) << 18 | 0xE << 13; spd_cb->phy_cfg.pgcr1_mask = (IODDRM_MASK | ZCKSEL_MASK); spd_cb->phy_cfg.pgcr1_val = ((1 << 2) | (1 << 7) | (1 << 23)); spd_cb->phy_cfg.ptr0 = ((spd->t_pllpd & 0x7ff) << 21) | ((spd->t_pllgs & 0x7fff) << 6) | (spd->t_phyrst & 0x3f); spd_cb->phy_cfg.ptr1 = ((spd->t_plllock & 0xffff) << 16) | (spd->t_pllrst & 0x1fff); spd_cb->phy_cfg.ptr2 = 0; spd_cb->phy_cfg.ptr3 = ((spd->t_dinit1 & 0x1ff) << 20) | (spd->t_dinit0 & 0xfffff); spd_cb->phy_cfg.ptr4 = ((spd->t_dinit3 & 0x3ff) << 18) | (spd->t_dinit2 & 0x3ffff); spd_cb->phy_cfg.dcr_mask = PDQ_MASK | MPRDQ_MASK | BYTEMASK_MASK; spd_cb->phy_cfg.dcr_val = 1 << 10; if (spd->mirrored) { spd_cb->phy_cfg.dcr_mask |= NOSRA_MASK | UDIMM_MASK; spd_cb->phy_cfg.dcr_val |= (1 << 27) | (1 << 29); } spd_cb->phy_cfg.dtpr0 = (spd->t_rc & 0x3f) << 26 | (spd->t_rrd & 0xf) << 22 | (spd->t_ras & 0x3f) << 16 | (spd->t_rcd & 0xf) << 12 | (spd->t_rp & 0xf) << 8 | (spd->t_wtr & 0xf) << 4 | (spd->t_rtp & 0xf); spd_cb->phy_cfg.dtpr1 = (spd->t_wlo & 0xf) << 26 | (spd->t_wlmrd & 0x3f) << 20 | (spd->t_rfc & 0x1ff) << 11 | (spd->t_faw & 0x3f) << 5 | (spd->t_mod & 0x7) << 2 | (spd->t_mrd & 0x3); spd_cb->phy_cfg.dtpr2 = 0 << 31 | 1 << 30 | 0 << 29 | (spd->t_dllk & 0x3ff) << 19 | (spd->t_ckesr & 0xf) << 15; spd_cb->phy_cfg.dtpr2 |= (((spd->t_xp > spd->t_xpdll) ? spd->t_xp : spd->t_xpdll) & 0x1f) << 10; spd_cb->phy_cfg.dtpr2 |= (((spd->t_xs > spd->t_xsdll) ? spd->t_xs : spd->t_xsdll) & 0x3ff); spd_cb->phy_cfg.mr0 = 1 << 12 | (spd->t_wr_bin & 0x7) << 9 | 0 << 8 | 0 << 7 | ((spd->cas & 0x0E) >> 1) << 4 | 0 << 3 | (spd->cas & 0x01) << 2; spd_cb->phy_cfg.mr1 = 0 << 12 | 0 << 11 | 0 << 7 | 0 << 3 | ((DDR_TERM >> 2) & 1) << 9 | ((DDR_TERM >> 1) & 1) << 6 | (DDR_TERM & 0x1) << 2 | ((SDRAM_DRIVE >> 1) & 1) << 5 | (SDRAM_DRIVE & 1) << 1 | 0 << 0; spd_cb->phy_cfg.mr2 = DYN_ODT << 9 | TEMP << 7 | (spd->asr & 1) << 6 | (spd->cwl & 7) << 3 | (spd->pasr & 7); spd_cb->phy_cfg.dtcr = (spd->rank == 2) ? 
0x730035C7 : 0x710035C7;
	spd_cb->phy_cfg.pgcr2 = (0xF << 20) | ((int)spd->t_refprd & 0x3ffff);
	spd_cb->phy_cfg.zq0cr1 = 0x0000005D;
	spd_cb->phy_cfg.zq1cr1 = 0x0000005B;
	spd_cb->phy_cfg.zq2cr1 = 0x0000005B;
	spd_cb->phy_cfg.pir_v1 = 0x00000033;
	spd_cb->phy_cfg.pir_v2 = 0x0000FF81;

	/* EMIF Registers */
	spd_cb->emif_cfg.sdcfg = spd->sdram_type << 29 | (DDR_TERM & 7) << 25 |
		(DYN_ODT & 3) << 22 | (spd->cwl & 0x7) << 14 |
		(spd->cas & 0xf) << 8 | (spd->ibank & 3) << 5 |
		(spd->buswidth & 3) << 12 | (spd->pagesize & 3);

	if (spd->rank == 2)
		spd_cb->emif_cfg.sdcfg |= 1 << 3;

	spd_cb->emif_cfg.sdtim1 = ((spd->t_wr - 1) & 0x1f) << 25 |
		((spd->t_ras - 1) & 0x7f) << 18 |
		((spd->t_rc - 1) & 0xff) << 10 |
		(spd->t_rrd2 & 0x3f) << 4 |
		((spd->t_wtr - 1) & 0xf);

	spd_cb->emif_cfg.sdtim2 = 0x07 << 10 |
		((spd->t_rp - 1) & 0x1f) << 5 |
		((spd->t_rcd - 1) & 0x1f);

	spd_cb->emif_cfg.sdtim3 = ((spd->t_xp - 2) & 0xf) << 28 |
		((spd->t_xs - 1) & 0x3ff) << 18 |
		((spd->t_xsdll - 1) & 0x3ff) << 8 |
		((spd->t_rtp - 1) & 0xf) << 4 |
		((spd->t_cke) & 0xf);

	spd_cb->emif_cfg.sdtim4 = (spd->t_csta & 0xf) << 28 |
		((spd->t_ckesr - 1) & 0xf) << 24 |
		((spd->t_zqcs - 1) & 0xff) << 16 |
		((spd->t_rfc - 1) & 0x3ff) << 4 |
		(spd->t_ras_max & 0xf);

	spd_cb->emif_cfg.sdrfc = (spd->refresh_rate - 1) & 0xffff;

	/* TODO: zqcfg value is fixed; may need correction for the K2E EVM. */
	spd_cb->emif_cfg.zqcfg = (spd->rank == 2) ? 0xF0073200 : 0x70073200;
}

static int ddr3_read_spd(ddr3_spd_eeprom_t *spd_params)
{
	int ret;
	int old_bus;

	i2c_init(CONFIG_SYS_DAVINCI_I2C_SPEED, CONFIG_SYS_DAVINCI_I2C_SLAVE);

	old_bus = i2c_get_bus_num();
	i2c_set_bus_num(1);

	ret = i2c_read(0x53, 0, 1, (unsigned char *)spd_params, 256);

	i2c_set_bus_num(old_bus);

	if (ret) {
		printf("Cannot read DIMM params\n");
		return 1;
	}

	if (ddr3_spd_check(spd_params))
		return 1;

	return 0;
}

int ddr3_get_size(void)
{
	ddr3_spd_eeprom_t spd_params;

	if (ddr3_read_spd(&spd_params))
		return 0;

	return ddr3_get_size_in_mb(&spd_params) / 1024;
}

int ddr3_get_dimm_params_from_spd(struct ddr3_spd_cb *spd_cb)
{
	struct ddr3_sodimm spd;
	ddr3_spd_eeprom_t spd_params;

	memset(&spd, 0, sizeof(spd));

	if (ddr3_read_spd(&spd_params))
		return 1;

	if (ddrtimingcalculation(&spd_params, &spd, spd_cb)) {
		printf("Timing calculation error\n");
		return 1;
	}

	strncpy(spd_cb->dimm_name, (char *)spd_params.mpart, 18);
	spd_cb->dimm_name[18] = '\0';

	init_ddr3param(spd_cb, &spd);

	dump_emif_config(&spd_cb->emif_cfg);
	dump_phy_config(&spd_cb->phy_cfg);

	return 0;
}
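/*
 * Worked example of the SPD clock derivation in ddrtimingcalculation()
 * above, using typical DDR3-1600 SPD values (illustrative only, not taken
 * from any particular DIMM):
 *
 *   mtb  = mtb_dividend * 1000 / mtb_divisor = 1 * 1000 / 8 = 125 ps
 *   t_ck = tck_min * mtb                     = 10 * 125     = 1250 ps
 *   ddrspdclock = 2000000 / t_ck             = 1600
 *
 * i.e. a 1.25 ns clock period yields the DDR3-1600 data rate handled by
 * the corresponding t_xp/t_cke branch above.
 */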
6,842
372
<reponame>kbore/pbis-open<gh_stars>100-1000 /* Editor Settings: expandtabs and use 4 spaces for indentation * ex: set softtabstop=4 tabstop=8 expandtab shiftwidth=4: * * -*- mode: c, c-basic-offset: 4 -*- */ /* * Copyright © BeyondTrust Software 2004 - 2019 * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * BEYONDTRUST MAKES THIS SOFTWARE AVAILABLE UNDER OTHER LICENSING TERMS AS * WELL. IF YOU HAVE ENTERED INTO A SEPARATE LICENSE AGREEMENT WITH * BEYONDTRUST, THEN YOU MAY ELECT TO USE THE SOFTWARE UNDER THE TERMS OF THAT * SOFTWARE LICENSE AGREEMENT INSTEAD OF THE TERMS OF THE APACHE LICENSE, * NOTWITHSTANDING THE ABOVE NOTICE. IF YOU HAVE QUESTIONS, OR WISH TO REQUEST * A COPY OF THE ALTERNATE LICENSING TERMS OFFERED BY BEYONDTRUST, PLEASE CONTACT * BEYONDTRUST AT beyondtrust.com/contact */ #include "includes.h" static void* LwIoFuseEntrypointInit( struct fuse_conn_info* conn ) { NTSTATUS status = STATUS_SUCCESS; PIO_FUSE_CONTEXT pFuseContext = LwIoFuseGetContext(); status = LwIoFuseInit( pFuseContext, conn ); BAIL_ON_NT_STATUS(status); return pFuseContext; error: abort(); } static int LwIoFuseEntrypointGetattr( const char *path, struct stat *statbuf ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseGetattr( path, statbuf); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointStatfs( const char *path, struct statvfs *statbuf ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseStatfs( path, statbuf); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointReaddir( const char *path, void *buf, fuse_fill_dir_t filler, off_t offset, struct fuse_file_info *fi ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseReaddir( path, buf, filler, offset, fi); BAIL_ON_NT_STATUS(status); error: switch (status) { case STATUS_BUFFER_TOO_SMALL: /* Special status code to indicate to FUSE that the provided buffer was too small */ return 1; default: return LwIoFuseMapNtStatus(status); } } static int LwIoFuseEntrypointOpen( const char* path, struct fuse_file_info* fi ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseOpen( path, fi); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointRelease( const char* path, struct fuse_file_info* fi ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseRelease( path, fi); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointRead( const char* path, char* buf, size_t len, off_t off, struct fuse_file_info* fi ) { NTSTATUS status = STATUS_SUCCESS; int bytesRead = 0; status = LwIoFuseRead( path, buf, len, off, fi, &bytesRead ); BAIL_ON_NT_STATUS(status); error: if (status) { return LwIoFuseMapNtStatus(status); } else { return bytesRead; } } static int LwIoFuseEntrypointWrite( const char* path, const char* buf, size_t len, off_t off, struct fuse_file_info* fi ) { NTSTATUS status = STATUS_SUCCESS; int bytesWritten = 0; status = LwIoFuseWrite( path, buf, len, off, fi, 
&bytesWritten ); BAIL_ON_NT_STATUS(status); error: if (status) { return LwIoFuseMapNtStatus(status); } else { return bytesWritten; } } static int LwIoFuseEntrypointTruncate( const char* path, off_t size ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseTruncate( path, size); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointMknod( const char* path, mode_t mode, dev_t dev ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseMknod( path, mode, dev); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointCreate( const char* path, mode_t mode, struct fuse_file_info* fi ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseCreate( path, mode, fi); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointUnlink( const char* path ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseUnlink(path); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointMkdir( const char* path, mode_t mode ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseMkdir( path, mode ); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointRmdir( const char* path ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseRmdir(path); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointRename( const char* oldpath, const char* newpath ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseRename(oldpath, newpath); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointChmod( const char* path, mode_t mode ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseChmod(path, mode); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointChown( const char* path, uid_t uid, gid_t gid ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseChown(path, uid, gid); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static int LwIoFuseEntrypointUtimens( const char* path, const struct timespec tv[2] ) { NTSTATUS status = STATUS_SUCCESS; status = LwIoFuseUtimens(path, tv); BAIL_ON_NT_STATUS(status); error: return LwIoFuseMapNtStatus(status); } static struct fuse_operations gLwIoFuseOperations = { .init = LwIoFuseEntrypointInit, .getattr = LwIoFuseEntrypointGetattr, .statfs = LwIoFuseEntrypointStatfs, .readdir = LwIoFuseEntrypointReaddir, .open = LwIoFuseEntrypointOpen, .release = LwIoFuseEntrypointRelease, .read = LwIoFuseEntrypointRead, .write = LwIoFuseEntrypointWrite, .truncate = LwIoFuseEntrypointTruncate, .mknod = LwIoFuseEntrypointMknod, .create = LwIoFuseEntrypointCreate, .unlink = LwIoFuseEntrypointUnlink, .mkdir = LwIoFuseEntrypointMkdir, .rmdir = LwIoFuseEntrypointRmdir, .rename = LwIoFuseEntrypointRename, .chmod = LwIoFuseEntrypointChmod, .chown = LwIoFuseEntrypointChown, .utimens = LwIoFuseEntrypointUtimens }; struct fuse_operations* LwIoFuseGetOperationsTable( void ) { return &gLwIoFuseOperations; }
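/*
 * Illustrative hookup (an assumption, not part of this file): a driver
 * would typically hand the table above to libfuse through the standard
 * entry point, e.g.:
 *
 *   int main(int argc, char** argv)
 *   {
 *       return fuse_main(argc, argv, LwIoFuseGetOperationsTable(), NULL);
 *   }
 *
 * The actual driver entry point lives elsewhere in the tree; this sketch
 * only shows where the operations table plugs in.
 */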
3,647
628
import numpy as np import pytest import torch from doctr.io import decode_img_as_tensor, read_img_as_tensor, tensor_from_numpy def test_read_img_as_tensor(mock_image_path): img = read_img_as_tensor(mock_image_path) assert isinstance(img, torch.Tensor) assert img.dtype == torch.float32 assert img.shape == (3, 900, 1200) img = read_img_as_tensor(mock_image_path, dtype=torch.float16) assert img.dtype == torch.float16 img = read_img_as_tensor(mock_image_path, dtype=torch.uint8) assert img.dtype == torch.uint8 def test_decode_img_as_tensor(mock_image_stream): img = decode_img_as_tensor(mock_image_stream) assert isinstance(img, torch.Tensor) assert img.dtype == torch.float32 assert img.shape == (3, 900, 1200) img = decode_img_as_tensor(mock_image_stream, dtype=torch.float16) assert img.dtype == torch.float16 img = decode_img_as_tensor(mock_image_stream, dtype=torch.uint8) assert img.dtype == torch.uint8 def test_tensor_from_numpy(mock_image_stream): with pytest.raises(ValueError): tensor_from_numpy(np.zeros((256, 256, 3)), torch.int64) out = tensor_from_numpy(np.zeros((256, 256, 3), dtype=np.uint8)) assert isinstance(out, torch.Tensor) assert out.dtype == torch.float32 assert out.shape == (3, 256, 256) out = tensor_from_numpy(np.zeros((256, 256, 3), dtype=np.uint8), dtype=torch.float16) assert out.dtype == torch.float16 out = tensor_from_numpy(np.zeros((256, 256, 3), dtype=np.uint8), dtype=torch.uint8) assert out.dtype == torch.uint8
658
453
"""ng_template_bundle.""" load("@io_bazel_rules_closure//closure:defs.bzl", "closure_js_library") def ng_template_bundle( name, srcs, module_name = "templates", strip_prefix = None, prepend_prefix = None, goog_provide = None): """Generates a js_library that inlines a set of Angular templates.""" native.genrule( name = name + "_genrule", srcs = srcs, outs = [ name + "_bundle.js", ], cmd = "$(location //common:ng_template) $(location @npm//ng-html2js/bin:ng-html2js) %s %s %s %s $(SRCS) > $@" % (strip_prefix, prepend_prefix, module_name, goog_provide), tools = [ "@npm//ng-html2js/bin:ng-html2js", "//common:ng_template", ], ) closure_js_library( name = name, srcs = [name + "_genrule"], # Include the .ng files as data dependencies so binaries can serve them. data = srcs, suppress = [ "JSC_VAR_MULTIPLY_DECLARED_ERROR", ], )
516
5,169
<reponame>ftapp/cocoapods<filename>Specs/MTBackgroundView/1.0/MTBackgroundView.podspec.json
{
  "name": "MTBackgroundView",
  "version": "1.0",
  "summary": "An NSView subclass which fills itself with the color set to backgroundColor.",
  "description": "NSView does not have a backgroundColor property like UIView does, so this view will draw a background color if set.",
  "homepage": "https://github.com/mathieutozer/MTBackgroundView",
  "license": "MIT",
  "authors": {
    "<NAME>": "<EMAIL>"
  },
  "source": {
    "git": "https://github.com/mathieutozer/MTBackgroundView.git",
    "tag": "1.0"
  },
  "platforms": {
    "osx": "10.11"
  },
  "requires_arc": true,
  "source_files": "Pod/Classes/**/*"
}
259
3,100
/** ****************************************************************************** * Xenia : Xbox 360 Emulator Research Project * ****************************************************************************** * Copyright 2020 <NAME>. All rights reserved. * * Released under the BSD license - see LICENSE in the root for more details. * ****************************************************************************** */ #ifndef XENIA_BASE_FUZZY_H_ #define XENIA_BASE_FUZZY_H_ #include <string> #include <vector> namespace xe { // Tests a match against a case-insensitive fuzzy filter. // Returns the score of the match or 0 if none. int fuzzy_match(const std::string_view pattern, const char* value); // Applies a case-insensitive fuzzy filter to the given entries and ranks // results. // Entries is a list of pointers to opaque structs, each of which contains a // char* string at the given offset. // Returns an unsorted list of {original index, score}. std::vector<std::pair<size_t, int>> fuzzy_filter(const std::string_view pattern, const void* const* entries, size_t entry_count, size_t string_offset); template <typename T> std::vector<std::pair<size_t, int>> fuzzy_filter(const std::string_view pattern, const std::vector<T>& entries, size_t string_offset) { return fuzzy_filter(pattern, reinterpret_cast<void* const*>(entries.data()), entries.size(), string_offset); } } // namespace xe #endif // XENIA_BASE_FUZZY_H_
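// Illustrative usage of the filter API above (the Entry struct is a made-up
// caller-side example; offsetof supplies the string_offset argument that
// fuzzy_filter expects):
//
//   struct Entry { const char* name; int id; };
//   std::vector<Entry> entries = /* ... populate ... */;
//   auto ranked = xe::fuzzy_filter("halo", entries, offsetof(Entry, name));
//   // `ranked` holds unsorted {original index, score} pairs; sort by score
//   // descending to present the best matches first.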
720
343
<gh_stars>100-1000 /* * zsummerX License * ----------- * * zsummerX is licensed under the terms of the MIT license reproduced below. * This means that zsummerX is free software and can be used for both academic * and commercial purposes at absolutely no cost. * * * =============================================================================== * * Copyright (C) 2010-2016 YaweiZhang <<EMAIL>>. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * * =============================================================================== * * (end of COPYRIGHT) */ #include <zsummerX/frame/session.h> #include <zsummerX/frame/manager.h> #include <algorithm> using namespace zsummer::proto4z; using namespace zsummer::network; using std::min; using std::max; TcpSession::TcpSession() { SessionManager::getRef()._statInfo[STAT_SESSION_CREATED]++; _recving = (SessionBlock*)malloc(sizeof(SessionBlock)+SESSION_BLOCK_SIZE); _recving->len = 0; _recving->bound = SESSION_BLOCK_SIZE; _sending = (SessionBlock*)malloc(sizeof(SessionBlock)+SESSION_BLOCK_SIZE); _sending->len = 0; _sending->bound = SESSION_BLOCK_SIZE; _param.reserve(100); } TcpSession::~TcpSession() { SessionManager::getRef()._statInfo[STAT_SESSION_DESTROYED]++; while (!_sendque.empty()) { _options._freeBlock(_sendque.front()); _sendque.pop_front(); } if (_sockptr) { _sockptr->doClose(); _sockptr.reset(); } free (_recving); _recving = nullptr; free (_sending); _sending = nullptr; } void TcpSession::connect() { if (_status == 0) { _pulseTimerID = SessionManager::getRef().createTimer(_options._connectPulseInterval, std::bind(&TcpSession::onPulse, shared_from_this())); _status = 1; reconnect(); } else { LCE("can't connect on a old session. 
please use addConnect try again."); } } void TcpSession::reconnect() { if (_sockptr) { _sockptr->doClose(); _sockptr.reset(); } _sockptr = std::make_shared<TcpSocket>(); if (!_sockptr->initialize(_eventLoop)) { LCE("connect init error"); return; } _recving->len = 0; _sending->len = 0; _sendingLen = 0; _rc4StateRead.makeSBox(_options._rc4TcpEncryption); _rc4StateWrite.makeSBox(_options._rc4TcpEncryption); _bFirstRecvData = true; while (_options._reconnectClean && !_sendque.empty()) { _options._freeBlock(_sendque.front()); _sendque.pop_front(); } if (!_sockptr->doConnect(_remoteIP, _remotePort, std::bind(&TcpSession::onConnected, shared_from_this(), std::placeholders::_1))) { LCE("connect error"); return; } } bool TcpSession::attatch(const TcpSocketPtr &sockptr, AccepterID aID, SessionID sID) { _sockptr = sockptr; _acceptID = aID; _sessionID = sID; _sockptr->getPeerInfo(_remoteIP, _remotePort); if (_options._setNoDelay) { sockptr->setNoDelay(); } #ifndef WIN32 sockptr->setFloodSendOptimize(_options._floodSendOptimize); #endif _status = 2; _pulseTimerID = SessionManager::getRef().createTimer(_options._sessionPulseInterval, std::bind(&TcpSession::onPulse, shared_from_this())); SessionManager::getRef()._statInfo[STAT_SESSION_LINKED]++; if (_options._onSessionLinked) { try { _options._onSessionLinked(shared_from_this()); } catch (const std::exception & e) { LOGE("TcpSession::attatch _onSessionLinked error. e=" << e.what()); } catch (...) { LCW("TcpSession::attatch _onSessionLinked catch one unknown exception."); } } if (!doRecv()) { close(); return false; } return true; } void TcpSession::onConnected(zsummer::network::NetErrorCode ec) { if (ec) { LCW("onConnected error. ec=" << ec << ", cID=" << _sessionID); return; } LCI("onConnected success. sessionID=" << _sessionID); if (!doRecv()) { return; } _status = 2; _reconnects = 0; if (_options._setNoDelay) { _sockptr->setNoDelay(); } #ifndef WIN32 _sockptr->setFloodSendOptimize(_options._floodSendOptimize); #endif if (_options._onSessionLinked) { try { _options._onSessionLinked(shared_from_this()); } catch (const std::exception & e) { LOGE("TcpSession::onConnected error. e=" << e.what()); } catch (...) { LCW("TcpSession::onConnected catch one unknown exception."); } } SessionManager::getRef()._statInfo[STAT_SESSION_LINKED]++; if (!_sendque.empty()) { send(nullptr, 0); } } bool TcpSession::doRecv() { LCT("TcpSession::doRecv sessionID=" << getSessionID() ); if (!_sockptr) { return false; } #ifndef WIN32 return _sockptr->doRecv(_recving->begin + _recving->len, _recving->bound - _recving->len, std::bind(&TcpSession::onRecv, shared_from_this(), std::placeholders::_1, std::placeholders::_2), true); #else return _sockptr->doRecv(_recving->begin + _recving->len, _recving->bound - _recving->len, std::bind(&TcpSession::onRecv, shared_from_this(), std::placeholders::_1, std::placeholders::_2)); #endif } void TcpSession::close() { if (_status != 3 && _status != 0) { if (_sockptr) { _sockptr->doClose(); _sockptr.reset(); } LCD("TcpSession to close socket. sID= " << _sessionID); if (_status == 2) { SessionManager::getRef()._statInfo[STAT_SESSION_CLOSED]++; if (_options._onSessionClosed) { SessionManager::getRef().post(std::bind(_options._onSessionClosed, shared_from_this())); } } if (isConnectID(_sessionID) && _reconnects < _options._reconnects) { _status = 1; LCD("TcpSession already closed. try reconnect ... 
sID= " << _sessionID); } else { _status = 3; SessionManager::getRef().post(std::bind(&SessionManager::removeSession, SessionManager::getPtr(), shared_from_this())); LCI("TcpSession remove self from manager. sID= " << _sessionID); } return; } LCW("TcpSession::close closing. sID=" << _sessionID); } unsigned int TcpSession::onRecv(zsummer::network::NetErrorCode ec, int received) { LCT("TcpSession::onRecv sessionID=" << getSessionID() << ", received=" << received); if (ec) { _lastRecvError = ec; if (_lastRecvError == NEC_REMOTE_CLOSED) { LCI("socket closed. remote close. sID=" << _sessionID); } else { LCI("socket closed. socket error(or win rst). sID=" << _sessionID); } close(); return 0 ; } _recving->len += received; SessionManager::getRef()._statInfo[STAT_RECV_COUNT]++; SessionManager::getRef()._statInfo[STAT_RECV_BYTES] += received; // skip encrypt the flash policy data if that open flash policy. // skip encrypt when the rc4 encrypt sbox is empty. { do { //process something when recv first data. // flash policy process const char * flashPolicyRequestString = "<policy-file-request/>"; //string length is 23 byte, contain null-terminator character. unsigned int flashPolicyRequestSize = 23; if (_bFirstRecvData && _options._openFlashPolicy && _acceptID != InvalidAccepterID && _recving->len == flashPolicyRequestSize && memcmp(flashPolicyRequestString, _recving->begin, flashPolicyRequestSize) == 0) { _recving->len = 0; const char * flashPolicyResponseString = R"---(<cross-domain-policy><allow-access-from domain="*" to-ports="*"/></cross-domain-policy>)---"; unsigned int flashPolicyResponseSize = (unsigned int)strlen(flashPolicyResponseString) + 1; send(flashPolicyResponseString, flashPolicyResponseSize); //do other something. //do other something end. _bFirstRecvData = false; } else if (_bFirstRecvData) { //do other something. //do other something end. _bFirstRecvData = false; } if (_options._rc4TcpEncryption.empty() || _recving->len == 0) { break; } unsigned int needEncry = received; if (_recving->len < (unsigned int)received) { needEncry = _recving->len; } _rc4StateRead.encryption((unsigned char*)_recving->begin + _recving->len - needEncry, needEncry); } while (0); } //分包 unsigned int usedIndex = 0; do { if (_options._protoType == PT_TCP) { OnBlockCheckResult ret; try { ret = _options._onBlockCheck(_recving->begin + usedIndex, _recving->len - usedIndex, _recving->bound - usedIndex, _recving->bound); } catch (const std::exception & e) { LCW("MessageEntry _onBlockCheck catch one exception: " << e.what() << ", offset = " << usedIndex << ", len = " << _recving->len << ", bound = " << _recving->bound << ", bindata(max 500byte) :" << zsummer::log4z::Log4zBinary(_recving->begin+usedIndex, min(_recving->len - usedIndex, (unsigned int)500))); close(); return 0; } catch (...) { LCW("MessageEntry _onBlockCheck catch one unknown exception. offset=" << usedIndex << ", len=" << _recving->len << ", bound=" << _recving->bound << "bindata(max 500byte) :" << zsummer::log4z::Log4zBinary(_recving->begin + usedIndex, min(_recving->len - usedIndex, (unsigned int)500))); close(); return 0; } if (ret.first == BCT_CORRUPTION) { LCW("killed socket: _onBlockCheck error. 
offset=" << usedIndex << ", len=" << _recving->len << ", bound=" << _recving->bound << "bindata(max 500byte) :" << zsummer::log4z::Log4zBinary(_recving->begin + usedIndex, min(_recving->len - usedIndex, (unsigned int)500))); close(); return 0; } if (ret.first == BCT_SHORTAGE) { break; } try { SessionManager::getRef()._statInfo[STAT_RECV_PACKS]++; LCT("TcpSession::onRecv _onBlockDispatch(sessionID=" << getSessionID() << ", offset=" << usedIndex <<", len=" << ret.second); _options._onBlockDispatch(shared_from_this(), _recving->begin + usedIndex, ret.second); } catch (const std::exception & e) { LCW("MessageEntry _onBlockDispatch catch one exception: " << e.what() << ", bindata(max 500byte) :" << zsummer::log4z::Log4zBinary(_recving->begin + usedIndex, min(ret.second, (unsigned int)500))); } catch (...) { LCW("MessageEntry _onBlockDispatch catch one unknown exception, bindata(max 500byte) :" << zsummer::log4z::Log4zBinary(_recving->begin + usedIndex, min(ret.second, (unsigned int)500))); } usedIndex += ret.second; } else { std::string body; bool isFirstRead = _httpHeader.empty(); OnBlockCheckResult ret; try { ret = _options._onHTTPBlockCheck(_recving->begin + usedIndex, _recving->len - usedIndex, _recving->bound - usedIndex, _httpIsChunked, _httpMethod, _httpMethodLine, _httpHeader, body); } catch (const std::exception & e) { LCW("MessageEntry _onHTTPBlockCheck catch one exception: " << e.what() << ", bindata(max 500byte) :" << zsummer::log4z::Log4zBinary(_recving->begin + usedIndex, min(_recving->len - usedIndex, (unsigned int)500))); close(); return 0; } catch (...) { LCW("MessageEntry _onHTTPBlockCheck catch one unknown exception, bindata(max 500byte) :" << zsummer::log4z::Log4zBinary(_recving->begin + usedIndex, min(_recving->len - usedIndex, (unsigned int)500))); close(); return 0; } if (ret.first == BCT_CORRUPTION) { LCE("killed http socket: _onHTTPBlockCheck error sID=" << _sessionID << ", bindata(max 500byte) :" << zsummer::log4z::Log4zBinary(_recving->begin + usedIndex, min(_recving->len - usedIndex, (unsigned int)500))); close(); return 0; } if (ret.first == BCT_SHORTAGE) { if (isFirstRead) { _httpMethod.clear(); _httpMethodLine.clear(); _httpHeader.clear(); _httpIsChunked = false; } break; } SessionManager::getRef()._statInfo[STAT_RECV_PACKS]++; try { _options._onHTTPBlockDispatch(shared_from_this(), _httpMethod, _httpMethodLine, _httpHeader, body); } catch (const std::exception & e) { LCW("MessageEntry _onHTTPBlockDispatch catch one exception: " << e.what() << ", bindata(max 500byte) :" << zsummer::log4z::Log4zBinary(_recving->begin + usedIndex, min(_recving->len - usedIndex, (unsigned int)500))); } catch (...) { LCW("MessageEntry _onHTTPBlockDispatch catch one unknown exception, bindata(max 500byte) :" << zsummer::log4z::Log4zBinary(_recving->begin + usedIndex, min(_recving->len - usedIndex, (unsigned int)500))); } usedIndex += ret.second; } } while (true); if (usedIndex > 0) { _recving->len= _recving->len - usedIndex; if (_recving->len > 0) { memmove(_recving->begin, _recving->begin + usedIndex, _recving->len); } } # ifndef WIN32 return _recving->len; #else if (!doRecv()) { close(); } return 0; #endif } void TcpSession::send(const char *buf, unsigned int len) { LCT("TcpSession::send sessionID=" << getSessionID() << ", len=" << len); if (len > _sending->bound) { LCE("send error. too large block than sending block bound. 
len=" << len); return; } if (len == 0) { if (_status == 2 && _sending->len == 0 && !_sendque.empty()) { SessionBlock *sb = _sendque.front(); _sendque.pop_front(); memcpy(_sending->begin, sb->begin, sb->len); _sending->len = sb->len; _options._freeBlock(sb); if (!_options._rc4TcpEncryption.empty()) { _rc4StateWrite.encryption((unsigned char*)_sending->begin, _sending->len); } bool sendRet = _sockptr->doSend(_sending->begin, _sending->len, std::bind(&TcpSession::onSend, shared_from_this(), std::placeholders::_1, std::placeholders::_2)); if (!sendRet) { LCE("send error from first connect to send dirty block"); } } return; } //push to send queue if (!_sendque.empty() || _status != 2 || _sending->len != 0) { if (_sendque.size() >= _options._maxSendListCount) { close(); return; } SessionBlock * sb = _options._createBlock(); if (sb->bound < len) { _options._freeBlock(sb); LCE("send error. too large block than session block. len=" << len); return; } memcpy(sb->begin, buf, len); sb->len = len; _sendque.push_back(sb); SessionManager::getRef()._statInfo[STAT_SEND_QUES]++; } //send direct else { memcpy(_sending->begin, buf, len); _sending->len = len; if (!_options._rc4TcpEncryption.empty()) { _rc4StateWrite.encryption((unsigned char*)_sending->begin, _sending->len); } SessionManager::getRef()._statInfo[STAT_SEND_COUNT]++; SessionManager::getRef()._statInfo[STAT_SEND_PACKS]++; bool sendRet = _sockptr->doSend(_sending->begin, _sending->len, std::bind(&TcpSession::onSend, shared_from_this(), std::placeholders::_1, std::placeholders::_2)); if (!sendRet) { LCW("send error "); } } } void TcpSession::onSend(zsummer::network::NetErrorCode ec, int sent) { LCT("TcpSession::onSend session id=" << getSessionID() << ", sent=" << sent); if (ec) { LCD("remote socket closed"); return ; } _sendingLen += sent; SessionManager::getRef()._statInfo[STAT_SEND_BYTES] += sent; if (_sendingLen < _sending->len) { SessionManager::getRef()._statInfo[STAT_SEND_COUNT]++; bool sendRet = _sockptr->doSend(_sending->begin + _sendingLen, _sending->len - _sendingLen, std::bind(&TcpSession::onSend, shared_from_this(), std::placeholders::_1, std::placeholders::_2)); if (!sendRet) { LCW("send error from onSend"); return; } return; } if (_sendingLen == _sending->len) { _sendingLen = 0; _sending->len = 0; if (!_sendque.empty()) { do { SessionBlock *sb = _sendque.front(); _sendque.pop_front(); SessionManager::getRef()._statInfo[STAT_SEND_QUES]--; memcpy(_sending->begin + _sending->len, sb->begin, sb->len); _sending->len += sb->len; _options._freeBlock(sb); SessionManager::getRef()._statInfo[STAT_SEND_PACKS]++; if (_sendque.empty()) { break; } if (_sending->bound - _sending->len < _sendque.front()->len) { break; } if (!_options._joinSmallBlock) { break; } } while (true); if (!_options._rc4TcpEncryption.empty()) { _rc4StateWrite.encryption((unsigned char *)_sending->begin, _sending->len); } SessionManager::getRef()._statInfo[STAT_SEND_COUNT]++; bool sendRet = _sockptr->doSend(_sending->begin, _sending->len, std::bind(&TcpSession::onSend, shared_from_this(), std::placeholders::_1, std::placeholders::_2)); if (!sendRet) { LCW("send error from next queue block."); return; } return; } } } void TcpSession::onPulse() { if (_status == 3) { return; } if (_status == 1) { if (_reconnects >= _options._reconnects) { if (_options._onReconnectEnd) { try { _options._onReconnectEnd(shared_from_this()); } catch (const std::exception & e) { LCE("_options._onReconnectEnd catch excetion=" << e.what()); } catch (...) 
            {
                LCE("_options._onReconnectEnd caught an exception");
            }
        }
        close();
    }
    else
    {
        reconnect();
        _reconnects++;
        _pulseTimerID = SessionManager::getRef().createTimer(_options._connectPulseInterval, std::bind(&TcpSession::onPulse, shared_from_this()));
    }
}
else if (_status == 2)
{
    if (_options._onSessionPulse)
    {
        try
        {
            _options._onSessionPulse(shared_from_this());
        }
        catch (const std::exception & e)
        {
            LCW("TcpSession::onPulse catch one exception: " << e.what());
        }
        catch (...)
        {
            LCW("TcpSession::onPulse catch one unknown exception: ");
        }
    }
    _pulseTimerID = SessionManager::getRef().createTimer(isConnectID(_sessionID) ? _options._connectPulseInterval : _options._sessionPulseInterval, std::bind(&TcpSession::onPulse, shared_from_this()));
}
}

TupleParam & TcpSession::autoTupleParamImpl(size_t index)
{
    if (index > 100)
    {
        LOGW("too many user params. trace=" << zsummer::proto4z::proto4z_traceback());
    }
    if (_param.size() <= index)
    {
        _param.insert(_param.end(), index - _param.size() + 1, std::make_tuple(false, 0.0, 0, ""));
    }
    return _param[index];
}

const TupleParam & TcpSession::peekTupleParamImpl(size_t index) const
{
    const static TupleParam _invalid = std::make_tuple(false, 0.0, 0, "");
    if (index > 100)
    {
        LOGW("too many user params. trace=" << zsummer::proto4z::proto4z_traceback());
    }
    if (_param.size() <= index)
    {
        LOGW("get user param error: not initialized. trace=" << zsummer::proto4z::proto4z_traceback());
        return _invalid;
    }
    return _param.at(index);
}
11,200
1,916
// Copyright (C) 2005, 2006 International Business Machines and others. // All Rights Reserved. // This code is published under the Common Public License. // // $Id: IpIteratesVector.cpp 759 2006-07-07 03:07:08Z andreasw $ // // Authors: <NAME>, <NAME> IBM 2005-06-06 #include "IpIteratesVector.hpp" namespace SimTKIpopt { IteratesVector::IteratesVector(const IteratesVectorSpace* owner_space, bool create_new) : CompoundVector(owner_space, create_new), owner_space_(owner_space) { DBG_ASSERT(owner_space_); } IteratesVector::~IteratesVector() {} SmartPtr<IteratesVector> IteratesVector::MakeNewIteratesVector(bool create_new) const { return owner_space_->MakeNewIteratesVector(create_new); } SmartPtr<IteratesVector> IteratesVector::MakeNewContainer() const { SmartPtr<IteratesVector> ret = MakeNewIteratesVector(false); if (IsValid(x())) { ret->Set_x(*x()); } if (IsValid(s())) { ret->Set_s(*s()); } if (IsValid(y_c())) { ret->Set_y_c(*y_c()); } if (IsValid(y_d())) { ret->Set_y_d(*y_d()); } if (IsValid(z_L())) { ret->Set_z_L(*z_L()); } if (IsValid(z_U())) { ret->Set_z_U(*z_U()); } if (IsValid(v_L())) { ret->Set_v_L(*v_L()); } if (IsValid(v_U())) { ret->Set_v_U(*v_U()); } return ret; // We may need a non const version // if (IsCompConst(0)) { // ret->Set_x(*x()); // } // else { // ret->Set_x_NonConst(*x_NonConst()); // } // if (IsCompConst(1)) { // ret->Set_s(*s()); // } // else { // ret->Set_s_NonConst(*s_NonConst()); // } // if (IsCompConst(2)) { // ret->Set_y_c(*y_c()); // } // else { // ret->Set_y_c_NonConst(*y_c_NonConst()); // } // if (IsCompConst(3)) { // ret->Set_y_d(*y_d()); // } // else { // ret->Set_y_d_NonConst(*y_d_NonConst()); // } // if (IsCompConst(4)) { // ret->Set_z_L(*z_L()); // } // else { // ret->Set_z_L_NonConst(*z_L_NonConst()); // } // if (IsCompConst(5)) { // ret->Set_z_U(*z_U()); // } // else { // ret->Set_z_U_NonConst(*z_U_NonConst()); // } // if (IsCompConst(6)) { // ret->Set_v_L(*v_L()); // } // else { // ret->Set_v_L_NonConst(*v_L_NonConst()); // } // if (IsCompConst(7)) { // ret->Set_v_U(*v_U()); // } // else { // ret->Set_v_U_NonConst(*v_U_NonConst()); // } // return ret; } IteratesVectorSpace::IteratesVectorSpace(const VectorSpace& x_space, const VectorSpace& s_space, const VectorSpace& y_c_space, const VectorSpace& y_d_space, const VectorSpace& z_L_space, const VectorSpace& z_U_space, const VectorSpace& v_L_space, const VectorSpace& v_U_space ) : CompoundVectorSpace(8, x_space.Dim() + s_space.Dim() + y_c_space.Dim() + y_d_space.Dim() + z_L_space.Dim() + z_U_space.Dim() + v_L_space.Dim() + v_U_space.Dim() ) { x_space_ = &x_space; s_space_ = &s_space; y_c_space_ = &y_c_space; y_d_space_ = &y_d_space; z_L_space_ = &z_L_space; z_U_space_ = &z_U_space; v_L_space_ = &v_L_space; v_U_space_ = &v_U_space; this->CompoundVectorSpace::SetCompSpace(0, *x_space_); this->CompoundVectorSpace::SetCompSpace(1, *s_space_); this->CompoundVectorSpace::SetCompSpace(2, *y_c_space_); this->CompoundVectorSpace::SetCompSpace(3, *y_d_space_); this->CompoundVectorSpace::SetCompSpace(4, *z_L_space_); this->CompoundVectorSpace::SetCompSpace(5, *z_U_space_); this->CompoundVectorSpace::SetCompSpace(6, *v_L_space_); this->CompoundVectorSpace::SetCompSpace(7, *v_U_space_); } IteratesVectorSpace::~IteratesVectorSpace() {} } // namespae Ipopt
2,128
1,198
<reponame>dherbst/lullaby<gh_stars>1000+
/*
Copyright 2017-2019 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS-IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

#ifndef LULLABY_UTIL_FILENAME_H_
#define LULLABY_UTIL_FILENAME_H_

#include <string>

#include "lullaby/util/string_view.h"

namespace lull {

// Checks if the |filename| has the filetype |suffix|, which should include the
// '.' (e.g. ".wav", not "wav").
bool EndsWith(string_view filename, string_view suffix);

// Gets the filename and extension from a file path. In other words, strips
// the directory from the file path. For example:
//   "lullaby/foo/bar.png" returns "bar.png".
//   "lullaby/foo/bar" returns "bar".
//   "lullaby/foo/" returns ""
std::string GetBasenameFromFilename(string_view filename);

// Gets the extension (including the dot) from a file path. For example:
//   "lullaby/foo/bar.png" returns ".png".
//   "lullaby/foo/" returns ""
std::string GetExtensionFromFilename(string_view filename);

// Removes the extension from a file path. For example:
//   "lullaby/foo/bar.png" returns "lullaby/foo/bar".
//   "lullaby/foo/" returns "lullaby/foo/"
std::string RemoveExtensionFromFilename(string_view filename);

// Removes both the directory and the extension from a file path. For example:
//   "lullaby/foo/bar.png" returns "bar".
//   "lullaby/foo/" returns ""
std::string RemoveDirectoryAndExtensionFromFilename(string_view filename);

// Returns the entire file path up to the last directory (without the trailing
// directory separator). For example:
//   "lullaby/foo/bar.png" returns "lullaby/foo".
//   "lullaby/foo/" returns "lullaby/foo"
std::string GetDirectoryFromFilename(string_view filename);

// Joins a Directory and Basename into a Filename. For example:
//   ("lullaby/foo", "bar.png") returns "lullaby/foo/bar.png".
std::string JoinPath(string_view directory, string_view basename);

// Correct for platform differences in expressing a path (e.g. a file exported
// on windows may try to locate textures\\file.png on linux/mac).
std::string LocalizePath(string_view path);

// Canonicalize the path. Basically convert directory\file to
// directory/file.
std::string CanonicalizePath(string_view path);

}  // namespace lull

#endif  // LULLABY_UTIL_FILENAME_H_
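// Illustrative usage of the helpers above (the inputs mirror the made-up
// paths from the doc comments; this sketch is not part of the original
// header):
//
//   lull::GetBasenameFromFilename("lullaby/foo/bar.png");     // "bar.png"
//   lull::RemoveExtensionFromFilename("lullaby/foo/bar.png"); // "lullaby/foo/bar"
//   lull::GetDirectoryFromFilename("lullaby/foo/bar.png");    // "lullaby/foo"
//   lull::JoinPath("lullaby/foo", "bar.png");                 // "lullaby/foo/bar.png"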
853
4,332
<filename>vowpalwabbit/core/include/vw/core/parse_regressor.h // Copyright (c) by respective owners including Yahoo!, Microsoft, and // individual contributors. All rights reserved. Released under a BSD (revised) // license as described in the file LICENSE. #pragma once #include "vw/config/options.h" #include "vw/core/vw_fwd.h" #include <string> void read_regressor_file(VW::workspace& all, const std::vector<std::string>& files, io_buf& io_temp); void finalize_regressor(VW::workspace& all, const std::string& reg_name); void initialize_regressor(VW::workspace& all); void save_predictor(VW::workspace& all, const std::string& reg_name, size_t current_pass); void save_load_header(VW::workspace& all, io_buf& model_file, bool read, bool text, std::string& file_options, VW::config::options_i& options); void parse_mask_regressor_args( VW::workspace& all, const std::string& feature_mask, std::vector<std::string> initial_regressors);
317
1,327
<reponame>intel/mkl-dnn<gh_stars>1000+ /******************************************************************************* * Copyright 2019-2022 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ #ifndef MATMUL_HPP #define MATMUL_HPP #include <algorithm> #include <bitset> #include <iostream> #include <map> #include <numeric> #include "oneapi/dnnl/dnnl.h" #include "common.hpp" #include "dnnl_common.hpp" #include "utils/perf_report.hpp" #include "utils/settings.hpp" namespace matmul { typedef std::bitset<DNNL_MAX_NDIMS> dims_mask_t; const int64_t LD_GOOD = INT64_MAX; const int64_t LD_NONE = INT64_MAX - 1; struct settings_t : public base_settings_t { settings_t() = default; // ctor to save certain fields from resetting settings_t(const char *perf_template) : settings_t() { this->perf_template = perf_template; } prb_vdims_t prb_vdims; std::vector<std::string> cfg {std::string()}; std::vector<std::vector<dnnl_data_type_t>> dt {{dnnl_f32}}; std::vector<std::string> stag {tag::any}, wtag {tag::any}, dtag {tag::any}; std::vector<vdims_t> strides {vdims_t(STRIDES_SIZE)}; std::vector<dnnl_data_type_t> bia_dt {dnnl_data_type_undef}; std::vector<int> bia_mask {2}; std::vector<std::vector<dims_mask_t>> rt_dims_masks {{}}; const char *perf_template_csv() const { static const std::string args = "%cfg%,%stag%,%wtag%,%dtag%"; return perf_template_csv_base(args); } void reset() { *this = settings_t(perf_template); } }; struct prb_t : public prb_vdims_t { prb_t(const prb_vdims_t &prb_vdims, const std::vector<dnnl_data_type_t> &dt, const std::string &stag, const std::string &wtag, const std::string &dtag, const vdims_t &strides, dnnl_data_type_t bia_dt, int bia_mask, const std::vector<dims_mask_t> &rt_dims_masks, const attr_t &attr) : prb_vdims_t(prb_vdims) , dt(dt) , stag(stag) , wtag(wtag) , dtag(dtag) , strides(strides) , bia_dt(bia_dt) , bia_mask(bia_mask) , rt_dims_masks(rt_dims_masks) , attr(attr) , scales(NULL) { // Broadcast data types if needed if (dt.size() == 1) { const auto val = dt[0]; // Need a copy here. this->dt.assign(3, val); } this->rt_dims_masks.resize(2); const auto &srcdims = src_dims(); const auto &weidims = weights_dims(); m = srcdims[ndims - 2]; k = srcdims.back(); n = weidims.back(); dst_dims[ndims - 2] = m; dst_dims[ndims - 1] = n; init_dst_rt_dims_mask(); const auto nelems = std::accumulate(dst_dims.begin(), dst_dims.end(), (dnnl_dim_t)1, std::multiplies<dnnl_dim_t>()); ops = 2. * nelems * k; generate_oscales(); src_zp = generate_zero_points(DNNL_ARG_SRC, attr.zero_points, k); dst_zp = generate_zero_points(DNNL_ARG_DST, attr.zero_points, n); } ~prb_t() { if (scales) zfree(scales); if (src_zp) zfree(src_zp); if (dst_zp) zfree(dst_zp); } int m, n, k; dir_t dir = FLAG_FWD; // Lack of prop_kind, always considered as forward. 
std::vector<dnnl_data_type_t> dt; std::string stag, wtag, dtag; vdims_t strides; dnnl_data_type_t bia_dt; int bia_mask; std::vector<dims_mask_t> rt_dims_masks; attr_t attr; double ops; float *scales; int32_t *src_zp, *dst_zp; const dims_t &src_dims() const { return vdims[0]; } const dims_t &weights_dims() const { return vdims[1]; } // const dims_t &prb_vdims_t::dst_dims() const; const dims_mask_t &src_runtime_dim_mask() const { return rt_dims_masks[0]; } const dims_mask_t &weights_runtime_dim_mask() const { return rt_dims_masks[1]; } const dims_mask_t &dst_runtime_dim_mask() const { return rt_dims_masks[2]; } int src_broadcast_mask() const { return prb_vdims_t::get_broadcast_mask(0); } int weights_broadcast_mask() const { return prb_vdims_t::get_broadcast_mask(1); } int bias_broadcast_mask() const { return bia_mask; } dnnl_data_type_t src_dt() const { return dt[0]; } dnnl_data_type_t wei_dt() const { return dt[1]; } dnnl_data_type_t dst_dt() const { return dt[2]; } dnnl_data_type_t get_dt(data_kind_t data_kind) const; void generate_oscales(); int32_t *generate_zero_points( int arg, const attr_t::zero_points_t &zero_points, int N); BENCHDNN_DISALLOW_COPY_AND_ASSIGN(prb_t); private: void init_dst_rt_dims_mask() { if (rt_dims_masks.size() > 2) return; const auto &src_rt_dim_mask = src_runtime_dim_mask(); const auto &wei_rt_dim_mask = weights_runtime_dim_mask(); dims_mask_t dst_rt_dim_mask; for (int i = 0; i < ndims - 2; ++i) { dst_rt_dim_mask[i] = src_rt_dim_mask[i] || wei_rt_dim_mask[i]; } // m, n mask dst_rt_dim_mask[ndims - 2] = src_rt_dim_mask[ndims - 2]; dst_rt_dim_mask[ndims - 1] = wei_rt_dim_mask[ndims - 1]; rt_dims_masks.push_back(dst_rt_dim_mask); } }; std::ostream &operator<<(std::ostream &s, const prb_t &prb); /* some extra control parameters which shouldn't be placed in prb_t */ std::string str2cfg(const char *str); struct perf_report_t : public base_perf_report_t { perf_report_t(const prb_t *prb, const char *perf_template) : base_perf_report_t(perf_template) , p_(prb) , stag_({normalize_tag(p_->stag, p_->ndims)}) , wtag_(normalize_tag(p_->wtag, p_->ndims)) , dtag_(normalize_tag(p_->dtag, p_->ndims)) {} void dump_desc(std::ostream &s) const override { s << static_cast<const prb_vdims_t &>(*p_); } void dump_desc_csv(std::ostream &s) const override { dump_desc(s); } double ops() const override { return p_->ops; } const std::vector<dnnl_data_type_t> *sdt() const override { return &p_->dt; } const attr_t *attr() const override { return &p_->attr; } const std::string *name() const override { return &p_->name; } const std::vector<std::string> *stag() const override { return &stag_; } const std::string *wtag() const override { return &wtag_; } const std::string *dtag() const override { return &dtag_; } private: const prb_t *p_; std::vector<std::string> stag_; std::string wtag_, dtag_; }; inline int64_t src_off_f(const prb_t *prb, int64_t mb, int64_t m, int64_t k) { return (mb * prb->m + m) * prb->k + k; } inline int64_t wei_off_f(const prb_t *prb, int64_t mb, int64_t k, int64_t n) { return (mb * prb->k + k) * prb->n + n; } inline int64_t dst_off_f(const prb_t *prb, int64_t mb, int64_t m, int64_t n) { return (mb * prb->m + m) * prb->n + n; } struct cfg_entry_t { // `cfg_key_t` participates solely in finding a proper entry in the map. 
struct cfg_key_t { cfg_key_t(data_kind_t dk, dnnl_data_type_t dt) : data_kind_(dk), data_type_(dt) {} bool operator<(const cfg_key_t &rhs) const { return value() < rhs.value(); } private: data_kind_t data_kind_; dnnl_data_type_t data_type_; enum { MAX_DT_NUM = 10 }; size_t value() const { return (size_t)data_kind_ * MAX_DT_NUM + (size_t)data_type_; } }; // Entry of the map. Supplies min and max ranges for filling for a given dt. struct cfg_range_t { int range_min; int range_max; }; using cfg_map_t = std::map<cfg_key_t, cfg_range_t>; cfg_entry_t() = default; cfg_entry_t(data_kind_t dk, dnnl_data_type_t orig_dt, dnnl_data_type_t dt) : data_kind_(dk), orig_data_type_(orig_dt), data_type_(dt) {} int get_range_min() const { return get_cfg_range().range_min; } int get_range_max() const { return get_cfg_range().range_max; } int get_range_abs_max() const { return std::max(abs(get_range_min()), abs(get_range_max())); } dnnl_data_type_t get_orig_dt() const { return orig_data_type_; } dnnl_data_type_t get_dt() const { return data_type_; } data_kind_t get_dk() const { return data_kind_; } private: data_kind_t data_kind_; dnnl_data_type_t orig_data_type_; dnnl_data_type_t data_type_; const cfg_map_t &get_cfg_map() const; const cfg_range_t &get_cfg_range() const; }; struct cfg_t { cfg_t(const prb_t *prb, std::vector<data_kind_t> kinds) { for (const auto kind : kinds) { auto orig_data_type_ = prb->get_dt(kind); auto data_type_ = deduce_cfg_data_type(orig_data_type_, prb->attr, kind); cfg_entry.push_back(cfg_entry_t(kind, orig_data_type_, data_type_)); } } int get_range_min(data_kind_t dk) const { return cfg_entry[dk].get_range_min(); } int get_range_max(data_kind_t dk) const { return cfg_entry[dk].get_range_max(); } dnnl_data_type_t get_orig_dt(data_kind_t dk) const { return cfg_entry[dk].get_orig_dt(); } dnnl_data_type_t get_dt(data_kind_t dk) const { return cfg_entry[dk].get_dt(); } float get_density(data_kind_t dk, int64_t n_acc) const; private: std::vector<cfg_entry_t> cfg_entry; const cfg_entry_t &operator[](data_kind_t kind) { for (const auto &e : cfg_entry) { if (e.get_dk() == kind) return e; } assert(!"unexpected"); static cfg_entry_t dummy; return dummy; } }; void handle_legacy_cfg( std::vector<dnnl_data_type_t> &dt, const std::string &cfg); void skip_unimplemented_prb(const prb_t *prb, res_t *res); void skip_invalid_prb(const prb_t *prb, res_t *res); void compute_ref(const prb_t *prb, const args_t &args, dnnl_primitive_t prim_ref = nullptr); int doit(const prb_t *prb, res_t *res); int bench(int argc, char **argv); } // namespace matmul #endif
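The flat offset helpers above encode the dense row-major layout the reference path assumes. A minimal sketch of a reference GEMM loop written against them; the `batch` parameter and the raw float buffers are assumptions of this example, not benchdnn's actual driver API:

// Hedged sketch: reference matmul addressing tensors via the offset helpers.
static void ref_matmul_sketch(const matmul::prb_t *prb, int64_t batch,
        const float *src, const float *wei, float *dst) {
    for (int64_t mb = 0; mb < batch; ++mb)
        for (int64_t m = 0; m < prb->m; ++m)
            for (int64_t n = 0; n < prb->n; ++n) {
                float acc = 0.f;
                for (int64_t k = 0; k < prb->k; ++k)
                    acc += src[matmul::src_off_f(prb, mb, m, k)]
                            * wei[matmul::wei_off_f(prb, mb, k, n)];
                dst[matmul::dst_off_f(prb, mb, m, n)] = acc;
            }
}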
4,952
582
package com.easy.templateThymeleaf.controller; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.MessageSource; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import java.util.Locale; @Controller public class IndexController { @Autowired private MessageSource messageSource; @RequestMapping(value = {"/index", "/"}, method = RequestMethod.GET) public String index(Model model, Locale locale) { model.addAttribute("title", messageSource.getMessage("text.title", null, locale)); return "index"; } }
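For this controller to resolve `text.title`, a message bundle must be on the classpath. A minimal sketch, assuming Spring Boot's default basename `messages`; the values and the French variant are illustrative:

# src/main/resources/messages.properties
text.title=Thymeleaf template demo

# src/main/resources/messages_fr.properties
text.title=Démo de modèle Thymeleaf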
230
1,738
<gh_stars>1000+
/*
 * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
 * its licensors.
 *
 * For complete copyright and license terms please see the LICENSE at the root of this
 * distribution (the "License"). All use of this software is governed by the License,
 * or, if provided, by the license below or the license accompanying this file. Do not
 * remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 */
#pragma once

#include <AzCore/EBus/EBus.h>
#include <AzFramework/Entity/EntityContext.h>

namespace AzFramework
{
    // Forward declarations
    class Scene;

    //! Interface used to create, get, or destroy scenes.
    class SceneSystemRequests
        : public AZ::EBusTraits
    {
    public:
        virtual ~SceneSystemRequests() = default;

        //! Single handler policy since there should only be one instance of this system component.
        static const AZ::EBusHandlerPolicy HandlerPolicy = AZ::EBusHandlerPolicy::Single;

        //! Creates a scene with a given name.
        //! - If there is already a scene with the provided name this will return AZ::Failure().
        virtual AZ::Outcome<Scene*, AZStd::string> CreateScene(AZStd::string_view name) = 0;
        //! Gets a scene with a given name.
        //! - If a scene does not exist with the given name, nullptr is returned.
        virtual Scene* GetScene(AZStd::string_view name) = 0;
        //! Gets all the scenes that currently exist.
        virtual AZStd::vector<Scene*> GetAllScenes() = 0;
        //! Removes a scene with a given name and returns whether the operation was successful.
        virtual bool RemoveScene(AZStd::string_view name) = 0;
        //! Adds a mapping from the provided EntityContextId to a Scene.
        //! - If a scene is already associated with this EntityContextId, nothing is changed and false is returned.
        virtual bool SetSceneForEntityContextId(EntityContextId entityContextId, Scene* scene) = 0;
        //! Removes a mapping from the provided EntityContextId to a Scene.
        //! - If no scene is found for the provided EntityContextId, false is returned.
        virtual bool RemoveSceneForEntityContextId(EntityContextId entityContextId, Scene* scene) = 0;
        //! Gets the scene associated with an EntityContextId.
        //! - If no scene is found for the provided EntityContextId, nullptr is returned.
        virtual Scene* GetSceneFromEntityContextId(EntityContextId entityContextId) = 0;
    };
    using SceneSystemRequestBus = AZ::EBus<SceneSystemRequests>;

    //! Interface used for notifications from the scene system
    class SceneSystemNotifications
        : public AZ::EBusTraits
    {
    public:
        virtual ~SceneSystemNotifications() = default;

        //! There can be multiple listeners to changes in the scene system.
        static const AZ::EBusHandlerPolicy HandlerPolicy = AZ::EBusHandlerPolicy::Multiple;

        //! Called when a scene has been created.
        virtual void SceneCreated(Scene& /*scene*/) {};
        //! Called just before a scene is removed.
        virtual void SceneAboutToBeRemoved(Scene& /*scene*/) {};
    };
    using SceneSystemNotificationBus = AZ::EBus<SceneSystemNotifications>;

    //! Interface used for notifications about individual scenes
    class SceneNotifications
        : public AZ::EBusTraits
    {
    public:
        virtual ~SceneNotifications() = default;

        //! There can be multiple listeners to changes in the scene system.
        static const AZ::EBusHandlerPolicy HandlerPolicy = AZ::EBusHandlerPolicy::Multiple;

        //! Bus is listened to using the pointer of the scene.
        static const AZ::EBusAddressPolicy AddressPolicy = AZ::EBusAddressPolicy::ById;
        //! Specifies that events are addressed by the pointer to the scene.
        using BusIdType = Scene*;

        //! Called just before a scene is removed.
        virtual void SceneAboutToBeRemoved() {};
        //! Called when an entity context is mapped to this scene.
        virtual void EntityContextMapped(EntityContextId /*entityContextId*/) {};
        //! Called when an entity context is unmapped from this scene.
        virtual void EntityContextUnmapped(EntityContextId /*entityContextId*/) {};
    };
    using SceneNotificationBus = AZ::EBus<SceneNotifications>;
} // AzFramework
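Since these are standard AZ::EBus interfaces, callers reach them through the generated bus types. A minimal sketch of creating a scene and reading the outcome; the scene name and the error handling are assumptions of this example:

AZ::Outcome<AzFramework::Scene*, AZStd::string> createOutcome =
    AZ::Failure(AZStd::string("no handler"));
AzFramework::SceneSystemRequestBus::BroadcastResult(
    createOutcome, &AzFramework::SceneSystemRequests::CreateScene, "MainScene");
if (createOutcome.IsSuccess())
{
    AzFramework::Scene* scene = createOutcome.GetValue();
    // Per-scene notifications are addressed by the Scene pointer, so a handler
    // class would connect via SceneNotificationBus::Handler::BusConnect(scene).
}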
1,502
14,668
<gh_stars>1000+ /* * iccprofile.h * * Copyright (C) 1991-1998, <NAME>. * This file is part of the Independent JPEG Group's software. * For conditions of distribution and use, see the accompanying README file. * * This file provides code to read and write International Color Consortium * (ICC) device profiles embedded in JFIF JPEG image files. The ICC has * defined a standard format for including such data in JPEG "APP2" markers. * The code given here does not know anything about the internal structure * of the ICC profile data; it just knows how to put the profile data into * a JPEG file being written, or get it back out when reading. * * This code depends on new features added to the IJG JPEG library as of * IJG release 6b; it will not compile or work with older IJG versions. * * NOTE: this code would need surgery to work on 16-bit-int machines * with ICC profiles exceeding 64K bytes in size. See iccprofile.c * for details. */ #include <stdio.h> /* needed to define "FILE", "NULL" */ #if defined(USE_SYSTEM_LIBJPEG) #include <jpeglib.h> #else #include "jpeglib.h" #endif /* * This routine writes the given ICC profile data into a JPEG file. * It *must* be called AFTER calling jpeg_start_compress() and BEFORE * the first call to jpeg_write_scanlines(). * (This ordering ensures that the APP2 marker(s) will appear after the * SOI and JFIF or Adobe markers, but before all else.) */ extern void write_icc_profile JPP((j_compress_ptr cinfo, const JOCTET *icc_data_ptr, unsigned int icc_data_len)); /* * Reading a JPEG file that may contain an ICC profile requires two steps: * * 1. After jpeg_create_decompress() but before jpeg_read_header(), * call setup_read_icc_profile(). This routine tells the IJG library * to save in memory any APP2 markers it may find in the file. * * 2. After jpeg_read_header(), call read_icc_profile() to find out * whether there was a profile and obtain it if so. */ /* * Prepare for reading an ICC profile */ extern void setup_read_icc_profile JPP((j_decompress_ptr cinfo)); /* * See if there was an ICC profile in the JPEG file being read; * if so, reassemble and return the profile data. * * TRUE is returned if an ICC profile was found, FALSE if not. * If TRUE is returned, *icc_data_ptr is set to point to the * returned data, and *icc_data_len is set to its length. * * IMPORTANT: the data at **icc_data_ptr has been allocated with malloc() * and must be freed by the caller with free() when the caller no longer * needs it. (Alternatively, we could write this routine to use the * IJG library's memory allocator, so that the data would be freed implicitly * at jpeg_finish_decompress() time. But it seems likely that many apps * will prefer to have the data stick around after decompression finishes.) */ extern boolean read_icc_profile JPP((j_decompress_ptr cinfo, JOCTET **icc_data_ptr, unsigned int *icc_data_len));
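The two-step read protocol documented above is easy to get wrong in ordering; a minimal sketch, assuming this header is included as "iccprofile.h" and eliding the rest of the decompression setup and error handling:

/* Hedged sketch: reading an embedded ICC profile per the documented protocol. */
#include <stdio.h>
#include <stdlib.h>
#include "jpeglib.h"
#include "iccprofile.h"   /* include path is an assumption */

void sketch_read_icc(FILE *infile)
{
  struct jpeg_decompress_struct cinfo;
  struct jpeg_error_mgr jerr;
  JOCTET *icc_data = NULL;
  unsigned int icc_len = 0;

  cinfo.err = jpeg_std_error(&jerr);
  jpeg_create_decompress(&cinfo);
  setup_read_icc_profile(&cinfo);      /* step 1: before jpeg_read_header() */
  jpeg_stdio_src(&cinfo, infile);
  jpeg_read_header(&cinfo, TRUE);
  if (read_icc_profile(&cinfo, &icc_data, &icc_len)) {   /* step 2 */
    /* icc_data[0 .. icc_len-1] holds the reassembled profile */
    free(icc_data);                    /* caller owns the buffer, per the docs */
  }
  jpeg_destroy_decompress(&cinfo);
}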
895
348
{"nom":"Mouflières","circ":"3ème circonscription","dpt":"Somme","inscrits":68,"abs":22,"votants":46,"blancs":6,"nuls":3,"exp":37,"res":[{"nuance":"LR","nom":"<NAME>","voix":23},{"nuance":"REM","nom":"<NAME>","voix":14}]}
91
1,593
<gh_stars>1000+ /* * This file is part of Nokia HEIF library * * Copyright (c) 2015-2021 Nokia Corporation and/or its subsidiary(-ies). All rights reserved. * * Contact: <EMAIL> * * This software, including documentation, is protected by copyright controlled by Nokia Corporation and/ or its subsidiaries. All rights are reserved. * Copying, including reproducing, storing, adapting or translating, any or all of this material requires the prior written consent of Nokia. * * */ package com.nokia.heif; /** * Base class for ItemProperties */ public abstract class ItemProperty extends Base { /** * Constructor, abstract class so not called directly * @param heif The parent HEIF instance for the new object * @exception Exception Thrown if the parent HEIF instance is invalid */ protected ItemProperty(HEIF heif) throws Exception { super(heif); } /** * Protected constructor, abstract class so not called directly * @param heif The parent HEIF instance for the new object * @param nativeHandle A handle to the corresponding C++ object */ protected ItemProperty(HEIF heif, long nativeHandle) { super(heif, nativeHandle); } @Override protected void destroyNative() { destroyContextNative(); } private native void destroyContextNative(); }
429
396
<gh_stars>100-1000 {"matchings":[{"confidence":1.470422552607431e-7,"geometry":"gatfEfidjUm@LaA@@z@FX","legs":[{"summary":"","weight":72,"duration":41.1,"steps":[],"distance":103.8}],"weight_name":"routability","weight":72,"duration":41.1,"distance":103.8}],"tracepoints":[{"alternatives_count":0,"waypoint_index":0,"matchings_index":0,"name":"North Harbor Drive","location":[-117.172836,32.712041]},{"alternatives_count":1,"waypoint_index":1,"matchings_index":0,"name":"West G Street","location":[-117.173345,32.712546]}],"code":"Ok"}
191
782
<gh_stars>100-1000 /* * Copyright (c) 2011-2017, <NAME>. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.gui; import javax.swing.*; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; /** * Toggles a paused variable on each mouse click * * @author <NAME> */ public class MousePauseHelper implements MouseListener { boolean paused = false; public MousePauseHelper(JPanel panel ) { panel.addMouseListener(this); panel.requestFocus(); } @Override public void mouseClicked(MouseEvent e) { paused = !paused; } @Override public void mousePressed(MouseEvent e) { } @Override public void mouseReleased(MouseEvent e) { } @Override public void mouseEntered(MouseEvent e) { } @Override public void mouseExited(MouseEvent e) { } public void setPaused(boolean paused) { this.paused = paused; } public boolean isPaused() { return paused; } }
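A minimal usage sketch for the helper above; the panel and the surrounding animation loop are hypothetical:

// Hedged sketch: gating per-frame work on the click-to-pause toggle.
JPanel viewPanel = new JPanel();
MousePauseHelper pauseHelper = new MousePauseHelper(viewPanel);

// Inside a hypothetical animation loop:
if (!pauseHelper.isPaused()) {
    viewPanel.repaint();  // advance/redraw only while not paused
}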
467
1,085
{ "tool_id": "upload1", "tool_version": "1.1.7", "exit_code": 0, "tool_stdout": "stdout", "tool_stderr": "stderr", "command_line": "commandline", "job_messages": ["test-msg1", "test-msg2", "test-msg3", "test-msg4"], "id": "test_id", "copied_from_job_id": "test_copied_from_job_id" }
159
1,886
<reponame>diogommartins/cinder fun(var, 10, a=a, kw=2) # EXPECTED: [ ..., LOAD_CONST(('a', 'kw')), CALL_FUNCTION_KW(4), ..., ]
84
5,169
<gh_stars>1000+
{
  "name": "BeeGDTActionSDK",
  "version": "2.0.1.3",
  "summary": "iOS SDK for reporting GDT (Tencent Ads) action data",
  "description": "See the official GDT documentation for details of each update",
  "homepage": "https://github.com/5hito/Bee_GDTActionSDK",
  "license": {
    "type": "MIT",
    "file": "LICENSE"
  },
  "authors": {
    "5hito": "<EMAIL>"
  },
  "platforms": {
    "ios": "8.0"
  },
  "source": {
    "git": "https://github.com/5hito/Bee_GDTActionSDK.git",
    "tag": "2.0.1.3"
  },
  "requires_arc": true,
  "source_files": "lib/Header/*.h",
  "ios": {
    "vendored_libraries": "lib/sdk/libGDTActionSDK.a"
  },
  "xcconfig": {
    "OTHER_LDFLAGS": "$(inherited) -ObjC"
  }
}
359
5,169
{ "name": "ZYPermission", "version": "0.0.1", "summary": "ZYPermission is a tool", "homepage": "https://github.com/huzhongyin/ZYPermission", "license": "MIT", "authors": { "huzhongyin": "<EMAIL>" }, "source": { "git": "https://github.com/huzhongyin/ZYPermission.git", "tag": "0.0.1" }, "source_files": "PPPrivacyPermission/*.{h,m}", "frameworks": [ "UIKit", "Foundation", "Photos", "AVFoundation", "EventKit", "Contacts", "Speech", "HealthKit", "MediaPlayer", "UserNotifications", "CoreBluetooth", "CoreLocation" ], "requires_arc": true, "platforms": { "ios": "9.0" } }
309
37,508
#ifndef __UAPI_CAM_LRME_H__
#define __UAPI_CAM_LRME_H__

#include "cam_defs.h"

/* LRME Resource Types */
enum CAM_LRME_IO_TYPE {
    CAM_LRME_IO_TYPE_TAR,
    CAM_LRME_IO_TYPE_REF,
    CAM_LRME_IO_TYPE_RES,
    CAM_LRME_IO_TYPE_DS2,
};

#define CAM_LRME_INPUT_PORT_TYPE_TAR (1 << 0)
#define CAM_LRME_INPUT_PORT_TYPE_REF (1 << 1)

#define CAM_LRME_OUTPUT_PORT_TYPE_DS2 (1 << 0)
#define CAM_LRME_OUTPUT_PORT_TYPE_RES (1 << 1)

#define CAM_LRME_DEV_MAX 1

struct cam_lrme_hw_version {
    uint32_t gen;
    uint32_t rev;
    uint32_t step;
};

struct cam_lrme_dev_cap {
    struct cam_lrme_hw_version clc_hw_version;
    struct cam_lrme_hw_version bus_rd_hw_version;
    struct cam_lrme_hw_version bus_wr_hw_version;
    struct cam_lrme_hw_version top_hw_version;
    struct cam_lrme_hw_version top_titan_version;
};

/**
 * struct cam_lrme_query_cap_cmd - LRME query device capability payload
 *
 * @device_iommu: LRME iommu handles for secure/non secure modes
 * @cdm_iommu: Iommu handles for secure/non secure modes
 * @num_devices: number of hardware devices
 * @dev_caps: Returned device capability array
 */
struct cam_lrme_query_cap_cmd {
    struct cam_iommu_handle device_iommu;
    struct cam_iommu_handle cdm_iommu;
    uint32_t num_devices;
    struct cam_lrme_dev_cap dev_caps[CAM_LRME_DEV_MAX];
};

struct cam_lrme_soc_info {
    uint64_t clock_rate;
    uint64_t bandwidth;
    uint64_t reserved[4];
};

struct cam_lrme_acquire_args {
    struct cam_lrme_soc_info lrme_soc_info;
};

#endif /* __UAPI_CAM_LRME_H__ */
649
3,710
#include "iwa_corridorgradientfx.h" #include "trop.h" #include "tparamuiconcept.h" #include "tspectrumparam.h" #include "gradients.h" #include <QPolygonF> #include <array> #include <algorithm> //------------------------------------------------------------ Iwa_CorridorGradientFx::Iwa_CorridorGradientFx() : m_shape(new TIntEnumParam(0, "Quadrangle")) , m_innerColor(TPixel32::White) , m_outerColor(TPixel32::Black) , m_curveType(new TIntEnumParam()) { for (int inout = 0; inout < 2; inout++) { double size = (inout == 0) ? 50. : 400.; std::string inout_str = (inout == 0) ? "_in" : "_out"; for (int c = 0; c < 4; c++) { Qt::Corner corner = (Qt::Corner)c; TPointD basePos(1, 1); if (corner == Qt::TopLeftCorner || corner == Qt::BottomLeftCorner) basePos.x *= -1; if (corner == Qt::BottomLeftCorner || corner == Qt::BottomRightCorner) basePos.y *= -1; m_points[inout][corner] = basePos * size; m_points[inout][corner]->getX()->setMeasureName("fxLength"); m_points[inout][corner]->getY()->setMeasureName("fxLength"); std::string TB_str = (corner == Qt::TopLeftCorner || corner == Qt::TopRightCorner) ? "top" : "bottom"; std::string LR_str = (corner == Qt::TopLeftCorner || corner == Qt::BottomLeftCorner) ? "_left" : "_right"; bindParam(this, TB_str + LR_str + inout_str, m_points[inout][corner]); } } m_shape->addItem(1, "Circle"); bindParam(this, "shape", m_shape); m_curveType->addItem(EaseInOut, "Ease In-Out"); m_curveType->addItem(Linear, "Linear"); m_curveType->addItem(EaseIn, "Ease In"); m_curveType->addItem(EaseOut, "Ease Out"); m_curveType->setDefaultValue(Linear); m_curveType->setValue(Linear); bindParam(this, "curveType", m_curveType); bindParam(this, "inner_color", m_innerColor); bindParam(this, "outer_color", m_outerColor); } //------------------------------------------------------------ bool Iwa_CorridorGradientFx::doGetBBox(double frame, TRectD &bBox, const TRenderSettings &ri) { bBox = TConsts::infiniteRectD; return true; } //------------------------------------------------------------ namespace { QPointF toQPointF(const TPointD &p) { return QPointF(p.x, p.y); } double WedgeProduct(const TPointD v, const TPointD w) { return v.x * w.y - v.y * w.x; } struct BilinearParam { TPointD p0, b1, b2, b3; }; //------------------------------------------------------------ double getFactor(const TPointD &p, const BilinearParam &param, const GradientCurveType type) { double t; TPointD q = p - param.p0; // Set up quadratic formula float A = WedgeProduct(param.b2, param.b3); float B = WedgeProduct(param.b3, q) - WedgeProduct(param.b1, param.b2); float C = WedgeProduct(param.b1, q); // Solve for v if (std::abs(A) < 0.001) { // Linear form t = -C / B; } else { // Quadratic form float discrim = B * B - 4 * A * C; t = 0.5 * (-B - std::sqrt(discrim)) / A; } double factor; switch (type) { case Linear: factor = t; break; case EaseIn: factor = t * t; break; case EaseOut: factor = 1.0 - (1.0 - t) * (1.0 - t); break; case EaseInOut: default: factor = (-2 * t + 3) * (t * t); break; } return factor; } //------------------------------------------------------------ template <typename RASTER, typename PIXEL> void doQuadrangleT(RASTER ras, TDimensionI dim, TPointD pos[2][4], const TSpectrumT<PIXEL> &spectrum, GradientCurveType type) { auto buildPolygon = [&](QPolygonF &pol, Qt::Corner c1, Qt::Corner c2) { pol << toQPointF(pos[0][(int)c1]) << toQPointF(pos[1][(int)c1]) << toQPointF(pos[1][(int)c2]) << toQPointF(pos[0][(int)c2]); }; auto buildBilinearParam = [&](BilinearParam &bp, Qt::Corner c1, Qt::Corner c2) { bp.p0 = 
pos[0][(int)c1]; bp.b1 = pos[0][(int)c2] - pos[0][(int)c1]; bp.b2 = pos[1][(int)c1] - pos[0][(int)c1]; bp.b3 = pos[0][(int)c1] - pos[0][(int)c2] - pos[1][(int)c1] + pos[1][(int)c2]; }; std::array<QPolygonF, 4> polygons; std::array<BilinearParam, 4> params; // Top buildPolygon(polygons[0], Qt::TopLeftCorner, Qt::TopRightCorner); buildBilinearParam(params[0], Qt::TopLeftCorner, Qt::TopRightCorner); // Left buildPolygon(polygons[1], Qt::BottomLeftCorner, Qt::TopLeftCorner); buildBilinearParam(params[1], Qt::BottomLeftCorner, Qt::TopLeftCorner); // Bottom buildPolygon(polygons[2], Qt::BottomRightCorner, Qt::BottomLeftCorner); buildBilinearParam(params[2], Qt::BottomRightCorner, Qt::BottomLeftCorner); // Right buildPolygon(polygons[3], Qt::TopRightCorner, Qt::BottomRightCorner); buildBilinearParam(params[3], Qt::TopRightCorner, Qt::BottomRightCorner); QPolygonF innerPolygon; innerPolygon << toQPointF(pos[0][Qt::TopLeftCorner]) << toQPointF(pos[0][Qt::TopRightCorner]) << toQPointF(pos[0][Qt::BottomRightCorner]) << toQPointF(pos[0][Qt::BottomLeftCorner]); ras->lock(); for (int j = 0; j < ras->getLy(); j++) { PIXEL *pix = ras->pixels(j); PIXEL *endPix = pix + ras->getLx(); int i = 0; while (pix < endPix) { QPointF p(i, j); double factor; bool found = false; for (int edge = 0; edge < 4; edge++) { if (polygons[edge].containsPoint(p, Qt::WindingFill)) { factor = getFactor(TPointD(i, j), params.at(edge), type); found = true; break; } } if (!found) { if (innerPolygon.containsPoint(p, Qt::WindingFill)) factor = 0.0; else factor = 1.0; } *pix++ = spectrum.getPremultipliedValue(factor); i++; } } ras->unlock(); } //------------------------------------------------------------ template <typename RASTER, typename PIXEL> void doCircleT(RASTER ras, TDimensionI dim, TPointD pos[2][4], const TSpectrumT<PIXEL> &spectrum, GradientCurveType type) { auto lerp = [](TPointD p1, TPointD p2, double f) { return p1 * (1 - f) + p2 * f; }; auto bilinearPos = [&](TPointD uv, int inout) { return lerp(lerp(pos[inout][Qt::BottomLeftCorner], pos[inout][Qt::BottomRightCorner], uv.x), lerp(pos[inout][Qt::TopLeftCorner], pos[inout][Qt::TopRightCorner], uv.x), uv.y); }; const int DIVNUM = 36; std::array<TPointD, DIVNUM> innerPos; std::array<TPointD, DIVNUM> outerPos; double tmpRadius = std::sqrt(2.0) / 2.0; for (int div = 0; div < DIVNUM; div++) { double angle = 2.0 * M_PI * (double)div / (double)DIVNUM; // circle position in uv coordinate TPointD uv(tmpRadius * std::cos(angle) + 0.5, tmpRadius * std::sin(angle) + 0.5); // compute inner and outer circle positions by bilinear interpolation // using uv coordinate values. innerPos[div] = bilinearPos(uv, 0); outerPos[div] = bilinearPos(uv, 1); } // - - - - - - - - auto buildPolygon = [&](QPolygonF &pol, int id1, int id2) { pol << toQPointF(innerPos[id2]) << toQPointF(outerPos[id2]) << toQPointF(outerPos[id1]) << toQPointF(innerPos[id1]); }; auto buildBilinearParam = [&](BilinearParam &bp, int id1, int id2) { bp.p0 = innerPos[id2]; bp.b1 = innerPos[id1] - innerPos[id2]; bp.b2 = outerPos[id2] - innerPos[id2]; bp.b3 = innerPos[id2] - innerPos[id1] - outerPos[id2] + outerPos[id1]; }; std::array<QPolygonF, DIVNUM> polygons; std::array<BilinearParam, DIVNUM> params; QPolygonF innerPolygon; for (int div = 0; div < DIVNUM; div++) { int next_div = (div == DIVNUM - 1) ? 
0 : div + 1; // create polygon and bilinear parameters for each piece surrounding the // circle buildPolygon(polygons[div], div, next_div); buildBilinearParam(params[div], div, next_div); // create inner circle polygon innerPolygon << toQPointF(innerPos[div]); } // - - - ok, ready to render ras->lock(); for (int j = 0; j < ras->getLy(); j++) { PIXEL *pix = ras->pixels(j); PIXEL *endPix = pix + ras->getLx(); int i = 0; while (pix < endPix) { QPointF p(i, j); double factor; bool found = false; for (int div = 0; div < DIVNUM; div++) { // check if the point is inside of the surrounding pieces if (polygons[div].containsPoint(p, Qt::WindingFill)) { // compute factor by invert bilinear interpolation factor = getFactor(TPointD(i, j), params.at(div), type); found = true; break; } } if (!found) { if (innerPolygon.containsPoint(p, Qt::WindingFill)) factor = 0.0; else factor = 1.0; } *pix++ = spectrum.getPremultipliedValue(factor); i++; } } ras->unlock(); } }; // namespace //------------------------------------------------------------ void Iwa_CorridorGradientFx::doCompute(TTile &tile, double frame, const TRenderSettings &ri) { if (!((TRaster32P)tile.getRaster()) && !((TRaster64P)tile.getRaster())) { throw TRopException("unsupported input pixel type"); } // convert shape position to render region coordinate TPointD pos[2][4]; TAffine aff = ri.m_affine; TDimensionI dimOut(tile.getRaster()->getLx(), tile.getRaster()->getLy()); TPointD dimOffset((float)dimOut.lx / 2.0f, (float)dimOut.ly / 2.0f); for (int inout = 0; inout < 2; inout++) { for (int c = 0; c < 4; c++) { TPointD _point = m_points[inout][c]->getValue(frame); pos[inout][c] = aff * _point - (tile.m_pos + tile.getRaster()->getCenterD()) + dimOffset; } } std::vector<TSpectrum::ColorKey> colors = { TSpectrum::ColorKey(0, m_innerColor->getValue(frame)), TSpectrum::ColorKey(1, m_outerColor->getValue(frame))}; TSpectrumParamP m_colors = TSpectrumParamP(colors); tile.getRaster()->clear(); TRaster32P outRas32 = (TRaster32P)tile.getRaster(); TRaster64P outRas64 = (TRaster64P)tile.getRaster(); if (m_shape->getValue() == 0) { // Quadrangle if (outRas32) doQuadrangleT<TRaster32P, TPixel32>( outRas32, dimOut, pos, m_colors->getValue(frame), (GradientCurveType)m_curveType->getValue()); else if (outRas64) doQuadrangleT<TRaster64P, TPixel64>( outRas64, dimOut, pos, m_colors->getValue64(frame), (GradientCurveType)m_curveType->getValue()); } else { // m_shape == 1 : Circle if (outRas32) doCircleT<TRaster32P, TPixel32>( outRas32, dimOut, pos, m_colors->getValue(frame), (GradientCurveType)m_curveType->getValue()); else if (outRas64) doCircleT<TRaster64P, TPixel64>( outRas64, dimOut, pos, m_colors->getValue64(frame), (GradientCurveType)m_curveType->getValue()); } } //------------------------------------------------------------ void Iwa_CorridorGradientFx::getParamUIs(TParamUIConcept *&concepts, int &length) { concepts = new TParamUIConcept[length = 6]; int vectorUiIdOffset = 2; std::array<Qt::Corner, 4> loopIds{Qt::TopLeftCorner, Qt::TopRightCorner, Qt::BottomRightCorner, Qt::BottomLeftCorner}; for (int inout = 0; inout < 2; inout++) { concepts[inout].m_type = TParamUIConcept::QUAD; for (int c = 0; c < 4; c++) { Qt::Corner corner = loopIds[c]; // quad ui concepts[inout].m_params.push_back(m_points[inout][(int)corner]); concepts[inout].m_label = (inout == 0) ? 
" In" : " Out"; // vector ui if (inout == 0) concepts[vectorUiIdOffset + (int)corner].m_type = TParamUIConcept::VECTOR; concepts[vectorUiIdOffset + (int)corner].m_params.push_back( m_points[inout][(int)corner]); } } } //------------------------------------------------------------ FX_PLUGIN_IDENTIFIER(Iwa_CorridorGradientFx, "iwa_CorridorGradientFx");
5,551
369
from datetime import date, timedelta, datetime, time from django.contrib.contenttypes.models import ContentType from django.conf import settings from django.db import models from django.db import connections from django.utils import timezone from .models import Period, StatisticByDate, StatisticByDateAndObject class ObjectsByDateTracker(object): date_field = 'date' aggr_op = None metric = None period = None statistic_model = StatisticByDate def __init__(self, **kwargs): for prop, val in kwargs.items(): setattr(self, prop, val) def get_most_recent_kwargs(self): most_recent_kwargs = { 'metric': self.metric, 'period': self.period} return most_recent_kwargs def get_start_date(self, qs): most_recent_kwargs = self.get_most_recent_kwargs() last_stat = self.statistic_model.objects.most_recent( **most_recent_kwargs) if last_stat: start_date = last_stat.date else: first_instance = qs.order_by(self.date_field).first() if first_instance is None: # No data return start_date = getattr(first_instance, self.date_field) if start_date and isinstance(start_date, datetime): if timezone.is_aware(start_date): start_date = timezone.make_naive(start_date).date() else: start_date = start_date.date() return start_date def track_lifetime_upto(self, qs, upto_date): filter_kwargs = { self.date_field + '__date__lte': upto_date } n = qs.filter(**filter_kwargs).count() self.statistic_model.objects.record( metric=self.metric, value=n, period=self.period, date=upto_date) def get_track_values(self): return [] def get_record_kwargs(self, val): return {} def track(self, qs): to_date = date.today() start_date = self.get_start_date(qs) if not start_date: return if self.period == Period.LIFETIME: # Intentionally recompute last stat, as we may have computed # that the last time when the day was not over yet. 
upto_date = start_date while upto_date <= to_date: self.track_lifetime_upto(qs, upto_date) upto_date += timedelta(days=1) elif self.period == Period.DAY: values_fields = ['ts_date'] + self.get_track_values() connection = connections[qs.db] tzname = ( timezone.get_current_timezone_name() if settings.USE_TZ else None) is_datetime = isinstance(qs.model._meta.get_field( self.date_field), models.DateTimeField) if is_datetime: date_sql = connection.ops.datetime_cast_date_sql( self.date_field, tzname) # before django 2.0 it returns a tuple if isinstance(date_sql, tuple): vals = qs.extra( select={"ts_date": date_sql[0]}, select_params=date_sql[1]) else: vals = qs.extra(select={"ts_date": date_sql}) start_dt = datetime.combine( start_date, time()) - timedelta(days=1) if tzname: start_dt = timezone.make_aware( start_dt, timezone.get_current_timezone()) else: vals = qs.extra(select={"ts_date": self.date_field}) start_dt = start_date vals = vals.filter( **{self.date_field + '__gte': start_dt}).values( *values_fields).order_by().annotate(ts_n=self.aggr_op) # TODO: Bulk create for val in vals: self.statistic_model.objects.record( metric=self.metric, value=val['ts_n'], date=val['ts_date'], period=self.period, **self.get_record_kwargs(val)) else: raise NotImplementedError class ObjectsByDateAndObjectTracker(ObjectsByDateTracker): object = None object_model = None object_field = None statistic_model = StatisticByDateAndObject def __init__(self, **kwargs): super(ObjectsByDateAndObjectTracker, self).__init__(**kwargs) assert self.object is None or self.object_field is None assert self.object or self.object_field def get_most_recent_kwargs(self): kwargs = super( ObjectsByDateAndObjectTracker, self).get_most_recent_kwargs() if self.object_model: kwargs['object_type'] = ContentType.objects.get_for_model( self.object_model) else: kwargs['object'] = self.object return kwargs def track_lifetime_upto(self, qs, upto_date): filter_kwargs = { self.date_field + '__date__lte': upto_date } if self.object_model: vals = qs.filter(**filter_kwargs).values( self.object_field).annotate(ts_n=self.aggr_op) for val in vals: object = self.object_model( pk=val[self.object_field]) # TODO: Bulk create StatisticByDateAndObject.objects.record( metric=self.metric, value=val['ts_n'], date=upto_date, object=object, period=self.period) else: n = qs.filter(**filter_kwargs).count() StatisticByDateAndObject.objects.record( metric=self.metric, value=n, object=self.object, period=self.period, date=upto_date) def get_track_values(self): ret = super(ObjectsByDateAndObjectTracker, self).get_track_values() if self.object_model: ret.append(self.object_field) return ret def get_record_kwargs(self, val): if self.object_model: object = self.object_model(pk=val[self.object_field]) else: object = self.object return {'object': object} class CountObjectsByDateTracker(ObjectsByDateTracker): aggr_op = models.Count('pk', distinct=True) class CountObjectsByDateAndObjectTracker(ObjectsByDateAndObjectTracker): aggr_op = models.Count('pk', distinct=True)
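A minimal wiring sketch for the trackers above; the Article model, the metric instance, and the periodic-job context are assumptions of this example:

# Hedged sketch: a periodic job (e.g. a management command) using a tracker.
from myapp.models import Article          # assumed model with a `created` field

daily_articles = CountObjectsByDateTracker(
    metric=article_count_metric,          # an assumed, already-registered metric
    period=Period.DAY,
    date_field='created',
)
daily_articles.track(Article.objects.all())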
3,392
1,444
package mage.cards.s; import java.util.UUID; import mage.MageInt; import mage.abilities.common.SimpleActivatedAbility; import mage.abilities.costs.common.TapSourceCost; import mage.abilities.effects.common.RevealTopLandToBattlefieldElseHandEffect; import mage.abilities.keyword.VigilanceAbility; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.CardType; import mage.constants.SubType; import mage.constants.Zone; /** * @author North */ public final class SkywardEyeProphets extends CardImpl { public SkywardEyeProphets(UUID ownerId, CardSetInfo setInfo) { super(ownerId,setInfo,new CardType[]{CardType.CREATURE},"{3}{G}{W}{U}"); this.subtype.add(SubType.HUMAN); this.subtype.add(SubType.WIZARD); this.power = new MageInt(3); this.toughness = new MageInt(3); // Vigilance this.addAbility(VigilanceAbility.getInstance()); // {tap}: Reveal the top card of your library. If it's a land card, put it onto the battlefield. Otherwise, put it into your hand. this.addAbility(new SimpleActivatedAbility(Zone.BATTLEFIELD, new RevealTopLandToBattlefieldElseHandEffect(), new TapSourceCost())); } private SkywardEyeProphets(final SkywardEyeProphets card) { super(card); } @Override public SkywardEyeProphets copy() { return new SkywardEyeProphets(this); } }
513
357
#pragma once #include <string> #include <vector> #include "Vulkan.h" namespace Vulkan { /* * Class for loading and creating shaders. * Each shader has to be compiled to binary code before usage. */ class Shader final { public: NON_COPIABLE(Shader) Shader(const class Device& device, const std::string& filename); ~Shader(); [[nodiscard]] VkShaderModule Get() const { return shader; } VkPipelineShaderStageCreateInfo CreateShaderStage(VkShaderStageFlagBits stage) const; private: static std::vector<char> Load(const std::string& filename); void CreateShaderModule(const std::vector<char>& code); const class Device& device; VkShaderModule shader{}; }; }
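A minimal usage sketch for this wrapper; the device instance and the SPIR-V file paths are assumptions of this example:

// Hedged sketch: load two compiled SPIR-V binaries and collect their stages.
Vulkan::Shader vertexShader(device, "shaders/scene.vert.spv");
Vulkan::Shader fragmentShader(device, "shaders/scene.frag.spv");

const VkPipelineShaderStageCreateInfo stages[] = {
    vertexShader.CreateShaderStage(VK_SHADER_STAGE_VERTEX_BIT),
    fragmentShader.CreateShaderStage(VK_SHADER_STAGE_FRAGMENT_BIT),
};
// `stages` would then feed VkGraphicsPipelineCreateInfo::pStages / stageCount.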
250
393
/******************************************************************************* * Copyright 2011 <NAME> * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package pl.otros.logview.parser.log4j; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.commons.lang.StringUtils; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.spi.LocationInfo; import org.apache.log4j.spi.LoggingEvent; import org.apache.log4j.spi.ThrowableInformation; import pl.otros.logview.api.InitializationException; import pl.otros.logview.api.TableColumns; import pl.otros.logview.api.model.LogData; import pl.otros.logview.api.parser.MultiLineLogParser; import pl.otros.logview.api.parser.ParserDescription; import pl.otros.logview.api.parser.ParsingContext; import pl.otros.logview.parser.CustomLevelsParser; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; import java.util.regex.MatchResult; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; /** * LogFilePatternReceiver can parse and tail log files, converting entries into LoggingEvents. If the file doesn't exist when the receiver is initialized, the * receiver will look for the file once every 10 seconds. * <p> * This receiver relies on java.util.regex features to perform the parsing of text in the log file, however the only regular expression field explicitly * supported is a glob-style wildcard used to ignore fields in the log file if needed. All other fields are parsed by using the supplied keywords. 
 * <p>
 * <b>Features:</b><br>
 * - specify the URL of the log file to be processed<br>
 * - specify the timestamp format in the file (if one exists, using patterns from {@link java.text.SimpleDateFormat})<br>
 * - specify the pattern (logFormat) used in the log file using keywords, a wildcard character (*) and fixed text<br>
 * - 'tail' the file (allows the contents of the file to be continually read and new events processed)<br>
 * - specify custom charset (default UTF-8)<br>
 * - supports the parsing of multi-line messages and exceptions<br>
 * - 'hostname' property set to URL host (or 'file' if not available)<br>
 * - 'application' property set to URL path (or value of fileURL if not available)
 * <p>
 * <b>Keywords:</b><br>
 * TIMESTAMP<br>
 * LOGGER<br>
 * LEVEL<br>
 * THREAD<br>
 * CLASS<br>
 * FILE<br>
 * LINE<br>
 * METHOD<br>
 * RELATIVETIME<br>
 * MESSAGE<br>
 * NDC<br>
 * PROP(key)<br>
 * <p>
 * Use a * to ignore portions of the log format that should be ignored
 * <p>
 * Example:<br>
 * If your file's patternlayout is this:<br>
 * <b>%d %-5p [%t] %C{2} (%F:%L) - %m%n</b>
 * <p>
 * specify this as the log format:<br>
 * <b>TIMESTAMP LEVEL [THREAD] CLASS (FILE:LINE) - MESSAGE</b>
 * <p>
 * To define a PROPERTY field, use PROP(key)
 * <p>
 * Example:<br>
 * If you used the RELATIVETIME pattern layout character in the file, you can use PROP(RELATIVETIME) in the logFormat definition to assign the RELATIVETIME
 * field as a property on the event.
 * <p>
 * If your file's patternlayout is this:<br>
 * <b>%r [%t] %-5p %c %x - %m%n</b>
 * <p>
 * specify this as the log format:<br>
 * <b>PROP(RELATIVETIME) [THREAD] LEVEL LOGGER * - MESSAGE</b>
 * <p>
 * Note the * - it can be used to ignore a single word or sequence of words in the log file (in order for the wildcard to ignore a sequence of words, the text
 * being ignored must be followed by some delimiter, like '-' or '[') - ndc is being ignored in the following example.
 * <p>
 * Assign a filterExpression in order to only process events which match a filter. If a filterExpression is not assigned, all events are processed.
 * <p>
 * <b>Limitations:</b><br>
 * - no support for the single-line version of throwable supported by patternlayout<br>
 * (this version of throwable will be included as the last line of the message)<br>
 * - the relativetime patternLayout character must be set as a property: PROP(RELATIVETIME)<br>
 * - messages should appear as the last field of the logFormat because of the variability in message content<br>
 * - exceptions are converted if the exception stack trace (other than the first line of the exception)<br>
 * is stored in the log file with a tab followed by the word 'at' as the first characters in the line<br>
 * - tailing may fail if the file rolls over.
* <p> * <b>Example receiver configuration settings</b> (add these as params, specifying a LogFilePatternReceiver 'plugin'):<br> * param: "timestampFormat" value="yyyy-MM-d HH:mm:ss,SSS"<br> * param: "logFormat" value="PROP(RELATIVETIME) [THREAD] LEVEL LOGGER * - MESSAGE"<br> * param: "fileURL" value="file:///c:/events.log"<br> * param: "tailing" value="true" * <p> * This configuration will be able to process these sample events:<br> * 710 [ Thread-0] DEBUG first.logger first - <test> <test2>something here</test2> <test3 blah=something/> <test4> <test5>something else</test5> </test4></test> * <br> * 880 [ Thread-2] DEBUG first.logger third - <test> <test2>something here</test2> <test3 blah=something/> <test4> <test5>something else</test5> </test4></test> * <br> * 880 [ Thread-0] INFO first.logger first - infomsg-0<br> * java.lang.Exception: someexception-first<br> * at Generator2.run(Generator2.java:102)<br> * * @author Code highly based on * http://svn.apache.org/repos/asf/logging/log4j/companions/receivers/trunk/src/main/java/org/apache/log4j/varia/LogFilePatternReceiver.java */ public class Log4jPatternMultilineLogParser implements MultiLineLogParser { private static final java.util.logging.Logger LOG = java.util.logging.Logger.getLogger(Log4jPatternMultilineLogParser.class.getName()); public static final String PROPERTY_NAME = "name"; public static final String PROPERTY_PATTERN = "pattern"; public static final String PROPERTY_REPATTERN = "rePattern"; public static final String PROPERTY_DATE_FORMAT = "dateFormat"; public static final String PROPERTY_CUSTOM_LEVELS = "customLevels"; public static final String PROPERTY_DESCRIPTION = "description"; public static final String PROPERTY_TYPE = "type"; public static final String PROPERTY_CHARSET = "charset"; public static final String PROPERTY_TRIM_UNMATCHED_LINES = "trimUnmatchedLines"; private final List<String> keywords = new ArrayList<>(); // private SimpleDateFormat dateFormat; // private Rule expressionRule; private final String[] emptyException = {""}; private boolean appendNonMatches; private final List<String> matchingKeywords = new ArrayList<>(); private static final String PROP_START = "PROP("; private static final String PROP_END = ")"; protected static final String PROPERTY_LOG_EVENT_PROPERTIES = "Log4jPatternMultilineLogParser.logEventProperties"; protected static final String LOGGER = "LOGGER"; protected static final String MESSAGE = "MESSAGE"; protected static final String TIMESTAMP = "TIMESTAMP"; protected static final String NDC = "NDC"; protected static final String LEVEL = "LEVEL"; protected static final String THREAD = "THREAD"; protected static final String CLASS = "CLASS"; protected static final String FILE = "FILE"; protected static final String LINE = "LINE"; protected static final String METHOD = "METHOD"; // all lines other than first line of exception begin with tab followed by // 'at' followed by text private static final String EXCEPTION_PATTERN = "^\\s+at.*"; private static final String REGEXP_DEFAULT_WILDCARD = ".*?"; private static final String REGEXP_GREEDY_WILDCARD = ".*"; private static final String PATTERN_WILDCARD = "*"; private static final String IN_SPACE_GROUP = "(\\s*?\\S*\\s*?)"; private static final String DEFAULT_GROUP = "(" + REGEXP_DEFAULT_WILDCARD + ")"; private static final String GREEDY_GROUP = "(" + REGEXP_GREEDY_WILDCARD + ")"; private static final String MULTIPLE_SPACES_REGEXP = "[ ]+"; private final String newLine = System.getProperty("line.separator"); private static final String 
VALID_DATEFORMAT_CHARS = "GyMwWDdFEaHkKhmsSzZ";
  private static final String VALID_DATEFORMAT_CHAR_PATTERN = "[" + VALID_DATEFORMAT_CHARS + "]";

  private String timestampFormat = "yyyy-MM-d HH:mm:ss,SSS";
  private String logFormat;
  private String customLevelDefinitions;
  private CustomLevelsParser customLevelsParser;
  // private String filterExpression;
  private String regexp;
  private Pattern regexpPattern;
  private Pattern exceptionPattern;
  private String timestampPatternText;
  private Boolean trimUnmatchedLines;
  private final ParserDescription parserDescription;
  private Properties properties;

  public Log4jPatternMultilineLogParser() {
    keywords.add(TIMESTAMP);
    keywords.add(LOGGER);
    keywords.add(LEVEL);
    keywords.add(THREAD);
    keywords.add(CLASS);
    keywords.add(FILE);
    keywords.add(LINE);
    keywords.add(METHOD);
    keywords.add(MESSAGE);
    keywords.add(NDC);
    try {
      exceptionPattern = Pattern.compile(EXCEPTION_PATTERN);
    } catch (PatternSyntaxException pse) {
      // shouldn't happen
    }
    parserDescription = new ParserDescription();
    parserDescription.setDescription("desc");
    parserDescription.setDisplayName("displayName");
  }

  /**
   * Walk the additionalLines list, looking for the EXCEPTION_PATTERN.
   * <p>
   * Return the index of the first matched line (the match may be the 1st line of an exception)
   * <p>
   * Assumptions: <br>
   * - the additionalLines list may contain both message and exception lines<br>
   * - message lines are added to the additionalLines list and then exception lines (all message lines occur in the list prior to all exception lines)
   *
   * @return -1 if no exception line exists, line number otherwise
   */
  private int getExceptionLine(ParsingContext ctx) {
    String[] additionalLines = ctx.getUnmatchedLog().toString().split("\n");
    for (int i = 0; i < additionalLines.length; i++) {
      Matcher exceptionMatcher = exceptionPattern.matcher(additionalLines[i]);
      if (exceptionMatcher.matches()) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Combine all message lines occurring in the additionalLines list, adding a newline character between each line
   * <p>
   * the event will already have a message - combine this message with the message lines in the additionalLines list (all entries prior to the exceptionLine
   * index)
   *
   * @param firstMessageLine
   *          primary message line
   * @param exceptionLine
   *          index of first exception line
   * @return message
   */
  private String buildMessage(String firstMessageLine, int exceptionLine, ParsingContext ctx) {
    if (ctx.getUnmatchedLog().length() == 0) {
      return firstMessageLine;
    }
    StringBuffer message = new StringBuffer();
    if (firstMessageLine != null) {
      message.append(firstMessageLine);
    }
    String[] additionalLines = ctx.getUnmatchedLog().toString().split("\n");
    int linesToProcess = (exceptionLine == -1 ? additionalLines.length : exceptionLine);
    for (int i = 0; i < linesToProcess; i++) {
      message.append(newLine);
      message.append(additionalLines[i]);
    }
    return message.toString();
  }

  /**
   * Combine all exception lines occurring in the additionalLines list into a String array
   * <p>
   * (all entries equal to or greater than the exceptionLine index)
   *
   * @param exceptionLine
   *          index of first exception line
   * @return exception
   */
  private String[] buildException(int exceptionLine, ParsingContext ctx) {
    if (exceptionLine == -1) {
      return emptyException;
    }
    String[] additionalLines = ctx.getUnmatchedLog().toString().split("\n");
    String[] exception = new String[additionalLines.length - exceptionLine];
    System.arraycopy(additionalLines, exceptionLine, exception, 0, exception.length);
    return exception;
  }

  /**
   * Construct a logging event from currentMap and additionalLines (additionalLines contains multiple message lines and any exception lines)
   * <p>
   * CurrentMap and additionalLines are cleared in the process
   *
   * @return event
   */
  private LoggingEvent buildEvent(ParsingContext ctx) {
    HashMap<String, Object> logEventParsingPropertiesMap = (HashMap<String, Object>) ctx.getCustomConextProperties().get(PROPERTY_LOG_EVENT_PROPERTIES);
    if (logEventParsingPropertiesMap.size() == 0) {
      String[] additionalLines = ctx.getUnmatchedLog().toString().split("\n");
      for (String line : additionalLines) {
        LOG.finest(String.format("found non-matching (file %s) line: \"%s\"", ctx.getLogSource(), line));
      }
      ctx.getUnmatchedLog().setLength(0);
      return null;
    }
    // the current map contains fields - build an event
    int exceptionLine = getExceptionLine(ctx);
    String[] exception = buildException(exceptionLine, ctx);
    String[] additionalLines = ctx.getUnmatchedLog().toString().split("\n");
    // messages are listed before exceptions in additional lines
    if (additionalLines.length > 0 && exception.length > 0) {
      logEventParsingPropertiesMap.put(MESSAGE, buildMessage((String) logEventParsingPropertiesMap.get(MESSAGE), exceptionLine, ctx));
    }
    DateFormat dateFormat = ctx.getDateFormat();
    LoggingEvent event = convertToEvent(logEventParsingPropertiesMap, exception, dateFormat);
    logEventParsingPropertiesMap.clear();
    ctx.getUnmatchedLog().setLength(0);
    return event;
  }

  protected void createPattern() {
    regexpPattern = Pattern.compile(regexp);
  }

  /**
   * Convert the match into a map.
* <p> * Relies on the fact that the matchingKeywords list is in the same order as the groups in the regular expression * * @param result * @return map */ private Map processEvent(MatchResult result) { Map map = new HashMap(); // group zero is the entire match - process all other groups for (int i = 1; i < result.groupCount() + 1; i++) { Object key = matchingKeywords.get(i - 1); Object value = result.group(i); map.put(key, value); } return map; } /** * Helper method that will convert timestamp format to a pattern * * * @return string */ private String convertTimestamp() { // some locales (for example, French) generate timestamp text with // characters not included in \w - // now using \S (all non-whitespace characters) instead of /w String result = timestampFormat.replaceAll(VALID_DATEFORMAT_CHAR_PATTERN + "+", "\\\\S+"); // make sure dots in timestamp are escaped result = result.replaceAll(Pattern.quote("."), "\\\\."); return result; } /** * Build the regular expression needed to parse log entries * */ protected void initializePatterns() { // if custom level definitions exist, parse them customLevelsParser = new CustomLevelsParser(customLevelDefinitions); List<String> buildingKeywords = new ArrayList<>(); String newPattern = logFormat; int index = 0; String current = newPattern; // build a list of property names and temporarily replace the property // with an empty string, // we'll rebuild the pattern later // ? The propertyNames list is never used. List<String> propertyNames = new ArrayList<>(); while (index > -1) { if (current.contains(PROP_START) && current.indexOf(PROP_END) > -1) { index = current.indexOf(PROP_START); String longPropertyName = current.substring(current.indexOf(PROP_START), current.indexOf(PROP_END) + 1); String shortProp = getShortPropertyName(longPropertyName); buildingKeywords.add(shortProp); propertyNames.add(longPropertyName); current = current.substring(longPropertyName.length() + 1 + index); newPattern = singleReplace(newPattern, longPropertyName, Integer.toString(buildingKeywords.size() - 1)); } else { // no properties index = -1; } } /* * we're using a treemap, so the index will be used as the key to ensure keywords are ordered correctly * * examine pattern, adding keywords to an index-based map patterns can contain only one of these per entry...properties are the only 'keyword' that can * occur multiple times in an entry */ Iterator iter = keywords.iterator(); while (iter.hasNext()) { String keyword = (String) iter.next(); int index2 = newPattern.indexOf(keyword); if (index2 > -1) { buildingKeywords.add(keyword); newPattern = singleReplace(newPattern, keyword, Integer.toString(buildingKeywords.size() - 1)); } } String buildingInt = ""; for (int i = 0; i < newPattern.length(); i++) { String thisValue = String.valueOf(newPattern.substring(i, i + 1)); if (isInteger(thisValue)) { buildingInt = buildingInt + thisValue; } else { if (isInteger(buildingInt)) { matchingKeywords.add(buildingKeywords.get(Integer.parseInt(buildingInt))); } // reset buildingInt = ""; } } // if the very last value is an int, make sure to add it if (isInteger(buildingInt)) { matchingKeywords.add(buildingKeywords.get(Integer.parseInt(buildingInt))); } newPattern = replaceMetaChars(newPattern); // compress one or more spaces in the pattern into the [ ]+ regexp // (supports padding of level in log files) newPattern = newPattern.replaceAll(MULTIPLE_SPACES_REGEXP, MULTIPLE_SPACES_REGEXP); newPattern = newPattern.replaceAll(Pattern.quote(PATTERN_WILDCARD), REGEXP_DEFAULT_WILDCARD); // use 
buildingKeywords here to ensure correct order for (int i = 0; i < buildingKeywords.size(); i++) { String keyword = buildingKeywords.get(i); // make the final keyword greedy (we're assuming it's the message) if (i == (buildingKeywords.size() - 1)) { newPattern = singleReplace(newPattern, String.valueOf(i), GREEDY_GROUP); } else if (TIMESTAMP.equals(keyword)) { newPattern = singleReplace(newPattern, String.valueOf(i), "(" + timestampPatternText.replaceAll("'", "") + ")"); } else if (LOGGER.equals(keyword) || LEVEL.equals(keyword)) { newPattern = singleReplace(newPattern, String.valueOf(i), IN_SPACE_GROUP); } else { newPattern = singleReplace(newPattern, String.valueOf(i), DEFAULT_GROUP); } } regexp = newPattern; LOG.fine("regexp is " + regexp); } private boolean isInteger(String value) { try { Integer.parseInt(value); return true; } catch (NumberFormatException nfe) { return false; } } private String singleReplace(String inputString, String oldString, String newString) { String result = inputString; int propLength = oldString.length(); int startPos = result.indexOf(oldString); if (startPos == -1) { LOG.info("string: " + oldString + " not found in input: " + result + " - returning input"); return result; } if (startPos == 0) { result = result.substring(propLength); result = newString + result; } else { result = result.substring(0, startPos) + newString + result.substring(startPos + propLength); } return result; } private String getShortPropertyName(String longPropertyName) { String currentProp = longPropertyName.substring(longPropertyName.indexOf(PROP_START)); String prop = currentProp.substring(0, currentProp.indexOf(PROP_END) + 1); String shortProp = prop.substring(PROP_START.length(), prop.length() - 1); return shortProp; } /** * Some perl5 characters may occur in the log file format. Escape these characters to prevent parsing errors. 
 *
 * @param input
 * @return string
 */
private String replaceMetaChars(String input) {
  // escape backslash first since that character is used to escape the
  // remaining meta chars (one backslash must become two in the output)
  String result = input.replaceAll("\\\\", "\\\\\\\\");

  // don't escape star - it's used as the wildcard
  result = result.replaceAll(Pattern.quote("]"), "\\\\]");
  result = result.replaceAll(Pattern.quote("["), "\\\\[");
  result = result.replaceAll(Pattern.quote("^"), "\\\\^");
  result = result.replaceAll(Pattern.quote("$"), "\\\\$");
  result = result.replaceAll(Pattern.quote("."), "\\\\.");
  result = result.replaceAll(Pattern.quote("|"), "\\\\|");
  result = result.replaceAll(Pattern.quote("?"), "\\\\?");
  result = result.replaceAll(Pattern.quote("+"), "\\\\+");
  result = result.replaceAll(Pattern.quote("("), "\\\\(");
  result = result.replaceAll(Pattern.quote(")"), "\\\\)");
  result = result.replaceAll(Pattern.quote("-"), "\\\\-");
  result = result.replaceAll(Pattern.quote("{"), "\\\\{");
  result = result.replaceAll(Pattern.quote("}"), "\\\\}");
  result = result.replaceAll(Pattern.quote("#"), "\\\\#");
  return result;
}

/**
 * Convert a keyword-to-values map to a LoggingEvent
 *
 * @param fieldMap
 * @param exception
 *
 * @return logging event
 */
private LoggingEvent convertToEvent(Map fieldMap, String[] exception, DateFormat dateFormat) {
  if (fieldMap == null) {
    return null;
  }

  // a logger must exist at a minimum for the event to be processed
  if (!fieldMap.containsKey(LOGGER)) {
    fieldMap.put(LOGGER, "Unknown");
  }
  if (exception == null) {
    exception = emptyException;
  }

  Logger logger = null;
  long timeStamp = 0L;
  String level = null;
  String threadName = null;
  Object message = null;
  String ndc = null;
  String className = null;
  String methodName = null;
  String eventFileName = null;
  String lineNumber = null;
  Hashtable properties = new Hashtable();

  logger = Logger.getLogger(StringUtils.trim((String) fieldMap.remove(LOGGER)));

  if ((dateFormat != null) && fieldMap.containsKey(TIMESTAMP)) {
    String dateString = (String) fieldMap.remove(TIMESTAMP);
    try {
      timeStamp = dateFormat.parse(dateString).getTime();
    } catch (Exception e) {
      LOG.log(java.util.logging.Level.WARNING, "Error parsing date with format \"" + dateFormat + "\" with String \"" + dateString + "\"", e);
    }
  }
  // use current time if timestamp not parseable
  if (timeStamp == 0L) {
    timeStamp = System.currentTimeMillis();
  }

  message = fieldMap.remove(MESSAGE);
  if (message == null) {
    message = "";
  }

  level = StringUtils.trim((String) fieldMap.remove(LEVEL));
  Level levelImpl;
  if (level == null) {
    levelImpl = Level.DEBUG;
  } else {
    // first try to resolve against the custom level definition map, then
    // fall back to regular levels
    level = level.trim();
    final Optional<Level> levelOptional = customLevelsParser.parse(level);
    if (levelOptional.isPresent()) {
      levelImpl = levelOptional.get();
    } else {
      levelImpl = Level.toLevel(level.trim());
      if (!level.equals(levelImpl.toString())) {
        levelImpl = Level.DEBUG;
        LOG.fine("found unexpected level: " + level + ", logger: " + logger.getName() + ", msg: " + message);
        // make sure the text that couldn't match a level is
        // added to the message
        message = level + " " + message;
      }
    }
  }

  threadName = StringUtils.trim((String) fieldMap.remove(THREAD));
  if (threadName == null) {
    threadName = "";
  }

  ndc = (String) fieldMap.remove(NDC);
  className = StringUtils.trim((String) fieldMap.remove(CLASS));
  methodName = (String) fieldMap.remove(METHOD);
  eventFileName = (String) fieldMap.remove(FILE);
  lineNumber = (String) fieldMap.remove(LINE);
  //
properties.put(Constants.HOSTNAME_KEY, host); // properties.put(Constants.APPLICATION_KEY, path); // properties.put(Constants.RECEIVER_NAME_KEY, getName()); // all remaining entries in fieldmap are properties properties.putAll(fieldMap); LocationInfo info = null; if ((eventFileName != null) || (className != null) || (methodName != null) || (lineNumber != null)) { info = new LocationInfo(eventFileName, className, methodName, lineNumber); } else { info = LocationInfo.NA_LOCATION_INFO; } // LoggingEvent event = new LoggingEvent(null, // logger, timeStamp, levelImpl, message, // threadName, // new ThrowableInformation(exception), // ndc, // info, // properties); // LoggingEvent event = new LoggingEvent(); LoggingEvent event = new LoggingEvent(null, logger, timeStamp, levelImpl, message, threadName, new ThrowableInformation(exception), ndc, info, properties); // event.setLogger(logger); // event.setTimeStamp(timeStamp); // event.setLevel(levelImpl); // event.setMessage(message); // event.setThreadName(threadName); // event.setThrowableInformation(new ThrowableInformation(exception)); // event.setNDC(ndc); // event.setLocationInformation(info); // event.setProperties(properties); return event; } @Override public LogData parse(String line, ParsingContext parsingContext) throws ParseException { LogData logData = null; if (line.trim().equals("")) { parsingContext.getUnmatchedLog().append('\n'); parsingContext.getUnmatchedLog().append(line); return null; } Matcher eventMatcher = regexpPattern.matcher(line); Matcher exceptionMatcher = exceptionPattern.matcher(line); HashMap<String, Object> logEventParsingProperties = (HashMap<String, Object>) parsingContext.getCustomConextProperties().get(PROPERTY_LOG_EVENT_PROPERTIES); if (eventMatcher.matches()) { // build an event from the previous match (held in current map) LoggingEvent event = buildEvent(parsingContext); if (event != null) { // doPost(event); logData = Log4jUtil.translateLog4j(event); } // Allow for optional capture fields. // This is used by rePattern now, but traditional patterns could be // enhanced to support optional fields too. // We never write null key ((Set<Map.Entry<String, Object>>) processEvent(eventMatcher.toMatchResult()).entrySet()).stream().filter(entry -> entry.getValue() != null).forEach(entry -> logEventParsingProperties.put(entry.getKey(), entry.getValue())); } else if (exceptionMatcher.matches()) { // an exception line if (parsingContext.getUnmatchedLog().length() > 0) parsingContext.getUnmatchedLog().append('\n'); parsingContext.getUnmatchedLog().append(line); } else { // neither...either post an event with the line or append as // additional lines // if this was a logging event with multiple lines, each line // will show up as its own event instead of being // appended as multiple lines on the same event.. 
// choice is to have each non-matching line show up as its own
// line, or append them all to a previous event
      if (appendNonMatches) {
        // hold on to the previous time, so we can do our best to
        // preserve time-based ordering if the event is a non-match
        String lastTime = (String) logEventParsingProperties.get(TIMESTAMP);
        // build an event from the previous match (held in current
        // map)
        if (logEventParsingProperties.size() > 0) {
          LoggingEvent event = buildEvent(parsingContext);
          if (event != null) {
            logData = Log4jUtil.translateLog4j(event);
          }
        }
        if (lastTime != null) {
          logEventParsingProperties.put(TIMESTAMP, lastTime);
        }
        logEventParsingProperties.put(MESSAGE, line);
      } else {
        if (parsingContext.getUnmatchedLog().length() > 0) {
          parsingContext.getUnmatchedLog().append('\n');
        }
        if (trimUnmatchedLines)
          parsingContext.getUnmatchedLog().append(line.trim());
        else
          parsingContext.getUnmatchedLog().append(line);
      }
    }
    return logData;
  }

  @Override
  public ParserDescription getParserDescription() {
    return parserDescription;
  }

  @Override
  public LogData parseBuffer(ParsingContext parsingContext) throws ParseException {
    LogData logData = null;
    // build an event from the previous match (held in current map)
    LoggingEvent event = buildEvent(parsingContext);
    if (event != null) {
      logData = Log4jUtil.translateLog4j(event);
    }
    return logData;
  }

  @Override
  public void init(Properties properties) throws InitializationException {
    this.properties = properties;
    String rePattern = properties.getProperty(PROPERTY_REPATTERN);
    logFormat = properties.getProperty(PROPERTY_PATTERN);
    if (!StringUtils.isBlank(logFormat) && rePattern != null) {
      throw new InitializationException(String.format("Conflicting log patterns set (properties %s and %s)", PROPERTY_PATTERN, PROPERTY_REPATTERN));
    }
    if (StringUtils.isBlank(logFormat) && rePattern == null) {
      throw new InitializationException(String.format("Log pattern not set (property %s or %s)", PROPERTY_PATTERN, PROPERTY_REPATTERN));
    }
    timestampFormat = properties.getProperty(PROPERTY_DATE_FORMAT);
    if (StringUtils.isBlank(timestampFormat)) {
      throw new InitializationException(String.format("Date format not set (property %s)", PROPERTY_DATE_FORMAT));
    }
    customLevelDefinitions = properties.getProperty(PROPERTY_CUSTOM_LEVELS, "");
    parserDescription.setDisplayName(properties.getProperty(PROPERTY_NAME, "?"));
    parserDescription.setDescription(properties.getProperty(PROPERTY_DESCRIPTION, "?"));
    parserDescription.setCharset(properties.getProperty(PROPERTY_CHARSET, "UTF-8"));
    trimUnmatchedLines = Boolean.valueOf(properties.getProperty(PROPERTY_TRIM_UNMATCHED_LINES, "true"));

    if (timestampFormat != null) {
      timestampPatternText = convertTimestamp();
    }

    if (rePattern == null) {
      initializePatterns();
      createPattern();
    } else {
      try {
        regexpPattern = Pattern.compile(rePattern);
      } catch (PatternSyntaxException pse) {
        throw new InitializationException(String.format(
            "Malformed regex pattern for '%s' (%s): %s",
            PROPERTY_REPATTERN, rePattern, pse.getDescription()));
      }
      // if custom level definitions exist, parse them
      customLevelsParser = new CustomLevelsParser(customLevelDefinitions);

      Map<Integer, String> groupMap = new HashMap<>();
      Enumeration<String> e = (Enumeration<String>) properties.propertyNames();
      String key = null, val = null;
      int keyLen;
      int dotGroupLen = ".group".length();
      while (e.hasMoreElements())
        try {
          key = e.nextElement();
          keyLen = key.length();
          if
(keyLen <= dotGroupLen || !key.endsWith(".group"))
            continue;
          val = properties.getProperty(key);
          groupMap.put(Integer.valueOf(val), key.substring(0, keyLen - dotGroupLen));
        } catch (NumberFormatException ne) {
          throw new InitializationException(String.format(
              "Group property '%s.group' set to non-integer: %s", key, val));
        }
      if (groupMap.size() < 1)
        throw new InitializationException(
            PROPERTY_REPATTERN + " set but no group properties set. "
                + "Set group indexes like 'TIMESTAMP.group=1', "
                + "starting with index 1");
      for (int i = 1; i <= groupMap.size(); i++) {
        if (!groupMap.containsKey(Integer.valueOf(i)))
          throw new InitializationException(
              "Group property numbers not consecutive starting at 1");
        matchingKeywords.add(groupMap.get(Integer.valueOf(i)));
      }
      if (matchingKeywords.contains(Log4jPatternMultilineLogParser.MESSAGE)
          && !matchingKeywords.get(matchingKeywords.size() - 1).equals(
              Log4jPatternMultilineLogParser.MESSAGE))
        throw new InitializationException(
            "If MESSAGE group is present, it must be last");
    }
  }

  @Override
  public void initParsingContext(ParsingContext parsingContext) {
    if (timestampFormat != null) {
      parsingContext.setDateFormat(new SimpleDateFormat(timestampFormat, Locale.ENGLISH));
    }
    parsingContext.getCustomConextProperties().put(PROPERTY_LOG_EVENT_PROPERTIES, new HashMap<String, Object>());
  }

  @Override
  public int getVersion() {
    return LOG_PARSER_VERSION_1;
  }

  @Override
  public TableColumns[] getTableColumnsToUse() {
    /* This replaces method Log4jUtil.getUsedColumns(logFormat), which is
     * less robust and less efficient.
     * There seems to be no intention to share the functionality, so it
     * doesn't belong in any *Util* class.
     */
    ArrayList<TableColumns> list = new ArrayList<>();
    if (matchingKeywords.contains(Log4jPatternMultilineLogParser.CLASS))
      list.add(TableColumns.CLASS);
    if (matchingKeywords.contains(Log4jPatternMultilineLogParser.TIMESTAMP))
      list.add(TableColumns.TIME);
    if (matchingKeywords.contains(Log4jPatternMultilineLogParser.LEVEL))
      list.add(TableColumns.LEVEL);
    if (matchingKeywords.contains(Log4jPatternMultilineLogParser.THREAD))
      list.add(TableColumns.THREAD);
    if (matchingKeywords.contains(Log4jPatternMultilineLogParser.MESSAGE))
      list.add(TableColumns.MESSAGE);
    if (matchingKeywords.contains(Log4jPatternMultilineLogParser.METHOD))
      list.add(TableColumns.METHOD);
    list.add(TableColumns.ID);
    list.add(TableColumns.MARK);
    list.add(TableColumns.NOTE);
    if (matchingKeywords.contains(Log4jPatternMultilineLogParser.FILE))
      list.add(TableColumns.FILE);
    if (matchingKeywords.contains(Log4jPatternMultilineLogParser.LINE))
      list.add(TableColumns.LINE);
    if (matchingKeywords.contains(Log4jPatternMultilineLogParser.NDC))
      list.add(TableColumns.NDC);
    if (!keywords.containsAll(matchingKeywords))
      list.add(TableColumns.PROPERTIES);
    return list.toArray(new TableColumns[list.size()]);
  }

  public Properties getProperties() {
    return properties;
  }
}
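Below is a minimal, hypothetical usage sketch of the parser above; it is not part of the original file. It assumes the literal property keys "pattern", "dateFormat" and "name" behind PROPERTY_PATTERN, PROPERTY_DATE_FORMAT and PROPERTY_NAME, that ParsingContext has a no-arg constructor, and that the parser/LogData classes are already on the classpath (their imports are omitted). Check the real constants before relying on it.

// Usage sketch (assumptions flagged above): configure the parser, feed it
// lines, then flush the final buffered event.
import java.util.Properties;

public class Log4jParserUsageSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Assumed literal keys for PROPERTY_PATTERN / PROPERTY_DATE_FORMAT / PROPERTY_NAME.
        props.setProperty("pattern", "TIMESTAMP LEVEL [THREAD] CLASS - MESSAGE");
        props.setProperty("dateFormat", "yyyy-MM-dd HH:mm:ss,SSS");
        props.setProperty("name", "usage-sketch");

        Log4jPatternMultilineLogParser parser = new Log4jPatternMultilineLogParser();
        parser.init(props);                        // builds the regexp from the keyword pattern

        ParsingContext ctx = new ParsingContext(); // assumed no-arg constructor
        parser.initParsingContext(ctx);            // installs date format + event-property map

        String[] lines = {
            "2024-01-01 12:00:00,000 INFO [main] com.example.App - started",
            "2024-01-01 12:00:01,000 WARN [main] com.example.App - watch out"
        };
        // parse(...) returns the *previous* event once the next matching line
        // proves it complete (multiline events are buffered until then).
        for (String line : lines) {
            LogData data = parser.parse(line, ctx);
            if (data != null) {
                System.out.println(data);
            }
        }
        // Flush the last buffered event.
        LogData last = parser.parseBuffer(ctx);
        if (last != null) {
            System.out.println(last);
        }
    }
}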
# -*- coding: utf8 -*- # Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import warnings from tencentcloud.common.abstract_model import AbstractModel class ApplyBlackListDataRequest(AbstractModel): """ApplyBlackListData请求参数结构体 """ def __init__(self): r""" :param Module: 模块名,AiApi :type Module: str :param Operation: 操作名,ApplyBlackListData :type Operation: str :param BlackList: 黑名单列表 :type BlackList: list of BlackListData """ self.Module = None self.Operation = None self.BlackList = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") if params.get("BlackList") is not None: self.BlackList = [] for item in params.get("BlackList"): obj = BlackListData() obj._deserialize(item) self.BlackList.append(obj) memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class ApplyBlackListDataResponse(AbstractModel): """ApplyBlackListData返回参数结构体 """ def __init__(self): r""" :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RequestId = None def _deserialize(self, params): self.RequestId = params.get("RequestId") class ApplyBlackListRequest(AbstractModel): """ApplyBlackList请求参数结构体 """ def __init__(self): r""" :param Module: 模块名,本接口取值:account :type Module: str :param Operation: 操作名,本接口取值:ApplyBlackList :type Operation: str :param BlackList: 黑名单列表 :type BlackList: list of SingleBlackApply :param InstId: 实例ID,不传默认为系统分配的初始实例 :type InstId: str """ self.Module = None self.Operation = None self.BlackList = None self.InstId = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") if params.get("BlackList") is not None: self.BlackList = [] for item in params.get("BlackList"): obj = SingleBlackApply() obj._deserialize(item) self.BlackList.append(obj) self.InstId = params.get("InstId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class ApplyBlackListResponse(AbstractModel): """ApplyBlackList返回参数结构体 """ def __init__(self): r""" :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RequestId = None def _deserialize(self, params): self.RequestId = params.get("RequestId") class ApplyCreditAuditRequest(AbstractModel): """ApplyCreditAudit请求参数结构体 """ def __init__(self): r""" :param Module: 模块名,本接口取值:Credit :type Module: str :param Operation: 操作名,本接口取值:Apply :type Operation: str :param InstId: 实例ID :type InstId: str :param ProductId: 产品ID,形如P******。 :type ProductId: str :param CaseId: 信审任务ID,同一天内,同一InstId下,同一CaseId只能调用一次。 :type CaseId: str :param CallbackUrl: 回调地址 :type CallbackUrl: str :param Data: JSON格式的业务字段。 :type Data: str """ self.Module = None self.Operation = None self.InstId = None self.ProductId = None self.CaseId = None self.CallbackUrl = None self.Data = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.InstId = params.get("InstId") self.ProductId = params.get("ProductId") self.CaseId = params.get("CaseId") self.CallbackUrl = params.get("CallbackUrl") self.Data = params.get("Data") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class ApplyCreditAuditResponse(AbstractModel): """ApplyCreditAudit返回参数结构体 """ def __init__(self): r""" :param RequestDate: 请求日期 :type RequestDate: str :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RequestDate = None self.RequestId = None def _deserialize(self, params): self.RequestDate = params.get("RequestDate") self.RequestId = params.get("RequestId") class BlackListData(AbstractModel): """黑名单申请信息 """ def __init__(self): r""" :param BlackType: 黑名单类型,01代表手机号码。 :type BlackType: str :param OperType: 操作类型,A为新增,D为删除。 注意:此字段可能返回 null,表示取不到有效值。 :type OperType: str :param BlackValue: 黑名单值,BlackType为01时,填写11位手机号码。 :type BlackValue: str :param BlackDescription: 备注。 注意:此字段可能返回 null,表示取不到有效值。 :type BlackDescription: str :param BlackValidDate: 黑名单生效截止日期,格式为YYYY-MM-DD,不填默认为永久。 注意:此字段可能返回 null,表示取不到有效值。 :type BlackValidDate: str :param BlackAddDate: 黑名单加入日期 注意:此字段可能返回 null,表示取不到有效值。 :type BlackAddDate: str :param BlackStatus: 0-生效 1-失效 :type BlackStatus: str """ self.BlackType = None self.OperType = None self.BlackValue = None self.BlackDescription = None self.BlackValidDate = None self.BlackAddDate = None self.BlackStatus = None def _deserialize(self, params): self.BlackType = params.get("BlackType") self.OperType = params.get("OperType") self.BlackValue = params.get("BlackValue") self.BlackDescription = params.get("BlackDescription") self.BlackValidDate = params.get("BlackValidDate") self.BlackAddDate = params.get("BlackAddDate") self.BlackStatus = params.get("BlackStatus") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class BotFileData(AbstractModel): """机器人文件结构 """ def __init__(self): r""" :param FileType: 文件类型 A 拨打结果 T 记录详情 :type FileType: str :param CosUrl: 文件地址 :type CosUrl: str """ self.FileType = None self.CosUrl = None def _deserialize(self, params): self.FileType = params.get("FileType") self.CosUrl = params.get("CosUrl") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class BotFlow(AbstractModel): """机器人对话流信息 """ def __init__(self): r""" :param BotFlowId: 对话流ID :type BotFlowId: str :param BotFlowName: 对话流名称 :type BotFlowName: str :param PhonePoolList: 号码组信息列表 :type PhonePoolList: list of PhonePool """ self.BotFlowId = None self.BotFlowName = None self.PhonePoolList = None def _deserialize(self, params): self.BotFlowId = params.get("BotFlowId") self.BotFlowName = params.get("BotFlowName") if params.get("PhonePoolList") is not None: self.PhonePoolList = [] for item in params.get("PhonePoolList"): obj = PhonePool() obj._deserialize(item) self.PhonePoolList.append(obj) memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class BotInfo(AbstractModel): """机器人列表 """ def __init__(self): r""" :param BotId: 机器人ID :type BotId: str :param BotName: 机器人名称 :type BotName: str :param BotStatus: 机器人状态。0-停用 1-启用 2-待审核 :type BotStatus: str """ self.BotId = None self.BotName = None self.BotStatus = None def _deserialize(self, params): self.BotId = params.get("BotId") self.BotName = params.get("BotName") self.BotStatus = params.get("BotStatus") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class CallInfo(AbstractModel): """作业信息 """ def __init__(self): r""" :param BizDate: 业务日期 :type BizDate: str :param Status: 状态 WAIT:待执行;DOING:执行中;ERROR:执行错误;DONE:已完成; :type Status: str :param TotalCount: 成功总数 :type TotalCount: int :param FileName: 文件名称 :type FileName: str :param FileType: 文件类型 I:呼叫文件 R:停拨文件 :type FileType: str :param CallId: 作业唯一标识 注意:此字段可能返回 null,表示取不到有效值。 :type CallId: str """ self.BizDate = None self.Status = None self.TotalCount = None self.FileName = None self.FileType = None self.CallId = None def _deserialize(self, params): self.BizDate = params.get("BizDate") self.Status = params.get("Status") self.TotalCount = params.get("TotalCount") self.FileName = params.get("FileName") self.FileType = params.get("FileType") self.CallId = params.get("CallId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class CallTimeDict(AbstractModel): """产品拨打时间集合 """ def __init__(self): r""" :param Monday: 周一 :type Monday: :class:`tencentcloud.cr.v20180321.models.CallTimeInfo` :param Tuesday: 周二 :type Tuesday: :class:`tencentcloud.cr.v20180321.models.CallTimeInfo` :param Wednesday: 周三 :type Wednesday: :class:`tencentcloud.cr.v20180321.models.CallTimeInfo` :param Thursday: 周四 :type Thursday: :class:`tencentcloud.cr.v20180321.models.CallTimeInfo` :param Friday: 周五 :type Friday: :class:`tencentcloud.cr.v20180321.models.CallTimeInfo` :param Saturday: 周六 :type Saturday: :class:`tencentcloud.cr.v20180321.models.CallTimeInfo` :param Sunday: 周日 :type Sunday: :class:`tencentcloud.cr.v20180321.models.CallTimeInfo` """ self.Monday = None self.Tuesday = None self.Wednesday = None self.Thursday = None self.Friday = None self.Saturday = None self.Sunday = None def _deserialize(self, params): if params.get("Monday") is not None: self.Monday = CallTimeInfo() self.Monday._deserialize(params.get("Monday")) if params.get("Tuesday") is not None: self.Tuesday = CallTimeInfo() self.Tuesday._deserialize(params.get("Tuesday")) if params.get("Wednesday") is not None: self.Wednesday = CallTimeInfo() self.Wednesday._deserialize(params.get("Wednesday")) if params.get("Thursday") is not None: self.Thursday = CallTimeInfo() self.Thursday._deserialize(params.get("Thursday")) if params.get("Friday") is not None: self.Friday = CallTimeInfo() self.Friday._deserialize(params.get("Friday")) if params.get("Saturday") is not None: self.Saturday = CallTimeInfo() self.Saturday._deserialize(params.get("Saturday")) if params.get("Sunday") is not None: self.Sunday = CallTimeInfo() self.Sunday._deserialize(params.get("Sunday")) memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class CallTimeInfo(AbstractModel): """产品拨打时间信息 """ def __init__(self): r""" :param StartTime: 产品开始拨打时间,HHmmss格式,默认090000 :type StartTime: str :param EndTime: 产品结束拨打时间,HHmmss格式.默认200000 :type EndTime: str """ self.StartTime = None self.EndTime = None def _deserialize(self, params): self.StartTime = params.get("StartTime") self.EndTime = params.get("EndTime") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class ChangeBotCallStatusRequest(AbstractModel): """ChangeBotCallStatus请求参数结构体 """ def __init__(self): r""" :param Module: 模块名。默认值(固定):AiApi :type Module: str :param Operation: 操作名。默认值(固定):ChangeBotCallStatus :type Operation: str :param Status: 作业变更状态 SUSPEND:暂停;EXECUTE:恢复; :type Status: str :param CallId: 作业唯一标识 :type CallId: str :param BizDate: 业务日期 :type BizDate: str :param BotId: 任务ID,二者必填一个 :type BotId: str :param BotName: 任务名称,二者必填一个 :type BotName: str """ self.Module = None self.Operation = None self.Status = None self.CallId = None self.BizDate = None self.BotId = None self.BotName = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.Status = params.get("Status") self.CallId = params.get("CallId") self.BizDate = params.get("BizDate") self.BotId = params.get("BotId") self.BotName = params.get("BotName") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class ChangeBotCallStatusResponse(AbstractModel): """ChangeBotCallStatus返回参数结构体 """ def __init__(self): r""" :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RequestId = None def _deserialize(self, params): self.RequestId = params.get("RequestId") class ChangeBotTaskStatusRequest(AbstractModel): """ChangeBotTaskStatus请求参数结构体 """ def __init__(self): r""" :param Module: 模块名。默认值(固定):AiApi :type Module: str :param Operation: 操作名。默认值(固定):ChangeBotTaskStatus :type Operation: str :param Status: 作业变更状态 SUSPEND:暂停;EXECUTE:恢复; :type Status: str :param BotId: 任务ID,二者必填一个 :type BotId: str :param BotName: 任务名称,二者必填一个 :type BotName: str """ self.Module = None self.Operation = None self.Status = None self.BotId = None self.BotName = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.Status = params.get("Status") self.BotId = params.get("BotId") self.BotName = params.get("BotName") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class ChangeBotTaskStatusResponse(AbstractModel): """ChangeBotTaskStatus返回参数结构体 """ def __init__(self): r""" :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RequestId = None def _deserialize(self, params): self.RequestId = params.get("RequestId") class CreateBotTaskRequest(AbstractModel): """CreateBotTask请求参数结构体 """ def __init__(self): r""" :param Module: 模块名。默认值(固定):AiApi :type Module: str :param Operation: 操作名。默认值(固定):CreateTask :type Operation: str :param BotName: 任务名称 :type BotName: str :param FlowId: 对话流ID :type FlowId: str :param BanCall: 是否禁止拨打,默认Y :type BanCall: str :param PhoneCollection: 拨打线路集合 :type PhoneCollection: str :param CallTimeCollection: 产品拨打时间集合 :type CallTimeCollection: :class:`tencentcloud.cr.v20180321.models.CallTimeDict` :param StartTimeBan: 禁止拨打起始时间。默认130000 :type StartTimeBan: str :param EndTimeBan: 禁止拨打结束时间。默认140000 :type EndTimeBan: str :param CodeType: 重播方式,NON:未接通、LABEL:意向分级,可多选,用竖线分隔:NON|LABEL :type CodeType: str :param CodeCollection: 重播值集合,A:强意向、B:中意向、C:低意向、D:无意向、E:在忙、F:未接通、G:无效号码,可多选,用竖线分隔:A|B|C|D|E|F|G :type CodeCollection: str :param CallCount: 继续拨打次数 :type CallCount: int :param CallInterval: 拨打间隔 :type CallInterval: int :param SmsSignId: 未接通引用短信签名ID :type SmsSignId: str :param SmsTemplateId: 未接通引用短信模板ID :type SmsTemplateId: str :param CallType: 拨打方式。NORMAL - 正常拨打;TIMER - 定时拨打 :type CallType: str :param CallStartDate: 拨打开始日期。CallType=TIMER时有值,yyyy-MM-dd :type CallStartDate: str :param CallEndDate: 拨打结束日期。CallType=PERIOD 时有值,yyyy-MM-dd :type CallEndDate: str """ self.Module = None self.Operation = None self.BotName = None self.FlowId = None self.BanCall = None self.PhoneCollection = None self.CallTimeCollection = None self.StartTimeBan = None self.EndTimeBan = None self.CodeType = None self.CodeCollection = None self.CallCount = None self.CallInterval = None self.SmsSignId = None self.SmsTemplateId = None self.CallType = None self.CallStartDate = None self.CallEndDate = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.BotName = params.get("BotName") self.FlowId = params.get("FlowId") self.BanCall = params.get("BanCall") self.PhoneCollection = params.get("PhoneCollection") if params.get("CallTimeCollection") is not None: self.CallTimeCollection = CallTimeDict() self.CallTimeCollection._deserialize(params.get("CallTimeCollection")) self.StartTimeBan = params.get("StartTimeBan") self.EndTimeBan = params.get("EndTimeBan") self.CodeType = params.get("CodeType") self.CodeCollection = params.get("CodeCollection") self.CallCount = params.get("CallCount") self.CallInterval = params.get("CallInterval") self.SmsSignId = params.get("SmsSignId") self.SmsTemplateId = params.get("SmsTemplateId") self.CallType = params.get("CallType") self.CallStartDate = params.get("CallStartDate") self.CallEndDate = params.get("CallEndDate") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class CreateBotTaskResponse(AbstractModel): """CreateBotTask返回参数结构体 """ def __init__(self): r""" :param BotId: 机器人任务Id :type BotId: str :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.BotId = None self.RequestId = None def _deserialize(self, params): self.BotId = params.get("BotId") self.RequestId = params.get("RequestId") class DescribeBotFlowRequest(AbstractModel): """DescribeBotFlow请求参数结构体 """ def __init__(self): r""" :param Module: 模块名。默认值(固定):AiApi :type Module: str :param Operation: 操作名。默认值(固定):GetFlow :type Operation: str """ self.Module = None self.Operation = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class DescribeBotFlowResponse(AbstractModel): """DescribeBotFlow返回参数结构体 """ def __init__(self): r""" :param BotFlowList: 机器人对话流列表 注意:此字段可能返回 null,表示取不到有效值。 :type BotFlowList: list of BotFlow :param SmsSignList: 短信签名列表 注意:此字段可能返回 null,表示取不到有效值。 :type SmsSignList: list of SmsSign :param SmsTemplateList: 短信模板列表 注意:此字段可能返回 null,表示取不到有效值。 :type SmsTemplateList: list of SmsTemplate :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.BotFlowList = None self.SmsSignList = None self.SmsTemplateList = None self.RequestId = None def _deserialize(self, params): if params.get("BotFlowList") is not None: self.BotFlowList = [] for item in params.get("BotFlowList"): obj = BotFlow() obj._deserialize(item) self.BotFlowList.append(obj) if params.get("SmsSignList") is not None: self.SmsSignList = [] for item in params.get("SmsSignList"): obj = SmsSign() obj._deserialize(item) self.SmsSignList.append(obj) if params.get("SmsTemplateList") is not None: self.SmsTemplateList = [] for item in params.get("SmsTemplateList"): obj = SmsTemplate() obj._deserialize(item) self.SmsTemplateList.append(obj) self.RequestId = params.get("RequestId") class DescribeCreditResultRequest(AbstractModel): """DescribeCreditResult请求参数结构体 """ def __init__(self): r""" :param Module: 模块名,本接口取值:Credit :type Module: str :param Operation: 操作名,本接口取值:Get :type Operation: str :param InstId: 实例ID :type InstId: str :param ProductId: 产品ID,形如P******。 :type ProductId: str :param CaseId: 信审任务ID :type CaseId: str :param RequestDate: 请求日期,格式为YYYY-MM-DD :type RequestDate: str """ self.Module = None self.Operation = None self.InstId = None self.ProductId = None self.CaseId = None self.RequestDate = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.InstId = params.get("InstId") self.ProductId = params.get("ProductId") self.CaseId = params.get("CaseId") self.RequestDate = params.get("RequestDate") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class DescribeCreditResultResponse(AbstractModel): """DescribeCreditResult返回参数结构体 """ def __init__(self): r""" :param ResultCode: <p>呼叫结果,取值范围:</p><ul style="margin-bottom:0px;"><li>NON:接通</li><li>DBU:号码忙</li><li>DRF:不在服务区</li><li>ANA:欠费未接听</li><li>REJ:拒接</li><li>SHU:关机</li><li>NAN:空号</li><li>HAL:停机</li><li>DAD:未接听</li><li>EXE:其他异常</li></ul> :type ResultCode: str :param ClientCode: 客户标识代码,多个标识码以英文逗号分隔,ResultCode为NON时才有。 注意:此字段可能返回 null,表示取不到有效值。 :type ClientCode: str :param RingStartTime: 开始振铃时间,ResultCode为NON或DAD时才有此字段。 注意:此字段可能返回 null,表示取不到有效值。 :type RingStartTime: str :param RingDuration: 振铃时长 :type RingDuration: int :param AnswerDuration: 接通时长 :type AnswerDuration: int :param ContextValue: JSON格式的扩展信息字段,ResultCode为NON时才有。 注意:此字段可能返回 null,表示取不到有效值。 :type ContextValue: str :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.ResultCode = None self.ClientCode = None self.RingStartTime = None self.RingDuration = None self.AnswerDuration = None self.ContextValue = None self.RequestId = None def _deserialize(self, params): self.ResultCode = params.get("ResultCode") self.ClientCode = params.get("ClientCode") self.RingStartTime = params.get("RingStartTime") self.RingDuration = params.get("RingDuration") self.AnswerDuration = params.get("AnswerDuration") self.ContextValue = params.get("ContextValue") self.RequestId = params.get("RequestId") class DescribeFileModelRequest(AbstractModel): """DescribeFileModel请求参数结构体 """ def __init__(self): r""" :param Module: 模块名。默认值(固定):AiApi :type Module: str :param Operation: 操作名。默认值(固定):DescribeFileModel :type Operation: str :param FileType: 模板文件类型,输入input,停拨stop :type FileType: str :param BotId: 任务ID,二者必填一个 :type BotId: str :param BotName: 任务名称,二者必填一个 :type BotName: str """ self.Module = None self.Operation = None self.FileType = None self.BotId = None self.BotName = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.FileType = params.get("FileType") self.BotId = params.get("BotId") self.BotName = params.get("BotName") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class DescribeFileModelResponse(AbstractModel): """DescribeFileModel返回参数结构体 """ def __init__(self): r""" :param CosUrl: 模板下载链接 :type CosUrl: str :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.CosUrl = None self.RequestId = None def _deserialize(self, params): self.CosUrl = params.get("CosUrl") self.RequestId = params.get("RequestId") class DescribeRecordsRequest(AbstractModel): """DescribeRecords请求参数结构体 """ def __init__(self): r""" :param Module: 模块名,本接口取值:Record :type Module: str :param Operation: 操作名,本接口取值:List :type Operation: str :param ProductId: 产品ID :type ProductId: str :param AccountNum: 案件编号 :type AccountNum: str :param CalledPhone: 被叫号码 :type CalledPhone: str :param StartBizDate: 查询起始日期,格式为YYYY-MM-DD :type StartBizDate: str :param EndBizDate: 查询结束日期,格式为YYYY-MM-DD :type EndBizDate: str :param Offset: 分页参数,索引,默认为0 :type Offset: str :param Limit: 分页参数,页长,默认为20 :type Limit: str :param InstId: 实例ID,不传默认为系统分配的初始实例 :type InstId: str """ self.Module = None self.Operation = None self.ProductId = None self.AccountNum = None self.CalledPhone = None self.StartBizDate = None self.EndBizDate = None self.Offset = None self.Limit = None self.InstId = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.ProductId = params.get("ProductId") self.AccountNum = params.get("AccountNum") self.CalledPhone = params.get("CalledPhone") self.StartBizDate = params.get("StartBizDate") self.EndBizDate = params.get("EndBizDate") self.Offset = params.get("Offset") self.Limit = params.get("Limit") self.InstId = params.get("InstId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class DescribeRecordsResponse(AbstractModel): """DescribeRecords返回参数结构体 """ def __init__(self): r""" :param RecordList: 录音列表。 注意:此字段可能返回 null,表示取不到有效值。 :type RecordList: list of SingleRecord :param TotalCount: 录音总量。 :type TotalCount: int :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RecordList = None self.TotalCount = None self.RequestId = None def _deserialize(self, params): if params.get("RecordList") is not None: self.RecordList = [] for item in params.get("RecordList"): obj = SingleRecord() obj._deserialize(item) self.RecordList.append(obj) self.TotalCount = params.get("TotalCount") self.RequestId = params.get("RequestId") class DescribeTaskStatusRequest(AbstractModel): """DescribeTaskStatus请求参数结构体 """ def __init__(self): r""" :param Module: 模块名,本接口取值:Task :type Module: str :param Operation: 操作名,本接口取值:DescribeTaskStatus :type Operation: str :param TaskId: 任务ID,"上传文件"接口返回的DataResId,形如abc-xyz123 :type TaskId: str :param InstId: 实例ID,不传默认为系统分配的初始实例。 :type InstId: str """ self.Module = None self.Operation = None self.TaskId = None self.InstId = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.TaskId = params.get("TaskId") self.InstId = params.get("InstId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class DescribeTaskStatusResponse(AbstractModel): """DescribeTaskStatus返回参数结构体 """ def __init__(self): r""" :param TaskResult: <p>任务结果:</p><ul style="margin-bottom:0px;"><li>处理中:"Uploading Data."</li><li>上传成功:"File Uploading Task Success."</li><li>上传失败:具体失败原因</li></ul> :type TaskResult: str :param TaskType: <p>任务类型:</p><ul style="margin-bottom:0px;"><li>到期/逾期提醒数据上传:002</li><li>到期/逾期提醒停拨数据上传:003</li><li>回访数据上传:004</li><li>回访停拨数据上传:005</li></ul> :type TaskType: str :param TaskFileUrl: 过滤文件下载链接,有过滤数据时才存在。 注意:此字段可能返回 null,表示取不到有效值。 :type TaskFileUrl: str :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.TaskResult = None self.TaskType = None self.TaskFileUrl = None self.RequestId = None def _deserialize(self, params): self.TaskResult = params.get("TaskResult") self.TaskType = params.get("TaskType") self.TaskFileUrl = params.get("TaskFileUrl") self.RequestId = params.get("RequestId") class DownloadBotRecordRequest(AbstractModel): """DownloadBotRecord请求参数结构体 """ def __init__(self): r""" :param Module: 模块:AiApi :type Module: str :param Operation: 操作:DownloadRecord :type Operation: str :param BizDate: 业务日期 :type BizDate: str """ self.Module = None self.Operation = None self.BizDate = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.BizDate = params.get("BizDate") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class DownloadBotRecordResponse(AbstractModel): """DownloadBotRecord返回参数结构体 """ def __init__(self): r""" :param RecordCosUrl: 录音地址。请求后30分钟内有效 :type RecordCosUrl: str :param TextCosUrl: 文本地址。请求后30分钟内有效 :type TextCosUrl: str :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RecordCosUrl = None self.TextCosUrl = None self.RequestId = None def _deserialize(self, params): self.RecordCosUrl = params.get("RecordCosUrl") self.TextCosUrl = params.get("TextCosUrl") self.RequestId = params.get("RequestId") class DownloadDialogueTextRequest(AbstractModel): """DownloadDialogueText请求参数结构体 """ def __init__(self): r""" :param Module: 模块名,本接口取值:Report :type Module: str :param Operation: 操作名,本接口取值:DownloadTextReport :type Operation: str :param ReportDate: 报告日期,格式为YYYY-MM-DD :type ReportDate: str :param InstId: 实例ID :type InstId: str """ self.Module = None self.Operation = None self.ReportDate = None self.InstId = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.ReportDate = params.get("ReportDate") self.InstId = params.get("InstId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class DownloadDialogueTextResponse(AbstractModel): """DownloadDialogueText返回参数结构体 """ def __init__(self): r""" :param TextReportUrl: 对话文本下载地址 :type TextReportUrl: str :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.TextReportUrl = None self.RequestId = None def _deserialize(self, params): self.TextReportUrl = params.get("TextReportUrl") self.RequestId = params.get("RequestId") class DownloadRecordListRequest(AbstractModel): """DownloadRecordList请求参数结构体 """ def __init__(self): r""" :param Module: 模块名,本接口取值:Record :type Module: str :param Operation: 操作名,本接口取值:DownloadList :type Operation: str :param BizDate: 录音日期,格式为YYYY-MM-DD :type BizDate: str :param InstId: 实例ID :type InstId: str """ self.Module = None self.Operation = None self.BizDate = None self.InstId = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.BizDate = params.get("BizDate") self.InstId = params.get("InstId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class DownloadRecordListResponse(AbstractModel): """DownloadRecordList返回参数结构体 """ def __init__(self): r""" :param RecordListUrl: 录音列表下载地址 :type RecordListUrl: str :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RecordListUrl = None self.RequestId = None def _deserialize(self, params): self.RecordListUrl = params.get("RecordListUrl") self.RequestId = params.get("RequestId") class DownloadReportRequest(AbstractModel): """DownloadReport请求参数结构体 """ def __init__(self): r""" :param Module: 模块名,本接口取值:Report :type Module: str :param Operation: 操作名,本接口取值:DownloadReport :type Operation: str :param ReportDate: 报告日期,格式为YYYY-MM-DD :type ReportDate: str :param InstId: 实例ID,不传默认为系统分配的初始实例。 :type InstId: str """ self.Module = None self.Operation = None self.ReportDate = None self.InstId = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.ReportDate = params.get("ReportDate") self.InstId = params.get("InstId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class DownloadReportResponse(AbstractModel): """DownloadReport返回参数结构体 """ def __init__(self): r""" :param DailyReportUrl: 到期/逾期提醒日报下载地址 注意:此字段可能返回 null,表示取不到有效值。 :type DailyReportUrl: str :param ResultReportUrl: 到期/逾期提醒结果下载地址 注意:此字段可能返回 null,表示取不到有效值。 :type ResultReportUrl: str :param DetailReportUrl: 到期/逾期提醒明细下载地址 注意:此字段可能返回 null,表示取不到有效值。 :type DetailReportUrl: str :param CallbackDailyReportUrl: 回访日报下载地址 注意:此字段可能返回 null,表示取不到有效值。 :type CallbackDailyReportUrl: str :param CallbackResultReportUrl: 回访结果下载地址 注意:此字段可能返回 null,表示取不到有效值。 :type CallbackResultReportUrl: str :param CallbackDetailReportUrl: 回访明细下载地址 注意:此字段可能返回 null,表示取不到有效值。 :type CallbackDetailReportUrl: str :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.DailyReportUrl = None self.ResultReportUrl = None self.DetailReportUrl = None self.CallbackDailyReportUrl = None self.CallbackResultReportUrl = None self.CallbackDetailReportUrl = None self.RequestId = None def _deserialize(self, params): self.DailyReportUrl = params.get("DailyReportUrl") self.ResultReportUrl = params.get("ResultReportUrl") self.DetailReportUrl = params.get("DetailReportUrl") self.CallbackDailyReportUrl = params.get("CallbackDailyReportUrl") self.CallbackResultReportUrl = params.get("CallbackResultReportUrl") self.CallbackDetailReportUrl = params.get("CallbackDetailReportUrl") self.RequestId = params.get("RequestId") class ExportBotDataRequest(AbstractModel): """ExportBotData请求参数结构体 """ def __init__(self): r""" :param Module: 模块名。默认值(固定):AiApi :type Module: str :param Operation: 操作名。默认值(固定):ExportBotData :type Operation: str :param BizDate: 业务日期。YYYY-MM-DD :type BizDate: str :param BotId: 任务ID,二者必填一个 :type BotId: str :param BotName: 任务名称,二者必填一个 :type BotName: str """ self.Module = None self.Operation = None self.BizDate = None self.BotId = None self.BotName = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.BizDate = params.get("BizDate") self.BotId = params.get("BotId") self.BotName = params.get("BotName") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class ExportBotDataResponse(AbstractModel): """ExportBotData返回参数结构体 """ def __init__(self): r""" :param Data: 导出文件列表 :type Data: list of BotFileData :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.Data = None self.RequestId = None def _deserialize(self, params): if params.get("Data") is not None: self.Data = [] for item in params.get("Data"): obj = BotFileData() obj._deserialize(item) self.Data.append(obj) self.RequestId = params.get("RequestId") class PhonePool(AbstractModel): """号码组信息 """ def __init__(self): r""" :param PoolId: 号码组ID :type PoolId: str :param PoolName: 号码组名称 :type PoolName: str """ self.PoolId = None self.PoolName = None def _deserialize(self, params): self.PoolId = params.get("PoolId") self.PoolName = params.get("PoolName") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class ProductQueryInfo(AbstractModel): """QueryProducts接口对应数据结构。产品对应的相关信息。 """ def __init__(self): r""" :param ProductId: 产品Id :type ProductId: str :param ProductName: 产品名称 :type ProductName: str :param ProductCode: 产品编码 注意:此字段可能返回 null,表示取不到有效值。 :type ProductCode: str :param ProductStatus: 产品状态 0 禁用 1 启用 注意:此字段可能返回 null,表示取不到有效值。 :type ProductStatus: int :param SceneType: 场景类型 注意:此字段可能返回 null,表示取不到有效值。 :type SceneType: str """ self.ProductId = None self.ProductName = None self.ProductCode = None self.ProductStatus = None self.SceneType = None def _deserialize(self, params): self.ProductId = params.get("ProductId") self.ProductName = params.get("ProductName") self.ProductCode = params.get("ProductCode") self.ProductStatus = params.get("ProductStatus") self.SceneType = params.get("SceneType") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class QueryBlackListDataRequest(AbstractModel): """QueryBlackListData请求参数结构体 """ def __init__(self): r""" :param Module: 模块:AiApi :type Module: str :param Operation: 操作:QueryBlackListData :type Operation: str :param Offset: 页码 :type Offset: int :param Limit: 每页数量 :type Limit: int :param StartBizDate: 开始日期 :type StartBizDate: str :param EndBizDate: 结束日期 :type EndBizDate: str :param BlackValue: 电话号码、手机 :type BlackValue: str """ self.Module = None self.Operation = None self.Offset = None self.Limit = None self.StartBizDate = None self.EndBizDate = None self.BlackValue = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.Offset = params.get("Offset") self.Limit = params.get("Limit") self.StartBizDate = params.get("StartBizDate") self.EndBizDate = params.get("EndBizDate") self.BlackValue = params.get("BlackValue") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class QueryBlackListDataResponse(AbstractModel): """QueryBlackListData返回参数结构体 """ def __init__(self): r""" :param TotalCount: 总数。 :type TotalCount: int :param Data: 黑名单列表 注意:此字段可能返回 null,表示取不到有效值。 :type Data: list of BlackListData :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.TotalCount = None self.Data = None self.RequestId = None def _deserialize(self, params): self.TotalCount = params.get("TotalCount") if params.get("Data") is not None: self.Data = [] for item in params.get("Data"): obj = BlackListData() obj._deserialize(item) self.Data.append(obj) self.RequestId = params.get("RequestId") class QueryBotListRequest(AbstractModel): """QueryBotList请求参数结构体 """ def __init__(self): r""" :param Module: 模块名:AiApi :type Module: str :param Operation: 操作名:QueryBotList :type Operation: str """ self.Module = None self.Operation = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class QueryBotListResponse(AbstractModel): """QueryBotList返回参数结构体 """ def __init__(self): r""" :param BotList: 任务列表。 :type BotList: list of BotInfo :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.BotList = None self.RequestId = None def _deserialize(self, params): if params.get("BotList") is not None: self.BotList = [] for item in params.get("BotList"): obj = BotInfo() obj._deserialize(item) self.BotList.append(obj) self.RequestId = params.get("RequestId") class QueryCallListRequest(AbstractModel): """QueryCallList请求参数结构体 """ def __init__(self): r""" :param Module: 模块名。默认值(固定):AiApi :type Module: str :param Operation: 操作名。默认值(固定):QueryCallList :type Operation: str :param BizDate: 业务日期 :type BizDate: str :param BotId: 任务ID,二者必填一个 :type BotId: str :param BotName: 任务名称,二者必填一个 :type BotName: str :param FileName: 通过API或平台上传的文件完整名称 :type FileName: str """ self.Module = None self.Operation = None self.BizDate = None self.BotId = None self.BotName = None self.FileName = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.BizDate = params.get("BizDate") self.BotId = params.get("BotId") self.BotName = params.get("BotName") self.FileName = params.get("FileName") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class QueryCallListResponse(AbstractModel): """QueryCallList返回参数结构体 """ def __init__(self): r""" :param CallList: 任务作业状态 注意:此字段可能返回 null,表示取不到有效值。 :type CallList: list of CallInfo :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.CallList = None self.RequestId = None def _deserialize(self, params): if params.get("CallList") is not None: self.CallList = [] for item in params.get("CallList"): obj = CallInfo() obj._deserialize(item) self.CallList.append(obj) self.RequestId = params.get("RequestId") class QueryInstantDataRequest(AbstractModel): """QueryInstantData请求参数结构体 """ def __init__(self): r""" :param Module: 模块名,本接口取值:Data :type Module: str :param Operation: 操作名,本接口取值:Query :type Operation: str :param ProductId: 产品ID :type ProductId: str :param InstanceId: 实例ID,不传默认为系统分配的初始实例 :type InstanceId: str :param QueryModel: 查询类型:callRecord 通话记录 :type QueryModel: str :param Data: 查询参数 :type Data: str """ self.Module = None self.Operation = None self.ProductId = None self.InstanceId = None self.QueryModel = None self.Data = None def _deserialize(self, params): self.Module = params.get("Module") self.Operation = params.get("Operation") self.ProductId = params.get("ProductId") self.InstanceId = params.get("InstanceId") self.QueryModel = params.get("QueryModel") self.Data = params.get("Data") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
                          % ",".join(memeber_set))


class QueryInstantDataResponse(AbstractModel):
    """QueryInstantData response structure.

    """

    def __init__(self):
        r"""
        :param TotalCount: Total count.
Note: this field may return null, indicating that no valid values can be obtained.
        :type TotalCount: int
        :param Data: Returned content.
Note: this field may return null, indicating that no valid values can be obtained.
        :type Data: str
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
        :type RequestId: str
        """
        self.TotalCount = None
        self.Data = None
        self.RequestId = None


    def _deserialize(self, params):
        self.TotalCount = params.get("TotalCount")
        self.Data = params.get("Data")
        self.RequestId = params.get("RequestId")


class QueryProductsRequest(AbstractModel):
    """QueryProducts request structure.

    """

    def __init__(self):
        r"""
        :param Module: Module name. Default value (fixed): Product
        :type Module: str
        :param Operation: Operation name. Default value (fixed): QueryProducts
        :type Operation: str
        :param InstanceId: Instance ID.
        :type InstanceId: str
        """
        self.Module = None
        self.Operation = None
        self.InstanceId = None


    def _deserialize(self, params):
        self.Module = params.get("Module")
        self.Operation = params.get("Operation")
        self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class QueryProductsResponse(AbstractModel):
    """QueryProducts response structure.

    """

    def __init__(self):
        r"""
        :param ProductList: Product information.
        :type ProductList: list of ProductQueryInfo
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
        :type RequestId: str
        """
        self.ProductList = None
        self.RequestId = None


    def _deserialize(self, params):
        if params.get("ProductList") is not None:
            self.ProductList = []
            for item in params.get("ProductList"):
                obj = ProductQueryInfo()
                obj._deserialize(item)
                self.ProductList.append(obj)
        self.RequestId = params.get("RequestId")


class QueryRecordListRequest(AbstractModel):
    """QueryRecordList request structure.

    """

    def __init__(self):
        r"""
        :param Module: Module name. AiApi
        :type Module: str
        :param Operation: Operation name. QueryRecordList
        :type Operation: str
        :param Offset: Offset.
        :type Offset: int
        :param Limit: Number of entries to return, 20 at most.
        :type Limit: int
        :param BotId: Task ID. Either BotId or BotName is required.
        :type BotId: str
        :param BotName: Task name. Either BotId or BotName is required.
        :type BotName: str
        :param CalledPhone: Called number.
        :type CalledPhone: str
        :param StartBizDate: Start date.
        :type StartBizDate: str
        :param EndBizDate: End date.
        :type EndBizDate: str
        """
        self.Module = None
        self.Operation = None
        self.Offset = None
        self.Limit = None
        self.BotId = None
        self.BotName = None
        self.CalledPhone = None
        self.StartBizDate = None
        self.EndBizDate = None


    def _deserialize(self, params):
        self.Module = params.get("Module")
        self.Operation = params.get("Operation")
        self.Offset = params.get("Offset")
        self.Limit = params.get("Limit")
        self.BotId = params.get("BotId")
        self.BotName = params.get("BotName")
        self.CalledPhone = params.get("CalledPhone")
        self.StartBizDate = params.get("StartBizDate")
        self.EndBizDate = params.get("EndBizDate")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class QueryRecordListResponse(AbstractModel):
    """QueryRecordList response structure.

    """

    def __init__(self):
        r"""
        :param RecordList: Recording list.
        :type RecordList: list of RecordInfo
        :param TotalCount: Total count.
        :type TotalCount: int
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
        :type RequestId: str
        """
        self.RecordList = None
        self.TotalCount = None
        self.RequestId = None


    def _deserialize(self, params):
        if params.get("RecordList") is not None:
            self.RecordList = []
            for item in params.get("RecordList"):
                obj = RecordInfo()
                obj._deserialize(item)
                self.RecordList.append(obj)
        self.TotalCount = params.get("TotalCount")
        self.RequestId = params.get("RequestId")


class RecordInfo(AbstractModel):
    """Recording file details.

    """

    def __init__(self):
        r"""
        :param BotId: Task ID.
        :type BotId: str
        :param BotName: Task name.
        :type BotName: str
        :param BizDate: Task date.
        :type BizDate: str
        :param CalledPhone: Called number.
        :type CalledPhone: str
        :param CallStartTime: Call start time.
        :type CallStartTime: str
        :param Duration: Call duration.
        :type Duration: int
        :param CosUrl: Recording file URL.
        :type CosUrl: str
        :param DialogueLog: Dialogue log, in JSON format.
        :type DialogueLog: str
        :param CosFileName: Recording file name.
        :type CosFileName: str
        """
        self.BotId = None
        self.BotName = None
        self.BizDate = None
        self.CalledPhone = None
        self.CallStartTime = None
        self.Duration = None
        self.CosUrl = None
        self.DialogueLog = None
        self.CosFileName = None


    def _deserialize(self, params):
        self.BotId = params.get("BotId")
        self.BotName = params.get("BotName")
        self.BizDate = params.get("BizDate")
        self.CalledPhone = params.get("CalledPhone")
        self.CallStartTime = params.get("CallStartTime")
        self.Duration = params.get("Duration")
        self.CosUrl = params.get("CosUrl")
        self.DialogueLog = params.get("DialogueLog")
        self.CosFileName = params.get("CosFileName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class SingleBlackApply(AbstractModel):
    """Blacklist application details.

    """

    def __init__(self):
        r"""
        :param BlackType: Blacklist type. 01: phone number.
        :type BlackType: str
        :param OperationType: Operation type. A: add; D: delete.
        :type OperationType: str
        :param BlackValue: Blacklist value. When BlackType is 01, fill in an 11-digit phone number.
        :type BlackValue: str
        :param BlackDescription: Remarks.
        :type BlackDescription: str
        :param BlackValidDate: Blacklist expiry date in YYYY-MM-DD format; permanent by default if left empty.
        :type BlackValidDate: str
        """
        self.BlackType = None
        self.OperationType = None
        self.BlackValue = None
        self.BlackDescription = None
        self.BlackValidDate = None


    def _deserialize(self, params):
        self.BlackType = params.get("BlackType")
        self.OperationType = params.get("OperationType")
        self.BlackValue = params.get("BlackValue")
        self.BlackDescription = params.get("BlackDescription")
        self.BlackValidDate = params.get("BlackValidDate")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class SingleRecord(AbstractModel):
    """Recording details.

    """

    def __init__(self):
        r"""
        :param AccountNum: Case number.
        :type AccountNum: str
        :param BizDate: Outbound call date.
        :type BizDate: str
        :param CallStartTime: Call start time.
        :type CallStartTime: str
        :param CallerPhone: Calling number.
        :type CallerPhone: str
        :param Direction: Call direction. O: outbound; I: inbound.
        :type Direction: str
        :param Duration: Call duration.
        :type Duration: int
        :param ProductId: Product ID.
Note: this field may return null, indicating that no valid values can be obtained.
        :type ProductId: str
        :param RecordCosUrl: Recording download URL.
Note: this field may return null, indicating that no valid values can be obtained.
        :type RecordCosUrl: str
        """
        self.AccountNum = None
        self.BizDate = None
        self.CallStartTime = None
        self.CallerPhone = None
        self.Direction = None
        self.Duration = None
        self.ProductId = None
        self.RecordCosUrl = None


    def _deserialize(self, params):
        self.AccountNum = params.get("AccountNum")
        self.BizDate = params.get("BizDate")
        self.CallStartTime = params.get("CallStartTime")
        self.CallerPhone = params.get("CallerPhone")
        self.Direction = params.get("Direction")
        self.Duration = params.get("Duration")
        self.ProductId = params.get("ProductId")
        self.RecordCosUrl = params.get("RecordCosUrl")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class SmsSign(AbstractModel):
    """SMS signature details.

    """

    def __init__(self):
        r"""
        :param SignId: SMS signature ID.
        :type SignId: str
        :param SignName: SMS signature name.
        :type SignName: str
        """
        self.SignId = None
        self.SignName = None


    def _deserialize(self, params):
        self.SignId = params.get("SignId")
        self.SignName = params.get("SignName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class SmsTemplate(AbstractModel):
    """SMS template details.

    """

    def __init__(self):
        r"""
        :param TemplateId: SMS template ID.
        :type TemplateId: str
        :param TemplateName: SMS template name.
        :type TemplateName: str
        """
        self.TemplateId = None
        self.TemplateName = None


    def _deserialize(self, params):
        self.TemplateId = params.get("TemplateId")
        self.TemplateName = params.get("TemplateName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class UpdateBotTaskRequest(AbstractModel):
    """UpdateBotTask request structure.

    """

    def __init__(self):
        r"""
        :param Module: Module name. Default value (fixed): AiApi
        :type Module: str
        :param Operation: Operation name. Default value (fixed): UpdateTask
        :type Operation: str
        :param BotName: Task name.
        :type BotName: str
        :param BotId: Task ID.
        :type BotId: str
        :param CallTimeCollection: Calling time windows for the product.
        :type CallTimeCollection: :class:`tencentcloud.cr.v20180321.models.CallTimeDict`
        :param BanCall: Whether calling is banned. Default: Y
        :type BanCall: str
        :param StartTimeBan: Start time of the no-call period. Default: 130000
        :type StartTimeBan: str
        :param EndTimeBan: End time of the no-call period. Default: 140000
        :type EndTimeBan: str
        :param PhoneCollection: Collection of calling lines.
        :type PhoneCollection: str
        :param CodeType: Redial mode. NON: not connected; LABEL: intention level. Multiple values may be selected, separated by vertical bars: NON|LABEL
        :type CodeType: str
        :param CodeCollection: Collection of redial values. A: strong intention; B: medium intention; C: low intention; D: no intention; E: busy; F: not connected; G: invalid number. Multiple values may be selected, separated by vertical bars: A|B|C|D|E|F|G
        :type CodeCollection: str
        :param CallCount: Number of further call attempts.
        :type CallCount: int
        :param CallInterval: Interval between calls.
        :type CallInterval: int
        :param SmsSignId: ID of the SMS signature used when the call is not connected.
        :type SmsSignId: str
        :param SmsTemplateId: ID of the SMS template used when the call is not connected.
        :type SmsTemplateId: str
        """
        self.Module = None
        self.Operation = None
        self.BotName = None
        self.BotId = None
        self.CallTimeCollection = None
        self.BanCall = None
        self.StartTimeBan = None
        self.EndTimeBan = None
        self.PhoneCollection = None
        self.CodeType = None
        self.CodeCollection = None
        self.CallCount = None
        self.CallInterval = None
        self.SmsSignId = None
        self.SmsTemplateId = None


    def _deserialize(self, params):
        self.Module = params.get("Module")
        self.Operation = params.get("Operation")
        self.BotName = params.get("BotName")
        self.BotId = params.get("BotId")
        if params.get("CallTimeCollection") is not None:
            self.CallTimeCollection = CallTimeDict()
            self.CallTimeCollection._deserialize(params.get("CallTimeCollection"))
        self.BanCall = params.get("BanCall")
        self.StartTimeBan = params.get("StartTimeBan")
        self.EndTimeBan = params.get("EndTimeBan")
        self.PhoneCollection = params.get("PhoneCollection")
        self.CodeType = params.get("CodeType")
        self.CodeCollection = params.get("CodeCollection")
        self.CallCount = params.get("CallCount")
        self.CallInterval = params.get("CallInterval")
        self.SmsSignId = params.get("SmsSignId")
        self.SmsTemplateId = params.get("SmsTemplateId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class UpdateBotTaskResponse(AbstractModel):
    """UpdateBotTask response structure.

    """

    def __init__(self):
        r"""
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
        :type RequestId: str
        """
        self.RequestId = None


    def _deserialize(self, params):
        self.RequestId = params.get("RequestId")


class UploadBotDataRequest(AbstractModel):
    """UploadBotData request structure.

    """

    def __init__(self):
        r"""
        :param Module: Module name. Default value (fixed): AiApi
        :type Module: str
        :param Operation: Operation name. Default value (fixed): UploadData
        :type Operation: str
        :param Data: Task data, in JSON format.
        :type Data: str
        :param BotId: Task ID. Either BotId or BotName is required.
        :type BotId: str
        :param BotName: Task name. Either BotId or BotName is required.
        :type BotName: str
        """
        self.Module = None
        self.Operation = None
        self.Data = None
        self.BotId = None
        self.BotName = None


    def _deserialize(self, params):
        self.Module = params.get("Module")
        self.Operation = params.get("Operation")
        self.Data = params.get("Data")
        self.BotId = params.get("BotId")
        self.BotName = params.get("BotName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class UploadBotDataResponse(AbstractModel):
    """UploadBotData response structure.

    """

    def __init__(self):
        r"""
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
        :type RequestId: str
        """
        self.RequestId = None


    def _deserialize(self, params):
        self.RequestId = params.get("RequestId")


class UploadBotFileRequest(AbstractModel):
    """UploadBotFile request structure.

    """

    def __init__(self):
        r"""
        :param Module: Module name. Default value (fixed): AiApi
        :type Module: str
        :param Operation: Operation name. Default value (fixed): Upload
        :type Operation: str
        :param FileType: File type. input: input file; stop: call-stop file.
        :type FileType: str
        :param FileUrl: File URL.
        :type FileUrl: str
        :param FileName: File name.
        :type FileName: str
        :param BotId: Task ID. Either BotId or BotName is required.
        :type BotId: str
        :param BotName: Task name. Either BotId or BotName is required.
        :type BotName: str
        """
        self.Module = None
        self.Operation = None
        self.FileType = None
        self.FileUrl = None
        self.FileName = None
        self.BotId = None
        self.BotName = None


    def _deserialize(self, params):
        self.Module = params.get("Module")
        self.Operation = params.get("Operation")
        self.FileType = params.get("FileType")
        self.FileUrl = params.get("FileUrl")
        self.FileName = params.get("FileName")
        self.BotId = params.get("BotId")
        self.BotName = params.get("BotName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class UploadBotFileResponse(AbstractModel):
    """UploadBotFile response structure.

    """

    def __init__(self):
        r"""
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
        :type RequestId: str
        """
        self.RequestId = None


    def _deserialize(self, params):
        self.RequestId = params.get("RequestId")


class UploadDataFileRequest(AbstractModel):
    """UploadDataFile request structure.

    """

    def __init__(self):
        r"""
        :param Module: Module name. For this API, the value is: Data
        :type Module: str
        :param Operation: Operation name. For this API, the value is: Upload
        :type Operation: str
        :param FileName: File name.
        :type FileName: str
        :param UploadModel: Upload type; defaults to a due/overdue reminder file if left empty. Valid values: data (due/overdue reminder file), repay (due/overdue reminder call-stop file), callback (follow-up call file), callstop (follow-up call-stop file), blacklist (blacklist file).
        :type UploadModel: str
        :param File: File content. Only one of File and FileUrl may be used. The binary stream must be uploaded via the multipart/form-data protocol; xlsx format is recommended, up to 5 MB.
        :type File: binary
        :param FileUrl: File upload URL. Only one of File and FileUrl may be used; up to 50 MB.
        :type FileUrl: str
        :param InstId: Instance ID; defaults to the initial instance assigned by the system if not passed.
        :type InstId: str
        """
        self.Module = None
        self.Operation = None
        self.FileName = None
        self.UploadModel = None
        self.File = None
        self.FileUrl = None
        self.InstId = None


    def _deserialize(self, params):
        self.Module = params.get("Module")
        self.Operation = params.get("Operation")
        self.FileName = params.get("FileName")
        self.UploadModel = params.get("UploadModel")
        self.File = params.get("File")
        self.FileUrl = params.get("FileUrl")
        self.InstId = params.get("InstId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class UploadDataFileResponse(AbstractModel):
    """UploadDataFile response structure.

    """

    def __init__(self):
        r"""
        :param DataResId: Data ID.
        :type DataResId: str
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
        :type RequestId: str
        """
        self.DataResId = None
        self.RequestId = None


    def _deserialize(self, params):
        self.DataResId = params.get("DataResId")
        self.RequestId = params.get("RequestId")


class UploadDataJsonRequest(AbstractModel):
    """UploadDataJson request structure.

    """

    def __init__(self):
        r"""
        :param Module: Module name. For this API, the value is: Data
        :type Module: str
        :param Operation: Operation name. For this API, the value is: UploadJson
        :type Operation: str
        :param Data: Message content.
        :type Data: str
        :param UploadModel: Upload type; defaults to due/overdue reminder data if left empty. Valid values: data (due/overdue reminder data), repay (due/overdue reminder call-stop data).
        :type UploadModel: str
        :param InstanceId: Instance ID; defaults to the initial instance assigned by the system if not passed.
        :type InstanceId: str
        """
        self.Module = None
        self.Operation = None
        self.Data = None
        self.UploadModel = None
        self.InstanceId = None


    def _deserialize(self, params):
        self.Module = params.get("Module")
        self.Operation = params.get("Operation")
        self.Data = params.get("Data")
        self.UploadModel = params.get("UploadModel")
        self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class UploadDataJsonResponse(AbstractModel):
    """UploadDataJson response structure.

    """

    def __init__(self):
        r"""
        :param Data: Response message content.
Note: this field may return null, indicating that no valid values can be obtained.
        :type Data: str
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
        :type RequestId: str
        """
        self.Data = None
        self.RequestId = None


    def _deserialize(self, params):
        self.Data = params.get("Data")
        self.RequestId = params.get("RequestId")


class UploadFileRequest(AbstractModel):
    """UploadFile request structure.

    """

    def __init__(self):
        r"""
        :param Module: Module name.
        :type Module: str
        :param Operation: Operation name.
        :type Operation: str
        :param FileUrl: File upload URL. The protocol must be HTTPS and the URL port must be 443.
        :type FileUrl: str
        :param FileName: File name.
        :type FileName: str
        :param FileDate: File date.
        :type FileDate: str
        """
        self.Module = None
        self.Operation = None
        self.FileUrl = None
        self.FileName = None
        self.FileDate = None


    def _deserialize(self, params):
        self.Module = params.get("Module")
        self.Operation = params.get("Operation")
        self.FileUrl = params.get("FileUrl")
        self.FileName = params.get("FileName")
        self.FileDate = params.get("FileDate")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))


class UploadFileResponse(AbstractModel):
    """UploadFile response structure.

    """

    def __init__(self):
        r"""
        :param TaskId: Task ID.
        :type TaskId: int
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
        :type RequestId: str
        """
        self.TaskId = None
        self.RequestId = None


    def _deserialize(self, params):
        self.TaskId = params.get("TaskId")
        self.RequestId = params.get("RequestId")
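# --- Illustrative usage (not part of the SDK): a minimal sketch of how these
# request models deserialize a plain dict and warn about unrecognized keys.
# Relies only on the classes above plus the module-level `warnings` import;
# the parameter values are made up.
if __name__ == "__main__":
    req = UploadFileRequest()
    req._deserialize({
        "Module": "Data",
        "Operation": "Upload",
        "FileUrl": "https://example.com/data.xlsx",
        "FileName": "data.xlsx",
        "FileDate": "2020-01-01",
        "NoSuchField": 1,  # unknown key -> "NoSuchField fields are useless." warning
    })
    print(req.FileName)  # -> data.xlsx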
<filename>third_party/WebKit/Source/bindings/tests/results/core/V8TestDictionary.cpp // Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // This file has been auto-generated by code_generator_v8.py. DO NOT MODIFY! #include "config.h" #include "V8TestDictionary.h" #include "bindings/core/v8/ExceptionState.h" #include "bindings/core/v8/ScriptValue.h" #include "bindings/core/v8/UnionTypesCore.h" #include "bindings/core/v8/V8Element.h" #include "bindings/core/v8/V8EventTarget.h" #include "bindings/core/v8/V8InternalDictionary.h" #include "bindings/core/v8/V8TestInterface.h" #include "bindings/core/v8/V8TestInterface2.h" #include "bindings/core/v8/V8TestInterfaceGarbageCollected.h" #include "bindings/core/v8/V8TestInterfaceWillBeGarbageCollected.h" #include "bindings/core/v8/V8Uint8Array.h" #include "core/frame/UseCounter.h" namespace blink { void V8TestDictionary::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, TestDictionary& impl, ExceptionState& exceptionState) { if (isUndefinedOrNull(v8Value)) return; if (!v8Value->IsObject()) { exceptionState.throwTypeError("cannot convert to dictionary."); return; } v8::TryCatch block; v8::Local<v8::Object> v8Object; if (!v8Call(v8Value->ToObject(isolate->GetCurrentContext()), v8Object, block)) { exceptionState.rethrowV8Exception(block.Exception()); return; } { v8::Local<v8::Value> anyMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "anyMember")).ToLocal(&anyMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (anyMemberValue.IsEmpty() || anyMemberValue->IsUndefined()) { // Do nothing. } else { ScriptValue anyMember = ScriptValue(ScriptState::current(isolate), anyMemberValue); impl.setAnyMember(anyMember); } } { v8::Local<v8::Value> booleanMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "booleanMember")).ToLocal(&booleanMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (booleanMemberValue.IsEmpty() || booleanMemberValue->IsUndefined()) { // Do nothing. } else { bool booleanMember = toBoolean(isolate, booleanMemberValue, exceptionState); if (exceptionState.hadException()) return; impl.setBooleanMember(booleanMember); } } { v8::Local<v8::Value> createValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "create")).ToLocal(&createValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (createValue.IsEmpty() || createValue->IsUndefined()) { // Do nothing. } else { bool create = toBoolean(isolate, createValue, exceptionState); if (exceptionState.hadException()) return; impl.setCreateMember(create); } } { v8::Local<v8::Value> deprecatedCreateMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "deprecatedCreateMember")).ToLocal(&deprecatedCreateMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (deprecatedCreateMemberValue.IsEmpty() || deprecatedCreateMemberValue->IsUndefined()) { // Do nothing. 
} else { UseCounter::countDeprecationIfNotPrivateScript(isolate, callingExecutionContext(isolate), UseCounter::CreateMember); bool deprecatedCreateMember = toBoolean(isolate, deprecatedCreateMemberValue, exceptionState); if (exceptionState.hadException()) return; impl.setCreateMember(deprecatedCreateMember); } } { v8::Local<v8::Value> doubleOrNullMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "doubleOrNullMember")).ToLocal(&doubleOrNullMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (doubleOrNullMemberValue.IsEmpty() || doubleOrNullMemberValue->IsUndefined()) { // Do nothing. } else if (doubleOrNullMemberValue->IsNull()) { impl.setDoubleOrNullMemberToNull(); } else { double doubleOrNullMember = toRestrictedDouble(isolate, doubleOrNullMemberValue, exceptionState); if (exceptionState.hadException()) return; impl.setDoubleOrNullMember(doubleOrNullMember); } } { v8::Local<v8::Value> doubleOrStringMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "doubleOrStringMember")).ToLocal(&doubleOrStringMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (doubleOrStringMemberValue.IsEmpty() || doubleOrStringMemberValue->IsUndefined()) { // Do nothing. } else { DoubleOrString doubleOrStringMember; V8DoubleOrString::toImpl(isolate, doubleOrStringMemberValue, doubleOrStringMember, exceptionState); if (exceptionState.hadException()) return; impl.setDoubleOrStringMember(doubleOrStringMember); } } { v8::Local<v8::Value> doubleOrStringSequenceMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "doubleOrStringSequenceMember")).ToLocal(&doubleOrStringSequenceMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (doubleOrStringSequenceMemberValue.IsEmpty() || doubleOrStringSequenceMemberValue->IsUndefined()) { // Do nothing. } else { HeapVector<DoubleOrString> doubleOrStringSequenceMember = toImplArray<HeapVector<DoubleOrString>>(doubleOrStringSequenceMemberValue, 0, isolate, exceptionState); if (exceptionState.hadException()) return; impl.setDoubleOrStringSequenceMember(doubleOrStringSequenceMember); } } { v8::Local<v8::Value> elementOrNullMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "elementOrNullMember")).ToLocal(&elementOrNullMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (elementOrNullMemberValue.IsEmpty() || elementOrNullMemberValue->IsUndefined()) { // Do nothing. } else if (elementOrNullMemberValue->IsNull()) { impl.setElementOrNullMemberToNull(); } else { Element* elementOrNullMember = V8Element::toImplWithTypeCheck(isolate, elementOrNullMemberValue); if (!elementOrNullMember && !elementOrNullMemberValue->IsNull()) { exceptionState.throwTypeError("member elementOrNullMember is not of type Element."); return; } impl.setElementOrNullMember(elementOrNullMember); } } { v8::Local<v8::Value> enumMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "enumMember")).ToLocal(&enumMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (enumMemberValue.IsEmpty() || enumMemberValue->IsUndefined()) { // Do nothing. 
} else { V8StringResource<> enumMember = enumMemberValue; if (!enumMember.prepare(exceptionState)) return; static const char* validValues[] = { "", "EnumValue1", "EnumValue2", "EnumValue3", }; if (!isValidEnum(enumMember, validValues, WTF_ARRAY_LENGTH(validValues), "TestEnum", exceptionState)) return; impl.setEnumMember(enumMember); } } { v8::Local<v8::Value> enumSequenceMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "enumSequenceMember")).ToLocal(&enumSequenceMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (enumSequenceMemberValue.IsEmpty() || enumSequenceMemberValue->IsUndefined()) { // Do nothing. } else { Vector<String> enumSequenceMember = toImplArray<Vector<String>>(enumSequenceMemberValue, 0, isolate, exceptionState); if (exceptionState.hadException()) return; static const char* validValues[] = { "", "EnumValue1", "EnumValue2", "EnumValue3", }; if (!isValidEnum(enumSequenceMember, validValues, WTF_ARRAY_LENGTH(validValues), "TestEnum", exceptionState)) return; impl.setEnumSequenceMember(enumSequenceMember); } } { v8::Local<v8::Value> eventTargetMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "eventTargetMember")).ToLocal(&eventTargetMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (eventTargetMemberValue.IsEmpty() || eventTargetMemberValue->IsUndefined()) { // Do nothing. } else { EventTarget* eventTargetMember = toEventTarget(isolate, eventTargetMemberValue); if (!eventTargetMember && !eventTargetMemberValue->IsNull()) { exceptionState.throwTypeError("member eventTargetMember is not of type EventTarget."); return; } impl.setEventTargetMember(eventTargetMember); } } { v8::Local<v8::Value> internalDictionarySequenceMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "internalDictionarySequenceMember")).ToLocal(&internalDictionarySequenceMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (internalDictionarySequenceMemberValue.IsEmpty() || internalDictionarySequenceMemberValue->IsUndefined()) { // Do nothing. } else { HeapVector<InternalDictionary> internalDictionarySequenceMember = toImplArray<HeapVector<InternalDictionary>>(internalDictionarySequenceMemberValue, 0, isolate, exceptionState); if (exceptionState.hadException()) return; impl.setInternalDictionarySequenceMember(internalDictionarySequenceMember); } } { v8::Local<v8::Value> longMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "longMember")).ToLocal(&longMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (longMemberValue.IsEmpty() || longMemberValue->IsUndefined()) { // Do nothing. } else { int longMember = toInt32(isolate, longMemberValue, NormalConversion, exceptionState); if (exceptionState.hadException()) return; impl.setLongMember(longMember); } } { v8::Local<v8::Value> objectMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "objectMember")).ToLocal(&objectMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (objectMemberValue.IsEmpty() || objectMemberValue->IsUndefined()) { // Do nothing. 
} else { ScriptValue objectMember = ScriptValue(ScriptState::current(isolate), objectMemberValue); if (!objectMember.isObject()) { exceptionState.throwTypeError("member objectMember is not an object."); return; } impl.setObjectMember(objectMember); } } { v8::Local<v8::Value> objectOrNullMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "objectOrNullMember")).ToLocal(&objectOrNullMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (objectOrNullMemberValue.IsEmpty() || objectOrNullMemberValue->IsUndefined()) { // Do nothing. } else if (objectOrNullMemberValue->IsNull()) { impl.setObjectOrNullMemberToNull(); } else { ScriptValue objectOrNullMember = ScriptValue(ScriptState::current(isolate), objectOrNullMemberValue); if (!objectOrNullMember.isObject()) { exceptionState.throwTypeError("member objectOrNullMember is not an object."); return; } impl.setObjectOrNullMember(objectOrNullMember); } } { v8::Local<v8::Value> otherDoubleOrStringMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "otherDoubleOrStringMember")).ToLocal(&otherDoubleOrStringMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (otherDoubleOrStringMemberValue.IsEmpty() || otherDoubleOrStringMemberValue->IsUndefined()) { // Do nothing. } else { DoubleOrString otherDoubleOrStringMember; V8DoubleOrString::toImpl(isolate, otherDoubleOrStringMemberValue, otherDoubleOrStringMember, exceptionState); if (exceptionState.hadException()) return; impl.setOtherDoubleOrStringMember(otherDoubleOrStringMember); } } { v8::Local<v8::Value> restrictedDoubleMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "restrictedDoubleMember")).ToLocal(&restrictedDoubleMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (restrictedDoubleMemberValue.IsEmpty() || restrictedDoubleMemberValue->IsUndefined()) { // Do nothing. } else { double restrictedDoubleMember = toRestrictedDouble(isolate, restrictedDoubleMemberValue, exceptionState); if (exceptionState.hadException()) return; impl.setRestrictedDoubleMember(restrictedDoubleMember); } } { v8::Local<v8::Value> stringArrayMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "stringArrayMember")).ToLocal(&stringArrayMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (stringArrayMemberValue.IsEmpty() || stringArrayMemberValue->IsUndefined()) { // Do nothing. } else { Vector<String> stringArrayMember = toImplArray<Vector<String>>(stringArrayMemberValue, 0, isolate, exceptionState); if (exceptionState.hadException()) return; impl.setStringArrayMember(stringArrayMember); } } { v8::Local<v8::Value> stringMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "stringMember")).ToLocal(&stringMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (stringMemberValue.IsEmpty() || stringMemberValue->IsUndefined()) { // Do nothing. } else { V8StringResource<> stringMember = stringMemberValue; if (!stringMember.prepare(exceptionState)) return; impl.setStringMember(stringMember); } } { v8::Local<v8::Value> stringOrNullMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "stringOrNullMember")).ToLocal(&stringOrNullMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (stringOrNullMemberValue.IsEmpty() || stringOrNullMemberValue->IsUndefined()) { // Do nothing. 
} else if (stringOrNullMemberValue->IsNull()) { impl.setStringOrNullMemberToNull(); } else { V8StringResource<> stringOrNullMember = stringOrNullMemberValue; if (!stringOrNullMember.prepare(exceptionState)) return; impl.setStringOrNullMember(stringOrNullMember); } } { v8::Local<v8::Value> stringSequenceMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "stringSequenceMember")).ToLocal(&stringSequenceMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (stringSequenceMemberValue.IsEmpty() || stringSequenceMemberValue->IsUndefined()) { // Do nothing. } else { Vector<String> stringSequenceMember = toImplArray<Vector<String>>(stringSequenceMemberValue, 0, isolate, exceptionState); if (exceptionState.hadException()) return; impl.setStringSequenceMember(stringSequenceMember); } } { v8::Local<v8::Value> testInterface2OrUint8ArrayMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "testInterface2OrUint8ArrayMember")).ToLocal(&testInterface2OrUint8ArrayMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (testInterface2OrUint8ArrayMemberValue.IsEmpty() || testInterface2OrUint8ArrayMemberValue->IsUndefined()) { // Do nothing. } else { TestInterface2OrUint8Array testInterface2OrUint8ArrayMember; V8TestInterface2OrUint8Array::toImpl(isolate, testInterface2OrUint8ArrayMemberValue, testInterface2OrUint8ArrayMember, exceptionState); if (exceptionState.hadException()) return; impl.setTestInterface2OrUint8ArrayMember(testInterface2OrUint8ArrayMember); } } { v8::Local<v8::Value> testInterfaceGarbageCollectedMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceGarbageCollectedMember")).ToLocal(&testInterfaceGarbageCollectedMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (testInterfaceGarbageCollectedMemberValue.IsEmpty() || testInterfaceGarbageCollectedMemberValue->IsUndefined()) { // Do nothing. } else { TestInterfaceGarbageCollected* testInterfaceGarbageCollectedMember = V8TestInterfaceGarbageCollected::toImplWithTypeCheck(isolate, testInterfaceGarbageCollectedMemberValue); if (!testInterfaceGarbageCollectedMember && !testInterfaceGarbageCollectedMemberValue->IsNull()) { exceptionState.throwTypeError("member testInterfaceGarbageCollectedMember is not of type TestInterfaceGarbageCollected."); return; } impl.setTestInterfaceGarbageCollectedMember(testInterfaceGarbageCollectedMember); } } { v8::Local<v8::Value> testInterfaceGarbageCollectedOrNullMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceGarbageCollectedOrNullMember")).ToLocal(&testInterfaceGarbageCollectedOrNullMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (testInterfaceGarbageCollectedOrNullMemberValue.IsEmpty() || testInterfaceGarbageCollectedOrNullMemberValue->IsUndefined()) { // Do nothing. 
} else if (testInterfaceGarbageCollectedOrNullMemberValue->IsNull()) { impl.setTestInterfaceGarbageCollectedOrNullMemberToNull(); } else { TestInterfaceGarbageCollected* testInterfaceGarbageCollectedOrNullMember = V8TestInterfaceGarbageCollected::toImplWithTypeCheck(isolate, testInterfaceGarbageCollectedOrNullMemberValue); if (!testInterfaceGarbageCollectedOrNullMember && !testInterfaceGarbageCollectedOrNullMemberValue->IsNull()) { exceptionState.throwTypeError("member testInterfaceGarbageCollectedOrNullMember is not of type TestInterfaceGarbageCollected."); return; } impl.setTestInterfaceGarbageCollectedOrNullMember(testInterfaceGarbageCollectedOrNullMember); } } { v8::Local<v8::Value> testInterfaceGarbageCollectedSequenceMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceGarbageCollectedSequenceMember")).ToLocal(&testInterfaceGarbageCollectedSequenceMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (testInterfaceGarbageCollectedSequenceMemberValue.IsEmpty() || testInterfaceGarbageCollectedSequenceMemberValue->IsUndefined()) { // Do nothing. } else { HeapVector<Member<TestInterfaceGarbageCollected>> testInterfaceGarbageCollectedSequenceMember = (toMemberNativeArray<TestInterfaceGarbageCollected, V8TestInterfaceGarbageCollected>(testInterfaceGarbageCollectedSequenceMemberValue, 0, isolate, exceptionState)); if (exceptionState.hadException()) return; impl.setTestInterfaceGarbageCollectedSequenceMember(testInterfaceGarbageCollectedSequenceMember); } } { v8::Local<v8::Value> testInterfaceMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceMember")).ToLocal(&testInterfaceMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (testInterfaceMemberValue.IsEmpty() || testInterfaceMemberValue->IsUndefined()) { // Do nothing. } else { TestInterfaceImplementation* testInterfaceMember = V8TestInterface::toImplWithTypeCheck(isolate, testInterfaceMemberValue); if (!testInterfaceMember && !testInterfaceMemberValue->IsNull()) { exceptionState.throwTypeError("member testInterfaceMember is not of type TestInterface."); return; } impl.setTestInterfaceMember(testInterfaceMember); } } { v8::Local<v8::Value> testInterfaceOrNullMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceOrNullMember")).ToLocal(&testInterfaceOrNullMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (testInterfaceOrNullMemberValue.IsEmpty() || testInterfaceOrNullMemberValue->IsUndefined()) { // Do nothing. } else if (testInterfaceOrNullMemberValue->IsNull()) { impl.setTestInterfaceOrNullMemberToNull(); } else { TestInterfaceImplementation* testInterfaceOrNullMember = V8TestInterface::toImplWithTypeCheck(isolate, testInterfaceOrNullMemberValue); if (!testInterfaceOrNullMember && !testInterfaceOrNullMemberValue->IsNull()) { exceptionState.throwTypeError("member testInterfaceOrNullMember is not of type TestInterface."); return; } impl.setTestInterfaceOrNullMember(testInterfaceOrNullMember); } } { v8::Local<v8::Value> testInterfaceSequenceMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceSequenceMember")).ToLocal(&testInterfaceSequenceMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (testInterfaceSequenceMemberValue.IsEmpty() || testInterfaceSequenceMemberValue->IsUndefined()) { // Do nothing. 
} else { Vector<RefPtr<TestInterfaceImplementation>> testInterfaceSequenceMember = (toRefPtrNativeArray<TestInterface, V8TestInterface>(testInterfaceSequenceMemberValue, 0, isolate, exceptionState)); if (exceptionState.hadException()) return; impl.setTestInterfaceSequenceMember(testInterfaceSequenceMember); } } { v8::Local<v8::Value> testInterfaceWillBeGarbageCollectedMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceWillBeGarbageCollectedMember")).ToLocal(&testInterfaceWillBeGarbageCollectedMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (testInterfaceWillBeGarbageCollectedMemberValue.IsEmpty() || testInterfaceWillBeGarbageCollectedMemberValue->IsUndefined()) { // Do nothing. } else { TestInterfaceWillBeGarbageCollected* testInterfaceWillBeGarbageCollectedMember = V8TestInterfaceWillBeGarbageCollected::toImplWithTypeCheck(isolate, testInterfaceWillBeGarbageCollectedMemberValue); if (!testInterfaceWillBeGarbageCollectedMember && !testInterfaceWillBeGarbageCollectedMemberValue->IsNull()) { exceptionState.throwTypeError("member testInterfaceWillBeGarbageCollectedMember is not of type TestInterfaceWillBeGarbageCollected."); return; } impl.setTestInterfaceWillBeGarbageCollectedMember(testInterfaceWillBeGarbageCollectedMember); } } { v8::Local<v8::Value> testInterfaceWillBeGarbageCollectedOrNullMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceWillBeGarbageCollectedOrNullMember")).ToLocal(&testInterfaceWillBeGarbageCollectedOrNullMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (testInterfaceWillBeGarbageCollectedOrNullMemberValue.IsEmpty() || testInterfaceWillBeGarbageCollectedOrNullMemberValue->IsUndefined()) { // Do nothing. } else if (testInterfaceWillBeGarbageCollectedOrNullMemberValue->IsNull()) { impl.setTestInterfaceWillBeGarbageCollectedOrNullMemberToNull(); } else { TestInterfaceWillBeGarbageCollected* testInterfaceWillBeGarbageCollectedOrNullMember = V8TestInterfaceWillBeGarbageCollected::toImplWithTypeCheck(isolate, testInterfaceWillBeGarbageCollectedOrNullMemberValue); if (!testInterfaceWillBeGarbageCollectedOrNullMember && !testInterfaceWillBeGarbageCollectedOrNullMemberValue->IsNull()) { exceptionState.throwTypeError("member testInterfaceWillBeGarbageCollectedOrNullMember is not of type TestInterfaceWillBeGarbageCollected."); return; } impl.setTestInterfaceWillBeGarbageCollectedOrNullMember(testInterfaceWillBeGarbageCollectedOrNullMember); } } { v8::Local<v8::Value> testInterfaceWillBeGarbageCollectedSequenceMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceWillBeGarbageCollectedSequenceMember")).ToLocal(&testInterfaceWillBeGarbageCollectedSequenceMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (testInterfaceWillBeGarbageCollectedSequenceMemberValue.IsEmpty() || testInterfaceWillBeGarbageCollectedSequenceMemberValue->IsUndefined()) { // Do nothing. 
} else { WillBeHeapVector<RefPtrWillBeMember<TestInterfaceWillBeGarbageCollected>> testInterfaceWillBeGarbageCollectedSequenceMember = (toRefPtrWillBeMemberNativeArray<TestInterfaceWillBeGarbageCollected, V8TestInterfaceWillBeGarbageCollected>(testInterfaceWillBeGarbageCollectedSequenceMemberValue, 0, isolate, exceptionState)); if (exceptionState.hadException()) return; impl.setTestInterfaceWillBeGarbageCollectedSequenceMember(testInterfaceWillBeGarbageCollectedSequenceMember); } } { v8::Local<v8::Value> uint8ArrayMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "uint8ArrayMember")).ToLocal(&uint8ArrayMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (uint8ArrayMemberValue.IsEmpty() || uint8ArrayMemberValue->IsUndefined()) { // Do nothing. } else { DOMUint8Array* uint8ArrayMember = uint8ArrayMemberValue->IsUint8Array() ? V8Uint8Array::toImpl(v8::Local<v8::Uint8Array>::Cast(uint8ArrayMemberValue)) : 0; if (!uint8ArrayMember && !uint8ArrayMemberValue->IsNull()) { exceptionState.throwTypeError("member uint8ArrayMember is not of type Uint8Array."); return; } impl.setUint8ArrayMember(uint8ArrayMember); } } { v8::Local<v8::Value> unrestrictedDoubleMemberValue; if (!v8Object->Get(isolate->GetCurrentContext(), v8String(isolate, "unrestrictedDoubleMember")).ToLocal(&unrestrictedDoubleMemberValue)) { exceptionState.rethrowV8Exception(block.Exception()); return; } if (unrestrictedDoubleMemberValue.IsEmpty() || unrestrictedDoubleMemberValue->IsUndefined()) { // Do nothing. } else { double unrestrictedDoubleMember = toDouble(isolate, unrestrictedDoubleMemberValue, exceptionState); if (exceptionState.hadException()) return; impl.setUnrestrictedDoubleMember(unrestrictedDoubleMember); } } } v8::Local<v8::Value> toV8(const TestDictionary& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate) { v8::Local<v8::Object> v8Object = v8::Object::New(isolate); if (!toV8TestDictionary(impl, v8Object, creationContext, isolate)) return v8::Local<v8::Value>(); return v8Object; } bool toV8TestDictionary(const TestDictionary& impl, v8::Local<v8::Object> dictionary, v8::Local<v8::Object> creationContext, v8::Isolate* isolate) { if (impl.hasAnyMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "anyMember"), impl.anyMember().v8Value()))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "anyMember"), v8::Null(isolate)))) return false; } if (impl.hasBooleanMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "booleanMember"), v8Boolean(impl.booleanMember(), isolate)))) return false; } if (impl.hasCreateMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "create"), v8Boolean(impl.createMember(), isolate)))) return false; } if (impl.hasCreateMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "deprecatedCreateMember"), v8Boolean(impl.createMember(), isolate)))) return false; } if (impl.hasDoubleOrNullMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "doubleOrNullMember"), v8::Number::New(isolate, impl.doubleOrNullMember())))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "doubleOrNullMember"), v8::Null(isolate)))) return 
false; } if (impl.hasDoubleOrStringMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "doubleOrStringMember"), toV8(impl.doubleOrStringMember(), creationContext, isolate)))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "doubleOrStringMember"), toV8(DoubleOrString::fromDouble(3.14), creationContext, isolate)))) return false; } if (impl.hasDoubleOrStringSequenceMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "doubleOrStringSequenceMember"), toV8(impl.doubleOrStringSequenceMember(), creationContext, isolate)))) return false; } if (impl.hasElementOrNullMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "elementOrNullMember"), toV8(impl.elementOrNullMember(), creationContext, isolate)))) return false; } if (impl.hasEnumMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "enumMember"), v8String(isolate, impl.enumMember())))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "enumMember"), v8String(isolate, String("foo"))))) return false; } if (impl.hasEnumSequenceMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "enumSequenceMember"), toV8(impl.enumSequenceMember(), creationContext, isolate)))) return false; } if (impl.hasEventTargetMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "eventTargetMember"), toV8(impl.eventTargetMember(), creationContext, isolate)))) return false; } if (impl.hasInternalDictionarySequenceMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "internalDictionarySequenceMember"), toV8(impl.internalDictionarySequenceMember(), creationContext, isolate)))) return false; } if (impl.hasLongMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "longMember"), v8::Integer::New(isolate, impl.longMember())))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "longMember"), v8::Integer::New(isolate, 1)))) return false; } if (impl.hasObjectMember()) { ASSERT(impl.objectMember().isObject()); if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "objectMember"), impl.objectMember().v8Value()))) return false; } if (impl.hasObjectOrNullMember()) { ASSERT(impl.objectOrNullMember().isObject()); if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "objectOrNullMember"), impl.objectOrNullMember().v8Value()))) return false; } if (impl.hasOtherDoubleOrStringMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "otherDoubleOrStringMember"), toV8(impl.otherDoubleOrStringMember(), creationContext, isolate)))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "otherDoubleOrStringMember"), toV8(DoubleOrString::fromString(String("default string value")), creationContext, isolate)))) return false; } if (impl.hasRestrictedDoubleMember()) { if 
(!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "restrictedDoubleMember"), v8::Number::New(isolate, impl.restrictedDoubleMember())))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "restrictedDoubleMember"), v8::Number::New(isolate, 3.14)))) return false; } if (impl.hasStringArrayMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "stringArrayMember"), toV8(impl.stringArrayMember(), creationContext, isolate)))) return false; } if (impl.hasStringMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "stringMember"), v8String(isolate, impl.stringMember())))) return false; } if (impl.hasStringOrNullMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "stringOrNullMember"), v8String(isolate, impl.stringOrNullMember())))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "stringOrNullMember"), v8String(isolate, String("default string value"))))) return false; } if (impl.hasStringSequenceMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "stringSequenceMember"), toV8(impl.stringSequenceMember(), creationContext, isolate)))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "stringSequenceMember"), toV8(Vector<String>(), creationContext, isolate)))) return false; } if (impl.hasTestInterface2OrUint8ArrayMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterface2OrUint8ArrayMember"), toV8(impl.testInterface2OrUint8ArrayMember(), creationContext, isolate)))) return false; } if (impl.hasTestInterfaceGarbageCollectedMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceGarbageCollectedMember"), toV8(impl.testInterfaceGarbageCollectedMember(), creationContext, isolate)))) return false; } if (impl.hasTestInterfaceGarbageCollectedOrNullMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceGarbageCollectedOrNullMember"), toV8(impl.testInterfaceGarbageCollectedOrNullMember(), creationContext, isolate)))) return false; } if (impl.hasTestInterfaceGarbageCollectedSequenceMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceGarbageCollectedSequenceMember"), toV8(impl.testInterfaceGarbageCollectedSequenceMember(), creationContext, isolate)))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceGarbageCollectedSequenceMember"), toV8(HeapVector<Member<TestInterfaceGarbageCollected>>(), creationContext, isolate)))) return false; } if (impl.hasTestInterfaceMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceMember"), toV8(impl.testInterfaceMember(), creationContext, isolate)))) return false; } if (impl.hasTestInterfaceOrNullMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceOrNullMember"), toV8(impl.testInterfaceOrNullMember(), 
creationContext, isolate)))) return false; } if (impl.hasTestInterfaceSequenceMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceSequenceMember"), toV8(impl.testInterfaceSequenceMember(), creationContext, isolate)))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceSequenceMember"), toV8(Vector<RefPtr<TestInterfaceImplementation>>(), creationContext, isolate)))) return false; } if (impl.hasTestInterfaceWillBeGarbageCollectedMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceWillBeGarbageCollectedMember"), toV8(impl.testInterfaceWillBeGarbageCollectedMember(), creationContext, isolate)))) return false; } if (impl.hasTestInterfaceWillBeGarbageCollectedOrNullMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceWillBeGarbageCollectedOrNullMember"), toV8(impl.testInterfaceWillBeGarbageCollectedOrNullMember(), creationContext, isolate)))) return false; } if (impl.hasTestInterfaceWillBeGarbageCollectedSequenceMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceWillBeGarbageCollectedSequenceMember"), toV8(impl.testInterfaceWillBeGarbageCollectedSequenceMember(), creationContext, isolate)))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "testInterfaceWillBeGarbageCollectedSequenceMember"), toV8(WillBeHeapVector<RefPtrWillBeMember<TestInterfaceWillBeGarbageCollected>>(), creationContext, isolate)))) return false; } if (impl.hasUint8ArrayMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "uint8ArrayMember"), toV8(impl.uint8ArrayMember(), creationContext, isolate)))) return false; } if (impl.hasUnrestrictedDoubleMember()) { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "unrestrictedDoubleMember"), v8::Number::New(isolate, impl.unrestrictedDoubleMember())))) return false; } else { if (!v8CallBoolean(dictionary->CreateDataProperty(isolate->GetCurrentContext(), v8String(isolate, "unrestrictedDoubleMember"), v8::Number::New(isolate, 3.14)))) return false; } return true; } TestDictionary NativeValueTraits<TestDictionary>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState) { TestDictionary impl; V8TestDictionary::toImpl(isolate, value, impl, exceptionState); return impl; } } // namespace blink
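// --- Illustrative only (hand-written, not generated code): a rough sketch of
// how a caller might drive the conversions above. Only the toImpl()/toV8()
// signatures are taken from this file; the ExceptionState constructor
// arguments are an assumption and may differ across Blink revisions.
static void exampleTestDictionaryRoundTrip(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, v8::Local<v8::Object> creationContext)
{
    blink::ExceptionState exceptionState(blink::ExceptionState::ExecutionContext, "member", "TestDictionary", creationContext, isolate);
    blink::TestDictionary impl;
    blink::V8TestDictionary::toImpl(isolate, v8Value, impl, exceptionState);
    if (exceptionState.hadException())
        return;
    // toV8() builds a fresh object containing only the members present on impl.
    v8::Local<v8::Value> roundTripped = blink::toV8(impl, creationContext, isolate);
    (void)roundTripped; // inspect or hand back to script as needed
}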
<filename>src/lib/storage/create_iterable_from_segment.hpp
#pragma once

#include "types.hpp"

namespace opossum {

template <typename T>
class ValueSegment;

template <typename T>
class DictionarySegment;

template <typename T>
class RunLengthSegment;

template <typename T>
class FixedStringDictionarySegment;

template <typename T, typename>
class FrameOfReferenceSegment;

template <typename T>
class LZ4Segment;

class ReferenceSegment;
template <typename T, EraseReferencedSegmentType>
class ReferenceSegmentIterable;

/**
 * @defgroup Uniform interface to create an iterable from a segment
 *
 * These methods cannot be part of the segments' interfaces because
 * reference segments are not templated and thus don’t know their type.
 *
 * All iterables implement the same interface using static polymorphism
 * (i.e. the CRTP pattern, see segment_iterables/.hpp).
 *
 * In debug mode, create_iterable_from_segment returns a type-erased
 * iterable, i.e., all iterators have the same type.
 *
 * Functions must be forward-declared because otherwise, we run into
 * circular include dependencies.
 *
 * @{
 */

template <typename T, bool EraseSegmentType = HYRISE_DEBUG>
auto create_iterable_from_segment(const ValueSegment<T>& segment);

template <typename T, bool EraseSegmentType = HYRISE_DEBUG>
auto create_iterable_from_segment(const DictionarySegment<T>& segment);

template <typename T, bool EraseSegmentType = HYRISE_DEBUG>
auto create_iterable_from_segment(const RunLengthSegment<T>& segment);

template <typename T, bool EraseSegmentType = HYRISE_DEBUG>
auto create_iterable_from_segment(const FixedStringDictionarySegment<T>& segment);

template <typename T, typename Enabled, bool EraseSegmentType = HYRISE_DEBUG>
auto create_iterable_from_segment(const FrameOfReferenceSegment<T, Enabled>& segment);

// Fix template deduction so that we can call `create_iterable_from_segment<T, false>` on FrameOfReferenceSegments
template <typename T, bool EraseSegmentType, typename Enabled>
auto create_iterable_from_segment(const FrameOfReferenceSegment<T, Enabled>& segment) {
  return create_iterable_from_segment<T, Enabled, EraseSegmentType>(segment);
}

// LZ4 iterators are always type-erased: decompression cost dominates, so the
// virtual-call overhead of erased iterators is negligible.
template <typename T, bool EraseSegmentType = true>
auto create_iterable_from_segment(const LZ4Segment<T>& segment);

template <typename T, bool EraseSegmentType = HYRISE_DEBUG,
          EraseReferencedSegmentType = (HYRISE_DEBUG ? EraseReferencedSegmentType::Yes : EraseReferencedSegmentType::No)>
auto create_iterable_from_segment(const ReferenceSegment& segment);

/**@}*/

}  // namespace opossum

// Include these only now to break up include dependencies
#include "create_iterable_from_reference_segment.ipp"
#include "create_iterable_from_segment.ipp"
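// --- Illustrative only (hand-written): a hedged sketch of consuming the
// factory above. The for_each()/is_null()/value() names follow the segment
// iterable interface described in the comment block, but are assumptions as
// far as this header alone is concerned.
template <typename T>
T example_sum_segment(const opossum::ValueSegment<T>& segment) {
  T sum{};
  auto iterable = opossum::create_iterable_from_segment(segment);
  iterable.for_each([&](const auto& position) {
    if (!position.is_null()) sum += position.value();  // skip NULLs
  });
  return sum;
}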
// find corruption in an Indexdb dump
#include "gb-include.h"

#include <sys/types.h>
#include <fcntl.h>

bool mainShutdown ( bool urgent ) { return true; }
bool closeAll ( void *state, void(*callback)(void *state) ) { return true; }
bool allExit() { return true; }

int main ( int argc , char *argv[] ) {
	int32_t count = 0;
	// check for arguments (none supported yet)
	for (int32_t i = 1; i < argc; i++) {
	}
 loop:
	// read a url from stdin
	char s[1024];
	if ( ! fgets ( s , 1024 , stdin ) ) {
		if ( count == 0 ) printf ( "\nFound All Urls.\n" );
		else printf ( "\n Did Not Find %"INT32" Urls.\n", count );
		exit(1);
	}
	printf("\n\n%s",s);
	int32_t sLen = gbstrlen(s);
	// strip the trailing newline left by fgets() so it does not get
	// url-encoded into the query
	if ( sLen > 0 && s[sLen-1] == '\n' ) s[--sLen] = '\0';
	// url encode the string
	char url[2048];
	int32_t urlLen = urlEncode(url, 2048, s, sLen );
	// make sure the encoded url is NUL-terminated before using it with %s
	if ( urlLen < 2048 ) url[urlLen] = '\0';
	// make the WGET call
	char wget[4096];
	sprintf ( wget, "wget -q -O gbtitletest.out.tmp "
		  "'http://127.0.0.1:8030/search?"
		  "raw=2&q=url%%3A%s'", url );
	printf ( "calling: %s\n", wget );
	system ( wget );
	// read the tmp file
	int fd = open ( "gbtitletest.out.tmp", O_RDONLY );
	if ( fd < 0 ) {
		printf ( "ERROR: Had error opening tmp file: "
			 "gbtitletest.out.tmp\n" );
		exit(1);
	}
	char tmpBuf[1024];
	int32_t r = read ( fd, tmpBuf, 1024 );
	// an empty result body ("<pre>\n</pre>") means the url was not found
	if ( r < 12 || strncasecmp ( tmpBuf, "<pre>\n</pre>", 12 ) == 0 ) {
		printf ( "NOT FOUND: %s\n", s );
		count++;
	}
	close(fd);
	// loop
	goto loop;
}
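// --- Usage note (inferred from the code above, not documented in the source):
// feed one url per line on stdin while a gb instance listens on
// 127.0.0.1:8030, e.g.
//
//     cat urls.txt | ./gbtitletest
//
// A url counts as NOT FOUND when the raw result body is shorter than 12 bytes
// or starts with an empty "<pre>\n</pre>" block.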
<gh_stars>100-1000
/*
 * Copyright [2013-2021], Alibaba Group Holding Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.alibaba.polardbx.common.privilege;

import com.alibaba.polardbx.common.model.TbPriv;
import com.alibaba.polardbx.common.utils.PropertyReflectUtil;
import com.alibaba.polardbx.common.utils.TStringUtil;
import com.alibaba.polardbx.common.model.DbPriv;
import com.alibaba.polardbx.common.utils.encrypt.SecurityUtil;
import com.taobao.tddl.common.privilege.EncrptPassword;
import com.taobao.tddl.common.privilege.GrantParameter;
import com.taobao.tddl.common.privilege.PrivilegeLevel;
import com.taobao.tddl.common.privilege.PrivilegePoint;
import com.taobao.tddl.common.privilege.RevokeParameter;

import java.security.NoSuchAlgorithmException;
import java.util.SortedSet;

public class PrivilegeUtil {
    public static boolean isAllPrivs(SortedSet<PrivilegePoint> privPoints, PrivilegeLevel privLevel) {
        if (privPoints == null || privPoints.isEmpty()) {
            return false;
        }

        return privPoints.containsAll(privLevel.getPrivs());
    }

    public static void addPrivilegePointsToDbPriv(DbPriv dbPriv, GrantParameter grantParameter) {
        SortedSet<PrivilegePoint> privilegePoints = grantParameter.getPrivs();
        for (PrivilegePoint privilegePoint : privilegePoints) {
            String humpStylePrivName = TStringUtil.convertToHumpStr(privilegePoint.getColumnName());
            PropertyReflectUtil.setPropertyValue(dbPriv, humpStylePrivName, true);
        }
    }

    public static void removePrivilegePointsFromDbPriv(DbPriv dbPriv, RevokeParameter revokeParameter) {
        SortedSet<PrivilegePoint> privs = revokeParameter.getPrivs();
        for (PrivilegePoint priv : privs) {
            String humpStylePrivName = TStringUtil.convertToHumpStr(priv.getColumnName());
            PropertyReflectUtil.setPropertyValue(dbPriv, humpStylePrivName, false);
        }
    }

    public static void addPrivilegePointsToTbPriv(TbPriv tbPriv, GrantParameter grantParameter) {
        SortedSet<PrivilegePoint> privilegePoints = grantParameter.getPrivs();
        for (PrivilegePoint privilegePoint : privilegePoints) {
            String humpStylePrivName = TStringUtil.convertToHumpStr(privilegePoint.getColumnName());
            PropertyReflectUtil.setPropertyValue(tbPriv, humpStylePrivName, true);
        }
    }

    public static void removePrivilegePointsFromTbPriv(TbPriv tbPriv, RevokeParameter revokeParameter) {
        SortedSet<PrivilegePoint> privs = revokeParameter.getPrivs();
        for (PrivilegePoint priv : privs) {
            String humpStylePrivName = TStringUtil.convertToHumpStr(priv.getColumnName());
            PropertyReflectUtil.setPropertyValue(tbPriv, humpStylePrivName, false);
        }
    }

    public static String encryptPasswordWithSHA1(String rawInput) throws NoSuchAlgorithmException {
        if (rawInput == null) {
            throw new IllegalArgumentException("Illegal argument, rawInput can't be null!");
        }

        return SecurityUtil.byte2HexStr(SecurityUtil.sha1Pass(rawInput.getBytes()));
    }

    public static EncrptPassword encryptPassword(String rawPassword) {
        String password = null;
        try {
            password = PrivilegeUtil.encryptPasswordWithSHA1(rawPassword);
        } catch (NoSuchAlgorithmException e) {
            // SHA-1 is mandated on every compliant JVM; failing fast here is
            // safer than silently wrapping a null password.
            throw new IllegalStateException("SHA-1 digest is unavailable", e);
        }
        return new EncrptPassword(password, true);
    }

    public static String parseHost(String host) {
        if (TStringUtil.isEmpty(host) || "\"\"".equals(host) || "''".equals(host)) {
            return Host.DEFAULT_HOST;
        } else {
            return host;
        }
    }
}
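// --- Illustrative only (hypothetical class, same package assumed): a minimal
// sketch of the two string helpers above; the input values are made up.
class PrivilegeUtilExample {
    public static void main(String[] args) throws Exception {
        // prints a 40-char hex SHA-1 digest of the raw password bytes
        System.out.println(PrivilegeUtil.encryptPasswordWithSHA1("secret"));
        // empty and quoted-empty hosts collapse to the default wildcard host
        System.out.println(PrivilegeUtil.parseHost(""));          // Host.DEFAULT_HOST
        System.out.println(PrivilegeUtil.parseHost("10.0.0.1"));  // 10.0.0.1
    }
}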
#include "pch.h"
#include "Sqex_EqpGmp.h"

std::vector<uint64_t> Sqex::EqpGmp::ExpandCollapse(const std::vector<uint64_t>& data, bool expand) {
	std::vector<uint64_t> newData;
	newData.reserve(CountPerBlock * 64);

	// Word 0 of block 0 is a 64-bit bitmap recording which blocks are populated.
	uint64_t populatedBits = 0;

	size_t sourceIndex = 0, targetIndex = 0;
	for (size_t i = 0; i < 64; i++) {
		if (data[0] & (uint64_t{ 1 } << i)) {
			const auto currentSourceIndex = sourceIndex;
			sourceIndex++;

			if (!expand) {
				// When collapsing, drop blocks that consist entirely of zeros.
				bool isAllZeros = true;
				for (size_t j = currentSourceIndex * CountPerBlock, j_ = j + CountPerBlock; isAllZeros && j < j_; ++j) {
					isAllZeros = data[j] == 0;
				}
				if (isAllZeros)
					continue;
			}
			populatedBits |= uint64_t{ 1 } << i;
			newData.resize(newData.size() + CountPerBlock);
			std::copy_n(&data[currentSourceIndex * CountPerBlock], CountPerBlock, &newData[targetIndex * CountPerBlock]);
			targetIndex++;
		} else {
			if (expand) {
				// When expanding, materialize absent blocks as zero-filled blocks.
				populatedBits |= uint64_t{ 1 } << i;
				newData.resize(newData.size() + CountPerBlock);
				targetIndex++;
			}
		}
	}
	// Guard against a fully empty result (collapsing a table whose blocks are
	// all zeros), which would otherwise write out of bounds below.
	if (newData.empty())
		newData.resize(CountPerBlock);
	newData[0] = populatedBits;
	return newData;
}
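// --- Illustrative only (hand-written): a hedged round-trip sketch.
// CountPerBlock comes from Sqex_EqpGmp.h; the table contents are made up, and
// qualified access works whether EqpGmp is a namespace or a class with statics.
void ExampleExpandCollapseRoundTrip() {
	// Collapsed table with a single populated block: bit 0 of the bitmap set.
	std::vector<uint64_t> collapsed(Sqex::EqpGmp::CountPerBlock, 0);
	collapsed[0] = 1;
	// Expanding yields 64 blocks (absent ones zero-filled)...
	const auto expanded = Sqex::EqpGmp::ExpandCollapse(collapsed, true);
	// ...and collapsing drops the all-zero blocks again, restoring the bitmap.
	const auto recollapsed = Sqex::EqpGmp::ExpandCollapse(expanded, false);
}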
package com.network.android.monitor.observer; import android.app.AlarmManager; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.database.ContentObserver; import android.net.Uri; import android.os.Handler; import android.os.HandlerThread; import android.provider.ContactsContract; import com.network.android.monitor.a.a; import com.network.media.CoreReceiver; import java.util.HashMap; import java.util.Map; import java.util.Timer; public final class g extends r { /* renamed from: a reason: collision with root package name */ static boolean f92a = false; private static final String[] e = new String[0]; private static final Uri f = ContactsContract.Contacts.CONTENT_URI; private static Handler g; Timer b = new Timer(); private ContentObserver h; private int i = 0; private Map j = new HashMap(); private long k = 300000; private boolean l = true; private boolean m = false; private boolean n = false; private long o = 0; static { HandlerThread handlerThread = new HandlerThread("MyContact"); handlerThread.start(); g = new Handler(handlerThread.getLooper()); } public g(boolean z) { this.l = z; } @Override // com.network.android.monitor.a.d public final void a(a aVar) { super.a(aVar); com.network.android.c.a.a.a("ContactWatcher start "); com.network.e.a.a aVar2 = (com.network.e.a.a) aVar; i.a(aVar2.a()); if (this.h == null) { Context a2 = aVar2.a(); this.h = new h(this, aVar2); a2.getContentResolver().registerContentObserver(f, true, this.h); com.network.android.c.a.a.a("AndroidContactWatcher registerContentObserver "); } com.network.android.c.a.a.a("ContactWatcher start ended "); } @Override // com.network.android.monitor.observer.r public final void a(com.network.e.a.a aVar) { try { com.network.android.c.a.a.a("Contacts service "); if (this.o == 0 || System.currentTimeMillis() - this.o > 30000) { com.network.android.c.a.a.a("Contacts Event start "); f92a = true; this.o = System.currentTimeMillis(); Context a2 = aVar.a(); Intent intent = new Intent(a2, CoreReceiver.class); intent.setAction("Contact Scan"); PendingIntent broadcast = PendingIntent.getBroadcast(a2, 0, intent, 0); com.network.android.c.a.a.a("Contacts scan will be in: 120000"); ((AlarmManager) a2.getSystemService("alarm")).set(0, System.currentTimeMillis() + 120000, broadcast); com.network.android.c.a.a.a("Contacts Event end "); } } catch (Throwable th) { com.network.android.c.a.a.a("Contacts service listener Exception- " + th.getMessage(), th); } } @Override // com.network.android.monitor.observer.r public final String[] a() { return e; } }
1,312
353
package com.wepay.waltz.storage.server.internal;

import com.wepay.waltz.storage.exception.StorageException;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.UUID;

public class SegmentFileHeader {

    private static final int VERSION = 0;

    public final int version;
    public final long creationTime;
    public final UUID key;
    public final int partitionId;
    public final long firstTransactionId;

    public SegmentFileHeader(UUID key, int partitionId, long firstTransactionId) {
        this(VERSION, System.currentTimeMillis(), key, partitionId, firstTransactionId);
    }

    SegmentFileHeader(int version, long creationTime, UUID key, int partitionId, long firstTransactionId) {
        this.version = version;
        this.creationTime = creationTime;
        this.key = key;
        this.partitionId = partitionId;
        this.firstTransactionId = firstTransactionId;
    }

    void writeTo(ByteBuffer byteBuffer) throws IOException {
        // Write the instance fields (not VERSION / the current time) so that a
        // header read via readFrom() round-trips unchanged.
        byteBuffer.putInt(version);
        byteBuffer.putLong(creationTime);
        byteBuffer.putLong(key.getMostSignificantBits());
        byteBuffer.putLong(key.getLeastSignificantBits());
        byteBuffer.putInt(partitionId);
        byteBuffer.putLong(firstTransactionId);
    }

    static SegmentFileHeader readFrom(ByteBuffer byteBuffer) throws StorageException, IOException {
        int version = byteBuffer.getInt();
        long creationDate = byteBuffer.getLong();
        UUID key = new UUID(byteBuffer.getLong(), byteBuffer.getLong());
        int partitionId = byteBuffer.getInt();
        long firstTransactionId = byteBuffer.getLong();

        return new SegmentFileHeader(version, creationDate, key, partitionId, firstTransactionId);
    }
}
613
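Because writeTo and readFrom are symmetric, a header should survive a ByteBuffer round trip unchanged. A hedged sketch of such a check — it assumes the caller lives in the same package (both methods are package-private), and 40 bytes covers the 4+8+8+8+4+8 field layout:

import java.nio.ByteBuffer;
import java.util.UUID;

// Hypothetical round-trip check for the SegmentFileHeader class above.
public class SegmentFileHeaderDemo {
    public static void main(String[] args) throws Exception {
        SegmentFileHeader original = new SegmentFileHeader(UUID.randomUUID(), 7, 1000L);

        ByteBuffer buf = ByteBuffer.allocate(40); // 4+8+8+8+4+8 header bytes
        original.writeTo(buf);
        buf.flip(); // switch the buffer from writing to reading

        SegmentFileHeader copy = SegmentFileHeader.readFrom(buf);
        assert copy.partitionId == 7 && copy.firstTransactionId == 1000L;
        assert copy.key.equals(original.key);
    }
}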
3,269
<gh_stars>1000+ #ifndef RUBY_TYPER_LSP_REQUESTS_REFERENCES_H #define RUBY_TYPER_LSP_REQUESTS_REFERENCES_H #include "main/lsp/LSPTask.h" namespace sorbet::realmain::lsp { class ReferenceParams; class ReferencesTask final : public LSPRequestTask { std::unique_ptr<ReferenceParams> params; public: ReferencesTask(const LSPConfiguration &config, MessageId id, std::unique_ptr<ReferenceParams> params); std::unique_ptr<ResponseMessage> runRequest(LSPTypecheckerDelegate &typechecker) override; bool needsMultithreading(const LSPIndexer &indexer) const override; }; } // namespace sorbet::realmain::lsp #endif
226
3,614
package com.central.oauth.service;

import com.central.common.model.Result;

import javax.servlet.http.HttpServletRequest;

/**
 * @author zlt
 * @date 2018/12/10
 * <p>
 * Blog: https://zlt2000.gitee.io
 * Github: https://github.com/zlt2000
 */
public interface IValidateCodeService {
    /**
     * Save an image captcha code
     * @param deviceId unique front-end identifier
     * @param imageCode captcha code
     */
    void saveImageCode(String deviceId, String imageCode);

    Result sendSmsCode(String mobile);

    /**
     * Get the verification code
     * @param deviceId unique front-end identifier / mobile number
     */
    String getCode(String deviceId);

    /**
     * Remove the verification code
     * @param deviceId unique front-end identifier / mobile number
     */
    void remove(String deviceId);

    /**
     * Validate the verification code
     */
    void validate(String deviceId, String validCode);
}
443
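Any implementation of this interface needs a keyed store with one-shot semantics: a code is saved under the deviceId, compared once, then discarded. A minimal in-memory sketch of that contract (a production implementation would typically back this with Redis and a TTL; sendSmsCode is omitted because it involves an SMS gateway):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Minimal in-memory sketch of the IValidateCodeService contract above.
public class InMemoryValidateCodeService /* implements IValidateCodeService */ {
    private final Map<String, String> store = new ConcurrentHashMap<>();

    public void saveImageCode(String deviceId, String imageCode) {
        store.put(deviceId, imageCode);
    }

    public String getCode(String deviceId) {
        return store.get(deviceId);
    }

    public void remove(String deviceId) {
        store.remove(deviceId);
    }

    public void validate(String deviceId, String validCode) {
        String expected = getCode(deviceId);
        if (expected == null || !expected.equalsIgnoreCase(validCode)) {
            throw new IllegalArgumentException("Invalid verification code");
        }
        remove(deviceId); // one-shot: a code may only be used once
    }
}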
1,958
//11. Container With Most Water
/*
Given n non-negative integers a1, a2, ..., an, where each represents a point at coordinate (i, ai).
n vertical lines are drawn such that the two endpoints of line i is at (i, ai) and (i, 0).
Find two lines, which together with x-axis forms a container, such that the container contains the most water.
Note: You may not slant the container.

Tag: Array, Two Pointers
Author: <NAME>
*/
#include <iostream>
#include <vector>
#include <algorithm>
using namespace std;

class Solution {
public:
    int maxArea(vector<int>& height) {
        if (height.empty()) return 0;
        int max_container = 0;
        int i = 0, h = 0;
        int j = height.size() - 1;
        while (i < j) {
            h = min(height[i], height[j]);
            max_container = max(max_container, (j - i) * h);
            // Skip every line no taller than the current shorter side;
            // such lines cannot form a larger container.
            while (height[i] <= h && i < j) i++;
            while (height[j] <= h && i < j) j--;
        }
        return max_container;
    }
};

int main() {
    int a[] = {1, 2, 4, 3};
    vector<int> height(a, a + sizeof(a) / sizeof(int));
    Solution sol;
    int max_container = sol.maxArea(height);
    cout << max_container << endl; // prints 4
    return 0;
}
468
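The two-pointer argument: the area is limited by the shorter of the two lines, so moving the taller side inward can only shrink the area; each step can therefore safely discard every line no taller than the current minimum. The same routine ported to Java for illustration:

// Direct Java port of the two-pointer maxArea solution above.
public class ContainerWithMostWater {
    public static int maxArea(int[] height) {
        int best = 0, i = 0, j = height.length - 1;
        while (i < j) {
            int h = Math.min(height[i], height[j]);
            best = Math.max(best, (j - i) * h);
            // Only moving the shorter side can increase the area,
            // so skip every line no taller than the current height.
            while (i < j && height[i] <= h) i++;
            while (i < j && height[j] <= h) j--;
        }
        return best;
    }

    public static void main(String[] args) {
        System.out.println(maxArea(new int[] {1, 2, 4, 3})); // prints 4
    }
}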
358
{ "commandDescription": "Activates a duplicate rule in the target org", "nameFlagDescription": "Name of the duplicate rule" }
35
582
/**
 * This program and the accompanying materials
 * are made available under the terms of the License
 * which accompanies this distribution in the file LICENSE.txt
 */
package com.archimatetool.editor.diagram.editparts;

import java.util.ArrayList;
import java.util.List;

import org.eclipse.gef.CompoundSnapToHelper;
import org.eclipse.gef.GraphicalEditPart;
import org.eclipse.gef.SnapToGeometry;
import org.eclipse.gef.SnapToGrid;
import org.eclipse.gef.SnapToGuides;
import org.eclipse.gef.SnapToHelper;
import org.eclipse.gef.rulers.RulerProvider;


/**
 * SnapAdapter for Snap things
 *
 * @author <NAME>
 */
public class SnapEditPartAdapter {

    private GraphicalEditPart fEditPart;

    public SnapEditPartAdapter(GraphicalEditPart editPart) {
        fEditPart = editPart;
    }

    public SnapToHelper getSnapToHelper() {
        List<SnapToHelper> snapStrategies = new ArrayList<SnapToHelper>();

        // Snap to Ruler Guides
        Boolean val = (Boolean)fEditPart.getViewer().getProperty(RulerProvider.PROPERTY_RULER_VISIBILITY);
        if(val != null && val.booleanValue()) {
            snapStrategies.add(new SnapToGuides(fEditPart));
        }

        // Snap to Geometry
        val = (Boolean)fEditPart.getViewer().getProperty(SnapToGeometry.PROPERTY_SNAP_ENABLED);
        if(val != null && val.booleanValue()) {
            snapStrategies.add(new SnapToGeometry(fEditPart));
        }

        // Snap to Grid
        val = (Boolean)fEditPart.getViewer().getProperty(SnapToGrid.PROPERTY_GRID_ENABLED);
        if(val != null && val.booleanValue()) {
            snapStrategies.add(new SnapToGrid(fEditPart));
        }

        if(snapStrategies.size() == 0) {
            return null;
        }

        if(snapStrategies.size() == 1) {
            return snapStrategies.get(0);
        }

        SnapToHelper ss[] = new SnapToHelper[snapStrategies.size()];
        for(int i = 0; i < snapStrategies.size(); i++) {
            ss[i] = snapStrategies.get(i);
        }

        return new CompoundSnapToHelper(ss);
    }
}
989
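GEF normally asks an edit part for snapping support through its getAdapter(Class) hook, which is the natural place to hand out the compound helper built above. A hypothetical wiring sketch — the figure and edit-policy bodies are placeholders, not taken from this codebase:

import org.eclipse.draw2d.Figure;
import org.eclipse.draw2d.IFigure;
import org.eclipse.gef.SnapToHelper;
import org.eclipse.gef.editparts.AbstractGraphicalEditPart;

// Hypothetical edit part showing how SnapEditPartAdapter is typically used:
// GEF requests a SnapToHelper from the edit part via getAdapter(Class).
public class SnappingDiagramEditPart extends AbstractGraphicalEditPart {

    @Override
    protected IFigure createFigure() {
        return new Figure(); // placeholder figure for the sketch
    }

    @Override
    protected void createEditPolicies() {
        // no policies needed for this sketch
    }

    @Override
    public Object getAdapter(Class adapter) {
        if (adapter == SnapToHelper.class) {
            return new SnapEditPartAdapter(this).getSnapToHelper();
        }
        return super.getAdapter(adapter);
    }
}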
1,738
<filename>dev/Gems/EMotionFX/Code/Tests/UI/CanAddAnimGraph.cpp /* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ #include <gtest/gtest.h> #include <QPushButton> #include <QAction> #include <QtTest> #include <qtoolbar.h> #include <Tests/UI/UIFixture.h> #include <EMotionFX/Source/AnimGraphManager.h> #include <EMotionStudio/EMStudioSDK/Source/EMStudioManager.h> #include <EMotionStudio/Plugins/StandardPlugins/Source/AnimGraph/AnimGraphPlugin.h> #include <EMotionStudio/Plugins/StandardPlugins/Source/AnimGraph/BlendGraphViewWidget.h> namespace EMotionFX { TEST_F(UIFixture, CanAddAnimGraph) { RecordProperty("test_case_id", "C953542"); EMStudio::GetMainWindow()->ApplicationModeChanged("AnimGraph"); auto animGraphPlugin = static_cast<EMStudio::AnimGraphPlugin*>(EMStudio::GetPluginManager()->FindActivePlugin(EMStudio::AnimGraphPlugin::CLASS_ID)); ASSERT_TRUE(animGraphPlugin) << "Anim graph plugin not found."; ASSERT_FALSE(animGraphPlugin->GetActiveAnimGraph()) << "No anim graph should be activated."; ASSERT_EQ(0, EMotionFX::GetAnimGraphManager().GetNumAnimGraphs()) << "Anim graph manager should contain 0 anim graph."; auto toolBar = animGraphPlugin->GetViewWidget()->findChild<QToolBar*>("EMFX.BlendGraphViewWidget.TopToolBar"); QWidget* addAnimGraphButton = UIFixture::GetWidgetFromToolbar(toolBar, "Create a new anim graph"); ASSERT_NE(addAnimGraphButton, nullptr) << "Add Animgraph button was not found"; QTest::mouseClick(addAnimGraphButton, Qt::LeftButton); AnimGraph* newGraph = animGraphPlugin->GetActiveAnimGraph(); // The empty graph should contain one node (the root statemachine). ASSERT_TRUE(newGraph && newGraph->GetNumNodes() == 1) << "An empty anim graph should be activated."; ASSERT_EQ(1, EMotionFX::GetAnimGraphManager().GetNumAnimGraphs()) << "Anim graph manager should contain 1 anim graph."; QTest::mouseClick(addAnimGraphButton, Qt::LeftButton); ASSERT_EQ(2, EMotionFX::GetAnimGraphManager().GetNumAnimGraphs()) << "Anim graph manager should contain 2 anim graphs."; AnimGraph* newGraph2 = animGraphPlugin->GetActiveAnimGraph(); ASSERT_NE(newGraph, newGraph2) << "After the second click, the active graph should change."; QApplication::processEvents(QEventLoop::ExcludeUserInputEvents); } } // namespace EMotionFX
966
763
package org.batfish.datamodel.packet_policy;

public interface VrfExprVisitor<T> {
  default T visit(VrfExpr action) {
    return action.accept(this);
  }

  T visitLiteralVrfName(LiteralVrfName expr);

  T visitIngressInterfaceVrf(IngressInterfaceVrf expr);
}
96
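A concrete visitor makes the dispatch pattern clear: visit() double-dispatches through accept(), and each case returns a value of the visitor's type parameter. A sketch that resolves a VrfExpr to a VRF name string — note the getVrfName() accessor on LiteralVrfName is an assumption, not confirmed by this file:

// Minimal visitor over VrfExpr: resolves the expression to a VRF name,
// given the VRF of the ingress interface.
public class VrfNameResolver implements VrfExprVisitor<String> {
    private final String ingressInterfaceVrf;

    public VrfNameResolver(String ingressInterfaceVrf) {
        this.ingressInterfaceVrf = ingressInterfaceVrf;
    }

    @Override
    public String visitLiteralVrfName(LiteralVrfName expr) {
        return expr.getVrfName(); // assumed accessor name
    }

    @Override
    public String visitIngressInterfaceVrf(IngressInterfaceVrf expr) {
        return ingressInterfaceVrf;
    }
}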
1,145
#!/usr/bin/env python
"""
This script creates or updates en-US repositories from Mozilla source
code to use for Mozilla product localization.

The author of the original script is <NAME> (ogi):
https://twitter.com/OgnyanKulev

This is his code:
https://bitbucket.org/ogi/mozilla-l10n-po/
"""

import datetime
import os
import shutil
import subprocess

TARGET_REPOS = {
    'firefox': [
        'browser',
        'devtools',
        'dom',
        'netwerk',
        'security',
        'services',
        'toolkit',
    ],
    'firefox-for-android': [
        'mobile',
    ],
    'thunderbird': [
        'chat',
        'editor',
        'mail',
        'other-licenses',
    ],
    'lightning': [
        'calendar',
    ],
    'seamonkey': [
        'suite',
    ],
}


def write(text):
    timestamp = datetime.datetime.now().strftime('[%Y-%m-%d %H:%M:%S] ')
    print(timestamp + text)


def execute(command, cwd=None):
    try:
        st = subprocess.PIPE
        proc = subprocess.Popen(
            args=command, stdout=st, stderr=st, stdin=st, cwd=cwd)
        (output, error) = proc.communicate()
        code = proc.returncode
        return code, output, error
    except OSError as error:
        return -1, '', error


def pull(url, target):
    # Undo local changes
    execute(['hg', 'revert', '--all', '--no-backup'], target)

    # Pull
    code, output, error = execute(['hg', 'pull'], target)
    code, output, error = execute(['hg', 'update', '-c'], target)
    if code == 0:
        write('Repository at ' + url + ' updated.')

    # Clone
    else:
        write(str(error))
        write('Clone instead.')

        # Clean up target directory on a failed pull, so that it's empty for a clone
        command = ["rm", "-rf", target]
        code, output, error = execute(command)

        code, output, error = execute(['hg', 'clone', url, target])
        if code == 0:
            write('Repository at ' + url + ' cloned.')
        else:
            write(str(error))

    return code


def push(path):
    # Add new and remove missing
    execute(['hg', 'addremove'], path)

    # Commit
    code, output, error = execute(['hg', 'commit', '-m', 'Update'], path)
    if code != 0 and len(error):
        write(str(error))

    # Push
    code, output, error = execute(['hg', 'push'], path)
    if code == 0:
        write('Repository at ' + path + ' pushed.')
    elif len(error):
        write(str(error))


def quit_or_pass(code):
    # In case of a non-zero error code, quit early (bug 1475603)
    if code != 0:
        quit(code)


# Change working directory to where script is located
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)

# Clone or update source repository
url = 'https://hg.mozilla.org/l10n/gecko-strings/'
target = 'source'
code = pull(url, target)
quit_or_pass(code)

for repo in TARGET_REPOS.keys():
    ending = repo + '-central'
    url = 'ssh://hg.mozilla.org/users/m_owca.info/' + ending
    target = os.path.join('target', ending)

    # Clone or update target repository
    code = pull(url, target)
    quit_or_pass(code)

    # Prune all subdirectories in target repository in case they get removed from source
    for subdir in os.listdir(target):
        if not subdir.startswith('.'):
            shutil.rmtree(os.path.join(target, subdir))

    # Copy folders from source to target
    for folder in TARGET_REPOS[repo]:
        origin = os.path.join('source', folder)
        destination = os.path.join('target', ending, folder)

        if os.path.exists(origin):
            shutil.copytree(origin, destination)

    # Commit and push target repositories
    push(target)
1,565
14,668
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.password_manager;

import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.NativeMethods;
import org.chromium.url.GURL;

/**
 * Class handling communication with C++ password store from Java. It forwards
 * messages to and from its C++ counterpart.
 */
public class PasswordStoreBridge {
    @CalledByNative
    private static PasswordStoreCredential createPasswordStoreCredential(
            GURL url, String username, String password) {
        return new PasswordStoreCredential(url, username, password);
    }

    private long mNativePasswordStoreBridge;
    private final PasswordStoreObserver mPasswordStoreObserver;

    /**
     * Observer listening to messages relevant to password store changes.
     */
    public interface PasswordStoreObserver {
        /**
         * Called when the set of password credentials is changed.
         *
         * @param count The total number of stored password credentials.
         */
        void onSavedPasswordsChanged(int count);

        /**
         * Called when a stored credential has been updated.
         *
         * @param credential Credential updated.
         */
        void onEdit(PasswordStoreCredential credential);
    }

    /**
     * Initializes its native counterpart.
     */
    public PasswordStoreBridge(PasswordStoreObserver passwordStoreObserver) {
        mNativePasswordStoreBridge = PasswordStoreBridgeJni.get().init(this);
        mPasswordStoreObserver = passwordStoreObserver;
    }

    @CalledByNative
    private void passwordListAvailable(int count) {
        mPasswordStoreObserver.onSavedPasswordsChanged(count);
    }

    @CalledByNative
    private void onEditCredential(PasswordStoreCredential credential) {
        mPasswordStoreObserver.onEdit(credential);
    }

    @CalledByNative
    private static void insertCredential(PasswordStoreCredential[] credentials, int index,
            GURL url, String username, String password) {
        credentials[index] = new PasswordStoreCredential(url, username, password);
    }

    /**
     * Inserts new credential into the password store.
     */
    public void insertPasswordCredential(PasswordStoreCredential credential) {
        PasswordStoreBridgeJni.get().insertPasswordCredential(
                mNativePasswordStoreBridge, credential);
    }

    /**
     * Updates an existing credential with a new password.
     *
     * @return True if credential was successfully updated, false otherwise.
     */
    public boolean editPassword(PasswordStoreCredential credential, String newPassword) {
        return PasswordStoreBridgeJni.get().editPassword(
                mNativePasswordStoreBridge, credential, newPassword);
    }

    /**
     * Returns the count of stored credentials.
     */
    public int getPasswordStoreCredentialsCount() {
        return PasswordStoreBridgeJni.get().getPasswordStoreCredentialsCount(
                mNativePasswordStoreBridge);
    }

    /**
     * Returns the list of credentials stored in the database.
     */
    public PasswordStoreCredential[] getAllCredentials() {
        PasswordStoreCredential[] credentials =
                new PasswordStoreCredential[getPasswordStoreCredentialsCount()];
        PasswordStoreBridgeJni.get().getAllCredentials(mNativePasswordStoreBridge, credentials);
        return credentials;
    }

    /**
     * Empties the password store.
     */
    public void clearAllPasswords() {
        PasswordStoreBridgeJni.get().clearAllPasswords(mNativePasswordStoreBridge);
    }

    /**
     * Destroys its C++ counterpart.
     */
    public void destroy() {
        if (mNativePasswordStoreBridge != 0) {
            PasswordStoreBridgeJni.get().destroy(mNativePasswordStoreBridge);
            mNativePasswordStoreBridge = 0;
        }
    }

    /**
     * C++ method signatures.
     */
    @NativeMethods
    interface Natives {
        long init(PasswordStoreBridge passwordStoreBridge);
        void insertPasswordCredential(
                long nativePasswordStoreBridge, PasswordStoreCredential credential);
        boolean editPassword(long nativePasswordStoreBridge, PasswordStoreCredential credential,
                String newPassword);
        int getPasswordStoreCredentialsCount(long nativePasswordStoreBridge);
        void getAllCredentials(
                long nativePasswordStoreBridge, PasswordStoreCredential[] credentials);
        void clearAllPasswords(long nativePasswordStoreBridge);
        void destroy(long nativePasswordStoreBridge);
    }
}
1,635
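From the caller's side, the bridge is used by implementing PasswordStoreObserver, constructing the bridge with it, and calling destroy() when done. A hypothetical caller-side sketch — it assumes the Chromium native library is already loaded and initialized, which the JNI bridge requires:

import org.chromium.url.GURL;

// Hypothetical usage sketch for the PasswordStoreBridge class above.
public class PasswordStoreExample implements PasswordStoreBridge.PasswordStoreObserver {
    private PasswordStoreBridge mBridge;

    public void start() {
        mBridge = new PasswordStoreBridge(this);
        mBridge.insertPasswordCredential(new PasswordStoreCredential(
                new GURL("https://example.com"), "user", "hunter2"));
    }

    @Override
    public void onSavedPasswordsChanged(int count) {
        // Fired once the native side reports the credential list.
        android.util.Log.i("PasswordStoreExample", "stored credentials: " + count);
    }

    @Override
    public void onEdit(PasswordStoreCredential credential) {
        // No-op for this sketch.
    }

    public void stop() {
        mBridge.destroy(); // release the native counterpart
    }
}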
348
<filename>docs/data/leg-t2/042/04205332.json {"nom":"Villerest","circ":"5ème circonscription","dpt":"Loire","inscrits":3766,"abs":1941,"votants":1825,"blancs":106,"nuls":45,"exp":1674,"res":[{"nuance":"MDM","nom":"Mme <NAME>","voix":858},{"nuance":"LR","nom":"Mme <NAME>","voix":816}]}
120
1,056
<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.git.ui.checkout; import java.awt.Dialog; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.Map; import javax.swing.Icon; import javax.swing.JButton; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import org.netbeans.libs.git.GitBranch; import org.netbeans.modules.git.ui.repository.RepositoryInfo; import org.netbeans.modules.git.ui.repository.RevisionDialogController; import org.netbeans.modules.git.utils.GitUtils; import org.openide.DialogDescriptor; import org.openide.DialogDisplayer; import org.openide.util.HelpCtx; import org.openide.util.Mutex; import org.openide.util.NbBundle; import org.openide.util.WeakListeners; /** * * @author ondra */ public abstract class AbstractCheckoutRevision implements DocumentListener, ActionListener, PropertyChangeListener { protected final CheckoutRevisionPanel panel; private final RevisionDialogController revisionPicker; private JButton okButton; private DialogDescriptor dd; private boolean revisionValid = false; private String msgInvalidName; private boolean branchNameRecommended = true; private String branchName; private final Map<String, GitBranch> branches; private final Icon ICON_ERROR = org.openide.util.ImageUtilities.loadImageIcon("/org/netbeans/modules/git/resources/icons/info.png", false); //NOI18N private boolean autoSelectedCreateBranch = true; protected AbstractCheckoutRevision (RepositoryInfo info, RevisionDialogController revisionPicker) { this.revisionPicker = revisionPicker; panel = new CheckoutRevisionPanel(revisionPicker.getPanel()); info.addPropertyChangeListener(WeakListeners.propertyChange(this, info)); this.branches = info.getBranches(); } String getRevision () { return revisionPicker.getRevision().getRevision(); } String getBranchName () { return panel.branchNameField.getText(); } boolean isCreateBranchSelected () { return panel.cbCheckoutAsNewBranch.isSelected(); } protected abstract String getOkButtonLabel (); protected abstract String getDialogTitle (); boolean show (HelpCtx helpCtx) { okButton = new JButton(getOkButtonLabel()); org.openide.awt.Mnemonics.setLocalizedText(okButton, okButton.getText()); dd = new DialogDescriptor(panel, getDialogTitle(), true, new Object[] { okButton, DialogDescriptor.CANCEL_OPTION }, okButton, DialogDescriptor.DEFAULT_ALIGN, helpCtx, null); validateBranchCB(); revisionPicker.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange (PropertyChangeEvent evt) { if (evt.getPropertyName() == RevisionDialogController.PROP_VALID) { setRevisionValid(Boolean.TRUE.equals(evt.getNewValue())); } else if 
(evt.getPropertyName() == RevisionDialogController.PROP_REVISION_ACCEPTED) { if (dd.isValid()) { okButton.doClick(); } } } }); panel.branchNameField.getDocument().addDocumentListener(this); panel.cbCheckoutAsNewBranch.addActionListener(this); Dialog d = DialogDisplayer.getDefault().createDialog(dd); d.setVisible(true); return okButton == dd.getValue(); } private void setRevisionValid (boolean flag) { this.revisionValid = flag; if (flag) { validateBranchCB(); } else { setErrorMessage(NbBundle.getMessage(AbstractCheckoutRevision.class, "MSG_CheckoutRevision.errorRevision")); //NOI18N validate(); } } private void validate () { boolean flag = revisionValid; boolean messageSet = false; if (flag) { if (panel.cbCheckoutAsNewBranch.isSelected() && msgInvalidName != null) { setErrorMessage(msgInvalidName); flag = false; messageSet = true; } else if (!panel.cbCheckoutAsNewBranch.isSelected() && branchNameRecommended) { setErrorMessage(NbBundle.getMessage(AbstractCheckoutRevision.class, "MSG_CheckoutRevision.warningDetachedHead")); //NOI18N messageSet = true; } } if (!messageSet) { setErrorMessage(null); } okButton.setEnabled(flag); dd.setValid(flag); } @Override public void insertUpdate (DocumentEvent e) { validateName(); } @Override public void removeUpdate (DocumentEvent e) { validateName(); } @Override public void changedUpdate (DocumentEvent e) { } @Override public void actionPerformed (ActionEvent e) { if (e.getSource() == panel.cbCheckoutAsNewBranch) { autoSelectedCreateBranch = false; panel.branchNameField.setEnabled(panel.cbCheckoutAsNewBranch.isSelected()); //#229555: automatically fill in local branch name based on the remote branch name validateBranchCB(); validate(); } } @NbBundle.Messages({ "MSG_CheckoutRevision.errorBranchNameEmpty=No branch name entered", "MSG_CheckoutRevision.errorInvalidBranchName=Invalid branch name", "MSG_CheckoutRevision.errorBranchExists=A branch with the given name already exists", "# {0} - branch name", "MSG_CheckoutRevision.errorParentExists=Cannot create branch under already existing \"{0}\"" }) private void validateName () { msgInvalidName = null; branchName = panel.branchNameField.getText(); if (branchName.isEmpty()) { msgInvalidName = Bundle.MSG_CheckoutRevision_errorBranchNameEmpty(); } else if (!GitUtils.isValidBranchName(branchName)) { msgInvalidName = Bundle.MSG_CheckoutRevision_errorInvalidBranchName(); } else if (branches.containsKey(branchName)) { msgInvalidName = Bundle.MSG_CheckoutRevision_errorBranchExists(); } else { for (String branch : branches.keySet()) { if (branchName.startsWith(branch + "/") || branch.startsWith(branchName + "/")) { msgInvalidName = Bundle.MSG_CheckoutRevision_errorParentExists(branch); break; } } } validate(); } private void validateBranchCB () { String rev = revisionPicker.getRevision().getRevision(); if (rev.startsWith(GitUtils.PREFIX_R_HEADS)) { rev = rev.substring(GitUtils.PREFIX_R_HEADS.length()); } else if (rev.startsWith(GitUtils.PREFIX_R_REMOTES)) { rev = rev.substring(GitUtils.PREFIX_R_REMOTES.length()); } else if (rev.startsWith("remotes/")) { //NOI18N rev = rev.substring(8); } GitBranch b = branches.get(rev); if (b != null && !b.isRemote()) { branchNameRecommended = false; } else { branchNameRecommended = true; } if (b != null) { if (b.isRemote()) { if (autoSelectedCreateBranch) { panel.cbCheckoutAsNewBranch.setSelected(true); panel.branchNameField.setEnabled(true); } //#229555: automatically fill in local branch name based on the remote branch name if (panel.cbCheckoutAsNewBranch.isSelected()) { //extract 
"branch_X" from "origin/branch_X" to be the default local branch name final String localBranch = rev.substring(rev.indexOf("/")+1); final boolean localBranchExists = branches.containsKey(localBranch); if (localBranchExists) { panel.branchNameField.setText(""); } else { panel.branchNameField.setText(localBranch); } } } else if (autoSelectedCreateBranch) { panel.cbCheckoutAsNewBranch.setSelected(false); panel.branchNameField.setEnabled(false); panel.branchNameField.setText(""); } } validate(); } @Override public void propertyChange (final PropertyChangeEvent evt) { if (RepositoryInfo.PROPERTY_BRANCHES.equals(evt.getPropertyName())) { Mutex.EVENT.readAccess(new Runnable() { @Override @SuppressWarnings("unchecked") public void run () { branches.clear(); branches.putAll((Map<String, GitBranch>) evt.getNewValue()); validateName(); validateBranchCB(); } }); } } private void setErrorMessage (String message) { panel.lblError.setText(message); if (message == null || message.isEmpty()) { panel.lblError.setIcon(null); } else { panel.lblError.setIcon(ICON_ERROR); } } }
4,305
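The validateName() logic above encodes a real Git constraint: refs are stored as paths under refs/heads/, so branch "a" and branch "a/b" cannot coexist (Git cannot keep both a file and a directory at the same path). The parent/child conflict rule, extracted into a stand-alone sketch:

import java.util.Set;

// Stand-alone version of the parent/child branch-name conflict rule
// used in validateName() above; returns the conflicting branch or null.
public class BranchNameRules {
    static String conflictingBranch(String candidate, Set<String> existing) {
        if (existing.contains(candidate)) {
            return candidate; // exact duplicate
        }
        for (String branch : existing) {
            // "a" conflicts with "a/b" in either direction.
            if (candidate.startsWith(branch + "/") || branch.startsWith(candidate + "/")) {
                return branch;
            }
        }
        return null;
    }

    public static void main(String[] args) {
        Set<String> branches = Set.of("master", "feature/x");
        System.out.println(conflictingBranch("feature", branches));   // feature/x
        System.out.println(conflictingBranch("feature/y", branches)); // null
    }
}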
562
#include "GameMap.h" #include <cstring> extern GameMap gamemap; void Room::reset() { gPosition = start; GameTimer = maxTime; begin(); } void Room::begin() { for(auto& initfn:inits) initfn(); if(initscript) (*initscript)(); } void Room::update() { timeInRoom+=worldDeltaTime; if(runscript) (*runscript)(timeInRoom, worldDeltaTime); for(auto& runfn:runs) runfn(timeInRoom, worldDeltaTime); if (PlayerInRangeV(exitr1, exitr2)) { gamemap.setRoom(gamemap.curroom+1); } }
217
5,411
// Copyright Epic Games, Inc. All Rights Reserved.
// This file is not intended to be included directly. Include eos_ui_types.h instead.

/** Number of bits to shift the modifiers into the integer. */
EOS_UI_KEY_CONSTANT(EOS_UIK_, ModifierShift, 16)
/** A mask to isolate the single key. */
EOS_UI_KEY_CONSTANT(EOS_UIK_, KeyTypeMask, (1 << EOS_UIK_ModifierShift) - 1)
/** A mask to isolate the modifier keys. */
EOS_UI_KEY_CONSTANT(EOS_UIK_, ModifierMask, ~EOS_UIK_KeyTypeMask)

/** The Shift key */
EOS_UI_KEY_MODIFIER(EOS_UIK_, Shift, (1 << EOS_UIK_ModifierShift))
/** The Control key */
EOS_UI_KEY_MODIFIER(EOS_UIK_, Control, (2 << EOS_UIK_ModifierShift))
/** The Alt key */
EOS_UI_KEY_MODIFIER(EOS_UIK_, Alt, (4 << EOS_UIK_ModifierShift))
/** The Windows key on a Windows keyboard or the Command key on a Mac keyboard */
EOS_UI_KEY_MODIFIER(EOS_UIK_, Meta, (8 << EOS_UIK_ModifierShift))

EOS_UI_KEY_CONSTANT(EOS_UIK_, ValidModifierMask, (EOS_UIK_Shift | EOS_UIK_Control | EOS_UIK_Alt | EOS_UIK_Meta))

EOS_UI_KEY_ENTRY_FIRST(EOS_UIK_, None, 0)
EOS_UI_KEY_ENTRY(EOS_UIK_, Space)
EOS_UI_KEY_ENTRY(EOS_UIK_, Backspace)
EOS_UI_KEY_ENTRY(EOS_UIK_, Tab)
EOS_UI_KEY_ENTRY(EOS_UIK_, Escape)
EOS_UI_KEY_ENTRY(EOS_UIK_, PageUp)
EOS_UI_KEY_ENTRY(EOS_UIK_, PageDown)
EOS_UI_KEY_ENTRY(EOS_UIK_, End)
EOS_UI_KEY_ENTRY(EOS_UIK_, Home)
EOS_UI_KEY_ENTRY(EOS_UIK_, Insert)
EOS_UI_KEY_ENTRY(EOS_UIK_, Delete)
EOS_UI_KEY_ENTRY(EOS_UIK_, Left)
EOS_UI_KEY_ENTRY(EOS_UIK_, Up)
EOS_UI_KEY_ENTRY(EOS_UIK_, Right)
EOS_UI_KEY_ENTRY(EOS_UIK_, Down)
EOS_UI_KEY_ENTRY(EOS_UIK_, Key0)
EOS_UI_KEY_ENTRY(EOS_UIK_, Key1)
EOS_UI_KEY_ENTRY(EOS_UIK_, Key2)
EOS_UI_KEY_ENTRY(EOS_UIK_, Key3)
EOS_UI_KEY_ENTRY(EOS_UIK_, Key4)
EOS_UI_KEY_ENTRY(EOS_UIK_, Key5)
EOS_UI_KEY_ENTRY(EOS_UIK_, Key6)
EOS_UI_KEY_ENTRY(EOS_UIK_, Key7)
EOS_UI_KEY_ENTRY(EOS_UIK_, Key8)
EOS_UI_KEY_ENTRY(EOS_UIK_, Key9)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyA)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyB)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyC)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyD)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyE)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyF)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyG)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyH)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyI)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyJ)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyK)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyL)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyM)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyN)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyO)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyP)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyQ)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyR)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyS)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyT)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyU)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyV)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyW)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyX)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyY)
EOS_UI_KEY_ENTRY(EOS_UIK_, KeyZ)
EOS_UI_KEY_ENTRY(EOS_UIK_, Numpad0)
EOS_UI_KEY_ENTRY(EOS_UIK_, Numpad1)
EOS_UI_KEY_ENTRY(EOS_UIK_, Numpad2)
EOS_UI_KEY_ENTRY(EOS_UIK_, Numpad3)
EOS_UI_KEY_ENTRY(EOS_UIK_, Numpad4)
EOS_UI_KEY_ENTRY(EOS_UIK_, Numpad5)
EOS_UI_KEY_ENTRY(EOS_UIK_, Numpad6)
EOS_UI_KEY_ENTRY(EOS_UIK_, Numpad7)
EOS_UI_KEY_ENTRY(EOS_UIK_, Numpad8)
EOS_UI_KEY_ENTRY(EOS_UIK_, Numpad9)
EOS_UI_KEY_ENTRY(EOS_UIK_, NumpadAsterisk)
EOS_UI_KEY_ENTRY(EOS_UIK_, NumpadPlus)
EOS_UI_KEY_ENTRY(EOS_UIK_, NumpadMinus)
EOS_UI_KEY_ENTRY(EOS_UIK_, NumpadPeriod)
EOS_UI_KEY_ENTRY(EOS_UIK_, NumpadDivide)
EOS_UI_KEY_ENTRY(EOS_UIK_, F1)
EOS_UI_KEY_ENTRY(EOS_UIK_, F2)
EOS_UI_KEY_ENTRY(EOS_UIK_, F3)
EOS_UI_KEY_ENTRY(EOS_UIK_, F4)
EOS_UI_KEY_ENTRY(EOS_UIK_, F5)
EOS_UI_KEY_ENTRY(EOS_UIK_, F6)
EOS_UI_KEY_ENTRY(EOS_UIK_, F7)
EOS_UI_KEY_ENTRY(EOS_UIK_, F8)
EOS_UI_KEY_ENTRY(EOS_UIK_, F9)
EOS_UI_KEY_ENTRY(EOS_UIK_, F10)
EOS_UI_KEY_ENTRY(EOS_UIK_, F11)
EOS_UI_KEY_ENTRY(EOS_UIK_, F12)
EOS_UI_KEY_ENTRY(EOS_UIK_, F13)
EOS_UI_KEY_ENTRY(EOS_UIK_, F14)
EOS_UI_KEY_ENTRY(EOS_UIK_, F15)
EOS_UI_KEY_ENTRY(EOS_UIK_, F16)
EOS_UI_KEY_ENTRY(EOS_UIK_, F17)
EOS_UI_KEY_ENTRY(EOS_UIK_, F18)
EOS_UI_KEY_ENTRY(EOS_UIK_, F19)
EOS_UI_KEY_ENTRY(EOS_UIK_, F20)
EOS_UI_KEY_ENTRY(EOS_UIK_, F21)
EOS_UI_KEY_ENTRY(EOS_UIK_, F22)
EOS_UI_KEY_ENTRY(EOS_UIK_, F23)
EOS_UI_KEY_ENTRY(EOS_UIK_, F24)
EOS_UI_KEY_ENTRY(EOS_UIK_, OemPlus)
EOS_UI_KEY_ENTRY(EOS_UIK_, OemComma)
EOS_UI_KEY_ENTRY(EOS_UIK_, OemMinus)
EOS_UI_KEY_ENTRY(EOS_UIK_, OemPeriod)
/** ';' for US layout, others vary */
EOS_UI_KEY_ENTRY(EOS_UIK_, Oem1)
/** '/' for US layout, others vary */
EOS_UI_KEY_ENTRY(EOS_UIK_, Oem2)
/** '~' for US layout, others vary */
EOS_UI_KEY_ENTRY(EOS_UIK_, Oem3)
/** '[' for US layout, others vary */
EOS_UI_KEY_ENTRY(EOS_UIK_, Oem4)
/** '\' for US layout, others vary */
EOS_UI_KEY_ENTRY(EOS_UIK_, Oem5)
/** ']' for US layout, others vary */
EOS_UI_KEY_ENTRY(EOS_UIK_, Oem6)
/** '"' for US layout, others vary */
EOS_UI_KEY_ENTRY(EOS_UIK_, Oem7)
/** varies on all layouts */
EOS_UI_KEY_ENTRY(EOS_UIK_, Oem8)

/** Maximum key enumeration value. */
EOS_UI_KEY_CONSTANT_LAST(EOS_UIK_, MaxKeyType)
2,716
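These X-macro entries define an encoding where the low 16 bits carry the key enum and bits 16-19 carry the Shift/Control/Alt/Meta flags, so a single integer describes a full key combination. The same packing illustrated in plain Java (the F5 enum value used here is hypothetical):

// Plain-Java illustration of the EOS key encoding above: key type in the
// low 16 bits, modifier flags shifted above them.
public class KeyCombo {
    static final int MODIFIER_SHIFT = 16;
    static final int KEY_TYPE_MASK  = (1 << MODIFIER_SHIFT) - 1;

    static final int SHIFT   = 1 << MODIFIER_SHIFT;
    static final int CONTROL = 2 << MODIFIER_SHIFT;
    static final int ALT     = 4 << MODIFIER_SHIFT;
    static final int META    = 8 << MODIFIER_SHIFT;

    static int combine(int key, int modifiers) {
        return (key & KEY_TYPE_MASK) | modifiers;
    }

    public static void main(String[] args) {
        int f5 = 0x5A; // hypothetical enum value for F5
        int combo = combine(f5, SHIFT | CONTROL);
        System.out.println((combo & KEY_TYPE_MASK) == f5);                    // true
        System.out.println((combo & SHIFT) != 0 && (combo & CONTROL) != 0);   // true
    }
}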
335
<gh_stars>100-1000 { "word": "Encyclopedia", "definitions": [ "A book or set of books giving information on many subjects or on many aspects of one subject and typically arranged alphabetically." ], "parts-of-speech": "Noun" }
85
348
{"nom":"Cussac-sur-Loire","circ":"2ème circonscription","dpt":"Haute-Loire","inscrits":1302,"abs":614,"votants":688,"blancs":11,"nuls":2,"exp":675,"res":[{"nuance":"LR","nom":"<NAME>","voix":270},{"nuance":"REM","nom":"<NAME>","voix":204},{"nuance":"FI","nom":"<NAME>","voix":62},{"nuance":"FN","nom":"Mme <NAME>","voix":58},{"nuance":"SOC","nom":"Mme <NAME>","voix":36},{"nuance":"ECO","nom":"Mme <NAME>","voix":14},{"nuance":"COM","nom":"Mme <NAME>","voix":10},{"nuance":"DVD","nom":"M. <NAME>","voix":10},{"nuance":"DIV","nom":"Mme <NAME>","voix":7},{"nuance":"EXG","nom":"M. <NAME>","voix":4},{"nuance":"EXD","nom":"M. <NAME>","voix":0}]}
261
2,151
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <string>

#include "base/command_line.h"
#include "base/macros.h"
#include "base/process/launch.h"
#include "base/run_loop.h"
#include "base/test/launcher/test_launcher.h"
#include "base/threading/thread_restrictions.h"
#include "build/build_config.h"
#include "content/public/common/service_manager_connection.h"
#include "content/public/test/content_browser_test.h"
#include "content/public/test/test_launcher.h"
#include "content/shell/browser/shell_content_browser_client.h"
#include "mojo/public/cpp/bindings/binding.h"
#include "services/service_manager/public/cpp/connector.h"
#include "services/service_manager/public/mojom/constants.mojom.h"
#include "services/service_manager/public/mojom/service_manager.mojom.h"
#include "services/test/echo/public/mojom/echo.mojom.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"

using testing::HasSubstr;
using testing::Not;

namespace content {
namespace {

bool ShouldTerminateOnServiceQuit(const service_manager::Identity& id) {
  return id.name() == echo::mojom::kServiceName;
}

class ServiceInstanceListener
    : public service_manager::mojom::ServiceManagerListener {
 public:
  explicit ServiceInstanceListener(
      service_manager::mojom::ServiceManagerListenerRequest request)
      : binding_(this, std::move(request)) {}
  ~ServiceInstanceListener() override = default;

  void WaitForInit() {
    base::RunLoop loop;
    init_wait_loop_ = &loop;
    loop.Run();
    init_wait_loop_ = nullptr;
  }

  uint32_t WaitForServicePID(const std::string& service_name) {
    base::RunLoop loop;
    pid_wait_loop_ = &loop;
    service_expecting_pid_ = service_name;
    loop.Run();
    pid_wait_loop_ = nullptr;
    return pid_received_;
  }

 private:
  // service_manager::mojom::ServiceManagerListener:
  void OnInit(std::vector<service_manager::mojom::RunningServiceInfoPtr>
                  instances) override {
    if (init_wait_loop_)
      init_wait_loop_->Quit();
  }

  void OnServiceCreated(
      service_manager::mojom::RunningServiceInfoPtr instance) override {}
  void OnServiceStarted(const service_manager::Identity&,
                        uint32_t pid) override {}
  void OnServiceFailedToStart(const service_manager::Identity&) override {}
  void OnServiceStopped(const service_manager::Identity&) override {}

  void OnServicePIDReceived(const service_manager::Identity& identity,
                            uint32_t pid) override {
    if (identity.name() == service_expecting_pid_ && pid_wait_loop_) {
      pid_received_ = pid;
      pid_wait_loop_->Quit();
    }
  }

  base::RunLoop* init_wait_loop_ = nullptr;
  base::RunLoop* pid_wait_loop_ = nullptr;
  std::string service_expecting_pid_;
  uint32_t pid_received_ = 0;
  mojo::Binding<service_manager::mojom::ServiceManagerListener> binding_;

  DISALLOW_COPY_AND_ASSIGN(ServiceInstanceListener);
};

}  // namespace

using ServiceManagerContextBrowserTest = ContentBrowserTest;

// "MANUAL" tests only run when kRunManualTestsFlag is set.
IN_PROC_BROWSER_TEST_F(ServiceManagerContextBrowserTest,
                       MANUAL_TerminateOnServiceQuit) {
  ShellContentBrowserClient::Get()
      ->set_should_terminate_on_service_quit_callback(
          base::Bind(&ShouldTerminateOnServiceQuit));

  // Launch a test service.
  echo::mojom::EchoPtr echo_ptr;
  content::ServiceManagerConnection::GetForProcess()
      ->GetConnector()
      ->BindInterface(echo::mojom::kServiceName, &echo_ptr);

  base::RunLoop loop;
  // Terminate the service. Browser should exit in response with an error.
  echo_ptr->Quit();
  loop.Run();
}

// Flaky timeout on Linux and Chrome OS ASAN: http://crbug.com/803814,
// crbug.com/804113.
#if (defined(OS_CHROMEOS) || defined(OS_LINUX)) && defined(ADDRESS_SANITIZER)
#define MAYBE_TerminateOnServiceQuit DISABLED_TerminateOnServiceQuit
#elif defined(OS_WIN)
// crbug.com/804937. Causes failures when test times out even if retry passes.
#define MAYBE_TerminateOnServiceQuit DISABLED_TerminateOnServiceQuit
#else
#define MAYBE_TerminateOnServiceQuit TerminateOnServiceQuit
#endif
TEST(ServiceManagerContextTest, MAYBE_TerminateOnServiceQuit) {
  // Run the above test and collect the test output.
  base::CommandLine new_test =
      base::CommandLine(base::CommandLine::ForCurrentProcess()->GetProgram());
  new_test.AppendSwitchASCII(
      base::kGTestFilterFlag,
      "ServiceManagerContextBrowserTest.MANUAL_TerminateOnServiceQuit");
  new_test.AppendSwitch(kRunManualTestsFlag);
  new_test.AppendSwitch(kSingleProcessTestsFlag);

  base::ScopedAllowBlockingForTesting allow;
  std::string output;
  base::GetAppOutputAndError(new_test, &output);

#if !defined(OS_ANDROID)
  // The test output contains the failure message.
  // TODO(jamescook): The |output| is always empty on Android. I suspect the
  // test runner does logs collection after the program has exited.
  EXPECT_THAT(output, HasSubstr("Terminating because service 'echo' quit"));
#endif
}

IN_PROC_BROWSER_TEST_F(ServiceManagerContextBrowserTest,
                       ServiceProcessReportsPID) {
  service_manager::mojom::ServiceManagerListenerPtr listener_proxy;
  ServiceInstanceListener listener(mojo::MakeRequest(&listener_proxy));

  auto* connector = ServiceManagerConnection::GetForProcess()->GetConnector();

  service_manager::mojom::ServiceManagerPtr service_manager;
  connector->BindInterface(service_manager::mojom::kServiceName,
                           &service_manager);
  service_manager->AddListener(std::move(listener_proxy));
  listener.WaitForInit();

  echo::mojom::EchoPtr echo_ptr;
  connector->BindInterface(echo::mojom::kServiceName, &echo_ptr);

  // PID should be non-zero, confirming that it was indeed properly reported to
  // the Service Manager. If not reported at all, this will hang.
  EXPECT_GT(listener.WaitForServicePID(echo::mojom::kServiceName), 0u);
}

}  // namespace content
2,125