<filename>tools/rtl433/rtltest/devices/alecto.c<gh_stars>100-1000
/** @file
    AlectoV1 Weather Sensor protocol.
*/
/** @fn int alectov1_callback(r_device *decoder, bitbuffer_t *bitbuffer)
AlectoV1 Weather Sensor decoder.

Documentation also at http://www.tfd.hu/tfdhu/files/wsprotocol/auriol_protocol_v20.pdf

PPM with pulse width 500 us, long gap 4000 us, short gap 2000 us, sync gap 9000 us.

Message Format: (9 nibbles, 36 bits):
Please note that bytes need to be reversed before processing!

Format for Temperature Humidity:

    IIIICCII BMMP TTTT TTTT TTTT HHHHHHHH CCCC
    RC       Type Temperature___ Humidity Checksum

- I: 8 bit Random Device ID, includes 2 bit channel (X, 1, 2, 3)
- B: 1 bit Battery status (0 normal, 1 voltage is below ~2.6 V)
- M: 2 bit Message type, Temp/Humidity if not '11' else wind/rain sensor
- P: 1 bit a 0 indicates regular transmission, 1 indicates requested by pushbutton
- T: 12 bit Temperature (two's complement)
- H: 8 bit Humidity BCD format
- C: 4 bit Checksum

Format for Rain:

    IIIIIIII BMMP 1100 RRRR RRRR RRRR RRRR CCCC
    RC       Type      Rain                Checksum

- I: 8 bit Random Device ID, includes 2 bit channel (X, 1, 2, 3)
- B: 1 bit Battery status (0 normal, 1 voltage is below ~2.6 V)
- M: 2 bit Message type, Temp/Humidity if not '11' else wind/rain sensor
- P: 1 bit a 0 indicates regular transmission, 1 indicates requested by pushbutton
- R: 16 bit Rain (bitvalue * 0.25 mm)
- C: 4 bit Checksum

Format for Windspeed:

    IIIIIIII BMMP 1000 0000 0000 WWWWWWWW CCCC
    RC       Type                Windspd  Checksum

- I: 8 bit Random Device ID, includes 2 bit channel (X, 1, 2, 3)
- B: 1 bit Battery status (0 normal, 1 voltage is below ~2.6 V)
- M: 2 bit Message type, Temp/Humidity if not '11' else wind/rain sensor
- P: 1 bit a 0 indicates regular transmission, 1 indicates requested by pushbutton
- W: 8 bit Windspeed (bitvalue * 0.2 m/s, correction for webapp = 3600/1000 * 0.2 * 100 = 72)
- C: 4 bit Checksum

Format for Winddirection & Windgust:

    IIIIIIII BMMP 111D DDDD DDDD GGGGGGGG CCCC
    RC       Type Winddir___     Windgust Checksum

- I: 8 bit Random Device ID, includes 2 bit channel (X, 1, 2, 3)
- B: 1 bit Battery status (0 normal, 1 voltage is below ~2.6 V)
- M: 2 bit Message type, Temp/Humidity if not '11' else wind/rain sensor
- P: 1 bit a 0 indicates regular transmission, 1 indicates requested by pushbutton
- D: 9 bit Wind direction
- G: 8 bit Windgust (bitvalue * 0.2 m/s, correction for webapp = 3600/1000 * 0.2 * 100 = 72)
- C: 4 bit Checksum
*/

#include "decoder.h"

/* return 1 if the checksum passes and 0 if it fails */
int alecto_checksum(r_device *decoder, bitrow_t *bb)
{
    int i, csum = 0, csum2 = 0;
    for (i = 0; i < 4; i++) {
        uint8_t tmp = reverse8(bb[1][i]);
        csum += (tmp & 0xf) + ((tmp & 0xf0) >> 4);

        tmp = reverse8(bb[5][i]);
        csum2 += (tmp & 0xf) + ((tmp & 0xf0) >> 4);
    }

    csum  = ((bb[1][1] & 0x7f) == 0x6c) ? (csum + 0x7) : (0xf - csum);
    csum2 = ((bb[5][1] & 0x7f) == 0x6c) ? (csum2 + 0x7) : (0xf - csum2);

    csum  = reverse8((csum & 0xf) << 4);
    csum2 = reverse8((csum2 & 0xf) << 4);

    /* Quit if checksum does not work out */
    if (csum != (bb[1][4] >> 4) || csum2 != (bb[5][4] >> 4)) {
        //fprintf(stderr, "\nAlectoV1 CRC error");
        if (decoder->verbose) {
            fprintf(stderr, "AlectoV1 Checksum/Parity error\n");
        }
        return 0;
    } // Invalid checksum

    if (decoder->verbose) {
        fprintf(stderr, "Checksum = %01x (calculated %01x)\n", bb[1][4] >> 4, csum);
    }

    return 1;
}

static uint8_t bcd_decode8(uint8_t x)
{
    return ((x & 0xF0) >> 4) * 10 + (x & 0x0F);
}

static int alectov1_callback(r_device *decoder, bitbuffer_t *bitbuffer)
{
    bitrow_t *bb = bitbuffer->bb;
    uint8_t *b = bitbuffer->bb[1];
    int16_t temp;
    uint8_t humidity;
    int ret;
    data_t *data;

    unsigned bits = bitbuffer->bits_per_row[1];
    if (bits != 36)
        return 0;

    if (bb[1][0] != bb[5][0] || bb[2][0] != bb[6][0]
            || (bb[1][4] & 0xf) != 0 || (bb[5][4] & 0xf) != 0
            || bb[5][0] == 0 || bb[5][1] == 0)
        return 0;

    ret = alecto_checksum(decoder, bb);
    if (!ret)
        return 0;

    int battery_low = (b[1] & 0x80) >> 7;
    int msg_type    = (b[1] & 0x60) >> 5;
    int button      = (b[1] & 0x10) >> 4;
    int msg_rain    = (b[1] & 0x0f) == 0x0c;
    int msg_wind    = (b[1] & 0x0f) == 0x08 && b[2] == 0;
    int msg_gust    = (b[1] & 0x0e) == 0x0e;
    int channel     = (b[0] & 0xc) >> 2;
    int sensor_id   = reverse8(b[0]);
    //fprintf(stderr, "AlectoV1 type : %d rain : %d wind : %d gust : %d\n", msg_type, msg_rain, msg_wind, msg_gust);

    if (msg_type == 0x3 && !msg_rain) {
        // Wind sensor
        int skip = -1;
        /* Untested code written according to the specification, may not decode correctly */
        if ((b[1] & 0xe) == 0x8 && b[2] == 0) {
            skip = 0;
        }
        else if ((b[1] & 0xe) == 0xe) {
            skip = 4; // According to supplied data!
        }
        if (skip >= 0) {
            double speed  = reverse8(bb[1 + skip][3]);
            double gust   = reverse8(bb[5 + skip][3]);
            int direction = (reverse8(bb[5 + skip][2]) << 1) | (bb[5 + skip][1] & 0x1);

            data = data_make(
                    "model",          "",           DATA_STRING, "AlectoV1 Wind Sensor",
                    "id",             "House Code", DATA_INT,    sensor_id,
                    "channel",        "Channel",    DATA_INT,    channel,
                    "battery",        "Battery",    DATA_STRING, battery_low ? "LOW" : "OK",
                    "wind_speed",     "Wind speed", DATA_FORMAT, "%.2f m/s", DATA_DOUBLE, speed * 0.2F,
                    "wind_gust",      "Wind gust",  DATA_FORMAT, "%.2f m/s", DATA_DOUBLE, gust * 0.2F,
                    "wind_direction", "Direction",  DATA_INT,    direction,
                    "mic",            "Integrity",  DATA_STRING, "CHECKSUM",
                    NULL);
            decoder_output_data(decoder, data);
            return 1;
        }
    }
    else if (msg_type == 0x3 && msg_rain) {
        // Rain sensor
        double rain_mm = ((reverse8(b[3]) << 8) | reverse8(b[2])) * 0.25F;

        data = data_make(
                "model",      "",           DATA_STRING, "AlectoV1 Rain Sensor",
                "id",         "House Code", DATA_INT,    sensor_id,
                "channel",    "Channel",    DATA_INT,    channel,
                "battery",    "Battery",    DATA_STRING, battery_low ? "LOW" : "OK",
                "rain_total", "Total Rain", DATA_FORMAT, "%.02f mm", DATA_DOUBLE, rain_mm,
                "mic",        "Integrity",  DATA_STRING, "CHECKSUM",
                NULL);
        decoder_output_data(decoder, data);
        return 1;
    }
    else if (msg_type != 0x3
            && bb[2][0] == bb[3][0] && bb[3][0] == bb[4][0]
            && bb[4][0] == bb[5][0] && bb[5][0] == bb[6][0]
            && (bb[3][4] & 0xf) == 0 && (bb[5][4] & 0xf) == 0) {
        //static char *temp_states[4] = {"stable", "increasing", "decreasing", "invalid"};
        temp = (int16_t)((uint16_t)(reverse8(b[1]) >> 4) | (reverse8(b[2]) << 4));
        if ((temp & 0x800) != 0) {
            temp |= 0xf000; // sign-extend the 12-bit two's complement value
        }
        humidity = bcd_decode8(reverse8(b[3]));
        if (humidity > 100)
            return 0; // detect false positive: the prologue is also 36 bits and sometimes detected as Alecto

        data = data_make(
                "model",         "",            DATA_STRING, "AlectoV1 Temperature Sensor",
                "id",            "House Code",  DATA_INT,    sensor_id,
                "channel",       "Channel",     DATA_INT,    channel,
                "battery",       "Battery",     DATA_STRING, battery_low ? "LOW" : "OK",
                "temperature_C", "Temperature", DATA_FORMAT, "%.02f C", DATA_DOUBLE, (float)temp / 10.0F,
                "humidity",      "Humidity",    DATA_FORMAT, "%u %%", DATA_INT, humidity,
                "mic",           "Integrity",   DATA_STRING, "CHECKSUM",
                NULL);
        decoder_output_data(decoder, data);
        return 1;
    }

    return 0;
}

static char *output_fields[] = {
    "model",
    "id",
    "channel",
    "battery",
    "temperature_C",
    "humidity",
    "rain_total",
    "wind_speed",
    "wind_gust",
    "wind_direction",
    "mic",
    NULL
};

r_device alectov1 = {
    .name        = "AlectoV1 Weather Sensor (Alecto WS3500 WS4500 Ventus W155/W044 Oregon)",
    .modulation  = OOK_PULSE_PPM,
    .short_width = 2000,
    .long_width  = 4000,
    .gap_limit   = 7000,
    .reset_limit = 10000,
    .decode_fn   = &alectov1_callback,
    .disabled    = 0,
    .fields      = output_fields
};
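The nibble-sum checksum above is easy to misread because every byte is bit-reversed before its nibbles are summed. The following is a minimal Python sketch of the same check for a single 36-bit row packed into 5 bytes; the function names and the standalone reverse8 helper are illustrative, not part of the rtl_433 codebase.

def reverse8(x):
    # Reverse the bit order of one byte (the sensor transmits LSB first).
    x = ((x & 0xF0) >> 4) | ((x & 0x0F) << 4)
    x = ((x & 0xCC) >> 2) | ((x & 0x33) << 2)
    x = ((x & 0xAA) >> 1) | ((x & 0x55) << 1)
    return x

def alecto_checksum_ok(row):
    # Sum the nibbles of the first four bit-reversed bytes.
    csum = sum((reverse8(b) & 0xF) + (reverse8(b) >> 4) for b in row[:4])
    # One message variant (second byte masked to 0x6c) adds 0x7; the rest subtract from 0xF.
    csum = (csum + 0x7) if (row[1] & 0x7F) == 0x6C else (0xF - csum)
    # The received checksum is the high nibble of the fifth byte.
    return reverse8((csum & 0xF) << 4) == (row[4] >> 4)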
/*
 * Copyright 2011-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *    http://aws.amazon.com/apache2.0
 *
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
 * OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and
 * limitations under the License.
 */
package com.amazonaws.services.dynamodbv2.datamodeling;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.UUID;

/**
 * Annotation for auto-generating a {@link UUID}.
 *
 * <pre class="brush: java">
 * &#064;DynamoDBGeneratedUuid(DynamoDBAutoGenerateStrategy.CREATE)
 * public UUID getKey()
 * </pre>
 *
 * <p>When applied to a key field, only the strategy
 * {@link DynamoDBAutoGenerateStrategy#CREATE} is supported.</p>
 *
 * <p>The short-formed {@link DynamoDBAutoGeneratedKey} may also be used for
 * create only.</p>
 *
 * <p>May be used as a meta-annotation.</p>
 *
 * @see java.util.UUID
 */
@DynamoDB
@DynamoDBAutoGenerated(generator=DynamoDBGeneratedUuid.Generator.class)
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface DynamoDBGeneratedUuid {

    /**
     * The auto-generation strategy.
     */
    DynamoDBAutoGenerateStrategy value();

    /**
     * Default generator.
     */
    static final class Generator<T> extends DynamoDBAutoGenerator.AbstractGenerator<T> {
        private final DynamoDBTypeConverter<T,UUID> converter;

        public Generator(Class<T> targetType, DynamoDBGeneratedUuid annotation) {
            super(annotation.value());
            this.converter = StandardTypeConverters.factory().getConverter(targetType, UUID.class);
        }

        @Override
        public final T generate(final T currentValue) {
            return converter.convert(UUID.randomUUID());
        }
    }
}
{ "name": "EGOTableViewPullRefreshAndLoadMore", "version": "1.0", "authors": { "JackShi": "<EMAIL>" }, "homepage": "https://github.com/JackShi/EGOTableViewPullRefreshAndLoadMore", "summary": "Inspired by EGOTableViewPullRefresh, pull down to refresh, pull up to load more", "license": { "type": "MIT", "file": "License" }, "source": { "git": "https://github.com/JackShi/EGOTableViewPullRefreshAndLoadMore.git", "tag": "1.0" }, "source_files": [ "EGOTableViewPullRefreshAndLoadMore/EGORefreshClass/*.{h,m}", "EGOTableViewPullRefreshAndLoadMore/LoadMoreClass/*.{h,m}", "EGOTableViewPullRefreshAndLoadMore/Resources/*" ], "platforms": { "ios": null }, "requires_arc": true }
<reponame>Jabbah/vrperfkit<gh_stars>100-1000
#include "d3d11_injector.h"
#include "hooks.h"

#include <algorithm>  // std::find, used by Add/RemoveListener

namespace vrperfkit {
    namespace {
        bool alreadyInsideHook = false;

        class HookGuard {
        public:
            HookGuard() {
                state = alreadyInsideHook;
                alreadyInsideHook = true;
            }

            ~HookGuard() {
                alreadyInsideHook = state;
            }

            const bool AlreadyInsideHook() const { return state; }

        private:
            bool state;
        };

        template<typename T>
        D3D11Injector *GetInjector(T *object) {
            D3D11Injector *injector = nullptr;
            UINT size = sizeof(injector);
            object->GetPrivateData(__uuidof(D3D11Injector), &size, &injector);
            return injector;
        }

        void D3D11ContextHook_PSSetSamplers(ID3D11DeviceContext *self, UINT StartSlot, UINT NumSamplers, ID3D11SamplerState * const *ppSamplers) {
            HookGuard hookGuard;

            D3D11Injector *injector = GetInjector(self);
            if (injector != nullptr && !hookGuard.AlreadyInsideHook()) {
                if (injector->PrePSSetSamplers(StartSlot, NumSamplers, ppSamplers)) {
                    return;
                }
            }

            hooks::CallOriginal(D3D11ContextHook_PSSetSamplers)(self, StartSlot, NumSamplers, ppSamplers);
        }

        void D3D11ContextHook_OMSetRenderTargets(
                ID3D11DeviceContext *self,
                UINT NumViews,
                ID3D11RenderTargetView * const *ppRenderTargetViews,
                ID3D11DepthStencilView *pDepthStencilView) {
            HookGuard hookGuard;

            hooks::CallOriginal(D3D11ContextHook_OMSetRenderTargets)(self, NumViews, ppRenderTargetViews, pDepthStencilView);

            if (D3D11Injector *injector = GetInjector(self)) {
                injector->PostOMSetRenderTargets(NumViews, ppRenderTargetViews, pDepthStencilView);
            }
        }

        void D3D11ContextHook_OMSetRenderTargetsAndUnorderedAccessViews(
                ID3D11DeviceContext *self,
                UINT NumRTVs,
                ID3D11RenderTargetView * const *ppRenderTargetViews,
                ID3D11DepthStencilView *pDepthStencilView,
                UINT UAVStartSlot,
                UINT NumUAVs,
                ID3D11UnorderedAccessView * const *ppUnorderedAccessViews,
                const UINT *pUAVInitialCounts) {
            HookGuard hookGuard;

            hooks::CallOriginal(D3D11ContextHook_OMSetRenderTargetsAndUnorderedAccessViews)(self, NumRTVs, ppRenderTargetViews, pDepthStencilView, UAVStartSlot, NumUAVs, ppUnorderedAccessViews, pUAVInitialCounts);

            if (D3D11Injector *injector = GetInjector(self)) {
                injector->PostOMSetRenderTargets(NumRTVs, ppRenderTargetViews, pDepthStencilView);
            }
        }
    }

    D3D11Injector::D3D11Injector(ComPtr<ID3D11Device> device) {
        this->device = device;
        device->GetImmediateContext(context.GetAddressOf());

        D3D11Injector *instance = this;
        UINT size = sizeof(instance);
        device->SetPrivateData(__uuidof(D3D11Injector), size, &instance);
        context->SetPrivateData(__uuidof(D3D11Injector), size, &instance);

        hooks::InstallVirtualFunctionHook("ID3D11DeviceContext::PSSetSamplers", context.Get(), 10, (void*)&D3D11ContextHook_PSSetSamplers);
        hooks::InstallVirtualFunctionHook("ID3D11DeviceContext::OMSetRenderTargets", context.Get(), 33, (void*)&D3D11ContextHook_OMSetRenderTargets);
        hooks::InstallVirtualFunctionHook("ID3D11DeviceContext::OMSetRenderTargetsAndUnorderedAccessViews", context.Get(), 34, (void*)&D3D11ContextHook_OMSetRenderTargetsAndUnorderedAccessViews);
    }

    D3D11Injector::~D3D11Injector() {
        hooks::RemoveHook((void*)&D3D11ContextHook_PSSetSamplers);
        hooks::RemoveHook((void*)&D3D11ContextHook_OMSetRenderTargets);
        hooks::RemoveHook((void*)&D3D11ContextHook_OMSetRenderTargetsAndUnorderedAccessViews);
        device->SetPrivateData(__uuidof(D3D11Injector), 0, nullptr);
        context->SetPrivateData(__uuidof(D3D11Injector), 0, nullptr);
    }

    void D3D11Injector::AddListener(D3D11Listener *listener) {
        if (std::find(listeners.begin(), listeners.end(), listener) == listeners.end()) {
            listeners.push_back(listener);
        }
    }

    void D3D11Injector::RemoveListener(D3D11Listener *listener) {
        auto it = std::find(listeners.begin(), listeners.end(), listener);
        if (it != listeners.end()) {
            listeners.erase(it);
        }
    }

    bool D3D11Injector::PrePSSetSamplers(UINT startSlot, UINT numSamplers, ID3D11SamplerState * const *ppSamplers) {
        for (D3D11Listener *listener : listeners) {
            if (listener->PrePSSetSamplers(startSlot, numSamplers, ppSamplers)) {
                return true;
            }
        }
        return false;
    }

    void D3D11Injector::PostOMSetRenderTargets(UINT numViews, ID3D11RenderTargetView *const *renderTargetViews, ID3D11DepthStencilView *depthStencilView) {
        for (D3D11Listener *listener : listeners) {
            listener->PostOMSetRenderTargets(numViews, renderTargetViews, depthStencilView);
        }
    }
}
/*
 * Copyright 2015 Open mHealth
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.openmhealth.shim.jawbone.mapper;

/**
 * Represents different body event types in Jawbone. The enum maps each type to the property name that contains its
 * value.
 *
 * @author <NAME>
 */
public enum JawboneBodyEventType {

    BODY_WEIGHT("weight"),
    BODY_MASS_INDEX("bmi");

    private String propertyName;

    JawboneBodyEventType(String propertyName) {
        this.propertyName = propertyName;
    }

    public String getPropertyName() {
        return propertyName;
    }
}
<filename>xls/noc/config_ng/network_topology_component.cc<gh_stars>100-1000
// Copyright 2021 The XLS Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "xls/noc/config_ng/network_topology_component.h"

#include "xls/noc/config_ng/network_component_visitor_abstract.h"

namespace xls::noc {

ChannelTopologyComponent::ChannelTopologyComponent(NetworkView* view)
    : NetworkComponent(view) {}

absl::Status ChannelTopologyComponent::Visit(NetworkComponentVisitor& v) {
  return v.Handle(*this);
}

ChannelTopologyComponent::~ChannelTopologyComponent() {}

ReceivePortTopologyComponent::ReceivePortTopologyComponent(NetworkView* view)
    : NetworkComponent(view) {}

absl::Status ReceivePortTopologyComponent::Visit(NetworkComponentVisitor& v) {
  return v.Handle(*this);
}

ReceivePortTopologyComponent::~ReceivePortTopologyComponent() {}

RouterTopologyComponent::RouterTopologyComponent(NetworkView* view)
    : NetworkComponent(view), coordinate_({}) {}

const Coordinate& RouterTopologyComponent::GetCoordinate() const {
  return coordinate_;
}

RouterTopologyComponent& RouterTopologyComponent::SetCoordinate(
    const Coordinate& coordinate) {
  coordinate_ = coordinate;
  return *this;
}

absl::Status RouterTopologyComponent::Visit(NetworkComponentVisitor& v) {
  return v.Handle(*this);
}

RouterTopologyComponent::~RouterTopologyComponent() {}

SendPortTopologyComponent::SendPortTopologyComponent(NetworkView* view)
    : NetworkComponent(view) {}

absl::Status SendPortTopologyComponent::Visit(NetworkComponentVisitor& v) {
  return v.Handle(*this);
}

SendPortTopologyComponent::~SendPortTopologyComponent() {}

}  // namespace xls::noc
<reponame>proteanblank/building_tool
import bmesh

from ..arch import (
    fill_arch,
    create_arch,
    add_arch_depth,
)
from ..array import (
    spread_array,
    clamp_array_count,
    get_array_split_edges
)
from ..fill import fill_face
from ..frame import add_frame_depth
from ..facemap import (
    FaceMap,
    map_new_faces,
    add_faces_to_map,
    find_faces_without_facemap
)
from ...utils import (
    clamp,
    XYDir,
    select,
    VEC_UP,
    validate,
    local_xyz,
    sort_verts,
    valid_ngon,
    ngon_to_quad,
    get_top_edges,
    get_top_faces,
    get_bottom_faces,
    extrude_face_region,
    calc_face_dimensions,
    subdivide_face_vertically,
    subdivide_face_horizontally,
)


def create_door(bm, faces, prop):
    """Create door from face selection"""
    for face in faces:
        face.select = False
        if not valid_ngon(face):
            ngon_to_quad(bm, face)

        clamp_array_count(face, prop)
        array_faces = subdivide_face_horizontally(bm, face, widths=[prop.width] * prop.count)
        max_width = calc_face_dimensions(array_faces[0])[0]

        split_edges = get_array_split_edges(array_faces)
        split_faces = [create_door_split(bm, aface, prop) for aface in array_faces]
        spread_array(bm, split_edges, split_faces, max_width, prop)

        for face in split_faces:
            door, arch = create_door_frame(bm, face, prop)
            create_door_fill(bm, door, prop)
            if prop.add_arch:
                fill_arch(bm, arch, prop)

    bmesh.ops.remove_doubles(bm, verts=bm.verts, dist=0.0001)
    nulfaces = find_faces_without_facemap(bm)
    add_faces_to_map(bm, nulfaces, FaceMap.WALLS)
    return True


@map_new_faces(FaceMap.WALLS)
def create_door_split(bm, face, prop):
    """Use properties from SizeOffset to subdivide face into regular quads"""
    wall_w, wall_h = calc_face_dimensions(face)
    width, height, offset = *prop.size, prop.offset
    # horizontal split
    h_widths = [wall_w / 2 - offset.x - width / 2, width, wall_w / 2 + offset.x - width / 2]
    h_faces = subdivide_face_horizontally(bm, face, h_widths)
    # vertical split
    v_width = [height, wall_h - height]
    v_faces = subdivide_face_vertically(bm, h_faces[1], v_width)
    return v_faces[0]


def create_door_frame(bm, face, prop):
    """Extrude and inset face to make door frame"""
    normal = face.normal.copy()

    # XXX Frame thickness should not exceed size of door
    min_frame_size = min(calc_face_dimensions(face)) / 2
    prop.frame_thickness = clamp(prop.frame_thickness, 0.01, min_frame_size - 0.001)

    door_face, frame_faces = make_door_inset(bm, face, prop)
    arch_face = None

    # create arch
    if prop.add_arch:
        frame_faces.remove(get_top_faces(frame_faces).pop())  # remove top face from frame_faces
        top_edges = get_top_edges({e for f in get_bottom_faces(frame_faces, n=2) for e in f.edges}, n=2)
        arch_face, arch_frame_faces = create_arch(
            bm, top_edges, frame_faces, prop.arch, prop.frame_thickness, local_xyz(face)
        )
        frame_faces += arch_frame_faces
    else:
        # -- postprocess merge loose split verts
        merge_loose_split_verts(bm, door_face, prop)

    bmesh.ops.recalc_face_normals(bm, faces=list(bm.faces))

    # add depths
    if prop.add_arch:
        [door_face], _, [arch_face], frame_faces = add_frame_depth(
            bm, [door_face], [], [arch_face], frame_faces, prop.frame_depth, normal
        )
        arch_face, new_frame_faces = add_arch_depth(bm, arch_face, prop.arch.depth, normal)
        frame_faces += new_frame_faces
    else:
        [door_face], _, _, frame_faces = add_frame_depth(bm, [door_face], [], [], frame_faces, prop.frame_depth, normal)

    door_face, new_frame_faces = add_door_depth(bm, door_face, prop.door_depth, normal)
    frame_faces += new_frame_faces

    # add face maps
    add_faces_to_map(bm, [door_face], FaceMap.DOOR)
    add_faces_to_map(bm, validate(frame_faces), FaceMap.FRAME)
    if prop.add_arch:
        add_faces_to_map(bm, [arch_face], FaceMap.DOOR)

    return door_face, arch_face


def add_door_depth(bm, door, depth, normal):
    if depth > 0.0:
        door_faces, frame_faces = extrude_face_region(bm, [door], -depth, normal)
        return door_faces[0], frame_faces
    else:
        return door, []


def create_door_fill(bm, face, prop):
    """Add decorative elements on door face"""
    if prop.double_door:
        faces = subdivide_face_horizontally(bm, face, widths=[1, 1])
        for f in faces:
            fill_face(bm, f, prop, "DOOR")
    else:
        fill_face(bm, face, prop, "DOOR")


def make_door_inset(bm, face, prop):
    """Make one horizontal cut and two vertical cuts on face"""
    width, frame_thickness = prop.width, prop.frame_thickness
    door_width = width - frame_thickness * 2
    _, face_height = calc_face_dimensions(face)
    door_height = face_height - frame_thickness
    # horizontal cuts
    h_widths = [frame_thickness, door_width, frame_thickness]
    h_faces = subdivide_face_horizontally(bm, face, h_widths)
    # vertical cuts
    v_widths = [door_height, frame_thickness]
    v_faces = subdivide_face_vertically(bm, h_faces[1], v_widths)
    return v_faces[0], h_faces[::2] + [v_faces[1]]


def merge_loose_split_verts(bm, door_face, prop):
    """Merge the split verts to the corners of the window frame"""
    median = door_face.calc_center_median()
    door_face_verts = sort_verts(door_face.verts, VEC_UP)[2:]
    for vert in door_face_verts:
        extent_edge = [e for e in vert.link_edges if e not in door_face.edges].pop()
        corner_vert = extent_edge.other_vert(vert)
        move_mag = prop.frame_thickness
        move_dir = XYDir(corner_vert.co - median)
        bmesh.ops.translate(bm, verts=[corner_vert], vec=move_dir * move_mag)
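As a side note, the horizontal split in create_door_split is plain arithmetic on the wall width: the x offset shifts the opening sideways while the three strip widths always sum back to the wall width. A standalone sketch of that arithmetic (function name hypothetical):

def door_split_widths(wall_w, width, offset_x):
    # Strips left of, at, and right of the door opening.
    left = wall_w / 2 - offset_x - width / 2
    right = wall_w / 2 + offset_x - width / 2
    return [left, width, right]

# The offset terms cancel, so the strips always tile the wall exactly:
assert abs(sum(door_split_widths(4.0, 1.2, 0.5)) - 4.0) < 1e-9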
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.trello.navi2.internal;

import com.trello.navi2.Event;

import java.util.Arrays;
import java.util.List;

/**
 * A place to store a common list of handled events by activities and fragments
 */
final class HandledEvents {

  static final List<Event<?>> ACTIVITY_EVENTS = Arrays.asList(
      Event.CREATE,
      Event.CREATE_PERSISTABLE,
      Event.START,
      Event.POST_CREATE,
      Event.POST_CREATE_PERSISTABLE,
      Event.RESUME,
      Event.PAUSE,
      Event.STOP,
      Event.DESTROY,
      Event.RESTART,
      Event.SAVE_INSTANCE_STATE,
      Event.SAVE_INSTANCE_STATE_PERSISTABLE,
      Event.RESTORE_INSTANCE_STATE,
      Event.RESTORE_INSTANCE_STATE_PERSISTABLE,
      Event.NEW_INTENT,
      Event.BACK_PRESSED,
      Event.ATTACHED_TO_WINDOW,
      Event.DETACHED_FROM_WINDOW,
      Event.CONFIGURATION_CHANGED,
      Event.ACTIVITY_RESULT,
      Event.REQUEST_PERMISSIONS_RESULT
  );

  static final List<Event<?>> FRAGMENT_EVENTS = Arrays.asList(
      Event.ATTACH,
      Event.CREATE,
      Event.CREATE_VIEW,
      Event.VIEW_CREATED,
      Event.ACTIVITY_CREATED,
      Event.VIEW_STATE_RESTORED,
      Event.START,
      Event.RESUME,
      Event.PAUSE,
      Event.STOP,
      Event.DESTROY_VIEW,
      Event.DESTROY,
      Event.DETACH,
      Event.SAVE_INSTANCE_STATE,
      Event.CONFIGURATION_CHANGED,
      Event.ACTIVITY_RESULT,
      Event.REQUEST_PERMISSIONS_RESULT
  );

  private HandledEvents() {
    throw new AssertionError("No instances!");
  }
}
import argparse
import csv
from decimal import Decimal
import distutils.spawn
import glob
import json
import logging
import os
import shutil
from subprocess import call
import sys
import tempfile

import numpy
import pandas
import pydicom

from radiomics import featureextractor

scriptlogger = logging.getLogger('radiomics.dicom')
scriptlogger.setLevel(logging.DEBUG)


def dcmImageToNRRD(inputDICOMImageDir, tempDir):
    scanNRRDFile = os.path.join(tempDir, "image.nrrd")
    if not os.path.isfile(scanNRRDFile):
        call(['plastimatch', 'convert', '--input', inputDICOMImageDir,
              '--output-img', scanNRRDFile])
    return scanNRRDFile


def dcmImageToNIfTI(inputDICOMImageDir, tempDir):
    destScanNIfTIFile = os.path.join(tempDir, "volume.nii")
    scanNIfTIFile = os.path.join(inputDICOMImageDir, "volume.nii")
    scanJSONFile = os.path.join(inputDICOMImageDir, "volume.json")
    # will save to volume.nii
    if not os.path.isfile(destScanNIfTIFile):
        cmd = ['dcm2niix', "-m", "y", "-f", "volume", inputDICOMImageDir]
        call(cmd)
        shutil.move(scanNIfTIFile, destScanNIfTIFile)
        if os.path.isfile(scanJSONFile):
            os.remove(scanJSONFile)
    return destScanNIfTIFile


# individual segments will be extracted to the destination directory into NRRD
# files, with the names assigned consecutive numbers starting from 1
def dcmSEGToNRRDs(inputSEG, tempDir):
    segmentsDir = os.path.join(tempDir, 'Segments')
    if not os.path.isdir(segmentsDir):
        os.mkdir(segmentsDir)
    call(['segimage2itkimage', '--inputDICOM', inputSEG,
          '--outputDirectory', segmentsDir])
    return glob.glob(os.path.join(segmentsDir, "*nrrd"))


def writeSR(inputSEG, inputJSON, inputDICOMImageDir, outputSR):
    cmd = [
        'tid1500writer',
        '--inputImageLibraryDirectory', inputDICOMImageDir,
        '--inputCompositeContextDirectory', os.path.split(inputSEG)[0],
        '--inputMetadata', inputJSON,
        '--outputDICOM', outputSR]
    scriptlogger.debug("Writing SR with: " + str(cmd))
    call(cmd)


def getCTSeriesUID(imageDICOMDir):
    ctFile = os.listdir(imageDICOMDir)[0]
    dcm = pydicom.read_file(os.path.join(imageDICOMDir, ctFile))
    return dcm.SeriesInstanceUID


class DICOMMetadataAccessor:
    def __init__(self, dcmFileName):
        self.dcm = pydicom.read_file(dcmFileName)

    def getInstanceUID(self):
        return self.dcm.SOPInstanceUID

    def getSeriesDescription(self):
        return self.dcm.SeriesDescription

    def getSeriesInstanceUID(self):
        return self.dcm.SeriesInstanceUID


class SEGMetadataAccessor(DICOMMetadataAccessor):
    def __init__(self, segFileName):
        DICOMMetadataAccessor.__init__(self, segFileName)

        if self.dcm.SOPClassUID != '1.2.840.10008.5.1.4.1.1.66.4':
            raise ValueError(
                "SEGMetadataAccessor: DICOM object is not Segmentation!")

    def getSegmentSegmentationTypeCode(self, segmentNumber):
        try:
            return self.dcm.SegmentSequence[segmentNumber].SegmentedPropertyTypeCodeSequence[0]
        except BaseException:
            return None

    def getTrackingIdentifier(self, segmentNumber):
        try:
            return self.dcm.SegmentSequence[segmentNumber].TrackingIdentifier
        except BaseException:
            return None

    def getTrackingUniqueIdentifier(self, segmentNumber):
        try:
            return self.dcm.SegmentSequence[segmentNumber].TrackingUID
        except BaseException:
            return None

    def getSegmentDescription(self, segmentNumber):
        try:
            return self.dcm.SegmentSequence[segmentNumber].SegmentDescription
        except BaseException:
            return None

    def getSegmentAnatomicLocationCode(self, segmentNumber):
        try:
            return self.dcm.SegmentSequence[segmentNumber].AnatomicRegionSequence[0]
        except BaseException:
            return None


class CodedValue:
    def __init__(self, value, scheme, meaning):
        self.codeValue = value
        self.codingSchemeDesignator = scheme
        self.codeMeaning = meaning

    def getDict(self):
        return {"CodeValue": self.codeValue,
                "CodeMeaning": self.codeMeaning,
                "CodingSchemeDesignator": self.codingSchemeDesignator}


class TID1500Metadata:
    def __init__(self, featuresDictFile,
                 seriesDescription="Radiomics features"):
        self.featuresDict = self.readDictionary(featuresDictFile)
        self.m = {}
        self.m["@schema"] = "https://raw.githubusercontent.com/qiicr/dcmqi/master/doc/schemas/sr-tid1500-schema.json#"
        self.m["SeriesDescription"] = seriesDescription
        self.m["Measurements"] = []
        self.measurementGroupCount = 0

    def addMeasurementGroup(self):
        self.measurementGroupCount = self.measurementGroupCount + 1
        measurementsGroup = {}
        measurementsGroup["measurementItems"] = []
        measurementsGroup["ReferencedSegment"] = self.measurementGroupCount
        self.m["Measurements"].append(measurementsGroup)

    @staticmethod
    def readDictionary(featuresDictFile):
        return pandas.read_csv(featuresDictFile, sep='\t', low_memory=False)

    @staticmethod
    def makeHash(text, length=6):
        from base64 import b64encode
        from hashlib import sha1
        return b64encode(sha1(str.encode(text)).digest()).decode('ascii')[:length]

    def makePrivateCode(self, text):
        return CodedValue(self.makeHash(text), "99PYRADIOMICS", text).getDict()

    # returns None if prefix is not recognized, otherwise returns a tuple of
    # (measurementModifiers, derivationParameters)
    def prefix2codes(self, prefix):
        modifiers = []
        derivationParameters = []

        import re
        imageTransformationConcept = self.makePrivateCode("Image transformation")

        if re.match("original", prefix):
            pass
        elif re.match("square", prefix):
            modifiers.append({"modifier": imageTransformationConcept,
                              "modifierValue": self.makePrivateCode("Square transformation")})
        elif re.match("squareroot", prefix):
            modifiers.append({"modifier": imageTransformationConcept,
                              "modifierValue": self.makePrivateCode("Square root transformation")})
        elif re.match("logarithm", prefix):
            modifiers.append({"modifier": imageTransformationConcept,
                              "modifierValue": self.makePrivateCode("Logarithm transformation")})
        elif re.match("gradient", prefix):
            modifiers.append({"modifier": imageTransformationConcept,
                              "modifierValue": self.makePrivateCode("Gradient magnitude transformation")})
        elif re.match("exponential", prefix):
            modifiers.append({"modifier": imageTransformationConcept,
                              "modifierValue": self.makePrivateCode("Exponent transformation")})

        # parameterized processing operations
        elif re.match(r"wavelet-([HL]{2,3})", prefix):
            match = re.match(r"wavelet-([HL]{2,3})", prefix)
            modifiers.append({"modifier": imageTransformationConcept,
                              "modifierValue": self.makePrivateCode("Wavelet transformation")})
            modifiers.append({"modifier": self.makePrivateCode("Wavelet sub-band"),
                              "modifierValue": self.makePrivateCode(match.group(1))})
        elif re.match(r"log-sigma-([\d]+)-([\d]+)-([a-z]+)", prefix):
            match = re.match(r"log-sigma-([\d]+)-([\d]+)-([a-z]+)", prefix)
            units = match.group(3)
            if units == "mm":
                unitsCode = CodedValue("mm", "UCUM", "millimeters").getDict()
            elif units == "cm":
                unitsCode = CodedValue("cm", "UCUM", "centimeters").getDict()
            else:
                unitsCode = self.makePrivateCode(units)
            modifiers.append({"modifier": imageTransformationConcept,
                              "modifierValue": self.makePrivateCode("Laplacian of Gaussian")})
            derivationParameters.append({"derivationParameter": self.makePrivateCode("Kernel size"),
                                         "derivationParameterValue": str('.'.join([match.group(1), match.group(2)])),
                                         "derivationParameterUnits": unitsCode})
        else:
            # unknown prefix
            return None

        return modifiers, derivationParameters

    # adds a single measurement to the last measurement group
    def addMeasurement(self, value, quantityCode,
                       unitsCode=CodedValue("1", "UCUM", "no units")):
        if self.measurementGroupCount < 1:
            scriptlogger.error(
                "Cannot add measurement - no measurement groups initialized!")
            return
        (preprocessing, featureClass, featureName) = quantityCode.split('_')
        mpTuple = self.prefix2codes(preprocessing)
        if mpTuple is None:
            return

        measurement = {}

        classSubset = self.featuresDict[self.featuresDict['pyradiomics_feature_class'] == featureClass]
        featureTuple = classSubset[classSubset['pyradiomics_feature_name'] == featureName]

        if featureTuple.empty:
            codeMeaning = featureClass + "_" + featureName
            code = self.makeHash(codeMeaning)
            measurement["quantity"] = CodedValue(
                code, "99PYRADIOMICS", codeMeaning).getDict()
            if len(code) > 16:
                scriptlogger.error("Sorry, the code value is too long!")
                sys.exit()
        else:
            measurement["quantity"] = CodedValue(
                featureTuple["IBSI_code"].values[0],
                "IBSI",
                featureTuple["IBSI_meaning"].values[0]).getDict()

        try:
            if numpy.isnan(value):
                scriptlogger.info(
                    "Skipping NaN value for feature %s", quantityCode)
                return
        except Exception as e:
            scriptlogger.error("Exception checking for NaN: %s %s", str(e), value)
            return

        try:
            measurement["value"] = '%E' % Decimal(float(value))
        except Exception as e:
            scriptlogger.error("Exception formatting %s as Decimal: %s", value, str(e))
            scriptlogger.error("type of value: %s", type(value))

        measurement["units"] = unitsCode.getDict()

        self.m["Measurements"][-1]["measurementItems"].append(measurement)

        if len(mpTuple[0]):
            measurement["measurementModifiers"] = [m for m in mpTuple[0]]
        if len(mpTuple[1]):
            measurement["measurementDerivationParameters"] = [d for d in mpTuple[1]]

        return

    def saveJSONToFile(self, fileName):
        with open(fileName, 'w') as f:
            json.dump(self.m, f, indent=2, sort_keys=True)


def main():
    parser = argparse.ArgumentParser(
        usage="%(prog)s --input-image <dir> --input-seg <name> --output-sr <name>\n\n"
        + "Warning: This is a \"pyradiomics labs\" script, which means it is an experimental feature in development!\n"
        + "The intent of this helper script is to enable pyradiomics feature extraction directly from/to DICOM data.\n"
        + "The segmentation defining the region of interest must be defined as a DICOM Segmentation image.\n"
        + "Support for DICOM Radiotherapy Structure Sets for defining region of interest may be added in the future.\n")

    parser.add_argument(
        '--input-image-dir',
        dest="inputDICOMImageDir",
        metavar="<folder>",
        help="Path to the directory with the input DICOM series."
        + " It is expected that a single series is corresponding to a single scalar volume.",
        required=True)
    parser.add_argument(
        '--input-seg-file',
        dest="inputSEG",
        metavar="<file>",
        help="Path to the input segmentation defined as a DICOM Segmentation object.",
        required=True)
    parser.add_argument(
        '--output-dir',
        dest="outputDir",
        metavar="<folder>",
        help="Path to the directory for saving the resulting DICOM file.",
        required=True)
    parser.add_argument(
        '--parameters',
        dest="parameters",
        metavar="<parameters>",
        help="Pyradiomics feature extractor positional arguments")
    parser.add_argument(
        '--temp-dir',
        dest="tempDir",
        metavar="<folder>",
        help="Path to the directory to store intermediate results")
    parser.add_argument(
        '--features-dict',
        dest="featuresDict",
        metavar="<file>",
        help="Path to the dictionary mapping pyradiomics feature names to the IBSI defined features.")
    parser.add_argument(
        '--volume-reconstructor',
        dest="volumeReconstructor",
        metavar="<plastimatch or dcm2niix>",
        help="Choose the tool to be used for reconstructing image volume from the DICOM image series."
        + " Allowed options are plastimatch or dcm2niix (should be installed on the system). plastimatch"
        + " will be used by default.",
        choices=['plastimatch', 'dcm2niix'],
        default="plastimatch")
    parser.add_argument(
        '--geometry-tolerance',
        dest="geometryTolerance",
        metavar="<number>",
        help="Decimal number setting geometry tolerance for the extractor. Defaults to 1e-6.",
        default=1e-6)
    parser.add_argument(
        '--correct-mask',
        dest="correctMask",
        help="Boolean flag argument. If present, PyRadiomics will attempt to resample the mask to the image"
        + " geometry if the mask check fails.",
        action='store_true',
        default=False)

    args = parser.parse_args()

    # with tempfile.mkdtemp() as tempDir:
    tempDir = args.tempDir
    if not tempDir:
        tempDir = tempfile.mkdtemp()

    scriptlogger.info("Temporary directory: " + tempDir)

    # convert input DICOM series into a scalar volume
    # plastimatch fails for prostate DWI Data! Need to report
    # Selection of the optimal volume reconstructor may depend
    # on the specific dataset!
    if args.volumeReconstructor == "plastimatch":
        scriptlogger.info(
            "Using Plastimatch for DICOM image volume reconstruction.")
        inputImage = dcmImageToNRRD(args.inputDICOMImageDir, tempDir)
    else:
        scriptlogger.info(
            "Using dcm2niix for DICOM image volume reconstruction.")
        inputImage = dcmImageToNIfTI(args.inputDICOMImageDir, tempDir)

    # convert segmentation into segments
    inputSegments = dcmSEGToNRRDs(args.inputSEG, tempDir)
    if len(inputSegments) == 0:
        scriptlogger.error("No segments found. Cannot compute features.")
        return -1

    featuresDir = os.path.join(tempDir, 'Features')
    if not os.path.isdir(featuresDir):
        os.mkdir(featuresDir)

    # initialize Metadata for the individual features
    # TODO: to be replaced with direct mapping in the pyradiomics feature functions
    # see https://github.com/Radiomics/pyradiomics/issues/435
    if args.featuresDict is not None:
        featuresDictPath = args.featuresDict
    else:
        featuresDictPath = "featuresDict.tsv"

    if not os.path.exists(featuresDictPath):
        scriptlogger.error(
            "Features dictionary file %s is not found!", featuresDictPath)
        return -1

    m = TID1500Metadata(featuresDictPath)

    # find a valid DICOM file in the input image DICOM directory
    dicomImage = None
    for f in os.listdir(args.inputDICOMImageDir):
        try:
            pydicom.read_file(os.path.join(args.inputDICOMImageDir, f))
            dicomImage = os.path.join(args.inputDICOMImageDir, f)
            break
        except BaseException:
            continue

    if dicomImage is None:
        scriptlogger.error(
            "Input DICOM image directory does not seem to contain any valid DICOM files!")
        return -1

    imageMetadataAccessor = DICOMMetadataAccessor(
        os.path.join(args.inputDICOMImageDir, f))
    segmentationMetadataAccessor = SEGMetadataAccessor(args.inputSEG)

    pyradiomicsVersion = None

    for inputSegment in inputSegments:
        scriptlogger.debug("Processing segmentation file %s", inputSegment)
        segmentNumber = os.path.split(inputSegment)[-1].split('.')[0]

        try:
            scriptlogger.debug("Initializing extractor")
            extractionSettings = {
                "geometryTolerance": float(args.geometryTolerance),
                "correctMask": True if args.correctMask else False
            }
            params = []
            if args.parameters is not None:
                params = [args.parameters]
            extractor = featureextractor.RadiomicsFeatureExtractor(*params, **extractionSettings)
        except Exception:
            scriptlogger.error(
                'Initialization of the pyradiomics feature extraction failed.', exc_info=True)
            return -1

        featureVector = extractor.execute(
            inputImage, inputSegment, int(segmentNumber))

        if len(featureVector) == 0:
            scriptlogger.error("No features extracted!")
            return -1

        featuresFileName = os.path.join(featuresDir, segmentNumber + '.csv')
        scriptlogger.debug("Will save features as %s", featuresFileName)
        writer = csv.writer(open(featuresFileName, 'w'), lineterminator='\n')
        headers = list(featureVector.keys())
        writer.writerow(headers)
        row = []
        for h in headers:
            row.append(featureVector.get(h, ""))
        writer.writerow(row)

        scriptlogger.debug("Initializing TID 1500 Measurement groups.")
        m.addMeasurementGroup()

        includedFeatureVectorItems = 0
        for featureName in featureVector.keys():
            if featureName == 'diagnostics_Versions_PyRadiomics':
                pyradiomicsVersion = featureVector[featureName]
                continue
            featureValue = featureVector[featureName]
            featureNameSplit = featureName.split('_')
            if len(featureNameSplit) < 3:
                scriptlogger.warning(
                    "Skipping unrecognized feature %s", featureName)
                continue
            includedFeatureVectorItems += 1
            m.addMeasurement(featureValue, featureName)
        scriptlogger.debug(
            "%d of %d total features included in the TID 1500 Measurement group.",
            includedFeatureVectorItems, len(featureVector))

        # initialize metadata common to all measurements
        scriptlogger.debug("Populating common metadata")
        m.m["Measurements"][-1]["SourceSeriesForImageSegmentation"] = imageMetadataAccessor.getSeriesInstanceUID()
        m.m["Measurements"][-1]["segmentationSOPInstanceUID"] = segmentationMetadataAccessor.getInstanceUID()

        # TODO: populate those from SEG SegmentationType / AnatomicLocation
        segmentationType = segmentationMetadataAccessor.getSegmentSegmentationTypeCode(
            int(segmentNumber) - 1)
        if segmentationType:
            m.m["Measurements"][-1]["Finding"] = CodedValue(
                segmentationType.CodeValue,
                segmentationType.CodingSchemeDesignator,
                segmentationType.CodeMeaning).getDict()

        segTrackingIdentifier = segmentationMetadataAccessor.getTrackingIdentifier(int(segmentNumber) - 1)
        segTrackingUniqueIdentifier = segmentationMetadataAccessor.getTrackingUniqueIdentifier(int(segmentNumber) - 1)

        if segTrackingIdentifier:
            m.m["Measurements"][-1]["TrackingIdentifier"] = segTrackingIdentifier
        else:
            m.m["Measurements"][-1]["TrackingIdentifier"] = segmentationType.CodeMeaning

            segmentDescription = segmentationMetadataAccessor.getSegmentDescription(int(segmentNumber) - 1)
            # SegmentDescription is Type 3, and can be missing
            if segmentDescription is not None:
                m.m["Measurements"][-1]["TrackingIdentifier"] = segmentationType.CodeMeaning + " - " + segmentDescription

        if segTrackingUniqueIdentifier:
            m.m["Measurements"][-1]["TrackingUniqueIdentifier"] = segTrackingUniqueIdentifier

        segmentationLocation = segmentationMetadataAccessor.getSegmentAnatomicLocationCode(
            int(segmentNumber) - 1)
        if segmentationLocation:
            m.m["Measurements"][-1]["FindingSite"] = CodedValue(
                segmentationLocation.CodeValue,
                segmentationLocation.CodingSchemeDesignator,
                segmentationLocation.CodeMeaning).getDict()

        # AlgorithmIdentification
        m.m["Measurements"][-1]["measurementAlgorithmIdentification"] = {}
        m.m["Measurements"][-1]["measurementAlgorithmIdentification"]["AlgorithmName"] = "https://github.com/Radiomics/pyradiomics"
        m.m["Measurements"][-1]["measurementAlgorithmIdentification"]["AlgorithmVersion"] = pyradiomicsVersion
        m.m["Measurements"][-1]["measurementAlgorithmIdentification"]["AlgorithmParameters"] = [json.dumps(extractor.settings)]

    m.m["observerContext"] = {}
    m.m["observerContext"]["ObserverType"] = "DEVICE"
    m.m["observerContext"]["DeviceObserverName"] = "pyradiomics"
    m.m["observerContext"]["DeviceObserverModelName"] = pyradiomicsVersion

    m.m["compositeContext"] = [os.path.split(args.inputSEG)[-1]]
    m.m["imageLibrary"] = [os.path.split(f)[-1] for f in os.listdir(args.inputDICOMImageDir)]

    m.m["SeriesDescription"] = segmentationMetadataAccessor.getSeriesDescription() + ' - pyradiomics features'

    scriptlogger.debug("Saving temporary files for DICOM SR writer.")
    dcmqiMetadataFile = os.path.join(featuresDir, "dcmqi_sr.json")
    outputSRTempFile = os.path.join(featuresDir, "sr.dcm")
    m.saveJSONToFile(dcmqiMetadataFile)

    scriptlogger.debug("Generating DICOM SR.")
    writeSR(args.inputSEG, dcmqiMetadataFile, args.inputDICOMImageDir, outputSRTempFile)

    # copy to the dest directory under UID as a name
    try:
        dcm = pydicom.read_file(outputSRTempFile)
        shutil.move(outputSRTempFile,
                    os.path.join(args.outputDir, dcm.SOPInstanceUID + ".dcm"))
    except BaseException:
        scriptlogger.error("Failed to move output SR!")


if __name__ == "__main__":
    exeFound = {}
    for exe in ['tid1500writer', 'dcm2niix', 'plastimatch', 'segimage2itkimage']:
        if distutils.spawn.find_executable(exe) is None:
            exeFound[exe] = False
        else:
            exeFound[exe] = True
    if not (exeFound['tid1500writer'] and exeFound['segimage2itkimage']) or not (
            exeFound['plastimatch'] or exeFound['dcm2niix']):
        scriptlogger.error("Dependency converter(s) not found in the path.")
        scriptlogger.error(
            "dcmqi (https://github.com/qiicr/dcmqi), and dcm2niix (https://github.com/rordenlab/dcm2niix/releases)")
        scriptlogger.error("or Plastimatch (http://plastimatch.org/)")
        scriptlogger.error(
            "need to be installed and available in the PATH for using this converter script.")
        sys.exit()
    main()
<reponame>zl3one6/leetcode<gh_stars>1000+
#include <bits/stdc++.h>

using namespace std;

/**
 * Definition for a binary tree node.
 * struct TreeNode {
 *     int val;
 *     TreeNode *left;
 *     TreeNode *right;
 *     TreeNode() : val(0), left(nullptr), right(nullptr) {}
 *     TreeNode(int x) : val(x), left(nullptr), right(nullptr) {}
 *     TreeNode(int x, TreeNode *left, TreeNode *right) : val(x), left(left), right(right) {}
 * };
 */
class Solution {
public:
    vector<vector<int>> pathSum(TreeNode* root, int targetSum) {
        vector<vector<int>> res;
        dfs(root, targetSum, res);
        return res;
    }

private:
    vector<int> stack;

    void dfs(TreeNode* root, int sum, vector<vector<int>>& res) {
        if (root == nullptr) {
            return;
        } else if (root->left == nullptr && root->right == nullptr && sum == root->val) {
            stack.push_back(root->val);
            res.push_back(stack);
            stack.pop_back();
        } else {
            stack.push_back(root->val);
            dfs(root->left, sum - root->val, res);
            dfs(root->right, sum - root->val, res);
            stack.pop_back();
        }
    }
};
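For readers more comfortable in Python, the same backtracking idea — push the node, recurse into both children with a reduced target, then pop — looks like this (a sketch, not part of the repository):

class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right

def path_sum(root, target):
    res, path = [], []

    def dfs(node, remaining):
        if node is None:
            return
        path.append(node.val)
        if node.left is None and node.right is None and remaining == node.val:
            res.append(path.copy())  # record a root-to-leaf path that hits the target
        else:
            dfs(node.left, remaining - node.val)
            dfs(node.right, remaining - node.val)
        path.pop()  # backtrack

    dfs(root, target)
    return res

# path_sum(TreeNode(1, TreeNode(2), TreeNode(3)), 4) == [[1, 3]]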
<gh_stars>100-1000 {"nom":"Noirétable","circ":"6ème circonscription","dpt":"Loire","inscrits":1290,"abs":644,"votants":646,"blancs":19,"nuls":8,"exp":619,"res":[{"nuance":"REM","nom":"<NAME>","voix":224},{"nuance":"LR","nom":"<NAME>","voix":123},{"nuance":"FN","nom":"Mme <NAME>","voix":88},{"nuance":"ECO","nom":"<NAME>","voix":63},{"nuance":"FI","nom":"Mme <NAME>","voix":53},{"nuance":"DVD","nom":"<NAME>","voix":30},{"nuance":"EXG","nom":"Mme <NAME>","voix":15},{"nuance":"DLF","nom":"Mme <NAME>","voix":10},{"nuance":"ECO","nom":"M. <NAME>","voix":6},{"nuance":"DIV","nom":"<NAME>","voix":5},{"nuance":"DIV","nom":"<NAME>","voix":2}]}
package io.leangen.graphql.metadata.messages;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class DelegatingMessageBundle implements MessageBundle {

    private final List<MessageBundle> delegates = new ArrayList<>();

    @Override
    public String getMessage(String key) {
        return delegates.stream()
                .filter(bundle -> bundle.containsKey(key))
                .findFirst()
                .map(bundle -> bundle.getMessage(key))
                .orElse(null);
    }

    public DelegatingMessageBundle withBundles(MessageBundle... messageBundle) {
        Collections.addAll(delegates, messageBundle);
        return this;
    }
}
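The bundle resolution rule above is first-match-wins in registration order. The same idea in a few lines of Python, using plain dicts as stand-in bundles (a hypothetical helper, shown only to make the lookup order explicit):

def get_message(bundles, key):
    # Scan delegates in order; the first bundle containing the key wins.
    return next((b[key] for b in bundles if key in b), None)

print(get_message([{"a": "1"}, {"a": "2", "b": "3"}], "b"))  # "3" from the second bundle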
<filename>android/videolib/src/main/java/com/thinkkeep/videolib/model/video/CameraSupport.java<gh_stars>100-1000
package com.thinkkeep.videolib.model.video;

import android.view.SurfaceView;

import com.thinkkeep.videolib.api.EvilsLiveStreamerConfig;

/**
 * Wrapper around the Camera class.
 * Created by jason on 17/2/27.
 */
public interface CameraSupport {
    /**
     * Open the camera.
     * @return the camera instance
     */
    CameraSupport open();

    /**
     * Start pushing the stream.
     * @return negative on error, 0 on success
     */
    int startPushStream();

    /**
     * Get the camera orientation.
     * @param cameraId front or rear camera
     */
    int getOrientation(int cameraId);

    /**
     * Set the preview view.
     * @param surfaceView view
     */
    void setDisplayPreview(SurfaceView surfaceView);

    /**
     * Set the preview frame callback for streaming.
     * @param listener listener
     */
    void setOnPreviewFrameListener(OnPreviewFrameListener listener);

    /**
     * Set the streaming configuration.
     * @param config config
     */
    void setStreamConfig(EvilsLiveStreamerConfig config);

    /**
     * Stop pushing the stream.
     */
    void stopPushStream();

    /**
     * Close the camera.
     */
    void close();
}
<reponame>weicao/galaxysql
/*
 * Copyright [2013-2021], Alibaba Group Holding Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.alibaba.polardbx.optimizer.selectivity;

import org.apache.calcite.rel.core.Join;
import org.apache.calcite.rel.metadata.RelMdUtil;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.util.ImmutableBitSet;

import java.util.List;
import java.util.stream.Collectors;

public class JoinSelectivityEstimator extends AbstractSelectivityEstimator {

    private final Join join;
    private final Double leftRowCount;
    private final Double rightRowCount;
    private final int leftBound;

    public JoinSelectivityEstimator(Join join, RelMetadataQuery metadataQuery) {
        super(metadataQuery, join.getCluster().getRexBuilder());
        this.join = join;
        this.leftRowCount = metadataQuery.getRowCount(join.getLeft());
        this.rightRowCount = metadataQuery.getRowCount(join.getRight());
        this.leftBound = join.getLeft().getRowType().getFieldCount();
    }

    @Override
    public Double visitCall(RexCall call) {
        if (call.getOperator() == SqlStdOperatorTable.AND) {
            // TODO: use (a, b) -> a * b instead of (a, b) -> Math.min(a, b)
            Double selectivityAnd = call.getOperands().stream().map(rexNode -> this.evaluate(rexNode))
                .reduce(1.0, (a, b) -> Math.min(a, b));
            return normalize(selectivityAnd);
        } else if (call.getOperator() == SqlStdOperatorTable.OR) {
            Double selectivityOr = call.getOperands().stream().map(rexNode -> this.evaluate(rexNode))
                .reduce(0.0, (a, b) -> a + b - a * b);
            return normalize(selectivityOr);
        } else if (call.getOperator() == SqlStdOperatorTable.NOT) {
            Double selectivity = this.evaluate(call.getOperands().get(0));
            return normalize(1 - selectivity);
        } else if (call.getOperator() == SqlStdOperatorTable.EQUALS) {
            return estimateEqualSelectivity(call);
        } else {
            // TODO: add more predicate
            return RelMdUtil.guessSelectivity(call);
        }
    }

    private double estimateEqualSelectivity(RexCall call) {
        assert call.getOperator() == SqlStdOperatorTable.EQUALS;
        RexNode leftRexNode = call.getOperands().get(0);
        RexNode rightRexNode = call.getOperands().get(1);

        Integer leftIndex = null;
        Integer rightIndex = null;
        Boolean leftUnique = null;
        Boolean rightUnique = null;
        Double leftNdv = null;
        Double rightNdv = null;

        if (leftRexNode instanceof RexInputRef) {
            int index = ((RexInputRef) leftRexNode).getIndex();
            if (index < leftBound) {
                leftIndex = index;
                leftNdv = metadataQuery.getDistinctRowCount(join.getLeft(), ImmutableBitSet.of(index), null);
            } else {
                rightIndex = index;
                rightNdv = metadataQuery.getDistinctRowCount(join.getRight(), ImmutableBitSet.of(index - leftBound), null);
            }
        }

        if (rightRexNode instanceof RexInputRef) {
            int index = ((RexInputRef) rightRexNode).getIndex();
            if (index < leftBound) {
                leftIndex = index;
                leftNdv = metadataQuery.getDistinctRowCount(join.getLeft(), ImmutableBitSet.of(index), null);
            } else {
                rightIndex = index;
                rightNdv = metadataQuery.getDistinctRowCount(join.getRight(), ImmutableBitSet.of(index - leftBound), null);
            }
        }

        if (leftNdv != null && rightNdv != null) {
            return 1.0 / Math.max(leftNdv, rightNdv);
        }

        if (leftIndex != null) {
            leftUnique = metadataQuery.areColumnsUnique(join.getLeft(), ImmutableBitSet.of(leftIndex));
        }
        if (rightIndex != null) {
            rightUnique = metadataQuery.areColumnsUnique(join.getRight(), ImmutableBitSet.of(rightIndex - leftBound));
        }

        if (Boolean.TRUE.equals(leftUnique) && Boolean.TRUE.equals(rightUnique)) {
            return 1.0 / Math.max(leftRowCount, rightRowCount);
        } else if (Boolean.TRUE.equals(leftUnique)) {
            return 1.0 / leftRowCount;
        } else if (Boolean.TRUE.equals(rightUnique)) {
            return 1.0 / rightRowCount;
        }

        return RelMdUtil.guessSelectivity(call);
    }
}
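The EQUALS branch implements the classic System R estimate: an equi-join predicate keeps roughly 1/max(ndv_left, ndv_right) of the cross product, because each value on the lower-NDV side matches at most that fraction of the other side. A worked example in Python (the row and NDV figures are invented for illustration):

def equi_join_selectivity(left_ndv, right_ndv):
    # Each key value matches ~1/max(ndv) of the rows on the other side.
    return 1.0 / max(left_ndv, right_ndv)

# Joining two 1M-row tables on a key with 50k / 80k distinct values:
estimated_rows = 1_000_000 * 1_000_000 * equi_join_selectivity(50_000, 80_000)
print(round(estimated_rows))  # 12500000 estimated output rows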
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/android/jni_android.h"
#include "base/android/jni_array.h"
#include "base/android/jni_string.h"
#include "chrome/browser/feed/android/feed_service_factory.h"
#include "chrome/browser/feed/android/jni_headers/FeedProcessScopeDependencyProvider_jni.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/profiles/profile_manager.h"
#include "components/feed/core/proto/v2/ui.pb.h"
#include "components/feed/core/v2/public/feed_api.h"
#include "components/feed/core/v2/public/feed_service.h"
#include "components/feed/core/v2/public/feed_stream_surface.h"
#include "components/variations/variations_ids_provider.h"

namespace feed {
namespace android {

static FeedApi* GetFeedApi() {
  FeedService* service = FeedServiceFactory::GetForBrowserContext(
      ProfileManager::GetLastUsedProfile());
  if (!service)
    return nullptr;
  return service->GetStream();
}

static void JNI_FeedProcessScopeDependencyProvider_ProcessViewAction(
    JNIEnv* env,
    const base::android::JavaParamRef<jbyteArray>& data) {
  FeedApi* feed_stream_api = GetFeedApi();
  if (!feed_stream_api)
    return;
  std::string data_string;
  base::android::JavaByteArrayToString(env, data, &data_string);
  feed_stream_api->ProcessViewAction(data_string);
}

static void
JNI_FeedProcessScopeDependencyProvider_ProcessViewActionWithLoggingParameters(
    JNIEnv* env,
    const base::android::JavaParamRef<jbyteArray>& action_data,
    const base::android::JavaParamRef<jbyteArray>& logging_parameters) {
  FeedApi* feed_stream_api = GetFeedApi();
  if (!feed_stream_api)
    return;
  std::string action_data_string;
  base::android::JavaByteArrayToString(env, action_data, &action_data_string);
  std::string logging_parameters_string;
  base::android::JavaByteArrayToString(env, logging_parameters,
                                       &logging_parameters_string);
  feedui::LoggingParameters logging_parameters_value;
  if (!logging_parameters_value.ParseFromString(logging_parameters_string)) {
    DLOG(ERROR) << "Error parsing logging parameters";
    return;
  }
  feed_stream_api->ProcessViewAction(action_data_string,
                                     logging_parameters_value);
}

static base::android::ScopedJavaLocalRef<jstring>
JNI_FeedProcessScopeDependencyProvider_GetSessionId(JNIEnv* env) {
  std::string session;
  FeedApi* feed_stream_api = GetFeedApi();
  if (feed_stream_api) {
    session = feed_stream_api->GetSessionId();
  }
  return base::android::ConvertUTF8ToJavaString(env, session);
}

static base::android::ScopedJavaLocalRef<jintArray>
JNI_FeedProcessScopeDependencyProvider_GetExperimentIds(JNIEnv* env) {
  auto* variations_ids_provider =
      variations::VariationsIdsProvider::GetInstance();
  DCHECK(variations_ids_provider != nullptr);
  return base::android::ToJavaIntArray(
      env,
      variations_ids_provider->GetVariationsVectorForWebPropertiesKeys());
}

}  // namespace android
}  // namespace feed
<gh_stars>1000+
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

#include "core/codegen/mti/debug/tvm_print.h"

#include "core/codegen/common/utils.h"
#include "core/codegen/common/dump_array.h"
#include "core/codegen/mti/common.h"

#include <topi/detail/extern.h>

namespace onnxruntime {
namespace tvm_codegen {

TVM_REGISTER_GLOBAL("tvm.contrib.onnxruntime.print")
    .set_body([](tvm::TVMArgs args, tvm::TVMRetValue* /*ret*/) {
      DLTensor* X = args[0];
      DLTensor* Y = args[1];

      DLDataType dtype = X->dtype;
      std::vector<int64_t> shape;
      int64_t total_size = 1;
      for (int i = 0; i < X->ndim; ++i) {
        shape.push_back(X->shape[i]);
        total_size *= X->shape[i];
      }

      // pass X to Y
      memcpy(static_cast<char*>(Y->data) + Y->byte_offset,
             static_cast<char*>(X->data) + X->byte_offset,
             total_size * dtype.bits / 8);

      if (tvm::runtime::TypeMatch(dtype, kDLFloat, 32)) {
        float* data = reinterpret_cast<float*>(static_cast<char*>(X->data) + X->byte_offset);
        DumpArray("float tensor:", data, shape);
      } else if (tvm::runtime::TypeMatch(dtype, kDLInt, 8)) {
        int8_t* data = reinterpret_cast<int8_t*>(static_cast<char*>(X->data) + X->byte_offset);
        DumpArray("int8 tensor:", data, shape);
      } else if (tvm::runtime::TypeMatch(dtype, kDLInt, 16)) {
        int16_t* data = reinterpret_cast<int16_t*>(static_cast<char*>(X->data) + X->byte_offset);
        DumpArray("int16 tensor:", data, shape);
      } else if (tvm::runtime::TypeMatch(dtype, kDLInt, 32)) {
        int32_t* data = reinterpret_cast<int32_t*>(static_cast<char*>(X->data) + X->byte_offset);
        DumpArray("int32 tensor:", data, shape);
      } else if (tvm::runtime::TypeMatch(dtype, kDLUInt, 8)) {
        uint8_t* data = reinterpret_cast<uint8_t*>(static_cast<char*>(X->data) + X->byte_offset);
        DumpArray("uint8 tensor:", data, shape);
      } else if (tvm::runtime::TypeMatch(dtype, kDLUInt, 16)) {
        uint16_t* data = reinterpret_cast<uint16_t*>(static_cast<char*>(X->data) + X->byte_offset);
        DumpArray("uint16 tensor:", data, shape);
      } else if (tvm::runtime::TypeMatch(dtype, kDLUInt, 32)) {
        uint32_t* data = reinterpret_cast<uint32_t*>(static_cast<char*>(X->data) + X->byte_offset);
        DumpArray("uint32 tensor:", data, shape);
      } else {
        MTI_ASSERT(0 && "not implemented!");
      }
    });

tvm::Array<tvm::Tensor> PrintTVMTensorExtern(const tvm::Tensor& X,
                                             const std::string& name) {
  return topi::detail::make_extern(
      {X->shape}, {X->dtype}, {X},
      [&](tvm::Array<tvm::Buffer> ins, tvm::Array<tvm::Buffer> outs) {
        return topi::detail::call_packed({tvm::Expr("tvm.contrib.onnxruntime.print"),
                                          topi::detail::pack_buffer(ins[0]),
                                          topi::detail::pack_buffer(outs[0])});
      },
      name + "_print", "", {});
}

tvm::Tensor PrintImmutable(const tvm::Tensor& X) {
  auto outputs = PrintTVMTensorExtern(X, X->op->name + "_print");
  return outputs[0];
}

void Print(tvm::Tensor& X) {
  X = PrintImmutable(X);
}

}  // namespace tvm_codegen
}  // namespace onnxruntime
1,536
22,688
<reponame>jzjonah/apollo
#!/usr/bin/env python3

###############################################################################
# Copyright 2018 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################

import math
import os
import sys

import numpy


def get_stat2_from_data(data):
    """Find the max number of continuous frames when position error is
    larger than 30cm, 20cm and 10cm

    Arguments:
        data: error array

    Returns:
        stat: array of max number of continuous frames
    """
    max_con_frame_num_10 = 0
    max_con_frame_num_20 = 0
    max_con_frame_num_30 = 0
    tem_con_frame_num_10 = 0
    tem_con_frame_num_20 = 0
    tem_con_frame_num_30 = 0
    for d in data:
        if d > 0.1:
            tem_con_frame_num_10 += 1
            if d > 0.2:
                tem_con_frame_num_20 += 1
                if d > 0.3:
                    tem_con_frame_num_30 += 1
                else:
                    if tem_con_frame_num_30 > max_con_frame_num_30:
                        max_con_frame_num_30 = tem_con_frame_num_30
                    tem_con_frame_num_30 = 0
            else:
                if tem_con_frame_num_20 > max_con_frame_num_20:
                    max_con_frame_num_20 = tem_con_frame_num_20
                tem_con_frame_num_20 = 0
        else:
            if tem_con_frame_num_10 > max_con_frame_num_10:
                max_con_frame_num_10 = tem_con_frame_num_10
            tem_con_frame_num_10 = 0
    stat = [max_con_frame_num_10, max_con_frame_num_20, max_con_frame_num_30]
    return stat


def get_angle_stat2_from_data(data):
    """Find the max number of continuous frames when yaw error is
    larger than 1.0d, 0.6d and 0.3d

    Arguments:
        data: error array

    Returns:
        stat: array of max number of continuous frames
    """
    max_con_frame_num_1_0 = 0
    max_con_frame_num_0_6 = 0
    max_con_frame_num_0_3 = 0
    tem_con_frame_num_1_0 = 0
    tem_con_frame_num_0_6 = 0
    tem_con_frame_num_0_3 = 0
    for d in data:
        if d > 0.3:
            tem_con_frame_num_0_3 += 1
            if d > 0.6:
                tem_con_frame_num_0_6 += 1
                if d > 1.0:
                    tem_con_frame_num_1_0 += 1
                else:
                    if tem_con_frame_num_1_0 > max_con_frame_num_1_0:
                        max_con_frame_num_1_0 = tem_con_frame_num_1_0
                    tem_con_frame_num_1_0 = 0
            else:
                if tem_con_frame_num_0_6 > max_con_frame_num_0_6:
                    max_con_frame_num_0_6 = tem_con_frame_num_0_6
                tem_con_frame_num_0_6 = 0
        else:
            if tem_con_frame_num_0_3 > max_con_frame_num_0_3:
                max_con_frame_num_0_3 = tem_con_frame_num_0_3
            tem_con_frame_num_0_3 = 0
    stat = [max_con_frame_num_1_0, max_con_frame_num_0_6, max_con_frame_num_0_3]
    return stat


def get_stat_from_data(data):
    if len(data) == 0:
        print("No statistics data!")
        return []
    num_data = numpy.array(data)
    sum1 = num_data.sum()
    sum2 = (num_data * num_data).sum()
    mean = sum1 / len(data)
    std = math.sqrt(sum2 / len(data) - mean * mean)
    mx = num_data.max()
    count_less_than_30 = 0.0
    count_less_than_20 = 0.0
    count_less_than_10 = 0.0
    for d in data:
        if d < 0.3:
            count_less_than_30 += 1.0
            if d < 0.2:
                count_less_than_20 += 1.0
                if d < 0.1:
                    count_less_than_10 += 1.0
    count_less_than_30 /= float(len(data))
    count_less_than_20 /= float(len(data))
    count_less_than_10 /= float(len(data))
    stat = [mean, std, mx, count_less_than_30, count_less_than_20,
            count_less_than_10]
    return stat


def get_angle_stat_from_data(data):
    if len(data) == 0:
        print("No statistics data!")
        return []
    num_data = numpy.array(data)
    sum1 = num_data.sum()
    sum2 = (num_data * num_data).sum()
    mean = sum1 / len(data)
    std = math.sqrt(sum2 / len(data) - mean * mean)
    mx = num_data.max()
    count_less_than_1 = 0.0
    count_less_than_06 = 0.0
    count_less_than_03 = 0.0
    for d in data:
        if d < 1.0:
            count_less_than_1 += 1.0
            if d < 0.6:
                count_less_than_06 += 1.0
                if d < 0.3:
                    count_less_than_03 += 1.0
    count_less_than_1 /= float(len(data))
    count_less_than_06 /= float(len(data))
    count_less_than_03 /= float(len(data))
    stat = [mean, std, mx, count_less_than_1, count_less_than_06,
            count_less_than_03]
    return stat


def parse_file(filename, type):
    with open(filename, 'r') as fp:
        lines = fp.readlines()
        print('%d frames' % len(lines))
        error = []
        error_lon = []
        error_lat = []
        error_alt = []
        error_roll = []
        error_pitch = []
        error_yaw = []
        for line in lines:
            s = line.split()
            if (len(s) > 7):
                # error.append(float(s[6]))
                error_lon.append(float(s[2]))
                error_lat.append(float(s[3]))
                error_alt.append(float(s[4]))
                error_roll.append(float(s[5]))
                error_pitch.append(float(s[6]))
                error_yaw.append(float(s[7]))
                x = float(s[2])
                y = float(s[3])
                error.append(math.sqrt(x * x + y * y))
                # print "%f %f %f" % (error[-1], error_lon[-1], error_lat[-1])
        if type == "all":
            print_distance_error(error, error_lon, error_lat, error_alt)
            print_angle_error(error_roll, error_pitch, error_yaw)
        elif type == "distance_only":
            print_distance_error(error, error_lon, error_lat, error_alt)
        elif type == "angle_only":
            print_angle_error(error_roll, error_pitch, error_yaw)
        else:
            print_distance_error(error, error_lon, error_lat, error_alt)
            print_angle_error(error_roll, error_pitch, error_yaw)


def print_distance_error(error, error_lon, error_lat, error_alt):
    print('criteria : mean std max < 30cm < 20cm < 10cm con_frames(>30cm)')
    result = get_stat_from_data(error)
    if len(result) != 6:
        return
    res = get_stat2_from_data(error)
    print('error : %06f %06f %06f %06f %06f %06f %06d' %
          (result[0], result[1], result[2], result[3], result[4], result[5], res[2]))
    result = get_stat_from_data(error_lon)
    res = get_stat2_from_data(error_lon)
    print('error lon: %06f %06f %06f %06f %06f %06f %06d' %
          (result[0], result[1], result[2], result[3], result[4], result[5], res[2]))
    result = get_stat_from_data(error_lat)
    res = get_stat2_from_data(error_lat)
    print('error lat: %06f %06f %06f %06f %06f %06f %06d' %
          (result[0], result[1], result[2], result[3], result[4], result[5], res[2]))
    result = get_stat_from_data(error_alt)
    res = get_stat2_from_data(error_alt)
    print('error alt: %06f %06f %06f %06f %06f %06f %06d' %
          (result[0], result[1], result[2], result[3], result[4], result[5], res[2]))


def print_angle_error(error_roll, error_pitch, error_yaw):
    print('criteria : mean std max < 1.0d < 0.6d < 0.3d con_frames(>1.0d)')
    result = get_angle_stat_from_data(error_roll)
    if len(result) != 6:
        return
    res = get_angle_stat2_from_data(error_roll)
    print("error rol: %06f %06f %06f %06f %06f %06f %06d" %
          (result[0], result[1], result[2], result[3], result[4], result[5], res[0]))
    result = get_angle_stat_from_data(error_pitch)
    res = get_angle_stat2_from_data(error_pitch)
    print("error pit: %06f %06f %06f %06f %06f %06f %06d" %
          (result[0], result[1], result[2], result[3], result[4], result[5], res[0]))
    result = get_angle_stat_from_data(error_yaw)
    res = get_angle_stat2_from_data(error_yaw)
    print("error yaw: %06f %06f %06f %06f %06f %06f %06d" %
          (result[0], result[1], result[2], result[3], result[4], result[5], res[0]))


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('Usage: %s [evaluation file] [evaluation type]' % sys.argv[0])
        sys.exit(0)
    elif not os.path.isfile(sys.argv[1]):
        print('File does not exist')
    elif len(sys.argv) < 3:
        parse_file(sys.argv[1], 'all')
    else:
        parse_file(sys.argv[1], sys.argv[2])
4,532
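A quick way to sanity-check the statistics the script prints is to reproduce the math of get_stat_from_data by hand on a small error series. The sketch below mirrors that logic (mean, standard deviation, max, and the fraction of frames under the 30/20/10 cm thresholds); the sample values are made up for illustration.

import math

import numpy

# Synthetic per-frame position errors in meters (values are illustrative only).
errors = [0.05, 0.12, 0.08, 0.31, 0.22, 0.07, 0.15]

num_data = numpy.array(errors)
mean = num_data.sum() / len(errors)
std = math.sqrt((num_data * num_data).sum() / len(errors) - mean * mean)

stat = [
    mean,
    std,
    num_data.max(),
    (num_data < 0.3).mean(),  # share of frames with error < 30 cm
    (num_data < 0.2).mean(),  # share of frames with error < 20 cm
    (num_data < 0.1).mean(),  # share of frames with error < 10 cm
]
print(stat)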
310
{ "name": "F-5XB", "description": "A camera bag.", "url": "https://www.amazon.com/Domke-F-5XB-Shoulder-Belt-Black/dp/B00009R89L" }
65
369
<filename>inc/osvr/PluginKit/SkeletonInterfaceC.h
/** @file
    @brief Header

    @date 2015

    @author
    Sensics, Inc.
    <http://sensics.com/osvr>
*/

// Copyright 2015 Sensics, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef INCLUDED_SkeletonInterfaceC_h_GUID_69382E69_DFA1_4FB0_5287_D874B740B4C0
#define INCLUDED_SkeletonInterfaceC_h_GUID_69382E69_DFA1_4FB0_5287_D874B740B4C0

/* Internal Includes */
#include <osvr/PluginKit/DeviceInterfaceC.h>
#include <osvr/Util/ChannelCountC.h>
#include <osvr/Util/ClientReportTypesC.h>

// Library/third-party includes
// - none

// Standard includes
// - none

OSVR_EXTERN_C_BEGIN

/** @defgroup PluginKitCSkeleton Skeleton interface (base C API)
    @brief Sends notifications that skeleton reports (aka tracker reports) are
    complete.

    The Skeleton interface is implemented as a device that exposes a tracker
    interface for each skeleton element. For each element, a separate tracker
    report is sent with an updated pose, followed by a skeletonComplete
    notification; the client can then request a complete report of the entire
    skeleton (all reported elements), as well as subsets. Note that since it
    exposes a device via multiple interfaces, you will need to "configure"
    both interfaces (Tracker and Skeleton).

    @ingroup PluginKit
    @{
*/

/** @brief Opaque type used in conjunction with a device token to send data on
    the Skeleton interface.
*/
typedef struct OSVR_SkeletonDeviceInterfaceObject *OSVR_SkeletonDeviceInterface;

/** @brief Specify that your device will implement the Skeleton interface.

    @param opts The device init options object.
    @param [out] iface An interface object you should retain with the same
    lifetime as the device token in order to send messages conforming to a
    Skeleton interface.
    @param jsonDescriptor A device descriptor json that contains the skeleton
    spec.

    Note that the number of skeleton sensors is not the same as the number of
    skeleton elements. For example, if a plugin device reports two hands,
    those will be two separate skeleton sensors, because the hands are not
    connected via a common parent. If a device can report an entire skeleton,
    then it should report only one sensor.
*/
OSVR_PLUGINKIT_EXPORT OSVR_ReturnCode
osvrDeviceSkeletonConfigure(OSVR_INOUT_PTR OSVR_DeviceInitOptions opts,
                            OSVR_OUT_PTR OSVR_SkeletonDeviceInterface *iface,
                            OSVR_IN_READS(len) const char *jsonDescriptor)
    OSVR_FUNC_NONNULL((1, 2));

/** @brief Report that transmission of tracker reports for the given skeleton
    sensor is complete.

    This method should be called after the device reports updated poses for
    the skeleton elements of a given skeleton sensor. For example, if a device
    is reporting two hands (two skeleton sensors), it should report all poses
    for the left hand followed by an osvrDeviceSkeletonComplete(leftSensor)
    call, and then report all poses for the right hand followed by
    osvrDeviceSkeletonComplete(rightSensor). This ensures that the client
    receives consistent, single-frame reports and avoids bone stretching.

    @param iface Skeleton Interface
    @param sensor Sensor number
    @param timestamp The same timestamp as for your tracker reports
*/
OSVR_PLUGINKIT_EXPORT OSVR_ReturnCode
osvrDeviceSkeletonComplete(OSVR_IN_PTR OSVR_SkeletonDeviceInterface iface,
                           OSVR_IN OSVR_ChannelCount sensor,
                           OSVR_IN_PTR OSVR_TimeValue const *timestamp)
    OSVR_FUNC_NONNULL((1, 3));

/** @brief If the device detects another skeleton and/or a change in the
    existing articulation specification, then it needs to update the spec with
    the client as well. During skeleton interface initialization this is
    performed once, using the spec provided in the device descriptor. This
    should be used for unbounded skeletons, where the articulation spec may
    change over time. Devices with bounded skeletons don't need to use this
    method, because the articulation spec remains the same throughout the
    plugin runtime.

    @param iface Skeleton Interface
    @param spec Updated/New Skeleton Articulation Spec in JSON format
*/
OSVR_PLUGINKIT_EXPORT OSVR_ReturnCode
osvrDeviceSkeletonUpdateSpec(OSVR_IN_PTR OSVR_SkeletonDeviceInterface iface,
                             OSVR_IN_READS(len) const char *spec)
    OSVR_FUNC_NONNULL((1, 2));

/**@} */ /* end of group */

OSVR_EXTERN_C_END

#endif // INCLUDED_SkeletonInterfaceC_h_GUID_69382E69_DFA1_4FB0_5287_D874B740B4C0
1,573
335
{ "word": "Quodlibet", "definitions": [ "A topic for or exercise in philosophical or theological discussion.", "A light-hearted medley of well-known tunes." ], "parts-of-speech": "Noun" }
86
665
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.isis.applib.services.xactn;

/**
 * Represents the state of the current transaction.
 *
 * <p>
 * Obtainable from {@link TransactionService#currentTransactionState()}.
 * </p>
 *
 * @since 2.0 {@index}
 */
public enum TransactionState {

    /**
     * No transaction exists.
     */
    NONE,
    /**
     * Started, still in progress.
     *
     * <p>
     * May flush, commit or abort.
     * </p>
     */
    IN_PROGRESS,
    /**
     * Started, but has hit an exception.
     *
     * <p>
     * May not flush or commit (will throw an {@link IllegalStateException}),
     * can only abort.
     * </p>
     */
    MUST_ABORT,
    /**
     * Completed, having successfully committed.
     *
     * <p>
     * May not flush or abort or commit (will throw {@link IllegalStateException}).
     * </p>
     */
    COMMITTED,
    /**
     * Completed, having aborted.
     *
     * <p>
     * May not flush, commit or abort (will throw {@link IllegalStateException}).
     * </p>
     */
    ABORTED
    ;

    /**
     * Whether it is valid to flush the transaction (specifically, if the
     * transaction is {@link #IN_PROGRESS in progress}).
     */
    public boolean canFlush() {
        return this == IN_PROGRESS;
    }

    /**
     * Whether it is valid to commit the transaction (specifically, if the
     * transaction is {@link #IN_PROGRESS in progress}).
     */
    public boolean canCommit() {
        return this == IN_PROGRESS;
    }

    /**
     * Whether it is valid to mark this transaction as aborted.
     *
     * <p>
     * This is the case if the transaction is either currently
     * {@link #IN_PROGRESS in progress} or has already been marked as
     * {@link #MUST_ABORT must abort}.
     * </p>
     */
    public boolean canAbort() {
        return this == IN_PROGRESS || this == MUST_ABORT;
    }

    /**
     * Whether the transaction is complete (that is, is either
     * {@link #COMMITTED committed} or {@link #ABORTED aborted}), and so a new
     * transaction can be started.
     */
    public boolean isComplete() {
        return this == COMMITTED || this == ABORTED;
    }

    /**
     * Whether the transaction is {@link #IN_PROGRESS in progress}.
     */
    public boolean isInProgress() {
        return this == IN_PROGRESS;
    }

    /**
     * Whether the transaction {@link #MUST_ABORT must abort}.
     */
    public boolean mustAbort() {
        return this == MUST_ABORT;
    }

}
1,217
2,151
<reponame>zipated/src<filename>third_party/blink/tools/blinkpy/common/html_diff_unittest.py<gh_stars>1000+ # Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import unittest from blinkpy.common.html_diff import HtmlDiffGenerator, html_diff class TestHtmlDiff(unittest.TestCase): def test_html_diff(self): self.assertEqual( html_diff('one\ntoo\nthree\n', 'one\ntwo\nthree\n'), ('<html>\n' '<head>\n' '<style>\n' 'table { white-space: pre-wrap; font-family: monospace; border-collapse: collapse; }\n' 'th { color: #444; background: #eed; text-align: right; vertical-align: baseline; padding: 1px 4px 1px 4px; }\n' '.del { background: #faa; }\n' '.add { background: #afa; }\n' '</style>\n' '</head>\n' '<body><table>' '<tr><th>1<th>1<td>one\n</tr>' '<tr><th>2<th><td class="del">too\n</tr>' '<tr><th><th>2<td class="add">two\n</tr>' '<tr><th>3<th>3<td>three\n</tr>' '</table></body>\n' '</html>\n')) def test_html_diff_same(self): self.assertEqual( HtmlDiffGenerator().generate_tbody(['one line\n'], ['one line\n']), '<tr><th>1<th>1<td>one line\n</tr>') self.assertEqual( HtmlDiffGenerator().generate_tbody(['<script>\n'], ['<script>\n']), '<tr><th>1<th>1<td>&lt;script&gt;\n</tr>') def test_html_diff_delete(self): self.assertEqual( HtmlDiffGenerator().generate_tbody(['one line\n'], []), '<tr><th>1<th><td class="del">one line\n</tr>') self.assertEqual( HtmlDiffGenerator().generate_tbody(['</pre>\n'], []), '<tr><th>1<th><td class="del">&lt;/pre&gt;\n</tr>') def test_html_diff_insert(self): self.assertEqual( HtmlDiffGenerator().generate_tbody([], ['one line\n']), '<tr><th><th>1<td class="add">one line\n</tr>') self.assertEqual( HtmlDiffGenerator().generate_tbody([], ['<!--\n']), '<tr><th><th>1<td class="add">&lt;!--\n</tr>') def test_html_diff_ending_newline(self): self.assertEqual( HtmlDiffGenerator().generate_tbody(['one line'], ['one line\n']), '<tr><th>1<th><td class="del">one line</tr><tr><th><th>1<td class="add">one line\n</tr>') def test_html_diff_replace_multiple_lines(self): a_lines = [ '1. Beautiful is better than ugly.\n', '2. Explicit is better than implicit.\n', '3. Simple is better than complex.\n', '4. Complex is better than complicated.\n', ] b_lines = [ '1. Beautiful is better than ugly.\n', '3. Simple is better than complex.\n', '4. Complicated is better than complex.\n', '5. Flat is better than nested.\n', ] self.assertEqual(HtmlDiffGenerator().generate_tbody(a_lines, b_lines), ( '<tr><th>1<th>1<td>1. Beautiful is better than ugly.\n</tr>' '<tr><th>2<th><td class="del">2. Explicit is better than implicit.\n</tr>' '<tr><th>3<th><td class="del">3. Simple is better than complex.\n</tr>' '<tr><th>4<th><td class="del">4. Complex is better than complicated.\n</tr>' '<tr><th><th>2<td class="add">3. Simple is better than complex.\n</tr>' '<tr><th><th>3<td class="add">4. Complicated is better than complex.\n</tr>' '<tr><th><th>4<td class="add">5. 
Flat is better than nested.\n</tr>')) def test_html_diff_context(self): a_lines = [ 'line1\n', 'line2\n', 'line3\n', 'line4\n', 'line5\n', 'line6\n', 'line7\n', 'line8\n', 'line9a\n', 'line10\n', 'line11\n', 'line12\n', 'line13\n', 'line14\n', 'line15a\n', 'line16\n', 'line17\n', 'line18\n', 'line19\n', 'line20\n', 'line21\n', 'line22\n', 'line23\n', ] b_lines = [ 'line1\n', 'line2\n', 'line3\n', 'line4\n', 'line5\n', 'line6\n', 'line7\n', 'line8\n', 'line9b\n', 'line10\n', 'line11\n', 'line12\n', 'line13\n', 'line14\n', 'line15b\n', 'line16\n', 'line17\n', 'line18\n', 'line19\n', 'line20\n', 'line21\n', 'line22\n', 'line23\n', ] self.assertEqual(HtmlDiffGenerator().generate_tbody(a_lines, b_lines), ( '<tr><td colspan=3>\n\n</tr>' '<tr><th>6<th>6<td>line6\n</tr>' '<tr><th>7<th>7<td>line7\n</tr>' '<tr><th>8<th>8<td>line8\n</tr>' '<tr><th>9<th><td class="del">line9a\n</tr>' '<tr><th><th>9<td class="add">line9b\n</tr>' '<tr><th>10<th>10<td>line10\n</tr>' '<tr><th>11<th>11<td>line11\n</tr>' '<tr><th>12<th>12<td>line12\n</tr>' '<tr><th>13<th>13<td>line13\n</tr>' '<tr><th>14<th>14<td>line14\n</tr>' '<tr><th>15<th><td class="del">line15a\n</tr>' '<tr><th><th>15<td class="add">line15b\n</tr>' '<tr><th>16<th>16<td>line16\n</tr>' '<tr><th>17<th>17<td>line17\n</tr>' '<tr><th>18<th>18<td>line18\n</tr>' '<tr><td colspan=3>\n\n</tr>')) def test_html_diff_context_at_edge(self): a_lines = [ 'line1\n', 'line2\n', 'line3\n', 'line4\n', 'line5\n', 'line6\n', 'line7\n', 'line8\n', ] b_lines = [ 'line0\n', 'line1\n', 'line2\n', 'line3\n', 'line4\n', 'line5\n', 'line6\n', 'line7\n', 'line8\n', 'line9\n', ] self.assertEqual(HtmlDiffGenerator().generate_tbody(a_lines, b_lines), ( '<tr><th><th>1<td class="add">line0\n</tr>' '<tr><th>1<th>2<td>line1\n</tr>' '<tr><th>2<th>3<td>line2\n</tr>' '<tr><th>3<th>4<td>line3\n</tr>' '<tr><td colspan=3>\n\n</tr>' '<tr><th>6<th>7<td>line6\n</tr>' '<tr><th>7<th>8<td>line7\n</tr>' '<tr><th>8<th>9<td>line8\n</tr>' '<tr><th><th>10<td class="add">line9\n</tr>'))
4,132
2,201
<gh_stars>1000+
"""
Copyright (c) 2018-2022 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from collections import defaultdict
import numpy as np
from .metric import PerImageEvaluationMetric
from ..config import StringField, BoolField, ConfigError
from ..representation import (
    SequenceClassificationAnnotation,
    BERTNamedEntityRecognitionAnnotation,
    SequenceClassificationPrediction
)


def align_sequences(gt_seq, pred_seq, label_map, convert_token_ids=True, label_mask=None, valid_ids=None):
    aligned_gt, aligned_pred = [], []
    if label_mask is not None and valid_ids is not None:
        gt_seq = np.array(gt_seq)[label_mask]
        pred_seq = np.array(pred_seq)[valid_ids]
    start_id = 0 if gt_seq[0] in label_map and label_map[gt_seq[0]] != '[CLS]' else 1
    for gt_tok, pred_tok in zip(gt_seq[start_id:], pred_seq[start_id:]):
        if gt_tok not in label_map:
            continue
        if gt_tok == len(label_map):
            break
        aligned_gt.append(gt_tok if not convert_token_ids else label_map[gt_tok])
        aligned_pred.append(pred_tok if not convert_token_ids else label_map.get(pred_tok, '[unk]'))
    return aligned_gt, aligned_pred


def _prf_divide(numerator, denominator):
    mask = denominator == 0.0
    denominator = denominator.copy()
    denominator[mask] = 1  # avoid infs/nans
    result = numerator / denominator

    if not np.any(mask):
        return result

    # entries with a zero denominator are defined to be 0
    result[mask] = 0.0
    return result


def _precision_recall_fscore_support(pred_sum, tp_sum, true_sum):
    precision = _prf_divide(
        numerator=tp_sum,
        denominator=pred_sum
    )
    recall = _prf_divide(
        numerator=tp_sum,
        denominator=true_sum,
    )
    denom = precision + recall
    denom[denom == 0] = 1  # avoid division by 0
    f_score = 2 * precision * recall / denom
    return precision, recall, f_score


def extract_tp_actual_correct(y_true, y_pred):
    entities_true = defaultdict(set)
    entities_pred = defaultdict(set)
    for type_name, start, end in get_entities(y_true):
        entities_true[type_name].add((start, end))
    for type_name, start, end in get_entities(y_pred):
        entities_pred[type_name].add((start, end))

    target_names = sorted(set(entities_true.keys()) | set(entities_pred.keys()))

    tp_sum = defaultdict(lambda: 0)
    pred_sum = defaultdict(lambda: 0)
    true_sum = defaultdict(lambda: 0)
    for type_name in target_names:
        entities_true_type = entities_true.get(type_name, set())
        entities_pred_type = entities_pred.get(type_name, set())
        tp_sum[type_name] += len(entities_true_type & entities_pred_type)
        pred_sum[type_name] += len(entities_pred_type)
        true_sum[type_name] += len(entities_true_type)

    return pred_sum, tp_sum, true_sum, target_names


def get_entities(seq, suffix=False):
    if any(isinstance(s, list) for s in seq):
        seq = [item for sublist in seq for item in sublist + ['O']]

    prev_tag = 'O'
    prev_type = ''
    begin_offset = 0
    chunks = []
    for i, chunk in enumerate(seq + ['O']):
        if suffix:
            tag = chunk[-1]
            type_ = chunk[:-1].rsplit('-', maxsplit=1)[0] or '_'
        else:
            tag = chunk[0]
            type_ = chunk[1:].split('-', maxsplit=1)[-1] or '_'

        if end_of_chunk(prev_tag, tag, prev_type, type_):
            chunks.append((prev_type, begin_offset, i - 1))
        if start_of_chunk(prev_tag, tag, prev_type, type_):
            begin_offset = i
        prev_tag = tag
        prev_type = type_

    return chunks


def end_of_chunk(prev_tag, tag, prev_type, type_):
    chunk_end = False

    if prev_tag == 'E':
        chunk_end = True
    if prev_tag == 'S':
        chunk_end = True

    if prev_tag == 'B' and tag == 'B':
        chunk_end = True
    if prev_tag == 'B' and tag == 'S':
        chunk_end = True
    if prev_tag == 'B' and tag == 'O':
        chunk_end = True
    if prev_tag == 'I' and tag == 'B':
        chunk_end = True
    if prev_tag == 'I' and tag == 'S':
        chunk_end = True
    if prev_tag == 'I' and tag == 'O':
        chunk_end = True

    if prev_tag != 'O' and prev_tag != '.' and prev_type != type_:
        chunk_end = True

    return chunk_end


def start_of_chunk(prev_tag, tag, prev_type, type_):
    chunk_start = False

    if tag == 'B':
        chunk_start = True
    if tag == 'S':
        chunk_start = True

    if prev_tag == 'E' and tag == 'E':
        chunk_start = True
    if prev_tag == 'E' and tag == 'I':
        chunk_start = True
    if prev_tag == 'S' and tag == 'E':
        chunk_start = True
    if prev_tag == 'S' and tag == 'I':
        chunk_start = True
    if prev_tag == 'O' and tag == 'E':
        chunk_start = True
    if prev_tag == 'O' and tag == 'I':
        chunk_start = True

    if tag != 'O' and tag != '.' and prev_type != type_:
        chunk_start = True

    return chunk_start


class NERAccuracy(PerImageEvaluationMetric):
    __provider__ = 'ner_accuracy'
    annotation_types = (SequenceClassificationAnnotation, BERTNamedEntityRecognitionAnnotation,)
    prediction_types = (SequenceClassificationPrediction, )

    @classmethod
    def parameters(cls):
        parameters = super().parameters()
        parameters.update({
            'label_map': StringField(optional=True, default='label_map', description="Label map."),
            'include_all_tokens': BoolField(
                optional=True, default=False,
                description='whether all tokens should be considered during metric calculation or not'
            )
        })
        return parameters

    def configure(self):
        label_map = self.get_value_from_config('label_map')
        if self.dataset.metadata:
            self.labels = self.dataset.metadata.get(label_map)
            if not self.labels:
                raise ConfigError('ner_accuracy metric requires label_map provided in dataset_meta. '
                                  'Please provide dataset meta file or regenerate annotation')
        else:
            raise ConfigError('ner_accuracy metric requires dataset metadata. '
                              'Please provide dataset meta file or regenerate annotation')
        self.include_all_tokens = self.get_value_from_config('include_all_tokens')
        self.correct = 0
        self.total = 0

    def update(self, annotation, prediction):
        gt_seq = annotation.label
        pred_seq = prediction.label
        label_mask = annotation.label_mask if not self.include_all_tokens else None
        valid_ids = annotation.valid_ids if not self.include_all_tokens else None
        y_true, y_pred = align_sequences(
            gt_seq, pred_seq, self.labels, False, label_mask, valid_ids
        )
        nb_correct = sum(y_t == y_p for y_t, y_p in zip(y_true, y_pred))
        nb_true = len(y_true)
        self.correct += nb_correct
        self.total += nb_true

        return nb_correct / nb_true if nb_true else 0

    def evaluate(self, annotations, predictions):
        return self.correct / self.total if self.total else 0

    def reset(self):
        self.correct = 0
        self.total = 0


class NERPrecision(PerImageEvaluationMetric):
    __provider__ = 'ner_recall'
    annotation_types = (SequenceClassificationAnnotation, BERTNamedEntityRecognitionAnnotation,)
    prediction_types = (SequenceClassificationPrediction, )

    @classmethod
    def parameters(cls):
        parameters = super().parameters()
        parameters.update({
            'label_map': StringField(optional=True, default='label_map', description="Label map."),
            'include_all_tokens': BoolField(
                optional=True, default=False,
                description='whether all tokens should be considered during metric calculation or not'
            )})
        return parameters

    def configure(self):
        label_map = self.get_value_from_config('label_map')
        if self.dataset.metadata:
            self.labels = self.dataset.metadata.get(label_map)
            if not self.labels:
                raise ConfigError('ner_recall metric requires label_map provided in dataset_meta. '
                                  'Please provide dataset meta file or regenerate annotation')
        else:
            raise ConfigError('ner_recall metric requires dataset metadata. '
                              'Please provide dataset meta file or regenerate annotation')
        self.reset()
        self.include_all_tokens = self.get_value_from_config('include_all_tokens')

    def update(self, annotation, prediction):
        gt_seq = annotation.label
        pred_seq = prediction.label
        label_mask = annotation.label_mask if not self.include_all_tokens else None
        valid_ids = annotation.valid_ids if not self.include_all_tokens else None
        # convert token ids to tag strings so that get_entities can recover chunks
        y_true, y_pred = align_sequences(gt_seq, pred_seq, self.labels, True, label_mask, valid_ids)
        pred_sum, tp_sum, true_sum, target_names = extract_tp_actual_correct(y_true, y_pred)
        for type_name in target_names:
            self.tp_sum[type_name] += tp_sum[type_name]
            self.pred_sum[type_name] += pred_sum[type_name]
            self.true_sum[type_name] += true_sum[type_name]
        pred_sum_arr = np.array(list(pred_sum.values()))
        tp_sum_arr = np.array(list(tp_sum.values()))
        true_sum_arr = np.array(list(true_sum.values()))
        _, r, _ = _precision_recall_fscore_support(pred_sum_arr, tp_sum_arr, true_sum_arr)

        return r

    def evaluate(self, annotations, predictions):
        pred_sum_arr = np.array(list(self.pred_sum.values()))
        tp_sum_arr = np.array(list(self.tp_sum.values()))
        true_sum_arr = np.array(list(self.true_sum.values()))
        _, recall, _ = _precision_recall_fscore_support(pred_sum_arr, tp_sum_arr, true_sum_arr)
        self.meta['names'] = list(self.pred_sum.keys())
        return recall

    def reset(self):
        self.tp_sum = defaultdict(lambda: 0)
        self.pred_sum = defaultdict(lambda: 0)
        self.true_sum = defaultdict(lambda: 0)


class NERRecall(PerImageEvaluationMetric):
    __provider__ = 'ner_precision'
    annotation_types = (SequenceClassificationAnnotation, BERTNamedEntityRecognitionAnnotation,)
    prediction_types = (SequenceClassificationPrediction, )

    @classmethod
    def parameters(cls):
        parameters = super().parameters()
        parameters.update({
            'label_map': StringField(optional=True, default='label_map', description="Label map."),
            'include_all_tokens': BoolField(
                optional=True, default=False,
                description='whether all tokens should be considered during metric calculation or not'
            )
        })
        return parameters

    def configure(self):
        label_map = self.get_value_from_config('label_map')
        if self.dataset.metadata:
            self.labels = self.dataset.metadata.get(label_map)
            if not self.labels:
                raise ConfigError('ner_precision metric requires label_map provided in dataset_meta. '
                                  'Please provide dataset meta file or regenerate annotation')
        else:
            raise ConfigError('ner_precision metric requires dataset metadata. '
                              'Please provide dataset meta file or regenerate annotation')
        self.reset()
        self.include_all_tokens = self.get_value_from_config('include_all_tokens')

    def update(self, annotation, prediction):
        gt_seq = annotation.label
        pred_seq = prediction.label
        label_mask = annotation.label_mask if not self.include_all_tokens else None
        valid_ids = annotation.valid_ids if not self.include_all_tokens else None
        # convert token ids to tag strings so that get_entities can recover chunks
        y_true, y_pred = align_sequences(gt_seq, pred_seq, self.labels, True, label_mask, valid_ids)
        pred_sum, tp_sum, true_sum, target_names = extract_tp_actual_correct(y_true, y_pred)
        for type_name in target_names:
            self.tp_sum[type_name] += tp_sum[type_name]
            self.pred_sum[type_name] += pred_sum[type_name]
            self.true_sum[type_name] += true_sum[type_name]
        pred_sum_arr = np.array(list(pred_sum.values()))
        tp_sum_arr = np.array(list(tp_sum.values()))
        true_sum_arr = np.array(list(true_sum.values()))
        p, _, _ = _precision_recall_fscore_support(pred_sum_arr, tp_sum_arr, true_sum_arr)

        return p

    def evaluate(self, annotations, predictions):
        pred_sum_arr = np.array(list(self.pred_sum.values()))
        tp_sum_arr = np.array(list(self.tp_sum.values()))
        true_sum_arr = np.array(list(self.true_sum.values()))
        precision, _, _ = _precision_recall_fscore_support(pred_sum_arr, tp_sum_arr, true_sum_arr)
        self.meta['names'] = list(self.pred_sum.keys())
        return precision

    def reset(self):
        self.tp_sum = defaultdict(lambda: 0)
        self.pred_sum = defaultdict(lambda: 0)
        self.true_sum = defaultdict(lambda: 0)


class NERFScore(PerImageEvaluationMetric):
    __provider__ = 'ner_f_score'
    annotation_types = (SequenceClassificationAnnotation, BERTNamedEntityRecognitionAnnotation,)
    prediction_types = (SequenceClassificationPrediction, )

    @classmethod
    def parameters(cls):
        parameters = super().parameters()
        parameters.update({
            'label_map': StringField(optional=True, default='label_map', description="Label map."),
            'include_all_tokens': BoolField(
                optional=True, default=False,
                description='whether all tokens should be considered during metric calculation or not'
            )
        })
        return parameters

    def configure(self):
        label_map = self.get_value_from_config('label_map')
        if self.dataset.metadata:
            self.labels = self.dataset.metadata.get(label_map)
            if not self.labels:
                raise ConfigError('ner_f_score metric requires label_map provided in dataset_meta. '
                                  'Please provide dataset meta file or regenerate annotation')
        else:
            raise ConfigError('ner_f_score metric requires dataset metadata. '
                              'Please provide dataset meta file or regenerate annotation')
        self.reset()
        self.include_all_tokens = self.get_value_from_config('include_all_tokens')

    def update(self, annotation, prediction):
        gt_seq = annotation.label
        pred_seq = prediction.label
        label_mask = annotation.label_mask if not self.include_all_tokens else None
        valid_ids = annotation.valid_ids if not self.include_all_tokens else None
        y_true, y_pred = align_sequences(gt_seq, pred_seq, self.labels, True, label_mask, valid_ids)
        pred_sum, tp_sum, true_sum, target_names = extract_tp_actual_correct(y_true, y_pred)
        for type_name in target_names:
            self.tp_sum[type_name] += tp_sum[type_name]
            self.pred_sum[type_name] += pred_sum[type_name]
            self.true_sum[type_name] += true_sum[type_name]
        pred_sum_arr = np.array(list(pred_sum.values()))
        tp_sum_arr = np.array(list(tp_sum.values()))
        true_sum_arr = np.array(list(true_sum.values()))
        _, _, f = _precision_recall_fscore_support(pred_sum_arr, tp_sum_arr, true_sum_arr)

        return f

    def evaluate(self, annotations, predictions):
        pred_sum_arr = np.array(list(self.pred_sum.values()))
        tp_sum_arr = np.array(list(self.tp_sum.values()))
        true_sum_arr = np.array(list(self.true_sum.values()))
        _, _, f_score = _precision_recall_fscore_support(pred_sum_arr, tp_sum_arr, true_sum_arr)
        self.meta['names'] = list(self.pred_sum.keys())
        return f_score

    def reset(self):
        self.tp_sum = defaultdict(lambda: 0)
        self.pred_sum = defaultdict(lambda: 0)
        self.true_sum = defaultdict(lambda: 0)
7,011
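All four metric classes above funnel through extract_tp_actual_correct, which compares the entity spans recovered by get_entities. The sketch below is a simplified, self-contained illustration of that span-matching idea for plain IOB2 tags only (it does not handle the E/S tags of IOBES schemes); the tag sequences are made up for the example.

def iob2_entities(seq):
    """Collect (type, start, end) spans from an IOB2 tag sequence."""
    chunks = []
    start, cur_type = None, None
    for i, tag in enumerate(list(seq) + ['O']):
        prefix, _, type_ = tag.partition('-')
        # close the open span on O, on a fresh B, or on a type change
        if start is not None and (prefix in ('O', 'B') or type_ != cur_type):
            chunks.append((cur_type, start, i - 1))
            start = None
        # open a new span on B, or leniently on a dangling I
        if prefix == 'B' or (prefix == 'I' and start is None):
            start, cur_type = i, type_
    return chunks

y_true = ['B-PER', 'I-PER', 'O', 'B-LOC']
y_pred = ['B-PER', 'I-PER', 'O', 'O']

true_spans = set(iob2_entities(y_true))  # {('PER', 0, 1), ('LOC', 3, 3)}
pred_spans = set(iob2_entities(y_pred))  # {('PER', 0, 1)}
tp = len(true_spans & pred_spans)
print(tp / len(pred_spans))  # precision: 1.0
print(tp / len(true_spans))  # recall: 0.5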
1,109
<reponame>LuShengDong/wasabi
/*******************************************************************************
 * Copyright 2016 Intuit
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package com.intuit.wasabi;

import com.intuit.autumn.metrics.MetricsModule;
import com.intuit.autumn.service.ServiceManager;
import com.intuit.wasabi.api.ApiModule;
import com.intuit.wasabi.eventlog.EventLogService;
import org.joda.time.DateTimeZone;
import org.slf4j.Logger;

import java.util.TimeZone;

import static com.intuit.autumn.metrics.MetricsServices.getEnabledMetricsServices;
import static com.intuit.autumn.web.WebServices.getEnabledWebServices;
import static java.util.TimeZone.getTimeZone;
import static org.joda.time.DateTimeZone.UTC;
import static org.slf4j.LoggerFactory.getLogger;

public class Main {

    private static final Logger LOGGER = getLogger(MetricsModule.class);

    // Stop people from accidentally creating a new Main object
    private Main() {
    }

    /**
     * Application entry point.
     *
     * @param args application arguments
     * @throws Exception unintended exception
     */
    public static void main(String[] args) throws Exception {
        LOGGER.info("starting {}", Main.class.getSimpleName());

        TimeZone.setDefault(getTimeZone("UTC"));
        DateTimeZone.setDefault(UTC);

        ServiceManager serviceManager = new ServiceManager()
                .addModules(ApiModule.class, MetricsModule.class)
                .addServices(getEnabledWebServices())
                .addServices(getEnabledMetricsServices())
                .addServices(EventLogService.class);

        serviceManager.start();

        LOGGER.info("started {}", Main.class.getSimpleName());
    }
}
738
1,675
<reponame>LaudateCorpus1/AnExplorer<filename>app/src/main/java/dev/dworks/apps/anexplorer/adapter/CommonInfo.java package dev.dworks.apps.anexplorer.adapter; import android.content.Context; import android.database.Cursor; import android.graphics.drawable.Drawable; import android.net.Uri; import java.io.File; import androidx.core.content.ContextCompat; import dev.dworks.apps.anexplorer.misc.IconUtils; import dev.dworks.apps.anexplorer.model.DocumentInfo; import dev.dworks.apps.anexplorer.model.RootInfo; import static dev.dworks.apps.anexplorer.adapter.HomeAdapter.TYPE_RECENT; public class CommonInfo { public int type; public DocumentInfo documentInfo; public RootInfo rootInfo; public static CommonInfo from(RootInfo rootInfo, int type){ CommonInfo commonInfo = new CommonInfo(); commonInfo.type = type; commonInfo.rootInfo = rootInfo; return commonInfo; } public static CommonInfo from(DocumentInfo documentInfo, int type){ CommonInfo commonInfo = new CommonInfo(); commonInfo.type = type; commonInfo.documentInfo = documentInfo; return commonInfo; } public static CommonInfo from(Cursor cursor){ DocumentInfo documentInfo = DocumentInfo.fromDirectoryCursor(cursor); CommonInfo commonInfo = from(documentInfo, TYPE_RECENT); return commonInfo; } }
487
1,691
package com.kodedu.controller; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.ResponseBody; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import static org.springframework.web.bind.annotation.RequestMethod.*; /** * Created by usta on 02.09.2015. */ @Controller public class AllController { private final CommonResource commonResource; private Logger logger = LoggerFactory.getLogger(AllController.class); @Autowired public AllController(CommonResource commonResource) { this.commonResource = commonResource; } @RequestMapping(value = {"/**/*.*", "*.*"}, method = {GET, HEAD, OPTIONS, POST}, produces = "*/*", consumes = "*/*") @ResponseBody public void all(HttpServletRequest request, HttpServletResponse response) { Payload payload = new Payload(request, response); commonResource.processPayload(payload); } }
378
304
<filename>app/oauth_office365/tests.py<gh_stars>100-1000 from django.test import TestCase # Create your tests here. # TODO add test to validate application scopes are enforced when creating # TODO add test to validate that application redirect and refresh URL match the site's base url # TODO add unicode handling tests # TODO test large attachments # TODO basic tests for getting and deleting messages, attachments
105
1,338
#include <Application.h> #include <TextView.h> #include <Window.h> class window : public BWindow { public: window() : BWindow(BRect(30, 30, 300, 300), "BTextView test", B_DOCUMENT_WINDOW, B_ASYNCHRONOUS_CONTROLS) { BTextView *textview = new BTextView(Bounds(), "textview", Bounds(), B_FOLLOW_ALL, B_WILL_DRAW); AddChild(textview); textview->SetText("Type into the Haiku BTextView!"); textview->MakeFocus(); } }; class application : public BApplication { public: application() :BApplication("application/x-vnd.test") { } virtual void ReadyToRun() { (new window())->Show(); } }; int main() { application app; app.Run(); return 0; }
277
646
package com.dimple.project.home.domain; import lombok.AllArgsConstructor; import lombok.Data; /** * <p> * description * </p> * * @author Dimple * @date 06/05/20 10:15 */ @Data @AllArgsConstructor public class KeyValue { private String key; private Long value; }
104
4,895
'''
This demo shows the parameter estimation of HMMs via the Baum-Welch algorithm
on the occasionally dishonest casino example.
Author : <NAME>(@karalleyna)
'''

import superimport

import jax.numpy as jnp
from jax.random import split, PRNGKey, randint

import numpy as np

from hmm_discrete_lib import HMMNumpy, HMMJax, hmm_sample_jax
from hmm_discrete_lib import hmm_plot_graphviz

from hmm_discrete_em_lib import init_random_params_jax
from hmm_discrete_em_lib import hmm_em_numpy, hmm_em_jax

import hmm_utils
import time

A = jnp.array([
    [0.95, 0.05],
    [0.10, 0.90]
])

# observation matrix
B = jnp.array([
    [1/6, 1/6, 1/6, 1/6, 1/6, 1/6],  # fair die
    [1/10, 1/10, 1/10, 1/10, 1/10, 5/10]  # loaded die
])

pi = jnp.array([1, 1]) / 2

seed = 100
rng_key = PRNGKey(seed)
rng_key, rng_sample, rng_batch, rng_init = split(rng_key, 4)

casino = HMMJax(A, B, pi)
n_obs_seq, batch_size, max_len = 5, 5, 3000

observations, lens = hmm_utils.hmm_sample_n(casino,
                                            hmm_sample_jax,
                                            n_obs_seq, max_len,
                                            rng_sample)

observations, lens = hmm_utils.pad_sequences(observations, lens)

n_hidden, n_obs = B.shape
params_jax = init_random_params_jax([n_hidden, n_obs], rng_key=rng_init)
params_numpy = HMMNumpy(np.array(params_jax.trans_mat),
                        np.array(params_jax.obs_mat),
                        np.array(params_jax.init_dist))

num_epochs = 20

start = time.time()
params_numpy, neg_ll_numpy = hmm_em_numpy(np.array(observations),
                                          np.array(lens),
                                          num_epochs=num_epochs,
                                          init_params=params_numpy)
print(f'Time taken by numpy version of EM : {time.time()-start}s')

start = time.time()
params_jax, neg_ll_jax = hmm_em_jax(observations, lens,
                                    num_epochs=num_epochs,
                                    init_params=params_jax)
print(f'Time taken by JAX version of EM : {time.time()-start}s')

assert jnp.allclose(np.array(neg_ll_jax), np.array(neg_ll_numpy), 4)

print(f' Negative loglikelihoods : {neg_ll_jax}')

hmm_utils.plot_loss_curve(neg_ll_numpy, "EM Numpy")
hmm_utils.plot_loss_curve(neg_ll_jax, "EM JAX")

states, observations = ['Fair Dice', 'Loaded Dice'], [str(i+1) for i in range(B.shape[1])]
hmm_plot_graphviz(params_numpy, 'hmm_casino_train_np', states, observations)
hmm_plot_graphviz(params_jax, 'hmm_casino_train_jax', states, observations)
1,359
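The log-likelihoods that EM maximizes above can be evaluated independently with a scaled forward pass over the true casino model. This is a minimal numpy sketch: the A, B, pi values are copied from the demo, die faces are encoded 0..5 to match the columns of B, and the roll sequence is made up for illustration.

import numpy as np

# Model parameters copied from the demo above.
A = np.array([[0.95, 0.05],
              [0.10, 0.90]])
B = np.array([[1 / 6] * 6,
              [1 / 10] * 5 + [5 / 10]])
pi = np.array([0.5, 0.5])

def forward_loglik(obs):
    """Scaled forward algorithm: log p(obs) under the HMM (A, B, pi)."""
    alpha = pi * B[:, obs[0]]
    log_lik = np.log(alpha.sum())
    alpha = alpha / alpha.sum()
    for o in obs[1:]:
        alpha = (alpha @ A) * B[:, o]
        log_lik += np.log(alpha.sum())
        alpha = alpha / alpha.sum()
    return log_lik

rolls = [5, 5, 5, 2, 0, 3]  # illustrative die rolls, faces encoded 0..5
print(forward_loglik(rolls))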
1,011
<reponame>Said6289/SPlisHSPlasH #include "common.h" #include <SPlisHSPlasH/Common.h> #include <SPlisHSPlasH/SPHKernels.h> #include <pybind11/pybind11.h> #include <pybind11/eigen.h> namespace py = pybind11; template <typename... Args> using overload_cast_ = pybind11::detail::overload_cast_impl<Args...>; template<class T> py::class_<T> define_kernel(py::module m_sub, const char* name) { auto cl = py::class_<T>(m_sub, name) .def(py::init<>()) .def_static("getRadius", &T::getRadius) .def_static("setRadius", &T::setRadius) .def_static("W", overload_cast_<const Real>()(&T::W)) .def_static("W", overload_cast_<const Vector3r&>()(&T::W)) .def_static("gradW", &T::gradW) .def_static("W_zero", &T::W_zero); return cl; } template<class T> py::class_<T> define_kernel_no_grad(py::module m_sub, const char* name) { auto cl = py::class_<T>(m_sub, name) .def(py::init<>()) .def_static("getRadius", &T::getRadius) .def_static("setRadius", &T::setRadius) .def_static("W", overload_cast_<const Real>()(&T::W)) .def_static("W", overload_cast_<const Vector3r&>()(&T::W)) .def_static("W_zero", &T::W_zero); return cl; } void SPHKernelsModule(py::module m) { auto m_sub = m.def_submodule("SPHKernels"); auto m_sub_sub = m_sub.def_submodule("Precomputed"); // Cubic Kernel auto kernel = define_kernel<SPH::CubicKernel>(m_sub, "CubicKernel"); kernel = define_kernel<SPH::PrecomputedKernel<SPH::CubicKernel>>(m_sub_sub, "CubicKernel"); // Cubic Kernel 2D kernel = define_kernel<SPH::CubicKernel2D>(m_sub, "CubicKernel2D"); kernel = define_kernel<SPH::PrecomputedKernel<SPH::CubicKernel2D>>(m_sub_sub, "CubicKernel2D"); // Poly6 Kernel kernel = define_kernel<SPH::Poly6Kernel>(m_sub, "Poly6Kernel"); kernel.def_static("laplacianW", &SPH::Poly6Kernel::laplacianW); kernel = define_kernel<SPH::PrecomputedKernel<SPH::Poly6Kernel>>(m_sub_sub, "Poly6Kernel"); // Spiky Kernel kernel = define_kernel<SPH::SpikyKernel>(m_sub, "SpikyKernel"); kernel = define_kernel<SPH::PrecomputedKernel<SPH::SpikyKernel>>(m_sub_sub, "SpikyKernel"); // WendlandQuinticC2 Kernel kernel = define_kernel<SPH::WendlandQuinticC2Kernel>(m_sub, "WendlandQuinticC2Kernel"); kernel = define_kernel<SPH::PrecomputedKernel<SPH::WendlandQuinticC2Kernel>>(m_sub_sub, "WendlandQuinticC2Kernel"); // WendlandQuinticC2 Kernel 2D kernel = define_kernel<SPH::WendlandQuinticC2Kernel2D>(m_sub, "WendlandQuinticC2Kernel2D"); kernel = define_kernel<SPH::PrecomputedKernel<SPH::WendlandQuinticC2Kernel2D>>(m_sub_sub, "WendlandQuinticC2Kernel2D"); // Cohesion Kernel kernel = define_kernel_no_grad<SPH::CohesionKernel>(m_sub, "CohesionKernel"); // Adhesion Kernel kernel = define_kernel_no_grad<SPH::AdhesionKernel>(m_sub, "AdhesionKernel"); }
1,168
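For context, once these bindings are compiled, each kernel is reachable from Python under the SPHKernels submodule. Below is a hypothetical usage sketch; the top-level module name pysplishsplash is an assumption that depends on how the extension is built, but the attribute and method names match the binding definitions above.

import pysplishsplash as sph  # assumed module name; depends on the build setup

K = sph.SPHKernels.CubicKernel
K.setRadius(0.1)  # static support radius shared by all uses of this kernel

print(K.getRadius())              # 0.1
print(K.W_zero())                 # kernel value at r = 0
print(K.W(0.05))                  # scalar-distance overload
print(K.gradW([0.05, 0.0, 0.0]))  # gradient overload takes a 3-vector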
3,390
<reponame>Noahhoetger2001/test-infra # defusedxml # # Copyright (c) 2013 by <NAME> <<EMAIL>> # Licensed to PSF under a Contributor Agreement. # See http://www.python.org/psf/license for licensing details. """Defused xml.etree.cElementTree """ from __future__ import absolute_import from xml.etree.cElementTree import TreeBuilder as _TreeBuilder from xml.etree.cElementTree import parse as _parse from xml.etree.cElementTree import tostring # iterparse from ElementTree! from xml.etree.ElementTree import iterparse as _iterparse from .ElementTree import DefusedXMLParser, _IterParseIterator from .common import _generate_etree_functions __origin__ = "xml.etree.cElementTree" XMLTreeBuilder = XMLParse = DefusedXMLParser parse, iterparse, fromstring = _generate_etree_functions(DefusedXMLParser, _TreeBuilder, _IterParseIterator, _parse, _iterparse) XML = fromstring
284
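The point of this defused drop-in module is that parsing untrusted XML fails loudly instead of expanding entities. A small usage sketch follows; the entity-bomb payload is illustrative.

from defusedxml.cElementTree import fromstring
from defusedxml.common import EntitiesForbidden

BOMB = """<?xml version="1.0"?>
<!DOCTYPE bomb [ <!ENTITY a "aaaa"> ]>
<bomb>&a;</bomb>"""

try:
    fromstring(BOMB)
except EntitiesForbidden:
    print("entity definitions rejected, as intended")

# Well-formed input without entity declarations parses normally.
root = fromstring("<doc><item>ok</item></doc>")
print(root[0].text)  # ok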
1,144
<filename>backend/de.metas.adempiere.adempiere/base/src/main/java-gen/org/compiere/model/X_C_InvoiceTax.java /** Generated Model - DO NOT CHANGE */ package org.compiere.model; import java.math.BigDecimal; import java.sql.ResultSet; import java.util.Properties; import org.compiere.util.Env; /** Generated Model for C_InvoiceTax * @author Adempiere (generated) */ @SuppressWarnings("javadoc") public class X_C_InvoiceTax extends org.compiere.model.PO implements I_C_InvoiceTax, org.compiere.model.I_Persistent { /** * */ private static final long serialVersionUID = -131732838L; /** Standard Constructor */ public X_C_InvoiceTax (Properties ctx, int C_InvoiceTax_ID, String trxName) { super (ctx, C_InvoiceTax_ID, trxName); /** if (C_InvoiceTax_ID == 0) { setC_Invoice_ID (0); setC_InvoiceTax_ID (0); setC_Tax_ID (0); setIsPackagingTax (false); // N setIsTaxIncluded (false); setIsWholeTax (false); // N setProcessed (false); setTaxAmt (Env.ZERO); setTaxBaseAmt (Env.ZERO); } */ } /** Load Constructor */ public X_C_InvoiceTax (Properties ctx, ResultSet rs, String trxName) { super (ctx, rs, trxName); } /** Load Meta Data */ @Override protected org.compiere.model.POInfo initPO (Properties ctx) { org.compiere.model.POInfo poi = org.compiere.model.POInfo.getPOInfo (ctx, Table_Name, get_TrxName()); return poi; } @Override public org.compiere.model.I_C_Invoice getC_Invoice() throws RuntimeException { return get_ValueAsPO(COLUMNNAME_C_Invoice_ID, org.compiere.model.I_C_Invoice.class); } @Override public void setC_Invoice(org.compiere.model.I_C_Invoice C_Invoice) { set_ValueFromPO(COLUMNNAME_C_Invoice_ID, org.compiere.model.I_C_Invoice.class, C_Invoice); } /** Set Rechnung. @param C_Invoice_ID Invoice Identifier */ @Override public void setC_Invoice_ID (int C_Invoice_ID) { if (C_Invoice_ID < 1) set_ValueNoCheck (COLUMNNAME_C_Invoice_ID, null); else set_ValueNoCheck (COLUMNNAME_C_Invoice_ID, Integer.valueOf(C_Invoice_ID)); } /** Get Rechnung. @return Invoice Identifier */ @Override public int getC_Invoice_ID () { Integer ii = (Integer)get_Value(COLUMNNAME_C_Invoice_ID); if (ii == null) return 0; return ii.intValue(); } /** Set C_InvoiceTax. @param C_InvoiceTax_ID C_InvoiceTax */ @Override public void setC_InvoiceTax_ID (int C_InvoiceTax_ID) { if (C_InvoiceTax_ID < 1) set_ValueNoCheck (COLUMNNAME_C_InvoiceTax_ID, null); else set_ValueNoCheck (COLUMNNAME_C_InvoiceTax_ID, Integer.valueOf(C_InvoiceTax_ID)); } /** Get C_InvoiceTax. @return C_InvoiceTax */ @Override public int getC_InvoiceTax_ID () { Integer ii = (Integer)get_Value(COLUMNNAME_C_InvoiceTax_ID); if (ii == null) return 0; return ii.intValue(); } @Override public org.compiere.model.I_C_Tax getC_Tax() throws RuntimeException { return get_ValueAsPO(COLUMNNAME_C_Tax_ID, org.compiere.model.I_C_Tax.class); } @Override public void setC_Tax(org.compiere.model.I_C_Tax C_Tax) { set_ValueFromPO(COLUMNNAME_C_Tax_ID, org.compiere.model.I_C_Tax.class, C_Tax); } /** Set Steuer. @param C_Tax_ID Tax identifier */ @Override public void setC_Tax_ID (int C_Tax_ID) { if (C_Tax_ID < 1) set_ValueNoCheck (COLUMNNAME_C_Tax_ID, null); else set_ValueNoCheck (COLUMNNAME_C_Tax_ID, Integer.valueOf(C_Tax_ID)); } /** Get Steuer. @return Tax identifier */ @Override public int getC_Tax_ID () { Integer ii = (Integer)get_Value(COLUMNNAME_C_Tax_ID); if (ii == null) return 0; return ii.intValue(); } /** Set Packaging Tax. 
@param IsPackagingTax Packaging Tax */ @Override public void setIsPackagingTax (boolean IsPackagingTax) { set_Value (COLUMNNAME_IsPackagingTax, Boolean.valueOf(IsPackagingTax)); } /** Get Packaging Tax. @return Packaging Tax */ @Override public boolean isPackagingTax () { Object oo = get_Value(COLUMNNAME_IsPackagingTax); if (oo != null) { if (oo instanceof Boolean) return ((Boolean)oo).booleanValue(); return "Y".equals(oo); } return false; } /** Set Preis inklusive Steuern. @param IsTaxIncluded Tax is included in the price */ @Override public void setIsTaxIncluded (boolean IsTaxIncluded) { set_Value (COLUMNNAME_IsTaxIncluded, Boolean.valueOf(IsTaxIncluded)); } /** Get Preis inklusive Steuern. @return Tax is included in the price */ @Override public boolean isTaxIncluded () { Object oo = get_Value(COLUMNNAME_IsTaxIncluded); if (oo != null) { if (oo instanceof Boolean) return ((Boolean)oo).booleanValue(); return "Y".equals(oo); } return false; } /** Set Whole Tax. @param IsWholeTax If this flag is set, in a tax aware document (e.g. Invoice, Order) this tax will absorb the whole amount, leaving 0 for base amount */ @Override public void setIsWholeTax (boolean IsWholeTax) { set_Value (COLUMNNAME_IsWholeTax, Boolean.valueOf(IsWholeTax)); } /** Get Whole Tax. @return If this flag is set, in a tax aware document (e.g. Invoice, Order) this tax will absorb the whole amount, leaving 0 for base amount */ @Override public boolean isWholeTax () { Object oo = get_Value(COLUMNNAME_IsWholeTax); if (oo != null) { if (oo instanceof Boolean) return ((Boolean)oo).booleanValue(); return "Y".equals(oo); } return false; } /** Set Verarbeitet. @param Processed Checkbox sagt aus, ob der Beleg verarbeitet wurde. */ @Override public void setProcessed (boolean Processed) { set_Value (COLUMNNAME_Processed, Boolean.valueOf(Processed)); } /** Get Verarbeitet. @return Checkbox sagt aus, ob der Beleg verarbeitet wurde. */ @Override public boolean isProcessed () { Object oo = get_Value(COLUMNNAME_Processed); if (oo != null) { if (oo instanceof Boolean) return ((Boolean)oo).booleanValue(); return "Y".equals(oo); } return false; } /** Set Steuerbetrag. @param TaxAmt Tax Amount for a document */ @Override public void setTaxAmt (java.math.BigDecimal TaxAmt) { set_ValueNoCheck (COLUMNNAME_TaxAmt, TaxAmt); } /** Get Steuerbetrag. @return Tax Amount for a document */ @Override public java.math.BigDecimal getTaxAmt () { BigDecimal bd = (BigDecimal)get_Value(COLUMNNAME_TaxAmt); if (bd == null) return Env.ZERO; return bd; } /** Set Bezugswert. @param TaxBaseAmt Base for calculating the tax amount */ @Override public void setTaxBaseAmt (java.math.BigDecimal TaxBaseAmt) { set_ValueNoCheck (COLUMNNAME_TaxBaseAmt, TaxBaseAmt); } /** Get Bezugswert. @return Base for calculating the tax amount */ @Override public java.math.BigDecimal getTaxBaseAmt () { BigDecimal bd = (BigDecimal)get_Value(COLUMNNAME_TaxBaseAmt); if (bd == null) return Env.ZERO; return bd; } }
2,916
4,036
struct S100 {
  int i;
  int* p;
};

int test100() {
  int x = 0;
  struct S100 s100;
  // GOOD: address is only written to another stack variable, which is
  // safe.
  s100.p = &x;
  return x;
}

static struct S100 s101;

int test101() {
  int x = 0;
  // BAD: local address is written to a static variable, which could
  // be unsafe.
  s101.p = &x;
  return x;
}

int test102() {
  int x = 0;
  static struct S100 s102;
  // BAD: local address is written to a local static variable, which could
  // be unsafe.
  s102.p = &x;
  return x;
}

void test103(int *p) {
  static struct S100 s103;
  // BAD: address is written to a local static variable, which could
  // be unsafe.
  s103.p = p;
}

// Helper for test103.
void test103_caller1(int *p) { test103(p); }

// Helper for test103.
void test103_caller2() {
  int x = 0;
  test103_caller1(&x);
}

void test104(int *p) {
  static struct S100 s104;
  // GOOD: a stack address does not flow here, so this assignment is safe.
  s104.p = p;
}

void test104_caller1(int *p) { test104(p); }

void test104_caller2() {
  static int x = 0;
  test104_caller1(&x);
}

// Test for pointer arithmetic.
int test105() {
  int x = 0;
  int* p0 = &x;
  int* p1 = p0 + 1;
  int* p2 = p1 - 1;
  int* p3 = 1 + p2;
  p3++;
  // BAD: local address is written to a static variable, which could
  // be unsafe.
  s101.p = p3;
  return x;
}

static struct S100 s106;

// Test for taking the address of a field.
void test106() {
  S100 s;
  // BAD: local address is written to a static variable, which could
  // be unsafe.
  s106.p = &(s.i);
}

// Test for reference types.
int test107() {
  int x = 0;
  int& r0 = x;
  int& r1 = r0;
  r1++;
  // BAD: local address is written to a static variable, which could
  // be unsafe.
  s101.p = &r1;
  return r1;
}

struct S200 {
  int i;
  union {
    void* p;
    const char* str;
  };
};

int test200() {
  int x = 0;
  struct S200 s200;
  // GOOD: address is only written to another stack variable, which is
  // safe.
  s200.p = &x;
  return x;
}

static struct S200 s201;

int test201() {
  int x = 0;
  // BAD: local address is written to a static variable, which could
  // be unsafe.
  s201.p = &x;
  return x;
}

int test202() {
  int x = 0;
  static struct S200 s202;
  // BAD: local address is written to a local static variable, which could
  // be unsafe.
  s202.p = &x;
  return x;
}

// Example used in qhelp.
static const int* xptr;

void example1() {
  int x = 0;
  xptr = &x;  // BAD: address of local variable stored in non-local memory.
}

void example2() {
  static const int x = 0;
  xptr = &x;  // GOOD: storing address of static variable is safe.
}

struct S300 {
  int a1[15];
  int a2[14][15];
  int a3[13][14][15];
  int *p1;
  int (*p2)[15];
  int (*p3)[14][15];
  int** pp;
};

void test301() {
  static S300 s;
  int b1[15];
  int b2[14][15];
  int b3[13][14][15];
  s.p1 = b1;             // BAD: address of local variable stored in non-local memory.
  s.p1 = &b1[1];         // BAD: address of local variable stored in non-local memory.
  s.p2 = b2;             // BAD: address of local variable stored in non-local memory.
  s.p2 = &b2[1];         // BAD: address of local variable stored in non-local memory.
  s.p1 = b2[1];          // BAD: address of local variable stored in non-local memory.
  s.p1 = &b2[1][2];      // BAD: address of local variable stored in non-local memory.
  s.p3 = b3;             // BAD: address of local variable stored in non-local memory.
  s.p3 = &b3[1];         // BAD: address of local variable stored in non-local memory.
  s.p2 = b3[1];          // BAD: address of local variable stored in non-local memory.
  s.p2 = &b3[1][2];      // BAD: address of local variable stored in non-local memory.
  s.p1 = b3[1][2];       // BAD: address of local variable stored in non-local memory.
  s.p1 = &b3[1][2][3];   // BAD: address of local variable stored in non-local memory.
  s.pp[0] = b1;          // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &b1[1];      // BAD: address of local variable stored in non-local memory.
  s.pp[0] = b2[1];       // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &b2[1][2];   // BAD: address of local variable stored in non-local memory.
  s.pp[0] = b3[1][2];    // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &b3[1][2][3]; // BAD: address of local variable stored in non-local memory.
}

void test302() {
  S300 s;
  int b1[15];
  int b2[14][15];
  int b3[13][14][15];
  s.p1 = b1;             // GOOD: address is only stored in another local variable
  s.p1 = &b1[1];         // GOOD: address is only stored in another local variable
  s.p2 = b2;             // GOOD: address is only stored in another local variable
  s.p2 = &b2[1];         // GOOD: address is only stored in another local variable
  s.p1 = b2[1];          // GOOD: address is only stored in another local variable
  s.p1 = &b2[1][2];      // GOOD: address is only stored in another local variable
  s.p3 = b3;             // GOOD: address is only stored in another local variable
  s.p3 = &b3[1];         // GOOD: address is only stored in another local variable
  s.p2 = b3[1];          // GOOD: address is only stored in another local variable
  s.p2 = &b3[1][2];      // GOOD: address is only stored in another local variable
  s.p1 = b3[1][2];       // GOOD: address is only stored in another local variable
  s.p1 = &b3[1][2][3];   // GOOD: address is only stored in another local variable

  // Even though s is local, we don't know that s.pp is local because
  // there is a pointer indirection involved.
  s.pp[0] = b1;          // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &b1[1];      // BAD: address of local variable stored in non-local memory.
  s.pp[0] = b2[1];       // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &b2[1][2];   // BAD: address of local variable stored in non-local memory.
  s.pp[0] = b3[1][2];    // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &b3[1][2][3]; // BAD: address of local variable stored in non-local memory.
}

void test303() {
  static S300 s;
  S300 x;
  s.p1 = x.a1;            // BAD: address of local variable stored in non-local memory.
  s.p1 = &x.a1[1];        // BAD: address of local variable stored in non-local memory.
  s.p2 = x.a2;            // BAD: address of local variable stored in non-local memory.
  s.p2 = &x.a2[1];        // BAD: address of local variable stored in non-local memory.
  s.p1 = x.a2[1];         // BAD: address of local variable stored in non-local memory.
  s.p1 = &x.a2[1][2];     // BAD: address of local variable stored in non-local memory.
  s.p3 = x.a3;            // BAD: address of local variable stored in non-local memory.
  s.p3 = &x.a3[1];        // BAD: address of local variable stored in non-local memory.
  s.p2 = x.a3[1];         // BAD: address of local variable stored in non-local memory.
  s.p2 = &x.a3[1][2];     // BAD: address of local variable stored in non-local memory.
  s.p1 = x.a3[1][2];      // BAD: address of local variable stored in non-local memory.
  s.p1 = &x.a3[1][2][3];  // BAD: address of local variable stored in non-local memory.

  // Even though s is local, we don't know that s.pp is local because
  // there is a pointer indirection involved.
  s.pp[0] = x.a1;          // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &x.a1[1];      // BAD: address of local variable stored in non-local memory.
  s.pp[0] = x.a2[1];       // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &x.a2[1][2];   // BAD: address of local variable stored in non-local memory.
  s.pp[0] = x.a3[1][2];    // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &x.a3[1][2][3]; // BAD: address of local variable stored in non-local memory.
}

void test304() {
  S300 s;
  S300 x;
  s.p1 = x.a1;            // GOOD: address is only stored in another local variable
  s.p1 = &x.a1[1];        // GOOD: address is only stored in another local variable
  s.p2 = x.a2;            // GOOD: address is only stored in another local variable
  s.p2 = &x.a2[1];        // GOOD: address is only stored in another local variable
  s.p1 = x.a2[1];         // GOOD: address is only stored in another local variable
  s.p1 = &x.a2[1][2];     // GOOD: address is only stored in another local variable
  s.p3 = x.a3;            // GOOD: address is only stored in another local variable
  s.p3 = &x.a3[1];        // GOOD: address is only stored in another local variable
  s.p2 = x.a3[1];         // GOOD: address is only stored in another local variable
  s.p2 = &x.a3[1][2];     // GOOD: address is only stored in another local variable
  s.p1 = x.a3[1][2];      // GOOD: address is only stored in another local variable
  s.p1 = &x.a3[1][2][3];  // GOOD: address is only stored in another local variable

  // Even though s is local, we don't know that s.pp is local because
  // there is a pointer indirection involved.
  s.pp[0] = x.a1;          // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &x.a1[1];      // BAD: address of local variable stored in non-local memory.
  s.pp[0] = x.a2[1];       // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &x.a2[1][2];   // BAD: address of local variable stored in non-local memory.
  s.pp[0] = x.a3[1][2];    // BAD: address of local variable stored in non-local memory.
  s.pp[0] = &x.a3[1][2][3]; // BAD: address of local variable stored in non-local memory.
}

struct S400 {
  int* p0;
  int* p1[10];
  int* p2[10][11];
  int** q1;
  int** q2[10];
  int** q3[10][11];
  int* (*r2)[11];
  int* (*r3)[11][12];

  S400() {
    q1 = new int*[10];
    for (int i = 0; i < 10; i++) {
      q2[i] = new int*[11];
    }
    for (int i = 0; i < 10; i++) {
      for (int j = 0; j < 11; j++) {
        q3[i][j] = new int*[12];
      }
    }
    r2 = new int*[10][11];
    r3 = new int*[10][11][12];
  }
};

int test400() {
  S400 s;
  int x = 0;
  s.p0 = &x;           // GOOD: s.p0 is on the stack.
  s.p1[1] = &x;        // GOOD: s.p1 is on the stack.
  s.p2[1][2] = &x;     // GOOD: s.p2 is on the stack.
  s.q1[1] = &x;        // BAD: pointer indirection to the heap.
  s.q2[1][2] = &x;     // BAD: pointer indirection to the heap.
  s.q3[1][2][3] = &x;  // BAD: pointer indirection to the heap.
  s.r2[1][2] = &x;     // BAD: pointer indirection to the heap.
  s.r3[1][2][3] = &x;  // BAD: pointer indirection to the heap.
  return x;
}

class ListOnStack {
  int i_;
  ListOnStack *parent1_;
  ListOnStack *parent2_;

public:
  ListOnStack(int i, ListOnStack *parent) : i_(i), parent1_(parent) {
    // The assignment below is safe, because ListOnStack is always
    // allocated on the stack, so it cannot outlive the pointer.
    parent2_ = parent;  // GOOD.
  }
};

void test500_impl(int n, ListOnStack *parent) {
  if (n > 0) {
    ListOnStack curr(n, parent);
    test500_impl(n - 1, &curr);
  }
}

void test500() { test500_impl(10, 0); }

class BaseClass600 {
  int *p_;

public:
  BaseClass600(int* p) { p_ = p; }
};

class DerivedClass600 : public BaseClass600 {
public:
  DerivedClass600(int* p) : BaseClass600(p) {}
};

void test600() {
  int i = 0;
  DerivedClass600 x(&i);
}

class BaseClass601 {
  int *p_;

public:
  BaseClass601(int* p) { p_ = p; }
};

class DerivedClass601 : public virtual BaseClass601 {
public:
  DerivedClass601(int* p) : BaseClass601(p) {}
};

void test601() {
  int i = 0;
  DerivedClass601 x(&i);
}

class DelegatingClass602 {
  int *p_;
  DelegatingClass602(int* p) : p_(p) {}

public:
  DelegatingClass602(int* p, int, int) : DelegatingClass602(p) {}
};

void test602() {
  int i = 0;
  DelegatingClass602 x(&i, 0, 0);
}

class BaseClass602 {
  int *p_;

public:
  BaseClass602(int* p) { p_ = p; }
};

class FieldInitClass602 {
  BaseClass602 base;

public:
  FieldInitClass602(int* p) : base(p) {}
};

void test603() {
  int i = 0;
  FieldInitClass602 x(&i);
}
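// ---------------------------------------------------------------------------
// A minimal standalone sketch (not part of the test suite above) of the
// failure mode the BAD cases guard against: once a stack address is copied
// into static or heap storage, it can be dereferenced after the frame that
// owned it is gone. The names here are illustrative only.
#include <cstdio>

static int* escaped;  // non-local storage, outlives every stack frame

void capture() {
  int local = 42;
  escaped = &local;  // the pattern the BAD cases flag
}                    // 'local' dies here; 'escaped' now dangles

int main() {
  capture();
  // Undefined behavior: reads a dead stack slot. It may even still print 42,
  // which is exactly what makes this class of bug easy to miss in testing.
  std::printf("%d\n", *escaped);
  return 0;
}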
package com.bolingcavalry.advanced.service.impl;

import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.DynamicTest;
import org.junit.jupiter.api.TestFactory;
import org.springframework.boot.test.context.SpringBootTest;

import java.util.Arrays;

import static org.junit.jupiter.api.DynamicTest.dynamicTest;

@SpringBootTest
@Slf4j
class DynamicDemoTest {

    @TestFactory
    Iterable<org.junit.jupiter.api.DynamicTest> testFactoryTest() {
        DynamicTest firstTest = dynamicTest(
                "Dynamic test case No. 1",
                () -> {
                    log.info("Case No. 1: the unit test logic goes here");
                }
        );

        DynamicTest secondTest = dynamicTest(
                "Dynamic test case No. 2",
                () -> {
                    log.info("Case No. 2: the unit test logic goes here");
                }
        );

        return Arrays.asList(firstTest, secondTest);
    }
}
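// dynamicTest builds one case at a time; for data-driven suites JUnit 5 also
// offers DynamicTest.stream, which derives a display name and a test body from
// each input element. A hedged sketch (class name and sample data are made up
// for illustration):
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.DynamicTest;
import org.junit.jupiter.api.TestFactory;

import java.util.List;
import java.util.stream.Stream;

class DynamicStreamDemoTest {

    @TestFactory
    Stream<DynamicTest> squaresAreNonNegative() {
        List<Integer> inputs = List.of(-3, 0, 7);  // arbitrary sample data
        return DynamicTest.stream(
                inputs.iterator(),
                n -> "square of " + n + " is non-negative",  // display name per case
                n -> Assertions.assertTrue(n * n >= 0));     // test body per case
    }
}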
{ "schema_version": "1.2.0", "id": "GHSA-58ff-hvrc-gp86", "modified": "2022-05-02T03:54:04Z", "published": "2022-05-02T03:54:04Z", "aliases": [ "CVE-2009-4367" ], "details": "The Staging Webservice (\"sitecore modules/staging/service/api.asmx\") in Sitecore Staging Module 5.4.0 rev.080625 and earlier allows remote attackers to bypass authentication and (1) upload files, (2) download files, (3) list directories, and (4) clear the server cache via crafted SOAP requests with arbitrary Username and Password values, possibly related to a direct request.", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-4367" }, { "type": "WEB", "url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/54881" }, { "type": "WEB", "url": "https://www.sec-consult.com/files/20091217-0_sitecore_StagingModule_1.0.txt" }, { "type": "WEB", "url": "http://osvdb.org/61147" }, { "type": "WEB", "url": "http://secunia.com/advisories/37763" }, { "type": "WEB", "url": "http://www.exploit-db.com/exploits/10513" }, { "type": "WEB", "url": "http://www.securityfocus.com/archive/1/508529/100/0/threaded" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/37388" } ], "database_specific": { "cwe_ids": [ "CWE-287" ], "severity": "MODERATE", "github_reviewed": false } }
package mage.interfaces;

import mage.players.PlayerType;
import mage.utils.MageVersion;
import mage.view.GameTypeView;
import mage.view.TournamentTypeView;

import java.io.Serializable;
import java.util.List;
import java.util.stream.Collectors;

/**
 * @author <EMAIL>
 */
public class ServerState implements Serializable {

    private final List<GameTypeView> gameTypes;
    private final List<TournamentTypeView> tournamentTypes;
    private final PlayerType[] playerTypes;
    private final String[] deckTypes;
    private final String[] draftCubes;
    private final boolean testMode;
    private final MageVersion version;
    private final long cardsContentVersion;
    private final long expansionsContentVersion;

    public ServerState(List<GameTypeView> gameTypes, List<TournamentTypeView> tournamentTypes,
                       PlayerType[] playerTypes, String[] deckTypes, String[] draftCubes, boolean testMode,
                       MageVersion version, long cardsContentVersion, long expansionsContentVersion) {
        this.gameTypes = gameTypes;
        this.tournamentTypes = tournamentTypes;
        this.playerTypes = playerTypes;
        this.deckTypes = deckTypes;
        this.draftCubes = draftCubes;
        this.testMode = testMode;
        this.version = version;
        this.cardsContentVersion = cardsContentVersion;
        this.expansionsContentVersion = expansionsContentVersion;
    }

    public List<GameTypeView> getGameTypes() {
        return gameTypes;
    }

    public List<GameTypeView> getTournamentGameTypes() {
        return gameTypes.stream()
                .filter(gameTypeView -> gameTypeView.getMinPlayers() == 2 && gameTypeView.getMaxPlayers() == 2)
                .collect(Collectors.toList());
    }

    public List<TournamentTypeView> getTournamentTypes() {
        return tournamentTypes;
    }

    public PlayerType[] getPlayerTypes() {
        return playerTypes;
    }

    public String[] getDeckTypes() {
        return deckTypes;
    }

    public String[] getDraftCubes() {
        return draftCubes;
    }

    public boolean isTestMode() {
        return testMode;
    }

    public MageVersion getVersion() {
        return version;
    }

    public long getCardsContentVersion() {
        return cardsContentVersion;
    }

    public long getExpansionsContentVersion() {
        return expansionsContentVersion;
    }
}
<reponame>MightyNerdEric/egeria<gh_stars>100-1000 /* SPDX-License-Identifier: Apache 2.0 */ /* Copyright Contributors to the ODPi Egeria project. */ package org.odpi.openmetadata.accessservices.governanceprogram.handlers; import org.odpi.openmetadata.accessservices.governanceprogram.converters.GovernanceProgramOMASConverter; import org.odpi.openmetadata.accessservices.governanceprogram.metadataelements.*; import org.odpi.openmetadata.commonservices.generichandlers.ActorProfileHandler; import org.odpi.openmetadata.commonservices.generichandlers.OpenMetadataAPIMapper; import org.odpi.openmetadata.commonservices.generichandlers.PersonRoleHandler; import org.odpi.openmetadata.commonservices.repositoryhandler.RepositoryErrorHandler; import org.odpi.openmetadata.frameworks.auditlog.AuditLog; import org.odpi.openmetadata.frameworks.connectors.ffdc.InvalidParameterException; import org.odpi.openmetadata.frameworks.connectors.ffdc.PropertyServerException; import org.odpi.openmetadata.frameworks.connectors.ffdc.UserNotAuthorizedException; import org.odpi.openmetadata.repositoryservices.connectors.stores.metadatacollectionstore.properties.instances.InstanceProperties; import org.odpi.openmetadata.repositoryservices.connectors.stores.metadatacollectionstore.properties.instances.Relationship; import org.odpi.openmetadata.repositoryservices.connectors.stores.metadatacollectionstore.repositoryconnector.OMRSRepositoryHelper; import java.util.ArrayList; import java.util.Date; import java.util.List; /** * AppointmentHandler is responsible for managing the relationship between a person role and a * profile. It typically returns the combination of the relationship and the profile bean. * Specifically it supports the return of GovernanceAppointee in GovernanceRoleAppointee and * GovernanceRoleHistory. */ public class AppointmentHandler { private PersonRoleHandler<GovernanceRoleElement> roleHandler; private ActorProfileHandler<ProfileElement> profileHandler; private GovernanceProgramOMASConverter<GovernanceAppointee> converter; private RepositoryErrorHandler errorHandler; public AppointmentHandler(PersonRoleHandler<GovernanceRoleElement> roleHandler, ActorProfileHandler<ProfileElement> profileHandler, OMRSRepositoryHelper repositoryHelper, String serviceName, String serverName, AuditLog auditLog) { this.roleHandler = roleHandler; this.profileHandler = profileHandler; this.converter = new GovernanceProgramOMASConverter<>(repositoryHelper, serviceName, serverName); this.errorHandler = new RepositoryErrorHandler(repositoryHelper, serviceName, serverName, auditLog); } /** * Return all of the governance roles and their incumbents (if any). * * @param userId the name of the calling user * @param domainIdentifier identifier of domain - 0 means all * @param startFrom where to start from in the list of definitions * @param pageSize max number of results to return in one call * @param methodName calling method * * @return list of governance role objects * * @throws InvalidParameterException the userId is either null or invalid. * @throws PropertyServerException the server is not available. * @throws UserNotAuthorizedException the calling user is not authorized to issue the call. 
*/ public List<GovernanceRoleAppointee> getCurrentGovernanceRoleAppointments(String userId, int domainIdentifier, int startFrom, int pageSize, String methodName) throws InvalidParameterException, PropertyServerException, UserNotAuthorizedException { final String governanceRoleGUIDParameterName = "governanceRoleGUID"; List<GovernanceRoleElement> governanceRoles = roleHandler.getPersonRolesForDomainId(userId, domainIdentifier, startFrom, pageSize, null, methodName); if (governanceRoles != null) { List<GovernanceRoleAppointee> results = new ArrayList<>(); for (GovernanceRoleElement governanceRole : governanceRoles) { if ((governanceRole != null) && (governanceRole.getElementHeader() != null)) { GovernanceRoleAppointee governanceRoleAppointee = new GovernanceRoleAppointee(governanceRole); List<Relationship> appointmentRelationships = roleHandler.getAttachmentLinks(userId, governanceRole.getElementHeader().getGUID(), governanceRoleGUIDParameterName, OpenMetadataAPIMapper.PERSON_ROLE_TYPE_NAME, OpenMetadataAPIMapper.PERSON_ROLE_APPOINTMENT_RELATIONSHIP_TYPE_GUID, OpenMetadataAPIMapper.PERSON_ROLE_APPOINTMENT_RELATIONSHIP_TYPE_NAME, null, OpenMetadataAPIMapper.ACTOR_PROFILE_TYPE_NAME, 1, false, 0, 0, null, methodName); if (appointmentRelationships != null) { List<GovernanceAppointee> currentAppointees = new ArrayList<>(); for (Relationship relationship : appointmentRelationships) { if ((relationship != null) && (relationship.getProperties() != null)) { InstanceProperties properties = relationship.getProperties(); Date now = new Date(); /* * Need to retrieve the appointments that are active */ if (((properties.getEffectiveFromTime() == null) || properties.getEffectiveFromTime().before(now)) && ((properties.getEffectiveToTime() == null) || properties.getEffectiveToTime().after(now))) { GovernanceAppointee appointee = getAppointeeFromRelationship(userId, relationship, methodName); currentAppointees.add(appointee); } } else { errorHandler.logBadRelationship(OpenMetadataAPIMapper.PERSON_ROLE_APPOINTMENT_RELATIONSHIP_TYPE_NAME, relationship, methodName); } } if (!currentAppointees.isEmpty()) { governanceRoleAppointee.setCurrentAppointees(currentAppointees); } } results.add(governanceRoleAppointee); } } if (! results.isEmpty()) { return results; } } return null; } /** * Retrieve a governance role description by unique guid along with the history of who has been appointed * to the role. * * @param userId the name of the calling user. * @param governanceRoleGUID unique identifier (guid) of the governance role. * @return governance role object * @throws InvalidParameterException the unique identifier of the governance role is either null or invalid. * @throws PropertyServerException the server is not available. * @throws UserNotAuthorizedException the calling user is not authorized to issue the call. 
*/ public GovernanceRoleHistory getGovernanceRoleHistoryByGUID(String userId, String governanceRoleGUID, String methodName) throws InvalidParameterException, PropertyServerException, UserNotAuthorizedException { final String governanceRoleGUIDParameterName = "governanceRoleGUID"; GovernanceRoleElement governanceRole = roleHandler.getBeanFromRepository(userId, governanceRoleGUID, governanceRoleGUIDParameterName, OpenMetadataAPIMapper.PERSON_ROLE_TYPE_NAME, false, false, new Date(), methodName); if ((governanceRole != null) && (governanceRole.getElementHeader() != null)) { GovernanceRoleHistory governanceRoleHistory = new GovernanceRoleHistory(governanceRole); List<Relationship> appointmentRelationships = roleHandler.getAttachmentLinks(userId, governanceRole.getElementHeader().getGUID(), governanceRoleGUIDParameterName, OpenMetadataAPIMapper.PERSON_ROLE_TYPE_NAME, OpenMetadataAPIMapper.PERSON_ROLE_APPOINTMENT_RELATIONSHIP_TYPE_GUID, OpenMetadataAPIMapper.PERSON_ROLE_APPOINTMENT_RELATIONSHIP_TYPE_NAME, null, OpenMetadataAPIMapper.ACTOR_PROFILE_TYPE_NAME, 1, false, 0, 0, null, methodName); if (appointmentRelationships != null) { List<GovernanceAppointee> previousAppointees = new ArrayList<>(); List<GovernanceAppointee> currentAppointees = new ArrayList<>(); List<GovernanceAppointee> futureAppointees = new ArrayList<>(); for (Relationship relationship : appointmentRelationships) { if ((relationship != null) && (relationship.getProperties() != null)) { InstanceProperties properties = relationship.getProperties(); Date now = new Date(); GovernanceAppointee appointee = getAppointeeFromRelationship(userId, relationship, methodName); /* * Need to retrieve the appointments that are active */ if (((properties.getEffectiveFromTime() == null) || properties.getEffectiveFromTime().before(now)) && ((properties.getEffectiveToTime() == null) || properties.getEffectiveToTime().after(now))) { currentAppointees.add(appointee); } else if ((properties.getEffectiveToTime() != null) && properties.getEffectiveToTime().before(now)) { previousAppointees.add(appointee); } else { futureAppointees.add(appointee); } } } if (! previousAppointees.isEmpty()) { governanceRoleHistory.setPredecessors(previousAppointees); } if (! currentAppointees.isEmpty()) { governanceRoleHistory.setCurrentAppointees(currentAppointees); } if (! futureAppointees.isEmpty()) { governanceRoleHistory.setSuccessors(futureAppointees); } } return governanceRoleHistory; } return null; } /** * Extract the appointee from the supplied relationship * * @param userId calling user * @param relationship PersonRoleAppointment relationship * @param methodName calling method * * @return populated appointee * * @throws InvalidParameterException the unique identifier of the governance role is either null or invalid. * @throws PropertyServerException the server is not available. * @throws UserNotAuthorizedException the calling user is not authorized to issue the call. 
*/ private GovernanceAppointee getAppointeeFromRelationship(String userId, Relationship relationship, String methodName) throws InvalidParameterException, PropertyServerException, UserNotAuthorizedException { final String profileGUIDParameterName = "profileGUID"; if ((relationship != null) && (relationship.getProperties() != null) && (relationship.getEntityOneProxy() != null) && (relationship.getEntityTwoProxy() != null)) { GovernanceAppointee appointee = new GovernanceAppointee(); InstanceProperties properties = relationship.getProperties(); ElementHeader elementHeader = converter.getMetadataElementHeader(GovernanceAppointee.class, relationship, methodName); appointee.setElementHeader(elementHeader); appointee.setStartDate(properties.getEffectiveFromTime()); appointee.setEndDate(properties.getEffectiveToTime()); ProfileElement profile = profileHandler.getActorProfileByGUID(userId, relationship.getEntityOneProxy().getGUID(), profileGUIDParameterName, null, methodName); appointee.setProfile(profile); return appointee; } else { errorHandler.logBadRelationship(OpenMetadataAPIMapper.PERSON_ROLE_APPOINTMENT_RELATIONSHIP_TYPE_NAME, relationship, methodName); } return null; } }
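// Both retrieval methods above bucket PersonRoleAppointment relationships by
// their effective-date window. A hedged standalone sketch of that
// classification rule (the names here are illustrative, not Egeria API):
import java.util.Date;

enum AppointmentBucket { PREVIOUS, CURRENT, FUTURE }

final class AppointmentWindow {
    /**
     * Classify an appointment by its effective window, mirroring the logic in
     * getGovernanceRoleHistoryByGUID: null bounds are treated as open-ended.
     */
    static AppointmentBucket classify(Date effectiveFrom, Date effectiveTo, Date now) {
        boolean started = (effectiveFrom == null) || effectiveFrom.before(now);
        boolean notEnded = (effectiveTo == null) || effectiveTo.after(now);
        if (started && notEnded) {
            return AppointmentBucket.CURRENT;   // active right now
        }
        if ((effectiveTo != null) && effectiveTo.before(now)) {
            return AppointmentBucket.PREVIOUS;  // window already closed
        }
        return AppointmentBucket.FUTURE;        // window not yet open
    }
}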
package io.codearte.jfairy.producer.person.locale.pl;

import io.codearte.jfairy.data.DataMaster;
import io.codearte.jfairy.producer.BaseProducer;
import io.codearte.jfairy.producer.person.AbstractAddressProvider;

import javax.inject.Inject;

public class PlAddressProvider extends AbstractAddressProvider {

    @Inject
    public PlAddressProvider(DataMaster dataMaster, BaseProducer baseProducer) {
        super(dataMaster, baseProducer);
    }

    @Override
    public PlAddress get() {
        return new PlAddress(getStreet(), getStreetNumber(), getApartmentNumber(), getPostalCode(), getCity());
    }
}
package com.example.zhpan.banner.net;

import com.example.zhpan.banner.bean.ArticleWrapper;
import com.example.zhpan.banner.bean.BannerData;

import java.util.List;

import io.reactivex.Observable;
import retrofit2.http.GET;
import retrofit2.http.Headers;

/**
 * <pre>
 *     Created by zhangpan on 2019-08-14.
 *     Description:
 * </pre>
 */
public interface ApiService {

    @Headers("Cache-Control: public, max-age=" + 3600)
    @GET("banner/json")
    Observable<List<BannerData>> getBannerData();

    @Headers("Cache-Control: public, max-age=" + 3600)
    @GET("article/list/0/json")
    Observable<ArticleWrapper> getArticle();
}
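// A hedged sketch of how this interface is typically wired up with Retrofit.
// The base URL and the converter/adapter choices are assumptions (the relative
// paths look like the wanandroid.com open API), not something this file pins
// down; the class name is made up for illustration.
import io.reactivex.schedulers.Schedulers;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory;
import retrofit2.converter.gson.GsonConverterFactory;

public final class ApiClient {

    public static ApiService create() {
        Retrofit retrofit = new Retrofit.Builder()
                .baseUrl("https://www.wanandroid.com/")                     // assumed base URL
                .addConverterFactory(GsonConverterFactory.create())         // JSON -> beans
                .addCallAdapterFactory(RxJava2CallAdapterFactory.create())  // Observable support
                .build();
        return retrofit.create(ApiService.class);
    }

    public static void main(String[] args) {
        ApiClient.create().getBannerData()
                .subscribeOn(Schedulers.io())
                .subscribe(
                        banners -> System.out.println("banners: " + banners.size()),
                        Throwable::printStackTrace);
    }
}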
{
  "nom": "Cuy",
  "circ": "3ème circonscription",
  "dpt": "Yonne",
  "inscrits": 610,
  "abs": 321,
  "votants": 289,
  "blancs": 2,
  "nuls": 14,
  "exp": 273,
  "res": [
    {"nuance": "FN", "nom": "<NAME>", "voix": 139},
    {"nuance": "REM", "nom": "Mme <NAME>", "voix": 134}
  ]
}
/**************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *************************************************************/

// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_sc.hxx"

// ============================================================================

#include "warnpassword.hxx"
#include <com/sun/star/task/XInteractionHandler.hpp>
#include <com/sun/star/task/XInteractionRequest.hpp>
#include <svl/itemset.hxx>
#include <sfx2/docfile.hxx>
#include <sfx2/sfxsids.hrc>
#include <ucbhelper/simpleinteractionrequest.hxx>
#include <com/sun/star/task/InteractionClassification.hpp>
#include <com/sun/star/ucb/InteractiveAppException.hpp>
#include <com/sun/star/ucb/XContent.hpp>
#include <svx/svxerr.hxx>

using ::rtl::OUString;
using ::com::sun::star::uno::makeAny;
using ::com::sun::star::uno::Any;
using ::com::sun::star::uno::Reference;
using ::com::sun::star::uno::Exception;
using ::com::sun::star::uno::XInterface;
using ::com::sun::star::task::InteractionClassification_QUERY;
using ::com::sun::star::task::XInteractionHandler;
using ::com::sun::star::task::XInteractionRequest;
using ::com::sun::star::ucb::InteractiveAppException;

bool ScWarnPassword::WarningOnPassword( SfxMedium& rMedium )
{
    bool bReturn = true;
    Reference< XInteractionHandler > xHandler( rMedium.GetInteractionHandler());
    if( xHandler.is() )
    {
        OUString empty;
        Any xException( makeAny(InteractiveAppException(empty,
                Reference <XInterface> (),
                InteractionClassification_QUERY,
                ERRCODE_SVX_EXPORT_FILTER_CRYPT)));

        Reference< ucbhelper::SimpleInteractionRequest > xRequest
            = new ucbhelper::SimpleInteractionRequest(
                xException,
                ucbhelper::CONTINUATION_APPROVE | ucbhelper::CONTINUATION_DISAPPROVE );

        xHandler->handle( xRequest.get() );

        const sal_Int32 nResp = xRequest->getResponse();

        switch ( nResp )
        {
            case ucbhelper::CONTINUATION_UNKNOWN:
                break;
            case ucbhelper::CONTINUATION_APPROVE:
                // Continue
                break;
            case ucbhelper::CONTINUATION_DISAPPROVE:
                bReturn = false;
                break;
        }
    }
    return bReturn;
}
# flake8: noqa
from .gradient_ascent import *
# Convenience utilities for plotting with Bokeh.

import math, statistics, random, time, sys

# For graphs
from bokeh.layouts import gridplot
from bokeh.plotting import figure, output_file, show
import bokeh.io

# The next two lines prevent Bokeh from opening the graph in a new window.
bokeh.io.reset_output()
bokeh.io.output_notebook()


def square_circle_plot(radius=1.0, title=''):
    """
    For the Monte Carlo Pi calculation, it helps to plot a square and circle,
    then use it for the sample points, which are added to the returned plot.
    Example:
        from bokeh.plotting import show
        scp_plot = square_circle_plot(radius=1.0)
        show(scp_plot)  # no points
        ...
        scp_plot.circle(xs, ys, color='darkgrey', size=4)  # for xs, ys points
        show(scp_plot)  # with points
    """
    tooltips = [
        ("x", "$x"),
        ("y", "$y")]
    # Tip: use match_aspect=True or the arc won't be drawn to match how the
    # 2x2 square is actually drawn.
    plot = figure(title=title, tooltips=tooltips, match_aspect=True)
    plot.grid.grid_line_alpha = 0.2
    plot.xaxis.axis_label = 'x'
    plot.yaxis.axis_label = 'y'
    plot.arc(x=0.0, y=0.0, radius=radius, start_angle=0.0, end_angle=2.0*math.pi,
             color="red", line_width=2)
    plot.segment(x0=[-radius, radius, radius, -radius],
                 y0=[-radius, -radius, radius, radius],
                 x1=[radius, radius, -radius, -radius],
                 y1=[-radius, radius, radius, -radius],
                 color="blue", line_width=2)
    return plot


def two_lines_plot(title, x_label, y_label, line_one_label, line_two_label,
                   ns, durations, ray_ns, ray_durations,
                   x_axis_type='log', y_axis_type='log'):
    tooltips = [
        ("name", "$name"),
        ("array size", "$x"),
        ("time", "$y")]
    plot = figure(x_axis_type=x_axis_type, y_axis_type=y_axis_type,
                  title=title, tooltips=tooltips)
    plot.grid.grid_line_alpha = 0.3
    plot.xaxis.axis_label = x_label
    plot.yaxis.axis_label = y_label
    plot.line(ns, durations, color='#A6CEE3', legend_label=line_one_label, name=line_one_label)
    plot.circle(ns, durations, color='#A6CEE3', size=4)
    plot.line(ray_ns, ray_durations, color='#B2DF8A', legend_label=line_two_label, name=line_two_label)
    plot.square(ray_ns, ray_durations, color='#B2DF8A', size=4)
    plot.legend.location = "top_left"
    return plot


def means_stddevs_plot(ns, means, stddevs, title=''):
    tooltips = [
        ("name", "$name"),
        ("array size", "$x"),
        ("time", "$y")]
    plot = figure(x_axis_type="log", title=title, tooltips=tooltips,
                  sizing_mode='stretch_both')
    plot.grid.grid_line_alpha = 0.5
    plot.xaxis.axis_label = 'N'
    plot.yaxis.axis_label = ''
    # Draw a line for the correct Pi value.
    plot.segment(x0=ns[0]*0.8, y0=[math.pi], x1=ns[-1]*1.2, y1=[math.pi],
                 color="red", line_width=2, legend_label='π', name='π')
    plot.line(ns, means, color='#A6CEE3', legend_label='mean', name='mean')
    plot.circle(ns, means, color='#A6CEE3', size=10)
    # Draw std. dev. lines.
    m_ss = list(zip(means, stddevs))
    minus_stddevs = list(map(lambda m_s: m_s[0] - m_s[1], m_ss.copy()))
    plus_stddevs = list(map(lambda m_s: m_s[0] + m_s[1], m_ss.copy()))
    plot.segment(x0=ns, y0=minus_stddevs, x1=ns, y1=plus_stddevs,
                 color="black", line_width=2, legend_label='std. dev.', name='σ')
    # "Whiskers" at the end of the std. dev. lines
    # (almost-0 height rects are simpler than segments).
    widths = list(map(lambda x: x/10.0, ns.copy()))
    plot.rect(x=ns, y=minus_stddevs, width=widths, height=0.0001, fill_color="black")
    plot.rect(x=ns, y=plus_stddevs, width=widths, height=0.0001, fill_color="black")
    plot.legend.location = "bottom_right"
    return plot
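# A hedged usage sketch tying the helpers together: estimate Pi by uniform
# sampling in the 2x2 square and overlay the sample points on the
# square/circle plot. The sample count is arbitrary.
import random

from bokeh.plotting import show


def estimate_pi(n=2000, radius=1.0):
    """Return (pi_estimate, xs, ys) from n uniform samples in the square."""
    xs = [random.uniform(-radius, radius) for _ in range(n)]
    ys = [random.uniform(-radius, radius) for _ in range(n)]
    inside = sum(1 for x, y in zip(xs, ys) if x*x + y*y <= radius*radius)
    # Area ratio: circle/square = pi*r^2 / (2r)^2 = pi/4, so pi ~= 4*inside/n.
    return 4.0 * inside / n, xs, ys


pi_est, xs, ys = estimate_pi()
plot = square_circle_plot(radius=1.0, title=f'Monte Carlo Pi ~ {pi_est:.4f}')
plot.circle(xs, ys, color='darkgrey', size=4)
show(plot)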
//
//  NSObject+HYBHelperKit.h
//  HYBHelperKit
//
//  Created by huangyibiao on 16/6/29.
//  Copyright © 2016 huangyibiao. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

@interface NSObject (HYBHelperKit)

/**
 *  Get the name of current object's class.
 */
- (NSString *)hyb_className;

#pragma mark - Json to object and object to json

/**
 *  Transform an object to json data.
 *
 *  @param object Any kind of object.
 *
 *  @return json data object if transform successfully, otherwise return nil.
 */
+ (NSMutableData *)hyb_toJsonDataWithObject:(id)object;

/**
 *  Transform self to json data.
 *
 *  @return json data if transform successfully, otherwise return nil.
 */
- (NSMutableData *)hyb_toJsonData;

/**
 *  Transform an object to json string.
 *
 *  @param object Any kind of object
 *
 *  @return json string if transform successfully, otherwise return nil.
 */
+ (NSString *)hyb_toJsonStringWithObject:(id)object;

/**
 *  Transform self to json string.
 *
 *  @return json string if transform successfully, otherwise return nil.
 */
- (NSString *)hyb_toJsonString;

#pragma mark - Filter Null and nil

/**
 *  Filter all nil and null object.
 */
- (id)hyb_filterNullNil;

/**
 *  Filter all nil and null object.
 *
 *  @param object The object to be filtered.
 */
+ (id)hyb_filterNullNilFromObject:(id)object;

@end
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/web_applications/external_install_options.h" #include <ostream> #include <string> #include <tuple> #include <vector> #include "base/strings/string_util.h" #include "base/strings/utf_string_conversions.h" #include "build/chromeos_buildflags.h" #include "chrome/browser/web_applications/system_web_apps/system_web_app_types.h" #include "third_party/blink/public/common/manifest/manifest_util.h" namespace web_app { ExternalInstallOptions::ExternalInstallOptions( const GURL& install_url, DisplayMode user_display_mode, ExternalInstallSource install_source) : install_url(install_url), user_display_mode(user_display_mode), install_source(install_source) {} ExternalInstallOptions::~ExternalInstallOptions() = default; ExternalInstallOptions::ExternalInstallOptions( const ExternalInstallOptions& other) = default; ExternalInstallOptions::ExternalInstallOptions(ExternalInstallOptions&& other) = default; ExternalInstallOptions& ExternalInstallOptions::operator=( const ExternalInstallOptions& other) = default; bool ExternalInstallOptions::operator==( const ExternalInstallOptions& other) const { auto AsTuple = [](const ExternalInstallOptions& options) { // Keep in order declared in external_install_options.h. return std::tie( // clang-format off options.install_url, options.user_display_mode, options.install_source, options.fallback_app_name, options.add_to_applications_menu, options.add_to_desktop, options.add_to_quick_launch_bar, options.add_to_search, options.add_to_management, options.run_on_os_login, options.is_disabled, options.override_previous_user_uninstall, options.only_for_new_users, options.only_if_previously_preinstalled, options.user_type_allowlist, options.gate_on_feature, #if BUILDFLAG(IS_CHROMEOS_ASH) options.disable_if_arc_supported, options.disable_if_tablet_form_factor, #endif // BUILDFLAG(IS_CHROMEOS_ASH) options.bypass_service_worker_check, options.require_manifest, options.force_reinstall, options.force_reinstall_for_milestone, options.wait_for_windows_closed, options.install_placeholder, options.reinstall_placeholder, options.launch_query_params, options.load_and_await_service_worker_registration, options.service_worker_registration_url, options.uninstall_and_replace, options.additional_search_terms, options.only_use_app_info_factory, options.system_app_type, options.oem_installed, options.disable_if_touchscreen_with_stylus_not_supported // clang-format on ); }; return AsTuple(*this) == AsTuple(other); } base::Value ExternalInstallOptions::AsDebugValue() const { base::Value root(base::Value::Type::DICTIONARY); auto ConvertStringList = [](const std::vector<std::string> list) { base::Value list_json(base::Value::Type::LIST); for (const std::string& item : list) list_json.Append(item); return list_json; }; auto ConvertOptional = [](const auto& value) { return value ? base::Value(*value) : base::Value(); }; // Prefix with a ! so this appears at the top when serialized. 
root.SetStringKey("!install_url", install_url.spec()); root.SetBoolKey("add_to_applications_menu", add_to_applications_menu); root.SetBoolKey("add_to_desktop", add_to_desktop); root.SetBoolKey("add_to_management", add_to_management); root.SetBoolKey("add_to_quick_launch_bar", add_to_quick_launch_bar); root.SetBoolKey("add_to_search", add_to_search); root.SetKey("additional_search_terms", ConvertStringList(additional_search_terms)); root.SetBoolKey("app_info_factory", static_cast<bool>(app_info_factory)); root.SetBoolKey("bypass_service_worker_check", bypass_service_worker_check); #if BUILDFLAG(IS_CHROMEOS_ASH) root.SetBoolKey("disable_if_arc_supported", disable_if_arc_supported); root.SetBoolKey("disable_if_tablet_form_factor", disable_if_tablet_form_factor); #endif // BUILDFLAG(IS_CHROMEOS_ASH) root.SetBoolKey("disable_if_touchscreen_with_stylus_not_supported", disable_if_touchscreen_with_stylus_not_supported); root.SetKey("fallback_app_name", ConvertOptional(fallback_app_name)); root.SetBoolKey("force_reinstall", force_reinstall); root.SetKey("force_reinstall_for_milestone", ConvertOptional(force_reinstall_for_milestone)); root.SetKey("gate_on_feature", ConvertOptional(gate_on_feature)); root.SetBoolKey("install_placeholder", install_placeholder); root.SetIntKey("install_source", static_cast<int>(install_source)); root.SetBoolKey("is_disabled", is_disabled); root.SetKey("launch_query_params", ConvertOptional(launch_query_params)); root.SetBoolKey("load_and_await_service_worker_registration", load_and_await_service_worker_registration); root.SetBoolKey("oem_installed", oem_installed); root.SetBoolKey("only_for_new_users", only_for_new_users); root.SetBoolKey("only_if_previously_preinstalled", only_if_previously_preinstalled); root.SetBoolKey("only_use_app_info_factory", only_use_app_info_factory); root.SetBoolKey("override_previous_user_uninstall", override_previous_user_uninstall); root.SetBoolKey("reinstall_placeholder", reinstall_placeholder); root.SetBoolKey("require_manifest", require_manifest); root.SetBoolKey("run_on_os_login", run_on_os_login); root.SetKey("service_worker_registration_url", service_worker_registration_url ? base::Value(service_worker_registration_url->spec()) : base::Value()); root.SetKey("system_app_type", system_app_type ? 
base::Value(static_cast<int>(*system_app_type)) : base::Value()); root.SetKey("uninstall_and_replace", ConvertStringList(uninstall_and_replace)); root.SetStringKey("user_display_mode", blink::DisplayModeToString(user_display_mode)); root.SetKey("user_type_allowlist", ConvertStringList(user_type_allowlist)); root.SetBoolKey("wait_for_windows_closed", wait_for_windows_closed); return root; } WebAppInstallParams ConvertExternalInstallOptionsToParams( const ExternalInstallOptions& install_options) { WebAppInstallParams params; params.force_reinstall = install_options.force_reinstall; params.user_display_mode = install_options.user_display_mode; if (install_options.fallback_app_name.has_value()) { params.fallback_app_name = base::UTF8ToUTF16(install_options.fallback_app_name.value()); } params.fallback_start_url = install_options.install_url; params.add_to_applications_menu = install_options.add_to_applications_menu; params.add_to_desktop = install_options.add_to_desktop; params.add_to_quick_launch_bar = install_options.add_to_quick_launch_bar; params.run_on_os_login = install_options.run_on_os_login; params.add_to_search = install_options.add_to_search; params.add_to_management = install_options.add_to_management; params.is_disabled = install_options.is_disabled; params.bypass_service_worker_check = install_options.bypass_service_worker_check; params.require_manifest = install_options.require_manifest; params.additional_search_terms = install_options.additional_search_terms; params.launch_query_params = install_options.launch_query_params; params.system_app_type = install_options.system_app_type; params.oem_installed = install_options.oem_installed; return params; } } // namespace web_app
{
  "copyright_text": null,
  "description": "",
  "duration": 1377,
  "language": "eng",
  "recorded": "2019-11-02",
  "related_urls": [
    {
      "label": "Group web",
      "url": "https://2019.northbaypython.org/"
    },
    {
      "label": "Group meeting schedule",
      "url": "https://2019.northbaypython.org/schedule/"
    }
  ],
  "speakers": [],
  "tags": [],
  "thumbnail_url": "https://i.ytimg.com/vi/FVqy8LksHqU/maxresdefault.jpg",
  "title": "Script Layering for large-scale number crunching in Python",
  "videos": [
    {
      "type": "youtube",
      "url": "https://www.youtube.com/watch?v=FVqy8LksHqU"
    }
  ]
}
import sys

import boto3
import botocore

from src.helper import Helper


class AirflowCleanup:
    def __init__(self, logging, whitelist, settings, execution_log, region):
        self.logging = logging
        self.whitelist = whitelist
        self.settings = settings
        self.execution_log = execution_log
        self.region = region

        self._client_airflow = None
        self.is_dry_run = Helper.get_setting(self.settings, "general.dry_run", True)

    @property
    def client_airflow(self):
        if not self._client_airflow:
            self._client_airflow = boto3.client("mwaa", region_name=self.region)
        return self._client_airflow

    def run(self):
        self.environments()

    def environments(self):
        """
        Deletes Airflow Environments.
        """
        self.logging.debug("Started cleanup of Airflow Environments.")

        is_cleaning_enabled = Helper.get_setting(
            self.settings, "services.airflow.environment.clean", False
        )
        resource_maximum_age = Helper.get_setting(
            self.settings, "services.airflow.environment.ttl", 7
        )
        resource_whitelist = Helper.get_whitelist(self.whitelist, "airflow.environment")

        if is_cleaning_enabled:
            try:
                paginator = self.client_airflow.get_paginator("list_environments")
                resources = paginator.paginate().build_full_result().get("Environments")
            except botocore.exceptions.EndpointConnectionError:
                self.logging.debug(f"Airflow is not enabled in region '{self.region}'.")
                return False
            except:
                self.logging.error("Could not list all Airflow Environments.")
                self.logging.error(sys.exc_info()[1])
                return False

            for resource in resources:
                try:
                    resource_details = self.client_airflow.get_environment(
                        Name=resource
                    ).get("Environment")
                except:
                    self.logging.error(
                        f"Could not get Airflow Environment's '{resource}' details."
                    )
                    self.logging.error(sys.exc_info()[1])
                    resource_action = "ERROR"
                else:
                    resource_date = resource_details.get("CreatedAt")
                    resource_age = Helper.get_day_delta(resource_date).days
                    resource_action = None

                    if resource not in resource_whitelist:
                        if resource_age > resource_maximum_age:
                            try:
                                if not self.is_dry_run:
                                    self.client_airflow.delete_environment(Name=resource)
                            except:
                                self.logging.error(
                                    f"Could not delete Airflow Environment '{resource}'."
                                )
                                self.logging.error(sys.exc_info()[1])
                                resource_action = "ERROR"
                            else:
                                self.logging.info(
                                    f"Airflow Environment '{resource}' was created {resource_age} days ago "
                                    "and has been deleted."
                                )
                                resource_action = "DELETE"
                        else:
                            self.logging.debug(
                                f"Airflow Environment '{resource}' was created {resource_age} days ago "
                                "(less than TTL setting) and has not been deleted."
                            )
                            resource_action = "SKIP - TTL"
                    else:
                        self.logging.debug(
                            f"Airflow Environment '{resource}' has been whitelisted and has not "
                            "been deleted."
                        )
                        resource_action = "SKIP - WHITELIST"

                Helper.record_execution_log_action(
                    self.execution_log,
                    self.region,
                    "Airflow",
                    "Environment",
                    resource,
                    resource_action,
                )

            self.logging.debug("Finished cleanup of Airflow Environments.")
            return True
        else:
            self.logging.info("Skipping cleanup of Airflow Environments.")
            return True
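# A hedged sketch of driving AirflowCleanup directly. The nested settings
# layout mirrors the dotted paths the class reads via Helper.get_setting, but
# the exact Helper semantics and the whitelist shape are assumptions here.
import logging

settings = {
    "general": {"dry_run": True},  # keep True while testing: nothing is deleted
    "services": {"airflow": {"environment": {"clean": True, "ttl": 7}}},
}
whitelist = {"airflow": {"environment": []}}  # assumed shape for Helper.get_whitelist
execution_log = {}

cleaner = AirflowCleanup(
    logging=logging.getLogger("auto-cleanup"),
    whitelist=whitelist,
    settings=settings,
    execution_log=execution_log,
    region="us-east-1",
)
cleaner.run()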
# Generated by Django 2.2 on 2019-06-11 12:58

from django.db import migrations, models


def add_toplevel(apps, schema_editor):
    # We can't import the Post model directly as it may be a newer
    # version than this migration expects. We use the historical version.
    Post = apps.get_model('forum', 'Post')
    Post.objects.filter(type__in=(0, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12)).update(is_toplevel=True)


class Migration(migrations.Migration):

    dependencies = [
        ('forum', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='post',
            name='is_toplevel',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='subscription',
            name='type',
            field=models.IntegerField(choices=[(3, 'Default'), (1, 'Email'), (0, 'Local Messages'), (2, 'No messages')], default=0),
        ),
        migrations.RunPython(add_toplevel),
    ]
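# One hedged refinement to the migration above: as written,
# RunPython(add_toplevel) has no reverse function, so the migration cannot be
# unapplied. Since rolling back the preceding AddField drops the is_toplevel
# column anyway, a no-op reverse step is usually enough. Hypothetical variant:
from django.db import migrations

operations_reversible = [
    # Forward step unchanged; reverse step is a no-op because the data this
    # migration fills in disappears with the column on rollback.
    migrations.RunPython(add_toplevel, migrations.RunPython.noop),
]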
<reponame>dumpmemory/transformer-deploy<gh_stars>100-1000 #!/usr/bin/env python3 # Copyright 2022, <NAME> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ This module contains code related to client interface. """ import argparse import gc import logging import os from pathlib import Path from typing import Callable, Dict, List, Tuple, Type, Union import numpy as np import torch from transformers import ( AutoConfig, AutoModelForCausalLM, AutoModelForSequenceClassification, AutoModelForTokenClassification, AutoTokenizer, PretrainedConfig, PreTrainedModel, PreTrainedTokenizer, ) from transformer_deploy.backends.ort_utils import ( cpu_quantization, create_model_for_provider, inference_onnx_binding, optimize_onnx, ) from transformer_deploy.backends.pytorch_utils import ( convert_to_onnx, get_model_size, infer_classification_pytorch, infer_feature_extraction_pytorch, ) from transformer_deploy.backends.st_utils import STransformerWrapper, load_sentence_transformers from transformer_deploy.benchmarks.utils import ( compare_outputs, generate_multiple_inputs, print_timings, setup_logging, to_numpy, track_infer_time, ) from transformer_deploy.triton.configuration import Configuration, EngineType from transformer_deploy.triton.configuration_decoder import ConfigurationDec from transformer_deploy.triton.configuration_encoder import ConfigurationEnc from transformer_deploy.triton.configuration_token_classifier import ConfigurationTokenClassifier from transformer_deploy.utils.args import parse_args def check_accuracy( engine_name: str, pytorch_output: List[torch.Tensor], engine_output: List[Union[np.ndarray, torch.Tensor]], tolerance: float, ) -> None: """ Compare engine predictions with a reference. Assert that the difference is under a threshold. :param engine_name: string used in error message, if any :param pytorch_output: reference output used for the comparaison :param engine_output: output from the engine :param tolerance: if difference in outputs is above threshold, an error will be raised """ pytorch_output = to_numpy(pytorch_output) engine_output = to_numpy(engine_output) discrepency = compare_outputs(pytorch_output=pytorch_output, engine_output=engine_output) assert discrepency <= tolerance, ( f"{engine_name} discrepency is too high ({discrepency:.2f} >= {tolerance}):\n" f"Pythorch:\n{pytorch_output}\n" f"VS\n" f"Engine:\n{engine_output}\n" f"Diff:\n" f"{torch.asarray(pytorch_output) - torch.asarray(engine_output)}\n" "Tolerance can be increased with --atol parameter." ) def launch_inference( infer: Callable, inputs: List[Dict[str, Union[np.ndarray, torch.Tensor]]], nb_measures: int ) -> Tuple[List[Union[np.ndarray, torch.Tensor]], List[float]]: """ Perform inference and measure latency. 
:param infer: a lambda which will perform the inference :param inputs: tensor compatible with the lambda (Torch tensor for Pytorch, or numpy otherwise) :param nb_measures: number of measures to perform for the latency measure :return: a tuple of model output and inference latencies """ assert type(inputs) == list assert len(inputs) > 0 outputs = list() for batch_input in inputs: output = infer(batch_input) outputs.append(output) time_buffer: List[int] = list() for _ in range(nb_measures): with track_infer_time(time_buffer): _ = infer(inputs[0]) return outputs, time_buffer def get_triton_output_shape(output: torch.Tensor, task: str) -> List[int]: triton_output_shape = list(output.shape) triton_output_shape[0] = -1 # dynamic batch size if task in ["text-generation", "token-classification"]: triton_output_shape[1] = -1 # dynamic sequence size return triton_output_shape def main(commands: argparse.Namespace): setup_logging(level=logging.INFO if commands.verbose else logging.WARNING) logging.info("running with commands: %s", commands) if commands.device == "cpu" and "tensorrt" in commands.backend: raise Exception("can't perform inference on CPU and use Nvidia TensorRT as backend") if len(commands.seq_len) == len(set(commands.seq_len)) and "tensorrt" in commands.backend: logging.warning("having different sequence lengths may make TensorRT slower") torch.manual_seed(commands.seed) np.random.seed(commands.seed) torch.set_num_threads(commands.nb_threads) if commands.device is None: commands.device = "cuda" if torch.cuda.is_available() else "cpu" if isinstance(commands.auth_token, str) and commands.auth_token.lower() in ["true", "t"]: auth_token = True elif isinstance(commands.auth_token, str): auth_token = commands.auth_token else: auth_token = None run_on_cuda: bool = commands.device.startswith("cuda") Path(commands.output).mkdir(parents=True, exist_ok=True) onnx_model_path = os.path.join(commands.output, "model-original.onnx") onnx_optim_model_path = os.path.join(commands.output, "model.onnx") tensorrt_path = os.path.join(commands.output, "model.plan") if run_on_cuda: assert torch.cuda.is_available(), "CUDA/GPU is not available on Pytorch. 
Please check your CUDA installation" tokenizer_path = commands.tokenizer if commands.tokenizer else commands.model tokenizer: PreTrainedTokenizer = AutoTokenizer.from_pretrained(tokenizer_path, use_auth_token=auth_token) model_config: PretrainedConfig = AutoConfig.from_pretrained(pretrained_model_name_or_path=commands.model) input_names: List[str] = tokenizer.model_input_names if commands.task == "embedding": model_pytorch: Union[PreTrainedModel, STransformerWrapper] = load_sentence_transformers(commands.model) elif commands.task == "classification": model_pytorch = AutoModelForSequenceClassification.from_pretrained(commands.model, use_auth_token=auth_token) elif commands.task == "token-classification": model_pytorch = AutoModelForTokenClassification.from_pretrained(commands.model, use_auth_token=auth_token) elif commands.task == "text-generation": model_pytorch = AutoModelForCausalLM.from_pretrained(commands.model, use_auth_token=auth_token) input_names = ["input_ids"] else: raise Exception(f"unknown task: {commands.task}") logging.info(f"axis: {input_names}") model_pytorch.eval() if run_on_cuda: model_pytorch.cuda() tensor_shapes = list(zip(commands.batch_size, commands.seq_len)) # take optimial size inputs_pytorch = generate_multiple_inputs( batch_size=tensor_shapes[1][0], seq_len=tensor_shapes[1][1], input_names=input_names, device=commands.device, nb_inputs_to_gen=commands.warmup, ) # create onnx model and compare results convert_to_onnx( model_pytorch=model_pytorch, output_path=onnx_model_path, inputs_pytorch=inputs_pytorch[0], quantization=commands.quantization, var_output_seq=commands.task in ["text-generation", "token-classification"], ) timings = {} def get_pytorch_infer(model: PreTrainedModel, cuda: bool, task: str): if task in ["classification", "text-generation", "token-classification"]: return infer_classification_pytorch(model=model, run_on_cuda=cuda) if task == "embedding": return infer_feature_extraction_pytorch(model=model, run_on_cuda=cuda) raise Exception(f"unknown task: {task}") with torch.inference_mode(): logging.info("running Pytorch (FP32) benchmark") pytorch_output, time_buffer = launch_inference( infer=get_pytorch_infer(model=model_pytorch, cuda=run_on_cuda, task=commands.task), inputs=inputs_pytorch, nb_measures=commands.nb_measures, ) if commands.task == "text-generation": conf_class: Type[Configuration] = ConfigurationDec elif commands.task == "token-classification": conf_class: Type[Configuration] = ConfigurationTokenClassifier else: conf_class = ConfigurationEnc triton_conf = conf_class( model_name_base=commands.name, dim_output=get_triton_output_shape(output=pytorch_output[0], task=commands.task), nb_instance=commands.nb_instances, tensor_input_names=input_names, working_directory=commands.output, device=commands.device, ) timings["Pytorch (FP32)"] = time_buffer if run_on_cuda and not commands.fast: from torch.cuda.amp import autocast with autocast(): engine_name = "Pytorch (FP16)" logging.info("running Pytorch (FP16) benchmark") pytorch_fp16_output, time_buffer = launch_inference( infer=get_pytorch_infer(model=model_pytorch, cuda=run_on_cuda, task=commands.task), inputs=inputs_pytorch, nb_measures=commands.nb_measures, ) check_accuracy( engine_name=engine_name, pytorch_output=pytorch_output, engine_output=pytorch_fp16_output, tolerance=commands.atol, ) timings[engine_name] = time_buffer elif commands.device == "cpu": logging.info("preparing Pytorch (INT-8) benchmark") model_pytorch = torch.quantization.quantize_dynamic(model_pytorch, {torch.nn.Linear}, 
dtype=torch.qint8) engine_name = "Pytorch (INT-8)" logging.info("running Pytorch (FP32) benchmark") pytorch_int8_output, time_buffer = launch_inference( infer=get_pytorch_infer(model=model_pytorch, cuda=run_on_cuda, task=commands.task), inputs=inputs_pytorch, nb_measures=commands.nb_measures, ) check_accuracy( engine_name=engine_name, pytorch_output=pytorch_output, engine_output=pytorch_int8_output, tolerance=commands.atol, ) timings[engine_name] = time_buffer model_pytorch.cpu() logging.info("cleaning up") if run_on_cuda: torch.cuda.empty_cache() gc.collect() if "tensorrt" in commands.backend: logging.info("preparing TensorRT (FP16) benchmark") try: import tensorrt as trt from tensorrt.tensorrt import ICudaEngine, Logger, Runtime from transformer_deploy.backends.trt_utils import build_engine, load_engine, save_engine except ImportError: raise ImportError( "It seems that TensorRT is not yet installed. " "It is required when you declare TensorRT backend." "Please find installation instruction on " "https://docs.nvidia.com/deeplearning/tensorrt/install-guide/index.html" ) trt_logger: Logger = trt.Logger(trt.Logger.INFO if commands.verbose else trt.Logger.WARNING) runtime: Runtime = trt.Runtime(trt_logger) engine: ICudaEngine = build_engine( runtime=runtime, onnx_file_path=onnx_model_path, logger=trt_logger, min_shape=tensor_shapes[0], optimal_shape=tensor_shapes[1], max_shape=tensor_shapes[2], workspace_size=commands.workspace_size * 1024 * 1024, fp16=not commands.quantization, int8=commands.quantization, ) save_engine(engine=engine, engine_file_path=tensorrt_path) # important to check the engine has been correctly serialized tensorrt_model: Callable[[Dict[str, torch.Tensor]], torch.Tensor] = load_engine( runtime=runtime, engine_file_path=tensorrt_path ) logging.info("running TensorRT (FP16) benchmark") engine_name = "TensorRT (FP16)" tensorrt_output, time_buffer = launch_inference( infer=tensorrt_model, inputs=inputs_pytorch, nb_measures=commands.nb_measures ) check_accuracy( engine_name=engine_name, pytorch_output=pytorch_output, engine_output=tensorrt_output, tolerance=commands.atol, ) timings[engine_name] = time_buffer del engine, tensorrt_model, runtime # delete all tensorrt objects gc.collect() triton_conf.create_configs( tokenizer=tokenizer, model_path=tensorrt_path, config=model_config, engine_type=EngineType.TensorRT ) if "onnx" in commands.backend: num_attention_heads, hidden_size = get_model_size(path=commands.model) # create optimized onnx model and compare results optimize_onnx( onnx_path=onnx_model_path, onnx_optim_model_path=onnx_optim_model_path, fp16=run_on_cuda, use_cuda=run_on_cuda, num_attention_heads=num_attention_heads, hidden_size=hidden_size, ) if commands.device == "cpu" and commands.quantization: cpu_quantization(input_model_path=onnx_optim_model_path, output_model_path=onnx_optim_model_path) ort_provider = "CUDAExecutionProvider" if run_on_cuda else "CPUExecutionProvider" for provider, model_path, benchmark_name in [ (ort_provider, onnx_model_path, "ONNX Runtime (FP32)"), (ort_provider, onnx_optim_model_path, "ONNX Runtime (optimized)"), ]: logging.info("preparing %s benchmark", benchmark_name) ort_model = create_model_for_provider( path=model_path, provider_to_use=provider, nb_threads=commands.nb_threads, nb_instances=commands.nb_instances, ) def infer_ort(inputs: Dict[str, torch.Tensor]) -> torch.Tensor: results = inference_onnx_binding(model_onnx=ort_model, inputs=inputs, device=commands.device) return results["output"] logging.info("running %s benchmark", 
benchmark_name) ort_output, time_buffer = launch_inference( infer=infer_ort, inputs=inputs_pytorch, nb_measures=commands.nb_measures ) check_accuracy( engine_name=benchmark_name, pytorch_output=pytorch_output, engine_output=ort_output, tolerance=commands.atol, ) timings[benchmark_name] = time_buffer del ort_model gc.collect() triton_conf.create_configs( tokenizer=tokenizer, model_path=onnx_optim_model_path, config=model_config, engine_type=EngineType.ONNX, ) if run_on_cuda: from torch.cuda import get_device_name print(f"Inference done on {get_device_name(0)}") print("latencies:") for name, time_buffer in timings.items(): print_timings(name=name, timings=time_buffer) print(f"Each infence engine output is within {commands.atol} tolerance compared to Pytorch output") def entrypoint(): args = parse_args() main(commands=args) if __name__ == "__main__": entrypoint()
{"name":{"common":"Zimbabwe","official":"Republic of Zimbabwe","native":{"bwg":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"eng":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"kck":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"khi":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"ndc":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"nde":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"nya":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"sna":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"sot":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"toi":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"tsn":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"tso":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"ven":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"xho":{"common":"Zimbabwe","official":"Republic of Zimbabwe"},"zib":{"common":"Zimbabwe","official":"Republic of Zimbabwe"}}},"demonym":"Zimbabwean","capital":"Harare","iso_3166_1_alpha2":"ZW","iso_3166_1_alpha3":"ZWE","iso_3166_1_numeric":"716","currency":{"ZWL":{"iso_4217_code":"ZWL","iso_4217_numeric":932,"iso_4217_name":"Zimbabwe Dollar","iso_4217_minor_unit":2}},"tld":[".zw"],"alt_spellings":["ZW","Republic of Zimbabwe"],"languages":{"bwg":"Chibarwe","eng":"English","kck":"Kalanga","khi":"Khoisan","ndc":"Ndau","nde":"Northern Ndebele","nya":"Chewa","sna":"Shona","sot":"Sotho","toi":"Tonga","tsn":"Tswana","tso":"Tsonga","ven":"Venda","xho":"Xhosa","zib":"Zimbabwean Sign Language"},"geo":{"continent":{"AF":"Africa"},"postal_code":false,"latitude":"20 00 S","latitude_desc":"-19.000280380249023","longitude":"30 00 E","longitude_desc":"29.86876106262207","max_latitude":"-15.6","max_longitude":"33.05","min_latitude":"-22.316667","min_longitude":"25.333333","area":390757,"region":"Africa","subregion":"Eastern Africa","world_region":"EMEA","region_code":"002","subregion_code":"014","landlocked":true,"borders":["BWA","MOZ","ZAF","ZMB"],"independent":"Yes"},"dialling":{"calling_code":["263"],"national_prefix":"0","national_number_lengths":[8,9,10,11],"national_destination_code_lengths":[2],"international_prefix":"00"},"extra":{"geonameid":878675,"edgar":"Y5","itu":"ZWE","marc":"rh","wmo":"ZW","ds":"ZW","fifa":"ZIM","fips":"ZI","gaul":271,"ioc":"ZIM","cowc":"ZIM","cown":552,"fao":181,"imf":698,"ar5":"MAF","address_format":null,"eu_member":null,"data_protection":"Other","vat_rates":null,"emoji":"🇿🇼"}}
import FWCore.ParameterSet.Config as cms

from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
from GeneratorInterface.ExternalDecays.TauolaSettings_cff import *

generator = cms.EDFilter("Pythia8HadronizerFilter",
    maxEventsToPrint = cms.untracked.int32(1),
    nAttempts = cms.uint32(1000),
    HepMCFilter = cms.PSet(
        filterName = cms.string('EmbeddingHepMCFilter'),
        filterParameters = cms.PSet(
            ElElCut = cms.string('El1.Pt > 22 && El2.Pt > 10'),
            ElHadCut = cms.string('El.Pt > 28 && Had.Pt > 25'),
            ElMuCut = cms.string('(El.Pt > 21 && Mu.Pt > 10) || (El.Pt > 10 && Mu.Pt > 21)'),
            HadHadCut = cms.string('Had1.Pt > 35 && Had2.Pt > 30'),
            MuHadCut = cms.string('Mu.Pt > 18 && Had.Pt > 25 && Mu.Eta < 2.1'),
            MuMuCut = cms.string('Mu1.Pt > 17 && Mu2.Pt > 8'),
            Final_States = cms.vstring('ElEl', 'ElHad', 'ElMu', 'HadHad', 'MuHad', 'MuMu'),
            BosonPDGID = cms.int32(23)
        ),
    ),
    pythiaPylistVerbosity = cms.untracked.int32(0),
    filterEfficiency = cms.untracked.double(1.0),
    pythiaHepMCVerbosity = cms.untracked.bool(False),
    comEnergy = cms.double(13000.),
    crossSection = cms.untracked.double(1.0),
    PythiaParameters = cms.PSet(
        pythia8CommonSettingsBlock,
        pythia8CUEP8M1SettingsBlock,
        processParameters = cms.vstring(
            'JetMatching:merge = off',
            'Init:showChangedSettings = off',
            'Init:showChangedParticleData = off'
        ),
        parameterSets = cms.vstring('pythia8CommonSettings',
                                    'pythia8CUEP8M1Settings',
                                    'processParameters'
        )
    )
)
/** * Copyright Microsoft Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.microsoft.windowsazure.services.blob; import java.io.InputStream; import com.microsoft.windowsazure.core.pipeline.jersey.JerseyFilterableService; import com.microsoft.windowsazure.exception.ServiceException; import com.microsoft.windowsazure.services.blob.models.BlockList; import com.microsoft.windowsazure.services.blob.models.CommitBlobBlocksOptions; import com.microsoft.windowsazure.services.blob.models.CreateBlobBlockOptions; import com.microsoft.windowsazure.services.blob.models.CreateBlobOptions; import com.microsoft.windowsazure.services.blob.models.CreateBlobResult; import com.microsoft.windowsazure.services.blob.models.CreateContainerOptions; import com.microsoft.windowsazure.services.blob.models.DeleteBlobOptions; import com.microsoft.windowsazure.services.blob.models.DeleteContainerOptions; import com.microsoft.windowsazure.services.blob.models.GetBlobOptions; import com.microsoft.windowsazure.services.blob.models.GetBlobPropertiesOptions; import com.microsoft.windowsazure.services.blob.models.GetBlobPropertiesResult; import com.microsoft.windowsazure.services.blob.models.GetBlobResult; import com.microsoft.windowsazure.services.blob.models.ListBlobBlocksOptions; import com.microsoft.windowsazure.services.blob.models.ListBlobBlocksResult; import com.microsoft.windowsazure.services.blob.models.ListContainersOptions; import com.microsoft.windowsazure.services.blob.models.ListContainersResult; /** * Defines the methods available on the Windows Azure blob storage service. * Construct an object instance implementing <code>BlobContract</code> with one * of the static <em>create</em> methods on {@link BlobService}. These methods * associate a <code>Configuration</code> with the implementation, so the * methods on the instance of <code>BlobContract</code> all work with a * particular storage account. */ public interface BlobContract extends JerseyFilterableService<BlobContract> { /** * Marks a blob for deletion. * <p> * This method marks the properties, metadata, and content of the blob * specified by the <em>blob</em> and <em>container</em> parameters for * deletion. * <p> * When a blob is successfully deleted, it is immediately removed from the * storage account's index and is no longer accessible to clients. The * blob's data is later removed from the service during garbage collection. * <p> * Note that in order to delete a blob, you must delete all of its * snapshots. You can delete an individual snapshot, only the snapshots, or * both the blob and its snapshots with the * {@link #deleteBlob(String, String, DeleteBlobOptions)} method. * * @param container * A {@link String} containing the name of the blob's container. * @param blob * A {@link String} containing the name of the blob to delete. * @throws ServiceException * if an error occurs accessing the storage service. 
*/ void deleteBlob(String container, String blob) throws ServiceException; /** * Marks a blob or snapshot for deletion, using the specified options. * <p> * This method marks the properties, metadata, and content of the blob or * snapshot specified by the <em>blob</em> and <em>container</em> parameters * for deletion. Use the {@link DeleteBlobOptions options} parameter to set * an optional server timeout for the operation, a snapshot timestamp to * specify an individual snapshot to delete, a blob lease ID to delete a * blob with an active lease, a flag indicating whether to delete all * snapshots but not the blob, or both the blob and all snapshots, and any * access conditions to satisfy. * <p> * When a blob is successfully deleted, it is immediately removed from the * storage account's index and is no longer accessible to clients. The * blob's data is later removed from the service during garbage collection. * <p> * If the blob has an active lease, the client must specify a valid lease ID * in the <em>options</em> parameter in order to delete it. * <p> * If a blob has a large number of snapshots, it's possible that the delete * blob operation will time out. If this happens, the client should retry * the request. * * * @param container * A {@link String} containing the name of the blob's container. * @param blob * A {@link String} containing the name of the blob to delete. * @param options * A {@link DeleteBlobOptions} instance containing options for * the request. * @throws ServiceException * if an error occurs accessing the storage service. */ void deleteBlob(String container, String blob, DeleteBlobOptions options) throws ServiceException; /** * Gets a list of the containers in the blob storage account. * * @return A {@link ListContainersResult} reference to the result of the * list containers operation. * @throws ServiceException * if an error occurs accessing the storage service. */ ListContainersResult listContainers() throws ServiceException; /** * Gets a list of the containers in the blob storage account using the * specified options. * <p> * Use the {@link ListContainersOptions options} parameter to specify * options, including a server response timeout for the request, a container * name prefix filter, a marker for continuing requests, the maximum number * of results to return in a request, and whether to include container * metadata in the results. * * @param options * A {@link ListContainersOptions} instance containing options * for the request. * @return A {@link ListContainersResult} reference to the result of the * list containers operation. * @throws ServiceException * if an error occurs accessing the storage service. */ ListContainersResult listContainers(ListContainersOptions options) throws ServiceException; /** * Creates a container with the specified name. * <p> * Container names must be unique within a storage account, and must follow * the naming rules specified in <a href= * "http://msdn.microsoft.com/en-us/library/windowsazure/dd135715.aspx" * >Naming and Referencing Containers, Blobs, and Metadata</a>. * * @param container * A {@link String} containing the name of the container to * create. * @throws ServiceException * if an error occurs accessing the storage service. */ void createContainer(String container) throws ServiceException; /** * Creates a container with the specified name, using the specified options. 
* <p> * Use the {@link CreateContainerOptions options} parameter to specify * options, including a server response timeout for the request, metadata to * set on the container, and the public access level for container and blob * data. Container names must be unique within a storage account, and must * follow the naming rules specified in <a href= * "http://msdn.microsoft.com/en-us/library/windowsazure/dd135715.aspx" * >Naming and Referencing Containers, Blobs, and Metadata</a>. * * @param container * A {@link String} containing the name of the container to * create. * @param options * A {@link CreateContainerOptions} instance containing options * for the request. * @throws ServiceException * if an error occurs accessing the storage service. */ void createContainer(String container, CreateContainerOptions options) throws ServiceException; /** * Marks a container for deletion. The container and any blobs contained * within it are later deleted during garbage collection. * <p> * When a container is deleted, a container with the same name cannot be * created for at least 30 seconds; the container may not be available for * more than 30 seconds if the service is still processing the request. * * @param container * A {@link String} containing the name of the container to * delete. * @throws ServiceException * if an error occurs accessing the storage service. */ void deleteContainer(String container) throws ServiceException; /** * Marks a container for deletion, using the specified options. The * container and any blobs contained within it are later deleted during * garbage collection. * <p> * Use the {@link DeleteContainerOptions options} parameter to specify the * server response timeout and any access conditions for the container * deletion operation. Access conditions can be used to make the operation * conditional on the value of the Etag or last modified time of the * container. * <p> * When a container is deleted, a container with the same name cannot be * created for at least 30 seconds; the container may not be available for * more than 30 seconds if the service is still processing the request. * * @param container * A {@link String} containing the name of the container to * delete. * @param options * A {@link DeleteContainerOptions} instance containing options * for the request. * @throws ServiceException * if an error occurs accessing the storage service. */ void deleteContainer(String container, DeleteContainerOptions options) throws ServiceException; /** * Creates a block blob from a content stream. * * @param container * A {@link String} containing the name of the container to * create the blob in. * @param blob * A {@link String} containing the name of the blob to create. A * blob name can contain any combination of characters, but * reserved URL characters must be properly escaped. A blob name * must be at least one character long and cannot be more than * 1,024 characters long, and must be unique within the * container. * @param contentStream * An {@link InputStream} reference to the content stream to * upload to the new blob. * @throws ServiceException * if an error occurs accessing the storage service. */ CreateBlobResult createBlockBlob(String container, String blob, InputStream contentStream) throws ServiceException; /** * Creates a block blob from a content stream, using the specified options. 
* <p>
     * Use the {@link CreateBlobOptions options} parameter to optionally specify
     * the server timeout for the operation, the MIME content type and content
     * encoding for the blob, the content language, the MD5 hash, a cache
     * control value, and blob metadata.
     *
     * @param container
     *            A {@link String} containing the name of the container to
     *            create the blob in.
     * @param blob
     *            A {@link String} containing the name of the blob to create. A
     *            blob name can contain any combination of characters, but
     *            reserved URL characters must be properly escaped. A blob name
     *            must be at least one character long and cannot be more than
     *            1,024 characters long, and must be unique within the
     *            container.
     * @param contentStream
     *            An {@link InputStream} reference to the content to upload to
     *            the new blob.
     * @param options
     *            A {@link CreateBlobOptions} instance containing options for
     *            the request.
     * @throws ServiceException
     *             if an error occurs accessing the storage service.
     */
    CreateBlobResult createBlockBlob(String container, String blob,
            InputStream contentStream, CreateBlobOptions options)
            throws ServiceException;

    /**
     * Creates a new uncommitted block from a content stream.
     * <p>
     * This method creates an uncommitted block for a block blob specified by
     * the <em>blob</em> and <em>container</em> parameters. The <em>blockId</em>
     * parameter is a client-specified ID for the block, which must be less than
     * or equal to 64 bytes in size. For a given blob, the length of the value
     * specified for the <em>blockId</em> parameter must be the same size for
     * each block. The <em>contentStream</em> parameter specifies the content to
     * be copied to the block. The content for the block must be less than or
     * equal to 4 MB in size.
     * <p>
     * To create or update a block blob, the blocks that have been successfully
     * written to the server with this method must be committed using a call to
     * {@link com.microsoft.windowsazure.services.blob.BlobContract#commitBlobBlocks(String, String, BlockList)} or
     * {@link com.microsoft.windowsazure.services.blob.BlobContract#commitBlobBlocks(String, String, BlockList, CommitBlobBlocksOptions)}.
     *
     *
     * @param container
     *            A {@link String} containing the name of the blob's container.
     * @param blob
     *            A {@link String} containing the name of the blob to create the
     *            block for.
     * @param blockId
     *            A {@link String} containing a client-specified ID for the
     *            block.
     * @param contentStream
     *            An {@link InputStream} reference to the content to copy to the
     *            block.
     * @throws ServiceException
     *             if an error occurs accessing the storage service.
     */
    void createBlobBlock(String container, String blob, String blockId,
            InputStream contentStream) throws ServiceException;

    /**
     * Creates a new uncommitted block from a content stream, using the
     * specified options.
     * <p>
     * This method creates an uncommitted block for a block blob specified by
     * the <em>blob</em> and <em>container</em> parameters. The <em>blockId</em>
     * parameter is a client-specified ID for the block, which must be less than
     * or equal to 64 bytes in size. For a given blob, the length of the value
     * specified for the <em>blockId</em> parameter must be the same size for
     * each block. The <em>contentStream</em> parameter specifies the content to
     * be copied to the block. The content for the block must be less than or
     * equal to 4 MB in size. Use the {@link CreateBlobBlockOptions options}
     * parameter to optionally specify the server timeout for the operation, the
     * lease ID if the blob has an active lease, and the MD5 hash value for the
     * block content.
* <p> * To create or update a block blob, the blocks that have been successfully * written to the server with this method must be committed using a call to * {@link com.microsoft.windowsazure.services.blob.BlobContract#commitBlobBlocks(String, String, BlockList)} or * {@link com.microsoft.windowsazure.services.blob.BlobContract#commitBlobBlocks(String, String, BlockList, CommitBlobBlocksOptions)}. * * @param container * A {@link String} containing the name of the blob's container. * @param blob * A {@link String} containing the name of the blob to create the * block for. * @param blockId * A {@link String} containing a client-specified ID for the * block. * @param contentStream * An {@link InputStream} reference to the content to copy to the * block. * @param options * A {@link CreateBlobBlockOptions} instance containing options * for the request. * @throws ServiceException * if an error occurs accessing the storage service. */ void createBlobBlock(String container, String blob, String blockId, InputStream contentStream, CreateBlobBlockOptions options) throws ServiceException; /** * Commits a list of blocks to a block blob. * <p> * This method creates or updates the block blob specified by the * <em>blob</em> and <em>container</em> parameters. You can call this method * to update a blob by uploading only those blocks that have changed, then * committing the new and existing blocks together. You can do this with the * <em>blockList</em> parameter by specifying whether to commit a block from * the committed block list or from the uncommitted block list, or to commit * the most recently uploaded version of the block, whichever list it may * belong to. * <p> * In order to be written as part of a blob, each block in the list must * have been successfully written to the server with a call to * {@link com.microsoft.windowsazure.services.blob.BlobContract#createBlobBlock(String, String, String, InputStream)} * or * {@link com.microsoft.windowsazure.services.blob.BlobContract#createBlobBlock(String, String, String, InputStream, CreateBlobBlockOptions)}. * * @param container * A {@link String} containing the name of the blob's container. * @param blob * A {@link String} containing the name of the block blob to * create or update. * @param blockList * A {@link BlockList} containing the list of blocks to commit to * the block blob. * @throws ServiceException * if an error occurs accessing the storage service. */ void commitBlobBlocks(String container, String blob, BlockList blockList) throws ServiceException; /** * Commits a block list to a block blob, using the specified options. * <p> * This method creates or updates the block blob specified by the * <em>blob</em> and <em>container</em> parameters. You can call this method * to update a blob by uploading only those blocks that have changed, then * committing the new and existing blocks together. You can do this with the * <em>blockList</em> parameter by specifying whether to commit a block from * the committed block list or from the uncommitted block list, or to commit * the most recently uploaded version of the block, whichever list it may * belong to. Use the {@link CommitBlobBlocksOptions options} parameter to * optionally specify the server timeout for the operation, the MIME content * type and content encoding for the blob, the content language, the MD5 * hash, a cache control value, blob metadata, the lease ID if the blob has * an active lease, and any access conditions for the operation. 
* <p> * In order to be written as part of a blob, each block in the list must * have been successfully written to the server with a call to * {@link com.microsoft.windowsazure.services.blob.BlobContract#createBlobBlock(String, String, String, InputStream)} * or * {@link com.microsoft.windowsazure.services.blob.BlobContract#createBlobBlock(String, String, String, InputStream, CreateBlobBlockOptions)}. * * @param container * A {@link String} containing the name of the blob's container. * @param blob * A {@link String} containing the name of the block blob to * create or update. * @param blockList * A {@link BlockList} containing the list of blocks to commit to * the block blob. * @param options * A {@link CommitBlobBlocksOptions} instance containing options * for the request. * @throws ServiceException * if an error occurs accessing the storage service. */ void commitBlobBlocks(String container, String blob, BlockList blockList, CommitBlobBlocksOptions options) throws ServiceException; /** * Lists the blocks of a blob. * <p> * This method lists the committed blocks of the block blob specified by the * <em>blob</em> and <em>container</em> parameters. * * @param container * A {@link String} containing the name of the blob's container. * @param blob * A {@link String} containing the name of the block blob to * list. * @return A {@link ListBlobBlocksResult} instance containing the list of * blocks returned for the request. * @throws ServiceException * if an error occurs accessing the storage service. */ ListBlobBlocksResult listBlobBlocks(String container, String blob) throws ServiceException; /** * Lists the blocks of a blob, using the specified options. * <p> * This method lists the committed blocks, uncommitted blocks, or both, of * the block blob specified by the <em>blob</em> and <em>container</em> * parameters. Use the {@link ListBlobBlocksOptions options} parameter to * specify an optional server timeout for the operation, the lease ID if the * blob has an active lease, the snapshot timestamp to get the committed * blocks of a snapshot, whether to return the committed block list, and * whether to return the uncommitted block list. By default, only the * committed blocks of the blob are returned. * * @param container * A {@link String} containing the name of the blob's container. * @param blob * A {@link String} containing the name of the block blob to * list. * @param options * A {@link ListBlobBlocksOptions} instance containing options * for the request. * @return A {@link ListBlobBlocksResult} instance containing the list of * blocks returned for the request. * @throws ServiceException * if an error occurs accessing the storage service. */ ListBlobBlocksResult listBlobBlocks(String container, String blob, ListBlobBlocksOptions options) throws ServiceException; /** * Gets the properties of a blob. * <p> * This method lists the properties of the blob specified by the * <em>blob</em> and <em>container</em> parameters. * * @param container * A {@link String} containing the name of the blob's container. * @param blob * A {@link String} containing the name of the blob to get * properties for. * @return A {@link GetBlobPropertiesResult} instance containing the blob * properties returned for the request. * @throws ServiceException * if an error occurs accessing the storage service. */ GetBlobPropertiesResult getBlobProperties(String container, String blob) throws ServiceException; /** * Gets the properties of a blob, using the specified options. 
* <p> * This method lists the properties of the blob specified by the * <em>blob</em> and <em>container</em> parameters. Use the * {@link GetBlobPropertiesOptions options} parameter to set an optional * server timeout for the operation, the lease ID if the blob has an active * lease, the snapshot timestamp to get the properties of a snapshot, and * any access conditions for the request. * * @param container * A {@link String} containing the name of the blob's container. * @param blob * A {@link String} containing the name of the blob to get * properties for. * @param options * A {@link GetBlobPropertiesOptions} instance containing options * for the request. * @return A {@link GetBlobPropertiesResult} instance containing the blob * properties returned for the request. * @throws ServiceException * if an error occurs accessing the storage service. */ GetBlobPropertiesResult getBlobProperties(String container, String blob, GetBlobPropertiesOptions options) throws ServiceException; /** * Gets the properties, metadata, and content of a blob. * <p> * This method gets the properties, metadata, and content of the blob * specified by the <em>blob</em> and <em>container</em> parameters. * * @param container * A {@link String} containing the name of the blob's container. * @param blob * A {@link String} containing the name of the blob to get. * @return A {@link GetBlobResult} instance containing the properties, * metadata, and content of the blob from the server response to the * request. * @throws ServiceException * if an error occurs accessing the storage service. */ GetBlobResult getBlob(String container, String blob) throws ServiceException; /** * Gets the properties, metadata, and content of a blob or blob snapshot, * using the specified options. * <p> * This method gets the properties, metadata, and content of the blob * specified by the <em>blob</em> and <em>container</em> parameters. Use the * {@link GetBlobOptions options} parameter to set an optional server * timeout for the operation, a snapshot timestamp to specify a snapshot, a * blob lease ID to get a blob with an active lease, an optional start and * end range for blob content to return, and any access conditions to * satisfy. * * @param container * A {@link String} containing the name of the blob's container. * @param blob * A {@link String} containing the name of the blob to get. * @param options * A {@link GetBlobOptions} instance containing options for the * request. * @return A {@link GetBlobResult} instance containing the properties, * metadata, and content of the blob from the server response to the * request. * @throws ServiceException * if an error occurs accessing the storage service. */ GetBlobResult getBlob(String container, String blob, GetBlobOptions options) throws ServiceException; }
9,549
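The Javadoc above describes a two-phase upload protocol for block blobs: stage each chunk with createBlobBlock, then make the blob's content visible by committing the block IDs with commitBlobBlocks. The following is a minimal, hedged usage sketch of that flow, not part of the original file; the container and blob names are placeholders, and BlockList's addUncommittedEntry method is an assumption about the models package, which is not shown above.

// Hedged usage sketch of the two-phase block upload described in BlobContract.
// Assumes `service` was obtained from one of the static create methods on
// BlobService, and that BlockList exposes addUncommittedEntry(String) -- an
// assumption, since the models package is not shown above.
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

import com.microsoft.windowsazure.exception.ServiceException;
import com.microsoft.windowsazure.services.blob.BlobContract;
import com.microsoft.windowsazure.services.blob.models.BlockList;

public final class BlockUploadSketch {
    public static void upload(BlobContract service) throws ServiceException {
        service.createContainer("mycontainer");

        BlockList blockList = new BlockList();
        String[] chunks = {"hello, ", "world"};
        for (int i = 0; i < chunks.length; i++) {
            // Block IDs must be at most 64 bytes and the same length for every
            // block, so Base64-encode a fixed-width counter.
            String blockId = Base64.getEncoder()
                    .encodeToString(String.format("%05d", i).getBytes(StandardCharsets.UTF_8));
            service.createBlobBlock("mycontainer", "myblob", blockId,
                    new ByteArrayInputStream(chunks[i].getBytes(StandardCharsets.UTF_8)));
            blockList.addUncommittedEntry(blockId);  // assumed BlockList API
        }

        // The staged blocks only become the blob's content once committed.
        service.commitBlobBlocks("mycontainer", "myblob", blockList);
    }
}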
348
<filename>docs/data/leg-t2/077/07706490.json {"nom":"Vendrest","circ":"6ème circonscription","dpt":"Seine-et-Marne","inscrits":513,"abs":339,"votants":174,"blancs":22,"nuls":7,"exp":145,"res":[{"nuance":"LR","nom":"<NAME>","voix":96},{"nuance":"REM","nom":"<NAME>","voix":49}]}
115
2,151
<reponame>zipated/src<filename>chrome/browser/extensions/webstore_data_fetcher.cc
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/extensions/webstore_data_fetcher.h"

#include <utility>

#include "base/bind.h"
#include "base/metrics/field_trial_params.h"
#include "base/values.h"
#include "chrome/browser/extensions/webstore_data_fetcher_delegate.h"
#include "components/safe_browsing/features.h"
#include "content/public/browser/browser_context.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/storage_partition.h"
#include "content/public/common/service_manager_connection.h"
#include "extensions/common/extension_urls.h"
#include "net/base/load_flags.h"
#include "net/traffic_annotation/network_traffic_annotation.h"
#include "net/url_request/url_fetcher.h"
#include "net/url_request/url_request_status.h"
#include "services/data_decoder/public/cpp/safe_json_parser.h"
#include "services/network/public/cpp/resource_request.h"
#include "services/network/public/cpp/simple_url_loader.h"
#include "services/network/public/mojom/url_loader_factory.mojom.h"

namespace {

const char kInvalidWebstoreResponseError[] = "Invalid Chrome Web Store response";

}  // namespace

namespace extensions {

WebstoreDataFetcher::WebstoreDataFetcher(WebstoreDataFetcherDelegate* delegate,
                                         const GURL& referrer_url,
                                         const std::string webstore_item_id)
    : delegate_(delegate),
      referrer_url_(referrer_url),
      id_(webstore_item_id),
      max_auto_retries_(0) {}

WebstoreDataFetcher::~WebstoreDataFetcher() {}

void WebstoreDataFetcher::SetPostData(const std::string& data) {
  post_data_ = data;
}

void WebstoreDataFetcher::Start(
    network::mojom::URLLoaderFactory* url_loader_factory) {
  GURL webstore_data_url(extension_urls::GetWebstoreItemJsonDataURL(id_));
  net::NetworkTrafficAnnotationTag traffic_annotation =
      net::DefineNetworkTrafficAnnotation("webstore_data_fetcher", R"(
        semantics {
          sender: "Webstore Data Fetcher"
          description:
            "Fetches metadata about an extension from the Chrome Web Store."
          trigger:
            "The user or another program triggers some action where Chrome "
            "will show metadata about an extension. This includes extension "
            "installation flows, triggering an install for a disabled "
            "extension, and an extension being added to Chrome through "
            "third-party sideloading. It also happens when a kiosk app account "
            "whose metadata (app icon, name, required platform version) is not "
            "cached locally is detected in device local accounts list. The "
            "account can be set either by device policy or through extensions "
            "web UI, by the device owner (user that was initially added to the "
            "device; implies non managed device). The latter case is "
            "deprecated and not supported on newer Chrome OS boards."
          data:
            "The extension id and referrer url. The referrer chain is also "
            "included if the user has not opted out of SafeBrowsing."
          destination: GOOGLE_OWNED_SERVICE
        }
        policy {
          cookies_allowed: NO
          setting:
            "This feature cannot be disabled in settings. It will only be "
            "triggered if the user uses extensions."
          policy_exception_justification: "Not implemented."
        })");
  auto resource_request = std::make_unique<network::ResourceRequest>();
  resource_request->url = webstore_data_url;
  resource_request->load_flags =
      net::LOAD_DO_NOT_SAVE_COOKIES | net::LOAD_DISABLE_CACHE;
  resource_request->referrer = referrer_url_;
  resource_request->method = post_data_.empty() ?
"GET" : "POST"; simple_url_loader_ = network::SimpleURLLoader::Create( std::move(resource_request), traffic_annotation); if (!post_data_.empty()) simple_url_loader_->AttachStringForUpload(post_data_, "application/octet-stream"); if (max_auto_retries_ > 0) { simple_url_loader_->SetRetryOptions( max_auto_retries_, network::SimpleURLLoader::RETRY_ON_5XX | network::SimpleURLLoader::RETRY_ON_NETWORK_CHANGE); } simple_url_loader_->DownloadToStringOfUnboundedSizeUntilCrashAndDie( url_loader_factory, base::BindOnce(&WebstoreDataFetcher::OnSimpleLoaderComplete, base::Unretained(this))); } void WebstoreDataFetcher::OnJsonParseSuccess( std::unique_ptr<base::Value> parsed_json) { if (!parsed_json->is_dict()) { OnJsonParseFailure(kInvalidWebstoreResponseError); return; } delegate_->OnWebstoreResponseParseSuccess( std::unique_ptr<base::DictionaryValue>( static_cast<base::DictionaryValue*>(parsed_json.release()))); } void WebstoreDataFetcher::OnJsonParseFailure( const std::string& error) { delegate_->OnWebstoreResponseParseFailure(error); } void WebstoreDataFetcher::OnSimpleLoaderComplete( std::unique_ptr<std::string> response_body) { if (!response_body) { delegate_->OnWebstoreRequestFailure(); return; } // The parser will call us back via one of the callbacks. data_decoder::SafeJsonParser::Parse( content::ServiceManagerConnection::GetForProcess()->GetConnector(), *response_body, base::Bind(&WebstoreDataFetcher::OnJsonParseSuccess, AsWeakPtr()), base::Bind(&WebstoreDataFetcher::OnJsonParseFailure, AsWeakPtr())); } } // namespace extensions
2,196
1,350
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.azurearcdata.generated; import com.azure.core.util.Context; /** Samples for DataControllers Delete. */ public final class DataControllersDeleteSamples { /* * x-ms-original-file: specification/azurearcdata/resource-manager/Microsoft.AzureArcData/stable/2021-08-01/examples/DeleteDataController.json */ /** * Sample code: Delete a dataController. * * @param manager Entry point to AzureArcDataManager. */ public static void deleteADataController(com.azure.resourcemanager.azurearcdata.AzureArcDataManager manager) { manager.dataControllers().delete("testrg", "testdataController", Context.NONE); } }
267
587
/* * Copyright 2016-2020 Crown Copyright * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.gchq.gaffer.serialisation; import org.junit.jupiter.api.Test; import uk.gov.gchq.gaffer.commonutil.pair.Pair; import uk.gov.gchq.gaffer.exception.SerialisationException; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; public class DoubleSerialiserTest extends ToBytesSerialisationTest<Double> { @Test public void testCanSerialiseASampleRange() throws SerialisationException { // Given for (double i = 0; i < 1000; i += 1.1) { // When final byte[] b = serialiser.serialise(i); final Object o = serialiser.deserialise(b); // Then assertEquals(Double.class, o.getClass()); assertEquals(i, o); } } @Test public void canSerialiseDoubleMinValue() throws SerialisationException { // Given When final byte[] b = serialiser.serialise(Double.MIN_VALUE); final Object o = serialiser.deserialise(b); // Then assertEquals(Double.class, o.getClass()); assertEquals(Double.MIN_VALUE, o); } @Test public void canSerialiseDoubleMaxValue() throws SerialisationException { // Given When final byte[] b = serialiser.serialise(Double.MAX_VALUE); final Object o = serialiser.deserialise(b); // Then assertEquals(Double.class, o.getClass()); assertEquals(Double.MAX_VALUE, o); } @Test public void cantSerialiseStringClass() { assertFalse(serialiser.canHandle(String.class)); } @Test public void canSerialiseDoubleClass() { assertTrue(serialiser.canHandle(Double.class)); } @Override public Serialiser getSerialisation() { return new DoubleSerialiser(); } @Override @SuppressWarnings("unchecked") public Pair<Double, byte[]>[] getHistoricSerialisationPairs() { return new Pair[] { new Pair<>(Double.MAX_VALUE, new byte[] {49, 46, 55, 57, 55, 54, 57, 51, 49, 51, 52, 56, 54, 50, 51, 49, 53, 55, 69, 51, 48, 56}), new Pair<>(Double.MIN_VALUE, new byte[] {52, 46, 57, 69, 45, 51, 50, 52}), new Pair<>(0.0, new byte[] {48, 46, 48}), new Pair<>(1.00, new byte[] {49, 46, 48}), }; } }
1,172
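The historic serialisation pairs at the end of the test above pin down the wire format: each byte array is the ASCII text of Double.toString (for example {52, 46, 57, 69, 45, 51, 50, 52} decodes to "4.9E-324", and the Double.MAX_VALUE entry decodes to "1.7976931348623157E308"). A serialiser consistent with those pairs can be sketched as follows; this is inferred from the test data rather than copied from Gaffer's DoubleSerialiser source, and the charset choice is an assumption (any ASCII-compatible charset reproduces the bytes shown).

// Sketch of a serialiser consistent with the historic pairs in the test above:
// a double is stored as the bytes of its decimal string form. Inferred from the
// test data, not copied from Gaffer's implementation; the charset is an assumption.
import java.nio.charset.StandardCharsets;

public class StringBackedDoubleSerialiser {
    public byte[] serialise(final Double value) {
        // Double.toString(Double.MIN_VALUE) == "4.9E-324" -> {52, 46, 57, 69, 45, 51, 50, 52}
        return value.toString().getBytes(StandardCharsets.ISO_8859_1);
    }

    public Double deserialise(final byte[] bytes) {
        return Double.parseDouble(new String(bytes, StandardCharsets.ISO_8859_1));
    }
}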
2,613
/* LzmaDecode.h LZMA Decoder interface LZMA SDK 4.40 Copyright (c) 1999-2006 <NAME> (2006-05-01) http://www.7-zip.org/ LZMA SDK is licensed under two licenses: 1) GNU Lesser General Public License (GNU LGPL) 2) Common Public License (CPL) It means that you can select one of these two licenses and follow rules of that license. SPECIAL EXCEPTION: Igor Pavlov, as the author of this code, expressly permits you to statically or dynamically link your code (or bind by name) to the interfaces of this file without subjecting your linked code to the terms of the CPL or GNU LGPL. Any modifications or additions to this file, however, are subject to the LGPL or CPL terms. */ #ifndef __LZMADECODE_H #define __LZMADECODE_H #include "LzmaTypes.h" /* #define _LZMA_IN_CB */ /* Use callback for input data */ /* #define _LZMA_OUT_READ */ /* Use read function for output data */ /* #define _LZMA_PROB32 */ /* It can increase speed on some 32-bit CPUs, but memory usage will be doubled in that case */ /* #define _LZMA_LOC_OPT */ /* Enable local speed optimizations inside code */ #ifdef _LZMA_PROB32 #define CProb UInt32 #else #define CProb UInt16 #endif #define LZMA_RESULT_OK 0 #define LZMA_RESULT_DATA_ERROR 1 #ifdef _LZMA_IN_CB typedef struct _ILzmaInCallback { int (*Read)(void *object, const unsigned char **buffer, SizeT *bufferSize); } ILzmaInCallback; #endif #define LZMA_BASE_SIZE 1846 #define LZMA_LIT_SIZE 768 #define LZMA_PROPERTIES_SIZE 5 typedef struct _CLzmaProperties { int lc; int lp; int pb; #ifdef _LZMA_OUT_READ UInt32 DictionarySize; #endif }CLzmaProperties; int LzmaDecodeProperties(CLzmaProperties *propsRes, const unsigned char *propsData, int size); #define LzmaGetNumProbs(Properties) (LZMA_BASE_SIZE + (LZMA_LIT_SIZE << ((Properties)->lc + (Properties)->lp))) #define kLzmaNeedInitId (-2) typedef struct _CLzmaDecoderState { CLzmaProperties Properties; CProb *Probs; #ifdef _LZMA_IN_CB const unsigned char *Buffer; const unsigned char *BufferLim; #endif #ifdef _LZMA_OUT_READ unsigned char *Dictionary; UInt32 Range; UInt32 Code; UInt32 DictionaryPos; UInt32 GlobalPos; UInt32 DistanceLimit; UInt32 Reps[4]; int State; int RemainLen; unsigned char TempDictionary[4]; #endif } CLzmaDecoderState; #ifdef _LZMA_OUT_READ #define LzmaDecoderInit(vs) { (vs)->RemainLen = kLzmaNeedInitId; } #endif int LzmaDecode(CLzmaDecoderState *vs, #ifdef _LZMA_IN_CB ILzmaInCallback *inCallback, #else const unsigned char *inStream, SizeT inSize, SizeT *inSizeProcessed, #endif unsigned char *outStream, SizeT outSize, SizeT *outSizeProcessed); #endif
1,129
1,102
<filename>src/vizdoom/src/g_strife/a_oracle.cpp /* #include "actor.h" #include "a_action.h" #include "a_strifeglobal.h" #include "p_enemy.h" #include "r_defs.h" #include "thingdef/thingdef.h" */ DEFINE_ACTION_FUNCTION(AActor, A_WakeOracleSpectre) { TThinkerIterator<AActor> it(NAME_AlienSpectre3); AActor *spectre = it.Next(); if (spectre != NULL && spectre->health > 0 && self->target != spectre) { spectre->Sector->SoundTarget = spectre->LastHeard = self->LastHeard; spectre->target = self->target; spectre->SetState (spectre->SeeState); } }
255
678
/** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/OfficeImport.framework/OfficeImport */ #import <OfficeImport/XXUnknownSuperclass.h> @class NSString; __attribute__((visibility("hidden"))) @interface EDPivotPageField : XXUnknownSuperclass { @private unsigned mFieldId; // 4 = 0x4 NSString *mCap; // 8 = 0x8 NSString *mName; // 12 = 0xc } @property(assign) unsigned fieldId; // G=0x251751; S=0x251761; converted property @property(retain) id name; // G=0x251771; S=0x251901; converted property @property(retain) id cap; // G=0x251781; S=0x2518b9; converted property + (id)pivotPageField; // 0x251871 - (id)init; // 0x251831 - (void)dealloc; // 0x251949 // converted property getter: - (unsigned)fieldId; // 0x251751 // converted property setter: - (void)setFieldId:(unsigned)anId; // 0x251761 // converted property getter: - (id)name; // 0x251771 // converted property setter: - (void)setName:(id)name; // 0x251901 // converted property getter: - (id)cap; // 0x251781 // converted property setter: - (void)setCap:(id)cap; // 0x2518b9 @end
418
749
<reponame>tirkarthi/python-plexapi<filename>tests/test_settings.py def test_settings_group(plex): assert plex.settings.group("general") def test_settings_get(plex): value = plex.settings.get("FriendlyName").value assert isinstance(value, str) def test_settings_set(plex): cd = plex.settings.get("autoEmptyTrash") old_value = cd.value new_value = not old_value cd.set(new_value) plex.settings.save() plex._settings = None assert plex.settings.get("autoEmptyTrash").value == new_value def test_settings_set_str(plex): cd = plex.settings.get("OnDeckWindow") new_value = 99 cd.set(new_value) plex.settings.save() plex._settings = None assert plex.settings.get("OnDeckWindow").value == 99
290
1,020
<filename>codegen/src/test/java/org/robobinding/codegen/presentationmodel/differentpackage/CustomClass.java package org.robobinding.codegen.presentationmodel.differentpackage; /** * @since 1.0 * @author <NAME> * */ public class CustomClass { }
81
1,338
<filename>src/apps/processcontroller/PriorityMenu.h /* * Copyright 2000, <NAME>. All rights reserved. * Distributed under the terms of the MIT License. */ #ifndef _PRIORITY_MENU_H_ #define _PRIORITY_MENU_H_ #include <Menu.h> class PriorityMenu : public BMenu { public: PriorityMenu (thread_id thread, int32 priority); void Update (int32 priority); void BuildMenu (); private: thread_id fThreadID; int32 fPriority; }; #endif // _PRIORITY_MENU_H_
183
1,552
# -*- coding: utf-8 -*-
"""
BOW classification network
"""
import paddle
from erniekit.common.register import RegisterSet
from erniekit.common.rule import InstanceName
from erniekit.model.model import BaseModel
from erniekit.modules.encoder import BoWEncoder
from model.base_cls import BaseClassification


@RegisterSet.models.register
class BowClassification(BaseClassification):
    """BowClassification """
    def __init__(self, model_params):
        """
        """
        BaseModel.__init__(self, model_params)

    def structure(self):
        """Organize the network structure; defining the member variables needed here is sufficient.
        :return: None
        """
        self.dict_dim = self.model_params.get('vocab_size', 33261)
        self.emb_dim = self.model_params.get('emb_dim', 128)
        self.hid_dim = self.model_params.get('hid_dim', 128)
        self.hid_dim2 = self.model_params.get('hid_dim2', 96)
        self.num_labels = self.model_params.get('num_labels', 2)

        self.embedding = paddle.nn.Embedding(num_embeddings=self.dict_dim, embedding_dim=self.emb_dim)
        self.bow_encoder = BoWEncoder(self.emb_dim)
        self.fc_1 = paddle.nn.Linear(in_features=self.hid_dim, out_features=self.hid_dim)
        self.fc_2 = paddle.nn.Linear(in_features=self.hid_dim, out_features=self.hid_dim2)
        self.fc_prediction = paddle.nn.Linear(in_features=self.hid_dim2, out_features=self.num_labels)
        self.loss = paddle.nn.CrossEntropyLoss(use_softmax=False)

    def forward(self, fields_dict, phase):
        """
        :param fields_dict: tensors in dynamic-graph mode, python arrays in static-graph mode
        :param phase:
        :return:
        """
        instance_text_a = fields_dict["text_a"]
        record_id_text_a = instance_text_a[InstanceName.RECORD_ID]
        text_src = record_id_text_a[InstanceName.SRC_IDS]
        emb_output = self.embedding(text_src)
        bow_output = self.bow_encoder(emb_output)
        # bow_output = paddle.sum(emb_output, axis=1)
        fc_1_output = paddle.tanh(self.fc_1(bow_output))
        fc_2_output = paddle.tanh(self.fc_2(fc_1_output))
        prediction = self.fc_prediction(fc_2_output)
        probs = paddle.nn.functional.softmax(prediction)

        if phase == InstanceName.TRAINING or phase == InstanceName.EVALUATE or phase == InstanceName.TEST:
            instance_label = fields_dict["label"]
            record_id_label = instance_label[InstanceName.RECORD_ID]
            label = record_id_label[InstanceName.SRC_IDS]
            # label = paddle.to_tensor(label)
            cost = self.loss(probs, label)
            forward_return_dict = {
                InstanceName.PREDICT_RESULT: probs,
                InstanceName.LABEL: label,
                InstanceName.LOSS: cost
            }
        elif phase == InstanceName.SAVE_INFERENCE:
            # save inference model with jit
            target_predict_list = [probs]
            target_feed_list = [text_src]
            # Stored as JSON in the model's meta file and used for offline prediction; format: field_name#field_tensor_name
            target_feed_name_list = ["text_a#src_ids"]
            forward_return_dict = {
                InstanceName.TARGET_FEED: target_feed_list,
                InstanceName.TARGET_PREDICTS: target_predict_list,
                InstanceName.TARGET_FEED_NAMES: target_feed_name_list
            }
        else:
            forward_return_dict = {
                InstanceName.PREDICT_RESULT: probs
            }
        return forward_return_dict
1,711
435
{ "copyright_text": "Creative Commons Attribution license (reuse allowed)", "description": "<NAME>\nhttps://2016.pycon-au.org/schedule/18/view_talk\nPython is a dynamic programming language and has a strong tradition of adhering to a programming style called duck-typing. This means that it is possible to easily modify an application's code while it is running. One might wish to do this for various reasons, including enhancing the functionality of code, correcting errant behaviour, or adding instrumentation or debugging code.\n\nMaking such code modifications can be tricky though and not done correctly can potentially interfere with the operation of the original code, through destroying introspection abilities, not honouring the duck-typing mantra or due to being applied at the wrong time.\n\nIf you do need to do monkey patching though, the 'wrapt' library is your friend, with its transparent object proxy wrappers and post import hook mechanism, it allows you to safely monkey patch code to modify its behaviour.\n\nCome learn about the 'wrapt' library and the joys, but also the dangers, of monkey patching.", "duration": 1882, "language": "eng", "recorded": "2016-08-15", "related_urls": [ "https://2016.pycon-au.org/schedule/18/view_talk" ], "speakers": [ "<NAME>" ], "tags": [], "thumbnail_url": "https://i.ytimg.com/vi/u7oj-ghfhUk/maxresdefault.jpg", "title": "Hear no evil, see no evil, patch no evil: Or, how to monkey-patch safely.", "videos": [ { "type": "youtube", "url": "https://www.youtube.com/watch?v=u7oj-ghfhUk" } ] }
472
1,350
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.authorization.fluent.models; import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import java.time.OffsetDateTime; import java.util.HashMap; import java.util.List; import java.util.Map; /** todoTask. */ @Fluent public final class MicrosoftGraphTodoTaskInner extends MicrosoftGraphEntity { @JsonIgnore private final ClientLogger logger = new ClientLogger(MicrosoftGraphTodoTaskInner.class); /* * itemBody */ @JsonProperty(value = "body") private MicrosoftGraphItemBody body; /* * The date and time when the task was last modified. By default, it is in * UTC. You can provide a custom time zone in the request header. The * property value uses ISO 8601 format and is always in UTC time. For * example, midnight UTC on Jan 1, 2020 would look like this: * '2020-01-01T00:00:00Z'. */ @JsonProperty(value = "bodyLastModifiedDateTime") private OffsetDateTime bodyLastModifiedDateTime; /* * dateTimeTimeZone */ @JsonProperty(value = "completedDateTime") private MicrosoftGraphDateTimeZone completedDateTime; /* * The date and time when the task was created. By default, it is in UTC. * You can provide a custom time zone in the request header. The property * value uses ISO 8601 format. For example, midnight UTC on Jan 1, 2020 * would look like this: '2020-01-01T00:00:00Z'. */ @JsonProperty(value = "createdDateTime") private OffsetDateTime createdDateTime; /* * dateTimeTimeZone */ @JsonProperty(value = "dueDateTime") private MicrosoftGraphDateTimeZone dueDateTime; /* * The importance property. */ @JsonProperty(value = "importance") private MicrosoftGraphImportance importance; /* * Set to true if an alert is set to remind the user of the task. */ @JsonProperty(value = "isReminderOn") private Boolean isReminderOn; /* * The date and time when the task was last modified. By default, it is in * UTC. You can provide a custom time zone in the request header. The * property value uses ISO 8601 format and is always in UTC time. For * example, midnight UTC on Jan 1, 2020 would look like this: * '2020-01-01T00:00:00Z'. */ @JsonProperty(value = "lastModifiedDateTime") private OffsetDateTime lastModifiedDateTime; /* * patternedRecurrence */ @JsonProperty(value = "recurrence") private MicrosoftGraphPatternedRecurrence recurrence; /* * dateTimeTimeZone */ @JsonProperty(value = "reminderDateTime") private MicrosoftGraphDateTimeZone reminderDateTime; /* * The status property. */ @JsonProperty(value = "status") private MicrosoftGraphTaskStatus status; /* * A brief description of the task. */ @JsonProperty(value = "title") private String title; /* * The collection of open extensions defined for the task. Nullable. */ @JsonProperty(value = "extensions") private List<MicrosoftGraphExtensionInner> extensions; /* * A collection of resources linked to the task. */ @JsonProperty(value = "linkedResources") private List<MicrosoftGraphLinkedResourceInner> linkedResources; /* * todoTask */ @JsonIgnore private Map<String, Object> additionalProperties; /** * Get the body property: itemBody. * * @return the body value. 
*/ public MicrosoftGraphItemBody body() { return this.body; } /** * Set the body property: itemBody. * * @param body the body value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withBody(MicrosoftGraphItemBody body) { this.body = body; return this; } /** * Get the bodyLastModifiedDateTime property: The date and time when the task was last modified. By default, it is * in UTC. You can provide a custom time zone in the request header. The property value uses ISO 8601 format and is * always in UTC time. For example, midnight UTC on Jan 1, 2020 would look like this: '2020-01-01T00:00:00Z'. * * @return the bodyLastModifiedDateTime value. */ public OffsetDateTime bodyLastModifiedDateTime() { return this.bodyLastModifiedDateTime; } /** * Set the bodyLastModifiedDateTime property: The date and time when the task was last modified. By default, it is * in UTC. You can provide a custom time zone in the request header. The property value uses ISO 8601 format and is * always in UTC time. For example, midnight UTC on Jan 1, 2020 would look like this: '2020-01-01T00:00:00Z'. * * @param bodyLastModifiedDateTime the bodyLastModifiedDateTime value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withBodyLastModifiedDateTime(OffsetDateTime bodyLastModifiedDateTime) { this.bodyLastModifiedDateTime = bodyLastModifiedDateTime; return this; } /** * Get the completedDateTime property: dateTimeTimeZone. * * @return the completedDateTime value. */ public MicrosoftGraphDateTimeZone completedDateTime() { return this.completedDateTime; } /** * Set the completedDateTime property: dateTimeTimeZone. * * @param completedDateTime the completedDateTime value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withCompletedDateTime(MicrosoftGraphDateTimeZone completedDateTime) { this.completedDateTime = completedDateTime; return this; } /** * Get the createdDateTime property: The date and time when the task was created. By default, it is in UTC. You can * provide a custom time zone in the request header. The property value uses ISO 8601 format. For example, midnight * UTC on Jan 1, 2020 would look like this: '2020-01-01T00:00:00Z'. * * @return the createdDateTime value. */ public OffsetDateTime createdDateTime() { return this.createdDateTime; } /** * Set the createdDateTime property: The date and time when the task was created. By default, it is in UTC. You can * provide a custom time zone in the request header. The property value uses ISO 8601 format. For example, midnight * UTC on Jan 1, 2020 would look like this: '2020-01-01T00:00:00Z'. * * @param createdDateTime the createdDateTime value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withCreatedDateTime(OffsetDateTime createdDateTime) { this.createdDateTime = createdDateTime; return this; } /** * Get the dueDateTime property: dateTimeTimeZone. * * @return the dueDateTime value. */ public MicrosoftGraphDateTimeZone dueDateTime() { return this.dueDateTime; } /** * Set the dueDateTime property: dateTimeTimeZone. * * @param dueDateTime the dueDateTime value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withDueDateTime(MicrosoftGraphDateTimeZone dueDateTime) { this.dueDateTime = dueDateTime; return this; } /** * Get the importance property: The importance property. * * @return the importance value. 
*/ public MicrosoftGraphImportance importance() { return this.importance; } /** * Set the importance property: The importance property. * * @param importance the importance value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withImportance(MicrosoftGraphImportance importance) { this.importance = importance; return this; } /** * Get the isReminderOn property: Set to true if an alert is set to remind the user of the task. * * @return the isReminderOn value. */ public Boolean isReminderOn() { return this.isReminderOn; } /** * Set the isReminderOn property: Set to true if an alert is set to remind the user of the task. * * @param isReminderOn the isReminderOn value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withIsReminderOn(Boolean isReminderOn) { this.isReminderOn = isReminderOn; return this; } /** * Get the lastModifiedDateTime property: The date and time when the task was last modified. By default, it is in * UTC. You can provide a custom time zone in the request header. The property value uses ISO 8601 format and is * always in UTC time. For example, midnight UTC on Jan 1, 2020 would look like this: '2020-01-01T00:00:00Z'. * * @return the lastModifiedDateTime value. */ public OffsetDateTime lastModifiedDateTime() { return this.lastModifiedDateTime; } /** * Set the lastModifiedDateTime property: The date and time when the task was last modified. By default, it is in * UTC. You can provide a custom time zone in the request header. The property value uses ISO 8601 format and is * always in UTC time. For example, midnight UTC on Jan 1, 2020 would look like this: '2020-01-01T00:00:00Z'. * * @param lastModifiedDateTime the lastModifiedDateTime value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withLastModifiedDateTime(OffsetDateTime lastModifiedDateTime) { this.lastModifiedDateTime = lastModifiedDateTime; return this; } /** * Get the recurrence property: patternedRecurrence. * * @return the recurrence value. */ public MicrosoftGraphPatternedRecurrence recurrence() { return this.recurrence; } /** * Set the recurrence property: patternedRecurrence. * * @param recurrence the recurrence value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withRecurrence(MicrosoftGraphPatternedRecurrence recurrence) { this.recurrence = recurrence; return this; } /** * Get the reminderDateTime property: dateTimeTimeZone. * * @return the reminderDateTime value. */ public MicrosoftGraphDateTimeZone reminderDateTime() { return this.reminderDateTime; } /** * Set the reminderDateTime property: dateTimeTimeZone. * * @param reminderDateTime the reminderDateTime value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withReminderDateTime(MicrosoftGraphDateTimeZone reminderDateTime) { this.reminderDateTime = reminderDateTime; return this; } /** * Get the status property: The status property. * * @return the status value. */ public MicrosoftGraphTaskStatus status() { return this.status; } /** * Set the status property: The status property. * * @param status the status value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withStatus(MicrosoftGraphTaskStatus status) { this.status = status; return this; } /** * Get the title property: A brief description of the task. * * @return the title value. 
*/ public String title() { return this.title; } /** * Set the title property: A brief description of the task. * * @param title the title value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withTitle(String title) { this.title = title; return this; } /** * Get the extensions property: The collection of open extensions defined for the task. Nullable. * * @return the extensions value. */ public List<MicrosoftGraphExtensionInner> extensions() { return this.extensions; } /** * Set the extensions property: The collection of open extensions defined for the task. Nullable. * * @param extensions the extensions value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withExtensions(List<MicrosoftGraphExtensionInner> extensions) { this.extensions = extensions; return this; } /** * Get the linkedResources property: A collection of resources linked to the task. * * @return the linkedResources value. */ public List<MicrosoftGraphLinkedResourceInner> linkedResources() { return this.linkedResources; } /** * Set the linkedResources property: A collection of resources linked to the task. * * @param linkedResources the linkedResources value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withLinkedResources(List<MicrosoftGraphLinkedResourceInner> linkedResources) { this.linkedResources = linkedResources; return this; } /** * Get the additionalProperties property: todoTask. * * @return the additionalProperties value. */ @JsonAnyGetter public Map<String, Object> additionalProperties() { return this.additionalProperties; } /** * Set the additionalProperties property: todoTask. * * @param additionalProperties the additionalProperties value to set. * @return the MicrosoftGraphTodoTaskInner object itself. */ public MicrosoftGraphTodoTaskInner withAdditionalProperties(Map<String, Object> additionalProperties) { this.additionalProperties = additionalProperties; return this; } @JsonAnySetter void withAdditionalProperties(String key, Object value) { if (additionalProperties == null) { additionalProperties = new HashMap<>(); } additionalProperties.put(key, value); } /** {@inheritDoc} */ @Override public MicrosoftGraphTodoTaskInner withId(String id) { super.withId(id); return this; } /** * Validates the instance. * * @throws IllegalArgumentException thrown if the instance is not valid. */ @Override public void validate() { super.validate(); if (body() != null) { body().validate(); } if (completedDateTime() != null) { completedDateTime().validate(); } if (dueDateTime() != null) { dueDateTime().validate(); } if (recurrence() != null) { recurrence().validate(); } if (reminderDateTime() != null) { reminderDateTime().validate(); } if (extensions() != null) { extensions().forEach(e -> e.validate()); } if (linkedResources() != null) { linkedResources().forEach(e -> e.validate()); } } }
5,617
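Every setter in the generated model above returns the instance, so a todoTask is meant to be populated fluently and then checked with validate(). Below is a small illustrative sketch, not part of the original file, that uses only members visible in MicrosoftGraphTodoTaskInner; the title and timestamp values are placeholders.

// Minimal sketch of populating the fluent, generated model defined above.
// Only methods declared in MicrosoftGraphTodoTaskInner (and its withId override)
// are used; the field values are placeholders.
import java.time.OffsetDateTime;

import com.azure.resourcemanager.authorization.fluent.models.MicrosoftGraphTodoTaskInner;

public final class TodoTaskSketch {
    public static MicrosoftGraphTodoTaskInner buildReminderTask() {
        MicrosoftGraphTodoTaskInner task = new MicrosoftGraphTodoTaskInner()
                .withTitle("File the quarterly report")
                .withIsReminderOn(true)
                .withCreatedDateTime(OffsetDateTime.parse("2020-01-01T00:00:00Z"));
        task.validate();  // recursively validates nested models such as recurrence
        return task;
    }
}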
971
#include <memory>
#include <vector>

#include "benchmark/benchmark.h"
#include "benchmark_util/benchmark_config.h"
#include "common/scoped_timer.h"
#include "storage/data_table.h"
#include "storage/storage_util.h"
#include "test_util/multithread_test_util.h"
#include "test_util/storage_test_util.h"
#include "transaction/transaction_context.h"

namespace noisepage {

/**
 * This benchmark measures how fast the DataTable's slot iterator can scan over all of its tuples.
 */
class SlotIteratorBenchmark : public benchmark::Fixture {
 public:
  void SetUp(const benchmark::State &state) final {
    // generate a random redo ProjectedRow to Insert
    redo_buffer_ = common::AllocationUtil::AllocateAligned(initializer_.ProjectedRowSize());
    redo_ = initializer_.InitializeRow(redo_buffer_);
    StorageTestUtil::PopulateRandomRow(redo_, layout_, 0, &generator_);

    // generate a ProjectedRow buffer to Read
    read_buffer_ = common::AllocationUtil::AllocateAligned(initializer_.ProjectedRowSize());
    read_ = initializer_.InitializeRow(read_buffer_);

    // generate a vector of ProjectedRow buffers for concurrent reads
    for (uint32_t i = 0; i < BenchmarkConfig::num_threads; ++i) {
      // Create read buffer
      byte *read_buffer = common::AllocationUtil::AllocateAligned(initializer_.ProjectedRowSize());
      storage::ProjectedRow *read = initializer_.InitializeRow(read_buffer);
      read_buffers_.emplace_back(read_buffer);
      reads_.emplace_back(read);
    }
  }

  void TearDown(const benchmark::State &state) final {
    delete[] redo_buffer_;
    delete[] read_buffer_;
    for (uint32_t i = 0; i < BenchmarkConfig::num_threads; ++i) delete[] read_buffers_[i];
    // google benchmark might run benchmark several iterations. We need to clear vectors.
    read_buffers_.clear();
    reads_.clear();
  }

  // Tuple layout
  const uint8_t column_size_ = 8;
  const storage::BlockLayout layout_{{column_size_, column_size_, column_size_}};

  // Tuple properties
  const storage::ProjectedRowInitializer initializer_ =
      storage::ProjectedRowInitializer::Create(layout_, StorageTestUtil::ProjectionListAllColumns(layout_));

  // Workload
  const uint32_t num_reads_ = 10000000;
  const uint64_t buffer_pool_reuse_limit_ = 10000000;

  // Test infrastructure
  std::default_random_engine generator_;
  storage::BlockStore block_store_{1000, 1000};
  storage::RecordBufferSegmentPool buffer_pool_{num_reads_, buffer_pool_reuse_limit_};

  // Insert buffer pointers
  byte *redo_buffer_;
  storage::ProjectedRow *redo_;

  // Read buffer pointers
  byte *read_buffer_;
  storage::ProjectedRow *read_;

  // Read buffer pointers for concurrent reads
  std::vector<byte *> read_buffers_;
  std::vector<storage::ProjectedRow *> reads_;
};

// Iterate over the num_reads_ tuples in sequential order from a DataTable concurrently
// NOLINTNEXTLINE
BENCHMARK_DEFINE_F(SlotIteratorBenchmark, ConcurrentSlotIterators)(benchmark::State &state) {
  storage::DataTable read_table(common::ManagedPointer<storage::BlockStore>(&block_store_), layout_,
                                storage::layout_version_t(0));

  // populate read_table_ by inserting tuples
  // We can use dummy timestamps here since we're not invoking concurrency control
  transaction::TransactionContext txn(transaction::timestamp_t(0), transaction::timestamp_t(0),
                                      common::ManagedPointer(&buffer_pool_), DISABLED);
  for (uint32_t i = 0; i < num_reads_; ++i) {
    read_table.Insert(common::ManagedPointer(&txn), *redo_);
  }

  auto workload = [&]() {
    auto it = read_table.begin();
    uint32_t num_reads = 0;
    while (it != read_table.end()) {
      num_reads++;
      it++;
    }
    EXPECT_EQ(num_reads, num_reads_);
  };

  common::WorkerPool thread_pool(BenchmarkConfig::num_threads, {});
  thread_pool.Startup();

  // NOLINTNEXTLINE
  for (auto _ : state) {
    uint64_t elapsed_ms;
    {
      common::ScopedTimer<std::chrono::milliseconds> timer(&elapsed_ms);
      for (uint32_t j = 0; j < BenchmarkConfig::num_threads; j++) {
        thread_pool.SubmitTask([&workload] { workload(); });
      }
      thread_pool.WaitUntilAllFinished();
    }
    state.SetIterationTime(static_cast<double>(elapsed_ms) / 1000.0);
  }
  state.SetItemsProcessed(state.iterations() * num_reads_ * BenchmarkConfig::num_threads);
}

// Iterate over the num_reads_ tuples in sequential order from a DataTable concurrently, storing each visited slot
// NOLINTNEXTLINE
BENCHMARK_DEFINE_F(SlotIteratorBenchmark, ConcurrentSlotIteratorsReads)(benchmark::State &state) {
  storage::DataTable read_table(common::ManagedPointer<storage::BlockStore>(&block_store_), layout_,
                                storage::layout_version_t(0));

  // populate read_table_ by inserting tuples
  // We can use dummy timestamps here since we're not invoking concurrency control
  transaction::TransactionContext txn(transaction::timestamp_t(0), transaction::timestamp_t(0),
                                      common::ManagedPointer(&buffer_pool_), DISABLED);
  for (uint32_t i = 0; i < num_reads_; ++i) {
    read_table.Insert(common::ManagedPointer(&txn), *redo_);
  }

  std::vector<std::vector<storage::TupleSlot>> reads(BenchmarkConfig::num_threads);
  for (auto &i : reads) i.resize(num_reads_);

  auto workload = [&](const uint32_t worker_id) {
    auto it = read_table.begin();
    uint32_t num_reads = 0;
    while (it != read_table.end()) {
      reads[worker_id][num_reads++] = *it++;
    }
    EXPECT_EQ(num_reads, num_reads_);
  };

  common::WorkerPool thread_pool(BenchmarkConfig::num_threads, {});
  thread_pool.Startup();

  // NOLINTNEXTLINE
  for (auto _ : state) {
    uint64_t elapsed_ms;
    {
      common::ScopedTimer<std::chrono::milliseconds> timer(&elapsed_ms);
      for (uint32_t j = 0; j < BenchmarkConfig::num_threads; j++) {
        thread_pool.SubmitTask([&workload, j] { workload(j); });
      }
      thread_pool.WaitUntilAllFinished();
    }
    state.SetIterationTime(static_cast<double>(elapsed_ms) / 1000.0);
  }
  state.SetItemsProcessed(state.iterations() * num_reads_ * BenchmarkConfig::num_threads);
}

// ----------------------------------------------------------------------------
// BENCHMARK REGISTRATION
// ----------------------------------------------------------------------------
// clang-format off
BENCHMARK_REGISTER_F(SlotIteratorBenchmark, ConcurrentSlotIterators)
    ->Unit(benchmark::kMillisecond)
    ->UseRealTime()
    ->UseManualTime();
BENCHMARK_REGISTER_F(SlotIteratorBenchmark, ConcurrentSlotIteratorsReads)
    ->Unit(benchmark::kMillisecond)
    ->UseRealTime()
    ->UseManualTime();
// clang-format on

}  // namespace noisepage
<reponame>dartartem/eventuate-tram-sagas package io.eventuate.tram.sagas.reactive.orchestration; import io.eventuate.tram.consumer.common.reactive.ReactiveMessageConsumer; import io.eventuate.tram.reactive.commands.producer.ReactiveCommandProducer; import io.eventuate.tram.sagas.reactive.common.ReactiveSagaLockManager; public class ReactiveSagaManagerFactory { private final ReactiveSagaInstanceRepository sagaInstanceRepository; private final ReactiveCommandProducer commandProducer; private final ReactiveMessageConsumer messageConsumer; private final ReactiveSagaLockManager sagaLockManager; private final ReactiveSagaCommandProducer sagaCommandProducer; public ReactiveSagaManagerFactory(ReactiveSagaInstanceRepository sagaInstanceRepository, ReactiveCommandProducer commandProducer, ReactiveMessageConsumer messageConsumer, ReactiveSagaLockManager sagaLockManager, ReactiveSagaCommandProducer sagaCommandProducer) { this.sagaInstanceRepository = sagaInstanceRepository; this.commandProducer = commandProducer; this.messageConsumer = messageConsumer; this.sagaLockManager = sagaLockManager; this.sagaCommandProducer = sagaCommandProducer; } public <SagaData> ReactiveSagaManagerImpl<SagaData> make(ReactiveSaga<SagaData> saga) { return new ReactiveSagaManagerImpl<>(saga, sagaInstanceRepository, commandProducer, messageConsumer, sagaLockManager, sagaCommandProducer); } }
/** * Configuration classes for Spring Data R2DBC. */ @org.springframework.lang.NonNullApi @org.springframework.lang.NonNullFields package org.springframework.data.r2dbc.config;
<reponame>MiCHiLU/google_appengine_sdk<gh_stars>100-1000 from django.conf.urls import patterns, url from django.contrib.auth import context_processors from django.contrib.auth.urls import urlpatterns from django.contrib.auth.views import password_reset from django.contrib.auth.decorators import login_required from django.contrib.messages.api import info from django.http import HttpResponse from django.shortcuts import render_to_response from django.template import Template, RequestContext from django.views.decorators.cache import never_cache @never_cache def remote_user_auth_view(request): "Dummy view for remote user tests" t = Template("Username is {{ user }}.") c = RequestContext(request, {}) return HttpResponse(t.render(c)) def auth_processor_no_attr_access(request): r1 = render_to_response('context_processors/auth_attrs_no_access.html', RequestContext(request, {}, processors=[context_processors.auth])) # *After* rendering, we check whether the session was accessed return render_to_response('context_processors/auth_attrs_test_access.html', {'session_accessed':request.session.accessed}) def auth_processor_attr_access(request): r1 = render_to_response('context_processors/auth_attrs_access.html', RequestContext(request, {}, processors=[context_processors.auth])) return render_to_response('context_processors/auth_attrs_test_access.html', {'session_accessed':request.session.accessed}) def auth_processor_user(request): return render_to_response('context_processors/auth_attrs_user.html', RequestContext(request, {}, processors=[context_processors.auth])) def auth_processor_perms(request): return render_to_response('context_processors/auth_attrs_perms.html', RequestContext(request, {}, processors=[context_processors.auth])) def auth_processor_messages(request): info(request, "Message 1") return render_to_response('context_processors/auth_attrs_messages.html', RequestContext(request, {}, processors=[context_processors.auth])) def userpage(request): pass # special urls for auth test cases urlpatterns = urlpatterns + patterns('', (r'^logout/custom_query/$', 'django.contrib.auth.views.logout', dict(redirect_field_name='follow')), (r'^logout/next_page/$', 'django.contrib.auth.views.logout', dict(next_page='/somewhere/')), (r'^remote_user/$', remote_user_auth_view), (r'^password_reset_from_email/$', 'django.contrib.auth.views.password_reset', dict(from_email='<EMAIL>')), (r'^admin_password_reset/$', 'django.contrib.auth.views.password_reset', dict(is_admin_site=True)), (r'^login_required/$', login_required(password_reset)), (r'^login_required_login_url/$', login_required(password_reset, login_url='/somewhere/')), (r'^auth_processor_no_attr_access/$', auth_processor_no_attr_access), (r'^auth_processor_attr_access/$', auth_processor_attr_access), (r'^auth_processor_user/$', auth_processor_user), (r'^auth_processor_perms/$', auth_processor_perms), (r'^auth_processor_messages/$', auth_processor_messages), url(r'^userpage/(.+)/$', userpage, name="userpage"), )
package org.apereo.cas.authentication; import org.apereo.cas.authentication.principal.Principal; import java.util.List; import java.util.Map; /** * This is {@link PrincipalElectionStrategyConflictResolver} * that determines the final principal identifier * as part of the principal election strategy. * * @author <NAME> * @since 6.4.0 */ @FunctionalInterface public interface PrincipalElectionStrategyConflictResolver { /** * Pick the last principal in the chain of principals resolved. * * @return the principal election strategy conflict resolver */ static PrincipalElectionStrategyConflictResolver last() { return (principals, attributes) -> principals.get(principals.size() - 1).getId(); } /** * Pick the first principal in the chain of principals resolved. * * @return the principal election strategy conflict resolver */ static PrincipalElectionStrategyConflictResolver first() { return (principals, attributes) -> principals.get(0).getId(); } /** * Resolve the principal id from the chain. * * @param principals the principals chain * @param attributes the attributes * @return the final principal id */ String resolve(List<Principal> principals, Map<String, List<Object>> attributes); }
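// Usage sketch (not part of CAS): because the interface above is a
// @FunctionalInterface, a deployment can plug in its own resolution rule as a
// lambda in addition to the built-in first()/last() factories. The "primaryId"
// attribute key and the surrounding class are hypothetical.
import org.apereo.cas.authentication.PrincipalElectionStrategyConflictResolver;

public class PrincipalConflictResolverSketch {
    public static PrincipalElectionStrategyConflictResolver primaryIdOrLast() {
        // Prefer an explicit "primaryId" attribute when present; otherwise
        // fall back to the same behavior as the built-in last() factory.
        return (principals, attributes) -> attributes.containsKey("primaryId")
            ? attributes.get("primaryId").get(0).toString()
            : principals.get(principals.size() - 1).getId();
    }
}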
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "third_party/blink/renderer/platform/scheduler/public/worker_pool.h" #include <memory> #include "base/location.h" #include "base/test/task_environment.h" #include "testing/gtest/include/gtest/gtest.h" #include "third_party/blink/renderer/platform/wtf/cross_thread_functional.h" namespace blink { namespace { void PingPongTask(base::WaitableEvent* done_event) { done_event->Signal(); } } // namespace TEST(BackgroundSchedulerTest, RunOnBackgroundThread) { base::test::TaskEnvironment task_environment; std::unique_ptr<base::WaitableEvent> done_event = std::make_unique<base::WaitableEvent>(); worker_pool::PostTask( FROM_HERE, CrossThreadBindOnce(&PingPongTask, CrossThreadUnretained(done_event.get()))); // Test passes by not hanging on the following wait(). done_event->Wait(); } } // namespace blink
<reponame>ahmadtech199/expo /* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ #import <Foundation/Foundation.h> #import <ABI41_0_0React/ABI41_0_0RCTURLRequestDelegate.h> #import <ABI41_0_0React/ABI41_0_0RCTURLRequestHandler.h> typedef void (^ABI41_0_0RCTURLRequestCompletionBlock)(NSURLResponse *response, NSData *data, NSError *error); typedef void (^ABI41_0_0RCTURLRequestCancellationBlock)(void); typedef void (^ABI41_0_0RCTURLRequestIncrementalDataBlock)(NSData *data, int64_t progress, int64_t total); typedef void (^ABI41_0_0RCTURLRequestProgressBlock)(int64_t progress, int64_t total); typedef void (^ABI41_0_0RCTURLRequestResponseBlock)(NSURLResponse *response); typedef NS_ENUM(NSInteger, ABI41_0_0RCTNetworkTaskStatus) { ABI41_0_0RCTNetworkTaskPending = 0, ABI41_0_0RCTNetworkTaskInProgress, ABI41_0_0RCTNetworkTaskFinished, }; @interface ABI41_0_0RCTNetworkTask : NSObject <ABI41_0_0RCTURLRequestDelegate> @property (nonatomic, readonly) NSURLRequest *request; @property (nonatomic, readonly) NSNumber *requestID; @property (nonatomic, readonly, weak) id requestToken; @property (nonatomic, readonly) NSURLResponse *response; @property (nonatomic, copy) ABI41_0_0RCTURLRequestCompletionBlock completionBlock; @property (nonatomic, copy) ABI41_0_0RCTURLRequestProgressBlock downloadProgressBlock; @property (nonatomic, copy) ABI41_0_0RCTURLRequestIncrementalDataBlock incrementalDataBlock; @property (nonatomic, copy) ABI41_0_0RCTURLRequestResponseBlock responseBlock; @property (nonatomic, copy) ABI41_0_0RCTURLRequestProgressBlock uploadProgressBlock; @property (nonatomic, readonly) ABI41_0_0RCTNetworkTaskStatus status; - (instancetype)initWithRequest:(NSURLRequest *)request handler:(id<ABI41_0_0RCTURLRequestHandler>)handler callbackQueue:(dispatch_queue_t)callbackQueue NS_DESIGNATED_INITIALIZER; - (void)start; - (void)cancel; @end
# -*- coding: utf-8 -*- from vilya.libs.store import mc class MLockMeta(type): def __getattr__(cls, name): return MLock(name) class MLock(object): __metaclass__ = MLockMeta def __init__(self, mc_prefix): self.mc_prefix = mc_prefix def __call__(self, **kw): parts = [self.mc_prefix] for pair in sorted(kw.items()): parts += list(pair) mc_key = ':'.join(map(str, parts)) return MLockContext(mc_key) class MLockContext(object): def __init__(self, mc_key, value=1, expire=30): self.mc_key = mc_key self.value = value self.expire = expire def __str__(self): return ('<MLockContext(mc_key=%s, value=%s, expire=%s)>' % (self.mc_key, self.get_value(), self.expire)) __repr__ = __str__ def acquire(self): if not mc.add(self.mc_key, self.value, self.expire): raise MLockExclusiveError def release(self): mc.delete(self.mc_key) def get_value(self): return mc.get(self.mc_key) def __enter__(self): try: self.acquire() except MLockExclusiveError as e: return e def __exit__(self, type_, value, traceback): if traceback is None: self.release() class MLockExclusiveError(Exception): pass
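# Usage sketch for the MLock class above (the lock name, kwargs, and
# do_exclusive_work() are illustrative only): MLock.<name>(**kw) builds the
# memcache key "<name>:key:value:..." and entering the context attempts
# mc.add(), which only the first caller wins until the key expires.
# Note the subtlety that __enter__ returns the MLockExclusiveError instead of
# raising it, so the caller must inspect the value bound by "as".
with MLock.update_repo(repo_id=42) as err:
    if isinstance(err, MLockExclusiveError):
        print('another worker holds the lock; skipping')
    else:
        do_exclusive_work()  # hypothetical critical section; runs at most once per 30s window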
<filename>clipper_admin/clipper_admin/exceptions.py
class ClipperException(Exception):
    """A generic exception indicating that Clipper encountered a problem."""

    def __init__(self, msg, *args):
        self.msg = msg
        super(Exception, self).__init__(msg, *args)


class UnconnectedException(ClipperException):
    """A ``ClipperConnection`` instance must be connected to a Clipper
    cluster to issue this command."""

    def __init__(self, *args):
        message = (
            "No connection to Clipper cluster. Call ClipperConnection.connect to "
            "connect to an existing cluster or ClipperConnection.start_clipper to "
            "create a new one")
        self.message = message
        super(UnconnectedException, self).__init__(message, *args)
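# Usage sketch (the guard function below is illustrative, not Clipper's real
# code): callers usually catch ClipperException as the umbrella type for both
# error classes defined above.
def require_connection(connected):
    if not connected:
        raise UnconnectedException()

try:
    require_connection(False)
except ClipperException as e:
    print(e.msg)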
{"nom":"Framont","circ":"1ère circonscription","dpt":"Haute-Saône","inscrits":142,"abs":76,"votants":66,"blancs":4,"nuls":2,"exp":60,"res":[{"nuance":"FN","nom":"Mme <NAME>","voix":36},{"nuance":"REM","nom":"Mme <NAME>","voix":24}]}
<gh_stars>100-1000 package picard.illumina.parser.fakers; import java.nio.ByteBuffer; /** * Created by jcarey on 3/13/14. */ public class LocsFileFaker extends FileFaker { @Override protected void fakeFile(final ByteBuffer buffer) { buffer.putInt(1); buffer.putFloat(1.0f); buffer.putInt(1); buffer.putFloat(5.0f); buffer.putFloat(5.0f); } @Override protected boolean addLeadingZeros() { return false; } @Override protected int bufferSize() { return (Integer.SIZE * 2) + (Float.SIZE * 3); } }
// essfunc.dll for Vulnserver // Visit my blog for more details: http://www.thegreycorner.com/ /* Copyright (c) 2010, <NAME> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the organization nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include <stdio.h> #define VERSION "1.00" void EssentialFunc1() { printf ("Called essential function dll version %s\n", VERSION); } void EssentialFunc2() { __asm__("jmp *%esp\n\t" "jmp *%eax\n\t" "pop %eax\n\t" "pop %eax\n\t" "ret"); } void EssentialFunc3() { __asm__("jmp *%esp\n\t" "jmp *%ecx\n\t" "pop %ebx\n\t" "pop %ebx\n\t" "ret"); } void EssentialFunc4() { __asm__("jmp *%esp\n\t" "jmp *%ebx\n\t" "pop %ebp\n\t" "pop %ebp\n\t" "ret"); } void EssentialFunc5() { __asm__("jmp *%esp\n\t" "jmp *%edi\n\t" "pop %ebx\n\t" "pop %ebx\n\t" "ret"); } void EssentialFunc6() { __asm__("jmp *%esp\n\t" "jmp *%edx\n\t" "pop %ecx\n\t" "pop %edx\n\t" "ret"); } void EssentialFunc7() { __asm__("jmp *%esp\n\t" "jmp *%esi\n\t" "pop %ecx\n\t" "pop %eax\n\t" "ret"); } void EssentialFunc8() { __asm__("jmp *%esp\n\t" "jmp *%ebp\n\t" "pop %eax\n\t" "pop %edx\n\t" "ret"); } void EssentialFunc9() { __asm__("jmp *%esp\n\t" "jmp *%esp\n\t" "jmp *-12(%esp)\n\t" "pop %ecx\n\t" "pop %ecx\n\t" "ret"); } void EssentialFunc10(char *Input) { char Buffer2S[140]; strcpy(Buffer2S, Input); } void EssentialFunc11(char *Input) { char Buffer2S[60]; strcpy(Buffer2S, Input); } void EssentialFunc12(char *Status, char *Input) { char Buffer2S[2000]; strcpy(Buffer2S, Input); printf("%s", Status); } void EssentialFunc13(char *Input) { char Buffer2S[2000]; strcpy(Buffer2S, Input); } void EssentialFunc14(char *Input) { char Buffer2S[1000]; strcpy(Buffer2S, Input); }
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import grpc
import unittest
import sys
import os
import pkgutil

from typing import Sequence


class SingleLoader(object):
    def __init__(self, pattern: str):
        loader = unittest.TestLoader()
        self.suite = unittest.TestSuite()
        tests = []
        for importer, module_name, is_package in pkgutil.walk_packages([os.path.dirname(os.path.relpath(__file__))]):
            if pattern in module_name:
                module = importer.find_module(module_name).load_module(module_name)
                tests.append(loader.loadTestsFromModule(module))
        if len(tests) != 1:
            raise AssertionError("Expected only 1 test module. Found {}".format(tests))
        self.suite.addTest(tests[0])

    def loadTestsFromNames(self, names: Sequence[str], module: str = None) -> unittest.TestSuite:
        return self.suite


if __name__ == "__main__":
    from gevent import monkey
    monkey.patch_all()
    import grpc.experimental.gevent
    grpc.experimental.gevent.init_gevent()
    import gevent

    if len(sys.argv) != 2:
        # Exit on bad usage instead of falling through to an IndexError below.
        sys.exit(f"USAGE: {sys.argv[0]} TARGET_MODULE")

    target_module = sys.argv[1]

    loader = SingleLoader(target_module)
    runner = unittest.TextTestRunner()

    result = gevent.spawn(runner.run, loader.suite)
    result.join()
    if not result.value.wasSuccessful():
        sys.exit("Test failure.")
<reponame>yetsun/hue #!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (C) 2012 <NAME>, European Environment Agency # # This is free software. You may redistribute it under the terms # of the Apache license and the GNU General Public License Version # 2 or at your option any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # # Contributor(s): # # This example shows how to create a manual page break. from odf.opendocument import OpenDocumentText from odf.style import Style, TextProperties, ParagraphProperties from odf.text import P textdoc = OpenDocumentText() # Create a style for the paragraph with page-break withbreak = Style(name="WithBreak", parentstylename="Standard", family="paragraph") withbreak.addElement(ParagraphProperties(breakbefore="page")) textdoc.automaticstyles.addElement(withbreak) p = P(text=u'First paragraph') textdoc.text.addElement(p) p = P(stylename=withbreak,text=u'Second paragraph') textdoc.text.addElement(p) textdoc.save("pagebreak_odfpy.odt")
{ "name": "<NAME>", "symbol": "DFS", "type": "ERC20", "decimals": 18, "description": "DFS is a global payment option that is fully decentralized built on the Ethereum Blockchain.", "website": "https://www.digitalfantasysports.com", "explorer": "https://etherscan.io/token/0xcec38306558a31cdbb2a9d6285947C5b44A24f3e", "status": "active", "id": "0xcec38306558a31cdbb2a9d6285947C5b44A24f3e" }
<reponame>STEllAR-GROUP/hpx<filename>libs/core/algorithms/include/hpx/parallel/algorithms/ends_with.hpp // Copyright (c) 2020 ETH Zurich // Copyright (c) 2014 <NAME> // // SPDX-License-Identifier: BSL-1.0 // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) /// \file parallel/algorithms/ends_with.hpp #pragma once #if defined(DOXYGEN) namespace hpx { namespace ranges { // clang-format off /// Checks whether the second range defined by [first1, last1) matches the /// suffix of the first range defined by [first2, last2) /// /// \note Complexity: Linear: at most min(N1, N2) applications of the /// predicate and both projections. /// /// \tparam InIter1 The type of the begin source iterators used /// (deduced). This iterator type must meet the /// requirements of an input iterator. /// \tparam InIter2 The type of the begin destination iterators used /// deduced). This iterator type must meet the /// requirements of a input iterator. /// \tparam Pred The binary predicate that compares the projected /// elements. /// \tparam Proj1 The type of an optional projection function for /// the source range. This defaults to /// \a util::projection_identity /// \tparam Proj1 The type of an optional projection function for /// the destination range. This defaults to /// \a util::projection_identity /// /// \param first1 Refers to the beginning of the source range. /// \param last1 Refers to the end of the source range. /// \param first2 Refers to the beginning of the destination range. /// \param last2 Refers to the end of the destination range. /// \param pred Specifies the binary predicate function /// (or function object) which will be invoked for /// comparison of the elements in the in two ranges /// projected by proj1 and proj2 respectively. /// \param proj1 Specifies the function (or function object) which /// will be invoked for each of the elements in the /// source range as a projection operation before the /// actual predicate \a is invoked. /// \param proj2 Specifies the function (or function object) which /// will be invoked for each of the elements in the /// destination range as a projection operation before /// the actual predicate \a is invoked. /// /// The assignments in the parallel \a ends_with algorithm invoked /// without an execution policy object execute in sequential order /// in the calling thread. /// /// \returns The \a ends_with algorithm returns \a bool. /// The \a ends_with algorithm returns a boolean with the /// value true if the second range matches the suffix of the /// first range, false otherwise. template <typename InIter1, typename InIter2, typename Pred, typename Proj1, typename Proj2> bool ends_with(InIter1 first1, InIter1 last1, InIter2 first2, InIter2 last2, Pred&& pred, Proj1&& proj1, Proj2&& proj2); /// Checks whether the second range defined by [first1, last1) matches the /// suffix of the first range defined by [first2, last2) /// /// \note Complexity: Linear: at most min(N1, N2) applications of the /// predicate and both projections. /// /// \tparam ExPolicy The type of the execution policy to use (deduced). /// It describes the manner in which the execution /// of the algorithm may be parallelized and the manner /// in which it executes the assignments. /// \tparam FwdIter1 The type of the begin source iterators used /// (deduced). This iterator type must meet the /// requirements of an forward iterator. 
/// \tparam FwdIter2 The type of the begin destination iterators used /// deduced). This iterator type must meet the /// requirements of a forward iterator. /// \tparam Pred The binary predicate that compares the projected /// elements. /// \tparam Proj1 The type of an optional projection function for /// the source range. This defaults to /// \a util::projection_identity /// \tparam Proj1 The type of an optional projection function for /// the destination range. This defaults to /// \a util::projection_identity /// /// \param policy The execution policy to use for the scheduling of /// the iterations. /// \param first1 Refers to the beginning of the source range. /// \param last1 Refers to the end of the source range. /// \param first2 Refers to the beginning of the destination range. /// \param last2 Refers to the end of the destination range. /// \param pred Specifies the binary predicate function /// (or function object) which will be invoked for /// comparison of the elements in the in two ranges /// projected by proj1 and proj2 respectively. /// \param proj1 Specifies the function (or function object) which /// will be invoked for each of the elements in the /// source range as a projection operation before the /// actual predicate \a is invoked. /// \param proj2 Specifies the function (or function object) which /// will be invoked for each of the elements in the /// destination range as a projection operation before /// the actual predicate \a is invoked. /// /// The assignments in the parallel \a ends_with algorithm invoked with an /// execution policy object of type \a sequenced_policy /// execute in sequential order in the calling thread. /// /// The assignments in the parallel \a ends_with algorithm invoked with /// an execution policy object of type \a parallel_policy or /// \a parallel_task_policy are permitted to execute in an unordered /// fashion in unspecified threads, and indeterminately sequenced /// within each thread. /// /// \returns The \a ends_with algorithm returns a /// \a hpx::future<bool> if the execution policy is of type /// \a sequenced_task_policy or \a parallel_task_policy and /// returns \a bool otherwise. /// The \a ends_with algorithm returns a boolean with the /// value true if the second range matches the suffix of the /// first range, false otherwise. 
template <typename ExPolicy, typename FwdIter1, typename FwdIter2, typename Pred, typename Proj1, typename Proj2> typename hpx::parallel::util::detail::algorithm_result<ExPolicy, bool>::type ends_with(ExPolicy&& policy, FwdIter1 first1, FwdIter1 last1, FwdIter2 first2, FwdIter2 last2, Pred&& pred, Proj1&& proj1, Proj2&& proj2); // clang-format on }} // namespace hpx::ranges #else // DOXYGEN #include <hpx/config.hpp> #include <hpx/algorithms/traits/projected.hpp> #include <hpx/execution/algorithms/detail/predicates.hpp> #include <hpx/executors/execution_policy.hpp> #include <hpx/iterator_support/traits/is_iterator.hpp> #include <hpx/parallel/algorithms/detail/dispatch.hpp> #include <hpx/parallel/algorithms/detail/distance.hpp> #include <hpx/parallel/algorithms/equal.hpp> #include <hpx/parallel/algorithms/mismatch.hpp> #include <hpx/parallel/util/detail/algorithm_result.hpp> #include <hpx/parallel/util/invoke_projected.hpp> #include <hpx/parallel/util/projection_identity.hpp> #include <hpx/parallel/util/result_types.hpp> #include <algorithm> #include <cstddef> #include <iterator> #include <type_traits> #include <utility> #include <vector> namespace hpx { namespace parallel { inline namespace v1 { /////////////////////////////////////////////////////////////////////////// // ends_with namespace detail { /// \cond NOINTERNAL struct ends_with : public detail::algorithm<ends_with, bool> { ends_with() : ends_with::algorithm("ends_with") { } template <typename ExPolicy, typename Iter1, typename Sent1, typename Iter2, typename Sent2, typename Pred, typename Proj1, typename Proj2> static bool sequential(ExPolicy, Iter1 first1, Sent1 last1, Iter2 first2, Sent2 last2, Pred&& pred, Proj1&& proj1, Proj2&& proj2) { const auto drop = detail::distance(first1, last1) - detail::distance(first2, last2); if (drop < 0) return false; return hpx::parallel::v1::detail::equal_binary().call( hpx::execution::seq, std::next(std::move(first1), drop), std::move(last1), std::move(first2), std::move(last2), std::forward<Pred>(pred), std::forward<Proj1>(proj1), std::forward<Proj2>(proj2)); } template <typename ExPolicy, typename FwdIter1, typename Sent1, typename FwdIter2, typename Sent2, typename Pred, typename Proj1, typename Proj2> static typename util::detail::algorithm_result<ExPolicy, bool>::type parallel(ExPolicy&& policy, FwdIter1 first1, Sent1 last1, FwdIter2 first2, Sent2 last2, Pred&& pred, Proj1&& proj1, Proj2&& proj2) { const auto drop = detail::distance(first1, last1) - detail::distance(first2, last2); if (drop < 0) { return util::detail::algorithm_result<ExPolicy, bool>::get( false); } return hpx::parallel::v1::detail::equal_binary().call( std::forward<ExPolicy>(policy), std::next(std::move(first1), drop), std::move(last1), std::move(first2), std::move(last2), std::forward<Pred>(pred), std::forward<Proj1>(proj1), std::forward<Proj2>(proj2)); } }; /// \endcond } // namespace detail }}} // namespace hpx::parallel::v1 namespace hpx { /////////////////////////////////////////////////////////////////////////// // DPO for hpx::ends_with HPX_INLINE_CONSTEXPR_VARIABLE struct ends_with_t final : hpx::functional::tag_fallback<ends_with_t> { private: // clang-format off template <typename InIter1, typename InIter2, typename Pred = hpx::parallel::v1::detail::equal_to, HPX_CONCEPT_REQUIRES_( hpx::traits::is_iterator_v<InIter1> && hpx::traits::is_iterator_v<InIter2> && hpx::is_invocable_v<Pred, typename std::iterator_traits<InIter1>::value_type, typename std::iterator_traits<InIter2>::value_type > )> // clang-format on 
friend bool tag_fallback_dispatch(hpx::ends_with_t, InIter1 first1, InIter1 last1, InIter2 first2, InIter2 last2, Pred&& pred = Pred()) { static_assert(hpx::traits::is_input_iterator_v<InIter1>, "Required at least input iterator."); static_assert(hpx::traits::is_input_iterator_v<InIter2>, "Required at least input iterator."); return hpx::parallel::v1::detail::ends_with().call( hpx::execution::seq, first1, last1, first2, last2, std::forward<Pred>(pred), parallel::util::projection_identity{}, parallel::util::projection_identity{}); } // clang-format off template <typename ExPolicy, typename FwdIter1, typename FwdIter2, typename Pred = ranges::equal_to, HPX_CONCEPT_REQUIRES_( hpx::is_execution_policy<ExPolicy>::value && hpx::traits::is_iterator_v<FwdIter1> && hpx::traits::is_iterator_v<FwdIter2> && hpx::is_invocable_v<Pred, typename std::iterator_traits<FwdIter1>::value_type, typename std::iterator_traits<FwdIter2>::value_type > )> // clang-format on friend typename parallel::util::detail::algorithm_result<ExPolicy, bool>::type tag_fallback_dispatch(hpx::ends_with_t, ExPolicy&& policy, FwdIter1 first1, FwdIter1 last1, FwdIter2 first2, FwdIter2 last2, Pred&& pred = Pred()) { static_assert(hpx::traits::is_forward_iterator_v<FwdIter1>, "Required at least forward iterator."); static_assert(hpx::traits::is_forward_iterator_v<FwdIter2>, "Required at least forward iterator."); return hpx::parallel::v1::detail::ends_with().call( std::forward<ExPolicy>(policy), first1, last1, first2, last2, std::forward<Pred>(pred), parallel::util::projection_identity{}, parallel::util::projection_identity{}); } } ends_with{}; } // namespace hpx #endif // DOXYGEN
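// Usage sketch for the hpx::ends_with CPO declared above; the data and the
// hpx/execution.hpp include are illustrative assumptions, not taken from this
// file.
#include <hpx/execution.hpp>
#include <hpx/parallel/algorithms/ends_with.hpp>

#include <vector>

bool ends_with_demo()
{
    std::vector<int> haystack{1, 2, 3, 4, 5};
    std::vector<int> needle{4, 5};

    // Sequential overload: returns bool directly.
    bool seq = hpx::ends_with(
        haystack.begin(), haystack.end(), needle.begin(), needle.end());

    // Parallel overload: a non-task policy such as hpx::execution::par also
    // returns bool; a task policy would return hpx::future<bool> instead.
    bool par = hpx::ends_with(hpx::execution::par,
        haystack.begin(), haystack.end(), needle.begin(), needle.end());

    return seq && par;
}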
/* Copyright 2017-2019 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #ifndef LULLABY_BASE_ASSET_LOADER_H_ #define LULLABY_BASE_ASSET_LOADER_H_ #include <algorithm> #include <functional> #include <memory> #include <string> #include "lullaby/modules/file/asset.h" #include "lullaby/util/async_processor.h" #include "lullaby/util/registry.h" #include "lullaby/util/typeid.h" namespace lull { // The AssetLoader is used for loading Asset objects. It provides two main // mechanisms for loading: // - Immediate/Blocking: the entire loading process is performed immediately on // the calling thread. // - Asynchronous: the load is performed using an AsyncProcessor and callbacks // are used to manage the flow of the asset through the system. // // See asset.h for more details. class AssetLoader { public: // The AssetLoader uses an external function to do the actual disk load // operation. It is assumed that this function is thread-safe. // Note: The function signature is based on fplbase::LoadFile. using LoadFileFn = std::function<bool(const char* filename, std::string*)>; // An optional callback that can be used to track errors on file operations. using OnErrorFn = std::function<void(const std::string& filename, ErrorCode error)>; // Constructs the AssetLoader using the default load function. explicit AssetLoader(Registry* registry); // Constructs the AssetLoader using the specified load function. explicit AssetLoader(LoadFileFn load_fn); AssetLoader(const AssetLoader& rhs) = delete; AssetLoader& operator=(const AssetLoader& rhs) = delete; // Creates an Asset object of type |T| using the specified constructor |Args| // and load the data specified by |filename| into the asset. This call blocks // the calling thread until the load is complete and finalized. template <typename T, typename... Args> std::shared_ptr<T> LoadNow(const std::string& filename, Args&&... args); // Similar to LoadNow, but loads the data into the provided |asset| (instead // of creating the asset itself). template <typename T> void LoadIntoNow(const std::string& filename, const std::shared_ptr<T>& asset); // Creates an Asset object of type |T| using the specified constructor |Args| // and load the data specified by |filename| into the asset. This call uses a // worker thread to perform the actual loading of the data after which the // Finalize() function can be called to finish the loading process. template <typename T, typename... Args> std::shared_ptr<T> LoadAsync(const std::string& filename, Args&&... args); // Similar to LoadAsync, but loads the data into the provided |asset| (instead // of creating the asset itself). template <typename T> void LoadIntoAsync(const std::string& filename, const std::shared_ptr<T>& asset); // Finalizes any assets that were loaded asynchronously and are ready for // finalizing. This function should be called on the thread on which it is // safe to Finalize the asset being loaded. 
If |max_num_assets_to_finalize| // is specified, then this function will only attempt to finalize a limited // number of assets per call. // Returns: the number of async load operations still pending. int Finalize(); int Finalize(int max_num_assets_to_finalize); // Sets a load function so that assets can be loaded from different places // using custom load functions. void SetLoadFunction(LoadFileFn load_fn); // Returns the load function set in |SetLoadFunction|. LoadFileFn GetLoadFunction() const; // Returns the default load function. LoadFileFn GetDefaultLoadFunction() const; // Sets a callback that is called when an error occurs during a load // operation. void SetOnErrorFunction(OnErrorFn error_fn); // Starts loading assets asynchronously. This is done automatically on // construction and it only needs to be called explicitly after Stop. void StartAsyncLoads(); // Stops loading assets asynchronously. Blocks until the currently loading // asset has completed. Call StartAsyncLoads to resume loading the assets. void StopAsyncLoads(); private: // Flag indicating the type of load operation being performed. enum LoadMode { kImmediate, kAsynchronous, }; // Internal structure to represent the load request. struct LoadRequest { LoadRequest(const std::string& filename, const AssetPtr& asset); AssetPtr asset; // Asset object to load data into. std::string filename; // Filename of data being loaded. std::string data; // Actual data contents being loaded. ErrorCode error; }; using LoadRequestPtr = std::shared_ptr<LoadRequest>; // Prepares a load request for the given asset. void LoadImpl(const std::string& filename, const AssetPtr& asset, LoadMode mode); // Performs the actual loading for both immediate and asynchronous requests. void DoLoad(LoadRequest* req, LoadMode mode) const; // Performs the "finalizing" for both immediate and asynchronous requests. void DoFinalize(LoadRequest* req, LoadMode mode) const; Registry* registry_ = nullptr; LoadFileFn load_fn_; // Client-provided function for performing actual load. OnErrorFn error_fn_; // Client-provided function for tracking errors. int pending_requests_ = 0; // Number of requests queued for async loading. AsyncProcessor<LoadRequestPtr> processor_; // Async processor for loading // data on a worker thread. }; template <typename T, typename... Args> std::shared_ptr<T> AssetLoader::LoadNow(const std::string& filename, Args&&... args) { auto ptr = std::make_shared<T>(std::forward<Args>(args)...); LoadImpl(filename, ptr, kImmediate); return ptr; } template <typename T, typename... Args> std::shared_ptr<T> AssetLoader::LoadAsync(const std::string& filename, Args&&... args) { auto ptr = std::make_shared<T>(std::forward<Args>(args)...); LoadImpl(filename, ptr, kAsynchronous); return ptr; } template <typename T> void AssetLoader::LoadIntoNow(const std::string& filename, const std::shared_ptr<T>& asset) { LoadImpl(filename, asset, kImmediate); } template <typename T> void AssetLoader::LoadIntoAsync(const std::string& filename, const std::shared_ptr<T>& asset) { LoadImpl(filename, asset, kAsynchronous); } } // namespace lull LULLABY_SETUP_TYPEID(lull::AssetLoader); #endif // LULLABY_BASE_ASSET_LOADER_H_
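// Sketch of the two loading paths described in the header comments above.
// SimpleAsset, the include path, and the file names are hypothetical; only
// LoadNow/LoadAsync/Finalize are taken from the documented API.
#include "lullaby/modules/file/asset_loader.h"  // assumed include path

#include <memory>

// Hypothetical Asset subclass, assuming lull::Asset provides default hooks.
class SimpleAsset : public lull::Asset {};

void AssetLoaderDemo(lull::AssetLoader* loader) {
  // Blocking path: load and finalize on the calling thread.
  std::shared_ptr<SimpleAsset> now = loader->LoadNow<SimpleAsset>("data/a.bin");

  // Asynchronous path: a worker thread loads the bytes; Finalize() must then
  // be pumped on a thread where finalizing this asset type is safe.
  std::shared_ptr<SimpleAsset> later =
      loader->LoadAsync<SimpleAsset>("data/b.bin");
  while (loader->Finalize() > 0) {
    // Finalize() returns the number of async loads still pending; a real app
    // would do per-frame work here instead of spinning.
  }
}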
/*++ Copyright (c) Microsoft Corporation. Licensed under the MIT license. Module Name: font.h Abstract: This module contains the data structures, data types, and procedures related to fonts. Author: <NAME> (thereses) 15-Jan-1991 Revision History: --*/ #pragma once #ifndef FONT_H #define FONT_H #define INITIAL_FONTS 20 #define FONT_INCREMENT 3 // clang-format off #define EF_NEW 0x0001 // a newly available face #define EF_OLD 0x0002 // a previously available face #define EF_ENUMERATED 0x0004 // all sizes have been enumerated #define EF_OEMFONT 0x0008 // an OEM face #define EF_TTFONT 0x0010 // a TT face #define EF_DEFFACE 0x0020 // the default face #define EF_DBCSFONT 0x0040 // the DBCS font // clang-format on /* * FONT_INFO * * The distinction between the desired and actual font dimensions obtained * is important in the case of TrueType fonts, in which there is no guarantee * that what you ask for is what you will get. * * Note that the correspondence between "Desired" and "Actual" is broken * whenever the user changes his display driver, because GDI uses driver * parameters to control the font rasterization. * * The SizeDesired is {0, 0} if the font is a raster font. */ typedef struct _FONT_INFO { HFONT hFont; COORD Size; // font size obtained COORD SizeWant; // 0;0 if Raster font LONG Weight; LPTSTR FaceName; BYTE Family; BYTE tmCharSet; } FONT_INFO, *PFONT_INFO; #pragma warning(push) #pragma warning(disable : 4200) // nonstandard extension used : zero-sized array in struct/union typedef struct tagFACENODE { struct tagFACENODE* pNext; DWORD dwFlag; TCHAR atch[]; } FACENODE, *PFACENODE; #pragma warning(pop) #define TM_IS_TT_FONT(x) (((x)&TMPF_TRUETYPE) == TMPF_TRUETYPE) #define IS_BOLD(w) ((w) >= FW_SEMIBOLD) #define SIZE_EQUAL(s1, s2) (((s1).X == (s2).X) && ((s1).Y == (s2).Y)) #define POINTS_PER_INCH 72 #define MIN_PIXEL_HEIGHT 5 #define MAX_PIXEL_HEIGHT 72 // // Function prototypes // VOID InitializeFonts(VOID); VOID DestroyFonts(VOID); [[nodiscard]] NTSTATUS EnumerateFonts(DWORD Flags); int FindCreateFont( __in DWORD Family, __in_ecount(LF_FACESIZE) LPWSTR ptszFace, __in COORD Size, __in LONG Weight, __in UINT CodePage); BOOL DoFontEnum( __in_opt HDC hDC, __in_ecount_opt(LF_FACESIZE) LPTSTR ptszFace, __in_ecount_opt(nTTPoints) PSHORT pTTPoints, __in UINT nTTPoints); [[nodiscard]] NTSTATUS GetTTFontFaceForCodePage(const UINT uiCodePage, _Out_writes_(cchFaceName) PWSTR pszFaceName, const size_t cchFaceName); bool IsFontSizeCustom(__in PCWSTR pwszFaceName, const __in SHORT sSize); void CreateSizeForAllTTFonts(const __in SHORT sSize); #endif /* !FONT_H */
from typing import Any, Callable, TypeVar AnyCallableT = TypeVar("AnyCallableT", bound=Callable[..., Any]) # noqa: VNE001
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: tensorflow/core/util/event.proto package org.tensorflow.proto.util; /** * <pre> * Indicates the behavior of the worker when an internal error or shutdown * signal is received. * </pre> * * Protobuf enum {@code tensorflow.WorkerShutdownMode} */ public enum WorkerShutdownMode implements com.google.protobuf.ProtocolMessageEnum { /** * <code>DEFAULT = 0;</code> */ DEFAULT(0), /** * <code>NOT_CONFIGURED = 1;</code> */ NOT_CONFIGURED(1), /** * <code>WAIT_FOR_COORDINATOR = 2;</code> */ WAIT_FOR_COORDINATOR(2), /** * <code>SHUTDOWN_AFTER_TIMEOUT = 3;</code> */ SHUTDOWN_AFTER_TIMEOUT(3), UNRECOGNIZED(-1), ; /** * <code>DEFAULT = 0;</code> */ public static final int DEFAULT_VALUE = 0; /** * <code>NOT_CONFIGURED = 1;</code> */ public static final int NOT_CONFIGURED_VALUE = 1; /** * <code>WAIT_FOR_COORDINATOR = 2;</code> */ public static final int WAIT_FOR_COORDINATOR_VALUE = 2; /** * <code>SHUTDOWN_AFTER_TIMEOUT = 3;</code> */ public static final int SHUTDOWN_AFTER_TIMEOUT_VALUE = 3; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static WorkerShutdownMode valueOf(int value) { return forNumber(value); } public static WorkerShutdownMode forNumber(int value) { switch (value) { case 0: return DEFAULT; case 1: return NOT_CONFIGURED; case 2: return WAIT_FOR_COORDINATOR; case 3: return SHUTDOWN_AFTER_TIMEOUT; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<WorkerShutdownMode> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< WorkerShutdownMode> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<WorkerShutdownMode>() { public WorkerShutdownMode findValueByNumber(int number) { return WorkerShutdownMode.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.tensorflow.proto.util.EventProtos.getDescriptor().getEnumTypes().get(1); } private static final WorkerShutdownMode[] VALUES = values(); public static WorkerShutdownMode valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private WorkerShutdownMode(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:tensorflow.WorkerShutdownMode) }
<gh_stars>1000+ // Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CRAZY_LINKER_SYSTEM_LINKER_H #define CRAZY_LINKER_SYSTEM_LINKER_H #ifdef __ANDROID__ #include <android/dlext.h> #endif #include <dlfcn.h> namespace crazy { // Convenience wrapper for the system linker functions. // Also helps synchronize access to the global link map list. // // TODO(digit): Use this in the future to mock different versions/behaviours // of the Android system linker for unit-testing purposes. struct SystemLinker { // Wrapper for dlopen(). static void* Open(const char* path, int flags); #ifdef __ANDROID__ // Returns true iff this system linker provides android_dlopen_ext(). static bool HasAndroidOpenExt(); // Calls android_dlopen_ext() if available, returns nullptr if it is not // available otherwise. static void* AndroidOpenExt(const char* path, int flags, const android_dlextinfo* info); #endif // __ANDROID__ // Wrapper for dlclose(). static int Close(void* handle); // Result type for Resolve() below. struct SearchResult { void* address = nullptr; void* library = nullptr; constexpr bool IsValid() const { return library != nullptr; } }; // Wrapper for dlsym(). static SearchResult Resolve(void* handle, const char* symbol); // Wrapper for dlerror(). static const char* Error(); // Wrapper for dladdr(); static int AddressInfo(void* addr, Dl_info* info); }; } // namespace crazy #endif // CRAZY_LINKER_SYSTEM_LINKER_H
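// Usage sketch for the SystemLinker wrapper above; the library name, symbol
// name, and include path are placeholders. The sequence mirrors a plain
// dlopen()/dlsym()/dlclose() round trip.
#include "crazy_linker_system_linker.h"  // assumed include path for this header

void SystemLinkerDemo() {
  void* lib = crazy::SystemLinker::Open("libplaceholder.so", RTLD_NOW);
  if (!lib) {
    const char* error = crazy::SystemLinker::Error();  // dlerror() text
    (void)error;  // handle/log the error in real code
    return;
  }
  crazy::SystemLinker::SearchResult result =
      crazy::SystemLinker::Resolve(lib, "placeholder_symbol");
  if (result.IsValid()) {
    // result.address points at the resolved symbol; result.library identifies
    // the shared object that provided it.
  }
  crazy::SystemLinker::Close(lib);
}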
// Blend2D - 2D Vector Graphics Powered by a JIT Compiler // // * Official Blend2D Home Page: https://blend2d.com // * Official Github Repository: https://github.com/blend2d/blend2d // // Copyright (c) 2017-2020 The Blend2D Authors // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. #ifndef BLEND2D_RGBA_H_INCLUDED #define BLEND2D_RGBA_H_INCLUDED #include "./api.h" BL_DIAGNOSTIC_PUSH(BL_DIAGNOSTIC_NO_SHADOW) //! \addtogroup blend2d_api_styling //! \{ // ============================================================================ // [BLRgba32] // ============================================================================ //! 32-bit RGBA color (8-bit per component) stored as `0xAARRGGBB`. struct BLRgba32 { union { uint32_t value; struct { #if BL_BYTE_ORDER == 1234 // LITTLE ENDIAN uint32_t b : 8; uint32_t g : 8; uint32_t r : 8; uint32_t a : 8; #else uint32_t a : 8; uint32_t r : 8; uint32_t g : 8; uint32_t b : 8; #endif }; }; // -------------------------------------------------------------------------- #ifdef __cplusplus //! \name Construction & Destruction //! \{ BL_INLINE BLRgba32() noexcept = default; BL_INLINE BLRgba32(const BLRgba32&) noexcept = default; BL_INLINE explicit BLRgba32(uint32_t rgba32) noexcept : value(rgba32) {} BL_INLINE explicit BLRgba32(const BLRgba64& rgba64) noexcept { reset(rgba64); } BL_INLINE BLRgba32(uint32_t r, uint32_t g, uint32_t b, uint32_t a = 0xFFu) noexcept : value((a << 24) | (r << 16) | (g << 8) | b) {} //! \} //! \name Overloaded Operators //! \{ BL_INLINE explicit operator bool() const noexcept { return this->value != 0; } BL_NODISCARD BL_INLINE bool operator==(const BLRgba32& other) const noexcept { return equals(other); } BL_NODISCARD BL_INLINE bool operator!=(const BLRgba32& other) const noexcept { return !equals(other); } //! \} //! \name Common Functionality //! \{ BL_INLINE void reset() noexcept { this->value = 0u; } BL_INLINE void reset(uint32_t rgba32) noexcept { this->value = rgba32;} BL_INLINE void reset(uint32_t r, uint32_t g, uint32_t b, uint32_t a = 0xFFu) noexcept { *this = BLRgba32(r, g, b, a); } BL_INLINE void reset(const BLRgba32& rgba32) noexcept { value = rgba32.value; } BL_INLINE void reset(const BLRgba64& rgba64) noexcept; BL_NODISCARD BL_INLINE bool equals(const BLRgba32& other) const noexcept { return blEquals(this->value, other.value); } //! \} //! \name Utilities //! \{ //! Tests whether the color is fully-opaque (alpha equals 0xFFFF). BL_NODISCARD BL_INLINE bool isOpaque() const noexcept { return this->value >= 0xFF000000u; } //! Tests whether the color is fully-transparent (alpha equals 0). BL_NODISCARD BL_INLINE bool isTransparent() const noexcept { return this->value <= 0x00FFFFFFu; } //! 
\} #endif // -------------------------------------------------------------------------- }; #ifdef __cplusplus static BL_INLINE BLRgba32 blMin(const BLRgba32& a, const BLRgba32& b) noexcept { return BLRgba32(blMin((a.value >> 16) & 0xFFu, (b.value >> 16) & 0xFFu), blMin((a.value >> 8) & 0xFFu, (b.value >> 8) & 0xFFu), blMin((a.value ) & 0xFFu, (b.value ) & 0xFFu), blMin((a.value >> 24) & 0xFFu, (b.value >> 24) & 0xFFu)); } static BL_INLINE BLRgba32 blMax(const BLRgba32& a, const BLRgba32& b) noexcept { return BLRgba32(blMax((a.value >> 16) & 0xFFu, (b.value >> 16) & 0xFFu), blMax((a.value >> 8) & 0xFFu, (b.value >> 8) & 0xFFu), blMax((a.value ) & 0xFFu, (b.value ) & 0xFFu), blMax((a.value >> 24) & 0xFFu, (b.value >> 24) & 0xFFu)); } #endif // ============================================================================ // [BLRgba64] // ============================================================================ //! 64-bit RGBA color (16-bit per component) stored as `0xAAAARRRRGGGGBBBB`. struct BLRgba64 { union { uint64_t value; struct { #if BL_BYTE_ORDER == 1234 // LITTLE ENDIAN uint32_t b : 16; uint32_t g : 16; uint32_t r : 16; uint32_t a : 16; #else uint32_t a : 16; uint32_t r : 16; uint32_t g : 16; uint32_t b : 16; #endif }; }; // -------------------------------------------------------------------------- #ifdef __cplusplus //! \name Construction & Destruction //! \{ BL_INLINE BLRgba64() noexcept = default; BL_INLINE BLRgba64(const BLRgba64&) noexcept = default; BL_INLINE explicit BLRgba64(uint64_t rgba64) noexcept : value(rgba64) {} BL_INLINE BLRgba64(uint32_t r, uint32_t g, uint32_t b, uint32_t a = 0xFFFFu) noexcept : value(((uint64_t)a << 48) | ((uint64_t)r << 32) | ((uint64_t)g << 16) | ((uint64_t)b ) ) {} BL_INLINE explicit BLRgba64(const BLRgba32& rgba32) noexcept { reset(rgba32); } //! \} //! \name Overloaded Operators //! \{ BL_INLINE explicit operator bool() const noexcept { return this->value != 0; } BL_INLINE bool operator==(const BLRgba64& other) const noexcept { return equals(other); } BL_INLINE bool operator!=(const BLRgba64& other) const noexcept { return !equals(other); } //! \} //! \name Common Functionality //! \{ BL_INLINE void reset() noexcept { this->value = 0u; } BL_INLINE void reset(uint64_t rgba64) noexcept { this->value = rgba64; } BL_INLINE void reset(uint32_t r, uint32_t g, uint32_t b, uint32_t a = 0xFFFFu) noexcept { *this = BLRgba64(r, g, b, a); } BL_INLINE void reset(const BLRgba64& rgba64) noexcept { this->value = rgba64.value; } BL_INLINE void reset(const BLRgba32& rgba32) noexcept { reset(rgba32.r | (uint32_t(rgba32.r) << 8u), rgba32.g | (uint32_t(rgba32.g) << 8u), rgba32.b | (uint32_t(rgba32.b) << 8u), rgba32.a | (uint32_t(rgba32.a) << 8u)); } BL_INLINE bool equals(const BLRgba64& other) const noexcept { return blEquals(this->value, other.value); } //! \} //! \name Utilities //! \{ //! Tests whether the color is fully-opaque (alpha equals 0xFFFF). BL_NODISCARD BL_INLINE bool isOpaque() const noexcept { return this->value >= 0xFFFF000000000000u; } //! Tests whether the color is fully-transparent (alpha equals 0). BL_NODISCARD BL_INLINE bool isTransparent() const noexcept { return this->value <= 0x0000FFFFFFFFFFFFu; } //! 
\} #endif // -------------------------------------------------------------------------- }; #ifdef __cplusplus BL_NODISCARD static BL_INLINE BLRgba64 blMin(const BLRgba64& a, const BLRgba64& b) noexcept { return BLRgba64(blMin(uint32_t((a.value >> 32) & 0xFFFFu), uint32_t((b.value >> 32) & 0xFFFFu)), blMin(uint32_t((a.value >> 16) & 0xFFFFu), uint32_t((b.value >> 16) & 0xFFFFu)), blMin(uint32_t((a.value ) & 0xFFFFu), uint32_t((b.value ) & 0xFFFFu)), blMin(uint32_t((a.value >> 48) & 0xFFFFu), uint32_t((b.value >> 48) & 0xFFFFu))); } BL_NODISCARD static BL_INLINE BLRgba64 blMax(const BLRgba64& a, const BLRgba64& b) noexcept { return BLRgba64(blMax(uint32_t((a.value >> 32) & 0xFFFFu), uint32_t((b.value >> 32) & 0xFFFFu)), blMax(uint32_t((a.value >> 16) & 0xFFFFu), uint32_t((b.value >> 16) & 0xFFFFu)), blMax(uint32_t((a.value ) & 0xFFFFu), uint32_t((b.value ) & 0xFFFFu)), blMax(uint32_t((a.value >> 48) & 0xFFFFu), uint32_t((b.value >> 48) & 0xFFFFu))); } #endif // ============================================================================ // [BLRgba] // ============================================================================ //! 128-bit RGBA color stored as 4 32-bit floating point values in [RGBA] order. struct BLRgba { float r; float g; float b; float a; // -------------------------------------------------------------------------- #ifdef __cplusplus //! \name Construction & Destruction //! \{ BL_INLINE BLRgba() noexcept = default; constexpr BLRgba(const BLRgba&) noexcept = default; constexpr BLRgba(float r, float g, float b, float a = 1.0f) noexcept : r(r), g(g), b(b), a(a) {} BL_INLINE BLRgba(const BLRgba32& rgba32) noexcept : r(float(int(rgba32.r)) * (1.0f / 255.0f)), g(float(int(rgba32.g)) * (1.0f / 255.0f)), b(float(int(rgba32.b)) * (1.0f / 255.0f)), a(float(int(rgba32.a)) * (1.0f / 255.0f)) {} BL_INLINE BLRgba(const BLRgba64& rgba64) noexcept : r(float(int(rgba64.r)) * (1.0f / 65535.0f)), g(float(int(rgba64.g)) * (1.0f / 65535.0f)), b(float(int(rgba64.b)) * (1.0f / 65535.0f)), a(float(int(rgba64.a)) * (1.0f / 65535.0f)) {} //! \} //! \name Overloaded Operators //! \{ constexpr explicit operator bool() const noexcept { return (this->r == 0.0f) & (this->g == 0.0f) & (this->b == 0.0f) & (this->a == 0.0f) ; } BL_NODISCARD BL_INLINE bool operator==(const BLRgba& other) const noexcept { return equals(other); } BL_NODISCARD BL_INLINE bool operator!=(const BLRgba& other) const noexcept { return !equals(other); } //! \} //! \name Common Functionality //! 
\{ BL_INLINE void reset() noexcept { reset(0.0f, 0.0f, 0.0f, 0.0f); } BL_INLINE void reset(const BLRgba32& rgba32) noexcept { *this = BLRgba(rgba32); } BL_INLINE void reset(const BLRgba64& rgba64) noexcept { *this = BLRgba(rgba64); } BL_INLINE void reset(const BLRgba& other) noexcept { reset(other.r, other.g, other.b, other.a); } BL_INLINE void reset(float r, float g, float b, float a = 1.0f) noexcept { this->r = r; this->g = g; this->b = b; this->a = a; } BL_NODISCARD BL_INLINE bool equals(const BLRgba32& rgba32) const noexcept { return equals(BLRgba(rgba32)); } BL_NODISCARD BL_INLINE bool equals(const BLRgba64& rgba64) const noexcept { return equals(BLRgba(rgba64)); } BL_NODISCARD BL_INLINE bool equals(const BLRgba& other) const noexcept { return blEquals(this->r, other.r) & blEquals(this->g, other.g) & blEquals(this->b, other.b) & blEquals(this->a, other.a) ; } BL_NODISCARD BL_INLINE bool equals(float r, float g, float b, float a = 1.0f) const noexcept { return blEquals(this->r, r) & blEquals(this->g, g) & blEquals(this->b, b) & blEquals(this->a, a) ; } //! \} //! \name Utilities //! \{ //! Tests whether the color is fully-opaque (alpha equals 1.0). BL_NODISCARD constexpr bool isOpaque() const noexcept { return this->a >= 1.0; } //! Tests whether the color is fully-transparent (alpha equals 0.0). BL_NODISCARD constexpr bool isTransparent() const noexcept { return this->a == 0.0; } //! \} #endif // -------------------------------------------------------------------------- }; #ifdef __cplusplus template<> BL_NODISCARD constexpr BL_INLINE BLRgba blMin(const BLRgba& a, const BLRgba& b) noexcept { return BLRgba(blMin(a.r, b.r), blMin(a.g, b.g), blMin(a.b, b.b), blMin(a.a, b.a)); } template<> BL_NODISCARD constexpr BL_INLINE BLRgba blMax(const BLRgba& a, const BLRgba& b) noexcept { return BLRgba(blMax(a.r, b.r), blMax(a.g, b.g), blMax(a.b, b.b), blMax(a.a, b.a)); } #endif // ============================================================================ // [Out of Class] // ============================================================================ #ifdef __cplusplus BL_INLINE void BLRgba32::reset(const BLRgba64& rgba64) noexcept { uint32_t hi = uint32_t(rgba64.value >> 32); uint32_t lo = uint32_t(rgba64.value & 0xFFFFFFFFu); this->value = ((hi & 0xFF000000) ) + ((lo & 0xFF000000) >> 16) + ((hi & 0x0000FF00) << 8) + ((lo & 0x0000FF00) >> 8) ; } #endif // ============================================================================ // [Constraints] // ============================================================================ #ifdef __cplusplus static_assert(sizeof(BLRgba) == 16, "'BLRgba' struct must be exactly 16 bytes long"); static_assert(sizeof(BLRgba32) == 4, "'BLRgba32' struct must be exactly 4 bytes long"); static_assert(sizeof(BLRgba64) == 8, "'BLRgba64' struct must be exactly 8 bytes long"); #endif //! \} BL_DIAGNOSTIC_POP #endif // BLEND2D_RGBA_H_INCLUDED
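// Usage sketch for the three color types above; the values and the umbrella
// include are illustrative (rgba.h itself is pulled in via api.h).
#include <blend2d.h>

void RgbaDemo() {
  BLRgba32 c32(0x20u, 0x40u, 0x80u, 0xFFu);  // packs to 0xFF204080 (0xAARRGGBB)
  BLRgba64 c64(c32);                         // widens each 8-bit channel to 16 bits
  BLRgba f(c32);                             // normalizes channels to [0, 1] floats

  bool opaque = c32.isOpaque();              // true here: alpha == 0xFF
  (void)c64; (void)f; (void)opaque;
}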
# Example usage with pygmsh package: # https://github.com/nschloe/pygmsh import pygmsh with pygmsh.occ.Geometry() as geom: geom.characteristic_length_min = 0.1 geom.characteristic_length_max = 0.1 rectangle = geom.add_rectangle([-1.0, -1.0, 0.0], 2.0, 2.0) disk1 = geom.add_disk([-1.2, 0.0, 0.0], 0.5) disk2 = geom.add_disk([+1.2, 0.0, 0.0], 0.5) disk3 = geom.add_disk([0.0, -0.9, 0.0], 0.5) disk4 = geom.add_disk([0.0, +0.9, 0.0], 0.5) flat = geom.boolean_difference( geom.boolean_union([rectangle, disk1, disk2]), geom.boolean_union([disk3, disk4]), ) geom.extrude(flat, [0, 0, 0.3]) msh = geom.generate_mesh() from vedo import TetMesh, show lines, triangles, tetras, vertices = msh.cells m = TetMesh([msh.points, tetras[1]]).tomesh() show(m, "Drag the sphere,\nright-click to zoom", axes=1, interactive=False).addCutterTool(mode='sphere')
<reponame>devLupin/face.evoLVe
from utils import *
from tqdm import tqdm
from paddle import inference
import os, cv2
import numpy as np
from PIL import ImageDraw, ImageFont, Image
import pickle


class FaceEval:
    def __init__(self):
        self.threshold = 0.4
        self.mtcnn = MTCNN()
        self.face_eval = self.init_resnet50_predictor('../model/Backbone')
        self.face_db_path = 'FaceDatabase'
        self.face_data_path = 'face_data.fdb'
        self.face_db = self.load_face_data()
        self.mtcnn_input_scale = 0.4  # downscale the input image to speed up detection

    def update_face_data(self):
        '''
        Rebuild the face feature database from the images on disk.
        :return:
        '''
        face_db = {}
        assert os.path.exists(self.face_db_path), 'face_db_path {} not exist'.format(self.face_db_path)
        for path in tqdm(os.listdir(self.face_db_path)):
            name = os.path.basename(path).split('.')[0]
            image_path = os.path.join(self.face_db_path, path)
            # print(image_path)
            img = cv2.imdecode(np.fromfile(image_path, dtype=np.uint8), -1)
            imgs, _ = self.mtcnn.infer_image(img, img)
            if imgs is None or len(imgs) > 1:
                print('Image %s in the face database does not contain exactly one face, skipping it' % image_path)
                continue
            imgs = self.process(imgs)
            feature = self.infer(imgs)
            face_db[name] = feature[0]
        with open(self.face_data_path, "wb") as f:
            pickle.dump(face_db, f)
        print('finished faceDatabase transform!')
        return face_db

    def load_face_data(self):
        if not os.path.exists(self.face_data_path):
            print('face_data_path does not exist, trying to rebuild the face database!')
            face_db = self.update_face_data()
            return face_db
        with open(self.face_data_path, "rb") as f:
            face_db = pickle.load(f)
        print('finished load face_data!')
        return face_db

    @staticmethod
    def process(imgs):
        imgs1 = []
        for img in imgs:
            img = img.transpose((2, 0, 1))
            img = (img - 127.5) / 127.5
            imgs1.append(img)
        if len(imgs1) > 1:
            imgs = np.array(imgs1).astype('float32')
        else:
            imgs = imgs1[0][np.newaxis, :].astype('float32')
        return imgs

    @staticmethod
    def init_resnet50_predictor(model_dir):
        model_file = model_dir + '.pdmodel'
        params_file = model_dir + '.pdiparams'
        config = inference.Config()
        config.set_prog_file(model_file)
        config.set_params_file(params_file)
        config.enable_use_gpu(500, 0)
        predictor = inference.create_predictor(config)
        return predictor

    def infer(self, imgs):
        '''
        Extract face feature vectors for comparison.
        :param imgs:
        :return:
        '''
        # get the input names
        input_names = self.face_eval.get_input_names()
        handle_image = self.face_eval.get_input_handle(input_names[0])
        # set the input
        input_img_size = imgs.shape
        handle_image.reshape([input_img_size[0], 3, input_img_size[2], input_img_size[3]])
        handle_image.copy_from_cpu(imgs)
        # run the predictor
        self.face_eval.run()
        # get the output
        output_names = self.face_eval.get_output_names()
        features = self.face_eval.get_output_handle(output_names[0])
        features = features.copy_to_cpu()  # numpy.ndarray
        return features

    def recognition(self, img):
        orimg_shape = img.shape
        resize_img = cv2.resize(img, (int(orimg_shape[1] * self.mtcnn_input_scale),
                                      int(orimg_shape[0] * self.mtcnn_input_scale)))
        imgs, boxes = self.mtcnn.infer_image(resize_img, img, self.mtcnn_input_scale)
        if imgs is None:
            return None, None
        imgs = self.process(imgs)
        features = self.infer(imgs)
        names = []
        probs = []
        for i in range(len(features)):
            feature = features[i]
            results_dict = {}
            for name in self.face_db.keys():
                feature1 = self.face_db[name]
                prob = np.dot(feature, feature1) / (np.linalg.norm(feature) * np.linalg.norm(feature1))
                results_dict[name] = prob
            results = sorted(results_dict.items(), key=lambda d: d[1], reverse=True)
            result = results[0]
            prob = float(result[1])
            probs.append(prob)
            if prob > self.threshold:
                name = result[0]
                names.append(name)
            else:
                names.append('unknown')
        return boxes, names

    def add_text(self, img, text, left, top, color=(0, 0, 0), size=20):
        if isinstance(img, np.ndarray):
            img = Image.fromarray(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
        draw = ImageDraw.Draw(img)
        font = ImageFont.truetype('simsun.ttc', size)
        draw.text((left, top), text, color, font=font)
        return cv2.cvtColor(np.array(img), cv2.COLOR_RGB2BGR)

    # draw face boxes and landmarks
    def draw_face(self, img, boxes_c, names):
        if boxes_c is not None:
            for i in range(boxes_c.shape[0]):
                bbox = boxes_c[i, :4]
                name = names[i]
                corpbbox = [int(bbox[0]), int(bbox[1]), int(bbox[2]), int(bbox[3])]
                # draw the face bounding box
                cv2.rectangle(img, (corpbbox[0], corpbbox[1]),
                              (corpbbox[2], corpbbox[3]), (255, 0, 0), 2)
                # label the recognized name
                # font = cv2.FONT_HERSHEY_SIMPLEX  # choose a font
                # img = cv2.putText(img, name, (corpbbox[0], corpbbox[1]), font, 0.5, (0, 255, 0), 1)
                img = self.add_text(img, name, corpbbox[0], corpbbox[1] + 25,
                                    color=(255, 255, 0), size=30)
        cv2.imshow("result", img)
        cv2.waitKey(1)
        return img


if __name__ == '__main__':
    test = FaceEval()
    test.update_face_data()
    cap = cv2.VideoCapture('test.mp4')
    ret = True
    while ret:
        ret, img = cap.read()
        if ret:
            boxes, names = test.recognition(img)
            print(names)
            img = test.draw_face(img, boxes, names)
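The matching step in `recognition` above reduces to a cosine-similarity nearest-neighbour search over the enrolled feature vectors, thresholded at 0.4. A minimal standalone sketch of just that step (the function name `match_face` is illustrative, not part of the repository):

import numpy as np

def match_face(feature, face_db, threshold=0.4):
    """Return (name, score) for the closest enrolled face, or 'unknown'."""
    best_name, best_score = 'unknown', -1.0
    for name, ref in face_db.items():
        # cosine similarity between the query feature and the enrolled feature
        score = np.dot(feature, ref) / (np.linalg.norm(feature) * np.linalg.norm(ref))
        if score > best_score:
            best_name, best_score = name, score
    if best_score <= threshold:
        return 'unknown', best_score
    return best_name, best_score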
<reponame>omerlevran46/sdk
// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "platform/globals.h"
#if defined(DART_HOST_OS_FUCHSIA)

#include "bin/crypto.h"

#include <zircon/syscalls.h>

namespace dart {
namespace bin {

bool Crypto::GetRandomBytes(intptr_t count, uint8_t* buffer) {
  zx_cprng_draw(buffer, count);
  return true;
}

}  // namespace bin
}  // namespace dart

#endif  // defined(DART_HOST_OS_FUCHSIA)
<reponame>clayne/vkhr
#ifndef VKHR_INPUT_MAP_HH
#define VKHR_INPUT_MAP_HH

#include <vkhr/window.hh>

#include <imgui.h>
#include <imgui_impl_glfw.h>

#include <glm/glm.hpp>

#include <string>
#include <vector>
#include <unordered_map>

namespace vkhr {
    namespace Input {
        enum class State {
            Pressed = GLFW_PRESS,
            Released = GLFW_RELEASE,
            Repeat = GLFW_REPEAT,
            JustPressed,
            JustReleased
        };

        enum class Key {
            Unknown = GLFW_KEY_UNKNOWN,
            Space = GLFW_KEY_SPACE,
            Apostrophe = GLFW_KEY_APOSTROPHE,
            Comma = GLFW_KEY_COMMA,
            Minus = GLFW_KEY_MINUS,
            Period = GLFW_KEY_PERIOD,
            Slash = GLFW_KEY_SLASH,
            Zero = GLFW_KEY_0, One = GLFW_KEY_1, Two = GLFW_KEY_2,
            Three = GLFW_KEY_3, Four = GLFW_KEY_4, Five = GLFW_KEY_5,
            Six = GLFW_KEY_6, Seven = GLFW_KEY_7, Eight = GLFW_KEY_8,
            Nine = GLFW_KEY_9,
            Semicolon = GLFW_KEY_SEMICOLON,
            Equal = GLFW_KEY_EQUAL,
            A = GLFW_KEY_A, B = GLFW_KEY_B, C = GLFW_KEY_C, D = GLFW_KEY_D,
            E = GLFW_KEY_E, F = GLFW_KEY_F, G = GLFW_KEY_G, H = GLFW_KEY_H,
            I = GLFW_KEY_I, J = GLFW_KEY_J, K = GLFW_KEY_K, L = GLFW_KEY_L,
            M = GLFW_KEY_M, N = GLFW_KEY_N, O = GLFW_KEY_O, P = GLFW_KEY_P,
            Q = GLFW_KEY_Q, R = GLFW_KEY_R, S = GLFW_KEY_S, T = GLFW_KEY_T,
            U = GLFW_KEY_U, V = GLFW_KEY_V, W = GLFW_KEY_W, X = GLFW_KEY_X,
            Y = GLFW_KEY_Y, Z = GLFW_KEY_Z,
            LeftBracket = GLFW_KEY_LEFT_BRACKET,
            Backslash = GLFW_KEY_BACKSLASH,
            RightBracket = GLFW_KEY_RIGHT_BRACKET,
            GraveAccent = GLFW_KEY_GRAVE_ACCENT,
            World1 = GLFW_KEY_WORLD_1,
            World2 = GLFW_KEY_WORLD_2,
            Escape = GLFW_KEY_ESCAPE,
            Enter = GLFW_KEY_ENTER,
            Tab = GLFW_KEY_TAB,
            Backspace = GLFW_KEY_BACKSPACE,
            Insert = GLFW_KEY_INSERT,
            Delete = GLFW_KEY_DELETE,
            Right = GLFW_KEY_RIGHT, Left = GLFW_KEY_LEFT,
            Down = GLFW_KEY_DOWN, Up = GLFW_KEY_UP,
            PageUp = GLFW_KEY_PAGE_UP, PageDown = GLFW_KEY_PAGE_DOWN,
            Home = GLFW_KEY_HOME, End = GLFW_KEY_END,
            CapsLock = GLFW_KEY_CAPS_LOCK,
            ScrollLock = GLFW_KEY_SCROLL_LOCK,
            NumLock = GLFW_KEY_NUM_LOCK,
            PrintScreen = GLFW_KEY_PRINT_SCREEN,
            Pause = GLFW_KEY_PAUSE,
            F1 = GLFW_KEY_F1, F2 = GLFW_KEY_F2, F3 = GLFW_KEY_F3,
            F4 = GLFW_KEY_F4, F5 = GLFW_KEY_F5, F6 = GLFW_KEY_F6,
            F7 = GLFW_KEY_F7, F8 = GLFW_KEY_F8, F9 = GLFW_KEY_F9,
            F10 = GLFW_KEY_F10, F11 = GLFW_KEY_F11, F12 = GLFW_KEY_F12,
            F13 = GLFW_KEY_F13, F14 = GLFW_KEY_F14, F15 = GLFW_KEY_F15,
            F16 = GLFW_KEY_F16, F17 = GLFW_KEY_F17, F18 = GLFW_KEY_F18,
            F19 = GLFW_KEY_F19, F20 = GLFW_KEY_F20, F21 = GLFW_KEY_F21,
            F22 = GLFW_KEY_F22, F23 = GLFW_KEY_F23, F24 = GLFW_KEY_F24,
            F25 = GLFW_KEY_F25,
            Keypad0 = GLFW_KEY_KP_0, Keypad1 = GLFW_KEY_KP_1,
            Keypad2 = GLFW_KEY_KP_2, Keypad3 = GLFW_KEY_KP_3,
            Keypad4 = GLFW_KEY_KP_4, Keypad5 = GLFW_KEY_KP_5,
            Keypad6 = GLFW_KEY_KP_6, Keypad7 = GLFW_KEY_KP_7,
            Keypad8 = GLFW_KEY_KP_8, Keypad9 = GLFW_KEY_KP_9,
            KeypadDecimal = GLFW_KEY_KP_DECIMAL,
            KeypadDivide = GLFW_KEY_KP_DIVIDE,
            KeypadMultiply = GLFW_KEY_KP_MULTIPLY,
            KeypadSubtract = GLFW_KEY_KP_SUBTRACT,
            KeypadAdd = GLFW_KEY_KP_ADD,
            KeypadEnter = GLFW_KEY_KP_ENTER,
            KeypadEqual = GLFW_KEY_KP_EQUAL,
            LeftShift = GLFW_KEY_LEFT_SHIFT,
            LeftControl = GLFW_KEY_LEFT_CONTROL,
            LeftAlt = GLFW_KEY_LEFT_ALT,
            LeftSuper = GLFW_KEY_LEFT_SUPER,
            RightShift = GLFW_KEY_RIGHT_SHIFT,
            RightControl = GLFW_KEY_RIGHT_CONTROL,
            RightAlt = GLFW_KEY_RIGHT_ALT,
            RightSuper = GLFW_KEY_RIGHT_SUPER,
            Menu = GLFW_KEY_MENU
        };

        enum class MouseButton {
            Left = GLFW_MOUSE_BUTTON_LEFT,
            Middle = GLFW_MOUSE_BUTTON_MIDDLE,
            Right = GLFW_MOUSE_BUTTON_RIGHT
        };
    }

    class InputMap final {
    public:
        InputMap(Window& window);

        void unbind(const std::string& id);

        void bind(const std::string& id, Input::Key key);
        void bind(const std::string& id, const std::vector<Input::Key>& keys);
        void bind(const std::string& id, Input::MouseButton mouse_button);
        void bind(const std::string& id, const std::vector<Input::MouseButton>& mouse_buttons);

        std::vector<Input::MouseButton> get_mouse_button_map(const std::string& id) const;
        std::vector<Input::Key> get_key_map(const std::string& id) const;

        bool pressed(Input::Key key) const;
        bool just_pressed(Input::Key key);
        bool just_released(Input::Key key);
        bool released(Input::Key key) const;

        bool pressed(Input::MouseButton key) const;
        bool just_pressed(Input::MouseButton key);
        bool just_released(Input::MouseButton key);
        bool released(Input::MouseButton key) const;

        bool pressed(const std::string& id) const;
        bool just_pressed(const std::string& id);
        bool just_released(const std::string& id);
        bool released(const std::string& id) const;

        glm::vec2 get_mouse_position() const;
        glm::vec2 get_scroll_offset() const;
        void reset_scrolling_offset();

        void freeze_cursor();
        void unlock_cursor();
        void toggle_mouse_lock();
        bool is_mouse_locked() const;

    private:
        // FIXME: Since we can't pass around private member functions
        // to the GLFW callback functions, we do a hack and map these
        // window pointers to their specific input mappers on create.

        static void mouse_button_callback(GLFWwindow*, int, int, int);
        static std::unordered_map<GLFWwindow*, InputMap*> callback_map;
        static void scroll_callback(GLFWwindow*, double, double);
        static void key_callback(GLFWwindow*, int, int, int, int);
        static void char_callback(GLFWwindow*, unsigned int);

        std::unordered_map<Input::Key, Input::State> key_state_map;
        std::unordered_map<Input::MouseButton, Input::State> mouse_button_state;
        std::unordered_multimap<std::string, Input::MouseButton> mouse_button_map;
        std::unordered_multimap<std::string, Input::Key> key_map;

        glm::vec2 scroll_offsets { 0.0f };
        bool mouse_locked { false };

        GLFWwindow* handle;
    };
}

#endif
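`InputMap` resolves a named action to any of several bound keys or mouse buttons through a multimap, so querying `pressed("quit")` is true if any binding for that id is down. A tiny Python model of that lookup, purely illustrative (the class and names are not part of the repository):

from collections import defaultdict

class ActionMap:
    def __init__(self):
        self.bindings = defaultdict(set)   # action id -> set of key codes
        self.down = set()                  # key codes currently pressed

    def bind(self, action, *keys):
        self.bindings[action].update(keys)

    def pressed(self, action):
        # true if any key bound to this action is currently down
        return any(k in self.down for k in self.bindings[action])

m = ActionMap()
m.bind("quit", "Escape", "Q")
m.down.add("Q")
assert m.pressed("quit")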
<reponame>em3ndez/trireme
package io.apigee.trireme.kernel.test;

import io.apigee.trireme.kernel.Charsets;
import io.apigee.trireme.kernel.OSException;
import io.apigee.trireme.kernel.handles.IOCompletionHandler;
import io.apigee.trireme.kernel.handles.NIOSocketHandle;
import io.apigee.trireme.kernel.handles.TLSHandle;
import io.apigee.trireme.kernel.tls.AllTrustingManager;
import io.apigee.trireme.kernel.tls.TLSConnection;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.Ignore;

import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.concurrent.ExecutionException;

import static org.junit.Assert.*;

public class TLSHandleClientTest {
    private static SocketServer server;
    private static StubNodeRuntime runtime;

    @Ignore("Broken in Java 11")
    @Test
    public void testEcho()
        throws InterruptedException, IOException, NoSuchAlgorithmException,
               KeyManagementException, ExecutionException {
        final OutputAccumulator output = new OutputAccumulator();
        final String TEST = "Hello There Server!";
        final ByteBuffer cmd = TestCommand.makeCommand("ECHO", TEST.getBytes(Charsets.ASCII));

        NIOSocketHandle nioHandle = new NIOSocketHandle(runtime);

        // What we do in Trireme -- set up SSLContext with a trust manager
        // that says yes to everything. Then call the trust manager later
        // manually to return errors the way that Node wants.
        SSLContext context = SSLContext.getInstance("TLS");
        context.init(null, new TrustManager[] { AllTrustingManager.INSTANCE }, null);

        TLSConnection tls = new TLSConnection(runtime, false, "localhost", server.getPort());
        TrustManager[] tms = TLSUtils.getTrustManagers();
        tls.init(context, null, (X509TrustManager)tms[0]);

        final TLSHandle handle = new TLSHandle(nioHandle, tls);

        runtime.executeScriptTask(new Runnable() {
            @Override
            public void run() {
                try {
                    handle.connect("localhost", server.getPort(), new IOCompletionHandler<Integer>() {
                        @Override
                        public void ioComplete(int errCode, Integer value) {
                            // For TLS handshake to work, we need to be reading.
                            // TODO what if we're not? Add a test.
                            handle.startReading(output);
                            handle.write(cmd, null);
                        }
                    });
                } catch (OSException ose) {
                    output.ioComplete(ose.getCode(), null);
                }
            }
        }, null);

        while (output.getResultLength() < TEST.length()) {
            Thread.sleep(50L);
        }

        runtime.executeScriptTask(new Runnable() {
            @Override
            public void run() {
                handle.shutdown(new IOCompletionHandler<Integer>() {
                    @Override
                    public void ioComplete(int errCode, Integer value) {
                        handle.close();
                    }
                });
            }
        }, null);

        String result = new String(output.getResults(), Charsets.ASCII);
        assertEquals(TEST, result);
    }

    @Ignore("Broken in Java 11")
    @Test
    public void testEchoRemoteEnd()
        throws InterruptedException, IOException, NoSuchAlgorithmException,
               KeyManagementException, ExecutionException {
        final OutputAccumulator output = new OutputAccumulator();
        final String TEST = "Hello There Server!";
        final ByteBuffer cmd = TestCommand.makeCommand("ECHO", TEST.getBytes(Charsets.ASCII));

        NIOSocketHandle nioHandle = new NIOSocketHandle(runtime);

        // What we do in Trireme -- set up SSLContext with a trust manager
        // that says yes to everything. Then call the trust manager later
        // manually to return errors the way that Node wants.
        SSLContext context = SSLContext.getInstance("TLS");
        context.init(null, new TrustManager[] { AllTrustingManager.INSTANCE }, null);

        TLSConnection tls = new TLSConnection(runtime, false, "localhost", server.getPort());
        TrustManager[] tms = TLSUtils.getTrustManagers();
        tls.init(context, null, (X509TrustManager)tms[0]);

        final TLSHandle handle = new TLSHandle(nioHandle, tls);

        runtime.executeScriptTask(new Runnable() {
            @Override
            public void run() {
                try {
                    handle.connect("localhost", server.getPort(), new IOCompletionHandler<Integer>() {
                        @Override
                        public void ioComplete(int errCode, Integer value) {
                            // For TLS handshake to work, we need to be reading.
                            // TODO what if we're not? Add a test.
                            handle.startReading(output);
                            handle.write(cmd, null);
                            handle.write(TestCommand.makeCommand("END ", null), null);
                        }
                    });
                } catch (OSException ose) {
                    output.ioComplete(ose.getCode(), null);
                }
            }
        }, null);

        while (output.getResultLength() < TEST.length()) {
            Thread.sleep(50L);
        }

        runtime.executeScriptTask(new Runnable() {
            @Override
            public void run() {
                handle.shutdown(new IOCompletionHandler<Integer>() {
                    @Override
                    public void ioComplete(int errCode, Integer value) {
                        handle.close();
                    }
                });
            }
        }, null);

        String result = new String(output.getResults(), Charsets.ASCII);
        assertEquals(TEST, result);
    }

    @BeforeClass
    public static void init() throws IOException {
        runtime = new StubNodeRuntime();
        server = new SocketServer(TLSUtils.makeServerContext());
    }

    @AfterClass
    public static void terminate() {
        server.close();
        runtime.close();
    }
}
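The trust-everything-then-verify-manually pattern these tests exercise (accept the handshake unconditionally, then apply the real X509TrustManager afterwards to surface errors the way Node expects) has a rough Python analog: disable verification on the `ssl` context and inspect the peer certificate yourself. A hedged sketch of that pattern, not a drop-in equivalent of the Trireme code:

import socket, ssl

def connect_then_inspect(host, port):
    # Accept any certificate during the handshake...
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    with socket.create_connection((host, port)) as sock:
        with ctx.wrap_socket(sock, server_hostname=host) as tls:
            # ...then pull the raw peer certificate and decide for ourselves.
            der = tls.getpeercert(binary_form=True)
            return der  # hand this to whatever trust policy applies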
/**
 * Copyright Pravega Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.pravega.cli.admin.segmentstore;

import io.pravega.cli.admin.AdminCommandState;
import io.pravega.cli.admin.serializers.ContainerKeySerializer;
import io.pravega.cli.admin.serializers.ContainerMetadataSerializer;
import io.pravega.cli.admin.serializers.SltsKeySerializer;
import io.pravega.cli.admin.serializers.SltsMetadataSerializer;
import io.pravega.cli.admin.utils.TestUtils;
import io.pravega.client.ClientConfig;
import io.pravega.client.EventStreamClientFactory;
import io.pravega.client.stream.EventStreamWriter;
import io.pravega.client.stream.EventWriterConfig;
import io.pravega.client.stream.StreamConfiguration;
import io.pravega.client.stream.impl.JavaSerializer;
import io.pravega.controller.server.WireCommandFailedException;
import io.pravega.segmentstore.contracts.Attributes;
import io.pravega.shared.security.auth.DefaultCredentials;
import io.pravega.test.common.AssertExtensions;
import io.pravega.test.common.SecurityConfigDefaults;
import io.pravega.test.integration.utils.SetupUtils;
import lombok.Cleanup;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.Before;
import org.junit.After;
import org.junit.rules.Timeout;

import java.io.File;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import static io.pravega.cli.admin.segmentstore.tableSegment.GetTableSegmentInfoCommand.ENTRY_COUNT;
import static io.pravega.cli.admin.segmentstore.tableSegment.GetTableSegmentInfoCommand.KEY_LENGTH;
import static io.pravega.cli.admin.segmentstore.tableSegment.GetTableSegmentInfoCommand.LENGTH;
import static io.pravega.cli.admin.segmentstore.tableSegment.GetTableSegmentInfoCommand.SEGMENT_NAME;
import static io.pravega.cli.admin.segmentstore.tableSegment.GetTableSegmentInfoCommand.START_OFFSET;
import static io.pravega.cli.admin.serializers.AbstractSerializer.appendField;
import static io.pravega.cli.admin.serializers.ContainerMetadataSerializer.SEGMENT_ID;
import static io.pravega.cli.admin.serializers.ContainerMetadataSerializer.SEGMENT_PROPERTIES_LENGTH;
import static io.pravega.cli.admin.serializers.ContainerMetadataSerializer.SEGMENT_PROPERTIES_NAME;
import static io.pravega.cli.admin.serializers.ContainerMetadataSerializer.SEGMENT_PROPERTIES_SEALED;
import static io.pravega.cli.admin.serializers.ContainerMetadataSerializer.SEGMENT_PROPERTIES_START_OFFSET;
import static io.pravega.shared.NameUtils.getMetadataSegmentName;
import static io.pravega.test.integration.utils.TestUtils.pathToConfig;

/**
 * This test is for testing the segment store cli commands.
 */
public abstract class AbstractSegmentStoreCommandsTest {
    // Setup utility.
    protected static final SetupUtils SETUP_UTILS = new SetupUtils();
    protected static final AtomicReference<AdminCommandState> STATE = new AtomicReference<>();
    protected static final int CONTAINER_COUNT = 1;

    @Rule
    public final Timeout globalTimeout = new Timeout(60, TimeUnit.SECONDS);

    private ClientConfig clientConfig;

    public void setup(boolean enableAuth, boolean enableTls) throws Exception {
        ClientConfig.ClientConfigBuilder clientConfigBuilder = ClientConfig.builder().controllerURI(SETUP_UTILS.getControllerUri());
        STATE.set(new AdminCommandState());
        SETUP_UTILS.startAllServices(enableAuth, enableTls);
        Properties pravegaProperties = new Properties();
        pravegaProperties.setProperty("cli.controller.rest.uri", SETUP_UTILS.getControllerRestUri().toString());
        pravegaProperties.setProperty("cli.controller.grpc.uri", SETUP_UTILS.getControllerUri().toString());
        pravegaProperties.setProperty("pravegaservice.zk.connect.uri", SETUP_UTILS.getZkTestServer().getConnectString());
        pravegaProperties.setProperty("pravegaservice.container.count", String.valueOf(CONTAINER_COUNT));
        pravegaProperties.setProperty("pravegaservice.admin.gateway.port", String.valueOf(SETUP_UTILS.getAdminPort()));

        if (enableAuth) {
            clientConfigBuilder = clientConfigBuilder.credentials(new DefaultCredentials(SecurityConfigDefaults.AUTH_ADMIN_PASSWORD,
                    SecurityConfigDefaults.AUTH_ADMIN_USERNAME));
            pravegaProperties.setProperty("cli.channel.auth", Boolean.toString(true));
            pravegaProperties.setProperty("cli.credentials.username", SecurityConfigDefaults.AUTH_ADMIN_USERNAME);
            pravegaProperties.setProperty("cli.credentials.pwd", SecurityConfigDefaults.AUTH_ADMIN_PASSWORD);
        }

        if (enableTls) {
            clientConfigBuilder = clientConfigBuilder.trustStore(pathToConfig() + SecurityConfigDefaults.TLS_CA_CERT_FILE_NAME)
                    .validateHostName(false);
            pravegaProperties.setProperty("cli.channel.tls", Boolean.toString(true));
            pravegaProperties.setProperty("cli.trustStore.location", "../../config/" + SecurityConfigDefaults.TLS_CA_CERT_FILE_NAME);
            pravegaProperties.setProperty("cli.trustStore.access.token.ttl.seconds", Integer.toString(300));
        }
        STATE.get().getConfigBuilder().include(pravegaProperties);
        clientConfig = clientConfigBuilder.build();
    }

    @Test
    public void testGetSegmentInfoCommand() throws Exception {
        TestUtils.createScopeStream(SETUP_UTILS.getController(), "segmentstore", "getinfo", StreamConfiguration.builder().build());
        String commandResult = TestUtils.executeCommand("segmentstore get-segment-info segmentstore/getinfo/0.#epoch.0 localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));

        commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_abortStream/0.#epoch.0 localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
        commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_requeststream/0.#epoch.0 localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
        commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_RGcommitStreamReaders/0.#epoch.0 localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
        commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_RGscaleGroup/0.#epoch.0 localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
        commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_RGkvtStreamReaders/0.#epoch.0 localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
        commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_RGabortStreamReaders/0.#epoch.0 localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
        commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/containers/metadata_0 localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
        AssertExtensions.assertThrows(WireCommandFailedException.class, () ->
                TestUtils.executeCommand("segmentstore get-segment-info not/exists/0 localhost", STATE.get()));
        Assert.assertNotNull(GetSegmentInfoCommand.descriptor());
    }

    @Test
    public void testReadSegmentRangeCommand() throws Exception {
        // Create a temporary directory.
        Path tempDirPath = Files.createTempDirectory("readSegmentDir");
        String filename = Paths.get(tempDirPath.toString(), "tmp" + System.currentTimeMillis(), "readSegmentTest.txt").toString();
        TestUtils.createScopeStream(SETUP_UTILS.getController(), "segmentstore", "readsegment", StreamConfiguration.builder().build());

        @Cleanup
        EventStreamClientFactory factory = EventStreamClientFactory.withScope("segmentstore", clientConfig);
        @Cleanup
        EventStreamWriter<String> writer = factory.createEventWriter("readsegment", new JavaSerializer<>(), EventWriterConfig.builder().build());
        writer.writeEvents("rk", Arrays.asList("a", "2", "3"));
        writer.flush();

        // Check to make sure that the file exists and data is written into it.
        String commandResult = TestUtils.executeCommand("segmentstore read-segment segmentstore/readsegment/0.#epoch.0 0 8 localhost " + filename, STATE.get());
        Assert.assertTrue(commandResult.contains("The segment data has been successfully written into"));
        File file = new File(filename);
        Assert.assertTrue(file.exists());
        Assert.assertNotEquals(0, file.length());

        AssertExtensions.assertThrows(FileAlreadyExistsException.class, () ->
                TestUtils.executeCommand("segmentstore read-segment _system/_RGcommitStreamReaders/0.#epoch.0 0 8 localhost " + filename, STATE.get()));
        // Delete file created during the test.
        Files.deleteIfExists(Paths.get(filename));
        AssertExtensions.assertThrows(WireCommandFailedException.class, () ->
                TestUtils.executeCommand("segmentstore read-segment not/exists/0 0 1 localhost " + filename, STATE.get()));
        Assert.assertNotNull(ReadSegmentRangeCommand.descriptor());
        // Delete file created during the test.
        Files.deleteIfExists(Paths.get(filename));
        // Delete the temporary directory.
        tempDirPath.toFile().deleteOnExit();
    }

    @Test
    public void testGetSegmentAttributeCommand() throws Exception {
        TestUtils.createScopeStream(SETUP_UTILS.getController(), "segmentstore", "getattribute", StreamConfiguration.builder().build());
        String commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute segmentstore/getattribute/0.#epoch.0 "
                + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("GetSegmentAttribute:"));
        commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute _system/_abortStream/0.#epoch.0 "
                + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("GetSegmentAttribute:"));
        AssertExtensions.assertThrows(WireCommandFailedException.class, () ->
                TestUtils.executeCommand("segmentstore get-segment-attribute not/exists/0 "
                        + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get()));
        Assert.assertNotNull(GetSegmentAttributeCommand.descriptor());
    }

    @Test
    public void testUpdateSegmentAttributeCommand() throws Exception {
        TestUtils.createScopeStream(SETUP_UTILS.getController(), "segmentstore", "updateattribute", StreamConfiguration.builder().build());
        // First, get the existing value of that attribute for the segment.
        String commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute segmentstore/updateattribute/0.#epoch.0 "
                + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("GetSegmentAttribute:"));
        long oldValue = Long.parseLong(commandResult.substring(commandResult.lastIndexOf("=") + 1, commandResult.indexOf(")")));
        Assert.assertNotEquals(0L, oldValue);
        // Update the Segment to a value of 0.
        commandResult = TestUtils.executeCommand("segmentstore update-segment-attribute segmentstore/updateattribute/0.#epoch.0 "
                + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " 0 " + oldValue + " localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("UpdateSegmentAttribute:"));
        // Check that the value has been updated.
        commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute segmentstore/updateattribute/0.#epoch.0 "
                + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
        oldValue = Long.parseLong(commandResult.substring(commandResult.lastIndexOf("=") + 1, commandResult.indexOf(")")));
        Assert.assertEquals(0L, oldValue);

        // Do the same for an internal segment.
        commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute _system/_abortStream/0.#epoch.0 "
                + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("GetSegmentAttribute:"));
        oldValue = Long.parseLong(commandResult.substring(commandResult.lastIndexOf("=") + 1, commandResult.indexOf(")")));
        Assert.assertNotEquals(0L, oldValue);
        // Update the Segment to a value of 0.
        commandResult = TestUtils.executeCommand("segmentstore update-segment-attribute _system/_abortStream/0.#epoch.0 "
                + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " 0 " + oldValue + " localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("UpdateSegmentAttribute:"));
        // Check that the value has been updated.
        commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute _system/_abortStream/0.#epoch.0 "
                + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
        oldValue = Long.parseLong(commandResult.substring(commandResult.lastIndexOf("=") + 1, commandResult.indexOf(")")));
        Assert.assertEquals(0L, oldValue);

        AssertExtensions.assertThrows(WireCommandFailedException.class, () ->
                TestUtils.executeCommand("segmentstore update-segment-attribute not/exists/0 "
                        + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " 0 0 localhost", STATE.get()));
        Assert.assertNotNull(UpdateSegmentAttributeCommand.descriptor());
    }

    @Test
    public void testFlushToStorageCommandAllCase() throws Exception {
        String commandResult = TestUtils.executeCommand("container flush-to-storage all localhost", STATE.get());
        for (int id = 0; id < CONTAINER_COUNT; id++) {
            Assert.assertTrue(commandResult.contains("Flushed the Segment Container with containerId " + id + " to Storage."));
        }
        Assert.assertNotNull(FlushToStorageCommand.descriptor());
    }

    @Test
    public void testFlushToStorageCommand() throws Exception {
        String commandResult = TestUtils.executeCommand("container flush-to-storage 0 localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("Flushed the Segment Container with containerId 0 to Storage."));
        Assert.assertNotNull(FlushToStorageCommand.descriptor());
    }

    @Test
    public void testSetSerializerCommand() throws Exception {
        Assert.assertNull(STATE.get().getKeySerializer());
        Assert.assertNull(STATE.get().getValueSerializer());

        String commandResult = TestUtils.executeCommand("table-segment set-serializer dummy", STATE.get());
        Assert.assertTrue(commandResult.contains("Serializers named dummy do not exist."));
        Assert.assertNull(STATE.get().getKeySerializer());
        Assert.assertNull(STATE.get().getValueSerializer());

        commandResult = TestUtils.executeCommand("table-segment set-serializer slts", STATE.get());
        Assert.assertTrue(commandResult.contains("Serializers changed to slts successfully."));
        Assert.assertTrue(STATE.get().getKeySerializer() instanceof SltsKeySerializer);
        Assert.assertTrue(STATE.get().getValueSerializer() instanceof SltsMetadataSerializer);

        commandResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
        Assert.assertTrue(commandResult.contains("Serializers changed to container_meta successfully."));
        Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
        Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);
    }

    @Test
    public void testGetTableSegmentInfoCommand() throws Exception {
        String tableSegmentName = getMetadataSegmentName(0);
        String commandResult = TestUtils.executeCommand("table-segment get-info " + tableSegmentName + " localhost", STATE.get());
        Assert.assertTrue(commandResult.contains(tableSegmentName));
        Assert.assertTrue(commandResult.contains(SEGMENT_NAME));
        Assert.assertTrue(commandResult.contains(START_OFFSET));
        Assert.assertTrue(commandResult.contains(LENGTH));
        Assert.assertTrue(commandResult.contains(ENTRY_COUNT));
        Assert.assertTrue(commandResult.contains(KEY_LENGTH));
    }

    @Test
    public void testListTableSegmentKeysCommand() throws Exception {
        String setSerializerResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
        Assert.assertTrue(setSerializerResult.contains("Serializers changed to container_meta successfully."));
        Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
        Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);

        String tableSegmentName = getMetadataSegmentName(0);
        int keyCount = 5;
        String commandResult = TestUtils.executeCommand("table-segment list-keys " + tableSegmentName + " " + keyCount + " localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("List of at most " + keyCount + " keys in " + tableSegmentName));
    }

    @Test
    public void testGetTableSegmentEntryCommand() throws Exception {
        String setSerializerResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
        Assert.assertTrue(setSerializerResult.contains("Serializers changed to container_meta successfully."));
        Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
        Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);

        String tableSegmentName = getMetadataSegmentName(0);
        String key = "_system/_RGkvtStreamReaders/0.#epoch.0";
        String commandResult = TestUtils.executeCommand("table-segment get " + tableSegmentName + " " + key + " localhost", STATE.get());
        Assert.assertTrue(commandResult.contains("container metadata info:"));
        Assert.assertTrue(commandResult.contains(SEGMENT_ID));
        Assert.assertTrue(commandResult.contains(SEGMENT_PROPERTIES_NAME));
        Assert.assertTrue(commandResult.contains(SEGMENT_PROPERTIES_SEALED));
        Assert.assertTrue(commandResult.contains(SEGMENT_PROPERTIES_START_OFFSET));
        Assert.assertTrue(commandResult.contains(SEGMENT_PROPERTIES_LENGTH));
    }

    @Test
    public void testPutTableSegmentEntryCommand() throws Exception {
        String setSerializerResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
        Assert.assertTrue(setSerializerResult.contains("Serializers changed to container_meta successfully."));
        Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
        Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);

        String tableSegmentName = getMetadataSegmentName(0);
        String key = "_system/_RGkvtStreamReaders/0.#epoch.0";
        StringBuilder newValueBuilder = new StringBuilder();
        appendField(newValueBuilder, SEGMENT_ID, "1");
        appendField(newValueBuilder, SEGMENT_PROPERTIES_NAME, key);
        appendField(newValueBuilder, SEGMENT_PROPERTIES_SEALED, "false");
        appendField(newValueBuilder, SEGMENT_PROPERTIES_START_OFFSET, "0");
        appendField(newValueBuilder, SEGMENT_PROPERTIES_LENGTH, "10");
        appendField(newValueBuilder, "80000000-0000-0000-0000-000000000000", "1632728432718");

        String commandResult = TestUtils.executeCommand("table-segment put " + tableSegmentName + " localhost "
                + key + " " + newValueBuilder.toString(), STATE.get());
        Assert.assertTrue(commandResult.contains("Successfully updated the key " + key + " in table " + tableSegmentName));
    }

    @Test
    public void testModifyTableSegmentEntryCommandValidFieldCase() throws Exception {
        String setSerializerResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
        Assert.assertTrue(setSerializerResult.contains("Serializers changed to container_meta successfully."));
        Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
        Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);

        String tableSegmentName = getMetadataSegmentName(0);
        String key = "_system/_RGkvtStreamReaders/0.#epoch.0";
        StringBuilder newFieldValueBuilder = new StringBuilder();
        appendField(newFieldValueBuilder, SEGMENT_PROPERTIES_START_OFFSET, "20");
        appendField(newFieldValueBuilder, SEGMENT_PROPERTIES_LENGTH, "30");
        appendField(newFieldValueBuilder, "80000000-0000-0000-0000-000000000000", "1632728432718");
        appendField(newFieldValueBuilder, "dummy_field", "dummy");

        String commandResult = TestUtils.executeCommand("table-segment modify " + tableSegmentName + " localhost "
                + key + " " + newFieldValueBuilder.toString(), STATE.get());
        Assert.assertTrue(commandResult.contains("dummy_field field does not exist."));
        Assert.assertTrue(commandResult.contains("Successfully modified the following fields in the value for key "
                + key + " in table " + tableSegmentName));
    }

    @Test
    public void testModifyTableSegmentEntryCommandInValidFieldCase() throws Exception {
        String setSerializerResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
        Assert.assertTrue(setSerializerResult.contains("Serializers changed to container_meta successfully."));
        Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
        Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);

        String tableSegmentName = getMetadataSegmentName(0);
        String key = "_system/_RGkvtStreamReaders/0.#epoch.0";
        StringBuilder newFieldValueBuilder = new StringBuilder();
        appendField(newFieldValueBuilder, "dummy_field", "dummy");

        String commandResult = TestUtils.executeCommand("table-segment modify " + tableSegmentName + " localhost "
                + key + " " + newFieldValueBuilder.toString(), STATE.get());
        Assert.assertTrue(commandResult.contains("dummy_field field does not exist."));
        Assert.assertTrue(commandResult.contains("No fields provided to modify."));
    }

    @After
    public void tearDown() throws Exception {
        SETUP_UTILS.stopAllServices();
        STATE.get().close();
    }

    //endregion

    //region Actual Test Implementations

    public static class SecureSegmentStoreCommandsTest extends AbstractSegmentStoreCommandsTest {
        @Before
        public void startUp() throws Exception {
            setup(true, true);
        }
    }

    public static class SegmentStoreCommandsTest extends AbstractSegmentStoreCommandsTest {
        @Before
        public void startUp() throws Exception {
            setup(false, false);
        }
    }

    //endregion
}
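The update-attribute tests above rely on compare-and-set semantics: the command takes both the new value and the expected current value, and only succeeds when the expectation still holds. A minimal Python model of that contract (the class and method names are illustrative, not part of Pravega):

class AttributeStore:
    def __init__(self):
        self.attrs = {}

    def update(self, key, new_value, expected):
        """Compare-and-set: apply the write only if the current value matches."""
        current = self.attrs.get(key)
        if current != expected:
            return False  # expectation failed; caller should re-read and retry
        self.attrs[key] = new_value
        return True

store = AttributeStore()
store.attrs["ttl"] = 42
assert store.update("ttl", 0, expected=42)        # succeeds: expectation held
assert not store.update("ttl", 7, expected=42)    # fails: the value is now 0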
<reponame>undeadinu/viskell
package nl.utwente.viskell.haskell.env;

import java.util.ArrayList;
import java.util.List;

import com.google.common.base.Strings;

import nl.utwente.viskell.haskell.type.Type;
import nl.utwente.viskell.haskell.type.TypeCon;

/** All information about a haskell datatype. */
public class DataTypeInfo {

    public final class Constructor {
        /** Haskell constructor name */
        private final String name;

        /** type of the constructor when used as a function */
        private final Type type;

        private Constructor(String name, Type type) {
            super();
            this.name = name;
            this.type = type;
        }

        /** @return the haskell name of the constructor */
        public String getName() {
            return name;
        }

        /** @return the type of the constructor when used as a function */
        public Type getType() {
            return type;
        }

        /** @return the datatype of which this constructor is part of */
        public DataTypeInfo getDataType() {
            return DataTypeInfo.this;
        }

        @Override
        public String toString() {
            return this.name + " :: " + this.type.prettyPrint();
        }
    }

    /** type constructor corresponding to this datatype */
    private final TypeCon typecon;

    /** number of type arguments of this datatype */
    private final int typeArity;

    /** whether this is a builtin datatype and thus can't be matched on */
    private final boolean builtin;

    /** list of data constructors */
    private final List<Constructor> constructors;

    public DataTypeInfo(TypeCon typecon, int typeArity, boolean builtin) {
        super();
        this.typecon = typecon;
        this.typeArity = typeArity;
        this.builtin = builtin;
        this.constructors = new ArrayList<>();
    }

    /**
     * Add a constructor to this data type
     * @param name of the constructor
     * @param type as function of the constructor
     */
    protected void addConstructor(String name, Type type) {
        this.constructors.add(this.new Constructor(name, type));
    }

    /** @return the type constructor corresponding to this datatype */
    public TypeCon getTypecon() {
        return typecon;
    }

    /** @return the number of type arguments of this datatype */
    public int getTypeArity() {
        return typeArity;
    }

    /** @return whether this is a builtin datatype and thus can't be matched on */
    public boolean isBuiltin() {
        return builtin;
    }

    /** @return the list of all data constructors */
    public List<Constructor> getConstructors() {
        return constructors;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("data ");
        sb.append(this.typecon.prettyPrint());
        sb.append(" :: ");
        sb.append(Strings.repeat("* -> ", this.typeArity));
        sb.append("*");
        if (!this.builtin) {
            sb.append(" where");
            for (Constructor cons : this.constructors) {
                sb.append("\n\t");
                sb.append(cons.toString());
            }
        }
        return sb.toString();
    }
}
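`toString` renders the datatype as a GADT-style declaration: the type constructor, its kind (one `* -> ` per type argument, ending in `*`), then one line per constructor. A quick Python rendering of the same layout, with a hypothetical helper name, just to show the output shape:

def render_datatype(name, arity, constructors, builtin=False):
    # the kind is '* -> ' repeated once per type argument, ending in '*'
    out = "data {} :: {}*".format(name, "* -> " * arity)
    if not builtin:
        out += " where"
        for cons_name, cons_type in constructors:
            out += "\n\t{} :: {}".format(cons_name, cons_type)
    return out

print(render_datatype("Maybe", 1, [("Nothing", "Maybe a"),
                                   ("Just", "a -> Maybe a")]))
# data Maybe :: * -> * where
#         Nothing :: Maybe a
#         Just :: a -> Maybe a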
<gh_stars>100-1000
/*
 * Copyright (c) 2019 <NAME>
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
 * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
 * AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
 * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
 * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
 * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
 * PERFORMANCE OF THIS SOFTWARE.
 */

#include "smooth.h"
#include "time-util.h"

#include <math.h>

double smooth(struct smooth* self, double input)
{
	uint64_t now = gettime_us();
	double dt = (now - self->last_time) * 1.0e-6;
	self->last_time = now;

	double factor = 1.0 - exp(-dt / self->time_constant);
	double result = factor * input + (1.0 - factor) * self->last_result;
	self->last_result = result;

	return result;
}
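This filter is a standard one-pole low-pass: with elapsed time `dt` and time constant `tau`, the blend factor is `1 - exp(-dt/tau)`, so the effective cutoff stays consistent even when samples arrive at irregular intervals. A minimal Python sketch of the same recurrence (class and method names are illustrative):

import math, time

class Smooth:
    def __init__(self, time_constant):
        self.tau = time_constant            # seconds
        self.last_time = time.monotonic()
        self.last_result = 0.0

    def update(self, x):
        now = time.monotonic()
        dt = now - self.last_time
        self.last_time = now
        # blend factor rises toward 1 as dt grows relative to tau
        a = 1.0 - math.exp(-dt / self.tau)
        self.last_result = a * x + (1.0 - a) * self.last_result
        return self.last_result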
<filename>components/update_client/patch/patch_impl.h
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef COMPONENTS_UPDATE_CLIENT_PATCH_PATCH_IMPL_H_
#define COMPONENTS_UPDATE_CLIENT_PATCH_PATCH_IMPL_H_

#include <memory>

#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "components/services/patch/public/mojom/file_patcher.mojom.h"
#include "components/update_client/patcher.h"
#include "mojo/public/cpp/bindings/pending_remote.h"

namespace update_client {

class PatchChromiumFactory : public PatcherFactory {
 public:
  using Callback =
      base::RepeatingCallback<mojo::PendingRemote<patch::mojom::FilePatcher>()>;
  explicit PatchChromiumFactory(Callback callback);

  scoped_refptr<Patcher> Create() const override;

 protected:
  ~PatchChromiumFactory() override;

 private:
  const Callback callback_;
  DISALLOW_COPY_AND_ASSIGN(PatchChromiumFactory);
};

}  // namespace update_client

#endif  // COMPONENTS_UPDATE_CLIENT_PATCH_PATCH_IMPL_H_
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CHROME_BROWSER_METRICS_PAGE_LOAD_METRICS_PROVIDER_H_
#define CHROME_BROWSER_METRICS_PAGE_LOAD_METRICS_PROVIDER_H_

#include "base/macros.h"
#include "components/metrics/metrics_provider.h"

// MetricsProvider that interfaces with page_load_metrics. Note that this class
// is currently only used on Android.
class PageLoadMetricsProvider : public metrics::MetricsProvider {
 public:
  PageLoadMetricsProvider();
  ~PageLoadMetricsProvider() override;

  // metrics::MetricsProvider:
  void OnAppEnterBackground() override;

 private:
  DISALLOW_COPY_AND_ASSIGN(PageLoadMetricsProvider);
};

#endif  // CHROME_BROWSER_METRICS_PAGE_LOAD_METRICS_PROVIDER_H_
# coding=utf-8
# Copyright 2021 The Edward2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for random variable."""

import re

from absl.testing import parameterized
import edward2 as ed
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp


class FakeDistribution(tfp.distributions.Distribution):
  """Fake distribution class for testing."""

  def __init__(self):
    super(FakeDistribution, self).__init__(
        dtype=tf.float32,
        reparameterization_type=tfp.distributions.FULLY_REPARAMETERIZED,
        validate_args=False,
        allow_nan_stats=True)

  def _sample_n(self, *args, **kwargs):
    return tf.ones(shape=(4, 4))

  def sample(self, *args, **kwargs):
    return tf.ones(shape=(4, 4))


class FakeDistributionNoSample(tfp.distributions.Distribution):
  """Fake distribution class for testing."""

  def __init__(self):
    super(FakeDistributionNoSample, self).__init__(
        dtype=None,
        reparameterization_type=tfp.distributions.FULLY_REPARAMETERIZED,
        validate_args=False,
        allow_nan_stats=True)


class RandomVariableTest(parameterized.TestCase, tf.test.TestCase):

  def testConstructor(self):
    x = ed.RandomVariable(tfp.distributions.Poisson(rate=np.ones([2, 5])),
                          value=np.ones([2, 5]))
    self.assertAllEqual(tf.convert_to_tensor(x), x.value)
    with self.assertRaises(ValueError):
      _ = ed.RandomVariable(tfp.distributions.Bernoulli(probs=0.5),
                            value=tf.zeros([2, 5], dtype=tf.int32))
    x = ed.RandomVariable(FakeDistribution())
    with self.assertRaises(NotImplementedError):
      _ = ed.RandomVariable(FakeDistributionNoSample())

  def testGradientsFirstOrder(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0., 1.))
    def f(x):
      return 2. * x
    with tf.GradientTape() as tape:
      tape.watch(x.value)
      y = f(x)
    z = tape.gradient(y, [x.value])[0]
    self.assertEqual(z, 2.)

  def testGradientsSecondOrder(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    def f(x):
      return 2 * (x ** 2)
    with tf.GradientTape() as tape2:
      tape2.watch(x.value)
      with tf.GradientTape() as tape:
        tape.watch(x.value)
        y = f(x)
      z = tape.gradient(y, [x.value])[0]
    z = tape2.gradient(z, [x.value])[0]
    self.assertEqual(z, 4.0)

  def testStr(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0), value=1.234)
    pattern = "RandomVariable(\"1.234\", shape=(), dtype=float32"
    regexp = re.escape(pattern)
    self.assertRegexpMatches(str(x), regexp)

  def testRepr(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0), value=1.234)
    string = ("<ed.RandomVariable '{name}' shape=() "
              "dtype=float32 numpy=1.234>".format(name=x.distribution.name))
    self.assertEqual(repr(x), string)

  def testNumpy(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0), value=1.23)
    self.assertEqual(x, tf.constant(1.23))

  def testOperatorsAdd(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = x + y
    z_value = x.value + y
    self.assertAllEqual(z, z_value)

  def testOperatorsRadd(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = y + x
    z_value = y + x.value
    self.assertAllEqual(z, z_value)

  def testOperatorsSub(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = x - y
    z_value = x.value - y
    self.assertAllEqual(z, z_value)

  def testOperatorsRsub(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = y - x
    z_value = y - x.value
    self.assertAllEqual(z, z_value)

  def testOperatorsMul(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = x * y
    z_value = x.value * y
    self.assertAllEqual(z, z_value)

  def testOperatorsRmul(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = y * x
    z_value = y * x.value
    self.assertAllEqual(z, z_value)

  def testOperatorsDiv(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = x / y
    z_value = x.value / y
    self.assertAllEqual(z, z_value)

  def testOperatorsRdiv(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = y / x
    z_value = y / x.value
    self.assertAllEqual(z, z_value)

  def testOperatorsFloordiv(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = x // y
    z_value = x.value // y
    self.assertAllEqual(z, z_value)

  def testOperatorsRfloordiv(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = y // x
    z_value = y // x.value
    self.assertAllEqual(z, z_value)

  def testOperatorsMod(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = x % y
    z_value = x.value % y
    self.assertAllEqual(z, z_value)

  def testOperatorsRmod(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = y % x
    z_value = y % x.value
    self.assertAllEqual(z, z_value)

  def testOperatorsLt(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = x < y
    z_value = x.value < y
    self.assertAllEqual(z, z_value)

  def testOperatorsLe(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = x <= y
    z_value = x.value <= y
    self.assertAllEqual(z, z_value)

  def testOperatorsGt(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = x > y
    z_value = x.value > y
    self.assertAllEqual(z, z_value)

  def testOperatorsGe(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = x >= y
    z_value = x.value >= y
    self.assertAllEqual(z, z_value)

  def testOperatorsGetitem(self):
    x = ed.RandomVariable(tfp.distributions.Normal(tf.random.normal([3, 4]), 1.))
    z = x[0:2, 2:3]
    z_value = x.value[0:2, 2:3]
    self.assertIsInstance(z, ed.RandomVariable)
    self.assertAllEqual(z.distribution.mean(), x.distribution.mean()[0:2, 2:3])
    self.assertAllEqual(tf.convert_to_tensor(z), z_value)

  def testOperatorsPow(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = x ** y
    z_value = x.value ** y
    self.assertAllEqual(z, z_value)

  def testOperatorsRpow(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    z = y ** x
    z_value = y ** x.value
    self.assertAllEqual(z, z_value)

  def testOperatorsNeg(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    z = -x
    z_value = -x.value
    self.assertAllEqual(z, z_value)

  def testOperatorsAbs(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    z = abs(x)
    z_value = abs(x.value)
    self.assertAllEqual(z, z_value)

  def testOperatorsHash(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    self.assertNotEqual(hash(x), hash(y))
    self.assertEqual(hash(x), id(x))

  # TODO(trandustin): Re-enable test.
  # def testOperatorsEq(self):
  #   x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
  #   self.assertEqual(x, x)

  def testOperatorsNe(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = 5.0
    self.assertNotEqual(x, y)

  def testOperatorsBoolNonzero(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    with self.assertRaises(TypeError):
      _ = not x

  def testArrayPriority(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 1.0))
    y = np.array(5.0, dtype=np.float32)
    z = y / x
    z_value = y / x.value
    self.assertAllEqual(z, z_value)

  def testConvertToTensor(self):
    x = ed.RandomVariable(tfp.distributions.Normal(0.0, 0.1))
    with self.assertRaises(ValueError):
      _ = tf.convert_to_tensor(x, dtype=tf.int32)

  @parameterized.parameters(
      {"probs": 0.5,
       "sample_shape": [],
       "batch_shape": [],
       "event_shape": []},
      {"probs": np.zeros([2, 3]) + 0.5,
       "sample_shape": [],
       "batch_shape": [2, 3],
       "event_shape": []},
      {"probs": 0.5,
       "sample_shape": [2],
       "batch_shape": [],
       "event_shape": []},
      {"probs": 0.5,
       "sample_shape": [2],
       "batch_shape": [],
       "event_shape": []},
      {"probs": 0.5,
       "sample_shape": [2, 4],
       "batch_shape": [],
       "event_shape": []},
  )
  def testShape(self, probs, sample_shape, batch_shape, event_shape):
    rv = ed.RandomVariable(tfp.distributions.Bernoulli(probs=probs),
                           sample_shape=sample_shape)
    self.assertEqual(rv.shape, sample_shape + batch_shape + event_shape)
    self.assertEqual(rv.shape, rv.shape)
    self.assertEqual(rv.sample_shape, sample_shape)
    self.assertEqual(rv.distribution.batch_shape, batch_shape)
    self.assertEqual(rv.distribution.event_shape, event_shape)

  def testRandomTensorSample(self):
    num_samples = tf.cast(tfp.distributions.Poisson(rate=5.).sample(), tf.int32)
    _ = ed.RandomVariable(tfp.distributions.Normal(loc=0.0, scale=1.0),
                          sample_shape=num_samples)


if __name__ == "__main__":
  tf.test.main()
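The operator tests all follow one pattern: an `ed.RandomVariable` delegates arithmetic to its sampled `.value` tensor, so random variables compose like ordinary tensors while keeping the distribution attached. A short usage sketch built from the same API the tests exercise (the numeric output is a random sample, so it varies per run):

import edward2 as ed
import tensorflow_probability as tfp

x = ed.RandomVariable(tfp.distributions.Normal(0., 1.))
y = 2. * x + 5.            # arithmetic applies to x.value, a sampled tensor
print(float(y))            # a plain float: the transformed sample
print(x.distribution)      # the underlying Normal(0, 1) is still attached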
package com.hotbitmapgg.bilibili.entity.region;

import android.os.Parcel;
import android.os.Parcelable;

import com.google.gson.annotations.SerializedName;

import java.util.ArrayList;
import java.util.List;

/**
 * Created by hcc on 2016/10/11 18:20
 * <EMAIL>
 * <p>
 * Data model class for the video region (category) listing
 */
public class RegionTypesInfo implements Parcelable {

    private int code;
    private String message;
    private String ver;
    private List<DataBean> data;

    public int getCode() { return code; }

    public void setCode(int code) { this.code = code; }

    public String getMessage() { return message; }

    public void setMessage(String message) { this.message = message; }

    public String getVer() { return ver; }

    public void setVer(String ver) { this.ver = ver; }

    public List<DataBean> getData() { return data; }

    public void setData(List<DataBean> data) { this.data = data; }

    public static class DataBean implements Parcelable {

        private int tid;
        private int reid;
        private String name;
        private String logo;
        @SerializedName("goto")
        private String gotoX;
        private String param;
        private List<ChildrenBean> children;

        public int getTid() { return tid; }

        public void setTid(int tid) { this.tid = tid; }

        public int getReid() { return reid; }

        public void setReid(int reid) { this.reid = reid; }

        public String getName() { return name; }

        public void setName(String name) { this.name = name; }

        public String getLogo() { return logo; }

        public void setLogo(String logo) { this.logo = logo; }

        public String getGotoX() { return gotoX; }

        public void setGotoX(String gotoX) { this.gotoX = gotoX; }

        public String getParam() { return param; }

        public void setParam(String param) { this.param = param; }

        public List<ChildrenBean> getChildren() { return children; }

        public void setChildren(List<ChildrenBean> children) { this.children = children; }

        public static class ChildrenBean implements Parcelable {

            private int tid;
            private int reid;
            private String name;
            private String logo;
            @SerializedName("goto")
            private String gotoX;
            private String param;

            public int getTid() { return tid; }

            public void setTid(int tid) { this.tid = tid; }

            public int getReid() { return reid; }

            public void setReid(int reid) { this.reid = reid; }

            public String getName() { return name; }

            public void setName(String name) { this.name = name; }

            public String getLogo() { return logo; }

            public void setLogo(String logo) { this.logo = logo; }

            public String getGotoX() { return gotoX; }

            public void setGotoX(String gotoX) { this.gotoX = gotoX; }

            public String getParam() { return param; }

            public void setParam(String param) { this.param = param; }

            @Override
            public int describeContents() { return 0; }

            @Override
            public void writeToParcel(Parcel dest, int flags) {
                dest.writeInt(this.tid);
                dest.writeInt(this.reid);
                dest.writeString(this.name);
                dest.writeString(this.logo);
                dest.writeString(this.gotoX);
                dest.writeString(this.param);
            }

            public ChildrenBean() {
            }

            protected ChildrenBean(Parcel in) {
                this.tid = in.readInt();
                this.reid = in.readInt();
                this.name = in.readString();
                this.logo = in.readString();
                this.gotoX = in.readString();
                this.param = in.readString();
            }

            public static final Creator<ChildrenBean> CREATOR = new Creator<ChildrenBean>() {
                @Override
                public ChildrenBean createFromParcel(Parcel source) { return new ChildrenBean(source); }

                @Override
                public ChildrenBean[] newArray(int size) { return new ChildrenBean[size]; }
            };
        }

        @Override
        public int describeContents() { return 0; }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            dest.writeInt(this.tid);
            dest.writeInt(this.reid);
            dest.writeString(this.name);
            dest.writeString(this.logo);
            dest.writeString(this.gotoX);
            dest.writeString(this.param);
            dest.writeList(this.children);
        }

        public DataBean() {
        }

        protected DataBean(Parcel in) {
            this.tid = in.readInt();
            this.reid = in.readInt();
            this.name = in.readString();
            this.logo = in.readString();
            this.gotoX = in.readString();
            this.param = in.readString();
            this.children = new ArrayList<ChildrenBean>();
            in.readList(this.children, ChildrenBean.class.getClassLoader());
        }

        public static final Creator<DataBean> CREATOR = new Creator<DataBean>() {
            @Override
            public DataBean createFromParcel(Parcel source) { return new DataBean(source); }

            @Override
            public DataBean[] newArray(int size) { return new DataBean[size]; }
        };
    }

    @Override
    public int describeContents() { return 0; }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeInt(this.code);
        dest.writeString(this.message);
        dest.writeString(this.ver);
        dest.writeList(this.data);
    }

    public RegionTypesInfo() {
    }

    protected RegionTypesInfo(Parcel in) {
        this.code = in.readInt();
        this.message = in.readString();
        this.ver = in.readString();
        this.data = new ArrayList<DataBean>();
        in.readList(this.data, DataBean.class.getClassLoader());
    }

    public static final Creator<RegionTypesInfo> CREATOR = new Creator<RegionTypesInfo>() {
        @Override
        public RegionTypesInfo createFromParcel(Parcel source) { return new RegionTypesInfo(source); }

        @Override
        public RegionTypesInfo[] newArray(int size) { return new RegionTypesInfo[size]; }
    };
}