<reponame>yerang823/landmark-detection
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
# python GEN_MPII.py
###############################################################
import os, sys, math, copy, json, torch
import os.path as osp
import numpy as np
from pathlib import Path
from collections import OrderedDict, defaultdict
lib_dir = (Path(__file__).parent / '..' / 'lib').resolve()
if str(lib_dir) not in sys.path: sys.path.insert(0, str(lib_dir))
assert sys.version_info.major == 3, 'Please upgrade from {:} to Python 3.x'.format(sys.version_info)
from scipy.io import loadmat
from xvision import draw_points, normalize_L
import datasets
# Change these paths according to your directories
this_dir = osp.dirname(os.path.abspath(__file__))
SAVE_DIR = osp.join(this_dir, 'lists', 'MPII')
HOME_STR = 'DOME_HOME'
if HOME_STR not in os.environ: HOME_STR = 'HOME'
assert HOME_STR in os.environ, 'Did not find the HOME dir : {}'.format(HOME_STR)
print ('This dir : {}, HOME : [{}] : {}'.format(this_dir, HOME_STR, os.environ[HOME_STR]))
if not osp.isdir(SAVE_DIR): os.makedirs(SAVE_DIR)
image_dir = osp.join(os.environ[HOME_STR], 'datasets', 'landmark-datasets', 'MPII', 'images')
mat_path = osp.join(os.environ[HOME_STR], 'datasets', 'landmark-datasets', 'MPII', 'mpii_human_pose_v1_u12_2', 'mpii_human_pose_v1_u12_1.mat')
print ('The MPII image dir : {:}'.format(image_dir))
print ('The MPII annotation file : {:}'.format(mat_path))
assert osp.isdir(image_dir), 'The image dir : {:} does not exist'.format(image_dir)
assert osp.isfile(mat_path), 'The annotation file : {:} does not exist'.format(mat_path)
def get_bounding_box(center, scale):
  # MPII's person scale is relative to a nominal ~200-pixel body height; the
  # constants below pad that nominal box (half-width 136, half-height 144 at scale 1).
  x1 = center[0] - scale * 136
y1 = center[1] - scale * 144
x2 = center[0] + scale * 136
y2 = center[1] + scale * 144
return (x1, y1, x2, y2)
def get_person(person, image_path):
pts = np.zeros((3, 16), dtype='float32')
for point in person['points']:
idx = point['id'] - 1
pts[0, idx] = point['x']
pts[1, idx] = point['y']
pts[2, idx] = 1
box = get_bounding_box(person['center'], person['scale'])
head = person['head']
headsize = 0.6 * np.sqrt( (head[2]-head[0])**2 + (head[3]-head[1])**2 )
data = {'points': pts,
'box-default': box,
'normalizeL-head': headsize,
'current_frame': image_path,
'previous_frame': None,
'next_frame': None}
return data
def check_in_image(data, aux_info):
box = data['box-default']
points = data['points']
oks = []
for idx in range(points.shape[1]):
if int(points[2,idx]+0.5) == 1:
ok = points[0,idx] >= box[0] and points[0,idx] <= box[2] and points[1,idx] >= box[1] and points[1,idx] <= box[3]
if not ok: print('{:} : the {:02d}-th point is out of the box ({:}) : {:}'.format(aux_info, idx, box, points[:,idx]))
oks.append(ok)
return len(oks) == sum(oks)
def save_to_file_trainval(save_dir, trains, valids):
save_dir = Path(save_dir)
if not save_dir.exists(): save_dir.mkdir(parents=True, exist_ok=True)
## Train
mean_landmark = [[] for i in range(16)]
TDatas, OKs = [], []
for index, DX in enumerate(trains):
image_path = osp.join(image_dir, DX['name'])
assert osp.isfile(image_path), '{:} does not exist'.format(image_path)
for person in DX['persons']:
data = get_person(person, image_path)
TDatas.append( data )
ok = check_in_image(data, 'TRAIN-{:}'.format(index))
OKs.append( ok )
# calculate means
box, landmarks = data['box-default'], data['points']
for idx in range(landmarks.shape[1]):
if int(landmarks[2, idx] + 0.5) == 0: continue
x, y = float(landmarks[0,idx]-box[0]), float(landmarks[1,idx]-box[1])
x, y = normalize_L(x, box[2]-box[0]), normalize_L(y, box[3]-box[1])
mean_landmark[idx].append( (x,y) )
torch.save(TDatas, save_dir / 'train.pth')
print ('Training has {:} persons, with {:} % of them having out-of-box points.'.format(len(TDatas), 100 - np.array(OKs).mean() * 100))
# Validation
VDatas, OKs = [], []
for index, DX in enumerate(valids):
image_path = osp.join(image_dir, DX['name'])
assert osp.isfile(image_path), '{:} does not exist'.format(image_path)
for person in DX['persons']:
data = get_person(person, image_path)
VDatas.append( data )
ok = check_in_image(data, 'VALID-{:}'.format(index))
OKs.append( ok )
# calculate means
box, landmarks = data['box-default'], data['points']
for idx in range(landmarks.shape[1]):
if int(landmarks[2, idx] + 0.5) == 0: continue
x, y = float(landmarks[0,idx]-box[0]), float(landmarks[1,idx]-box[1])
x, y = normalize_L(x, box[2]-box[0]), normalize_L(y, box[3]-box[1])
mean_landmark[idx].append( (x,y) )
print ('Validation has {:} persons, with {:} % of them having out-of-box points.'.format(len(VDatas), 100 - np.array(OKs).mean() * 100))
torch.save(VDatas, save_dir / 'valid.pth')
torch.save(TDatas + VDatas, save_dir / 'trainval.pth')
mean_landmark = [np.array(x) for x in mean_landmark]
mean_landmark = [np.mean(x, axis=0) for x in mean_landmark]
mean_landmark = np.array(mean_landmark)
image = draw_points(mean_landmark, 600, 500, True)
image.save(osp.join(save_dir, 'MPII-trainval.png'))
torch.save({'default': mean_landmark}, osp.join(save_dir, 'MPII-trainval-mean.pth'))
def parse_anno_simple(anno, selects):
image = anno['image']['name'][0,0][0]
annorects = np.reshape(anno['annorect'], (anno['annorect'].size,))
annorects = [annorects[i-1] for i in selects]
persons = []
# different persons
for anno in annorects:
# head pose
#x1, y1, x2, y2 = float(anno['x1']), float(anno['y1']), float(anno['x2']), float(anno['y2'])
center_x, center_y, scale = float(anno['objpos'][0,0]['x']), float(anno['objpos'][0,0]['y']), float(anno['scale'])
person = {
'center': [center_x, center_y],
'scale' : scale}
persons.append( person )
return {'name': image,
'persons': persons}
def parse_anno(anno, selects):
image = anno['image']['name'][0,0][0]
vidx, frame_sec = anno['vididx'], anno['frame_sec']
assert vidx.size == 1 and frame_sec.size == 1
vidx = vidx[0,0]
frame_sec = frame_sec[0,0]
annorects = np.reshape(anno['annorect'], (anno['annorect'].size,))
annorects = [annorects[i-1] for i in selects]
persons = []
# different persons
for anno in annorects:
# head pose
x1, y1, x2, y2 = float(anno['x1']), float(anno['y1']), float(anno['x2']), float(anno['y2'])
center_x, center_y, scale = float(anno['objpos'][0,0]['x']), float(anno['objpos'][0,0]['y']), float(anno['scale'])
if anno['annopoints'].size == 0:
_points = []
else:
_points = np.squeeze(anno['annopoints']['point'][0,0], axis=0)
points = []
for x in _points:
data = {'x': float(x['x']),
'y': float(x['y']),
'id': int(x['id'])}
if 'is_visible' not in x or x['is_visible'].size == 0: # visible
is_visible = True
elif x['is_visible'].size == 1:
is_visible = bool( int(x['is_visible']) )
else:
raise ValueError('invalid visible: {:}'.format(x['is_visible']))
data['is_visible'] = is_visible
points.append( data )
person = {'head' : [x1, y1, x2, y2],
'center': [center_x, center_y],
'scale' : scale,
'points': points}
persons.append( person )
return {'name': image,
'vidx': vidx,
'frame_sec': frame_sec,
'persons': persons}
def load_splits(split_dir):
assert osp.isdir(split_dir), '{:} is not a dir'.format(split_dir)
cfile = open(osp.join(split_dir, 'train.txt'), 'r')
Ltrain = cfile.readlines()
Ltrain = [x.strip() for x in Ltrain]
cfile.close()
# validation
cfile = open(osp.join(split_dir, 'valid.txt'), 'r')
Lvalid = cfile.readlines()
Lvalid = [x.strip() for x in Lvalid]
cfile.close()
# test
cfile = open(osp.join(split_dir, 'test.txt'), 'r')
Ltest = cfile.readlines()
Ltest = [x.strip() for x in Ltest ]
cfile.close()
return Ltrain, Lvalid, Ltest
if __name__ == "__main__":
mat = loadmat(mat_path)
matdata = mat['RELEASE']
print ('{:}'.format( matdata.dtype ))
annolist = np.squeeze( matdata['annolist'][0,0] )
img_train = np.squeeze( matdata['img_train'][0,0] )
single_person = np.squeeze( matdata['single_person'][0,0] )
act = np.squeeze( matdata['act'][0,0] )
video_list = np.squeeze( matdata['video_list'][0,0] )
video_list = [x[0] for x in video_list]
ACT = []
for xx in act:
if xx['act_name'].shape == (1,):
act_name = xx['act_name'][0]
elif xx['act_name'].shape == (0,):
act_name = None
else: raise ValueError('{:}'.format( xx['act_name'] ))
if xx['cat_name'].shape == (1,):
cat_name = xx['cat_name'][0]
elif xx['cat_name'].shape == (0,):
cat_name = None
else: raise ValueError('{:}'.format( xx['cat_name'] ))
x = {'act_name': act_name,
'cat_name': cat_name,
'act_id' : xx['act_id'][0,0]}
ACT.append( x )
Ltrain, Lvalid, Ltest = load_splits( osp.join(this_dir, 'cache', 'MPII-Split') )
# get training
trains, valids, tests, corrupts = [], [], [], []
for idx, is_train in enumerate(img_train):
image = annolist[idx]['image']['name'][0,0][0]
print ('handle {:5d}/{:}-th data : {:}'.format(idx+1, len(img_train), image))
if is_train:
if single_person[idx].size == 0: continue
select = np.reshape(single_person[idx], (single_person[idx].size,))
if image in Ltrain or image in Lvalid:
#data = {'anno': parse_anno( annolist[idx], select ),
# 'act' : ACT[idx]
# }
data = parse_anno( annolist[idx], select )
else:
corrupts.append( image )
continue
if image in Ltrain : trains.append( data )
elif image in Lvalid: valids.append( data )
else: raise ValueError('Invalid Data : {:}'.format( image ))
else:
#assert image in Ltest, '{:} has something wrong'.format( image )
select = np.reshape(single_person[idx], (single_person[idx].size,))
data = parse_anno_simple( annolist[idx], select )
print ('save data into {:}'.format(SAVE_DIR))
save_to_file_trainval(SAVE_DIR, trains, valids)
#save_to_file_test (SAVE_DIR, tests)
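# A minimal consumption sketch (hypothetical, assuming the script above has run
# and produced lists/MPII/train.pth); each entry is the dict built by get_person():
#
#   samples = torch.load(osp.join(SAVE_DIR, 'train.pth'))
#   first = samples[0]
#   print(first['current_frame'])   # absolute image path
#   print(first['box-default'])     # (x1, y1, x2, y2)
#   print(first['points'].shape)    # (3, 16): x, y, visibility per joint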
# -*- coding: utf-8 -*-
# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: <NAME>, <EMAIL>
from typing import NewType, List, Union
import numpy as np
import torch
__all__ = [
'Tensor',
'Array',
]
Tensor = NewType('Tensor', torch.Tensor)
Array = NewType('Array', np.ndarray)
<filename>src/core/SkRefDict.cpp<gh_stars>1000+
/*
* Copyright 2011 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "SkRefDict.h"
#include "SkString.h"
struct SkRefDict::Impl {
Impl* fNext;
SkString fName;
SkRefCnt* fData;
};
SkRefDict::SkRefDict() : fImpl(nullptr) {}
SkRefDict::~SkRefDict() {
this->removeAll();
}
SkRefCnt* SkRefDict::find(const char name[]) const {
if (nullptr == name) {
return nullptr;
}
Impl* rec = fImpl;
while (rec) {
if (rec->fName.equals(name)) {
return rec->fData;
}
rec = rec->fNext;
}
return nullptr;
}
void SkRefDict::set(const char name[], SkRefCnt* data) {
if (nullptr == name) {
return;
}
Impl* rec = fImpl;
Impl* prev = nullptr;
while (rec) {
if (rec->fName.equals(name)) {
if (data) {
// replace
data->ref();
rec->fData->unref();
rec->fData = data;
} else {
// remove
rec->fData->unref();
if (prev) {
prev->fNext = rec->fNext;
} else {
fImpl = rec->fNext;
}
delete rec;
}
return;
}
prev = rec;
rec = rec->fNext;
}
    // if we get here, name was not found, so add it
    // (note: data must be non-null on this path; calling set(name, nullptr)
    //  for a name that is not present would dereference null)
    data->ref();
rec = new Impl;
rec->fName.set(name);
rec->fData = data;
// prepend to the head of our list
rec->fNext = fImpl;
fImpl = rec;
}
void SkRefDict::removeAll() {
Impl* rec = fImpl;
while (rec) {
Impl* next = rec->fNext;
rec->fData->unref();
delete rec;
rec = next;
}
fImpl = nullptr;
}
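// A minimal usage sketch (hypothetical SkRefCnt subclass instances) of the
// semantics implemented above: set() refs incoming data, replaces in place on
// a name hit, and set(name, nullptr) removes and unrefs the entry.
//
//   SkRefDict dict;
//   dict.set("effect", effectA);            // dict takes a ref on effectA
//   dict.set("effect", effectB);            // effectB ref'd, effectA unref'd
//   SkRefCnt* found = dict.find("effect");  // == effectB (no ref transferred)
//   dict.set("effect", nullptr);            // entry removed, effectB unref'd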
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.androidmanagement.v1.model;
/**
* A default activity for handling intents that match a particular intent filter.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Android Management API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class PersistentPreferredActivity extends com.google.api.client.json.GenericJson {
/**
* The intent actions to match in the filter. If any actions are included in the filter, then an
* intent's action must be one of those values for it to match. If no actions are included, the
* intent action is ignored.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> actions;
/**
* The intent categories to match in the filter. An intent includes the categories that it
* requires, all of which must be included in the filter in order to match. In other words, adding
* a category to the filter has no impact on matching unless that category is specified in the
* intent.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> categories;
/**
* The activity that should be the default intent handler. This should be an Android component
* name, e.g. com.android.enterprise.app/.MainActivity. Alternatively, the value may be the
* package name of an app, which causes Android Device Policy to choose an appropriate activity
* from the app to handle the intent.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String receiverActivity;
/**
* The intent actions to match in the filter. If any actions are included in the filter, then an
* intent's action must be one of those values for it to match. If no actions are included, the
* intent action is ignored.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getActions() {
return actions;
}
/**
* The intent actions to match in the filter. If any actions are included in the filter, then an
* intent's action must be one of those values for it to match. If no actions are included, the
* intent action is ignored.
* @param actions actions or {@code null} for none
*/
public PersistentPreferredActivity setActions(java.util.List<java.lang.String> actions) {
this.actions = actions;
return this;
}
/**
* The intent categories to match in the filter. An intent includes the categories that it
* requires, all of which must be included in the filter in order to match. In other words, adding
* a category to the filter has no impact on matching unless that category is specified in the
* intent.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getCategories() {
return categories;
}
/**
* The intent categories to match in the filter. An intent includes the categories that it
* requires, all of which must be included in the filter in order to match. In other words, adding
* a category to the filter has no impact on matching unless that category is specified in the
* intent.
* @param categories categories or {@code null} for none
*/
public PersistentPreferredActivity setCategories(java.util.List<java.lang.String> categories) {
this.categories = categories;
return this;
}
/**
* The activity that should be the default intent handler. This should be an Android component
* name, e.g. com.android.enterprise.app/.MainActivity. Alternatively, the value may be the
* package name of an app, which causes Android Device Policy to choose an appropriate activity
* from the app to handle the intent.
* @return value or {@code null} for none
*/
public java.lang.String getReceiverActivity() {
return receiverActivity;
}
/**
* The activity that should be the default intent handler. This should be an Android component
* name, e.g. com.android.enterprise.app/.MainActivity. Alternatively, the value may be the
* package name of an app, which causes Android Device Policy to choose an appropriate activity
* from the app to handle the intent.
* @param receiverActivity receiverActivity or {@code null} for none
*/
public PersistentPreferredActivity setReceiverActivity(java.lang.String receiverActivity) {
this.receiverActivity = receiverActivity;
return this;
}
@Override
public PersistentPreferredActivity set(String fieldName, Object value) {
return (PersistentPreferredActivity) super.set(fieldName, value);
}
@Override
public PersistentPreferredActivity clone() {
return (PersistentPreferredActivity) super.clone();
}
}
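// A minimal construction sketch (hypothetical values) using the fluent setters
// defined above; the component-name format follows the receiverActivity javadoc.
//
//   PersistentPreferredActivity activity = new PersistentPreferredActivity()
//       .setActions(java.util.Arrays.asList("android.intent.action.MAIN"))
//       .setCategories(java.util.Arrays.asList("android.intent.category.HOME"))
//       .setReceiverActivity("com.example.kiosk/.MainActivity");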
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
package com.microsoft.signalr;
final class HandshakeResponseMessage {
private final String error;
public HandshakeResponseMessage() {
this(null);
}
public HandshakeResponseMessage(String error) {
this.error = error;
}
public String getHandshakeError() {
return error;
}
}
<filename>K/Kenyapithecus_noun.json
{
"word": "Kenyapithecus",
"definitions": [
"An extinct ape, Kenyapithecus wickeri, known from Middle Miocene fossil remains found at Fort Ternan, Kenya, dated to about 14 million years ago, and regarded by some as the ancestor of all great apes."
],
"parts-of-speech": "Noun"
}
<reponame>Shiva-D/rtos-course<gh_stars>100-1000
/******************** (C) COPYRIGHT 2008 STMicroelectronics ********************
* File Name : stm32f10x_iwdg.h
* Author : MCD Application Team
* Version : V2.0.1
* Date : 06/13/2008
* Description : This file contains all the functions prototypes for the
* IWDG firmware library.
********************************************************************************
* THE PRESENT FIRMWARE WHICH IS FOR GUIDANCE ONLY AIMS AT PROVIDING CUSTOMERS
* WITH CODING INFORMATION REGARDING THEIR PRODUCTS IN ORDER FOR THEM TO SAVE TIME.
* AS A RESULT, STMICROELECTRONICS SHALL NOT BE HELD LIABLE FOR ANY DIRECT,
* INDIRECT OR CONSEQUENTIAL DAMAGES WITH RESPECT TO ANY CLAIMS ARISING FROM THE
* CONTENT OF SUCH FIRMWARE AND/OR THE USE MADE BY CUSTOMERS OF THE CODING
* INFORMATION CONTAINED HEREIN IN CONNECTION WITH THEIR PRODUCTS.
*******************************************************************************/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef __STM32F10x_IWDG_H
#define __STM32F10x_IWDG_H
/* Includes ------------------------------------------------------------------*/
#include "stm32f10x_map.h"
/* Exported types ------------------------------------------------------------*/
/* Exported constants --------------------------------------------------------*/
/* Write access to IWDG_PR and IWDG_RLR registers */
#define IWDG_WriteAccess_Enable ((u16)0x5555)
#define IWDG_WriteAccess_Disable ((u16)0x0000)
#define IS_IWDG_WRITE_ACCESS(ACCESS) (((ACCESS) == IWDG_WriteAccess_Enable) || \
((ACCESS) == IWDG_WriteAccess_Disable))
/* IWDG prescaler */
#define IWDG_Prescaler_4 ((u8)0x00)
#define IWDG_Prescaler_8 ((u8)0x01)
#define IWDG_Prescaler_16 ((u8)0x02)
#define IWDG_Prescaler_32 ((u8)0x03)
#define IWDG_Prescaler_64 ((u8)0x04)
#define IWDG_Prescaler_128 ((u8)0x05)
#define IWDG_Prescaler_256 ((u8)0x06)
#define IS_IWDG_PRESCALER(PRESCALER) (((PRESCALER) == IWDG_Prescaler_4) || \
((PRESCALER) == IWDG_Prescaler_8) || \
((PRESCALER) == IWDG_Prescaler_16) || \
((PRESCALER) == IWDG_Prescaler_32) || \
((PRESCALER) == IWDG_Prescaler_64) || \
((PRESCALER) == IWDG_Prescaler_128)|| \
((PRESCALER) == IWDG_Prescaler_256))
/* IWDG Flag */
#define IWDG_FLAG_PVU ((u16)0x0001)
#define IWDG_FLAG_RVU ((u16)0x0002)
#define IS_IWDG_FLAG(FLAG) (((FLAG) == IWDG_FLAG_PVU) || ((FLAG) == IWDG_FLAG_RVU))
#define IS_IWDG_RELOAD(RELOAD) ((RELOAD) <= 0xFFF)
/* Exported macro ------------------------------------------------------------*/
/* Exported functions ------------------------------------------------------- */
void IWDG_WriteAccessCmd(u16 IWDG_WriteAccess);
void IWDG_SetPrescaler(u8 IWDG_Prescaler);
void IWDG_SetReload(u16 Reload);
void IWDG_ReloadCounter(void);
void IWDG_Enable(void);
FlagStatus IWDG_GetFlagStatus(u16 IWDG_FLAG);
#endif /* __STM32F10x_IWDG_H */
/******************* (C) COPYRIGHT 2008 STMicroelectronics *****END OF FILE****/
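/* A minimal configuration sketch (assuming the ~40 kHz LSI clock that feeds
 * the IWDG on this family) using only the functions prototyped above:
 *
 *   IWDG_WriteAccessCmd(IWDG_WriteAccess_Enable);
 *   IWDG_SetPrescaler(IWDG_Prescaler_64);   // 40 kHz / 64 = 625 Hz tick
 *   IWDG_SetReload(625);                    // ~1 s timeout (reload <= 0xFFF)
 *   IWDG_ReloadCounter();
 *   IWDG_Enable();
 *   // ...then call IWDG_ReloadCounter() periodically to avoid a reset
 */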
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nutch.service.resources;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.nutch.metadata.Nutch;
import org.apache.nutch.service.impl.ServiceWorker;
import org.apache.nutch.service.model.request.ServiceConfig;
import org.apache.nutch.service.model.response.ServiceInfo;
import org.apache.nutch.tools.CommonCrawlDataDumper;
/**
* The services resource defines an endpoint to enable the user to carry out
* Nutch jobs like dump, commoncrawldump, etc.
*/
@Path("/services")
public class ServicesResource {
private static SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
@GET
@Path("/commoncrawldump/{crawlId}")
public Response listDumpPaths(@PathParam("crawlId") String crawlId) {
File dumpFilePath = new File(crawlId + File.separator + "dump/");
File[] dumpFileList = dumpFilePath.listFiles();
List<String> fileNames = new ArrayList<>();
if (dumpFileList != null) {
for (File f : dumpFileList) {
fileNames.add(f.getPath());
}
}
ServiceInfo info = new ServiceInfo();
info.setDumpPaths(fileNames);
return Response.ok().entity(info).type(MediaType.APPLICATION_JSON).build();
}
@POST
@Path("/commoncrawldump")
public Response commoncrawlDump(ServiceConfig serviceConfig) {
String crawlId = serviceConfig.getCrawlId();
String outputDir = crawlId + File.separator + "dump" + File.separator
+ "commoncrawl-" + sdf.format(System.currentTimeMillis());
Map<String, Object> args = serviceConfig.getArgs();
args.put("outputDir", outputDir);
if (!args.containsKey(Nutch.ARG_SEGMENTDIR)) {
args.put("segment", crawlId + File.separator + "segments");
}
serviceConfig.setArgs(args);
ServiceWorker worker = new ServiceWorker(serviceConfig,
new CommonCrawlDataDumper());
worker.run();
return Response.ok(outputDir).type(MediaType.TEXT_PLAIN).build();
}
}
#include "Tracker.h"
#include "BYTETracker.h"
#include <fstream>
NvMOTContext::NvMOTContext(const NvMOTConfig &configIn, NvMOTConfigResponse &configResponse) {
configResponse.summaryStatus = NvMOTConfigStatus_OK;
}
NvMOTStatus NvMOTContext::processFrame(const NvMOTProcessParams *params, NvMOTTrackedObjBatch *pTrackedObjectsBatch) {
for (uint streamIdx = 0; streamIdx < pTrackedObjectsBatch->numFilled; streamIdx++){
NvMOTTrackedObjList *trackedObjList = &pTrackedObjectsBatch->list[streamIdx];
NvMOTFrame *frame = ¶ms->frameList[streamIdx];
// Reserve capacity instead of size-constructing: the original sized
// constructor prepended numFilled default-initialized objects before push_back.
std::vector<NvObject> nvObjects;
nvObjects.reserve(frame->objectsIn.numFilled);
for (uint32_t numObjects = 0; numObjects < frame->objectsIn.numFilled; numObjects++) {
NvMOTObjToTrack *objectToTrack = &frame->objectsIn.list[numObjects];
NvObject nvObject;
nvObject.prob = objectToTrack->confidence;
nvObject.label = objectToTrack->classId;
nvObject.rect[0] = objectToTrack->bbox.x;
nvObject.rect[1] = objectToTrack->bbox.y;
nvObject.rect[2] = objectToTrack->bbox.width;
nvObject.rect[3] = objectToTrack->bbox.height;
nvObject.associatedObjectIn = objectToTrack;
nvObjects.push_back(nvObject);
}
if (byteTrackerMap.find(frame->streamID) == byteTrackerMap.end())
byteTrackerMap.insert(std::pair<uint64_t, std::shared_ptr<BYTETracker>>(frame->streamID, std::make_shared<BYTETracker>(15, 30)));
std::vector<STrack> outputTracks = byteTrackerMap.at(frame->streamID)->update(nvObjects);
// NOTE: allocated fresh each frame and stored in trackedObjList->list below;
// nothing here frees the previous buffer, so long-running use leaks it.
NvMOTTrackedObj *trackedObjs = new NvMOTTrackedObj[512];
int filled = 0;
for (STrack &sTrack: outputTracks) {
std::vector<float> tlwh = sTrack.original_tlwh;
NvMOTRect motRect{tlwh[0], tlwh[1], tlwh[2], tlwh[3]};
NvMOTTrackedObj trackedObj;  // stack value; avoids leaking one heap object per track
trackedObj.classId = 0;
trackedObj.trackingId = (uint64_t) sTrack.track_id;
trackedObj.bbox = motRect;
trackedObj.confidence = 1;
trackedObj.age = (uint32_t) sTrack.tracklet_len;
trackedObj.associatedObjectIn = sTrack.associatedObjectIn;
trackedObj.associatedObjectIn->doTracking = true;
trackedObjs[filled++] = trackedObj;
}
trackedObjList->streamID = frame->streamID;
trackedObjList->frameNum = frame->frameNum;
trackedObjList->valid = true;
trackedObjList->list = trackedObjs;
trackedObjList->numFilled = filled;
trackedObjList->numAllocated = 512;
}
return NvMOTStatus_OK;
}
NvMOTStatus NvMOTContext::processFramePast(const NvMOTProcessParams *params,
NvDsPastFrameObjBatch *pPastFrameObjectsBatch) {
return NvMOTStatus_OK;
}
NvMOTStatus NvMOTContext::removeStream(const NvMOTStreamId streamIdMask) {
if (byteTrackerMap.find(streamIdMask) != byteTrackerMap.end()){
std::cout << "Removing tracker for stream: " << streamIdMask << std::endl;
byteTrackerMap.erase(streamIdMask);
}
return NvMOTStatus_OK;
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.test.microbench;
import org.apache.cassandra.io.util.BufferedDataOutputStreamPlus;
import org.apache.cassandra.io.util.BufferedDataOutputStreamTest;
import org.apache.cassandra.io.util.WrappedDataOutputStreamPlus;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.infra.Blackhole;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import java.util.concurrent.TimeUnit;
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 3,jvmArgsAppend = "-Xmx512M")
@Threads(1)
@State(Scope.Benchmark)
public class OutputStreamBench
{
BufferedOutputStream hole;
WrappedDataOutputStreamPlus streamA;
BufferedDataOutputStreamPlus streamB;
byte foo;
int foo1;
long foo2;
double foo3;
float foo4;
short foo5;
char foo6;
String tinyM = BufferedDataOutputStreamTest.fourByte;
String smallM;
String largeM;
String tiny = "a";
String small = "adsjglhnafsjk;gujfakyhgukafshgjkahfsgjkhafs;jkhausjkgaksfj;gafskdghajfsk;g";
String large;
@Setup
public void setUp(final Blackhole bh) {
StringBuilder sb = new StringBuilder();
for (int ii = 0; ii < 11; ii++) {
sb.append(BufferedDataOutputStreamTest.fourByte);
sb.append(BufferedDataOutputStreamTest.threeByte);
sb.append(BufferedDataOutputStreamTest.twoByte);
}
smallM = sb.toString();
sb = new StringBuilder();
while (sb.length() < 1024 * 12) {
sb.append(small);
}
large = sb.toString();
sb = new StringBuilder();
while (sb.length() < 1024 * 12) {
sb.append(smallM);
}
largeM = sb.toString();
hole = new BufferedOutputStream(new OutputStream() {
@Override
public void write(int b) throws IOException
{
bh.consume(b);
}
@Override
public void write(byte b[]) throws IOException {
bh.consume(b);
}
@Override
public void write(byte b[], int a, int c) throws IOException {
bh.consume(b);
bh.consume(a);
bh.consume(c);
}
});
streamA = new WrappedDataOutputStreamPlus(hole);
streamB = new BufferedDataOutputStreamPlus(new WritableByteChannel() {
@Override
public boolean isOpen()
{
return true;
}
@Override
public void close() throws IOException
{
}
@Override
public int write(ByteBuffer src) throws IOException
{
bh.consume(src);
int remaining = src.remaining();
src.position(src.limit());
return remaining;
}
}, 8192);
}
@Benchmark
public void testBOSByte() throws IOException
{
streamA.write(foo);
}
@Benchmark
public void testBDOSPByte() throws IOException
{
streamB.write(foo);
}
@Benchmark
public void testBOSInt() throws IOException
{
streamA.writeInt(foo1);
}
@Benchmark
public void testBDOSPInt() throws IOException
{
streamB.writeInt(foo1);
}
@Benchmark
public void testBOSLong() throws IOException
{
streamA.writeLong(foo2);
}
@Benchmark
public void testBDOSPLong() throws IOException
{
streamB.writeLong(foo2);
}
@Benchmark
public void testBOSMixed() throws IOException
{
streamA.write(foo);
streamA.writeInt(foo1);
streamA.writeLong(foo2);
streamA.writeDouble(foo3);
streamA.writeFloat(foo4);
streamA.writeShort(foo5);
streamA.writeChar(foo6);
}
@Benchmark
public void testBDOSPMixed() throws IOException
{
streamB.write(foo);
streamB.writeInt(foo1);
streamB.writeLong(foo2);
streamB.writeDouble(foo3);
streamB.writeFloat(foo4);
streamB.writeShort(foo5);
streamB.writeChar(foo6);
}
@Benchmark
public void testMTinyStringBOS() throws IOException {
streamA.writeUTF(tinyM);
}
@Benchmark
public void testMTinyStringBDOSP() throws IOException {
streamB.writeUTF(tinyM);
}
@Benchmark
public void testMTinyLegacyWriteUTF() throws IOException {
BufferedDataOutputStreamTest.writeUTFLegacy(tinyM, hole);
}
@Benchmark
public void testMSmallStringBOS() throws IOException {
streamA.writeUTF(smallM);
}
@Benchmark
public void testMSmallStringBDOSP() throws IOException {
streamB.writeUTF(smallM);
}
@Benchmark
public void testMSmallLegacyWriteUTF() throws IOException {
BufferedDataOutputStreamTest.writeUTFLegacy(smallM, hole);
}
@Benchmark
public void testMLargeStringBOS() throws IOException {
streamA.writeUTF(largeM);
}
@Benchmark
public void testMLargeStringBDOSP() throws IOException {
streamB.writeUTF(largeM);
}
@Benchmark
public void testMLargeLegacyWriteUTF() throws IOException {
BufferedDataOutputStreamTest.writeUTFLegacy(largeM, hole);
}
@Benchmark
public void testTinyStringBOS() throws IOException {
streamA.writeUTF(tiny);
}
@Benchmark
public void testTinyStringBDOSP() throws IOException {
streamB.writeUTF(tiny);
}
@Benchmark
public void testTinyLegacyWriteUTF() throws IOException {
BufferedDataOutputStreamTest.writeUTFLegacy(tiny, hole);
}
@Benchmark
public void testSmallStringBOS() throws IOException {
streamA.writeUTF(small);
}
@Benchmark
public void testSmallStringBDOSP() throws IOException {
streamB.writeUTF(small);
}
@Benchmark
public void testSmallLegacyWriteUTF() throws IOException {
BufferedDataOutputStreamTest.writeUTFLegacy(small, hole);
}
@Benchmark
public void testRLargeStringBOS() throws IOException {
streamA.writeUTF(large);
}
@Benchmark
public void testRLargeStringBDOSP() throws IOException {
streamB.writeUTF(large);
}
@Benchmark
public void testRLargeLegacyWriteUTF() throws IOException {
BufferedDataOutputStreamTest.writeUTFLegacy(large, hole);
}
}
package com.github.ompc.greys.core.util;
import java.lang.reflect.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;
/**
* Reflection utility class. Created by <EMAIL> on 15/5/18.
*/
public class GaReflectUtils {
/**
* Get the value of a member field of an object.
*
* @param <T> T
* @param target target object
* @param field target field
* @return value of the target field
* @throws IllegalArgumentException illegal argument
* @throws IllegalAccessException illegal access
*/
public static <T> T getValue(Object target, Field field) throws IllegalArgumentException, IllegalAccessException {
final boolean isAccessible = field.isAccessible();
try {
field.setAccessible(true);
//noinspection unchecked
return (T) field.get(target);
} finally {
field.setAccessible(isAccessible);
}
}
/**
* Set the value of a member field of an object.
*
* @param field field object
* @param value field value
* @param target target object
* @throws IllegalArgumentException illegal argument
* @throws IllegalAccessException illegal access
*/
public static void setValue(Field field, Object value, Object target) throws IllegalArgumentException, IllegalAccessException {
final boolean isAccessible = field.isAccessible();
try {
field.setAccessible(true);
field.set(target, value);
} finally {
field.setAccessible(isAccessible);
}
}
/**
* Get all fields of a class (including superclass and private fields).
*
* @param clazz target class
* @return all fields of the class
*/
public static Set<Field> getFields(Class<?> clazz) {
final Set<Field> fields = new LinkedHashSet<Field>();
final Class<?> parentClazz = clazz.getSuperclass();
Collections.addAll(fields, clazz.getDeclaredFields());
if (null != parentClazz) {
fields.addAll(getFields(parentClazz));
}
return fields;
}
/**
* Get a named field of a class.
*
* @param clazz target class
* @param name field name
* @return the field, or null if not found
*/
public static Field getField(Class<?> clazz, String name) {
for (Field field : getFields(clazz)) {
if (GaCheckUtils.isEquals(field.getName(), name)) {
return field;
}
}//for
return null;
}
/**
* Convert a string to the given type. Currently only 9 types are supported:
* the 8 primitive types (including their wrapper classes) plus String.
*
* @param t target type
* @param value target value
* @return the converted value, or null for unsupported types
*/
@SuppressWarnings("unchecked")
public static <T> T valueOf(Class<T> t, String value) {
if (GaCheckUtils.isIn(t, int.class, Integer.class)) {
return (T) Integer.valueOf(value);
} else if (GaCheckUtils.isIn(t, long.class, Long.class)) {
return (T) Long.valueOf(value);
} else if (GaCheckUtils.isIn(t, double.class, Double.class)) {
return (T) Double.valueOf(value);
} else if (GaCheckUtils.isIn(t, float.class, Float.class)) {
return (T) Float.valueOf(value);
} else if (GaCheckUtils.isIn(t, char.class, Character.class)) {
return (T) Character.valueOf(value.charAt(0));
} else if (GaCheckUtils.isIn(t, byte.class, Byte.class)) {
return (T) Byte.valueOf(value);
} else if (GaCheckUtils.isIn(t, boolean.class, Boolean.class)) {
return (T) Boolean.valueOf(value);
} else if (GaCheckUtils.isIn(t, short.class, Short.class)) {
return (T) Short.valueOf(value);
} else if (GaCheckUtils.isIn(t, String.class)) {
return (T) value;
} else {
return null;
}
}
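// A minimal usage sketch of valueOf() above (values are illustrative):
//
//   Integer i = GaReflectUtils.valueOf(int.class, "42");           // 42
//   Boolean b = GaReflectUtils.valueOf(Boolean.class, "true");     // true
//   Object  o = GaReflectUtils.valueOf(java.util.List.class, "x"); // null: unsupported target type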
/**
* Define a class in the given ClassLoader.
*
* @param targetClassLoader target ClassLoader
* @param className class name
* @param classByteArray class bytecode array
* @return the defined class
* @throws NoSuchMethodException
* @throws InvocationTargetException
* @throws IllegalAccessException
*/
public static Class<?> defineClass(
final ClassLoader targetClassLoader,
final String className,
final byte[] classByteArray) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
final Method defineClassMethod = ClassLoader.class.getDeclaredMethod(
"defineClass",
String.class,
byte[].class,
int.class,
int.class
);
synchronized (defineClassMethod) {
final boolean acc = defineClassMethod.isAccessible();
try {
defineClassMethod.setAccessible(true);
return (Class<?>) defineClassMethod.invoke(
targetClassLoader,
className,
classByteArray,
0,
classByteArray.length
);
} finally {
defineClassMethod.setAccessible(acc);
}
}
}
// /**
// * Get the ClassLoader of the target class.<br/>
// * Because JVM ClassLoaders use parent delegation, the list is ordered by hierarchy.
// *
// * @param targetClass target class
// * @return list of ClassLoaders (ordered by hierarchy, nearest first)
// */
// public static ArrayList<ClassLoader> recGetClassLoader(final Class<?> targetClass) {
// final ArrayList<ClassLoader> classLoaderList = new ArrayList<ClassLoader>();
// ClassLoader loader = targetClass.getClassLoader();
// if (null != loader) {
// classLoaderList.add(loader);
// while (true) {
// loader = loader.getParent();
// if (null == loader) {
// break;
// }
// classLoaderList.add(loader);
// }
// }
// return classLoaderList;
// }
/**
* Get the superclasses of the target class.
* Because Java class inheritance is single-parent, the list is ordered by hierarchy.
*
* @param targetClass target class
* @return list of superclasses (ordered from nearest ancestor upward)
*/
public static ArrayList<Class<?>> recGetSuperClass(Class<?> targetClass) {
final ArrayList<Class<?>> superClassList = new ArrayList<Class<?>>();
Class<?> currentClass = targetClass;
do {
final Class<?> superClass = currentClass.getSuperclass();
if (null == superClass) {
break;
}
superClassList.add(currentClass = superClass);
} while (true);
return superClassList;
}
/**
* Compute the ClassType bitmask.
*
* @param targetClass target class
* @return the computed ClassType
*/
public static int computeClassType(Class<?> targetClass) {
int type = 0;
if (targetClass.isAnnotation())
type |= TYPE_ANNOTATION;
if (targetClass.isAnonymousClass())
type |= TYPE_ANONYMOUS;
if (targetClass.isArray())
type |= TYPE_ARRAY;
if (targetClass.isEnum())
type |= TYPE_ENUM;
if (targetClass.isInterface())
type |= TYPE_INTERFACE;
if (targetClass.isLocalClass())
type |= TYPE_LOCAL;
if (targetClass.isMemberClass())
type |= TYPE_MEMBER;
if (targetClass.isPrimitive())
type |= TYPE_PRIMITIVE;
if (targetClass.isSynthetic())
type |= TYPE_SYNTHETIC;
return type;
}
public static final int TYPE_ANNOTATION = 1 << 0;
public static final int TYPE_ANONYMOUS = 1 << 1;
public static final int TYPE_ARRAY = 1 << 2;
public static final int TYPE_ENUM = 1 << 3;
public static final int TYPE_INTERFACE = 1 << 4;
public static final int TYPE_LOCAL = 1 << 5;
public static final int TYPE_MEMBER = 1 << 6;
public static final int TYPE_PRIMITIVE = 1 << 7;
public static final int TYPE_SYNTHETIC = 1 << 8;
/**
* Default type (matches all)
*/
public static final int DEFAULT_TYPE =
TYPE_ANNOTATION
| TYPE_ANONYMOUS | TYPE_ARRAY | TYPE_ENUM
| TYPE_INTERFACE | TYPE_LOCAL | TYPE_MEMBER
| TYPE_PRIMITIVE | TYPE_SYNTHETIC;
/**
* Compute class modifiers.
*
* @param targetClass target class
* @return class modifiers
*/
public static int computeModifier(final Class<?> targetClass) {
return targetClass.getModifiers();
}
public static int computeModifier(final Method targetMethod) {
return targetMethod.getModifiers();
}
public static int computeModifier(final Constructor<?> targetConstructor) {
return targetConstructor.getModifiers();
}
public static int computeModifier(final Field targetField) {
return targetField.getModifiers();
}
public static final int MOD_PUBLIC = Modifier.PUBLIC;
public static final int MOD_PRIVATE = Modifier.PRIVATE;
public static final int MOD_PROTECTED = Modifier.PROTECTED;
public static final int MOD_STATIC = Modifier.STATIC;
public static final int MOD_FINAL = Modifier.FINAL;
public static final int MOD_SYNCHRONIZED = Modifier.SYNCHRONIZED;
public static final int MOD_VOLATILE = Modifier.VOLATILE;
public static final int MOD_TRANSIENT = Modifier.TRANSIENT;
public static final int MOD_NATIVE = Modifier.NATIVE;
public static final int MOD_ABSTRACT = Modifier.ABSTRACT;
public static final int MOD_STRICT = Modifier.STRICT;
/**
* Default matching modifiers (matches all)
*/
public static final int DEFAULT_MOD =
MOD_FINAL
| MOD_PROTECTED | MOD_VOLATILE | MOD_STATIC | MOD_PUBLIC | MOD_SYNCHRONIZED
| MOD_TRANSIENT | MOD_ABSTRACT | MOD_NATIVE | MOD_STRICT | MOD_PRIVATE;
}
<reponame>Parcons/Torque3D
/*
* Copyright 2006 Sony Computer Entertainment Inc.
*
* Licensed under the SCEA Shared Source License, Version 1.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at:
* http://research.scea.com/scea_shared_source_license.html
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing permissions and limitations under the
* License.
*/
#include <dae/daeErrorHandler.h>
#include <modules/stdErrPlugin.h>
daeErrorHandler *daeErrorHandler::_instance = NULL;
std::unique_ptr<daeErrorHandler> daeErrorHandler::_defaultInstance(new stdErrPlugin);
daeErrorHandler::daeErrorHandler() {
}
daeErrorHandler::~daeErrorHandler() {
}
void daeErrorHandler::setErrorHandler( daeErrorHandler *eh ) {
_instance = eh;
}
daeErrorHandler *daeErrorHandler::get() {
if ( _instance == NULL ) {
return _defaultInstance.get();
}
return _instance;
}
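// A minimal usage sketch (hypothetical handler subclass; the pure-virtual
// interface is declared in dae/daeErrorHandler.h) of the singleton override above:
//
//   class QuietHandler : public daeErrorHandler {
//       void handleError(daeString msg) override   { /* route to own log */ }
//       void handleWarning(daeString msg) override { /* ignore */ }
//   };
//   QuietHandler quiet;
//   daeErrorHandler::setErrorHandler(&quiet);  // replaces the stdErrPlugin default
//   daeErrorHandler::get()->handleError("boom");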
<reponame>bilke/UniCAVE<filename>OpenGLDLL/GLFunctions/WGL/WGL_NV_gpu_affinity_Include.h
#define GLI_INCLUDE_WGL_NV_GPU_AFFINITY
enum Main {
WGL_ERROR_INCOMPATIBLE_AFFINITY_MASKS_NV = 0x20D0,
WGL_ERROR_MISSING_AFFINITY_MASK_NV = 0x20D1,
};
GLboolean wglEnumGpusNV(GLuint iGpuIndex, void* * phGpu);
GLboolean wglEnumGpuDevicesNV(void* hGpu, GLuint iDeviceIndex, void* lpGpuDevice);
void* wglCreateAffinityDCNV(const void* * phGpuList);
GLboolean wglEnumGpusFromAffinityDCNV(void* hAffinityDC, GLuint iGpuIndex, void* * hGpu);
GLboolean wglDeleteDCNV(void* hdc);
<gh_stars>100-1000
import sys
import json as _json
from .regex import isregex
from .headers import HTTPHeaderDict
from .helpers import trigger_methods
from .matchers.url import protoregex
if sys.version_info < (3,): # Python 2
from urlparse import urlparse, parse_qs, urlunparse
else: # Python 3
from urllib.parse import urlparse, parse_qs, urlunparse
class Request(object):
"""
Request object representing the request mock expectation DSL.
Arguments:
method (str): HTTP method to match. Defaults to ``GET``.
url (str): URL request to intercept and match.
headers (dict): HTTP headers to match.
query (dict): URL query params to match. Complementary to URL
defined query params.
body (str|regex): request body payload to match.
json (str|dict|list): JSON payload body structure to match.
xml (str): XML payload data structure to match.
Attributes:
method (str): HTTP method to match. Defaults to ``GET``.
url (str): URL request to intercept and match.
headers (dict): HTTP headers to match.
query (dict): URL query params to match. Complementary to URL
defined query params.
body (str|regex): request body payload to match.
json (str|dict|list): JSON payload body structure to match.
xml (str): XML payload data structure to match.
"""
# Store keys
keys = ('method', 'headers', 'body', 'url', 'query')
def __init__(self, method='GET', **kw):
self._url = None
self._body = None
self._query = None
self._method = method
self._extra = kw.get('extra')
self._headers = HTTPHeaderDict()
trigger_methods(self, kw)
@property
def method(self):
return self._method
@method.setter
def method(self, method):
self._method = method
@property
def headers(self):
return self._headers
@headers.setter
def headers(self, headers):
if not hasattr(headers, '__setitem__'):
raise TypeError('headers must be a dictionary')
self._headers.extend(headers)
@property
def extra(self):
return self._extra
@extra.setter
def extra(self, extra):
if not isinstance(extra, dict):
raise TypeError('extra must be a dictionary')
self._extra = extra
@property
def url(self):
return self._url
@property
def rawurl(self):
return self._url if isregex(self._url) else urlunparse(self._url)
@url.setter
def url(self, url):
if isregex(url):
self._url = url
else:
if not protoregex.match(url):
url = 'http://{}'.format(url)
self._url = urlparse(url)
self._query = (parse_qs(self._url.query)
if self._url.query else self._query)
@property
def query(self):
return self._query
@query.setter
def query(self, params):
self._query = parse_qs(params)
@property
def body(self):
return self._body
@body.setter
def body(self, body):
if hasattr(body, 'decode'):
try:
body = body.decode('utf-8', 'strict')
except Exception:
pass
self._body = body
@property
def json(self):
return _json.loads(self._body)
@json.setter
def json(self, data):
if isinstance(data, str):
self._body = data
else:
self._body = _json.dumps(data)
@property
def xml(self):
return self._body
@xml.setter
def xml(self, data):
self._body = data
def copy(self):
"""
Copies the current Request object instance for side-effects purposes.
Returns:
pook.Request: copy of the current Request instance.
"""
req = type(self)()
req.__dict__ = self.__dict__.copy()
req._headers = self.headers.copy()
return req
def __repr__(self):
"""
Returns an human friendly readable instance data representation.
Returns:
str
"""
entries = []
entries.append('Method: {}'.format(self._method))
entries.append('URL: {}'.format(
self._url if isregex(self._url) else self.rawurl))
if self._query:
entries.append('Query: {}'.format(self._query))
if self._headers:
entries.append('Headers: {}'.format(self._headers))
if self._body:
entries.append('Body: {}'.format(self._body))
separator = '=' * 50
return (separator + '\n{}\n' + separator).format('\n'.join(entries))
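# A minimal usage sketch (hypothetical values) of the mock-expectation DSL
# documented in the class docstring; the setters above normalize each field:
#
#   req = Request(method='POST')
#   req.url = 'example.com/api?page=1'   # scheme prepended, query parsed
#   req.headers = {'Content-Type': 'application/json'}
#   req.json = {'hello': 'world'}        # dict stored as a JSON-encoded body
#   print(req)                           # human-readable summary via __repr__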
import uuid
from rentomatic.domain.storageroom import StorageRoom
def test_storageroom_model_init():
code = uuid.uuid4()
storageroom = StorageRoom(code, size=200, price=10,
longitude=-0.09998975,
latitude=51.75436293)
assert storageroom.code == code
assert storageroom.size == 200
assert storageroom.price == 10
assert storageroom.longitude == -0.09998975
assert storageroom.latitude == 51.75436293
def test_storageroom_model_from_dict():
code = uuid.uuid4()
storageroom = StorageRoom.from_dict(
{
'code': code,
'size': 200,
'price': 10,
'longitude': -0.09998975,
'latitude': 51.75436293
}
)
assert storageroom.code == code
assert storageroom.size == 200
assert storageroom.price == 10
assert storageroom.longitude == -0.09998975
assert storageroom.latitude == 51.75436293
def test_storageroom_model_to_dict():
storageroom_dict = {
'code': uuid.uuid4(),
'size': 200,
'price': 10,
'longitude': -0.09998975,
'latitude': 51.75436293
}
storageroom = StorageRoom.from_dict(storageroom_dict)
assert storageroom.to_dict() == storageroom_dict
def test_storageroom_model_comparison():
storageroom_dict = {
'code': uuid.uuid4(),
'size': 200,
'price': 10,
'longitude': -0.09998975,
'latitude': 51.75436293
}
storageroom1 = StorageRoom.from_dict(storageroom_dict)
storageroom2 = StorageRoom.from_dict(storageroom_dict)
assert storageroom1 == storageroom2
<reponame>tdiprima/code
'''
Python 3 code.
scipy.mgrid is useful!
This short implementation comes without installing numpy.
Supports complex step. "3j" means "three equi-spaced
numbers from the span including the ends".
>>> print(mgrid[1:2:3j])
[1.0, 1.5, 2.0]
Supports floating point step.
>>> print(mgrid[1:2:0.5])
[1.0, 1.5]
does not support multi-dimensions,
indicated by a tuple to scipy.mgrid
>>> print(mgrid[1:2,3:5])
Traceback (most recent call last):
File "p.py", line 16, in __getitem__
start,stop,step = s.start,s.stop,s.step
AttributeError: 'tuple' object has no attribute 'start'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "p.py", line 18, in __getitem__
raise TypeError('expected a slice')
TypeError: expected a slice
Deviates from scipy.mgrid.
>>> mgrid[2:-1] # scipy.mgrid returns an empty list.
[2, 1, 0]
'''
def convert_slice_to_list(Slice,list_length):
'''
A fun idiom.
This gets python to deal with the "None"
in slices like slice(None,None,3)
'''
return list(range(list_length))[Slice]
class poor_man_1D_mgrid:
'''
grid()[slice] emulates the scipy mgrid function for vectors.
'''
def __getitem__(self,s):
try:
start,stop,step = s.start,s.stop,s.step
except:
raise TypeError('expected a slice')
start = start or 0
step = step or 1
L = stop-start
if isinstance(step,complex):
intervals = max(int(0.5+abs(step)),2)
step = L/(intervals-1)
halfway = start+L/2
l,r = [],[]
for i in range(int(intervals/2)):
delta = step*i
r.append(stop-delta)
l.append(start+delta)
if intervals & 1:
l.append(l[-1]+(r[-1]-l[-1])/2)
l.extend(reversed(r))
return l
if (L < 0) and (step == 1):
step = -1
if step*L < 0:
raise ValueError('avoid infinite list')
return [start+step*i for i in range(max(int(0.5+L/step),1))]
mgrid = poor_man_1D_mgrid()
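# The module docstring doubles as a doctest suite; a minimal way to run it
# (an addition here, but consistent with the docstring's doctest formatting):
if __name__ == '__main__':
    import doctest
    doctest.testmod()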
<filename>extensions/shell/browser/system_logs/shell_system_logs_fetcher.cc
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "extensions/shell/browser/system_logs/shell_system_logs_fetcher.h"
#include "components/feedback/system_logs/system_logs_fetcher.h"
#include "extensions/shell/browser/system_logs/log_sources/basic_log_source.h"
namespace system_logs {
SystemLogsFetcher* BuildShellSystemLogsFetcher(
content::BrowserContext* browser_context) {
// Deletes itself after Fetch() completes.
SystemLogsFetcher* fetcher =
new SystemLogsFetcher(/* scrub_data= */ true,
/* first_party_extension_ids= */ nullptr);
fetcher->AddSource(std::make_unique<BasicLogSource>(browser_context));
return fetcher;
}
} // namespace system_logs
<reponame>ggvl/lvgl<gh_stars>1000+
#include "../../lv_examples.h"
#if LV_USE_MONKEY && LV_BUILD_EXAMPLES
void lv_example_monkey_1(void)
{
/*Create pointer monkey test*/
lv_monkey_config_t config;
lv_monkey_config_init(&config);
config.type = LV_INDEV_TYPE_POINTER;
config.period_range.min = 10;
config.period_range.max = 100;
lv_monkey_t * monkey = lv_monkey_create(&config);
/*Start monkey test*/
lv_monkey_set_enable(monkey, true);
}
#endif
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include <benchmark/benchmark.h>
#include "arrow/array.h"
#include "arrow/testing/gtest_util.h"
#include "arrow/testing/random.h"
#include "arrow/compute/api_vector.h"
namespace arrow {
namespace compute {
using ::arrow::internal::checked_pointer_cast;
static constexpr random::SeedType kRandomSeed = 0xabcdef;
static constexpr random::SeedType kLongLength = 16384;
static std::shared_ptr<Array> MakeReplacements(random::RandomArrayGenerator* generator,
const BooleanArray& mask) {
int64_t count = 0;
for (int64_t i = 0; i < mask.length(); i++) {
count += mask.Value(i) && mask.IsValid(i);
}
return generator->Int64(count, /*min=*/-65536, /*max=*/65536, /*null_probability=*/0.1);
}
static void ReplaceWithMaskLowSelectivityBench(
benchmark::State& state) { // NOLINT non-const reference
random::RandomArrayGenerator generator(kRandomSeed);
const int64_t len = state.range(0);
const int64_t offset = state.range(1);
auto values =
generator.Int64(len, /*min=*/-65536, /*max=*/65536, /*null_probability=*/0.1)
->Slice(offset);
auto mask = checked_pointer_cast<BooleanArray>(
generator.Boolean(len, /*true_probability=*/0.1, /*null_probability=*/0.1)
->Slice(offset));
auto replacements = MakeReplacements(&generator, *mask);
for (auto _ : state) {
ABORT_NOT_OK(ReplaceWithMask(values, mask, replacements));
}
state.SetBytesProcessed(state.iterations() * (len - offset) * 8);
}
static void ReplaceWithMaskHighSelectivityBench(
benchmark::State& state) { // NOLINT non-const reference
random::RandomArrayGenerator generator(kRandomSeed);
const int64_t len = state.range(0);
const int64_t offset = state.range(1);
auto values =
generator.Int64(len, /*min=*/-65536, /*max=*/65536, /*null_probability=*/0.1)
->Slice(offset);
auto mask = checked_pointer_cast<BooleanArray>(
generator.Boolean(len, /*true_probability=*/0.9, /*null_probability=*/0.1)
->Slice(offset));
auto replacements = MakeReplacements(&generator, *mask);
for (auto _ : state) {
ABORT_NOT_OK(ReplaceWithMask(values, mask, replacements));
}
state.SetBytesProcessed(state.iterations() * (len - offset) * 8);
}
BENCHMARK(ReplaceWithMaskLowSelectivityBench)->Args({kLongLength, 0});
BENCHMARK(ReplaceWithMaskLowSelectivityBench)->Args({kLongLength, 99});
BENCHMARK(ReplaceWithMaskHighSelectivityBench)->Args({kLongLength, 0});
BENCHMARK(ReplaceWithMaskHighSelectivityBench)->Args({kLongLength, 99});
} // namespace compute
} // namespace arrow
/* SPDX-License-Identifier: Apache 2.0 */
/* Copyright Contributors to the ODPi Egeria project. */
package org.odpi.openmetadata.commonservices.multitenant.ffdc.exceptions;
import org.odpi.openmetadata.commonservices.ffdc.exceptions.OMAGCheckedExceptionBase;
import org.odpi.openmetadata.frameworks.auditlog.messagesets.ExceptionMessageDefinition;
import java.util.Map;
/**
* NewInstanceException is thrown if the OMAG service is not able to register a new instance
* of itself in its instance map.
*/
public class NewInstanceException extends OMAGCheckedExceptionBase
{
private static final long serialVersionUID = 1L;
/**
* This is the typical constructor used for creating an NewInstanceException.
*
* @param messageDefinition content of the message
* @param className name of class reporting error
* @param actionDescription description of function it was performing when error detected
*/
public NewInstanceException(ExceptionMessageDefinition messageDefinition,
String className,
String actionDescription)
{
super(messageDefinition, className, actionDescription);
}
/**
* This is the typical constructor used for creating an NewInstanceException.
* The properties allow additional information to be associated with the exception.
*
* @param messageDefinition content of the message
* @param className name of class reporting error
* @param actionDescription description of function it was performing when error detected
* @param relatedProperties arbitrary properties that may help with diagnosing the problem.
*/
public NewInstanceException(ExceptionMessageDefinition messageDefinition,
String className,
String actionDescription,
Map<String, Object> relatedProperties)
{
super(messageDefinition, className, actionDescription, relatedProperties);
}
/**
* This is the constructor used for creating an NewInstanceException when an unexpected error has been caught.
* The properties allow additional information to be associated with the exception.
*
* @param messageDefinition content of the message
* @param className name of class reporting error
* @param actionDescription description of function it was performing when error detected
* @param caughtError previous error causing this exception
*/
public NewInstanceException(ExceptionMessageDefinition messageDefinition,
String className,
String actionDescription,
Throwable caughtError)
{
super(messageDefinition, className, actionDescription, caughtError);
}
/**
* This is the constructor used for creating an NewInstanceException when an unexpected error has been caught.
* The properties allow additional information to be associated with the exception.
*
* @param messageDefinition content of the message
* @param className name of class reporting error
* @param actionDescription description of function it was performing when error detected
* @param caughtError previous error causing this exception
* @param relatedProperties arbitrary properties that may help with diagnosing the problem.
*/
public NewInstanceException(ExceptionMessageDefinition messageDefinition,
String className,
String actionDescription,
Throwable caughtError,
Map<String, Object> relatedProperties)
{
super(messageDefinition, className, actionDescription, caughtError, relatedProperties);
}
}
{
"name": "SocketIOChatClient",
"version": "0.1.1",
"summary": "A simple SocketIO client with live chat UI",
"description": "A simple SocketIO client with live chat user interface",
"homepage": "https://github.com/Chaatz/SocketIOChatClient",
"license": "MIT",
"authors": {
"<NAME>": "<EMAIL>"
},
"source": {
"git": "https://github.com/Chaatz/SocketIOChatClient.git",
"tag": "0.1.1"
},
"platforms": {
"ios": "8.0"
},
"requires_arc": true,
"source_files": "Pod/Classes/**/*",
"resource_bundles": {
"SocketIOChatClient": [
"Pod/Assets/*.xib"
]
},
"dependencies": {
"Socket.IO-Client-Swift": [
],
"Cartography": [
]
}
}
| 294 |
340 | //==================================================================================================
/**
EVE - Expressive Vector Engine
Copyright : EVE Contributors & Maintainers
SPDX-License-Identifier: MIT
**/
//==================================================================================================
#include "test.hpp"
#include <eve/constant/valmin.hpp>
#include <eve/constant/valmax.hpp>
#include <eve/constant/inf.hpp>
#include <eve/constant/minf.hpp>
#include <eve/constant/eps.hpp>
#include <eve/constant/nan.hpp>
#include <eve/function/nb_values.hpp>
#include <eve/function/inc.hpp>
#include <eve/function/dec.hpp>
#include <eve/function/all.hpp>
#include <eve/function/is_odd.hpp>
#include <eve/function/is_even.hpp>
//==================================================================================================
// Types tests
//==================================================================================================
EVE_TEST_TYPES( "Check return types of eve::nb_values(simd)"
, eve::test::simd::all_types
)
<typename T>(eve::as<T>)
{
using i_t = eve::as_integer_t<T, unsigned>;
using v_t = eve::element_type_t<T>;
using vi_t = eve::element_type_t<i_t>;
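  // i_t mirrors T's shape with unsigned integer lanes of the same width, so
  // nb_values reports a per-lane ULP distance; mixing simd and scalar
  // arguments broadcasts the scalar, hence only the scalar/scalar call below
  // returns the scalar integer type vi_t.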
TTS_EXPR_IS( eve::nb_values(T(), T()), i_t );
TTS_EXPR_IS( eve::nb_values(T(), v_t()), i_t );
TTS_EXPR_IS( eve::nb_values(v_t(), T()), i_t );
TTS_EXPR_IS( eve::nb_values(v_t(), v_t()), vi_t );
};
//==================================================================================================
// Tests for eve::nb_values
//==================================================================================================
EVE_TEST_TYPES( "Check behavior of eve::nb_values(simd)"
, eve::test::simd::all_types
)
<typename T>(eve::as<T>)
{
using r_t = eve::as_integer_t<T, unsigned>;
if constexpr(eve::floating_value<T>)
{
if constexpr(eve::platform::supports_invalids)
{
TTS_EQUAL(eve::nb_values(eve::inf(eve::as<T>()) , eve::inf(eve::as<T>())) , r_t(0) );
TTS_EQUAL(eve::nb_values(eve::minf(eve::as<T>()) , eve::minf(eve::as<T>())) , r_t(0) );
TTS_EQUAL(eve::nb_values(eve::nan(eve::as<T>()) , eve::nan(eve::as<T>())) , eve::valmax(eve::as<r_t>()));
}
auto eps = eve::eps(eve::as<T>());
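  // Below 1.0 the binary-float spacing halves to eps/2, so 1 - eps sits two
  // ULPs away from 1, while 1 + eps (i.e. inc(eps)) is exactly one ULP away.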
TTS_EQUAL( eve::nb_values(T(1) , eve::inc(eps) ) , r_t(1));
TTS_EQUAL( eve::nb_values(T(1) , -eve::dec(eps) ) , r_t(2));
TTS_EQUAL( eve::nb_values(T(1) , -eve::dec(eps/2)) , r_t(1));
TTS_EQUAL( eve::nb_values(T(-0.), T(0) ) , r_t(1));
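  // Crossing zero adds the distinct -0/+0 step (counted as 1 just above), so
  // the float count over a symmetric range is odd, whereas for signed
  // integers the count over [-10, 10] is exactly 20, hence even.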
TTS_EXPECT(eve::all(eve::is_odd(eve::nb_values(T(-10), T(10)))));
}
else if constexpr(eve::signed_value<T>)
{
TTS_EXPECT(eve::all(eve::is_even(eve::nb_values(T(-10), T(10)))));
}
else
{
TTS_EQUAL(eve::nb_values(T(1), T(10)), r_t(9));
TTS_EQUAL(eve::nb_values(T(0), T( 0)), r_t(0));
}
};
| 1,173 |
852 | #include "FWCore/Framework/interface/Event.h"
#include "EventFilter/L1TRawToDigi/plugins/PackerFactory.h"
#include "GTTokens.h"
#include "GlobalExtBlkPacker.h"
namespace l1t {
namespace stage2 {
Blocks GlobalExtBlkPacker::pack(const edm::Event& event, const PackerTokens* toks) {
edm::Handle<GlobalExtBlkBxCollection> exts;
event.getByToken(static_cast<const GTTokens*>(toks)->getExtToken(), exts);
unsigned int wdPerBX = 6; //should this be configured someplace else?
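      // Packing layout (as inferred from the loops below): the external
      // decision bits are split into 4 blocks of 64 bits each; per BX only
      // the first 2 of the 6 words carry decisions (32 bits per word), and
      // the remaining words are left as zero padding.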
Blocks res;
for (int blk = 0; blk < 4; blk++) {
unsigned int blkID = blk * 2 + 24;
unsigned int extOffset = blk * 64;
//vector of words
std::vector<uint32_t> load;
for (int i = exts->getFirstBX(); i <= exts->getLastBX(); ++i) {
for (auto j = exts->begin(i); j != exts->end(i); ++j) {
for (unsigned int wd = 0; wd < wdPerBX; wd++) {
uint32_t word = 0;
if (wd < 2) {
unsigned int startExt = wd * 32 + extOffset;
for (unsigned bt = 0; bt < 32; bt++) {
if (j->getExternalDecision(bt + startExt))
word |= (0x1 << bt);
} //end loop over bits
} //endif wrd < 2
load.push_back(word);
} //loop over words
        } //end loop over alg objects (trivial: one per BX)
} //end loop over bx
res.push_back(Block(blkID, load));
} //loop over blks
return res;
}
} // namespace stage2
} // namespace l1t
DEFINE_L1T_PACKER(l1t::stage2::GlobalExtBlkPacker);
| 785 |
3,153 | # -*- coding: utf-8 -*-
"""
@author:XuMing(<EMAIL>)
@description:
"""
import os
import sys
import numpy as np
import torch
sys.path.append('../..')
from pycorrector.seq2seq import config
from pycorrector.seq2seq.data_reader import SOS_TOKEN, EOS_TOKEN
from pycorrector.seq2seq.data_reader import load_word_dict
from pycorrector.seq2seq.seq2seq import Seq2Seq
from pycorrector.seq2seq.convseq2seq import ConvSeq2Seq
from pycorrector.seq2seq.data_reader import PAD_TOKEN
from pycorrector.seq2seq.seq2seq_model import Seq2SeqModel
from pycorrector.utils.logger import logger
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class Inference(object):
def __init__(self, arch, model_dir, src_vocab_path=None, trg_vocab_path=None,
embed_size=50, hidden_size=50, dropout=0.5, max_length=128):
logger.debug("device: {}".format(device))
if arch in ['seq2seq', 'convseq2seq']:
self.src_2_ids = load_word_dict(src_vocab_path)
self.trg_2_ids = load_word_dict(trg_vocab_path)
self.id_2_trgs = {v: k for k, v in self.trg_2_ids.items()}
if arch == 'seq2seq':
logger.debug('use seq2seq model.')
self.model = Seq2Seq(encoder_vocab_size=len(self.src_2_ids),
decoder_vocab_size=len(self.trg_2_ids),
embed_size=embed_size,
enc_hidden_size=hidden_size,
dec_hidden_size=hidden_size,
dropout=dropout).to(device)
model_path = os.path.join(model_dir, 'seq2seq.pth')
self.model.load_state_dict(torch.load(model_path))
self.model.eval()
else:
logger.debug('use convseq2seq model.')
trg_pad_idx = self.trg_2_ids[PAD_TOKEN]
self.model = ConvSeq2Seq(encoder_vocab_size=len(self.src_2_ids),
decoder_vocab_size=len(self.trg_2_ids),
embed_size=embed_size,
enc_hidden_size=hidden_size,
dec_hidden_size=hidden_size,
dropout=dropout,
trg_pad_idx=trg_pad_idx,
device=device,
max_length=max_length).to(device)
model_path = os.path.join(model_dir, 'convseq2seq.pth')
self.model.load_state_dict(torch.load(model_path))
self.model.eval()
elif arch == 'bertseq2seq':
# Bert Seq2seq model
logger.debug('use bert seq2seq model.')
use_cuda = True if torch.cuda.is_available() else False
# encoder_type=None, encoder_name=None, decoder_name=None
self.model = Seq2SeqModel("bert", "{}/encoder".format(model_dir),
"{}/decoder".format(model_dir), use_cuda=use_cuda)
else:
logger.error('error arch: {}'.format(arch))
            raise ValueError("Invalid model arch: must be one of seq2seq, convseq2seq, bertseq2seq.")
self.arch = arch
self.max_length = max_length
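    # Note: predict() maps raw sentences to corrected sentences. For the
    # seq2seq/convseq2seq paths the flow is: lowercase chars -> vocab ids
    # (out-of-vocabulary chars are silently dropped) -> model.translate(...)
    # -> ids mapped back to chars, truncated at the first EOS_TOKEN.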
def predict(self, sentence_list):
result = []
if self.arch in ['seq2seq', 'convseq2seq']:
for query in sentence_list:
out = []
tokens = [token.lower() for token in query]
tokens = [SOS_TOKEN] + tokens + [EOS_TOKEN]
src_ids = [self.src_2_ids[i] for i in tokens if i in self.src_2_ids]
sos_idx = self.trg_2_ids[SOS_TOKEN]
if self.arch == 'seq2seq':
src_tensor = torch.from_numpy(np.array(src_ids).reshape(1, -1)).long().to(device)
src_tensor_len = torch.from_numpy(np.array([len(src_ids)])).long().to(device)
sos_tensor = torch.Tensor([[self.trg_2_ids[SOS_TOKEN]]]).long().to(device)
translation, attn = self.model.translate(src_tensor, src_tensor_len, sos_tensor, self.max_length)
translation = [self.id_2_trgs[i] for i in translation.data.cpu().numpy().reshape(-1) if
i in self.id_2_trgs]
else:
src_tensor = torch.from_numpy(np.array(src_ids).reshape(1, -1)).long().to(device)
translation, attn = self.model.translate(src_tensor, sos_idx)
translation = [self.id_2_trgs[i] for i in translation if i in self.id_2_trgs]
for word in translation:
if word != EOS_TOKEN:
out.append(word)
else:
break
result.append(''.join(out))
elif self.arch == 'bertseq2seq':
corrected_sents = self.model.predict(sentence_list)
result = [i.replace(' ', '') for i in corrected_sents]
else:
raise ValueError('error arch.')
return result
if __name__ == "__main__":
m = Inference(config.arch,
config.model_dir,
config.src_vocab_path,
config.trg_vocab_path,
embed_size=config.embed_size,
hidden_size=config.hidden_size,
dropout=config.dropout,
max_length=config.max_length
)
inputs = [
'老是较书。',
'感谢等五分以后,碰到一位很棒的奴生跟我可聊。',
'遇到一位很棒的奴生跟我聊天。',
'遇到一位很美的女生跟我疗天。',
'他们只能有两个选择:接受降新或自动离职。',
'王天华开心得一直说话。'
]
outputs = m.predict(inputs)
for a, b in zip(inputs, outputs):
print('input :', a)
print('predict:', b)
print()
# result:
# input:由我起开始做。
# output:我开始做。
# input:没有解决这个问题,
# output:没有解决的问题,
| 3,515 |
3,102 | <reponame>medismailben/llvm-project
// RUN: %clang_cc1 -triple x86_64-linux-gnu -fsyntax-only -ast-dump=json %s | FileCheck %s
unsigned char implicitcast_0(unsigned int x) {
return x;
}
signed char implicitcast_1(unsigned int x) {
return x;
}
unsigned char implicitcast_2(signed int x) {
return x;
}
signed char implicitcast_3(signed int x) {
return x;
}
//----------------------------------------------------------------------------//
unsigned char cstylecast_0(unsigned int x) {
return (unsigned char)x;
}
signed char cstylecast_1(unsigned int x) {
return (signed char)x;
}
unsigned char cstylecast_2(signed int x) {
return (unsigned char)x;
}
signed char cstylecast_3(signed int x) {
return (signed char)x;
}
//----------------------------------------------------------------------------//
unsigned char cxxstaticcast_0(unsigned int x) {
return static_cast<unsigned char>(x);
}
signed char cxxstaticcast_1(unsigned int x) {
return static_cast<signed char>(x);
}
unsigned char cxxstaticcast_2(signed int x) {
return static_cast<unsigned char>(x);
}
signed char cxxstaticcast_3(signed int x) {
return static_cast<signed char>(x);
}
//----------------------------------------------------------------------------//
using UnsignedChar = unsigned char;
using SignedChar = signed char;
using UnsignedInt = unsigned int;
using SignedInt = signed int;
UnsignedChar cxxfunctionalcast_0(UnsignedInt x) {
return UnsignedChar(x);
}
SignedChar cxxfunctionalcast_1(UnsignedInt x) {
return SignedChar(x);
}
UnsignedChar cxxfunctionalcast_2(SignedInt x) {
return UnsignedChar(x);
}
SignedChar cxxfunctionalcast_3(SignedInt x) {
return SignedChar(x);
}
// NOTE: CHECK lines have been autogenerated by gen_ast_dump_json_test.py
// using --filters=ImplicitCastExpr,CStyleCastExpr,CXXStaticCastExpr,CXXFunctionalCastExpr
// CHECK: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 148,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 148,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 148,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 148,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 148,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 148,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 208,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 208,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "signed char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 208,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 208,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 208,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 208,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 268,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 268,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 268,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 268,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 268,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 268,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 326,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 326,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "signed char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 326,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 326,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 326,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 326,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CStyleCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 468,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 483,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 483,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 483,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 483,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 483,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 483,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 483,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CStyleCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 541,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 554,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "signed char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 554,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 554,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "signed char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 554,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 554,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 554,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 554,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CStyleCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 612,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 627,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 627,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 627,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 627,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 627,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 627,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 627,
// CHECK-NEXT: "col": 25,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CStyleCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 683,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 696,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "signed char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 696,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 696,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "signed char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 696,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 696,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 696,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 696,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CXXStaticCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 841,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 11
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 869,
// CHECK-NEXT: "col": 38,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 868,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 868,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 868,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 868,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 868,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 868,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CXXStaticCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 930,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 11
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 956,
// CHECK-NEXT: "col": 36,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "signed char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 955,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 955,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "signed char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 955,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 955,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 955,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 955,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CXXStaticCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1017,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 11
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1045,
// CHECK-NEXT: "col": 38,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1044,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1044,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "unsigned char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1044,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1044,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1044,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1044,
// CHECK-NEXT: "col": 37,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CXXStaticCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1104,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 11
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1130,
// CHECK-NEXT: "col": 36,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "signed char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1129,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1129,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "signed char"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1129,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1129,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1129,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1129,
// CHECK-NEXT: "col": 35,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "qualType": "int"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CXXFunctionalCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1410,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 12
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1424,
// CHECK-NEXT: "col": 24,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "unsigned char",
// CHECK-NEXT: "qualType": "UnsignedChar",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1423,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1423,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "unsigned char",
// CHECK-NEXT: "qualType": "UnsignedChar",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1423,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1423,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "unsigned int",
// CHECK-NEXT: "qualType": "UnsignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1423,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1423,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "unsigned int",
// CHECK-NEXT: "qualType": "UnsignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "unsigned int",
// CHECK-NEXT: "qualType": "UnsignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CXXFunctionalCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1487,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 10
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1499,
// CHECK-NEXT: "col": 22,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "signed char",
// CHECK-NEXT: "qualType": "SignedChar",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1498,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1498,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "signed char",
// CHECK-NEXT: "qualType": "SignedChar",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1498,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1498,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "unsigned int",
// CHECK-NEXT: "qualType": "UnsignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1498,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1498,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "unsigned int",
// CHECK-NEXT: "qualType": "UnsignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "unsigned int",
// CHECK-NEXT: "qualType": "UnsignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CXXFunctionalCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1562,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 12
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1576,
// CHECK-NEXT: "col": 24,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "unsigned char",
// CHECK-NEXT: "qualType": "UnsignedChar",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1575,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1575,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "unsigned char",
// CHECK-NEXT: "qualType": "UnsignedChar",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1575,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1575,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "int",
// CHECK-NEXT: "qualType": "SignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1575,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1575,
// CHECK-NEXT: "col": 23,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "int",
// CHECK-NEXT: "qualType": "SignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "int",
// CHECK-NEXT: "qualType": "SignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK: "kind": "CXXFunctionalCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1637,
// CHECK-NEXT: "col": 10,
// CHECK-NEXT: "tokLen": 10
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1649,
// CHECK-NEXT: "col": 22,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "signed char",
// CHECK-NEXT: "qualType": "SignedChar",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "NoOp",
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1648,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1648,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "signed char",
// CHECK-NEXT: "qualType": "SignedChar",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "IntegralCast",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ImplicitCastExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1648,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1648,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "int",
// CHECK-NEXT: "qualType": "SignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "rvalue",
// CHECK-NEXT: "castKind": "LValueToRValue",
// CHECK-NEXT: "isPartOfExplicitCast": true,
// CHECK-NEXT: "inner": [
// CHECK-NEXT: {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "DeclRefExpr",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "begin": {
// CHECK-NEXT: "offset": 1648,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: },
// CHECK-NEXT: "end": {
// CHECK-NEXT: "offset": 1648,
// CHECK-NEXT: "col": 21,
// CHECK-NEXT: "tokLen": 1
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "int",
// CHECK-NEXT: "qualType": "SignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: },
// CHECK-NEXT: "valueCategory": "lvalue",
// CHECK-NEXT: "referencedDecl": {
// CHECK-NEXT: "id": "0x{{.*}}",
// CHECK-NEXT: "kind": "ParmVarDecl",
// CHECK-NEXT: "name": "x",
// CHECK-NEXT: "type": {
// CHECK-NEXT: "desugaredQualType": "int",
// CHECK-NEXT: "qualType": "SignedInt",
// CHECK-NEXT: "typeAliasDeclId": "0x{{.*}}"
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
// CHECK-NEXT: ]
// CHECK-NEXT: }
| 26,990 |
14,668 | <filename>ios/testing/earl_grey/base_earl_grey_test_case.h
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef IOS_TESTING_EARL_GREY_BASE_EARL_GREY_TEST_CASE_H_
#define IOS_TESTING_EARL_GREY_BASE_EARL_GREY_TEST_CASE_H_
#import <XCTest/XCTest.h>
#import "ios/testing/earl_grey/app_launch_configuration.h"
// Base class for all Earl Grey tests.
// Provides EG1-compatible start-of-test-case hooks for EG2 tests,
// as well as handling common EG2 app-launching logic.
// This class also sets up code coverage by default.
@interface BaseEarlGreyTestCase : XCTestCase
// Invoked once per test case after launching the test app from -setUp.
// Subclasses can use this method to perform class level setup instead of
// overriding +setUp, as due to EG2 limitations (crbug.com/961879) +setUp would
// execute before the application is launched and thus not function in the
// expected way. Subclasses must not call this method directly. Protected
// method.
+ (void)setUpForTestCase;
// Invoked upon starting each test method in a test case.
- (void)setUp NS_REQUIRES_SUPER;
// Provides an |AppLaunchConfiguration| for the host app, used across a
// TestCase. Subclasses must override this method to change the app launch
// configuration (e.g. features or flags). The default implementation returns
// a default AppLaunchConfiguration object.
- (AppLaunchConfiguration)appConfigurationForTestCase;
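// Illustrative override (a sketch; the feature flag named below is
// hypothetical, and the AppLaunchConfiguration fields are assumed):
//
//   - (AppLaunchConfiguration)appConfigurationForTestCase {
//     AppLaunchConfiguration config;
//     config.features_enabled.push_back(kMyHypotheticalFeature);
//     return config;
//   }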
@end
#endif // IOS_TESTING_EARL_GREY_BASE_EARL_GREY_TEST_CASE_H_
| 489 |
435 | {
"copyright_text": null,
"description": "Abstract\n~~~~~~~~\n\nOpenShift Origin \u00e8 la Platform-as-a-Service opensource di riferimento.\nBasata su Kubernetes e Docker, contiene features aggiuntive e\nintegrazioni con altri componenti che semplificano le pratiche di\nDevOps.\n\nDopo una breve introduzione ad Openshift ed alla sua architettura,\nvedremo come:\n\n- fare il setup di infrastrutture applicative microservice-based (es.\n microservizi Python Flask/Django, single page application Angular,\n ecc\u2026)\n- creare una piattaforma di Continuous Integration e Continuous\n Delivery\n- implementare e gestire la CI/CD di microservice-based application\n sfruttando l\u2019integrazione con Git e Jenkins\n\nAgenda\n~~~~~~\n\n- architettura di base di OpenShift\n- come costruire un *project* OpenShift: *builds* e *deployments*\n- automatizzare il setup mediante *template*\n- utilizzare Git, Jenkins e Openshift per creare una semplice pipeline\n di CI/CD\n- strategie di deployment avanzate: *blue-green deployment* , *A/B\n deployment*\n\nPrerequisiti\n~~~~~~~~~~~~\n\n- conoscenza base di Git e Jenkins\n- conoscenza base dei concetti CI/CD e DevOps\n\nin \\_\\_on **venerd\u00ec 20 aprile** at 11:45 `**See\nschedule** </p3/schedule/pycon9/>`__\n",
"duration": 2359,
"language": "ita",
"recorded": "2018-04-20",
"related_urls": [
{
"label": "Conference schedule",
"url": "https://www.pycon.it/p3/schedule/pycon9/"
}
],
"speakers": [
"<NAME>"
],
"tags": [
"microservices",
"continuous-integration",
"git",
"continuous-delivery",
"kubernetes",
"devops",
"jenkins",
"docker",
"OpenShift"
],
"thumbnail_url": "https://i.ytimg.com/vi/m_CWNZ870eI/maxresdefault.jpg",
"title": "DevOps di applicazioni Python (e non solo) su OpenShift",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=m_CWNZ870eI"
}
]
}
| 782 |
371 | import numpy as np
import pandas as pd
from convokit.transformer import Transformer
from convokit.speaker_convo_helpers.speaker_convo_attrs import SpeakerConvoAttrs
from itertools import chain
from collections import Counter
from convokit.speaker_convo_helpers.speaker_convo_lifestage import SpeakerConvoLifestage
from convokit import Utterance
from convokit.surprise import Surprise
from sklearn.feature_extraction.text import CountVectorizer
from tqdm import tqdm
from typing import List
def _join_all_tokens(parses):
joined = []
for parse in parses:
for sent in parse:
joined += [tok['tok'].lower() for tok in sent['toks']]
return joined
def _nan_mean(arr):
arr = [x for x in arr if not np.isnan(x)]
if len(arr) > 0:
return np.mean(arr)
else:
return np.nan
def _perplexity(test_text, train_text):
N_train, N_test = len(train_text), len(test_text)
if min(N_train, N_test) == 0: return np.nan
train_counts = Counter(train_text)
return sum(
-np.log(train_counts.get(tok, 1)/N_train) for tok in test_text
)/N_test
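# Worked example of the average negative log-likelihood computed above
# (illustrative values only):
#   train_text = ['a', 'a', 'b'], test_text = ['a', 'c']
#   P(a) = 2/3; 'c' is unseen, so its count falls back to 1 -> ~1/3
#   result = (-np.log(2/3) - np.log(1/3)) / 2
# Note: despite the name, _perplexity returns cross-entropy (not its
# exponential), and the train_counts.get(tok, 1) fallback acts as a crude
# add-one floor for unseen tokens rather than a principled smoothing scheme.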
class SpeakerConvoDiversity(Transformer):
'''
    Implements a methodology to compute the linguistic divergence between a speaker's activity in each conversation in a corpus (i.e., the language of their utterances) and a reference language model trained over a different set of conversations/speakers. See `SpeakerConvoDiversityWrapper` for a more specific implementation that compares the language used by individuals within fixed lifestages, and see that wrapper's implementation for examples of calls to this transformer.
The transformer assumes that a corpus has already been tokenized (via a call to `TextParser`).
In general, this is appropriate for cases when the reference language model you wish to compare against varies across different speaker/conversations; in contrast, if you wish to compare many conversations to a _single_ language model (e.g., one trained on past conversations) then this will be inefficient.
    This will produce attributes per speaker-conversation (i.e., the behavior of a speaker in a conversation); hence it takes as parameters functions which will subset the data at a speaker-conversation level. These functions operate on a table which has as columns:
* `speaker`: speaker ID
* `convo_id`: conversation ID
* `convo_idx`: n where this conversation is the nth that the speaker participated in
* `tokens`: all utterances the speaker contributed to the conversation, concatenated together as a single list of words
* any other speaker-conversation, speaker, or conversation-level metadata required to filter input and select reference language models per speaker-conversation (passed in via the `speaker_convo_cols`, `speaker_cols` and `convo_cols` parameters)
The table is the output of calling `Corpus.get_full_attribute_table`; see documentation of that function for further reference.
The transformer supports two broad types of comparisons:
* if `groupby=[]`, then each text will be compared against a single reference text (specified by `select_fn`)
* if `groupby=[key]` then each text will be compared against a set of reference texts, where each reference text represents a different chunk of the data, aggregated by `key` (e.g., each text could be compared against the utterances contributed by different speakers, such that in each iteration of a divergence computation, the text is compared against just the utterances of a single speaker.)
    :param cmp_select_fn: the subset of speaker-conversation entries to compute divergences for. function of the form fn(df, aux) where df is a data frame indexed by speaker-conversation, and aux is any auxiliary parameters required; returns a boolean mask over the dataframe.
:param ref_select_fn: the subset of speaker-conversation entries to compute reference language models over. function of the form fn(df, aux) where df is a data frame indexed by speaker-conversation, and aux is any auxiliary parameters required; returns a boolean mask over the dataframe.
:param select_fn: function of the form fn(df, row, aux) where df is a data frame indexed by speaker-conversation, row is a row of a dataframe indexed by speaker-conversation, and aux is any auxiliary parameters required; returns a boolean mask over the dataframe.
:param divergence_fn: function to compute divergence between a speaker-conversation and reference texts. By default, the transformer will compute unigram perplexity scores, as implemented by the `compute_divergences` function. However, you can also specify your own divergence function (e.g., some sort of bigram divergence) using the same function signature.
:param speaker_convo_cols: additional speaker-convo attributes used as input to the selector functions
:param speaker_cols: additional speaker-level attributes
:param convo_cols: additional conversation-level attributes
    :param model_key_cols: list of attributes, a subset of the attributes retrieved using `Corpus.get_full_attribute_table`. These attributes specify which speaker-convo entries correspond to the same reference text. `select_fn` should return the same boolean mask over the dataframe for speaker-convo entries which have the same values for all these attributes.
:param groupby: whether to aggregate the reference texts according to the specified keys (leave empty to avoid aggregation).
:param aux_input: a dictionary of auxiliary input to the selector functions and the divergence computation
    :param recompute_tokens: whether to reprocess tokens by aggregating all tokens across different utterances made by a speaker in a conversation. By default, existing output will be cached.
:param verbosity: frequency of status messages.
'''
def __init__(self, output_field,
cmp_select_fn=lambda df, aux: np.ones(len(df)).astype(bool),
ref_select_fn=lambda df, aux: np.ones(len(df)).astype(bool),
select_fn=lambda df, row, aux: np.ones(len(df)).astype(bool),
speaker_convo_cols=[], speaker_cols=[], convo_cols=[], model_key_cols=['speaker', 'convo_id'],
groupby=[], aux_input={}, recompute_tokens=False, verbosity=0):
self.output_field = output_field
self.surprise_attr_name = f"surprise_{output_field}"
self.cmp_select_fn = cmp_select_fn
self.ref_select_fn = ref_select_fn
self.select_fn = select_fn
self.speaker_convo_cols = speaker_convo_cols
self.speaker_cols = speaker_cols
self.convo_cols = convo_cols
self.model_key_cols = model_key_cols
self.groupby = groupby
self.aux_input = aux_input
self.verbosity = verbosity
self.agg_tokens = SpeakerConvoAttrs('tokens',
agg_fn=_join_all_tokens,
recompute=recompute_tokens)
self.model_key_map = {}
def transform(self, corpus):
if self.verbosity > 0:
print('joining tokens across conversation utterances')
corpus = self.agg_tokens.transform(corpus)
        speaker_convo_cols = list(set(self.speaker_convo_cols + ['tokens']))
        input_table = corpus.get_full_attribute_table(
            speaker_convo_cols,
            self.speaker_cols, self.convo_cols
        )
surprise_transformer = self._init_surprise(lambda utt: self._get_model_key(utt, self.model_key_cols, input_table))
surprise_transformer.fit(corpus, text_func=lambda utt: self._get_text_func(utt, input_table))
surprise_transformer.transform(corpus, 'speaker', target_text_func=lambda utt: self._get_utt_row(utt, input_table).tokens)
self._set_output(corpus, input_table)
return corpus
def _get_utt_row(self, utt: Utterance, df: pd.DataFrame):
"""
Returns the row in `df` corresponding to `utt` using the speaker and conversation id of `utt`.
"""
return df.loc[f'{utt.speaker.id}__{utt.conversation_id}']
def _get_model_key(self, utt: Utterance, model_key_cols: List[str], df: pd.DataFrame):
"""
Returns the model key used by `Surprise` that corresponds to `utt` and `model_key_cols`.
Finds the row in `df` corresponding to `utt` and creates a model key using the values for the attributes in `model_key_cols` in that row.
"""
utt_row = self._get_utt_row(utt, df)
key = '.'.join([str(utt_row[col]) for col in model_key_cols])
self.model_key_map[key] = (utt_row['speaker'], utt_row['convo_id'])
return key
def _init_surprise(self, model_key_selector):
"""
        Initializes an instance of the `Surprise` transformer with parameters corresponding to this instance of `SpeakerConvoDiversity`.
"""
        target_sample_size = self.aux_input.get('cmp_sample_size', 200)
        context_sample_size = self.aux_input.get('ref_sample_size', 1000)
        n_samples = self.aux_input.get('n_iters', 50)
return Surprise(model_key_selector, tokenizer=lambda x: x, surprise_attr_name=self.surprise_attr_name, target_sample_size=target_sample_size, context_sample_size=context_sample_size, n_samples=n_samples, smooth=False)
def _get_text_func(self, utt: Utterance, df: pd.DataFrame):
"""
        Returns the reference text that should be used to calculate speaker-convo diversity for the speaker-convo group that `utt` belongs to.
"""
utt_row = self._get_utt_row(utt, df)
ref_subset = df[self.ref_select_fn(df, self.aux_input)]
ref_subset = ref_subset[self.select_fn(ref_subset, utt_row, self.aux_input)]
if not self.groupby:
return [np.array(list(chain(*ref_subset.tokens.values)))]
ref_subset = ref_subset.groupby(self.groupby).tokens.agg(lambda x: list(chain(*x))).reset_index()
ref_subset['tokens'] = ref_subset.tokens.map(np.array)
return ref_subset.tokens.values
def _get_row(self, df, fields, vals):
"""
Retrieves the row of `df` where each attribute `fields[i]` has the value `vals[i]`.
Assumes that there is exactly one row in `df` with fields equal to vals.
"""
str_df = df.astype('str')
mask = np.ones(df.shape[0], dtype=bool)
for field, val in zip(fields, vals):
mask &= (str_df[field] == val)
return df[mask].iloc[0]
def _set_output(self, corpus, df):
"""
Adds `self.output_field` to speaker convo info using scores returned by `Surprise` transformer.
"""
        for speaker in tqdm(corpus.iter_speakers(), desc='set output'):
            if self.surprise_attr_name in speaker.meta:
                scores = speaker.meta[self.surprise_attr_name]
                for key, score in scores.items():
                    if np.isnan(score):
                        continue
                    # use distinct names to avoid shadowing the Speaker object
                    # from the outer loop
                    speaker_id, convo_id = self.model_key_map[key]
                    corpus.set_speaker_convo_info(speaker_id, convo_id, self.output_field, score)
class SpeakerConvoDiversityWrapper(Transformer):
'''
Implements methodology for calculating linguistic diversity per life-stage. A wrapper around `SpeakerConvoDiversity`.
Outputs the following (speaker, conversation) attributes:
* `div__self` (within-diversity)
* `div__other` (across-diversity)
* `div__adj` (relative diversity)
Note that `np.nan` is returned for (speaker, conversation) pairs with not enough text.
:param output_field: prefix of attributes to output, defaults to 'div'
:param lifestage_size: number of conversations per lifestage
:param max_exp: highest experience level (i.e., # convos taken) to compute diversity scores for.
:param sample_size: number of words to sample per convo
:param min_n_utterances: minimum number of utterances a speaker contributes per convo for that (speaker, convo) to get scored
:param n_iters: number of samples to take for perplexity scoring
:param cohort_delta: timespan between when speakers start for them to be counted as part of the same cohort. defaults to 2 months
:param verbosity: amount of output to print
'''
def __init__(self, output_field='div', lifestage_size=20, max_exp=120,
sample_size=200, min_n_utterances=1, n_iters=50, cohort_delta=60*60*24*30*2, verbosity=100):
aux_input = {'n_iters': n_iters, 'cmp_sample_size': sample_size,
'ref_sample_size': (lifestage_size//2) * sample_size,
'max_exp': max_exp, 'min_n_utterances': min_n_utterances,
'cohort_delta': cohort_delta, 'lifestage_size': lifestage_size}
self.lifestage_transform = SpeakerConvoLifestage(lifestage_size)
self.output_field = output_field
# SpeakerConvoDiversity transformer to compute within-diversity
self.self_div = SpeakerConvoDiversity(output_field + '__self',
cmp_select_fn=lambda df, aux: (df.convo_idx < aux['max_exp']) & (df.n_convos__speaker >= aux['max_exp'])\
& (df.tokens.map(len) >= aux['cmp_sample_size']) & (df.n_utterances >= aux['min_n_utterances']),
ref_select_fn = lambda df, aux: np.ones(len(df)).astype(bool),
select_fn = lambda df, row, aux: (df.convo_idx % 2 != row.convo_idx % 2)\
& (df.speaker == row.speaker) & (df.lifestage == row.lifestage),
speaker_convo_cols=['n_utterances','lifestage'], speaker_cols=['n_convos'],
model_key_cols=['convo_idx', 'speaker', 'lifestage'],
groupby=[], aux_input=aux_input, verbosity=verbosity
)
# SpeakerConvoDiversity transformer to compute across-diversity
self.other_div = SpeakerConvoDiversity(output_field + '__other',
cmp_select_fn=lambda df, aux: (df.convo_idx < aux['max_exp']) & (df.n_convos__speaker >= aux['max_exp'])\
& (df.tokens.map(len) >= aux['cmp_sample_size']) & (df.n_utterances >= aux['min_n_utterances']),
ref_select_fn=lambda df, aux: np.ones(len(df)).astype(bool),
select_fn = lambda df, row, aux: (df.convo_idx % 2 != row.convo_idx % 2)\
& (df.speaker != row.speaker) & (df.lifestage == row.lifestage)\
& (df.n_convos__speaker >= (row.lifestage + 1) * aux['lifestage_size'])\
& (df.start_time__speaker.between(row.start_time__speaker - aux['cohort_delta'],
row.start_time__speaker + aux['cohort_delta'])),
speaker_convo_cols=['n_utterances', 'lifestage'], speaker_cols=['n_convos', 'start_time'],
model_key_cols=['convo_idx', 'speaker', 'lifestage'],
groupby=['speaker', 'lifestage'], aux_input=aux_input, verbosity=verbosity
)
self.verbosity = verbosity
def transform(self, corpus):
if self.verbosity > 0:
print('getting lifestages')
corpus = self.lifestage_transform.transform(corpus)
if self.verbosity > 0:
print('getting within diversity')
corpus = self.self_div.transform(corpus)
if self.verbosity > 0:
print('getting across diversity')
corpus = self.other_div.transform(corpus)
if self.verbosity > 0:
print('getting relative diversity')
div_table = corpus.get_full_attribute_table([self.output_field + '__self',
self.output_field + '__other'])
div_table = div_table[div_table[self.output_field + '__self'].notnull() | div_table[self.output_field + '__other'].notnull()]
div_table[self.output_field + '__adj'] = div_table[self.output_field + '__other'] \
- div_table[self.output_field + '__self']
for idx, (_, row) in enumerate(div_table.iterrows()):
if (idx > 0) and (self.verbosity > 0) and (idx % self.verbosity == 0):
print(idx, '/', len(div_table))
if not np.isnan(row[self.output_field + '__adj']):
corpus.set_speaker_convo_info(row.speaker, row.convo_id, self.output_field + '__adj',
row[self.output_field + '__adj'])
return corpus
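# Hedged usage sketch (added commentary, not part of the original module);
# assumes `corpus` is a convokit Corpus whose utterances have already been
# tokenized via TextParser:
#
#   wrapper = SpeakerConvoDiversityWrapper(output_field='div', lifestage_size=20)
#   corpus = wrapper.transform(corpus)
#   # per-(speaker, conversation) scores can then be inspected with
#   # corpus.get_full_attribute_table(['div__self', 'div__other', 'div__adj'])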
| 5,478 |
581 | import os
import numpy as np
import re
from glove.glove import Glove
from gensim.models import Doc2Vec
from model_downloader import ModelDownloader
from data_utils import TextTooShortException
class WordVectorEmbedder:
'''
generic class to embed words into word vectors
'''
def __init__(self, model_type, model_fullpath=None, model_group=None, model_subset=None, model_args={}):
'''
initialize a model from a saved object file
'''
self.model_type = model_type
if self.model_type == 'word2vec':
# default model
if model_fullpath is None or re.search('GoogleNews-vectors-negative300.bin', model_fullpath):
model_dir = '/data'
model_group = 'google-news'
model_subset = 'GoogleNews-vectors-negative300.bin'
model_args = { 'binary': True }
# setup importer and converter
self.model_import_method = Doc2Vec.load_word2vec_format
self.word_vector = self.word_vector_word2vec
elif self.model_type == 'glove':
# default model
if model_fullpath is None:
model_dir = '/data'
model_group = 'twitter-2b'
model_subset = 'glove.twitter.27B.200d'
# setup importer and converter
self.model_import_method = Glove.load_obj
self.word_vector = self.word_vector_glove
else:
raise NameError("Error! You must specify a model type from: <word2vec|glove>")
# save subset for documentation
self.model_subset = model_subset
# download and save the model (ModelDownloader will skip if exists)
if not model_fullpath:
downloader = ModelDownloader(self.model_type)
downloader.download_and_save(outdir=model_dir, datafile=model_subset, dataset=model_group)
# locate the model
model_fullpath = downloader.download_fullpath(model_dir, model_subset)
# load the model
print("Loading model from {}...".format(model_fullpath))
self.model = self.model_import_method(model_fullpath, **model_args)
# setup the word lookup
if self.model_type == 'word2vec':
self.word_set = set(self.model.index2word)
else:
self.word_set = set(self.model.dictionary)
def num_features(self):
if self.model_type == 'word2vec':
return self.model.vector_size
else:
return self.model.no_components
def word_vector_glove(self, word):
'''
get glove vector for given word
'''
word_idx = self.model.dictionary[word]
return self.model.word_vectors[word_idx]
def word_vector_word2vec(self, word):
'''
        get word2vec vector for given word
'''
return self.model[word]
def embed_words_into_vectors(self, words, num_features=None):
'''
embed words into model's vector space
'''
# store vectors as list
vectors = []
# process tokens
for word in words:
try:
# add vector
vectors.append(self.word_vector(word))
# ignore words not in dictionary
except KeyError as e:
pass
# build fixed-length set if necessary
if num_features:
# truncate if longer
if (len(vectors) >= num_features):
vectors = vectors[:num_features]
# pad if necessary by appending right-sized 0 vectors
else:
padding_length = num_features - len(vectors)
                for _ in range(padding_length):
vectors.append(np.zeros(self.num_features()))
# return ndarray of embedded words
return np.array(vectors)
def embed_words_into_vectors_concatenated(self, words, num_features=None):
vectors = self.embed_words_into_vectors(words, num_features)
return vectors.flatten()
def embed_words_into_vectors_averaged(self, words):
'''
embed words into model's averaged vector space
'''
# Function to average all of the word vectors in a given
# paragraph
# choose model
if self.model_type == 'glove':
word_ids = [self.model.dictionary[word] for word in words if word in self.word_set]
return np.nan_to_num(np.mean(self.model.word_vectors[word_ids], axis=0))
else:
# process valid words
valid_words = [word for word in words if word in self.word_set]
if len(valid_words):
# get vectors for valid words
vectors = self.word_vector(valid_words)
# find the average/paragraph vector
return np.nan_to_num(np.mean(vectors, axis=0))
else:
raise TextTooShortException()
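# Hedged usage sketch (added commentary, not part of the original module); the
# model path is hypothetical and assumes a GloVe object saved in the format
# expected by Glove.load_obj:
#
#   embedder = WordVectorEmbedder('glove', model_fullpath='/data/glove.twitter.27B.200d.obj')
#   doc_vector = embedder.embed_words_into_vectors_averaged(['hello', 'world'])
#   fixed_len = embedder.embed_words_into_vectors_concatenated(['hello', 'world'], num_features=50)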
| 2,313 |
656 | # encoding:utf-8
import matplotlib
matplotlib.use('Qt4Agg')
import matplotlib.pyplot as plt
## Scatter plot
plt.scatter(200,200,marker='x')
plt.show()
| 69 |
531 | package com.adjust.sdk;
/**
* Created by pfms on 16/02/16.
*/
public interface OnSessionTrackingFailedListener {
void onFinishedSessionTrackingFailed(AdjustSessionFailure failureResponseData);
}
| 62 |
709 | //-----------------------------------------------
//
// This file is part of the Siv3D Engine.
//
// Copyright (c) 2008-2021 <NAME>
// Copyright (c) 2016-2021 OpenSiv3D Project
//
// Licensed under the MIT License.
//
//-----------------------------------------------
# pragma once
namespace s3d
{
namespace Scene
{
inline void Resize(const int32 width, const int32 height)
{
Resize(s3d::Size{ width, height });
}
inline int32 Width() noexcept
{
return Size().x;
}
inline int32 Height() noexcept
{
return Size().y;
}
inline Point Center() noexcept
{
return (Size() / 2);
}
inline Vec2 CenterF() noexcept
{
return (Size() * 0.5);
}
inline s3d::Rect Rect() noexcept
{
return s3d::Rect{ Size() };
}
}
}
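// Hedged usage sketch (added commentary, not part of the original header):
//
//   s3d::Scene::Resize(1280, 720);
//   const s3d::Point center = s3d::Scene::Center();  // (640, 360) after the resize
//   const s3d::Rect sceneRect = s3d::Scene::Rect();  // rectangle covering the scene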
| 295 |
3,227 | <reponame>ffteja/cgal
// Copyright (c) 2003 INRIA Sophia-Antipolis (France).
// All rights reserved.
//
// This file is part of CGAL (www.cgal.org)
//
// $URL$
// $Id$
// SPDX-License-Identifier: LGPL-3.0-or-later OR LicenseRef-Commercial
//
//
// Author(s) : <NAME>
#ifndef CGAL_INTERNAL_INTERSECTIONS_3_POINT_3_SPHERE_3_INTERSECTION_H
#define CGAL_INTERNAL_INTERSECTIONS_3_POINT_3_SPHERE_3_INTERSECTION_H
namespace CGAL {
namespace Intersections {
namespace internal {
template <class K>
inline
bool
do_intersect(const typename K::Point_3& pt,
const typename K::Sphere_3& sphere,
const K& k)
{
return k.has_on_boundary_3_object()(sphere, pt);
}
template <class K>
inline
bool
do_intersect(const typename K::Sphere_3& sphere,
const typename K::Point_3& pt,
const K& k)
{
return k.has_on_boundary_3_object()(sphere, pt);
}
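// Semantics note (inferred from the overloads above; added commentary, not
// part of the original header): a Point_3 and a Sphere_3 "intersect" here
// precisely when the point lies on the sphere's boundary surface; a point
// strictly inside the ball does not intersect.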
} // namespace internal
} // namespace Intersections
} // namespace CGAL
#endif // CGAL_INTERNAL_INTERSECTIONS_3_POINT_3_SPHERE_3_INTERSECTION_H
| 443 |
2,338 | """
Test that C++ template classes that have integer parameters work correctly.
We must reconstruct the types correctly so the template types are correct
and display correctly, and also make sure the expression parser works and
is able to find all needed functions when evaluating expressions
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TemplateArgsTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def prepareProcess(self):
self.build()
# Create a target by the debugger.
exe = self.getBuildArtifact("a.out")
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
# Set breakpoints inside and outside methods that take pointers to the
# containing struct.
line = line_number('main.cpp', '// Breakpoint 1')
lldbutil.run_break_set_by_file_and_line(
self, "main.cpp", line, num_expected_locations=1, loc_exact=True)
arguments = None
environment = None
# Now launch the process, and do not stop at entry point.
process = target.LaunchSimple(
arguments, environment, self.get_process_working_directory())
self.assertTrue(process, PROCESS_IS_VALID)
# Get the thread of the process
self.assertEqual(
process.GetState(), lldb.eStateStopped,
PROCESS_STOPPED)
thread = lldbutil.get_stopped_thread(
process, lldb.eStopReasonBreakpoint)
# Get frame for current thread
return thread.GetSelectedFrame()
def test_integer_args(self):
frame = self.prepareProcess()
testpos = frame.FindVariable('testpos')
self.assertTrue(
testpos.IsValid(),
            'make sure we find a local variable named "testpos"')
self.assertEquals(testpos.GetType().GetName(), 'TestObj<1>')
expr_result = frame.EvaluateExpression("testpos.getArg()")
self.assertTrue(
expr_result.IsValid(),
'got a valid expression result from expression "testpos.getArg()"')
self.assertEquals(expr_result.GetValue(), "1", "testpos.getArg() == 1")
self.assertEqual(
expr_result.GetType().GetName(), "int",
'expr_result.GetType().GetName() == "int"')
testneg = frame.FindVariable('testneg')
self.assertTrue(
testneg.IsValid(),
            'make sure we find a local variable named "testneg"')
self.assertEquals(testneg.GetType().GetName(), 'TestObj<-1>')
expr_result = frame.EvaluateExpression("testneg.getArg()")
self.assertTrue(
expr_result.IsValid(),
'got a valid expression result from expression "testneg.getArg()"')
self.assertEqual(
expr_result.GetValue(), "-1",
"testneg.getArg() == -1")
self.assertEqual(
expr_result.GetType().GetName(), "int",
'expr_result.GetType().GetName() == "int"')
@expectedFailureAll(oslist=["windows"], bugnumber="llvm.org/pr24489")
def test_template_template_args(self):
frame = self.prepareProcess()
c1 = frame.FindVariable('c1')
self.assertTrue(
c1.IsValid(),
            'make sure we find a local variable named "c1"')
self.assertEquals(c1.GetType().GetName(), 'C<float, T1>')
f1 = c1.GetChildMemberWithName("V").GetChildAtIndex(0).GetChildMemberWithName("f")
self.assertEquals(f1.GetType().GetName(), 'float')
self.assertEquals(f1.GetValue(), '1.5')
c2 = frame.FindVariable('c2')
self.assertTrue(
c2.IsValid(),
            'make sure we find a local variable named "c2"')
self.assertEquals(c2.GetType().GetName(), 'C<double, T1, T2>')
f2 = c2.GetChildMemberWithName("V").GetChildAtIndex(0).GetChildMemberWithName("f")
self.assertEquals(f2.GetType().GetName(), 'double')
self.assertEquals(f2.GetValue(), '1.5')
f3 = c2.GetChildMemberWithName("V").GetChildAtIndex(1).GetChildMemberWithName("f")
self.assertEquals(f3.GetType().GetName(), 'double')
self.assertEquals(f3.GetValue(), '2.5')
f4 = c2.GetChildMemberWithName("V").GetChildAtIndex(1).GetChildMemberWithName("i")
self.assertEquals(f4.GetType().GetName(), 'int')
self.assertEquals(f4.GetValue(), '42')
# Gcc does not generate the necessary DWARF attribute for enum template
# parameters.
@expectedFailureAll(bugnumber="llvm.org/pr28354", compiler="gcc")
@skipIf(dwarf_version=['<', '4'])
def test_enum_args(self):
frame = self.prepareProcess()
# Make sure "member" can be displayed and also used in an expression
# correctly
member = frame.FindVariable('member')
self.assertTrue(
member.IsValid(),
'make sure we find a local variabble named "member"')
self.assertEqual(member.GetType().GetName(),
'EnumTemplate<EnumType::Member>')
expr_result = frame.EvaluateExpression("member.getMember()")
self.assertTrue(
expr_result.IsValid(),
'got a valid expression result from expression "member.getMember()"')
self.assertEqual(
expr_result.GetValue(), "123",
"member.getMember() == 123")
self.assertEqual(
expr_result.GetType().GetName(), "int",
'expr_result.GetType().GetName() == "int"')
# Make sure "subclass" can be displayed and also used in an expression
# correctly
subclass = frame.FindVariable('subclass')
self.assertTrue(
subclass.IsValid(),
            'make sure we find a local variable named "subclass"')
self.assertEqual(subclass.GetType().GetName(),
'EnumTemplate<EnumType::Subclass>')
expr_result = frame.EvaluateExpression("subclass.getMember()")
self.assertTrue(
expr_result.IsValid(),
'got a valid expression result from expression "subclass.getMember()"')
self.assertEqual(
expr_result.GetValue(), "246",
"subclass.getMember() == 246")
self.assertEqual(
expr_result.GetType().GetName(), "int",
'expr_result.GetType().GetName() == "int"')
| 2,756 |
5,823 | <reponame>onix39/engine<filename>shell/platform/glfw/headless_event_loop.h<gh_stars>1000+
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_GLFW_HEADLESS_EVENT_LOOP_H_
#define FLUTTER_SHELL_PLATFORM_GLFW_HEADLESS_EVENT_LOOP_H_
#include <condition_variable>
#include "flutter/shell/platform/glfw/event_loop.h"
namespace flutter {
// An event loop implementation that only handles Flutter Engine task
// scheduling.
class HeadlessEventLoop : public EventLoop {
public:
using TaskExpiredCallback = std::function<void(const FlutterTask*)>;
HeadlessEventLoop(std::thread::id main_thread_id,
const TaskExpiredCallback& on_task_expired);
~HeadlessEventLoop();
// Disallow copy.
HeadlessEventLoop(const HeadlessEventLoop&) = delete;
HeadlessEventLoop& operator=(const HeadlessEventLoop&) = delete;
private:
// |EventLoop|
void WaitUntil(const TaskTimePoint& time) override;
// |EventLoop|
void Wake() override;
std::condition_variable task_queue_condition_;
};
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_GLFW_HEADLESS_EVENT_LOOP_H_
| 424 |
561 | <filename>startup/GafferOSLUI/oslExamples.py<gh_stars>100-1000
import Gaffer
import GafferUI
import GafferOSL
GafferUI.Examples.registerExample(
"Compositing/OSL Image Processing",
"$GAFFER_ROOT/resources/examples/compositing/OSLImageProcessing.gfr",
description = "Demonstrates the use of OSL networks and the OSLImage node for image processing and pattern generation.",
notableNodes = [
GafferOSL.OSLImage
]
)
GafferUI.Examples.registerExample(
"Scene Processing/OSL Mesh Manipulation",
"$GAFFER_ROOT/resources/examples/sceneProcessing/OSLMeshManipulation.gfr",
description = "Demonstrates the use of OSL networks and the OSLObject node for mesh manipulation.",
notableNodes = [
GafferOSL.OSLObject
]
)
| 243 |
474 | <reponame>DennyDai/angr-management<filename>angrmanagement/ui/widgets/qvextemps_viewer.py
import logging
from PySide2.QtWidgets import QFrame, QLabel, QVBoxLayout, QHBoxLayout, QScrollArea, QSizePolicy
from PySide2.QtCore import Qt, QSize
from .qast_viewer import QASTViewer
l = logging.getLogger('ui.widgets.qvextemps_viewer')
class QVEXTempsViewer(QFrame):
def __init__(self, state, parent, workspace):
super(QVEXTempsViewer, self).__init__(parent)
self.workspace = workspace
self.state = state
# widgets
self._area = None
self._tmps = {}
self._init_widgets()
#
# Overridden methods
#
def sizeHint(self, *args, **kwargs):
return QSize(100, 100)
#
# Private methods
#
def _init_widgets(self):
area = QScrollArea()
area.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
area.setHorizontalScrollBarPolicy(Qt.ScrollBarAsNeeded)
area.setWidgetResizable(True)
self._area = area
base_layout = QVBoxLayout()
base_layout.addWidget(area)
self.setLayout(base_layout)
def _load_tmps(self):
state = self.state.am_obj
layout = QVBoxLayout()
self._tmps.clear()
if state is None:
tmps = {}
else:
tmps = state.scratch.temps
# tmps
for tmp_id, tmp_value in tmps.items():
sublayout = QHBoxLayout()
lbl_tmp_name = QLabel(self)
lbl_tmp_name.setProperty('class', 'reg_viewer_label')
lbl_tmp_name.setText("tmp_%d" % tmp_id)
lbl_tmp_name.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
sublayout.addWidget(lbl_tmp_name)
sublayout.addSpacing(10)
tmp_viewer = QASTViewer(tmp_value, workspace=self.workspace, parent=self)
self._tmps[tmp_id] = tmp_viewer
sublayout.addWidget(tmp_viewer)
layout.addLayout(sublayout)
layout.setSpacing(0)
layout.addStretch(0)
layout.setContentsMargins(2, 2, 2, 2)
# the container
container = QFrame()
container.setAutoFillBackground(True)
palette = container.palette()
palette.setColor(container.backgroundRole(), Qt.white)
container.setPalette(palette)
container.setLayout(layout)
self._area.setWidget(container)
def _watch_state(self, **kwargs):
self._load_tmps()
| 1,160 |
1,587 | <filename>spring-boot/exception-handling/src/main/java/io/reflectoring/exception/service/ProductService.java
package io.reflectoring.exception.service;
import io.reflectoring.exception.commons.I18Constants;
import io.reflectoring.exception.entity.Category;
import io.reflectoring.exception.entity.Product;
import io.reflectoring.exception.exception.NoSuchElementFoundException;
import io.reflectoring.exception.model.ProductInput;
import io.reflectoring.exception.repository.ProductRepository;
import lombok.AllArgsConstructor;
import org.springframework.context.MessageSource;
import org.springframework.stereotype.Service;
import java.util.Locale;
import java.util.Objects;
@Service
@AllArgsConstructor
public class ProductService {
private final ProductRepository repository;
private final MessageSource messageSource;
public Product getProduct(String id) {
return repository.findById(id).orElseThrow(()->
new NoSuchElementFoundException(getLocalMessage(I18Constants.NO_ITEM_FOUND.getKey(), id)));
}
public Product addProduct(ProductInput productInput){
Product product = new Product();
product.setName(productInput.getName());
product.setPrice(productInput.getPrice());
        product.setWeight(productInput.getWeight());
product.setCategory(Objects.isNull(productInput.getCategory())? Category.BOOKS: productInput.getCategory());
return repository.save(product);
}
private String getLocalMessage(String key, String... params){
return messageSource.getMessage(key,
params,
Locale.ENGLISH);
}
}
| 547 |
769 | <reponame>chouxianyu/TFA
from .coco_evaluation import COCOEvaluator
from .evaluator import (
DatasetEvaluator,
DatasetEvaluators,
inference_context,
inference_on_dataset,
)
from .lvis_evaluation import LVISEvaluator
from .pascal_voc_evaluation import PascalVOCDetectionEvaluator
from .testing import print_csv_format, verify_results
__all__ = [k for k in globals().keys() if not k.startswith("_")]
| 157 |
406 | #ifndef THEFT_AUTOSHRINK_H
#define THEFT_AUTOSHRINK_H
#include "theft_types_internal.h"
#include <limits.h>
#define AUTOSHRINK_ENV_TAG 0xa5
#define AUTOSHRINK_BIT_POOL_TAG 'B'
struct autoshrink_bit_pool {
/* Bits will always be rounded up to a multiple of 64 bits,
* and be aligned as a uint64_t. */
uint8_t *bits;
bool shrinking; /* is this pool shrinking? */
size_t bits_filled; /* how many bits are available */
size_t bits_ceil; /* ceiling for bit buffer */
size_t limit; /* after limit bytes, return 0 */
size_t consumed;
size_t request_count;
size_t request_ceil;
uint32_t *requests;
size_t generation;
size_t *index;
};
/* How large should the default autoshrink bit pool be?
* The pool will be filled and grown on demand, but an
* excessively small initial pool will lead to several
* reallocs in quick succession. */
#define DEF_POOL_SIZE (64 * 8*sizeof(uint64_t))
/* How large should the buffer for request sizes be by default? */
#define DEF_REQUESTS_CEIL2 4 /* constrain to a power of 2 */
#define DEF_REQUESTS_CEIL (1 << DEF_REQUESTS_CEIL2)
/* Default: Decide we've reached a local minimum after
* this many unsuccessful shrinks in a row. */
#define DEF_MAX_FAILED_SHRINKS 100
/* When attempting to drop records, default to odds of
* (1+DEF_DROP_THRESHOLD) in (1 << DEF_DROP_BITS). */
#define DEF_DROP_THRESHOLD 0
#define DEF_DROP_BITS 5
/* Max number of pooled random bits to give to alloc callback
* before returning 0 forever. Default: No limit. */
#define DEF_POOL_LIMIT ULLONG_MAX
/* Magic value to disable selecting a request to drop in
* drop_from_bit_pool, because it complicates tests. */
#define DO_NOT_DROP (0xFFFFFFFFLU)
typedef uint64_t autoshrink_prng_fun(uint8_t bits, void *udata);
#define TWO_EVENLY 0x80
#define FOUR_EVENLY 0x40
#define MODEL_MIN 0x08
#define MODEL_MAX 0x80
#define DROPS_MIN 0x10
#define DROPS_MAX 0xA0
enum autoshrink_action {
ASA_DROP = 0x01,
ASA_SHIFT = 0x02,
ASA_MASK = 0x04,
ASA_SWAP = 0x08,
ASA_SUB = 0x10,
};
enum autoshrink_weight {
WEIGHT_DROP = 0x00,
WEIGHT_SHIFT = 0x01,
WEIGHT_MASK = 0x02,
WEIGHT_SWAP = 0x03,
WEIGHT_SUB = 0x04,
};
struct autoshrink_model {
enum autoshrink_action cur_tried;
enum autoshrink_action cur_set;
enum autoshrink_action next_action;
uint8_t weights[5];
};
struct autoshrink_env {
// config
uint8_t arg_i;
size_t pool_size;
size_t pool_limit;
enum theft_autoshrink_print_mode print_mode;
size_t max_failed_shrinks;
uint64_t drop_threshold;
uint8_t drop_bits;
struct autoshrink_model model;
struct autoshrink_bit_pool *bit_pool;
// allow injecting a fake prng, for testing
bool leave_trailing_zeroes;
autoshrink_prng_fun *prng;
void *udata;
};
enum mutation {
MUT_SHIFT,
MUT_MASK,
MUT_SWAP,
MUT_SUB,
};
#define LAST_MUTATION MUT_SUB
#define MUTATION_TYPE_BITS 2
struct change_info {
enum mutation t;
size_t pos;
uint32_t size;
union {
uint8_t shift;
uint64_t mask;
uint64_t and;
uint64_t sub;
uint8_t swap_unused;
} u;
};
struct autoshrink_env *
theft_autoshrink_alloc_env(struct theft *t, uint8_t arg_i,
const struct theft_type_info *type_info);
void theft_autoshrink_free_env(struct theft *t, struct autoshrink_env *env);
enum theft_autoshrink_wrap {
THEFT_AUTOSHRINK_WRAP_OK,
THEFT_AUTOSHRINK_WRAP_ERROR_MEMORY = -1,
THEFT_AUTOSHRINK_WRAP_ERROR_MISUSE = -2,
};
enum theft_autoshrink_wrap
theft_autoshrink_wrap(struct theft *t,
struct theft_type_info *type_info, struct theft_type_info *wrapper);
void theft_autoshrink_free_bit_pool(struct theft *t,
struct autoshrink_bit_pool *pool);
void
theft_autoshrink_bit_pool_random(struct theft *t,
struct autoshrink_bit_pool *pool,
uint32_t bit_count, bool save_request,
uint64_t *buf);
void
theft_autoshrink_get_real_args(struct theft *t,
void **dst, void **src);
void
theft_autoshrink_update_model(struct theft *t,
uint8_t arg_id, enum theft_trial_res res,
uint8_t adjustment);
/* Alloc callback, with autoshrink_env passed along. */
enum theft_alloc_res
theft_autoshrink_alloc(struct theft *t, struct autoshrink_env *env,
void **instance);
theft_hash
theft_autoshrink_hash(struct theft *t, const void *instance,
struct autoshrink_env *env, void *type_env);
void
theft_autoshrink_print(struct theft *t, FILE *f,
struct autoshrink_env *env, const void *instance, void *type_env);
enum theft_shrink_res
theft_autoshrink_shrink(struct theft *t,
struct autoshrink_env *env,
uint32_t tactic, void **output,
struct autoshrink_bit_pool **output_bit_pool);
/* This is only exported for testing. */
void theft_autoshrink_dump_bit_pool(FILE *f, size_t bit_count,
const struct autoshrink_bit_pool *pool,
enum theft_autoshrink_print_mode print_mode);
/* Set the next action the model will deliver. (This is a hook for testing.) */
void theft_autoshrink_model_set_next(struct autoshrink_env *env,
enum autoshrink_action action);
#endif
| 2,128 |
436 | /*
* Copyright 2003-2009 the original author or authors.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.jdon.jivejdon.spi.component.email;
import com.jdon.annotation.Component;
import com.jdon.container.pico.Startable;
import com.jdon.jivejdon.domain.model.account.Account;
import com.jdon.jivejdon.domain.model.shortmessage.ShortMessage;
import com.jdon.jivejdon.util.EmailTask;
import com.jdon.util.Debug;
@Component("subscriptionEmail")
public class SubscriptionEmail implements Startable {
private final static String module = SubscriptionEmail.class.getName();
private EmailHelper emailHelper;
private SubscriptionEmailParams subscriptionEmailParam;
public SubscriptionEmail(EmailHelper emailHelper, SubscriptionEmailParams subscriptionEmailParam) {
super();
this.emailHelper = emailHelper;
this.subscriptionEmailParam = subscriptionEmailParam;
}
public void send(Account account, ShortMessage sm) {
Debug.logVerbose("sendSubscriptionEmail ", module);
if (!account.isEmailValidate()) {
Debug.logWarning("this email not Validate :" + account.getEmail());
return;
}
String body = sm.getMessageBody();
String subject = sm.getMessageTitle();
String toEmail = account.getEmail();
String toName = account.getUsername();
String fromName = sm.getMessageFrom();
String fromEmail = subscriptionEmailParam.getFromEmail();
EmailVO emailVO = new EmailVO(toName, toEmail, fromName, fromEmail, subject, body, EmailTask.HTML_FORMAT);
emailHelper.send(emailVO);
Debug.logVerbose("email is over", module);
}
@Override
public void start() {
// TODO Auto-generated method stub
}
@Override
public void stop() {
this.emailHelper.stop();
}
}
| 655 |
427 | <reponame>JaminChan/eos_win<filename>externals/wasm-compiler/llvm/utils/opt-viewer/opt-viewer.py
#!/usr/bin/env python2.7
from __future__ import print_function
desc = '''Generate HTML output to visualize optimization records from the YAML files
generated with -fsave-optimization-record and -fdiagnostics-show-hotness.
The tool requires the PyYAML and Pygments Python packages.
For faster parsing, you may want to use libYAML with PyYAML.'''
import yaml
# Try to use the C parser.
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
import argparse
import os.path
import re
import subprocess
import shutil
from pygments import highlight
from pygments.lexers.c_cpp import CppLexer
from pygments.formatters import HtmlFormatter
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('yaml_files', nargs='+')
parser.add_argument('output_dir')
parser.add_argument('-source-dir', '-s', default='', help='set source directory')
args = parser.parse_args()
p = subprocess.Popen(['c++filt', '-n'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
def demangle(name):
p.stdin.write(name + '\n')
return p.stdout.readline().rstrip()
class Remark(yaml.YAMLObject):
max_hotness = 0
# Work-around for http://pyyaml.org/ticket/154.
yaml_loader = Loader
@classmethod
def should_display_hotness(cls):
# If max_hotness is 0 at the end, we assume hotness information is
# missing and no relative hotness information is displayed
return cls.max_hotness != 0
# Map function names to their source location for function where inlining happened
caller_loc = dict()
def __getattr__(self, name):
# If hotness is missing, assume 0
if name == 'Hotness':
return 0
raise AttributeError
@property
def File(self):
return self.DebugLoc['File']
@property
def Line(self):
return int(self.DebugLoc['Line'])
@property
def Column(self):
return self.DebugLoc['Column']
@property
def DebugLocString(self):
return "{}:{}:{}".format(self.File, self.Line, self.Column)
@property
def DemangledFunctionName(self):
return demangle(self.Function)
@classmethod
def make_link(cls, File, Line):
return "{}#L{}".format(SourceFileRenderer.html_file_name(File), Line)
@property
def Link(self):
return Remark.make_link(self.File, self.Line)
def getArgString(self, mapping):
mapping = mapping.copy()
dl = mapping.get('DebugLoc')
if dl:
del mapping['DebugLoc']
assert(len(mapping) == 1)
(key, value) = mapping.items()[0]
if key == 'Caller' or key == 'Callee':
value = demangle(value)
if dl and key != 'Caller':
return "<a href={}>{}</a>".format(
Remark.make_link(dl['File'], dl['Line']), value)
else:
return value
@property
def message(self):
# Args is a list of mappings (dictionaries)
values = [self.getArgString(mapping) for mapping in self.Args]
return "".join(values)
@property
def RelativeHotness(self):
if Remark.should_display_hotness():
return "{}%".format(int(round(self.Hotness * 100 / Remark.max_hotness)))
else:
return ''
@property
def key(self):
return (self.__class__, self.Pass, self.Name, self.File, self.Line, self.Column, self.message)
class Analysis(Remark):
yaml_tag = '!Analysis'
@property
def color(self):
return "white"
class AnalysisFPCommute(Analysis):
yaml_tag = '!AnalysisFPCommute'
class AnalysisAliasing(Analysis):
yaml_tag = '!AnalysisAliasing'
class Passed(Remark):
yaml_tag = '!Passed'
@property
def color(self):
return "green"
class Missed(Remark):
yaml_tag = '!Missed'
@property
def color(self):
return "red"
class SourceFileRenderer:
def __init__(self, filename):
existing_filename = None
if os.path.exists(filename):
existing_filename = filename
else:
fn = os.path.join(args.source_dir, filename)
if os.path.exists(fn):
existing_filename = fn
self.stream = open(os.path.join(args.output_dir, SourceFileRenderer.html_file_name(filename)), 'w')
if existing_filename:
self.source_stream = open(existing_filename)
else:
self.source_stream = None
print('''
<html>
<h1>Unable to locate file {}</h1>
</html>
'''.format(filename), file=self.stream)
self.html_formatter = HtmlFormatter()
self.cpp_lexer = CppLexer()
def render_source_line(self, linenum, line):
html_line = highlight(line, self.cpp_lexer, self.html_formatter)
print('''
<tr>
<td><a name=\"L{linenum}\">{linenum}</a></td>
<td></td>
<td></td>
<td>{html_line}</td>
</tr>'''.format(**locals()), file=self.stream)
def render_inline_remarks(self, r, line):
inlining_context = r.DemangledFunctionName
dl = Remark.caller_loc.get(r.Function)
if dl:
link = Remark.make_link(dl['File'], dl['Line'] - 2)
inlining_context = "<a href={link}>{r.DemangledFunctionName}</a>".format(**locals())
# Column is the number of characters *including* tabs, keep those and
# replace everything else with spaces.
indent = line[:r.Column - 1]
indent = re.sub('\S', ' ', indent)
print('''
<tr>
<td></td>
<td>{r.RelativeHotness}</td>
<td class=\"column-entry-{r.color}\">{r.Pass}</td>
<td><pre style="display:inline">{indent}</pre><span class=\"column-entry-yellow\"> {r.message} </span></td>
<td class=\"column-entry-yellow\">{inlining_context}</td>
</tr>'''.format(**locals()), file=self.stream)
def render(self, line_remarks):
if not self.source_stream:
return
print('''
<html>
<head>
<link rel='stylesheet' type='text/css' href='style.css'>
</head>
<body>
<div class="centered">
<table>
<tr>
<td>Line</td>
<td>Hotness</td>
<td>Optimization</td>
<td>Source</td>
<td>Inline Context</td>
</tr>''', file=self.stream)
for (linenum, line) in enumerate(self.source_stream.readlines(), start=1):
self.render_source_line(linenum, line)
for remark in line_remarks.get(linenum, []):
self.render_inline_remarks(remark, line)
print('''
</table>
</body>
</html>''', file=self.stream)
@classmethod
def html_file_name(cls, filename):
return filename.replace('/', '_') + ".html"
class IndexRenderer:
def __init__(self):
self.stream = open(os.path.join(args.output_dir, 'index.html'), 'w')
def render_entry(self, r):
print('''
<tr>
<td><a href={r.Link}>{r.DebugLocString}</a></td>
<td>{r.RelativeHotness}</td>
<td>{r.DemangledFunctionName}</td>
<td class=\"column-entry-{r.color}\">{r.Pass}</td>
</tr>'''.format(**locals()), file=self.stream)
def render(self, all_remarks):
print('''
<html>
<head>
<link rel='stylesheet' type='text/css' href='style.css'>
</head>
<body>
<div class="centered">
<table>
<tr>
<td>Source Location</td>
<td>Hotness</td>
<td>Function</td>
<td>Pass</td>
</tr>''', file=self.stream)
for remark in all_remarks:
self.render_entry(remark)
print('''
</table>
</body>
</html>''', file=self.stream)
all_remarks = dict()
file_remarks = dict()
for input_file in args.yaml_files:
f = open(input_file)
docs = yaml.load_all(f, Loader=Loader)
for remark in docs:
        # Avoid remarks without debug location or if they are duplicated
if not hasattr(remark, 'DebugLoc') or remark.key in all_remarks:
continue
all_remarks[remark.key] = remark
file_remarks.setdefault(remark.File, dict()).setdefault(remark.Line, []).append(remark)
Remark.max_hotness = max(Remark.max_hotness, remark.Hotness)
# Set up a map between function names and their source location for function where inlining happened
for remark in all_remarks.itervalues():
if type(remark) == Passed and remark.Pass == "inline" and remark.Name == "Inlined":
for arg in remark.Args:
caller = arg.get('Caller')
if caller:
Remark.caller_loc[caller] = arg['DebugLoc']
if Remark.should_display_hotness():
sorted_remarks = sorted(all_remarks.itervalues(), key=lambda r: r.Hotness, reverse=True)
else:
sorted_remarks = sorted(all_remarks.itervalues(), key=lambda r: (r.File, r.Line, r.Column))
if not os.path.exists(args.output_dir):
os.mkdir(args.output_dir)
for (filename, remarks) in file_remarks.iteritems():
SourceFileRenderer(filename).render(remarks)
IndexRenderer().render(sorted_remarks)
shutil.copy(os.path.join(os.path.dirname(os.path.realpath(__file__)), "style.css"), args.output_dir)
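# Hedged usage sketch (added commentary, not part of the original script;
# paths and file names are hypothetical):
#   ./opt-viewer.py -s /path/to/sources foo.opt.yaml bar.opt.yaml html-out
# then open html-out/index.html in a browser to browse the remarks.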
| 3,781 |
310 | <reponame>dreeves/usesthis
{
"name": "OpenTable (iOS)",
"description": "An app for making restaurant reservations.",
"url": "https://itunes.apple.com/us/app/opentable-restaurant-reservations/id296581815"
} | 77 |
312 | <filename>tools/console/src/test/java/org/eclipse/rdf4j/console/command/ConvertTest.java<gh_stars>100-1000
/*******************************************************************************
* Copyright (c) 2018 Eclipse RDF4J contributors.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Distribution License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*******************************************************************************/
package org.eclipse.rdf4j.console.command;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import org.eclipse.rdf4j.RDF4JException;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.github.jsonldjava.utils.JsonUtils;
/**
* @author <NAME>
*/
public class ConvertTest extends AbstractCommandTest {
private Convert cmd;
private File from;
@BeforeEach
public void setup() throws IOException, RDF4JException {
when(mockConsoleIO.askProceed("File exists, continue ?", false)).thenReturn(Boolean.TRUE);
cmd = new Convert(mockConsoleIO, mockConsoleState, defaultSettings);
from = new File(locationFile, "alien.ttl");
copyFromResource("convert/alien.ttl", from);
}
@AfterEach
@Override
public void tearDown() {
from.delete();
}
@Test
public final void testConvert() throws IOException {
File json = new File(locationFile, "alien.jsonld");
cmd.execute("convert", from.getAbsolutePath(), json.getAbsolutePath());
assertTrue(json.length() > 0, "File is empty");
Object o = null;
try {
o = JsonUtils.fromInputStream(Files.newInputStream(json.toPath()));
} catch (IOException ioe) {
//
}
assertTrue(o != null, "Invalid JSON");
}
@Test
public final void testConvertWorkDir() throws IOException {
setWorkingDir(cmd);
File json = new File(locationFile, "alien.jsonld");
cmd.execute("convert", from.getName(), json.getName());
assertTrue(json.length() > 0, "File is empty");
Object o = null;
try {
o = JsonUtils.fromInputStream(Files.newInputStream(json.toPath()));
} catch (IOException ioe) {
//
}
assertTrue(o != null, "Invalid JSON");
}
@Test
public final void testConvertParseError() throws IOException {
File wrong = new File(locationFile, "wrong.nt");
Files.write(wrong.toPath(), "error".getBytes());
File json = new File(locationFile, "empty.jsonld");
cmd.execute("convert", wrong.toString(), json.toString());
verify(mockConsoleIO).writeError(anyString());
assertFalse(mockConsoleIO.wasErrorWritten());
}
@Test
public final void testConvertInvalidFormat() throws IOException {
File qyx = new File(locationFile, "alien.qyx");
cmd.execute("convert", from.toString(), qyx.toString());
verify(mockConsoleIO).writeError("No RDF writer for " + qyx.toString());
}
}
| 1,051 |
1,602 | <filename>third-party/libfabric/libfabric-src/include/rbtree.h<gh_stars>1000+
/*
* Copyright (c) 2015 Cray Inc. All rights reserved.
*
* This software is available to you under a choice of one of two
* licenses. You may choose to be licensed under the terms of the GNU
* General Public License (GPL) Version 2, available from the file
* COPYING in the main directory of this source tree, or the
* BSD license below:
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
/*
* Copied from http://oopweb.com/Algorithms/Documents/Sman/VolumeFrames.html?/Algorithms/Documents/Sman/Volume/RedBlackTrees_files/s_rbt.htm
*
* Disclosure from the author's main page:
* (http://oopweb.com/Algorithms/Documents/Sman/VolumeFrames.html?/Algorithms/Documents/Sman/Volume/RedBlackTrees_files/s_rbt.htm)
*
* Source code when part of a software project may be used freely
* without reference to the author.
*
*/
#ifndef RBTREE_H_
#define RBTREE_H_
typedef enum {
RBT_STATUS_OK,
RBT_STATUS_MEM_EXHAUSTED,
RBT_STATUS_DUPLICATE_KEY,
RBT_STATUS_KEY_NOT_FOUND
} RbtStatus;
typedef void *RbtIterator;
typedef void *RbtHandle;
RbtHandle rbtNew(int(*compare)(void *a, void *b));
// create red-black tree
// parameters:
// compare pointer to function that compares keys
// return 0 if a == b
// return < 0 if a < b
// return > 0 if a > b
// returns:
// handle use handle in calls to rbt functions
void rbtDelete(RbtHandle h);
// destroy red-black tree
RbtStatus rbtInsert(RbtHandle h, void *key, void *value);
// insert key/value pair
RbtStatus rbtErase(RbtHandle h, RbtIterator i);
// delete node in tree associated with iterator
// this function does not free the key/value pointers
RbtIterator rbtNext(RbtHandle h, RbtIterator i);
// return ++i
RbtIterator rbtBegin(RbtHandle h);
// return pointer to first node
RbtIterator rbtEnd(RbtHandle h);
// return pointer to one past last node
void rbtKeyValue(RbtHandle h, RbtIterator i, void **key, void **value);
// returns key/value pair associated with iterator
RbtIterator rbtFindLeftmost(RbtHandle h, void *key,
int(*compare)(void *a, void *b));
// returns iterator associated with left-most match. This is useful when a new
// key might invalidate the uniqueness property of the tree.
RbtIterator rbtFind(RbtHandle h, void *key);
// returns iterator associated with key
void rbtTraversal(RbtHandle h, RbtIterator it, void *handler_arg,
void(*handler)(void *arg, RbtIterator it));
// tree traversal that visits (applies handler()) each node in the tree data
// structure exactly once.
void *rbtRoot(RbtHandle h);
// returns the root of the tree
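/* Hedged usage sketch (added commentary, not part of the original header;
 * my_compare, key and value are hypothetical):
 *
 *   RbtHandle t = rbtNew(my_compare);
 *   rbtInsert(t, key, value);
 *   for (RbtIterator it = rbtBegin(t); it != rbtEnd(t); it = rbtNext(t, it)) {
 *       void *k, *v;
 *       rbtKeyValue(t, it, &k, &v);
 *   }
 *   rbtDelete(t);
 */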
#endif /* RBTREE_H_ */
| 1,239 |
2,981 | <filename>chrome/common/extensions/docs/examples/extensions/calendar/_locales/cs/messages.json
{"name":{"message":"Kontrola Kalend\u00e1\u0159e Google (od spole\u010dnosti Google)"},"title":{"message":"Kontrola Kalend\u00e1\u0159e Google"},"description":{"message":"Umo\u017e\u0148uje rychle zobrazit \u010das, kter\u00fd zb\u00fdv\u00e1 do dal\u0161\u00ed sch\u016fzky napl\u00e1novan\u00e9 v kter\u00e9mkoli z va\u0161ich kalend\u00e1\u0159\u016f. Kliknut\u00edm na tla\u010d\u00edtko p\u0159ejdete do kalend\u00e1\u0159e."},"direction":{"message":"ltr"},"notitle":{"message":"(Bez n\u00e1zvu)"},"optionstitle":{"message":"Kontrola Kalend\u00e1\u0159e Google"},"minutes":{"message":"$1 min","placeholders":{"1":{"content":"$1"}}},"hours":{"message":"$1 h","placeholders":{"1":{"content":"$1"}}},"days":{"message":"$1 d","placeholders":{"1":{"content":"$1"}}},"multicalendartext":{"message":"Podpora n\u011bkolika kalend\u00e1\u0159\u016f"},"extensionname":{"message":"Kontrola Kalend\u00e1\u0159e Google"},"status_saved":{"message":"Nastaven\u00ed byla ulo\u017eena."},"status_saving":{"message":"Ukl\u00e1d\u00e1n\u00ed...."},"multicalendartooltip":{"message":"Za\u0161krtnut\u00edm pol\u00ed\u010dka pros\u00edm povolte podporu v\u00edce kalend\u00e1\u0159\u016f"},"imagetooltip":{"message":"Kalend\u00e1\u0159 Google"}}
| 559 |
1,862 | <reponame>devshop2019/mixlyTest
#ifndef BLINKER_AUTO_SUBDEVICE_H
#define BLINKER_AUTO_SUBDEVICE_H
#if defined(BLINKER_WIFI_SUBDEVICE)
#if ARDUINO >= 100
#include <Arduino.h>
#else
#include <WProgram.h>
#endif
#include <EEPROM.h>
// #include "Blinker/BlinkerAuto.h"
#include "Blinker/BlinkerConfig.h"
#include "Blinker/BlinkerDebug.h"
#include "Blinker/BlinkerUtility.h"
#ifndef ARDUINOJSON_VERSION_MAJOR
#include "modules/ArduinoJson/ArduinoJson.h"
#endif
class BlinkerAutoSubdevice
{
private :
uint8_t a_num;
// {
// "auto":{
// "ena":1,//_autoState
// "id":123456,//_autoId
// "logic":"numberic",//_logicType
// "data":
// [
// {
// "key":"humi",
// "value":40,
// "type":"<",//_targetState|_compareType
// "dur":10
// }
// ],
// "range":[540, 1260],
// }
// }
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// | | | | | | _time1 0-1440min 11 | _time2 0-1440min 11
// | | | | | _duration 0-60min 6
// | | | | _targetState|_compareType on/off|less/equal/greater 2
// | | | _targetState|_compareType on/off|less/equal/greater
// |
// | logic_type state/numberic 2
// autoData
// | _linkNum
// - - - - - - - -
// | | |_logicType state/numberic/and/or 2
// | | _autoState true/false 1
// | _haveAuto
// |
// typestate
};
#endif
#endif
| 997 |
558 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_IO_CORE_KERNELS_ARROW_UTIL_H_
#define TENSORFLOW_IO_CORE_KERNELS_ARROW_UTIL_H_
#include "arrow/api.h"
#include "arrow/ipc/api.h"
#include "arrow/util/io_util.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/core/status.h"
namespace tensorflow {
// Forward declaration
class Tensor;
class TensorShape;
namespace data {
#define CHECK_ARROW(arrow_status) \
do { \
arrow::Status _s = (arrow_status); \
if (!_s.ok()) { \
return errors::Internal(_s.ToString()); \
} \
} while (false)
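// Hedged usage sketch (added commentary, not part of the original header):
// inside a function returning tensorflow::Status, CHECK_ARROW converts a
// failed arrow::Status into an errors::Internal early return, e.g. (builder
// and array are hypothetical locals):
//
//   CHECK_ARROW(builder.Finish(&array));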
namespace ArrowUtil {
// Convert Arrow Data Type to TensorFlow
Status GetTensorFlowType(std::shared_ptr<::arrow::DataType> dtype,
::tensorflow::DataType* out);
// Convert TensorFlow Data Type to Arrow
Status GetArrowType(::tensorflow::DataType dtype,
std::shared_ptr<::arrow::DataType>* out);
// Assign equivalent TensorShape for the given Arrow Array
Status AssignShape(std::shared_ptr<arrow::Array> array, int64 i,
int64 batch_size, TensorShape* out_shape);
// Assign DataType and equivalent TensorShape for the given Arrow Array
Status AssignSpec(std::shared_ptr<arrow::Array> array, int64 i,
int64 batch_size, ::tensorflow::DataType* out_dtype,
TensorShape* out_shape);
// Assign elements of an Arrow Array to a Tensor
Status AssignTensor(std::shared_ptr<arrow::Array> array, int64 i,
Tensor* out_tensor);
// Checks the Arrow Array datatype matches the expected TF datatype
Status CheckArrayType(std::shared_ptr<arrow::DataType> type,
::tensorflow::DataType expected_type);
// Make list and primitive array data
Status MakeArrayData(std::shared_ptr<arrow::DataType> type,
std::vector<int64> array_lengths,
std::vector<std::shared_ptr<arrow::Buffer>> buffers,
std::shared_ptr<arrow::ArrayData>* out_data);
// Parse the given endpoint to extract type and value strings
Status ParseEndpoint(std::string endpoint, std::string* endpoint_type,
std::string* endpoint_value);
// Parse the given IPv4 host string to get address and port
Status ParseHost(std::string host, std::string* host_address,
std::string* host_port);
} // namespace ArrowUtil
} // namespace data
} // namespace tensorflow
#endif // TENSORFLOW_IO_CORE_KERNELS_ARROW_UTIL_H_
| 1,286 |
713 | <gh_stars>100-1000
# My solution using Heap
import heapq
def findThreeLargestNumbers(array):
hp = []
for num in array:
if len(hp) < 3:
heapq.heappush(hp, num)
else:
if hp[0] < num:
heapq.heappop(hp)
heapq.heappush(hp, num)
return sorted(hp)
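# The heap never holds more than three items, so each push/pop is O(log 3) ~ O(1);
# overall this is O(n) time and O(1) extra space.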
# Solution provided by Algoexpert
# O(n) time | O(1) space
def find_three_largest_number(array):
three_largest_number = [None, None, None]
for num in array:
update_largest(num, three_largest_number)
return three_largest_number
def update_largest(number, three_largest_number):
if three_largest_number[2] is None or number > three_largest_number[2]:
shift_and_update(three_largest_number, number, 2)
elif three_largest_number[1] is None or number > three_largest_number[1]:
shift_and_update(three_largest_number, number, 1)
elif three_largest_number[0] is None or number > three_largest_number[0]:
shift_and_update(three_largest_number, number, 0)
def shift_and_update(three_largest_number, number, index):
for i in range(index + 1):
if i == index:
three_largest_number[index] = number
else:
three_largest_number[i] = three_largest_number[i + 1]
given_numbers = [141, 1, 17, -7, -17, -27, 18, 541, 8, 7, 7]
largest_numbers = find_three_largest_number(given_numbers)
print("Largest numbers are: ", largest_numbers)
| 564 |
695 | <reponame>techkey/PTVS
def r_a(a, b):
return a
def r_b(a, b):
return b
def r_str():
return ''
def r_object():
return object()
class A:
def r_A(self):
return type(self)()
| 102 |
678 | /**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/GMM.framework/GMM
*/
#import <GMM/GMMWaypointFeedback.h>
#import <GMM/XXUnknownSuperclass.h>
@class NSString, NSMutableArray;
__attribute__((visibility("hidden")))
@interface GMMWaypointFeedback : XXUnknownSuperclass {
int _status; // 4 = 0x4
NSMutableArray *_waypoints; // 8 = 0x8
NSString *_streetViewPanoId; // 12 = 0xc
}
@property(retain, nonatomic) NSString *streetViewPanoId; // G=0x265c5; S=0x265d5; @synthesize=_streetViewPanoId
@property(readonly, assign, nonatomic) BOOL hasStreetViewPanoId; // G=0x26115;
@property(retain, nonatomic) NSMutableArray *waypoints; // G=0x26591; S=0x265a1; @synthesize=_waypoints
@property(assign, nonatomic) int status; // G=0x26571; S=0x26581; @synthesize=_status
// declared property setter: - (void)setStreetViewPanoId:(id)anId; // 0x265d5
// declared property getter: - (id)streetViewPanoId; // 0x265c5
// declared property setter: - (void)setWaypoints:(id)waypoints; // 0x265a1
// declared property getter: - (id)waypoints; // 0x26591
// declared property setter: - (void)setStatus:(int)status; // 0x26581
// declared property getter: - (int)status; // 0x26571
- (void)writeTo:(id)to; // 0x26415
- (BOOL)readFrom:(id)from; // 0x26251
- (id)dictionaryRepresentation; // 0x2619d
- (id)description; // 0x2612d
// declared property getter: - (BOOL)hasStreetViewPanoId; // 0x26115
- (id)waypointAtIndex:(unsigned)index; // 0x260f5
- (unsigned)waypointsCount; // 0x260d5
- (void)addWaypoint:(id)waypoint; // 0x26071
- (void)dealloc; // 0x26019
@end
@interface GMMWaypointFeedback (GMMProtoExtras)
- (id)description; // 0xb67d
@end
| 669 |
381 |
RPY_EXTERN char * jitlog_init(int);
RPY_EXTERN void jitlog_try_init_using_env(void);
RPY_EXTERN int jitlog_enabled(void);
RPY_EXTERN void jitlog_write_marked(char*, int);
RPY_EXTERN void jitlog_teardown(void);
| 85 |
582 | /**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.propertysections;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.commands.CompoundCommand;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Spinner;
import org.eclipse.ui.PlatformUI;
import com.archimatetool.editor.diagram.commands.DiagramModelObjectAlphaCommand;
import com.archimatetool.editor.diagram.commands.DiagramModelObjectOutlineAlphaCommand;
import com.archimatetool.model.IArchimatePackage;
import com.archimatetool.model.IDiagramModelObject;
/**
* Property Section for Opacity
*
* @author <NAME>
*/
public class OpacitySection extends AbstractECorePropertySection {
private static final String HELP_ID = "com.archimatetool.help.elementPropertySection"; //$NON-NLS-1$
/**
* Filter to show or reject this section depending on input value
*/
public static class Filter extends ObjectFilter {
@Override
public boolean isRequiredType(Object object) {
return (object instanceof IDiagramModelObject) && (shouldExposeFeature((EObject)object, IArchimatePackage.Literals.DIAGRAM_MODEL_OBJECT__ALPHA.getName())
|| shouldExposeFeature((EObject)object, IDiagramModelObject.FEATURE_LINE_ALPHA));
}
@Override
public Class<?> getAdaptableType() {
return IDiagramModelObject.class;
}
}
private Spinner fFillSpinner;
private Spinner fOutlineSpinner;
@Override
protected void createControls(Composite parent) {
((GridLayout)parent.getLayout()).horizontalSpacing = 30;
Composite group1 = createComposite(parent, 2, false);
fFillSpinner = createSpinnerControl(group1, Messages.OpacitySection_0, 0);
Composite group2 = createComposite(parent, 2, false);
fOutlineSpinner = createSpinnerControl(group2, Messages.OutlineOpacitySection_0, 1);
// Allow setting 1 or 2 columns
GridLayoutColumnHandler.create(parent, 2).updateColumns();
// Help ID
PlatformUI.getWorkbench().getHelpSystem().setHelp(parent, HELP_ID);
}
private Spinner createSpinnerControl(Composite parent, String label, int type) {
createLabel(parent, label, ITabbedLayoutConstants.STANDARD_LABEL_WIDTH, SWT.CENTER);
final Spinner spinner = new Spinner(parent, SWT.BORDER);
spinner.setMinimum(0);
spinner.setMaximum(255);
spinner.setIncrement(5);
getWidgetFactory().adapt(spinner, true, true);
Listener listener = (e) -> {
int newValue = spinner.getSelection();
CompoundCommand result = new CompoundCommand();
for(EObject dmo : getEObjects()) {
if(isAlive(dmo)) {
Command cmd;
if(type == 0) {
cmd = new DiagramModelObjectAlphaCommand((IDiagramModelObject)dmo, newValue);
}
else {
cmd = new DiagramModelObjectOutlineAlphaCommand((IDiagramModelObject)dmo, newValue);
}
if(cmd.canExecute()) {
result.add(cmd);
}
}
}
executeCommand(result.unwrap());
};
spinner.addListener(SWT.MouseUp, listener);
spinner.addListener(SWT.FocusOut, listener);
spinner.addListener(SWT.DefaultSelection, listener);
spinner.addDisposeListener((e) -> {
if(spinner != null && !spinner.isDisposed()) {
spinner.removeListener(SWT.MouseUp, listener);
spinner.removeListener(SWT.FocusOut, listener);
spinner.removeListener(SWT.DefaultSelection, listener);
}
});
return spinner;
}
@Override
protected void notifyChanged(Notification msg) {
if(msg.getNotifier() == getFirstSelectedObject()) {
Object feature = msg.getFeature();
if(feature == IArchimatePackage.Literals.DIAGRAM_MODEL_OBJECT__ALPHA
|| isFeatureNotification(msg, IDiagramModelObject.FEATURE_LINE_ALPHA)
|| feature == IArchimatePackage.Literals.LOCKABLE__LOCKED) {
update();
}
}
}
@Override
protected void update() {
if(fIsExecutingCommand) {
return;
}
IDiagramModelObject lastSelected = (IDiagramModelObject)getFirstSelectedObject();
fFillSpinner.setSelection(lastSelected.getAlpha());
fOutlineSpinner.setSelection(lastSelected.getLineAlpha());
fFillSpinner.setEnabled(!isLocked(lastSelected) && getFilter().shouldExposeFeature(lastSelected, IArchimatePackage.Literals.DIAGRAM_MODEL_OBJECT__ALPHA.getName()));
fOutlineSpinner.setEnabled(!isLocked(lastSelected) && getFilter().shouldExposeFeature(lastSelected, IDiagramModelObject.FEATURE_LINE_ALPHA));
}
@Override
protected IObjectFilter getFilter() {
return new Filter();
}
}
| 2,440 |
2,757 | /**@file
Copyright (c) 2006, Intel Corporation. All rights reserved.<BR>
This program and the accompanying materials
are licensed and made available under the terms and conditions of the BSD License
which accompanies this distribution. The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
Module Name:
Metronome.c
Abstract:
NT Emulation Metronome Architectural Protocol Driver as defined in DXE CIS
**/
#include "Metronome.h"
//
// Global Variables
//
EFI_METRONOME_ARCH_PROTOCOL mMetronome = {
WinNtMetronomeDriverWaitForTick,
TICK_PERIOD
};
//
// Worker Functions
//
EFI_STATUS
EFIAPI
WinNtMetronomeDriverWaitForTick (
IN EFI_METRONOME_ARCH_PROTOCOL *This,
IN UINT32 TickNumber
)
/*++
Routine Description:
The WaitForTick() function waits for the number of ticks specified by
TickNumber from a known time source in the platform. If TickNumber of
ticks are detected, then EFI_SUCCESS is returned. The actual time passed
between entry of this function and the first tick is between 0 and
TickPeriod 100 nS units. If you want to guarantee that at least TickPeriod
time has elapsed, wait for two ticks. This function waits for a hardware
event to determine when a tick occurs. It is possible for interrupt
processing, or exception processing to interrupt the execution of the
WaitForTick() function. Depending on the hardware source for the ticks, it
is possible for a tick to be missed. This function cannot guarantee that
ticks will not be missed. If a timeout occurs waiting for the specified
number of ticks, then EFI_TIMEOUT is returned.
Arguments:
This - The EFI_METRONOME_ARCH_PROTOCOL instance.
TickNumber - Number of ticks to wait.
Returns:
EFI_SUCCESS - The wait for the number of ticks specified by TickNumber
succeeded.
--*/
{
UINT64 SleepTime;
//
// Calculate the time to sleep. Win API smallest unit to sleep is 1 millisec
// Tick Period is in 100ns units, divide by 10000 to convert to ms
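  // Adding 9999 before the divide rounds up, so the sleep is never shorter
  // than the requested number of ticks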
//
SleepTime = DivU64x32 (MultU64x32 ((UINT64) TickNumber, TICK_PERIOD) + 9999, 10000);
gWinNt->Sleep ((UINT32) SleepTime);
return EFI_SUCCESS;
}
EFI_STATUS
EFIAPI
WinNtMetronomeDriverInitialize (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
)
/*++
Routine Description:
Initialize the Metronome Architectural Protocol driver
Arguments:
ImageHandle - ImageHandle of the loaded driver
SystemTable - Pointer to the System Table
Returns:
EFI_SUCCESS - Metronome Architectural Protocol created
EFI_OUT_OF_RESOURCES - Not enough resources available to initialize driver.
  EFI_DEVICE_ERROR - A device error occurred attempting to initialize the driver.
--*/
{
EFI_STATUS Status;
EFI_HANDLE Handle;
//
// Install the Metronome Architectural Protocol onto a new handle
//
Handle = NULL;
Status = gBS->InstallProtocolInterface (
&Handle,
&gEfiMetronomeArchProtocolGuid,
EFI_NATIVE_INTERFACE,
&mMetronome
);
return Status;
}
| 1,268 |
358 | <reponame>liaoziyang/ContentAssist
/*
* Copyright 2014
* Software Science and Technology Lab.
* Department of Computer Science, Ritsumeikan University
*/
package org.jtool.changerecorder.util;
/**
* Compares two strings.
* @author <NAME>
*/
public class StringComparator {
/**
* Tests if two texts are the same.
* @param text1 the first text to be compared to the second text
     * @param text2 the second text to be compared to the first text
* @return <code>true</code> if both the texts are the same, otherwise <code>false</code>
*/
public static boolean isSame(String text1, String text2) {
if (text1 == null && text2 == null) {
return true;
}
if (text1 == null || text2 == null) {
return false;
}
return text1.compareTo(text2) == 0;
}
/**
* Tests if two texts are the same, ignoring case differences.
* @param text1 the first text to be compared to the second text
     * @param text2 the second text to be compared to the first text
* @return <code>true</code> if the texts are the same, otherwise <code>false</code>
*/
public static boolean isSameIgnoreCase(String text1, String text2) {
if (text1 == null && text2 == null) {
return true;
}
if (text1 == null || text2 == null) {
return false;
}
return text1.compareToIgnoreCase(text2) == 0;
}
}
| 606 |
7,076 | <gh_stars>1000+
/*
* Copyright (C) 2017. Uber Technologies
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.uber.rib;
import android.app.Application;
import com.squareup.leakcanary.LeakCanary;
import com.squareup.leakcanary.RefWatcher;
import com.uber.rib.core.RibRefWatcher;
import java.util.concurrent.TimeUnit;
public class SampleApplication extends Application {
@Override
public void onCreate() {
super.onCreate();
if (!LeakCanary.isInAnalyzerProcess(this)) {
      // This is the normal app process, not LeakCanary's dedicated heap-analysis
      // process, so it is safe to initialize leak detection here.
installLeakCanary();
}
}
/** Install leak canary for both activities and RIBs. */
private void installLeakCanary() {
final RefWatcher refWatcher =
LeakCanary.refWatcher(this).watchDelay(2, TimeUnit.SECONDS).buildAndInstall();
LeakCanary.install(this);
RibRefWatcher.getInstance()
.setReferenceWatcher(
new RibRefWatcher.ReferenceWatcher() {
@Override
public void watch(Object object) {
refWatcher.watch(object);
}
@Override
public void logBreadcrumb(String eventType, String data, String parent) {
// Ignore for now. Useful for collecting production analytics.
}
});
RibRefWatcher.getInstance().enableLeakCanary();
}
}
| 690 |
1,338 | <filename>src/bin/i2c/i2c.cpp
/*
* Copyright 2020, <NAME>, <EMAIL>.
* Distributed under the terms of the MIT license.
*/
#include <errno.h>
#include <fcntl.h>
#include <getopt.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <Drivers.h>
#include <AutoDeleter.h>
#include "i2c.h"
static struct option const kLongOptions[] = {
{"help", no_argument, 0, 'h'},
{NULL}
};
extern const char *__progname;
static const char *kProgramName = __progname;
void
usage(int returnValue)
{
fprintf(stderr, "Usage: %s <path-to-i2c-bus-device>\n", kProgramName);
exit(returnValue);
}
static int
scan_bus(const char *path)
{
int err = EXIT_SUCCESS;
int fd = open(path, O_RDONLY);
if (fd < 0) {
fprintf(stderr, "%s: Could not access path: %s\n", kProgramName,
strerror(errno));
return EXIT_FAILURE;
}
setbuf(stdout, NULL);
printf("Scanning I2C bus: %s\n", path);
FileDescriptorCloser closer(fd);
printf(" 0 1 2 3 4 5 6 7 8 9 a b c d e f\n");
for (int i = 0; i < 128; i+=16) {
printf("%02x: ", i);
for (int j = 0; j < 16; j++) {
uint16 addr = i + j;
uint8 cmd = 0;
uint8 data = 0;
i2c_ioctl_exec exec;
exec.addr = addr;
exec.op = I2C_OP_READ_STOP;
exec.cmdBuffer = &cmd;
exec.cmdLength = sizeof(cmd);
exec.buffer = &data;
exec.bufferLength = sizeof(data);
if (ioctl(fd, I2CEXEC, &exec, sizeof(exec)) == 0)
printf("%02x ", addr);
else
printf("-- ");
}
printf("\n");
}
	// fd is closed by the FileDescriptorCloser when it goes out of scope.
return err;
}
int
main(int argc, char** argv)
{
int c;
while ((c = getopt_long(argc, argv, "h", kLongOptions, NULL)) != -1) {
switch (c) {
case 0:
break;
case 'h':
usage(0);
break;
default:
usage(1);
break;
}
}
if (argc - optind < 1)
usage(1);
const char* path = argv[optind++];
exit(scan_bus(path));
}
| 871 |
622 | <filename>Week 4/week4-076.Menu/test/MenuTest.java<gh_stars>100-1000
import fi.helsinki.cs.tmc.edutestutils.MockStdio;
import fi.helsinki.cs.tmc.edutestutils.Points;
import fi.helsinki.cs.tmc.edutestutils.ReflectionUtils;
import fi.helsinki.cs.tmc.edutestutils.Reflex;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.*;
import static org.junit.Assert.*;
public class MenuTest {
@Rule
public MockStdio stdio = new MockStdio();
@Test
@Points("76.1")
public void hasMethodAddMeal() throws Throwable {
String klassName = "Menu";
String metodi = "addMeal";
Reflex.ClassRef<Object> tuoteClass = Reflex.reflect(klassName);
Object olio = tuoteClass.constructor().takingNoParams().invoke();
assertTrue("Add class " + klassName + " the method: public void " + metodi + "(String meal) ", tuoteClass.method(olio, metodi)
.returningVoid().taking(String.class).isPublic());
String v = "\nThe code that caused the fault m = new Menu(); m.addMeal(\"Bratwurst\");";
tuoteClass.method(olio, metodi)
.returningVoid().taking(String.class).withNiceError(v).invoke("Bratwurst");
}
@Test
@Points("76.1")
public void addMealAddsTheMealToAttribute() throws Throwable {
Field ateriatField = null;
try {
ateriatField = Menu.class.getDeclaredField("meals");
} catch (NoSuchFieldException ex) {
fail("Ensure that class Menu has the attribute private ArrayList<String> meals;");
}
Menu lista = new Menu();
ateriatField.setAccessible(true);
Method m = ReflectionUtils.requireMethod(Menu.class, "addMeal", String.class);
ReflectionUtils.invokeMethod(void.class, m, lista, "eka");
try {
ArrayList<String> ateriat = (ArrayList<String>) ateriatField.get(lista);
if (ateriat.size() != 1) {
fail("calling addMeal should add the meal to the ArrayList meals");
}
ReflectionUtils.invokeMethod(void.class, m, lista, "toka");
if (ateriat.size() != 2) {
fail("After adding two differently named meals, the ArrayList "
+ "meals should have size 2.");
}
} catch (IllegalArgumentException ex) {
Logger.getLogger(MenuTest.class.getName()).log(Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
Logger.getLogger(MenuTest.class.getName()).log(Level.SEVERE, null, ex);
}
}
@Test
@Points("76.1")
public void addMealAddsSameMealOnlyOnce() throws NoSuchFieldException {
Field ateriatField = Menu.class.getDeclaredField("meals");
ateriatField.setAccessible(true);
Menu lista = new Menu();
Method m = ReflectionUtils.requireMethod(Menu.class, "addMeal", String.class);
try {
ReflectionUtils.invokeMethod(void.class, m, lista, "eka");
ReflectionUtils.invokeMethod(void.class, m, lista, "eka");
} catch (Throwable ex) {
Logger.getLogger(MenuTest.class.getName()).log(Level.SEVERE, null, ex);
}
ArrayList<String> ateriat;
try {
ateriat = (ArrayList<String>) ateriatField.get(lista);
if (ateriat.size() != 1) {
fail("Same meal should go to list only once!");
}
} catch (IllegalArgumentException ex) {
Logger.getLogger(MenuTest.class.getName()).log(Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
Logger.getLogger(MenuTest.class.getName()).log(Level.SEVERE, null, ex);
}
}
@Test
@Points("76.2")
public void hasMethodPrintMeals() throws Throwable {
String klassName = "Menu";
String metodi = "printMeals";
Reflex.ClassRef<Object> tuoteClass = Reflex.reflect(klassName);
Object olio = tuoteClass.constructor().takingNoParams().invoke();
assertTrue("Add the class " + klassName + " the method: public void " + metodi + "() ", tuoteClass.method(olio, metodi)
.returningVoid().takingNoParams().isPublic());
String v = "\nThe code that caused the fault m = new Menu(); m.printMeals();";
tuoteClass.method(olio, metodi)
.returningVoid().takingNoParams().withNiceError(v).invoke();
}
@Test
@Points("76.2")
public void printMealsWorks() {
String porkkanaSoppa = "Le porkkanaSuppa";
String kinkkukiusaus = "Kinkkukiusaus";
Menu lista = new Menu();
Method addMeal = ReflectionUtils.requireMethod(Menu.class, "addMeal", String.class);
try {
ReflectionUtils.invokeMethod(void.class, addMeal, lista, porkkanaSoppa);
ReflectionUtils.invokeMethod(void.class, addMeal, lista, kinkkukiusaus);
} catch (Throwable ex) {
fail("Ensure that printing of meals works");
}
Method m = ReflectionUtils.requireMethod(Menu.class, "printMeals");
try {
ReflectionUtils.invokeMethod(void.class, m, lista);
} catch (Throwable ex) {
fail("Ensure that printing of meals works when there are more than one meal added");
}
String out = stdio.getSysOut();
assertTrue("Ensure that printMeals prints all the added meals", out.contains(porkkanaSoppa) && out.contains(kinkkukiusaus));
assertTrue("Ensure that printMeals prints each meal to separate line. Now output is "+out, out.split("\n").length>1);
}
@Test
@Points("76.2")
public void hasMethodClearMenu() throws Throwable {
String klassName = "Menu";
String metodi = "clearMenu";
Reflex.ClassRef<Object> tuoteClass = Reflex.reflect(klassName);
Object olio = tuoteClass.constructor().takingNoParams().invoke();
assertTrue("Add the class " + klassName + " has method public void " + metodi + "() ", tuoteClass.method(olio, metodi)
.returningVoid().takingNoParams().isPublic());
String v = "\nThe code that caused the fault m = new Menu(); m.clearMenu();";
tuoteClass.method(olio, metodi)
.returningVoid().takingNoParams().withNiceError(v).invoke();
}
@Test
@Points("76.3")
public void clearingMenuWorks() throws Throwable {
Field ateriatField = null;
ateriatField = Menu.class.getDeclaredField("meals");
ateriatField.setAccessible(true);
Menu lista = new Menu();
Method addMeal = ReflectionUtils.requireMethod(Menu.class, "addMeal", String.class);
ReflectionUtils.invokeMethod(void.class, addMeal, lista, "eka");
ReflectionUtils.invokeMethod(void.class, addMeal, lista, "toka");
Method tyhjenna = ReflectionUtils.requireMethod(Menu.class, "clearMenu");
ReflectionUtils.invokeMethod(void.class, tyhjenna, lista);
try {
ArrayList<String> ateriat = (ArrayList<String>) ateriatField.get(lista);
if (ateriat == null) {
fail("Do not set ArrayList meals to null when clearing the menu");
}
if (!ateriat.isEmpty()) {
fail("ArrayList meals should have size of zero after a call to clearMenu");
}
} catch (IllegalArgumentException ex) {
Logger.getLogger(MenuTest.class.getName()).log(Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
Logger.getLogger(MenuTest.class.getName()).log(Level.SEVERE, null, ex);
}
Method m = ReflectionUtils.requireMethod(Menu.class, "printMeals");
try {
ReflectionUtils.invokeMethod(void.class, m, lista);
} catch (Throwable ex) {
fail("Ensure that printing meals works");
}
String out = stdio.getSysOut();
out = out.trim();
if (!out.isEmpty()) {
fail("After menu has been cleared, call to printMeals should not print anything");
}
}
}
| 3,500 |
808 | // Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fake_ddk/graph.h"
#include <stdio.h>
#include <string.h>
#include <cmath>
#include <iostream>
#include "logging.h" // NOLINT
#include "utility.h" // NOLINT
namespace fake_ddk {
Graph::Graph() {}
Graph::Graph(const std::vector<uint8_t>& buffer) {
FAKE_DDK_CHECK(deserialize_graph_from_buffer(this, buffer) ==
StatusType::SUCCESS);
}
Graph::~Graph() { Clear(); }
void Graph::Clear() {
for (auto& tensor : tensors_) {
if (tensor.lifetime != LifeTimeType::INPUT && tensor.buffer &&
tensor.length > 0) {
free(tensor.buffer);
tensor.buffer = nullptr;
tensor.length = 0;
}
}
tensors_.clear();
operators_.clear();
input_tensors_.clear();
output_tensors_.clear();
}
Tensor* Graph::AddTensor(const TensorAttr& attr, void* data) {
tensors_.emplace_back();
auto tensor = &tensors_.back();
tensor->attr = attr;
if (data) {
tensor->length = get_tensor_buffer_length(attr);
tensor->buffer = malloc(tensor->length);
FAKE_DDK_CHECK(tensor->buffer)
<< "Failed to allocate buffer for a constant tensor, out of memory!";
memcpy(tensor->buffer, data, tensor->length);
tensor->lifetime = LifeTimeType::CONSTANT;
} else {
tensor->length = 0;
tensor->buffer = nullptr;
tensor->lifetime = LifeTimeType::TEMPORARY_VARIABLE;
}
return tensor;
}
Operator* Graph::AddOperator(OperatorType type,
const std::vector<Tensor*>& input_tensors,
const std::vector<Tensor*>& output_tensors,
void* attr) {
operators_.emplace_back();
auto op = &operators_.back();
auto input_count = input_tensors.size();
auto output_count = output_tensors.size();
switch (type) {
case OperatorType::FAKE_DDK_CONV2D: {
op->attr.conv2d_attr = *static_cast<Conv2DAttr*>(attr);
FAKE_DDK_CHECK_EQ(input_count, 3); // input, filter and bias
FAKE_DDK_CHECK_EQ(output_count, 1); // output
} break;
default:
FAKE_DDK_LOG(FATAL) << "Unsupported op type " << static_cast<int>(type)
<< "!";
return nullptr;
}
op->type = type;
op->input_tensors = input_tensors;
op->output_tensors = output_tensors;
return op;
}
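// Typical build sequence (sketch; attrs and data pointers are illustrative):
//   Graph graph;
//   auto* input = graph.AddTensor(input_attr, nullptr);        // variable
//   auto* filter = graph.AddTensor(filter_attr, filter_data);  // constant
//   auto* bias = graph.AddTensor(bias_attr, bias_data);        // constant
//   auto* output = graph.AddTensor(output_attr, nullptr);
//   graph.AddOperator(OperatorType::FAKE_DDK_CONV2D,
//                     {input, filter, bias}, {output}, &conv2d_attr);
//   graph.IdentifyInputsAndOutputs({input}, {output});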
int Graph::IdentifyInputsAndOutputs(
const std::vector<Tensor*>& input_tensors,
const std::vector<Tensor*>& output_tensors) {
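  // A tensor is valid only if it lives in this graph's own tensor list
  // (checked by pointer identity).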
auto IsValid = [&](const Tensor* candidate) {
bool found = false;
for (auto& tensor : tensors_) {
if (candidate == &tensor) {
found = true;
break;
}
}
return found;
};
auto input_count = input_tensors.size();
auto output_count = output_tensors.size();
for (size_t i = 0; i < input_count; i++) {
auto tensor = input_tensors[i];
FAKE_DDK_CHECK(IsValid(tensor)) << "Failed to find input tensor " << i
<< " in graph !";
FAKE_DDK_CHECK(tensor->lifetime != LifeTimeType::CONSTANT)
<< "Input tensor should not be a constant tensor!";
tensor->lifetime = LifeTimeType::INPUT;
}
for (size_t i = 0; i < output_count; i++) {
auto tensor = output_tensors[i];
FAKE_DDK_CHECK(IsValid(tensor)) << "Failed to find output tensor " << i
<< " in graph !";
FAKE_DDK_CHECK(tensor->lifetime != LifeTimeType::CONSTANT)
<< "Output tensor should not be a constant tensor!";
tensor->lifetime = LifeTimeType::OUTPUT;
}
input_tensors_ = input_tensors;
output_tensors_ = output_tensors;
return StatusType::SUCCESS;
}
int Graph::QueryInputsAndOutputs(std::vector<TensorAttr>* input_attrs,
std::vector<TensorAttr>* output_attrs) {
auto input_count = input_tensors_.size();
auto output_count = output_tensors_.size();
input_attrs->resize(input_count);
output_attrs->resize(output_count);
for (size_t i = 0; i < input_count; i++) {
input_attrs->at(i) = input_tensors_[i]->attr;
}
for (size_t i = 0; i < output_count; i++) {
output_attrs->at(i) = output_tensors_[i]->attr;
}
return StatusType::SUCCESS;
}
} // namespace fake_ddk
| 2,000 |
9,724 | #include <unistd.h>
#include <signal.h>
#include "syscall.h"
#include "libc.h"
#ifdef __EMSCRIPTEN__
int __setxid_emscripten() {
errno = EPERM; // we don't allow dynamic syscalls, and don't need to support these anyhow
return -1;
}
#else
struct ctx {
int id, eid, sid;
int nr, ret;
};
static void do_setxid(void *p)
{
struct ctx *c = p;
if (c->ret<0) return;
int ret = __syscall(c->nr, c->id, c->eid, c->sid);
if (ret && !c->ret) {
/* If one thread fails to set ids after another has already
* succeeded, forcibly killing the process is the only safe
* thing to do. State is inconsistent and dangerous. Use
* SIGKILL because it is uncatchable. */
__block_all_sigs(0);
__syscall(SYS_kill, __syscall(SYS_getpid), SIGKILL);
}
	c->ret = ret;
}
int __setxid(int nr, int id, int eid, int sid)
{
/* ret is initially nonzero so that failure of the first thread does not
* trigger the safety kill above. */
struct ctx c = { .nr = nr, .id = id, .eid = eid, .sid = sid, .ret = 1 };
__synccall(do_setxid, &c);
return __syscall_ret(c.ret);
}
#endif
| 433 |
480 | /*
* Copyright [2013-2021], Alibaba Group Holding Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.polardbx.executor.handler;
import com.alibaba.polardbx.rpc.CdcRpcClient;
import com.alibaba.polardbx.rpc.cdc.CdcServiceGrpc.CdcServiceBlockingStub;
import com.alibaba.polardbx.rpc.cdc.MasterStatus;
import com.alibaba.polardbx.rpc.cdc.Request;
import com.alibaba.polardbx.executor.cursor.Cursor;
import com.alibaba.polardbx.executor.cursor.impl.ArrayResultCursor;
import com.alibaba.polardbx.executor.spi.IRepository;
import com.alibaba.polardbx.optimizer.context.ExecutionContext;
import com.alibaba.polardbx.optimizer.core.datatype.DataTypes;
import io.grpc.Channel;
import io.grpc.ManagedChannel;
import org.apache.calcite.rel.RelNode;
/**
 * Handler for the {@code SHOW MASTER STATUS} statement; fetches the master
 * status from the CDC service over gRPC.
 */
public class LogicalShowMasterStatusHandler extends HandlerCommon {
public LogicalShowMasterStatusHandler(IRepository repo) {
super(repo);
}
@Override
public Cursor handle(RelNode logicalPlan, ExecutionContext executionContext) {
final CdcServiceBlockingStub blockingStub = CdcRpcClient.getCdcRpcClient()
.getCdcServiceBlockingStub();
MasterStatus masterStatus = blockingStub.showMasterStatus(
Request.newBuilder().build());
ArrayResultCursor result = new ArrayResultCursor("SHOW MASTER STATUS");
result.addColumn("File", DataTypes.StringType);
result.addColumn("Position", DataTypes.LongType);
result.addColumn("Binlog_Do_DB", DataTypes.StringType);
result.addColumn("Binlog_Ignore_DB", DataTypes.StringType);
result.addColumn("Executed_Gtid_Set", DataTypes.StringType);
result.initMeta();
result.addRow(new Object[] {
masterStatus.getFile(), masterStatus.getPosition(), masterStatus.getBinlogDoDB(),
masterStatus.getBinlogIgnoreDB(), masterStatus.getExecutedGtidSet()});
Channel channel = blockingStub.getChannel();
if (channel instanceof ManagedChannel) {
((ManagedChannel) channel).shutdown();
}
return result;
}
}
| 932 |
12,278 | <reponame>markuspf/velocypack
////////////////////////////////////////////////////////////////////////////////
/// @brief Library to build up VPack documents.
///
/// DISCLAIMER
///
/// Copyright 2015 ArangoDB GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author <NAME>
/// @author <NAME>
/// @author Copyright 2015, ArangoDB GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
#include "tests-common.h"
TEST(VersionTest, TestCompare) {
ASSERT_EQ(0, Version::compare(Version(1, 0, 0), Version(1, 0, 0)));
ASSERT_EQ(0, Version::compare(Version(1, 2, 3), Version(1, 2, 3)));
ASSERT_EQ(0, Version::compare(Version(0, 0, 1), Version(0, 0, 1)));
ASSERT_EQ(1, Version::compare(Version(2, 0, 0), Version(1, 0, 0)));
ASSERT_EQ(1, Version::compare(Version(1, 1, 0), Version(1, 0, 0)));
ASSERT_EQ(1, Version::compare(Version(1, 1, 0), Version(1, 0, 1)));
ASSERT_EQ(1, Version::compare(Version(1, 0, 1), Version(1, 0, 0)));
ASSERT_EQ(1, Version::compare(Version(1, 1, 1), Version(1, 0, 0)));
ASSERT_EQ(1, Version::compare(Version(1, 1, 1), Version(1, 0, 1)));
ASSERT_EQ(1, Version::compare(Version(1, 1, 1), Version(1, 1, 0)));
ASSERT_EQ(-1, Version::compare(Version(1, 0, 0), Version(2, 0, 0)));
ASSERT_EQ(-1, Version::compare(Version(1, 0, 0), Version(1, 1, 0)));
ASSERT_EQ(-1, Version::compare(Version(1, 0, 1), Version(1, 1, 0)));
ASSERT_EQ(-1, Version::compare(Version(1, 0, 0), Version(1, 0, 1)));
ASSERT_EQ(-1, Version::compare(Version(1, 0, 0), Version(1, 1, 1)));
ASSERT_EQ(-1, Version::compare(Version(1, 0, 1), Version(1, 1, 1)));
ASSERT_EQ(-1, Version::compare(Version(1, 1, 0), Version(1, 1, 1)));
}
TEST(VersionTest, TestDigits) {
int major = Version::BuildVersion.majorValue;
ASSERT_TRUE(major >= 0 && major <= 10);
int minor = Version::BuildVersion.minorValue;
ASSERT_TRUE(minor >= 0 && minor <= 10);
int patch = Version::BuildVersion.patchValue;
ASSERT_TRUE(patch >= 0 && patch <= 999);
}
TEST(VersionTest, TestFormat) {
std::string version = Version::BuildVersion.toString();
int majors = 0;
int minors = 0;
int patch = 0;
char const* p = version.c_str();
while (*p && *p >= '0' && *p <= '9') {
majors++;
++p;
}
ASSERT_TRUE(majors > 0);
ASSERT_EQ('.', *p);
++p;
while (*p && *p >= '0' && *p <= '9') {
minors++;
++p;
}
ASSERT_TRUE(minors > 0);
ASSERT_EQ('.', *p);
++p;
while (*p && *p >= '0' && *p <= '9') {
patch++;
++p;
}
ASSERT_TRUE(patch > 0);
ASSERT_EQ('\0', *p);
}
int main(int argc, char* argv[]) {
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
| 1,227 |
577 | <filename>Lib/json/tests/test_dunderdict.py<gh_stars>100-1000
from cStringIO import StringIO
from json.tests import PyTest, CTest
class WrapDict(object):
def __init__(self, d):
# Force a copy of the items in d, otherwise __dict__ will be a
# PyDictionary, instead of the desired PyStringMap for this
# testing
self.__dict__.update(d)
class TestDunderDictDump(object):
def use_dunderdict(self, d):
return WrapDict(d).__dict__
def test_dump(self):
sio = StringIO()
self.json.dump(self.use_dunderdict({}), sio)
self.assertEqual(sio.getvalue(), '{}')
def test_dumps(self):
self.assertEqual(self.dumps(self.use_dunderdict({})), '{}')
def test_encode_truefalse(self):
self.assertEqual(self.dumps(
self.use_dunderdict({True: False, False: True}), sort_keys=True),
'{"false": true, "true": false}')
self.assertEqual(self.dumps(
self.use_dunderdict({2: 3.0, 4.0: 5L, False: 1, 6L: True}), sort_keys=True),
'{"false": 1, "2": 3.0, "4.0": 5, "6": true}')
class TestPyDump(TestDunderDictDump, PyTest): pass
class TestCDump(TestDunderDictDump, CTest): pass
| 545 |
405 | <gh_stars>100-1000
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 14:58:00 2019
@author: truthless
"""
import os
from convlab.modules.word_policy.multiwoz.hdsa.predictor import HDSA_predictor
from convlab.modules.word_policy.multiwoz.hdsa.generator import HDSA_generator
from convlab.modules.policy.system.policy import SysPolicy
DEFAULT_DIRECTORY = "models"
DEFAULT_ARCHIVE_FILE = os.path.join(DEFAULT_DIRECTORY, "hdsa.zip")
class HDSA(SysPolicy):
def __init__(self, archive_file=DEFAULT_ARCHIVE_FILE, model_file=None, use_cuda=False):
self.predictor = HDSA_predictor(archive_file, model_file, use_cuda)
self.generator = HDSA_generator(archive_file, model_file, use_cuda)
def init_session(self):
self.generator.init_session()
def predict(self, state):
act, kb = self.predictor.predict(state)
response = self.generator.generate(state, act, kb)
return response
| 439 |
416 | <reponame>spring-operator/spring-roo
package org.springframework.roo.classpath.layers;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.util.Arrays;
import org.junit.Test;
import org.springframework.roo.model.JavaType;
/**
* Unit test of {@link MemberTypeAdditions}
*
* @author <NAME>
* @since 1.2.0
*/
public class MemberTypeAdditionsTest {
/**
* Asserts that
* {@link MemberTypeAdditions#buildMethodCall(String, String, java.util.Iterator)}
* builds the expected method call from the given parameters
*
* @param expectedMethodCall
* @param target
* @param method
* @param parameterNames
*/
private void assertMethodCall(final String expectedMethodCall, final String target,
final String method, final MethodParameter... parameters) {
assertEquals(expectedMethodCall,
MemberTypeAdditions.buildMethodCall(target, method, Arrays.asList(parameters)));
}
@Test
public void testGetInvokedFieldWhenBuilderIsNull() {
// Set up
final MemberTypeAdditions memberTypeAdditions =
new MemberTypeAdditions(null, "foo", "foo()", false, null);
// Invoke and check
assertNull(memberTypeAdditions.getInvokedField());
}
@Test
public void testGetMethodCallWithBlankTargetAndNoParameters() {
assertMethodCall("foo()", null, "foo");
}
@Test
public void testGetMethodCallWithBlankTargetAndTwoParameters() {
final MethodParameter firstNameParameter = new MethodParameter(JavaType.STRING, "firstName");
final MethodParameter lastNameParameter = new MethodParameter(JavaType.STRING, "lastName");
assertMethodCall("matchmakingService.marry(firstName, lastName)", "matchmakingService",
"marry", firstNameParameter, lastNameParameter);
}
@Test
public void testGetMethodCallWithTargetAndNoParameters() {
assertMethodCall("Foo.bar()", "Foo", "bar");
}
}
| 610 |
335 | {
"word": "Vignette",
"definitions": [
"Portray (someone) in the style of a vignette.",
"Produce (a photograph) in the style of a vignette by softening or shading away the edges of the subject."
],
"parts-of-speech": "Verb"
} | 102 |
330 | from pybamm import exp, constants
def graphite_diffusivity_Kim2011(sto, T):
"""
Graphite diffusivity [1].
References
----------
.. [1] <NAME>., <NAME>., <NAME>., <NAME>., & <NAME>.
(2011). Multi-domain modeling of lithium-ion batteries encompassing
multi-physics in varied length scales. Journal of The Electrochemical
Society, 158(8), A955-A969.
Parameters
----------
sto: :class:`pybamm.Symbol`
Electrode stochiometry
T: :class:`pybamm.Symbol`
Dimensional temperature
Returns
-------
:class:`pybamm.Symbol`
Solid diffusivity
"""
D_ref = 9 * 10 ** (-14)
E_D_s = 4e3
arrhenius = exp(E_D_s / constants.R * (1 / 298.15 - 1 / T))
return D_ref * arrhenius
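# Sanity check: at the reference temperature T = 298.15 K the Arrhenius factor is
# exp(0) = 1, so graphite_diffusivity_Kim2011(sto, 298.15) returns D_ref = 9e-14
# for any stoichiometry.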
| 318 |
463 | #!/usr/bin/env python
from mpi4py import MPI
from pposgd_mpi.common import set_global_seeds
from pposgd_mpi import bench
from pposgd_mpi.common.mpi_fork import mpi_fork
import os.path as osp
import gym, logging
from pposgd_mpi.common import logger
import sys
def wrap_train(env):
from pposgd_mpi.common.atari_wrappers import (wrap_deepmind, FrameStack)
env = wrap_deepmind(env, clip_rewards=True)
env = FrameStack(env, 4)
return env
def train(env_id, num_timesteps, seed, num_cpu):
from pposgd_mpi import pposgd_simple, cnn_policy
import pposgd_mpi.common.tf_util as U
whoami = mpi_fork(num_cpu)
if whoami == "parent": return
rank = MPI.COMM_WORLD.Get_rank()
sess = U.single_threaded_session()
sess.__enter__()
logger.session().__enter__()
if rank != 0: logger.set_level(logger.DISABLED)
workerseed = seed + 10000 * MPI.COMM_WORLD.Get_rank()
set_global_seeds(workerseed)
env = gym.make(env_id)
def policy_fn(name, ob_space, ac_space): #pylint: disable=W0613
return cnn_policy.CnnPolicy(name=name, ob_space=ob_space, ac_space=ac_space)
env = bench.Monitor(env, osp.join(logger.get_dir(), "%i.monitor.json" % rank))
env.seed(workerseed)
gym.logger.setLevel(logging.WARN)
env = wrap_train(env)
num_timesteps /= 4 # because we're wrapping the envs to do frame skip
env.seed(workerseed)
pposgd_simple.learn(env, policy_fn,
max_timesteps=num_timesteps,
timesteps_per_batch=256,
clip_param=0.2, entcoeff=0.01,
optim_epochs=4, optim_stepsize=1e-3, optim_batchsize=64,
gamma=0.99, lam=0.95,
schedule='linear'
)
env.close()
def main():
train('PongNoFrameskip-v4', num_timesteps=40e6, seed=0, num_cpu=8)
if __name__ == '__main__':
main()
| 785 |
2,338 | //===- TestAttributes.cpp - MLIR Test Dialect Attributes --------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains attributes defined by the TestDialect for testing various
// features of MLIR.
//
//===----------------------------------------------------------------------===//
#include "TestAttributes.h"
#include "TestDialect.h"
#include "mlir/IR/Builders.h"
#include "mlir/IR/DialectImplementation.h"
#include "mlir/IR/Types.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/TypeSwitch.h"
using namespace mlir;
using namespace test;
//===----------------------------------------------------------------------===//
// AttrWithSelfTypeParamAttr
//===----------------------------------------------------------------------===//
Attribute AttrWithSelfTypeParamAttr::parse(DialectAsmParser &parser,
Type type) {
Type selfType;
if (parser.parseType(selfType))
return Attribute();
return get(parser.getContext(), selfType);
}
void AttrWithSelfTypeParamAttr::print(DialectAsmPrinter &printer) const {
printer << "attr_with_self_type_param " << getType();
}
//===----------------------------------------------------------------------===//
// AttrWithTypeBuilderAttr
//===----------------------------------------------------------------------===//
Attribute AttrWithTypeBuilderAttr::parse(DialectAsmParser &parser, Type type) {
IntegerAttr element;
if (parser.parseAttribute(element))
return Attribute();
return get(parser.getContext(), element);
}
void AttrWithTypeBuilderAttr::print(DialectAsmPrinter &printer) const {
printer << "attr_with_type_builder " << getAttr();
}
//===----------------------------------------------------------------------===//
// CompoundAAttr
//===----------------------------------------------------------------------===//
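// Parses the textual form produced by CompoundAAttr::print below, e.g. (with
// illustrative values): cmpnd_a<42, i32, [1, 2, 3]>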
Attribute CompoundAAttr::parse(DialectAsmParser &parser, Type type) {
int widthOfSomething;
Type oneType;
SmallVector<int, 4> arrayOfInts;
if (parser.parseLess() || parser.parseInteger(widthOfSomething) ||
parser.parseComma() || parser.parseType(oneType) || parser.parseComma() ||
parser.parseLSquare())
return Attribute();
int intVal;
while (!*parser.parseOptionalInteger(intVal)) {
arrayOfInts.push_back(intVal);
if (parser.parseOptionalComma())
break;
}
if (parser.parseRSquare() || parser.parseGreater())
return Attribute();
return get(parser.getContext(), widthOfSomething, oneType, arrayOfInts);
}
void CompoundAAttr::print(DialectAsmPrinter &printer) const {
printer << "cmpnd_a<" << getWidthOfSomething() << ", " << getOneType()
<< ", [";
llvm::interleaveComma(getArrayOfInts(), printer);
printer << "]>";
}
//===----------------------------------------------------------------------===//
// TestI64ElementsAttr
//===----------------------------------------------------------------------===//
Attribute TestI64ElementsAttr::parse(DialectAsmParser &parser, Type type) {
SmallVector<uint64_t> elements;
if (parser.parseLess() || parser.parseLSquare())
return Attribute();
uint64_t intVal;
while (succeeded(*parser.parseOptionalInteger(intVal))) {
elements.push_back(intVal);
if (parser.parseOptionalComma())
break;
}
if (parser.parseRSquare() || parser.parseGreater())
return Attribute();
return parser.getChecked<TestI64ElementsAttr>(
parser.getContext(), type.cast<ShapedType>(), elements);
}
void TestI64ElementsAttr::print(DialectAsmPrinter &printer) const {
printer << "i64_elements<[";
llvm::interleaveComma(getElements(), printer);
printer << "] : " << getType() << ">";
}
LogicalResult
TestI64ElementsAttr::verify(function_ref<InFlightDiagnostic()> emitError,
ShapedType type, ArrayRef<uint64_t> elements) {
if (type.getNumElements() != static_cast<int64_t>(elements.size())) {
return emitError()
<< "number of elements does not match the provided shape type, got: "
<< elements.size() << ", but expected: " << type.getNumElements();
}
if (type.getRank() != 1 || !type.getElementType().isSignlessInteger(64))
return emitError() << "expected single rank 64-bit shape type, but got: "
<< type;
return success();
}
//===----------------------------------------------------------------------===//
// Tablegen Generated Definitions
//===----------------------------------------------------------------------===//
#include "TestAttrInterfaces.cpp.inc"
#define GET_ATTRDEF_CLASSES
#include "TestAttrDefs.cpp.inc"
//===----------------------------------------------------------------------===//
// TestDialect
//===----------------------------------------------------------------------===//
void TestDialect::registerAttributes() {
addAttributes<
#define GET_ATTRDEF_LIST
#include "TestAttrDefs.cpp.inc"
>();
}
Attribute TestDialect::parseAttribute(DialectAsmParser &parser,
Type type) const {
StringRef attrTag;
if (failed(parser.parseKeyword(&attrTag)))
return Attribute();
{
Attribute attr;
auto parseResult = generatedAttributeParser(parser, attrTag, type, attr);
if (parseResult.hasValue())
return attr;
}
parser.emitError(parser.getNameLoc(), "unknown test attribute");
return Attribute();
}
void TestDialect::printAttribute(Attribute attr,
DialectAsmPrinter &printer) const {
if (succeeded(generatedAttributePrinter(attr, printer)))
return;
}
| 1,849 |
325 | package com.box.l10n.mojito.cli.command;
import com.beust.jcommander.IStringConverter;
import com.beust.jcommander.ParameterException;
import com.box.l10n.mojito.cli.filefinder.file.FileType;
import com.box.l10n.mojito.cli.filefinder.file.FileTypes;
import java.util.Arrays;
/**
*
* @author jaurambault
*/
public class FileTypeConverter implements IStringConverter<FileType> {
@Override
public FileType convert(String value) {
FileType fileType = null;
if (value != null) {
try {
fileType = com.box.l10n.mojito.cli.filefinder.file.FileTypes.valueOf(value.toUpperCase()).toFileType();
} catch (IllegalArgumentException iae) {
String msg = "Invalid file type [" + value + "], should be one of: " + Arrays.toString(FileTypes.values());
throw new ParameterException(msg);
}
}
return fileType;
}
}
| 397 |
314 | <filename>schemas/gltf-2.0/extensions/KHR_materials_clearcoat/glTF.KHR_materials_clearcoat.schema.json
{
"$schema" : "http://json-schema.org/draft-04/schema",
"title" : "KHR_materials_clearcoat glTF extension",
"type" : "object",
"description" : "glTF extension that defines the clearcoat material layer.",
"allOf" : [
{
"$ref" : "../../glTFProperty.schema.json"
}
],
"properties" : {
"clearcoatFactor" : {
"type" : "number",
"description" : "The clearcoat layer intensity (aka opacity) of the material. A value of 0.0 means the material has no clearcoat layer enabled.",
"default" : 0,
"minimum" : 0,
"maximum" : 1,
"short_description" : "The clearcoat layer intensity."
},
"clearcoatTexture" : {
"allOf" : [
{
"$ref" : "../../textureInfo.schema.json"
}
],
"description" : "The clearcoat layer intensity texture. These values are sampled from the R channel. The values are linear. Use value 1.0 if no texture is supplied.",
"short_description" : "The clearcoat layer intensity texture."
},
"clearcoatRoughnessFactor" : {
"type" : "number",
"description" : "The clearcoat layer roughness of the material.",
"default" : 0,
"minimum" : 0,
"maximum" : 1,
"short_description" : "The clearcoat layer roughness."
},
"clearcoatRoughnessTexture" : {
"allOf" : [
{
"$ref" : "../../textureInfo.schema.json"
}
],
"description" : "The clearcoat layer roughness texture. These values are sampled from the G channel. The values are linear. Use value 1.0 if no texture is supplied.",
"short_description" : "The clearcoat layer roughness texture."
},
"clearcoatNormalTexture" : {
"allOf" : [
{
"$ref" : "../../material.normalTextureInfo.schema.json"
}
],
"description" : "A tangent space normal map for the clearcoat layer. If desired, this may be a reference to the same normal map used by the base material. If not supplied, no normal mapping is applied to the clear coat layer.",
"short_description" : "The clearcoat normal map texture."
},
"extensions" : {},
"extras" : {}
}
}
| 1,177 |
3,055 | /*
Fontname: -FreeType-FreeUniversal-Bold-R-Normal--49-490-72-72-P-243-ISO10646-1
Copyright: FreeUniveral (c) <NAME> 2009 Original Font Sil-Sophia Copyright (c) SIL International, 1994-2008.
Glyphs: 45/243
BBX Build Mode: 0
*/
const uint8_t u8g2_font_fub35_t_symbol[2713] U8G2_FONT_SECTION("u8g2_font_fub35_t_symbol") =
"-\0\5\4\7\6\5\6\10D\61\0\366#\367$\372\0\0\0\0\4\373 \7\0\0\202\217\0#_"
"\237Q\202\243\255\224\362\212)\257\230\362\212!\257\230\362\212)\257\230\342\212)\257\230\242\36X\345\1U"
"\36X\345\201\245\212)\256\230\362\212)\257\230\362\210)\257\230\362\212)\257\224\242\36X\345\201U\36P"
"\345\201\245\212)\257\230\362\210)\257\230\362\212)\257\30\362\212)\257\230\362\212)\256\230\322\0$a\32"
"\66n\234k\334\331d\355\245\7\306y\240\230\7JAa\10D\216\30\343\32\203\230q\306 G\234\61"
"\310\21g\214v\306hH\214\326\34s\217= \326\3b=\20\232\307\206Hl\14\304\6yd\24G"
"Fqd\24G\6\71\342\214A\216@b\10D\36\70\344\1c\36 \351\65\26\213\35w\306\0%\206"
"\255R~\261\37\200\362N,n\275\322Z\253Ja\244\225STi\345\224TZI\5\225VR\71\305"
"\225T\16y%\25S^I\305\20XR)\5\226TH\221\345\224Rd\71\205\24ZJ)\304\266R"
"\314Q\253\224\262\324\71\205\64[H)\245\22RN\231\205\224S&!%\225XHI\5\226RR\201"
"\245\224T^\61%\225GNI\305\225SRi%\225S\134I\345\224VV)\305\21\366Zi\315\21"
"\230^\361\0&]\243q\202\247\207\325\7\202|`\300\7\10|\240\70d\216;\310\270\203\314\63\310\274"
"c\14D\344\304\63\316|\265\335u\325e\350\34w\216y \230K\240r\21D\214\71\5\15S\316A"
"\302\224\203^\71\251\231\243\230\71k\231\303\322A\14\241\224\324y\300\235\7\334y\300\241\7\206@\251\231"
"\4'\21\10\307\326\224\340\201\20N\60\302\376F\31D\0( \12\225j\222\304\220R\12\261\206!\266"
"a\210\335\60\304\376_\61\304\256\30b+\206\230R\25\3)!\12\225j\222\300\224\252\30b\212!\266"
"b\210]\61\304\376\337\60\304n\30b\33\206\30R\210E\0*\60\24\311\306\240e\220\201\12)\306\10"
"s\212(\211\10\262\220\11\306\30\24Hx\340\3G\30Q\216ih\21AR\21\345\230QL!\5\15"
"\62\12\0+\24\235N\203\261m\350\371\277\366\300\37`m\350\371\277\6\0,\27\13Fj\217\344\20C"
"\16\61\305\20SL)\305\224bJ)\6\0-\11\15C\246\221\340\37\30.\11\7\243\202\217\340\3\14"
"/(\221Sv\225\254\60\302*FXm\25V\61\302*FXm\25V\61\302*FXm\25V\255"
"\302*FXm\25\6\0\60K\231Q\202\234G\65\247\36\10\350\201q\36 \345\230S\14:\343$\63"
"\214\62\303,#\314\62\341,\23\316\62\341,\23\316\362\337\62\341,\23\316\62\302,#\314\62\302\250#"
"N\62\304\240C\216\71\346\1b\36\30\350\201\240\134S\10\0\61\30\220\221\202\234\351 dTY\343\201"
"\17\240\340\4#\312\234t\377\377\177\62/\231Q\206\234h\261\227\36\20\347\1R\36\60\3\31\64N:"
"\302\250\23\216:\362\26\217<\21A\14\246\227{\350\345\36z\351!\210\342\3\377\17\63@\230Q\202\234"
"f\255\207\36\20\346\1B\36\60\343\30$\16:\342\244\23N:\361\202'\36\227\26c\312\245\267Z\213"
"H\236x\244\221':\345\245\23\20:\342\234\63\36\60\344\1b\36\20\350\255u\0\64\71\232\61\202\234"
"-A\365\66\307\265\306L\70\314\204\263\214\70\313\210\243\314\70\351\214\223\14\71\310\224\203L\71\307\230\233"
"\61\347\24\203\16\71\350\20\223\16y\340\377\1\363\316\274\217\0\65C\230Q~\234\341\1\63\36\60\343\1"
"\63\36\60\343\1\63\36\60\343\304\373\306\71'\254\362@!\17\230\361\300\21\251 \201\316\21'\235`\324"
"\211w\321)\227N\70\351\4t\316x\300\214\7Jy` \267\330\1\66N\231Q\202\234H\65\247\36"
"\10\350\201a\36(\345\230C\320\71\343\240#\216\274\244\221\207\234s\304*'\64r\202\33\17 \261H"
"\12\312\240\220\320\11H\231\200\324O\235p\224\21G\31\201\320\31\7\235\201\314)\17\20\363\0\71\17\210"
"\344\232B\0\67,\230Q\202\234\340\377\201\24/x\342\201\10\36\210\340\201\10\36\210=\4\321C\20\301"
"\3\21<\20{\10\242\207 z\10\242\227\36b\0\70U\231Q\202\234\246\251\7\304y\200\224\7\14y"
"\300\214T\222@\7\211\223\216\70\351\210\223\216\70\351\210\223\316\70\347\20T\316y@$\327\326z \240"
"\7\206A\5\15t\316\70\351\204\263L\70\313\204\263\274\365\322\3!\35\201\16\22\17\240\361\200!\17\224"
"\363\200P\315\0\71K\231Q\202\234g\61\247\36\20\347\1R\36(\4\31\64\16:\303$\24\216:\341"
"\250\23\216\372K)\34\224\2*K<\200\304\13g\270p\10\23\347\230r\244\221\67q\320\31\7\235q"
"\16\42\247 \363\0\61\17\14\364@P\256)\4\0\0\0\0\4\377\377\3\224R\236\61\202\237/\364\300"
"\307\36\272\344\202\15>\367\330T\205(t\10\63\305\60r\220\42E\61p\230\2\305\61n\34\343D*"
"m$\303\304*L,\243\306\62J\264\222F\63H\274r\306\63F@SF,EH\63\306,CP"
"\23\36p\341\1\27\36x\3\251c\244Q\202\250M\335\7\202,\252<\322\314*\260\244\22\15*\262\234"
"BK\61\324\20C\15)\266\14c\215\60\326\10c\215\60\326\10c\215\60\326\10c\215\60\266\20C\15"
"\61\324\224B\213\61\322\234\42K*\260\254\342\212#\253\240\220\310)I,q\306\32J\234\241\36\10\347"
"\201g\36x\346\201g\36x\346\1\3\274N\234qZ\240\340\244SN:\345\244SN:\345\244SN"
":\345\244SN:\345\244SN:\345\244SN:\345\244SN:\345\244SN:\345\244SN:\345"
"\244S\16B\5\31E\36P\341\201\17\4\361\200\32\17\10A\316\11\247\336_\5\3\300Q\32-~\234"
"\344\201\63\36@\342\201\24\36Pa\214a\306\21e\230q\202\231N\60\223\33f\270QH#\205\64R"
"H#\205\264aH\33\206\264aH\33\206\60b\210\11\205\30b\2)\206\24\61\312)D\214\202\322\60"
"(\15\203\20)\312\230\301\10\1 \60\310\303\62~\305\37\200\362\201\71\261|P\326+\37\220\326\312\7"
"\244\224\302\310\7\244\234\242\312\7\244\234\222\312\7\244\244\202\312\7\244\244r\312\7\245\244r\310\7\246\244"
"b\312\7\246\244b\310\7\247\244R\312\7\247\244B\312\7\251\234R\312\7\251\234B\312\7\253\224R\310"
"\7\255\225b\316;j\225R\326Z\352\234BZj\266\220R\312)\245TB\312)\245\234\62\13)\247"
"\224r\312$\244\244\62J*\261\220\222\312(\251\300RJ*\243\244\2K)\251\214\222\312+\246\244\62"
"J*\217\234\222\312(\251\270rJ*\243\244\322J*\247\224r\212+\251\234R\312)\255\254R\312)"
"\245\70\302\336y\255\264\226\232#\60\265\364\12\15\65$\0 \71\36\214i\216\222\345\24SN\61\345\220"
"S\16\71\345\20T.s\312\61\307\30s\214\61\7 :\37\214i\216\222\340\30c\256r\314)\307\234"
"r\314!\247\34r\312!\247\34r\212)\247\0 D\67\231R~\234\264PB\313,\264\314B\313,"
"\224\320\62\13-\263\320\62\13%\264\314B\313,\264\314B\11-\263\320\62\13-\263PB\313,\264\314"
"B\313,\224T\0!\42G\234F\332\240`\211R\36\10\242\224B\206)\304\220a\12\61d\30\63D"
"\30d\230\21\304\20a\220aF\20b\204A\206\31a\4!\6\31f\204\21\204\30d\230!\212\30d"
"\230!\212\30d\230!\310\30d\230\61&\42\2Q\226S~\231\4\261\246\10))\250A\247\71\350\230"
"c\222\71\233D\322\216)\244\250A\206)B\20a\316\21%\231PV\11\204\221P\30\11\205\221@X"
"\11\204\225@\30\21\204\21AV\21d\221A\26!C\221B\22\61\3\21\64\14Q\253\31\5\0\42\17"
"x\250\65n\252\340\3j<\360\0\71\17< \222\221F\31i\224\221F\31i\224\221F\31i\224\221"
"F\31i\224\221F\31i\224\221F\31i\224\221F\31i\224\221F\31i\224\221F\31i\224\221F\31"
"i\224\221F\31i\224\221F\31i\224\221F\31i\224\221F\31i\224\221F\31i\224\221F\31i\224"
"\221F\31i\224\221F\31i\224\221F\235x\216rj<\20\320\3\1\42\21N\42\66j\244\340\3\201"
"<\360\312y\307\30I\314\231\342\34)\320\221!\31\31\322\341\27\67\335\360\213\233n\370\215[\235\370y"
"\236\370\271\32\320\250\342\14+\14\251\302\20+\314\250\244<\340\310\3o<\360\306\3\17\4\361\300\3\342"
"\3\21\4\0\42\22\12\235A\267\261\340\37`\42\32E\33\61\202\234\227\134\201E\226a\221e)\70\201"
"\206\23\306\64QF\30L(\262\304\32K,\242\4\33I\270\201\204\33H\274q\304\33F\304QD\34"
"E\310\61\304\34C\320!\4%A\324b\313\245\260\310b\1\42\36\64 \7\243\261&\310\220\320:e"
"\35\65H\241\10\21\343\20A\220Q'\231U-\302\312\252\22\11#\215\60\16\31\3\215PF)e\220"
"\241\220\62\206\235\1\0\42+(\24WZ\227\315\64\264\222\32\242(A\306\22q>H\377\377\7G\234"
" \35\34q(\62\206\62A\254\264P+\16\0\42H+\35G\243\261\4\301 \332\32\341\1q\320y"
"@\4\301\232\10\360|\310\230\30\4k\42< \16:\17\210 X\23\1\236\2\0\42`\34\35K\223"
"\261Wh\202i\346\201\77\300\36\301\364\336\3\177\200\31\202iZ\134\0\42d%\235M\207\261z\134C"
"\23T\60\305,\42\211\250\271$\37\214p\272\31N\67]\204\313\26\37\312\17\374\1\6\42e\42\235M"
"\207\261`hs\323U\67\207\61l\62\261g\242\230\203)&\211j\301\342\303\352\3\177\200\1%\312Z"
"\26S\202\232\211H\22\15\34a\270!\206\33c\260A\6\33e\250a\206\32g\240Y\32f\250a\306"
"\32d\260\61\206\33b\270\21\6\264\340\10\343\215\60\334\30\243\215\61\330(c\215\62\324\70#\215\63\320"
"H\343\214\64\314X\243\214\65\310hc\214\66\304x#\214g\42\221$\1\0";
| 4,855 |
346 | {
"packagingVersion": "2.0",
"name": "crate",
"version": "0.1.0",
"scm": "https://github.com/crate/crate-mesos-framework.git",
"maintainer": "<EMAIL>",
"description": "A Mesos Framework that allows running and resizing one or multiple Crate database clusters.",
"website": "https://crate.io",
"tags": ["database", "nosql"],
"framework": true,
"preInstallNotes": "This DC/OS Service is currently in preview. The Crate DCOS Service implementation is experimental and therefore there may be bugs or incomplete features. Never use alpha version software in production!\n\tPlease refer to the documentation at https://github.com/crate/crate-mesos-framework for information how to start a Crate cluster.",
"postInstallNotes": "Thanks for using Crate! Please read the documentation at https://crate.io/docs/stable/ for usage instructions.",
"postUninstallNotes": "The Crate DCOS Service has been uninstalled and will no longer run.",
"licenses": [
{
"name": "Apache License Version 2.0",
"url": "https://github.com/crate/crate-mesos-framework/blob/master/LICENSE"
}
]
}
| 341 |
575 | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_METRICS_USAGE_SCENARIO_WEBRTC_EVENT_PROVIDER_H_
#define CHROME_BROWSER_METRICS_USAGE_SCENARIO_WEBRTC_EVENT_PROVIDER_H_
#include "base/containers/flat_map.h"
#include "base/scoped_multi_source_observation.h"
#include "base/scoped_observation.h"
#include "base/sequence_checker.h"
#include "content/public/browser/peer_connection_tracker_host_observer.h"
#include "content/public/browser/render_process_host.h"
#include "content/public/browser/render_process_host_observer.h"
class UsageScenarioDataStoreImpl;
// Provides events related to WebRTC to the data store.
class WebRtcEventProvider : public content::PeerConnectionTrackerHostObserver,
public content::RenderProcessHostObserver {
public:
// This class will not own |data_store| so it needs to be outlived by this
// provider.
explicit WebRtcEventProvider(UsageScenarioDataStoreImpl* data_store);
~WebRtcEventProvider() override;
WebRtcEventProvider(const WebRtcEventProvider& rhs) = delete;
WebRtcEventProvider& operator=(const WebRtcEventProvider& rhs) = delete;
// content::PeerConnectionTrackerHostObserver:
void OnPeerConnectionAdded(content::GlobalFrameRoutingId render_frame_host_id,
int lid,
base::ProcessId pid,
const std::string& url,
const std::string& rtc_configuration,
const std::string& constraints) override;
void OnPeerConnectionRemoved(
content::GlobalFrameRoutingId render_frame_host_id,
int lid) override;
void OnPeerConnectionUpdated(
content::GlobalFrameRoutingId render_frame_host_id,
int lid,
const std::string& type,
const std::string& value) override;
// content::RenderProcessHostObserver:
void RenderProcessExited(
content::RenderProcessHost* host,
const content::ChildProcessTerminationInfo& info) override;
private:
// Adds/removes an observation to a RenderProcessHost if it is the first/last
// peer connection to exist in that renderer.
void MaybeAddRenderProcessHostObservation(int render_process_host_id,
int lid);
void MaybeRemoveRenderProcessHostObservation(int render_process_host_id,
int lid);
// The data store for the video capture events. Must outlive |this|.
UsageScenarioDataStoreImpl* const data_store_;
// For each existing peer connection, tracks whether it is actually connected
// to another peer.
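  // Pair of (render process host ID, lid); together they uniquely identify a connection.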
using PeerConnectionId = std::pair<int, int>;
base::flat_map<PeerConnectionId, bool /*is_connected*/> peer_connections_;
  // Tracks which renderer each connection is associated with. Used to
// ensure each RenderProcessHost is only observed once.
base::flat_map<content::RenderProcessHost*, base::flat_set<int>>
lids_per_renderers_;
base::ScopedMultiSourceObservation<content::RenderProcessHost,
content::RenderProcessHostObserver>
render_process_host_observations_{this};
SEQUENCE_CHECKER(sequence_checker_);
};
#endif // CHROME_BROWSER_METRICS_USAGE_SCENARIO_WEBRTC_EVENT_PROVIDER_H_
| 1,273 |
319 | <reponame>Kobting/ModTheSpire
package com.evacipated.cardcrawl.modthespire;
import com.google.gson.*;
import com.google.gson.annotations.SerializedName;
import com.vdurmont.semver4j.Semver;
import com.vdurmont.semver4j.SemverException;
import javax.swing.*;
import java.io.*;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
import java.util.Properties;
public class ModInfo implements Serializable
{
    /** Serialization version for persisted ModInfo instances. */
private static final long serialVersionUID = 7452562412479584982L;
public transient URL jarURL;
public transient String statusMsg = " ";
public transient boolean isWorkshop = false;
@SerializedName("modid")
public String ID;
@SerializedName("name")
public String Name;
@SerializedName("version")
public Semver ModVersion;
@SerializedName("author_list")
public String[] Authors;
@SerializedName("credits")
public String Credits;
@SerializedName("description")
public String Description;
@SerializedName("mts_version")
public Semver MTS_Version;
@SerializedName("sts_version")
public String STS_Version;
@SerializedName("dependencies")
public String[] Dependencies;
@SerializedName("optional_dependencies")
public String[] OptionalDependencies;
@SerializedName("update_json")
public String UpdateJSON;
private ModInfo()
{
Name = "";
Authors = new String[]{};
Description = "";
MTS_Version = ModInfo.safeVersion("0.0.0");
STS_Version = null;
Dependencies = new String[]{};
OptionalDependencies = new String[]{};
UpdateJSON = null;
}
public String getIDName() {
if (ID == null || ID.isEmpty()) {
return Name;
} else {
return ID;
}
}
private static void closeLoader(URLClassLoader loader)
{
try {
if (loader != null) {
loader.close();
}
} catch (Exception e) {
System.out.println("Exception during loader.close(), URLClassLoader may be leaked. " + e.toString());
}
}
public static ModInfo ReadModInfo(File mod_jar)
{
Gson gson = new GsonBuilder()
.excludeFieldsWithModifiers(Modifier.STATIC, Modifier.TRANSIENT)
.registerTypeAdapter(Semver.class, new VersionDeserializer())
.setDateFormat("MM-dd-yyyy")
.create();
URLClassLoader loader = null;
try {
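            // Null parent class loader: resolve resources against this jar only.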
loader = new URLClassLoader(new URL[] {mod_jar.toURI().toURL()}, null);
InputStream in = loader.getResourceAsStream("ModTheSpire.json");
if (in == null) {
// Fallback to old info file
ModInfo info = ReadModInfoOld(mod_jar);
info.jarURL = mod_jar.toURI().toURL();
return info;
}
ModInfo info = gson.fromJson(new InputStreamReader(in, StandardCharsets.UTF_8), ModInfo.class);
info.jarURL = mod_jar.toURI().toURL();
in.close();
return info;
} catch (Exception e) {
System.out.println(mod_jar);
e.printStackTrace();
} finally {
if (loader != null) {
closeLoader(loader);
}
}
return null;
}
private static ModInfo ReadModInfoOld(File mod_jar)
{
ModInfo info = new ModInfo();
// Default mod name to jar name
info.Name = mod_jar.getName();
info.Name = info.Name.substring(0, info.Name.length() - 4);
URLClassLoader loader = null;
try {
loader = new URLClassLoader(new URL[] {mod_jar.toURI().toURL()});
// Read ModTheSpire.config
Properties prop = new Properties();
InputStream inProp = loader.getResourceAsStream("ModTheSpire.config");
if (inProp != null) {
prop.load(new InputStreamReader(inProp, StandardCharsets.UTF_8));
info.Name = prop.getProperty("name");
String author = prop.getProperty("author");
if (author != null && !author.isEmpty()) {
info.Authors = author.split(",");
}
info.MTS_Version = ModInfo.safeVersion(prop.getProperty("mts_version", "0.0.0"));
info.Description = prop.getProperty("description");
info.STS_Version = prop.getProperty("sts_version");
inProp.close();
}
} catch (Exception e) {
System.out.println("ERROR: Failed to read Mod info from " + mod_jar.getName());
} finally {
closeLoader(loader);
}
return info;
}
private static class VersionDeserializer implements JsonDeserializer<Semver>
{
@Override
public Semver deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException
{
try {
return safeVersion(jsonElement.getAsJsonPrimitive().getAsString());
} catch (SemverException e) {
return null;
}
}
}
@Override
public boolean equals(Object obj)
{
if (obj == this) {
return true;
}
if (!(obj instanceof ModInfo)) {
return false;
}
ModInfo info = (ModInfo) obj;
if (ID == null && info.ID == null) {
return Objects.equals(Name, info.Name);
} else {
return Objects.equals(ID, info.ID);
}
}
@Override
public int hashCode()
{
return Objects.hash(ID, Name);
}
private void writeObject(java.io.ObjectOutputStream out) throws IOException
{
out.writeObject(ID);
out.writeObject(Name);
out.writeObject(ModVersion.toString());
out.writeObject(Authors);
out.writeObject(Credits);
out.writeObject(Description);
out.writeObject(MTS_Version.toString());
out.writeObject(STS_Version);
out.writeObject(Dependencies);
out.writeObject(OptionalDependencies);
out.writeObject(UpdateJSON);
}
private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException
{
ID = (String) in.readObject();
Name = (String) in.readObject();
ModVersion = safeVersion((String) in.readObject());
Authors = (String[]) in.readObject();
Credits = (String) in.readObject();
Description = (String) in.readObject();
MTS_Version = safeVersion((String) in.readObject());
STS_Version = (String) in.readObject();
Dependencies = (String[]) in.readObject();
OptionalDependencies = (String[]) in.readObject();
UpdateJSON = (String) in.readObject();
}
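    // Parses using npm-style semver rules, which are more tolerant than strict semver.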
public static Semver safeVersion(String verString)
{
return new Semver(verString, Semver.SemverType.NPM);
}
}
| 3,151 |
372 | /*
* travesty, pure C interface to steinberg VST3 SDK
* Copyright (C) 2021 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, and/or distribute this software for any purpose with
* or without fee is hereby granted, provided that the above copyright notice and this
* permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
* TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN
* NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
* DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
* IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#pragma once
#include "base.h"
#include "bstream.h"
#include "align_push.h"
/**
* buses
*/
enum v3_media_types {
V3_AUDIO = 0,
V3_EVENT
};
inline static const char *
v3_media_type_str(int32_t type)
{
switch (type) {
case V3_AUDIO: return "V3_AUDIO";
case V3_EVENT: return "V3_EVENT";
default: return "[unknown]";
}
}
enum v3_bus_direction {
V3_INPUT = 0,
V3_OUTPUT
};
inline static const char *
v3_bus_direction_str(int32_t d)
{
switch (d) {
case V3_INPUT: return "V3_INPUT";
case V3_OUTPUT: return "V3_OUTPUT";
default: return "[unknown]";
}
}
enum v3_bus_types {
V3_MAIN = 0,
V3_AUX
};
enum v3_bus_flags {
V3_DEFAULT_ACTIVE = 1,
V3_IS_CONTROL_VOLTAGE = 1 << 1
};
struct v3_bus_info {
int32_t media_type;
int32_t direction;
int32_t channel_count;
v3_str_128 bus_name;
int32_t bus_type;
uint32_t flags;
};
/**
* component
*/
struct v3_routing_info;
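/* C function-pointer table mirroring the C++ IComponent interface;
   the anonymous v3_plugin_base member supplies the inherited base entries. */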
struct v3_component {
struct v3_plugin_base;
V3_API v3_result (*get_controller_class_id)
(void *self, v3_tuid class_id);
V3_API v3_result (*set_io_mode)
(void *self, int32_t io_mode);
V3_API int32_t (*get_bus_count)
(void *self, int32_t media_type, int32_t bus_direction);
V3_API v3_result (*get_bus_info)
(void *self, int32_t media_type, int32_t bus_direction,
int32_t bus_idx, struct v3_bus_info *bus_info);
V3_API v3_result (*get_routing_info)
(void *self, struct v3_routing_info *input,
struct v3_routing_info *output);
V3_API v3_result (*activate_bus)
(void *self, int32_t media_type, int32_t bus_direction,
int32_t bus_idx, v3_bool state);
V3_API v3_result (*set_active)
(void *self, v3_bool state);
V3_API v3_result (*set_state)
(void *self, struct v3_bstream **);
V3_API v3_result (*get_state)
(void *self, struct v3_bstream **);
};
static const v3_tuid v3_component_iid =
V3_ID(0xE831FF31, 0xF2D54301, 0x928EBBEE, 0x25697802);
#include "align_pop.h"
| 1,125 |
2,338 | <filename>clang-tools-extra/test/clang-reorder-fields/ClassSimpleCtor.cpp
// RUN: clang-reorder-fields -record-name Foo -fields-order s1,x,z,s2 %s -- | FileCheck %s
class Foo {
public:
Foo();
private:
int x; // CHECK: {{^ const char \*s1;}}
const char *s1; // CHECK-NEXT: {{^ int x;}}
const char *s2; // CHECK-NEXT: {{^ double z;}}
double z; // CHECK-NEXT: {{^ const char \*s2;}}
};
Foo::Foo():
x(12), // CHECK: {{^ s1\("abc"\),}}
s1("abc"), // CHECK-NEXT: {{^ x\(12\),}}
s2("def"), // CHECK-NEXT: {{^ z\(3.14\),}}
z(3.14) // CHECK-NEXT: {{^ s2\("def"\)}}
{}
int main() {
Foo foo;
return 0;
}
| 348 |
812 | package com.vpaliy.data.mapper;
import android.content.Context;
import android.icu.text.NumberFormat;
import android.support.annotation.VisibleForTesting;
import android.text.TextUtils;
import com.vpaliy.data.Config;
import com.vpaliy.data.R;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import retrofit2.http.Path;
@SuppressWarnings("WeakerAccess")
public class MapperUtils {
public static List<String> splitString(String string) {
if (string == null || string.isEmpty()) return null;
string = string.replace(" ", ",");
return Arrays.asList(string.split("\\s*,\\s*"));
}
public static String toString(List<String> strings) {
if (strings == null) return null;
        // Turn List.toString()'s "[a, b, c]" into "a,b,c" by stripping the brackets and whitespace.
        return strings.toString().replaceAll("[\\[\\]\\s]", "");
}
public static int convertToInt(String number) {
if (number == null) return 0;
return Integer.parseInt(number);
}
public static String convertToStream(String streamUrl) {
if (streamUrl == null) return null;
return streamUrl + "?client_id=" + Config.CLIENT_ID;
}
public static String convertFromStream(String streamUrl) {
if (streamUrl == null || !streamUrl.contains("?client_id=")) return streamUrl;
        // Drop the "?client_id=..." suffix appended by convertToStream.
        return streamUrl.substring(0, streamUrl.indexOf("?client_id="));
}
public static String convertDuration(Context context, long millis) {
long time = TimeUnit.MILLISECONDS.toHours(millis);
String result = "";
if (time != 0) {
result = context.getResources().getQuantityString(R.plurals.hours, (int) (time), time);
}
time = TimeUnit.MILLISECONDS.toMinutes(millis) % 60;
if (time != 0) {
result += " " + context.getResources().getQuantityString(R.plurals.minutes, (int) (time), time);
}
        return result.trim(); // avoid a leading space when the duration has no hours
}
public static String convertToRuntime(Context context, String duration) {
if (duration == null) return null;
String hr = context.getString(R.string.hour_label);
String hrs = context.getString(R.string.hours_label);
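        // Input is either "H hr M min" (two numbers) or a single number, read as minutes unless an hour label is present.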
Pattern pattern = Pattern.compile("-?\\d+");
Matcher matcher = pattern.matcher(duration);
int runtime = 0;
int count = 0;
while (matcher.find()) count++;
matcher = matcher.reset();
if (count == 2) {
matcher.find();
runtime = Integer.parseInt(matcher.group()) * 60;
if (matcher.find()) runtime += Integer.parseInt(matcher.group());
} else if (matcher.find()) {
runtime = Integer.parseInt(matcher.group());
duration = duration.trim();
if (duration.contains(hr) || duration.contains(hrs)) runtime *= 60;
}
return Integer.toString(runtime);
}
public static String convertDuration(Context context, String millis) {
if (!TextUtils.isEmpty(millis)) {
return convertDuration(context, Long.parseLong(millis));
}
return null;
}
}
| 1,038 |
522 | <filename>tensorflow/examples/wav_to_spectrogram/wav_to_spectrogram.cc
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/examples/wav_to_spectrogram/wav_to_spectrogram.h"
#include <vector>
#include "tensorflow/cc/ops/audio_ops.h"
#include "tensorflow/cc/ops/const_op.h"
#include "tensorflow/cc/ops/image_ops.h"
#include "tensorflow/cc/ops/standard_ops.h"
#include "tensorflow/core/framework/graph.pb.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/graph/default_device.h"
#include "tensorflow/core/graph/graph_def_builder.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/core/stringpiece.h"
#include "tensorflow/core/lib/core/threadpool.h"
#include "tensorflow/core/lib/io/path.h"
#include "tensorflow/core/lib/strings/stringprintf.h"
#include "tensorflow/core/platform/logging.h"
#include "tensorflow/core/platform/types.h"
#include "tensorflow/core/public/session.h"
#include "tensorflow/core/util/command_line_flags.h"
using tensorflow::DT_FLOAT;
using tensorflow::DT_UINT8;
using tensorflow::Output;
using tensorflow::TensorShape;
// Runs a TensorFlow graph to convert an audio file into a visualization.
tensorflow::Status WavToSpectrogram(const tensorflow::string& input_wav,
tensorflow::int32 window_size,
tensorflow::int32 stride, float brightness,
const tensorflow::string& output_image) {
auto root = tensorflow::Scope::NewRootScope();
using namespace tensorflow::ops; // NOLINT(build/namespaces)
// The following block creates a TensorFlow graph that:
// - Reads and decodes the audio file into a tensor of float samples.
// - Creates a float spectrogram from those samples.
// - Scales, clamps, and converts that spectrogram to 0 to 255 uint8's.
// - Reshapes the tensor so that it's [height, width, 1] for imaging.
// - Encodes it as a PNG stream and saves it out to a file.
Output file_reader = ReadFile(root.WithOpName("input_wav"), input_wav);
DecodeWav wav_decoder =
DecodeWav(root.WithOpName("wav_decoder"), file_reader);
Output spectrogram = AudioSpectrogram(root.WithOpName("spectrogram"),
wav_decoder.audio, window_size, stride);
Output brightness_placeholder =
Placeholder(root.WithOpName("brightness_placeholder"), DT_FLOAT,
Placeholder::Attrs().Shape(TensorShape({})));
Output mul = Mul(root.WithOpName("mul"), spectrogram, brightness_placeholder);
Output min_const = Const(root.WithOpName("min_const"), 255.0f);
Output min = Minimum(root.WithOpName("min"), mul, min_const);
Output cast = Cast(root.WithOpName("cast"), min, DT_UINT8);
Output expand_dims_const = Const(root.WithOpName("expand_dims_const"), -1);
Output expand_dims =
ExpandDims(root.WithOpName("expand_dims"), cast, expand_dims_const);
Output squeeze = Squeeze(root.WithOpName("squeeze"), expand_dims,
Squeeze::Attrs().SqueezeDims({0}));
Output png_encoder = EncodePng(root.WithOpName("png_encoder"), squeeze);
WriteFile file_writer =
WriteFile(root.WithOpName("output_image"), output_image, png_encoder);
tensorflow::GraphDef graph;
TF_RETURN_IF_ERROR(root.ToGraphDef(&graph));
// Build a session object from this graph definition. The power of TensorFlow
// is that you can reuse complex computations like this, so usually we'd run a
// lot of different inputs through it. In this example, we're just doing a
// one-off run, so we'll create it and then use it immediately.
std::unique_ptr<tensorflow::Session> session(
tensorflow::NewSession(tensorflow::SessionOptions()));
TF_RETURN_IF_ERROR(session->Create(graph));
// We're passing in the brightness as an input, so create a tensor to hold the
// value.
tensorflow::Tensor brightness_tensor(DT_FLOAT, TensorShape({}));
brightness_tensor.scalar<float>()() = brightness;
// Run the session to analyze the audio and write out the file.
TF_RETURN_IF_ERROR(
session->Run({{"brightness_placeholder", brightness_tensor}}, {},
{"output_image"}, nullptr));
return tensorflow::Status::OK();
}
| 1,715 |
1,143 | // =================================================================================================
// Copyright 2012 Twitter, Inc.
// -------------------------------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this work except in compliance with the License.
// You may obtain a copy of the License in the LICENSE file, or at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =================================================================================================
package com.twitter.common.webassets.jquery;
import com.google.common.io.Resources;
import com.google.common.net.MediaType;
import com.google.inject.AbstractModule;
import com.twitter.common.application.http.Registration;
/**
* A binding module to register jQuery HTTP assets.
*/
public final class JQueryModule extends AbstractModule {
@Override
protected void configure() {
Registration.registerHttpAsset(
binder(),
"/js/jquery.min.js",
Resources.getResource(JQueryModule.class, "js/jquery-1.8.2.min.js"),
MediaType.JAVASCRIPT_UTF_8.toString(),
true);
}
}
| 385 |
1,514 | <reponame>fpmuniz/stepmania
#include "global.h"
#include "Screen.h"
#include "PrefsManager.h"
#include "RageSound.h"
#include "RageLog.h"
#include "ThemeManager.h"
#include "ScreenManager.h"
#include "ActorUtil.h"
#include "InputEventPlus.h"
#include "InputMapper.h"
#define NEXT_SCREEN THEME->GetMetric (m_sName,"NextScreen")
#define PREV_SCREEN THEME->GetMetric (m_sName,"PrevScreen")
#define PREPARE_SCREENS THEME->GetMetric (m_sName,"PrepareScreens")
#define PERSIST_SCREENS THEME->GetMetric (m_sName,"PersistScreens")
#define GROUPED_SCREENS THEME->GetMetric (m_sName,"GroupedScreens")
static const char *ScreenTypeNames[] = {
"Attract",
"GameMenu",
"Gameplay",
"SystemMenu",
};
XToString( ScreenType );
LuaXType( ScreenType );
void Screen::InitScreen( Screen *pScreen )
{
pScreen->Init();
}
Screen::~Screen()
{
}
bool Screen::SortMessagesByDelayRemaining( const Screen::QueuedScreenMessage &m1,
const Screen::QueuedScreenMessage &m2 )
{
return m1.fDelayRemaining < m2.fDelayRemaining;
}
void Screen::Init()
{
ALLOW_OPERATOR_MENU_BUTTON.Load( m_sName, "AllowOperatorMenuButton" );
HANDLE_BACK_BUTTON.Load( m_sName, "HandleBackButton" );
REPEAT_RATE.Load( m_sName, "RepeatRate" );
REPEAT_DELAY.Load( m_sName, "RepeatDelay" );
LIGHTS_MODE.Load( m_sName, "LightsMode" );
m_Codes.Load( m_sName );
SetFOV( 0 );
m_smSendOnPop = SM_None;
m_bRunning = false;
m_CallingInputCallbacks= false;
ActorUtil::LoadAllCommandsFromName( *this, m_sName, "Screen" );
PlayCommandNoRecurse( Message("Init") );
vector<RString> asList;
split( PREPARE_SCREENS, ",", asList );
for( unsigned i = 0; i < asList.size(); ++i )
{
LOG->Trace( "Screen \"%s\" preparing \"%s\"", m_sName.c_str(), asList[i].c_str() );
SCREENMAN->PrepareScreen( asList[i] );
}
asList.clear();
split( GROUPED_SCREENS, ",", asList );
for( unsigned i = 0; i < asList.size(); ++i )
SCREENMAN->GroupScreen( asList[i] );
asList.clear();
split( PERSIST_SCREENS, ",", asList );
for( unsigned i = 0; i < asList.size(); ++i )
SCREENMAN->PersistantScreen( asList[i] );
}
void Screen::BeginScreen()
{
m_bRunning = true;
m_bFirstUpdate = true;
/* Screens set these when they determine their next screen dynamically. Reset them
* here, so a reused screen doesn't inherit these from the last time it was used. */
m_sNextScreen = RString();
m_fLockInputSecs = 0;
this->RunCommands( THEME->GetMetricA(m_sName, "ScreenOnCommand") );
if( m_fLockInputSecs == 0 )
m_fLockInputSecs = 0.0001f; // always lock for a tiny amount of time so that we throw away any queued inputs during the load.
this->PlayCommand( "Begin" );
}
void Screen::EndScreen()
{
this->PlayCommand( "End" );
m_bRunning = false;
}
void Screen::Update( float fDeltaTime )
{
ActorFrame::Update( fDeltaTime );
m_fLockInputSecs = max( 0, m_fLockInputSecs-fDeltaTime );
/* We need to ensure two things:
* 1. Messages must be sent in the order of delay. If two messages are sent
* simultaneously, one with a .001 delay and another with a .002 delay,
* the .001 delay message must be sent first.
* 2. Messages to be delivered simultaneously must be sent in the order queued.
*
* Sort by time to ensure #1; use a stable sort to ensure #2. */
stable_sort(m_QueuedMessages.begin(), m_QueuedMessages.end(), SortMessagesByDelayRemaining);
// Update the times of queued ScreenMessages.
for( unsigned i=0; i<m_QueuedMessages.size(); i++ )
{
/* Hack:
* If we simply subtract time and then send messages, we have a problem.
* Messages are queued to arrive at specific times, and those times line
* up with things like tweens finishing. If we send the message at the
* exact time given, then it'll be on the same cycle that would be rendering
* the last frame of a tween (such as an object going off the screen).
* However, when we send the message, we're likely to set up a new screen,
* which causes everything to stop in place; this results in actors
* occasionally not quite finishing their tweens.
* Let's delay all messages that have a non-zero time an extra frame. */
if( m_QueuedMessages[i].fDelayRemaining > 0.0001f )
{
m_QueuedMessages[i].fDelayRemaining -= fDeltaTime;
m_QueuedMessages[i].fDelayRemaining = max( m_QueuedMessages[i].fDelayRemaining, 0.0001f );
}
else
{
m_QueuedMessages[i].fDelayRemaining -= fDeltaTime;
}
}
/* Now dispatch messages. If the number of messages on the queue changes
* within HandleScreenMessage, someone cleared messages on the queue. This
* means we have no idea where 'i' is, so start over. Since we applied time
* already, this won't cause messages to be mistimed. */
for( unsigned i=0; i<m_QueuedMessages.size(); i++ )
{
if( m_QueuedMessages[i].fDelayRemaining > 0.0f )
continue; /* not yet */
// Remove the message from the list.
const ScreenMessage SM = m_QueuedMessages[i].SM;
m_QueuedMessages.erase( m_QueuedMessages.begin()+i );
i--;
unsigned iSize = m_QueuedMessages.size();
// send this sucker!
CHECKPOINT_M( ssprintf("ScreenMessage(%s)", ScreenMessageHelpers::ScreenMessageToString(SM).c_str()) );
this->HandleScreenMessage( SM );
// If the size changed, start over.
if( iSize != m_QueuedMessages.size() )
i = 0;
}
}
/* Returns true if the input was handled, or false if not handled. For
* overlays, this determines whether the event will be propagated to lower
* screens (i.e. it propagates from an overlay only when this returns false). */
bool Screen::Input( const InputEventPlus &input )
{
Message msg("");
if( m_Codes.InputMessage(input, msg) )
{
this->HandleMessage( msg );
return true;
}
// Don't send release messages with the default handler.
switch( input.type )
{
case IET_FIRST_PRESS:
case IET_REPEAT:
break; // OK
default:
return false; // don't care
}
// Always broadcast mouse input so themers can grab it. -aj
if( input.DeviceI == DeviceInput( DEVICE_MOUSE, MOUSE_LEFT ) )
MESSAGEMAN->Broadcast( (MessageID)(Message_LeftClick) );
if( input.DeviceI == DeviceInput( DEVICE_MOUSE, MOUSE_RIGHT ) )
MESSAGEMAN->Broadcast( (MessageID)(Message_RightClick) );
if( input.DeviceI == DeviceInput( DEVICE_MOUSE, MOUSE_MIDDLE ) )
MESSAGEMAN->Broadcast( (MessageID)(Message_MiddleClick) );
// Can't do MouseWheelUp and MouseWheelDown at the same time. -aj
if( input.DeviceI == DeviceInput( DEVICE_MOUSE, MOUSE_WHEELUP ) )
MESSAGEMAN->Broadcast( (MessageID)(Message_MouseWheelUp) );
else if( input.DeviceI == DeviceInput( DEVICE_MOUSE, MOUSE_WHEELDOWN ) )
MESSAGEMAN->Broadcast( (MessageID)(Message_MouseWheelDown) );
// default input handler used by most menus
switch( input.MenuI )
{
case GAME_BUTTON_MENUUP: return this->MenuUp ( input );
case GAME_BUTTON_MENUDOWN: return this->MenuDown ( input );
case GAME_BUTTON_MENULEFT: return this->MenuLeft ( input );
case GAME_BUTTON_MENURIGHT: return this->MenuRight( input );
case GAME_BUTTON_BACK:
// Only go back on first press. If somebody is backing out of the
// options screen, they might still be holding it when select music
// appears, and accidentally back out of that too. -Kyz
if(input.type == IET_FIRST_PRESS)
{
if( HANDLE_BACK_BUTTON )
return this->MenuBack( input );
}
return false;
case GAME_BUTTON_START: return this->MenuStart ( input );
case GAME_BUTTON_SELECT: return this->MenuSelect( input );
case GAME_BUTTON_COIN: return this->MenuCoin ( input );
default: return false;
}
}
void Screen::HandleScreenMessage( const ScreenMessage SM )
{
if( SM == SM_GoToNextScreen || SM == SM_GoToPrevScreen )
{
if( SCREENMAN->IsStackedScreen(this) )
SCREENMAN->PopTopScreen( m_smSendOnPop );
else
{
RString ToScreen= (SM == SM_GoToNextScreen? GetNextScreenName():GetPrevScreen());
if(ToScreen == "")
{
LuaHelpers::ReportScriptError("Error: Tried to go to empty screen.");
}
else
{
SCREENMAN->SetNewScreen(ToScreen);
}
}
}
else if( SM == SM_GainFocus )
{
if( REPEAT_RATE != -1.0f )
INPUTFILTER->SetRepeatRate( REPEAT_RATE );
if( REPEAT_DELAY != -1.0f )
INPUTFILTER->SetRepeatDelay( REPEAT_DELAY );
LIGHTSMAN->SetLightsMode( LIGHTS_MODE );
}
else if( SM == SM_LoseFocus )
{
INPUTFILTER->ResetRepeatRate();
}
}
RString Screen::GetNextScreenName() const
{
if( !m_sNextScreen.empty() )
return m_sNextScreen;
return NEXT_SCREEN;
}
void Screen::SetNextScreenName(RString const& name)
{
m_sNextScreen= name;
}
void Screen::SetPrevScreenName(RString const& name)
{
m_sPrevScreen= name;
}
RString Screen::GetPrevScreen() const
{
if( !m_sPrevScreen.empty() )
return m_sPrevScreen;
return PREV_SCREEN;
}
void Screen::PostScreenMessage( const ScreenMessage SM, float fDelay )
{
ASSERT( fDelay >= 0.0 );
QueuedScreenMessage QSM;
QSM.SM = SM;
QSM.fDelayRemaining = fDelay;
m_QueuedMessages.push_back( QSM );
}
void Screen::ClearMessageQueue()
{
m_QueuedMessages.clear();
}
void Screen::ClearMessageQueue( const ScreenMessage SM )
{
for( int i=m_QueuedMessages.size()-1; i>=0; i-- )
if( m_QueuedMessages[i].SM == SM )
m_QueuedMessages.erase( m_QueuedMessages.begin()+i );
}
bool Screen::PassInputToLua(const InputEventPlus& input)
{
if(m_InputCallbacks.empty() || m_fLockInputSecs > 0.0f
|| !AllowCallbackInput())
{
return false;
}
m_CallingInputCallbacks= true;
bool handled= false;
Lua* L= LUA->Get();
// Construct the table once, and reuse it.
lua_createtable(L, 0, 7);
{ // This block is meant to improve clarity. A subtable is created for
// storing the DeviceInput member.
lua_createtable(L, 0, 8);
Enum::Push(L, input.DeviceI.device);
lua_setfield(L, -2, "device");
Enum::Push(L, input.DeviceI.button);
lua_setfield(L, -2, "button");
lua_pushnumber(L, input.DeviceI.level);
lua_setfield(L, -2, "level");
lua_pushinteger(L, input.DeviceI.z);
lua_setfield(L, -2, "z");
lua_pushboolean(L, input.DeviceI.bDown);
lua_setfield(L, -2, "down");
lua_pushnumber(L, input.DeviceI.ts.Ago());
lua_setfield(L, -2, "ago");
lua_pushboolean(L, input.DeviceI.IsJoystick());
lua_setfield(L, -2, "is_joystick");
lua_pushboolean(L, input.DeviceI.IsMouse());
lua_setfield(L, -2, "is_mouse");
}
lua_setfield(L, -2, "DeviceInput");
Enum::Push(L, input.GameI.controller);
lua_setfield(L, -2, "controller");
LuaHelpers::Push(L, GameButtonToString(INPUTMAPPER->GetInputScheme(), input.GameI.button));
lua_setfield(L, -2, "button");
Enum::Push(L, input.type);
lua_setfield(L, -2, "type");
LuaHelpers::Push(L, GameButtonToString(INPUTMAPPER->GetInputScheme(), input.MenuI));
lua_setfield(L, -2, "GameButton");
Enum::Push(L, input.pn);
lua_setfield(L, -2, "PlayerNumber");
Enum::Push(L, input.mp);
lua_setfield(L, -2, "MultiPlayer");
for(map<callback_key_t, LuaReference>::iterator callback= m_InputCallbacks.begin();
callback != m_InputCallbacks.end() && !handled; ++callback)
{
callback->second.PushSelf(L);
lua_pushvalue(L, -2);
RString error= "Error running input callback: ";
LuaHelpers::RunScriptOnStack(L, error, 1, 1, true);
handled= lua_toboolean(L, -1);
lua_pop(L, 1);
}
lua_pop(L, 1);
LUA->Release(L);
m_CallingInputCallbacks= false;
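	// Apply callback removals that were requested while the callbacks were running.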
if(!m_DelayedCallbackRemovals.empty())
{
for(vector<callback_key_t>::iterator key= m_DelayedCallbackRemovals.begin();
key != m_DelayedCallbackRemovals.end(); ++key)
{
InternalRemoveCallback(*key);
}
}
return handled;
}
void Screen::AddInputCallbackFromStack(lua_State* L)
{
callback_key_t key= lua_topointer(L, 1);
m_InputCallbacks[key]= LuaReference(L);
}
void Screen::RemoveInputCallback(lua_State* L)
{
callback_key_t key= lua_topointer(L, 1);
if(m_CallingInputCallbacks)
{
m_DelayedCallbackRemovals.push_back(key);
}
else
{
InternalRemoveCallback(key);
}
}
void Screen::InternalRemoveCallback(callback_key_t key)
{
map<callback_key_t, LuaReference>::iterator iter= m_InputCallbacks.find(key);
if(iter != m_InputCallbacks.end())
{
m_InputCallbacks.erase(iter);
}
}
// lua start
#include "LuaBinding.h"
/** @brief Allow Lua to have access to the Screen. */
class LunaScreen: public Luna<Screen>
{
public:
static int GetNextScreenName( T* p, lua_State *L ) { lua_pushstring(L, p->GetNextScreenName() ); return 1; }
static int SetNextScreenName( T* p, lua_State *L ) { p->SetNextScreenName(SArg(1)); COMMON_RETURN_SELF; }
static int GetPrevScreenName( T* p, lua_State *L ) { lua_pushstring(L, p->GetPrevScreen() ); return 1; }
static int SetPrevScreenName( T* p, lua_State *L ) { p->SetPrevScreenName(SArg(1)); COMMON_RETURN_SELF; }
static int lockinput( T* p, lua_State *L ) { p->SetLockInputSecs(FArg(1)); COMMON_RETURN_SELF; }
DEFINE_METHOD( GetScreenType, GetScreenType() )
static int PostScreenMessage( T* p, lua_State *L )
{
RString sMessage = SArg(1);
ScreenMessage SM = ScreenMessageHelpers::ToScreenMessage( sMessage );
p->PostScreenMessage( SM, IArg(2) );
COMMON_RETURN_SELF;
}
static int AddInputCallback(T* p, lua_State* L)
{
if(!lua_isfunction(L, 1))
{
luaL_error(L, "Input callback must be a function.");
}
p->AddInputCallbackFromStack(L);
COMMON_RETURN_SELF;
}
static int RemoveInputCallback(T* p, lua_State* L)
{
if(!lua_isfunction(L, 1))
{
luaL_error(L, "Input callback must be a function.");
}
p->RemoveInputCallback(L);
COMMON_RETURN_SELF;
}
LunaScreen()
{
ADD_METHOD( GetNextScreenName );
ADD_METHOD( SetNextScreenName );
ADD_METHOD( GetPrevScreenName );
ADD_METHOD( SetPrevScreenName );
ADD_METHOD( PostScreenMessage );
ADD_METHOD( lockinput );
ADD_METHOD( GetScreenType );
ADD_METHOD( AddInputCallback );
ADD_METHOD( RemoveInputCallback );
}
};
LUA_REGISTER_DERIVED_CLASS( Screen, ActorFrame )
// lua end
/*
* (c) 2001-2004 <NAME>, <NAME>
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, and/or sell copies of the Software, and to permit persons to
* whom the Software is furnished to do so, provided that the above
* copyright notice(s) and this permission notice appear in all copies of
* the Software and that both the above copyright notice(s) and this
* permission notice appear in supporting documentation.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF
* THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS
* INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT
* OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
* OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
* OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
* PERFORMANCE OF THIS SOFTWARE.
*/
| 5,647 |
6,034 | <reponame>ssSlowDown/onemall
package cn.iocoder.mall.productservice.mq.producer;
import cn.iocoder.mall.productservice.mq.producer.message.ProductUpdateMessage;
import lombok.extern.slf4j.Slf4j;
import org.apache.rocketmq.client.producer.SendResult;
import org.apache.rocketmq.client.producer.SendStatus;
import org.apache.rocketmq.spring.core.RocketMQTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
@Slf4j
public class ProductMQProducer {
@Autowired
private RocketMQTemplate rocketMQTemplate;
public void sendProductUpdateMessage(Integer id) {
        // TODO(芋艿): optimize this later; consider message consistency.
try {
SendResult sendResult = rocketMQTemplate.syncSend(ProductUpdateMessage.TOPIC, new ProductUpdateMessage().setId(id));
if (!SendStatus.SEND_OK.equals(sendResult.getSendStatus())) {
log.error("[sendProductUpdateMessage][product({}) 发送更新消息失败,结果为({})]", id, sendResult);
}
} catch (Throwable throwable) {
log.error("[sendProductUpdateMessage][product({}) 发送更新消息失败,发生异常]", id, throwable);
}
}
}
| 532 |
547 | /*
* Copyright 2011 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.optimaize.langdetect.profiles;
import com.optimaize.langdetect.i18n.LdLocale;
import com.optimaize.langdetect.ngram.NgramExtractor;
import org.jetbrains.annotations.NotNull;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
* Builder for {@link LanguageProfile}.
*
* <p>This class does no internal synchronization.</p>
*
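 * <p>Illustrative usage, assuming an {@code NgramExtractors} helper is available in this
 * library (hypothetical otherwise):</p>
 * <pre>{@code
 * LanguageProfile profile = new LanguageProfileBuilder(LdLocale.fromString("en"))
 *     .ngramExtractor(NgramExtractors.standard())
 *     .minimalFrequency(5)
 *     .addText("text in English ...")
 *     .build();
 * }</pre>
 *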
* @author <NAME>
*/
public class LanguageProfileBuilder {
@NotNull
private final LdLocale locale;
private int minimalFrequency = 1;
private NgramExtractor ngramExtractor;
private final Map<Integer, Map<String,Integer>> ngrams = new HashMap<>();
public LanguageProfileBuilder(@NotNull LdLocale locale) {
this.locale = locale;
}
@Deprecated
public LanguageProfileBuilder(@NotNull String locale) {
this.locale = LdLocale.fromString(locale);
}
/**
* Copy constructor.
*/
public LanguageProfileBuilder(@NotNull LanguageProfileBuilder languageProfileBuilder) {
this.locale = languageProfileBuilder.locale;
this.minimalFrequency = languageProfileBuilder.minimalFrequency;
this.ngramExtractor = languageProfileBuilder.ngramExtractor;
this.ngrams.putAll(languageProfileBuilder.ngrams);
}
public LanguageProfileBuilder ngramExtractor(@NotNull NgramExtractor ngramExtractor) {
this.ngramExtractor = ngramExtractor;
return this;
}
/**
* @param minimalFrequency 1-n, the default is 1. n-grams that occurred less often in the text are removed.
* This really should be set to something higher.
* Try to play with the number until you get a profile file of satisfying size,
* that produces good language detection results.
*/
public LanguageProfileBuilder minimalFrequency(int minimalFrequency) {
if (minimalFrequency < 1) throw new IllegalArgumentException("minimalFrequency must be >= 1, but was: "+minimalFrequency);
this.minimalFrequency = minimalFrequency;
return this;
}
/**
* In order to use this you must set the {@link #ngramExtractor} first.
*/
public LanguageProfileBuilder addText(CharSequence text) {
if (ngramExtractor==null) {
throw new IllegalStateException("NgramExtractor has not been set yet!");
}
for (Map.Entry<String, Integer> entry : ngramExtractor.extractCountedGrams(text).entrySet()) {
addGram(entry.getKey(), entry.getValue());
}
return this;
}
/**
* Shortcut for addGram(ngram, 1).
*/
public LanguageProfileBuilder addGram(String ngram) {
return addGram(ngram, 1);
}
/**
* If the builder already has this ngram, the given frequency is added to the current count.
*/
public LanguageProfileBuilder addGram(String ngram, int frequency) {
Map<String, Integer> map = ngrams.get(ngram.length());
if (map==null) {
map = new HashMap<>();
ngrams.put(ngram.length(), map);
}
Integer total = map.get(ngram);
if (total==null) total = 0;
total += frequency;
map.put(ngram, total);
return this;
}
public LanguageProfile build() {
        if (minimalFrequency > 1) {
removeNgramsWithLessFrequency();
}
return new LanguageProfileImpl(locale, ngrams);
}
private void removeNgramsWithLessFrequency() {
for (Map<String, Integer> map : ngrams.values()) {
Iterator<Map.Entry<String, Integer>> iterator = map.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<String, Integer> next = iterator.next();
if (next.getValue() < minimalFrequency) {
iterator.remove();
}
}
}
}
}
| 1,731 |
2,151 | <filename>third_party/blink/renderer/core/paint/line_box_list_painter.cc
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/core/paint/line_box_list_painter.h"
#include "third_party/blink/renderer/core/layout/api/line_layout_box_model.h"
#include "third_party/blink/renderer/core/layout/layout_box_model_object.h"
#include "third_party/blink/renderer/core/layout/line/inline_flow_box.h"
#include "third_party/blink/renderer/core/layout/line/line_box_list.h"
#include "third_party/blink/renderer/core/layout/line/root_inline_box.h"
#include "third_party/blink/renderer/core/paint/object_painter.h"
#include "third_party/blink/renderer/core/paint/paint_info.h"
#include "third_party/blink/renderer/platform/graphics/paint/paint_controller.h"
namespace blink {
static void AddPDFURLRectsForInlineChildrenRecursively(
const LayoutObject& layout_object,
const PaintInfo& paint_info,
const LayoutPoint& paint_offset) {
for (LayoutObject* child = layout_object.SlowFirstChild(); child;
child = child->NextSibling()) {
if (!child->IsLayoutInline() ||
ToLayoutBoxModelObject(child)->HasSelfPaintingLayer())
continue;
ObjectPainter(*child).AddPDFURLRectIfNeeded(paint_info, paint_offset);
AddPDFURLRectsForInlineChildrenRecursively(*child, paint_info,
paint_offset);
}
}
void LineBoxListPainter::Paint(const LayoutBoxModelObject& layout_object,
const PaintInfo& paint_info,
const LayoutPoint& paint_offset) const {
DCHECK(!ShouldPaintSelfOutline(paint_info.phase) &&
!ShouldPaintDescendantOutlines(paint_info.phase));
// Only paint during the foreground/selection phases.
if (paint_info.phase != PaintPhase::kForeground &&
paint_info.phase != PaintPhase::kSelection &&
paint_info.phase != PaintPhase::kTextClip &&
paint_info.phase != PaintPhase::kMask)
return;
// The only way an inline could paint like this is if it has a layer.
DCHECK(layout_object.IsLayoutBlock() ||
(layout_object.IsLayoutInline() && layout_object.HasLayer()));
if (paint_info.phase == PaintPhase::kForeground && paint_info.IsPrinting())
AddPDFURLRectsForInlineChildrenRecursively(layout_object, paint_info,
paint_offset);
// If we have no lines then we have no work to do.
if (!line_box_list_.First())
return;
if (!line_box_list_.AnyLineIntersectsRect(
LineLayoutBoxModel(const_cast<LayoutBoxModelObject*>(&layout_object)),
paint_info.GetCullRect(), paint_offset))
return;
// See if our root lines intersect with the dirty rect. If so, then we paint
// them. Note that boxes can easily overlap, so we can't make any assumptions
// based off positions of our first line box or our last line box.
for (InlineFlowBox* curr : line_box_list_) {
if (line_box_list_.LineIntersectsDirtyRect(
LineLayoutBoxModel(
const_cast<LayoutBoxModelObject*>(&layout_object)),
curr, paint_info.GetCullRect(), paint_offset)) {
RootInlineBox& root = curr->Root();
curr->Paint(paint_info, paint_offset, root.LineTop(), root.LineBottom());
}
}
}
} // namespace blink
| 1,324 |
548 | <filename>benchmark/tests/test_model/inceptionv3_pytorch/serving_model.json
{
"serving_type": "tensorrt",
"input_model": "./model/inceptionv3.onnx",
"export_path": "model_tensorrt",
"input_names": [
"x.1"
],
"input_formats": [
"channels_first"
],
"output_names": [
"924"
],
"input_signatures": [
"image"
],
"output_signatures": [
"label"
],
"model_name": "inceptionv3",
"job_id": "inceptionv3_pytorch",
"max_batch_size": 128
} | 216 |
491 | <filename>source/unit_test/test_keras_nodes.h
// Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing permissions and limitations under
// the License.
//
// ╔════════════════════════════════════════════════════════════════════════════════════════╗
// ║──█████████╗───███████╗───████████╗───██╗──────██╗───███████╗───████████╗───████████╗───║
// ║──██╔══════╝──██╔════██╗──██╔════██╗──██║──────██║──██╔════██╗──██╔════██╗──██╔════██╗──║
// ║──████████╗───██║────██║──████████╔╝──██║──█╗──██║──█████████║──████████╔╝──██║────██║──║
// ║──██╔═════╝───██║────██║──██╔════██╗──██║█████╗██║──██╔════██║──██╔════██╗──██║────██║──║
// ║──██║─────────╚███████╔╝──██║────██║──╚████╔████╔╝──██║────██║──██║────██║──████████╔╝──║
// ║──╚═╝──────────╚══════╝───╚═╝────╚═╝───╚═══╝╚═══╝───╚═╝────╚═╝──╚═╝────╚═╝──╚═══════╝───║
// ╚════════════════════════════════════════════════════════════════════════════════════════╝
//
// Authors: <NAME> (<EMAIL>)
// Yzx (<EMAIL>)
// <NAME> (<EMAIL>)
// <NAME> (<EMAIL>)
#pragma once
#include <string>
#include "unit_test/unit_test_keras_helper.h"
class TestKerasNodes : public ::testing::Test {
protected:
void SetUp() override {
pb_path = std::string(tf_root_dir);
keras_h5_path = std::string(keras_root_dir);
// configuration
infer_mode = "float32";
threshold = 1e-3;
  }
  void TearDown() override {}
float threshold{1e-3};
std::string pb_path;
std::string keras_h5_path;
std::string infer_mode;
std::vector<std::string> output_names;
std::vector<std::pair<std::string, TF_Tensor*>> input_map;
};
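// Each TEST_F below loads a frozen TensorFlow graph (.pb) and the matching Keras
// model (.h5), feeds random inputs, and checks via TestKerasInference that the
// two runtimes agree within |threshold|.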
TEST_F(TestKerasNodes, AvgPool) {
pb_path = pb_path + "average_pooling.pb";
keras_h5_path = keras_h5_path + "average_pooling.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 29, 17, 3});
input_map.push_back({"input", input.get()});
output_names = {"average_pooling2d/AvgPool", "average_pooling2d_1/AvgPool"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, Activation) {
pb_path = pb_path + "activation.pb";
keras_h5_path = keras_h5_path + "activation.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 24, 24, 3});
input_map.push_back({"input", input.get()});
output_names = {"activation/Sigmoid", "activation_1/Relu", "activation_2/Tanh",
"activation_3/Elu"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, Arithmetic) {
pb_path = pb_path + "arithmetic.pb";
keras_h5_path = keras_h5_path + "arithmetic.h5";
const int batch_size = 1;
const auto input1 = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 24, 24, 3});
const auto input2 = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 24, 24, 3});
input_map.push_back({"input1", input1.get()});
input_map.push_back({"input2", input2.get()});
output_names = {"add/add", "subtract/sub", "multiply/mul"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, BatchNorm) {
pb_path = pb_path + "batch_norm.pb";
keras_h5_path = keras_h5_path + "batch_norm.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 24, 24, 3});
input_map.push_back({"input_1", input.get()});
output_names = {"batch_normalization/FusedBatchNormV3"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, Concatenate) {
pb_path = pb_path + "concatenate.pb";
keras_h5_path = keras_h5_path + "concatenate.h5";
const int batch_size = 1;
const auto input1 = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 23, 25, 3});
const auto input2 = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 23, 25, 3});
const auto input3 = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 23, 25, 3});
input_map.push_back({"input1", input1.get()});
input_map.push_back({"input2", input2.get()});
input_map.push_back({"input3", input3.get()});
output_names = {"concatenate/concat", "concatenate_1/concat", "concatenate_2/concat"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, Convolution) {
pb_path = pb_path + "conv2d.pb";
keras_h5_path = keras_h5_path + "conv2d.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 23, 29, 3});
input_map.push_back({"input_2", input.get()});
output_names = {"conv2d/BiasAdd"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, Conv2DActivation) {
pb_path = pb_path + "conv2d_activation.pb";
keras_h5_path = keras_h5_path + "conv2d_activation.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 23, 29, 3});
input_map.push_back({"input_1", input.get()});
output_names = {"conv2d/Relu"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, Cropping2D) {
pb_path = pb_path + "cropping2d.pb";
keras_h5_path = keras_h5_path + "cropping2d.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 23, 29, 3});
input_map.push_back({"input_1", input.get()});
output_names = {"cropping2d/strided_slice"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, DepthwiseConv2d) {
pb_path = pb_path + "depthwise_conv2d.pb";
keras_h5_path = keras_h5_path + "depthwise_conv2d.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 23, 29, 11});
input_map.push_back({"input_1", input.get()});
output_names = {"depthwise_conv2d/BiasAdd"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, Embedding) {
pb_path = pb_path + "embedding.pb";
keras_h5_path = keras_h5_path + "embedding.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomIntTensor<int>(TF_INT32, {batch_size, 10}, 1000);
input_map.push_back({"input1_1", input.get()});
output_names = {"embedding_4/embedding_lookup/Identity_1"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, Flatten) {
pb_path = pb_path + "flatten.pb";
keras_h5_path = keras_h5_path + "flatten.h5";
const int batch_size = 1;
auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 24, 24, 3});
input_map.push_back({"input_6", input.get()});
output_names = {"flatten/Reshape"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, FullyConnected) {
pb_path = pb_path + "dense.pb";
keras_h5_path = keras_h5_path + "dense.h5";
const int batch_size = 1;
auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 784});
input_map.push_back({"input_4", input.get()});
output_names = {"dense/BiasAdd"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, MaxPool) {
pb_path = pb_path + "max_pooling.pb";
keras_h5_path = keras_h5_path + "max_pooling.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 13, 33, 3});
input_map.push_back({"input", input.get()});
output_names = {"max_pooling2d/MaxPool", "max_pooling2d_1/MaxPool"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, Permute) {
pb_path = pb_path + "permute.pb";
keras_h5_path = keras_h5_path + "permute.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 12, 24, 3});
input_map.push_back({"input_8", input.get()});
output_names = {"permute/transpose"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, Reduce) {
pb_path = pb_path + "reduce.pb";
keras_h5_path = keras_h5_path + "reduce.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 24, 24, 3});
input_map.push_back({"input1", input.get()});
output_names = {"tf_op_layer_Mean/Mean", "tf_op_layer_Sum/Sum", "tf_op_layer_Max/Max",
"tf_op_layer_Min/Min"},
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, Softmax) {
pb_path = pb_path + "softmax.pb";
keras_h5_path = keras_h5_path + "softmax.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 12, 24, 3});
input_map.push_back({"input_11", input.get()});
output_names = {"softmax/Softmax"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, SeparableConv2d) {
pb_path = pb_path + "separable_conv2d.pb";
keras_h5_path = keras_h5_path + "separable_conv2d.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 23, 29, 11});
input_map.push_back({"input_1", input.get()});
output_names = {"separable_conv2d/BiasAdd", "separable_conv2d_1/separable_conv2d"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, ZeroPadding) {
pb_path = pb_path + "zero_padding_2d.pb";
keras_h5_path = keras_h5_path + "zero_padding_2d.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 12, 24, 3});
input_map.push_back({"input_1", input.get()});
output_names = {"zero_padding2d/Pad", "zero_padding2d_1/Pad", "zero_padding2d_2/Pad",
"zero_padding2d_3/Pad"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
#ifdef SUPPORT_RNN
TEST_F(TestKerasNodes, RNNTanh) {
pb_path = pb_path + "rnn_tanh.pb";
keras_h5_path = keras_h5_path + "rnn_tanh.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 128, 10});
input_map.push_back({"input_1", input.get()});
output_names = {"simple_rnn/strided_slice_3", "simple_rnn_1/transpose_1"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, RNNRelu) {
pb_path = pb_path + "rnn_relu.pb";
keras_h5_path = keras_h5_path + "rnn_relu.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 128, 10});
input_map.push_back({"input_1", input.get()});
output_names = {"simple_rnn/strided_slice_3", "simple_rnn_1/transpose_1"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, BiRNN) {
pb_path = pb_path + "bidirectional_rnn.pb";
keras_h5_path = keras_h5_path + "bidirectional_rnn.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 128, 10});
input_map.push_back({"input_1", input.get()});
output_names = {"bidirectional/concat", "bidirectional_1/concat"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, LSTM) {
pb_path = pb_path + "lstm.pb";
keras_h5_path = keras_h5_path + "lstm.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 128, 10});
input_map.push_back({"input_1", input.get()});
output_names = {"lstm/strided_slice_7", "lstm_1/transpose_1"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, BiLSTM) {
pb_path = pb_path + "bidirectional_lstm.pb";
keras_h5_path = keras_h5_path + "bidirectional_lstm.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 128, 10});
input_map.push_back({"input_1", input.get()});
output_names = {"bidirectional/concat", "bidirectional_1/concat"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, GRU) {
pb_path = pb_path + "gru.pb";
keras_h5_path = keras_h5_path + "gru.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 128, 10});
input_map.push_back({"input_1", input.get()});
output_names = {"gru/strided_slice_15", "gru_1/transpose_1"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
TEST_F(TestKerasNodes, BiGRU) {
pb_path = pb_path + "bidirectional_gru.pb";
keras_h5_path = keras_h5_path + "bidirectional_gru.h5";
const int batch_size = 1;
const auto input = fwd::tf_::CreateRandomTensor<float>(TF_FLOAT, {batch_size, 128, 10});
input_map.push_back({"input_1", input.get()});
output_names = {"bidirectional/concat", "bidirectional_1/concat"};
TestKerasInference(pb_path, keras_h5_path, input_map, output_names, batch_size, threshold);
}
#endif
| 6,073 |
class A:
    def __len__(self):
        return 0
print bool(A())
class B:
    def __len__(self):
        return False
print bool(B())
class C:
    def __nonzero__(self):
        return 0
print bool(C())
class D:
    def __nonzero__(self):
        return False
print bool(D())
class E:
    def __len__(self):
        return 1
print bool(E())
class F:
    def __nonzero__(self):
        return 1
print bool(F())
class G:
    def __nonzero__(self):
        return 0
    def __len__(self):
        return 1
print bool(G())
print bool(A)
print bool(B)
print bool(C)
print bool(D)
print bool(E)
print bool(F)
print bool(G) | 297 |
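# Python 2 truth testing consults __nonzero__ first and only falls back to
# __len__, so the instance checks print False, False, False, False, True,
# True, False (G's __nonzero__ beats its __len__). Class objects themselves
# are always truthy, so the last seven lines all print True.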
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* MemoryMeasurementFailedException.java
*
* Created on August 11, 2003, 3:43 PM
*/
package org.netbeans.junit;
/** Thrown when the MemoryMeasurement methods run into a problem
* @author <NAME>
*/
public class MemoryMeasurementFailedException extends java.lang.RuntimeException {
/**
     * Creates a new instance of <code>MemoryMeasurementFailedException</code> without a detail message.
*/
public MemoryMeasurementFailedException() {
}
/**
     * Constructs an instance of <code>MemoryMeasurementFailedException</code> with the specified detail message.
* @param msg the detail message.
*/
public MemoryMeasurementFailedException(String msg) {
super(msg);
}
    /** Constructs an instance of <code>MemoryMeasurementFailedException</code> with the specified detail message.
* @param cause Cause of the exception
* @param msg the detail message.
*/
public MemoryMeasurementFailedException(String msg, Throwable cause) {
        super(msg, cause);
}
}
| 522 |
443 | """Index Job.{status,date_created}
Revision ID: 4114cbbd0573
Revises: 5508859bed73
Create Date: 2014-01-06 11:28:15.691391
"""
# revision identifiers, used by Alembic.
revision = '4114cbbd0573'
down_revision = '5508859bed73'
from alembic import op
def upgrade():
op.create_index('idx_job_status_date_created', 'job', ['status', 'date_created'])
def downgrade():
op.drop_index('idx_job_status_date_created', 'job')
| 164 |
356 | package com.indeed.proctor.integration.sample;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import static com.indeed.proctor.integration.sample.ProctorSampleInterceptor.PROCTOR_GROUPS_ATTRIBUTE;
import static com.indeed.proctor.integration.sample.SampleProctorGroups.SAMPLE_1_TST;
@RestController
public class SampleController {
@RequestMapping("test")
@ResponseBody
public String getTest(final HttpServletRequest request) {
final SampleProctorGroups proctorGroups = (SampleProctorGroups) request.getAttribute(PROCTOR_GROUPS_ATTRIBUTE);
return SAMPLE_1_TST + proctorGroups.getSample1_tstValue();
}
}
| 279 |
343 | /* $Id$Revision: */
/* vim:set shiftwidth=4 ts=8: */
/*************************************************************************
* Copyright (c) 2011 AT&T Intellectual Property
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors: See CVS logs. Details at http://www.graphviz.org/
*************************************************************************/
#ifndef SEARCH_G_H
#define SEARCH_G_H
#include "structures.h"
typedef struct snode snode;
typedef struct sedge sedge;
struct snode {
int n_val, n_idx;
snode* n_dad;
sedge* n_edge;
short n_adj;
short save_n_adj;
struct cell* cells[2];
/* edges incident on this node
* -- stored as indices of the edges array in the graph
*/
int* adj_edge_list;
int index;
boolean isVert; /* true if node corresponds to vertical segment */
};
struct sedge {
double weight; /* weight of edge */
int cnt; /* paths using edge */
/* end-points of the edge
* -- stored as indices of the nodes vector in the graph
*/
int v1, v2;
};
typedef struct {
int nnodes, nedges;
int save_nnodes, save_nedges;
snode* nodes;
sedge* edges;
} sgraph;
extern void reset(sgraph*);
extern void gsave(sgraph*);
extern sgraph* createSGraph(int);
extern void freeSGraph (sgraph*);
extern void initSEdges (sgraph* g, int maxdeg);
extern int shortPath (sgraph* g, snode* from, snode* to);
extern snode* createSNode (sgraph*);
extern sedge* createSEdge (sgraph* g, snode* v0, snode* v1, double wt);
#endif
| 601 |
14,668 | <filename>chrome/browser/ui/web_applications/create_shortcut_browsertest.cc
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/files/file_path.h"
#include "base/memory/scoped_refptr.h"
#include "base/path_service.h"
#include "base/run_loop.h"
#include "base/test/metrics/histogram_tester.h"
#include "base/test/metrics/user_action_tester.h"
#include "chrome/app/chrome_command_ids.h"
#include "chrome/browser/banners/test_app_banner_manager_desktop.h"
#include "chrome/browser/extensions/chrome_test_extension_loader.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_commands.h"
#include "chrome/browser/ui/browser_dialogs.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "chrome/browser/ui/web_applications/test/web_app_browsertest_util.h"
#include "chrome/browser/ui/web_applications/web_app_controller_browsertest.h"
#include "chrome/browser/web_applications/test/web_app_test_observers.h"
#include "chrome/browser/web_applications/web_app_constants.h"
#include "chrome/browser/web_applications/web_app_id.h"
#include "chrome/browser/web_applications/web_app_install_manager.h"
#include "chrome/browser/web_applications/web_app_prefs_utils.h"
#include "chrome/browser/web_applications/web_app_provider.h"
#include "chrome/browser/web_applications/web_app_registrar.h"
#include "chrome/browser/web_applications/web_app_sync_bridge.h"
#include "chrome/common/chrome_paths.h"
#include "content/public/test/browser_test.h"
#include "content/public/test/browser_test_utils.h"
#include "extensions/common/extension.h"
#include "url/gurl.h"
namespace {
std::string LoadExtension(Profile* profile, const base::FilePath& path) {
extensions::ChromeTestExtensionLoader loader(profile);
scoped_refptr<const extensions::Extension> extension =
loader.LoadExtension(path);
EXPECT_TRUE(extension);
return extension->id();
}
} // namespace
namespace web_app {
class CreateShortcutBrowserTest : public WebAppControllerBrowserTest {
public:
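  // Drives the Create Shortcut flow end to end: auto-accepts the install
  // dialog, issues IDC_CREATE_SHORTCUT, and blocks until the install
  // observer reports the new app's id.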
AppId InstallShortcutAppForCurrentUrl(bool open_as_window = false) {
chrome::SetAutoAcceptWebAppDialogForTesting(true, open_as_window);
WebAppTestInstallObserver observer(profile());
observer.BeginListening();
CHECK(chrome::ExecuteCommand(browser(), IDC_CREATE_SHORTCUT));
AppId app_id = observer.Wait();
chrome::SetAutoAcceptWebAppDialogForTesting(false, false);
return app_id;
}
WebAppRegistrar& registrar() {
auto* provider = WebAppProvider::GetForTest(profile());
CHECK(provider);
return provider->registrar();
}
WebAppSyncBridge& sync_bridge() {
auto* provider = WebAppProvider::GetForTest(profile());
CHECK(provider);
return provider->sync_bridge();
}
};
IN_PROC_BROWSER_TEST_F(CreateShortcutBrowserTest,
CreateShortcutForInstallableSite) {
base::UserActionTester user_action_tester;
NavigateToURLAndWait(browser(), GetInstallableAppURL());
AppId app_id = InstallShortcutAppForCurrentUrl();
EXPECT_EQ(registrar().GetAppShortName(app_id), GetInstallableAppName());
// Shortcut apps to PWAs should launch in a tab.
EXPECT_EQ(registrar().GetAppUserDisplayMode(app_id), DisplayMode::kBrowser);
EXPECT_EQ(0, user_action_tester.GetActionCount("InstallWebAppFromMenu"));
EXPECT_EQ(1, user_action_tester.GetActionCount("CreateShortcut"));
}
IN_PROC_BROWSER_TEST_F(CreateShortcutBrowserTest, InstallSourceRecorded) {
ASSERT_TRUE(embedded_test_server()->Start());
// LatestWebAppInstallSource should be correctly set and reported to UMA for
// both installable and non-installable sites.
for (const GURL& url :
{GetInstallableAppURL(),
embedded_test_server()->GetURL(
"/web_apps/get_manifest.html?theme_color_only.json")}) {
base::HistogramTester histogram_tester;
NavigateToURLAndWait(browser(), url);
AppId app_id = InstallShortcutAppForCurrentUrl();
absl::optional<int> install_source =
GetWebAppInstallSource(profile()->GetPrefs(), app_id);
EXPECT_TRUE(install_source.has_value());
EXPECT_EQ(static_cast<webapps::WebappInstallSource>(*install_source),
webapps::WebappInstallSource::MENU_CREATE_SHORTCUT);
histogram_tester.ExpectUniqueSample(
"Webapp.Install.InstallEvent",
static_cast<int>(webapps::WebappInstallSource::MENU_CREATE_SHORTCUT),
1);
}
}
IN_PROC_BROWSER_TEST_F(CreateShortcutBrowserTest,
CanInstallOverTabShortcutApp) {
NavigateToURLAndWait(browser(), GetInstallableAppURL());
InstallShortcutAppForCurrentUrl();
Browser* new_browser =
NavigateInNewWindowAndAwaitInstallabilityCheck(GetInstallableAppURL());
EXPECT_EQ(GetAppMenuCommandState(IDC_CREATE_SHORTCUT, new_browser), kEnabled);
EXPECT_EQ(GetAppMenuCommandState(IDC_INSTALL_PWA, new_browser), kEnabled);
EXPECT_EQ(GetAppMenuCommandState(IDC_OPEN_IN_PWA_WINDOW, new_browser),
kNotPresent);
}
IN_PROC_BROWSER_TEST_F(CreateShortcutBrowserTest,
CannotInstallOverWindowShortcutApp) {
NavigateToURLAndWait(browser(), GetInstallableAppURL());
AppId app_id = InstallShortcutAppForCurrentUrl();
// Change launch container to open in window.
sync_bridge().SetAppUserDisplayMode(app_id, DisplayMode::kStandalone,
/*is_user_action=*/false);
Browser* new_browser =
NavigateInNewWindowAndAwaitInstallabilityCheck(GetInstallableAppURL());
EXPECT_EQ(GetAppMenuCommandState(IDC_CREATE_SHORTCUT, new_browser), kEnabled);
EXPECT_EQ(GetAppMenuCommandState(IDC_INSTALL_PWA, new_browser), kNotPresent);
EXPECT_EQ(GetAppMenuCommandState(IDC_OPEN_IN_PWA_WINDOW, new_browser),
kEnabled);
}
// Check that toolbar is not shown for shortcut apps within extensions pages.
// This simulates a case where the user has manually navigated to a page hosted
// within an extension, then added it as a shortcut app.
// Regression test for https://crbug.com/828233.
//
// TODO(crbug.com/1253234): Remove chrome-extension scheme for web apps.
IN_PROC_BROWSER_TEST_F(CreateShortcutBrowserTest,
ShouldShowCustomTabBarForExtensionPage) {
// This involves the creation of a regular (non-app) extension with a popup
// page, and the creation of a shortcut app created from the popup page URL
// (allowing the extension's popup page to be loaded in a window).
base::FilePath test_data_dir_;
base::PathService::Get(chrome::DIR_TEST_DATA, &test_data_dir_);
// Install the extension that has the popup page.
std::string extension_id =
LoadExtension(profile(), test_data_dir_.AppendASCII("extensions")
.AppendASCII("ui")
.AppendASCII("browser_action_popup"));
base::RunLoop().RunUntilIdle(); // Ensure the extension is fully loaded.
// Install the shortcut app that links to the extension's popup page.
const GURL popup_url("chrome-extension://" + extension_id + "/popup.html");
NavigateToURLAndWait(browser(), popup_url);
// TODO(crbug.com/1253234): IDC_CREATE_SHORTCUT command must become disabled.
ASSERT_TRUE(chrome::IsCommandEnabled(browser(), IDC_CREATE_SHORTCUT));
const AppId app_id = InstallShortcutAppForCurrentUrl();
Browser* const app_browser = LaunchWebAppBrowserAndWait(app_id);
CHECK(app_browser);
CHECK(app_browser != browser());
// Navigate to the app's launch page; the toolbar should not be visible,
// because extensions pages are secure.
NavigateAndCheckForToolbar(app_browser, popup_url, false);
}
// Tests that Create Shortcut doesn't timeout on a page that has a delayed
// iframe load. Context: crbug.com/1046883
IN_PROC_BROWSER_TEST_F(CreateShortcutBrowserTest, WorksAfterDelayedIFrameLoad) {
ASSERT_TRUE(embedded_test_server()->Start());
NavigateToURLAndWait(browser(), embedded_test_server()->GetURL(
"/favicon/page_with_favicon.html"));
// Append an iframe and wait for it to finish loading.
const char script[] = R"(
const iframe = document.createElement('iframe');
iframe.onload = _ => domAutomationController.send('success');
iframe.srcdoc = 'inner page';
document.body.appendChild(iframe);
)";
EXPECT_EQ(
content::EvalJs(browser()->tab_strip_model()->GetActiveWebContents(),
script, content::EXECUTE_SCRIPT_USE_MANUAL_REPLY)
.ExtractString(),
"success");
InstallShortcutAppForCurrentUrl();
}
// Tests that Create Shortcut on non-promotable sites still uses available
// manifest data.
IN_PROC_BROWSER_TEST_F(CreateShortcutBrowserTest,
UseNonPromotableManifestData) {
ASSERT_TRUE(embedded_test_server()->Start());
NavigateToURLAndWait(
browser(), embedded_test_server()->GetURL(
"/web_apps/get_manifest.html?theme_color_only.json"));
AppId app_id = InstallShortcutAppForCurrentUrl();
EXPECT_EQ(registrar().GetAppThemeColor(app_id),
SkColorSetRGB(0x12, 0x34, 0x56));
}
// Tests that Create Shortcut won't use manifest data that's invalid.
IN_PROC_BROWSER_TEST_F(CreateShortcutBrowserTest, IgnoreInvalidManifestData) {
ASSERT_TRUE(embedded_test_server()->Start());
GURL url = embedded_test_server()->GetURL(
"/web_apps/get_manifest.html?invalid_start_url.json");
NavigateToURLAndWait(browser(), url);
AppId app_id = InstallShortcutAppForCurrentUrl();
EXPECT_EQ(registrar().GetAppStartUrl(app_id), url);
}
IN_PROC_BROWSER_TEST_F(CreateShortcutBrowserTest,
CreateShortcutAgainOverwriteUserDisplayMode) {
base::UserActionTester user_action_tester;
NavigateToURLAndWait(browser(), GetInstallableAppURL());
AppId app_id = InstallShortcutAppForCurrentUrl();
EXPECT_EQ(registrar().GetAppShortName(app_id), GetInstallableAppName());
// Shortcut apps to PWAs should launch in a tab.
EXPECT_EQ(registrar().GetAppUserDisplayMode(app_id), DisplayMode::kBrowser);
// TODO(crbug.com/1275945): We need to wait a bit longer for the
// WebAppInstallTask to complete before starting another install.
// Move the install/update/uninstall events out of
// AppRegistrarObserver and into a WebAppInstallManagerObserver
// interface so they can be guaranteed to fire after the
// WebAppInstallTask's lifetime has ended.
base::RunLoop().RunUntilIdle();
InstallShortcutAppForCurrentUrl(/*open_as_window=*/true);
// Re-install with enabling open_as_window should update user display mode.
EXPECT_EQ(registrar().GetAppUserDisplayMode(app_id),
DisplayMode::kStandalone);
}
IN_PROC_BROWSER_TEST_F(CreateShortcutBrowserTest, OpenShortcutWindowOnlyOnce) {
base::UserActionTester user_action_tester;
NavigateToURLAndWait(browser(), GetInstallableAppURL());
WebAppTestInstallObserver observer(profile());
// The "Create shortcut" call is executed twice, but the dialog
// must be shown only once.
ASSERT_TRUE(chrome::ExecuteCommand(browser(), IDC_CREATE_SHORTCUT));
ASSERT_TRUE(chrome::ExecuteCommand(browser(), IDC_CREATE_SHORTCUT));
EXPECT_EQ(1u, provider().install_manager().GetInstallTaskCountForTesting());
}
} // namespace web_app
| 4,088 |
529 | <reponame>adamchainz/django-postgres-extra<filename>psqlextra/models/partitioned.py<gh_stars>100-1000
from django.db.models.base import ModelBase
from psqlextra.types import PostgresPartitioningMethod
from .base import PostgresModel
from .options import PostgresPartitionedModelOptions
class PostgresPartitionedModelMeta(ModelBase):
"""Custom meta class for :see:PostgresPartitionedModel.
This meta class extracts attributes from the inner
`PartitioningMeta` class and copies it onto a `_partitioning_meta`
attribute. This is similar to how Django's `_meta` works.
"""
default_method = PostgresPartitioningMethod.RANGE
default_key = []
def __new__(cls, name, bases, attrs, **kwargs):
new_class = super().__new__(cls, name, bases, attrs, **kwargs)
meta_class = attrs.pop("PartitioningMeta", None)
method = getattr(meta_class, "method", None)
key = getattr(meta_class, "key", None)
        partitioning_meta = PostgresPartitionedModelOptions(
method=method or cls.default_method, key=key or cls.default_key
)
        new_class.add_to_class("_partitioning_meta", partitioning_meta)
return new_class
class PostgresPartitionedModel(
PostgresModel, metaclass=PostgresPartitionedModelMeta
):
"""Base class for taking advantage of PostgreSQL's 11.x native support for
table partitioning."""
class Meta:
abstract = True
base_manager_name = "objects"
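# A minimal usage sketch (illustrative only; the `Customer` model and its
# field are hypothetical, not part of this module):
#
#     class Customer(PostgresPartitionedModel):
#         class PartitioningMeta:
#             method = PostgresPartitioningMethod.RANGE
#             key = ["created_at"]
#
# The metaclass then exposes these values as Customer._partitioning_meta,
# falling back to RANGE and an empty key when PartitioningMeta is omitted.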
| 537 |
1,273 | <reponame>sunboy0523/gatk
package org.broadinstitute.hellbender.engine.spark;
import org.broadinstitute.barclay.argparser.Argument;
import java.io.Serializable;
public class AssemblyRegionReadShardArgumentCollection implements Serializable {
private static final long serialVersionUID = 1L;
public static final int DEFAULT_READSHARD_SIZE = 5000;
public static final int DEFAULT_READSHARD_PADDING_SIZE = 100;
@Argument(fullName="read-shard-size", shortName="read-shard-size", doc = "Maximum size of each read shard, in bases. For good performance, this should be much larger than the maximum assembly region size.", optional = true)
public int readShardSize = DEFAULT_READSHARD_SIZE;
@Argument(fullName="read-shard-padding", shortName="read-shard-padding", doc = "Each read shard has this many bases of extra context on each side. Read shards must have as much or more padding than assembly regions.", optional = true)
public int readShardPadding = DEFAULT_READSHARD_PADDING_SIZE;
}
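// Worked example with the defaults above: a 5000-base shard spanning
// [10000, 15000) is processed together with 100 bases of context on each
// side, i.e. reads overlapping [9900, 15100).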
| 303 |
2,151 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_CORE_TESTING_CORE_UNIT_TEST_HELPER_H_
#define THIRD_PARTY_BLINK_RENDERER_CORE_TESTING_CORE_UNIT_TEST_HELPER_H_
#include <gtest/gtest.h>
#include <memory>
#include "third_party/blink/renderer/core/dom/document.h"
#include "third_party/blink/renderer/core/frame/local_frame_client.h"
#include "third_party/blink/renderer/core/frame/local_frame_view.h"
#include "third_party/blink/renderer/core/frame/settings.h"
#include "third_party/blink/renderer/core/html/html_element.h"
#include "third_party/blink/renderer/core/layout/layout_view.h"
#include "third_party/blink/renderer/core/loader/empty_clients.h"
#include "third_party/blink/renderer/core/testing/page_test_base.h"
#include "third_party/blink/renderer/platform/testing/use_mock_scrollbar_settings.h"
#include "third_party/blink/renderer/platform/wtf/allocator.h"
namespace blink {
class SingleChildLocalFrameClient final : public EmptyLocalFrameClient {
public:
static SingleChildLocalFrameClient* Create() {
return new SingleChildLocalFrameClient();
}
void Trace(blink::Visitor* visitor) override {
visitor->Trace(child_);
EmptyLocalFrameClient::Trace(visitor);
}
// LocalFrameClient overrides:
LocalFrame* FirstChild() const override { return child_.Get(); }
LocalFrame* CreateFrame(const AtomicString& name,
HTMLFrameOwnerElement*) override;
void DidDetachChild() { child_ = nullptr; }
private:
explicit SingleChildLocalFrameClient() = default;
Member<LocalFrame> child_;
};
class LocalFrameClientWithParent final : public EmptyLocalFrameClient {
public:
static LocalFrameClientWithParent* Create(LocalFrame* parent) {
return new LocalFrameClientWithParent(parent);
}
void Trace(blink::Visitor* visitor) override {
visitor->Trace(parent_);
EmptyLocalFrameClient::Trace(visitor);
}
// FrameClient overrides:
void Detached(FrameDetachType) override;
LocalFrame* Parent() const override { return parent_.Get(); }
private:
explicit LocalFrameClientWithParent(LocalFrame* parent) : parent_(parent) {}
Member<LocalFrame> parent_;
};
class RenderingTest : public PageTestBase, public UseMockScrollbarSettings {
USING_FAST_MALLOC(RenderingTest);
public:
virtual FrameSettingOverrideFunction SettingOverrider() const {
return nullptr;
}
virtual ChromeClient& GetChromeClient() const;
explicit RenderingTest(LocalFrameClient* = nullptr);
const Node* HitTest(int x, int y);
protected:
void SetUp() override;
void TearDown() override;
LayoutView& GetLayoutView() const {
return *GetDocument().View()->GetLayoutView();
}
LocalFrame& ChildFrame() {
return *ToLocalFrame(GetFrame().Tree().FirstChild());
}
Document& ChildDocument() { return *ChildFrame().GetDocument(); }
void SetChildFrameHTML(const String&);
// Both enables compositing and runs the document lifecycle.
void EnableCompositing() {
GetPage().GetSettings().SetAcceleratedCompositingEnabled(true);
GetDocument().View()->SetParentVisible(true);
GetDocument().View()->SetSelfVisible(true);
GetDocument().View()->UpdateAllLifecyclePhases();
}
LayoutObject* GetLayoutObjectByElementId(const char* id) const {
const auto* element = GetElementById(id);
return element ? element->GetLayoutObject() : nullptr;
}
private:
Persistent<LocalFrameClient> local_frame_client_;
};
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_CORE_TESTING_CORE_UNIT_TEST_HELPER_H_
| 1,212 |
679 | /**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_sc.hxx"
//------------------------------------------------------------------
#include "scitems.hxx"
#include <svx/drawitem.hxx>
#include <svx/fontwork.hxx>
#include <svx/svdotext.hxx>
#include <svx/xdef.hxx>
#include <sfx2/objsh.hxx>
#include <sfx2/viewfrm.hxx>
#include <svx/svdoashp.hxx>
#include "drawsh.hxx"
#include "drawview.hxx"
#include "viewdata.hxx"
#include "tabvwsh.hxx"
#include "sc.hrc"
//------------------------------------------------------------------
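// Enables the FontWork dialog and the XATTR_FORMTXT* items only while
// exactly one text-bearing draw object (and not a CustomShape) is selected;
// in every other case each Fontwork slot in rSet is disabled.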
void ScDrawShell::GetFormTextState(SfxItemSet& rSet)
{
const SdrObject* pObj = NULL;
SvxFontWorkDialog* pDlg = NULL;
ScDrawView* pDrView = pViewData->GetScDrawView();
const SdrMarkList& rMarkList = pDrView->GetMarkedObjectList();
sal_uInt16 nId = SvxFontWorkChildWindow::GetChildWindowId();
SfxViewFrame* pViewFrm = pViewData->GetViewShell()->GetViewFrame();
if ( pViewFrm->HasChildWindow(nId) )
pDlg = (SvxFontWorkDialog*)(pViewFrm->GetChildWindow(nId)->GetWindow());
if ( rMarkList.GetMarkCount() == 1 )
pObj = rMarkList.GetMark(0)->GetMarkedSdrObj();
const SdrTextObj* pTextObj = dynamic_cast< const SdrTextObj* >(pObj);
const bool bDeactivate(
!pObj ||
!pTextObj ||
!pTextObj->HasText() ||
dynamic_cast< const SdrObjCustomShape* >(pObj)); // #121538# no FontWork for CustomShapes
if(bDeactivate)
{
if ( pDlg )
pDlg->SetActive(sal_False);
rSet.DisableItem(XATTR_FORMTXTSTYLE);
rSet.DisableItem(XATTR_FORMTXTADJUST);
rSet.DisableItem(XATTR_FORMTXTDISTANCE);
rSet.DisableItem(XATTR_FORMTXTSTART);
rSet.DisableItem(XATTR_FORMTXTMIRROR);
rSet.DisableItem(XATTR_FORMTXTHIDEFORM);
rSet.DisableItem(XATTR_FORMTXTOUTLINE);
rSet.DisableItem(XATTR_FORMTXTSHADOW);
rSet.DisableItem(XATTR_FORMTXTSHDWCOLOR);
rSet.DisableItem(XATTR_FORMTXTSHDWXVAL);
rSet.DisableItem(XATTR_FORMTXTSHDWYVAL);
}
else
{
if ( pDlg )
{
SfxObjectShell* pDocSh = SfxObjectShell::Current();
if ( pDocSh )
{
const SfxPoolItem* pItem = pDocSh->GetItem( SID_COLOR_TABLE );
XColorListSharedPtr aColorTable;
if ( pItem )
aColorTable = static_cast< const SvxColorTableItem* >(pItem)->GetColorTable();
pDlg->SetActive();
if ( aColorTable.get() )
pDlg->SetColorTable( aColorTable );
else
{ DBG_ERROR( "ColorList not found :-/" ); }
}
}
SfxItemSet aViewAttr(pDrView->GetModel()->GetItemPool());
pDrView->GetAttributes(aViewAttr);
rSet.Set(aViewAttr);
}
}
| 1,335 |
6,036 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#include "bias_gelu.h"
#include "core/framework/tensorprotoutils.h"
#include "onnx/defs/tensor_proto_util.h"
#include "core/common/safeint.h"
#include "core/framework/tensor.h"
#include "core/providers/common.h"
namespace onnxruntime {
namespace contrib {
namespace bias_gelu_helper {
Status CheckInputs(const OpKernelContext* context) {
const Tensor* input = context->Input<Tensor>(0);
const Tensor* bias = context->Input<Tensor>(1);
const auto& input_dims = input->Shape().GetDims();
if (input_dims.size() < 1) {
return ORT_MAKE_STATUS(ONNXRUNTIME, INVALID_ARGUMENT,
"Input 0 is expected to have 1 or more dimensions, got ", input_dims.size());
}
if (nullptr != bias) {
const auto& bias_dims = bias->Shape().GetDims();
if (bias_dims.size() != 1) {
return ORT_MAKE_STATUS(ONNXRUNTIME, INVALID_ARGUMENT,
"Input 1 is expected to have 1 dimensions, got ", bias_dims.size());
}
if (bias_dims[0] != input_dims[input_dims.size() - 1]) {
return ORT_MAKE_STATUS(ONNXRUNTIME, INVALID_ARGUMENT,
"Input 1 dimension 0 should have same length as the last dimension of input 0");
}
}
return Status::OK();
}
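// Shape example for the checks above: with input 0 of shape
// {batch, seq_len, hidden}, the optional bias must be 1-D of shape {hidden},
// matching the last input dimension so it can be added along that axis.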
} // namespace bias_gelu_helper
} // namespace contrib
} // namespace onnxruntime
| 588 |
4,538 | [ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
"ccccccccccccccccccccccc",
"dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] | 82 |
4,551 | <reponame>quipper/robolectric
package org.robolectric.shadows;
import android.app.Application;
import android.app.LoadedApk;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.res.Resources;
import android.os.Build.VERSION_CODES;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.util.reflector.Accessor;
import org.robolectric.util.reflector.ForType;
@Implements(value = LoadedApk.class, isInAndroidSdk = false)
public class ShadowLoadedApk {
@Implementation
public ClassLoader getClassLoader() {
return this.getClass().getClassLoader();
}
@Implementation(minSdk = VERSION_CODES.O)
public ClassLoader getSplitClassLoader(String splitName) throws NameNotFoundException {
return this.getClass().getClassLoader();
}
/** Accessor interface for {@link LoadedApk}'s internals. */
@ForType(LoadedApk.class)
public interface _LoadedApk_ {
@Accessor("mApplication")
void setApplication(Application application);
@Accessor("mResources")
void setResources(Resources resources);
}
}
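// Typical use of the accessor interface (sketch; `loadedApk` and
// `application` are hypothetical locals):
//   org.robolectric.util.reflector.Reflector.reflector(_LoadedApk_.class, loadedApk)
//       .setApplication(application);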
| 363 |
573 | import os
from dataclasses import dataclass
from typing import Union, Type
import pytest
from sentinelhub import AwsTileRequest, AwsProductRequest, read_data, write_data, DataCollection, AwsConstants
from sentinelhub.testing_utils import get_input_folder
INPUT_FOLDER = get_input_folder(__file__)
@dataclass
class SafeTestCase:
name: str
constructor: Union[Type[AwsTileRequest], Type[AwsProductRequest]]
args: list
kwargs: dict
def get_filename(self):
return os.path.join(INPUT_FOLDER, f'{self.name}.csv')
def get_request_data(self, return_request=False):
request = self.constructor(*self.args, **self.kwargs)
request_data = [(req.url, req.filename[:]) for req in request.get_download_list()]
if return_request:
return request, request_data
return request_data
def load_truth(self):
return [tuple(item) for item in read_data(self.get_filename())]
def save_test_case(self):
""" Use this method only to create new tests
"""
request, request_data = self.get_request_data(return_request=True)
write_data(self.get_filename(), request_data)
request.save_data()
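# Each SafeTestCase round-trips through a truth file: get_request_data()
# rebuilds the (url, filename) download list for the AWS request, while
# load_truth() reads the list previously saved to <name>.csv in INPUT_FOLDER;
# test_safe_struct below asserts the two match.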
TEST_CASES = [
SafeTestCase(
'L1C_02.01', AwsProductRequest,
['S2A_OPER_PRD_MSIL1C_PDMC_20151218T020842_R115_V20151217T224602_20151217T224602'],
dict(
bands=AwsConstants.S2_L1C_BANDS, metafiles=AwsConstants.S2_L1C_METAFILES, tile_list=['T59HNA'],
safe_format=True, data_folder=INPUT_FOLDER
)
),
SafeTestCase(
'L1C_02.01_tile', AwsTileRequest, [],
dict(
tile='29KQB', time='2016-04-12', aws_index=None, data_collection=DataCollection.SENTINEL2_L1C,
safe_format=True, data_folder=INPUT_FOLDER
)
),
SafeTestCase(
'L1C_02.02', AwsProductRequest,
['S2A_OPER_PRD_MSIL1C_PDMC_20160606T232310_R121_V20160526T084351_20160526T084351.SAFE'],
dict(tile_list=['34HCF'], safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L1C_02.04_old', AwsProductRequest,
['S2A_OPER_PRD_MSIL1C_PDMC_20160910T174323_R071_V20160701T204642_20160701T204643'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L1C_02.04', AwsProductRequest, ['S2A_MSIL1C_20170413T104021_N0204_R008_T31SCA_20170413T104021'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L1C_02.05', AwsProductRequest, ['S2A_MSIL1C_20171012T112111_N0205_R037_T29SQC_20171012T112713'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L1C_02.06', AwsProductRequest, ['S2A_MSIL1C_20180331T212521_N0206_R043_T07WFR_20180401T005612'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L1C_02.06_2', AwsProductRequest, ['S2A_MSIL1C_20181004T175151_N0206_R141_T18XVM_20190219T160358'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L1C_02.07', AwsProductRequest, ['S2A_MSIL1C_20181119T031011_N0207_R075_T50TLK_20181119T061056'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L1C_02.07_2', AwsProductRequest, ['S2A_MSIL1C_20190129T143751_N0207_R096_T20LLK_20190225T132350'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L2A_02.01', AwsProductRequest,
['S2A_USER_PRD_MSIL2A_PDMC_20160310T041843_R138_V20160308T131142_20160308T131142'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase( # L2A_02.04 is the same
'L2A_02.05', AwsProductRequest, ['S2A_MSIL2A_20170827T105651_N0205_R094_T31WFN_20170827T105652'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L2A_02.06', AwsProductRequest, ['S2B_MSIL2A_20180216T102059_N0206_R065_T35VLL_20180216T122659'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L2A_02.07', AwsProductRequest, ['S2A_MSIL2A_20180402T151801_N0207_R068_T33XWJ_20180402T202222'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L2A_02.08', AwsProductRequest, ['S2A_MSIL2A_20181005T104021_N0208_R008_T34WEU_20181007T220806'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L2A_02.09', AwsProductRequest, ['S2B_MSIL2A_20181029T093109_N0209_R136_T35UMQ_20181029T122414'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L2A_02.10', AwsProductRequest, ['S2B_MSIL2A_20181115T110319_N0210_R094_T32VLJ_20181115T142501'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L2A_02.11', AwsProductRequest, ['S2B_MSIL2A_20190310T235739_N0211_R030_T57MVM_20190311T013927'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
SafeTestCase(
'L2A_00.01', AwsProductRequest, ['S2A_MSIL2A_20170414T101021_N0001_R022_T33UUQ_20190508T121145'],
dict(safe_format=True, data_folder=INPUT_FOLDER)
),
]
# Uncomment the following only when creating new test cases
# for test_case in TEST_CASES:
# test_case.save_test_case()
@pytest.mark.aws_integration
@pytest.mark.parametrize('test_case', TEST_CASES)
def test_safe_struct(test_case):
true_safe = test_case.load_truth()
req_safe = test_case.get_request_data()
assert true_safe == req_safe
| 2,724 |