text
stringlengths
2
100k
meta
dict
package com.jeesuite.common.util; import java.text.SimpleDateFormat; import java.time.Clock; import java.time.DayOfWeek; import java.time.Instant; import java.time.LocalDate; import java.time.ZoneId; import java.util.Calendar; import java.util.Date; import java.util.Locale; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateFormatUtils; /** * 日期处理工具类 * @description <br> * @author <a href="mailto:[email protected]">vakin</a> * @date 2012年12月20日 */ public class DateUtils extends org.apache.commons.lang3.time.DateUtils{ private static final String[] weekDays = { "sunday","monday", "tuesday", "wednesday", "thursday", "friday", "saturday"}; public static final String DATE_PATTERN = "yyyy-MM-dd"; public static final String TIMESTAMP_PATTERN = "yyyy-MM-dd HH:mm:ss"; public static final String TIME_PATTERN = "HH:mm"; /** * 解析日期<br> * 支持格式:<br> * generate by: vakin jiang at 2012-3-1 * * @param dateStr * @return */ public static Date parseDate(String dateStr) { SimpleDateFormat format = null; if (StringUtils.isBlank(dateStr)) return null; String _dateStr = dateStr.trim(); try { if (_dateStr.matches("\\d{1,2}[A-Z]{3}")) { _dateStr = _dateStr + (Calendar.getInstance().get(Calendar.YEAR) - 2000); } // 01OCT12 if (_dateStr.matches("\\d{1,2}[A-Z]{3}\\d{2}")) { format = new SimpleDateFormat("ddMMMyy", Locale.ENGLISH); } else if (_dateStr.matches("\\d{1,2}[A-Z]{3}\\d{4}.*")) {// 01OCT2012 // ,01OCT2012 // 1224,01OCT2012 // 12:24 _dateStr = _dateStr.replaceAll("[^0-9A-Z]", "") .concat("000000").substring(0, 15); format = new SimpleDateFormat("ddMMMyyyyHHmmss", Locale.ENGLISH); } else { StringBuffer sb = new StringBuffer(_dateStr); String[] tempArr = _dateStr.split("\\s+"); tempArr = tempArr[0].split("-|\\/"); if (tempArr.length == 3) { if (tempArr[1].length() == 1) { sb.insert(5, "0"); } if (tempArr[2].length() == 1) { sb.insert(8, "0"); } } _dateStr = sb.append("000000").toString().replaceAll("[^0-9]", "").substring(0, 14); if (_dateStr.matches("\\d{14}")) { format = new SimpleDateFormat("yyyyMMddHHmmss"); } } Date date = format.parse(_dateStr); return date; } catch (Exception e) { throw new RuntimeException("无法解析日期字符[" + dateStr + "]"); } } /** * 解析日期字符串转化成日期格式<br> * generate by: vakin jiang at 2012-3-1 * * @param dateStr * @param pattern * @return */ public static Date parseDate(String dateStr, String pattern) { try { SimpleDateFormat format = null; if (StringUtils.isBlank(dateStr)) return null; if (StringUtils.isNotBlank(pattern)) { format = new SimpleDateFormat(pattern); return format.parse(dateStr); } return parseDate(dateStr); } catch (Exception e) { throw new RuntimeException("无法解析日期字符[" + dateStr + "]"); } } /** * 获取一天开始时间<br> * generate by: vakin jiang at 2011-12-23 * * @param date * @return */ public static Date getDayBegin(Date date) { String format = DateFormatUtils.format(date, DATE_PATTERN); return parseDate(format.concat(" 00:00:00")); } /** * 获取一天结束时间<br> * generate by: vakin jiang at 2011-12-23 * * @param date * @return */ public static Date getDayEnd(Date date) { String format = DateFormatUtils.format(date, DATE_PATTERN); return parseDate(format.concat(" 23:59:59")); } /** * 时间戳格式转换为日期(年月日)格式<br> * generate by: vakin jiang at 2011-12-23 * * @param date * @return */ public static Date timestamp2Date(Date date) { return formatDate(date, DATE_PATTERN); } /** * 格式化日期格式为:ddMMMyy<br> * generate by: vakin jiang * at 2012-10-17 * @param dateStr * @return */ public static String format2ddMMMyy(Date date){ SimpleDateFormat format = new SimpleDateFormat("ddMMMyy", Locale.ENGLISH); return format.format(date).toUpperCase(); } /** * 格式化日期格式为:ddMMMyy<br> * generate by: vakin jiang * at 2012-10-17 * @param dateStr * @return */ public static String format2ddMMMyy(String dateStr){ SimpleDateFormat format = new SimpleDateFormat("ddMMMyy", Locale.ENGLISH); return format.format(DateUtils.parseDate(dateStr)).toUpperCase(); } /** * 格式化日期字符串<br> * generate by: vakin jiang at 2012-3-7 * * @param dateStr * @param patterns * @return */ public static String formatDateStr(String dateStr, String... patterns) { String pattern = TIMESTAMP_PATTERN; if (patterns != null && patterns.length > 0 && StringUtils.isNotBlank(patterns[0])) { pattern = patterns[0]; } return DateFormatUtils.format(parseDate(dateStr), pattern); } /** * 格式化日期为日期字符串<br> * generate by: vakin jiang at 2012-3-7 * * @param orig * @param patterns * @return */ public static String format(Date date, String... patterns) { if (date == null) return ""; String pattern = TIMESTAMP_PATTERN; if (patterns != null && patterns.length > 0 && StringUtils.isNotBlank(patterns[0])) { pattern = patterns[0]; } return DateFormatUtils.format(date, pattern); } public static String format2DateStr(Date date) { return format(date, DATE_PATTERN); } /** * 格式化日期为指定格式<br> * generate by: vakin jiang at 2012-3-7 * * @param orig * @param patterns * @return */ public static Date formatDate(Date orig, String... patterns) { String pattern = TIMESTAMP_PATTERN; if (patterns != null && patterns.length > 0 && StringUtils.isNotBlank(patterns[0])) { pattern = patterns[0]; } return parseDate(DateFormatUtils.format(orig, pattern)); } /** * 比较两个时间相差多少秒 * */ public static long getDiffSeconds(Date d1, Date d2) { return Math.abs((d2.getTime() - d1.getTime()) / 1000); } /** * 比较两个时间相差多少分钟 * */ public static long getDiffMinutes(Date d1, Date d2) { long diffSeconds = getDiffSeconds(d1, d2); return diffSeconds/60; } /** * 比较两个时间相差多少天 * */ public static long getDiffDay(Date d1, Date d2) { long between = Math.abs((d2.getTime() - d1.getTime()) / 1000); long day = between / 60 / 60 / 24; return (long) Math.floor(day); } /** * 返回传入时间月份的最后一天 * */ public static Date lastDayOfMonth(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); int value = cal.getActualMaximum(Calendar.DAY_OF_MONTH); cal.set(Calendar.DAY_OF_MONTH, value); return cal.getTime(); } /** * 返回传入时间月份的第一天 * */ public static Date firstDayOfMonth(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); int value = cal.getActualMinimum(Calendar.DAY_OF_MONTH); cal.set(Calendar.DAY_OF_MONTH, value); return cal.getTime(); } /** * 获取两个时间相差月份 * */ public static int getDiffMonth(Date start, Date end) { Calendar startCalendar = Calendar.getInstance(); startCalendar.setTime(start); Calendar endCalendar = Calendar.getInstance(); endCalendar.setTime(end); return (endCalendar.get(Calendar.YEAR) - startCalendar.get(Calendar.YEAR)) * 12 + endCalendar.get(Calendar.MONTH) - startCalendar.get(Calendar.MONTH); } /** * 计算并格式化消耗时间<br> * generate by: vakin jiang at 2012-2-16 * * @param startPoint * @return */ public static String formatTimeConsumingInfo(long startPoint) { StringBuffer buff = new StringBuffer(); long totalMilTimes = System.currentTimeMillis() - startPoint; int hour = (int) Math.floor(totalMilTimes / (60*60*1000)); int mi = (int) Math.floor(totalMilTimes / (60*1000)); int se = (int) Math.floor((totalMilTimes - 60000 * mi) / 1000); if(hour > 0)buff.append(hour).append("小时"); if(mi > 0)buff.append(mi).append("分"); if(hour == 0)buff.append(se).append("秒"); return buff.toString(); } /** * 判断是否为闰年<br> * generate by: zengqw at 2012-9-26 */ public static boolean isLeapYear(int year) { return (year % 4 == 0 && year % 100 != 0) || (year % 400 == 0); } public static Date add(Date date, int calendarField, int amount) { if (date == null) { throw new IllegalArgumentException("The date must not be null"); } Calendar c = Calendar.getInstance(); c.setTime(date); c.add(calendarField, amount); return c.getTime(); } public static String getDateWeekEnName(Date date){ Calendar cal = Calendar.getInstance(); cal.setTime(date); int w = cal.get(Calendar.DAY_OF_WEEK) - 1; if (w < 0) { w = 0; } return weekDays[w]; } public static Date firstDayOfWeek(Date date) { ZoneId zoneId = ZoneId.of("Asia/Shanghai"); long timeMills = LocalDate.now(Clock.fixed(Instant.ofEpochMilli(date.getTime()),zoneId)) .with(DayOfWeek.MONDAY) .atStartOfDay(zoneId) .toInstant() .toEpochMilli(); return new Date(timeMills); } public static Date lastDayOfWeek(Date date) { ZoneId zoneId = ZoneId.of("Asia/Shanghai"); long timeMills = LocalDate.now(Clock.fixed(Instant.ofEpochMilli(date.getTime()),zoneId)) .with(DayOfWeek.SUNDAY) .atStartOfDay(zoneId) .toInstant() .toEpochMilli(); return new Date(timeMills); } }
{ "pile_set_name": "Github" }
// // Context.m // MetalRenderer // // Created by Stuart Carnie on 6/9/18. // Copyright © 2018 Stuart Carnie. All rights reserved. // #import "Context.h" #import "Filter.h" #import <QuartzCore/QuartzCore.h> #import "metal_common.h" @interface BufferNode : NSObject @property (nonatomic, readonly) id<MTLBuffer> src; @property (nonatomic, readwrite) NSUInteger allocated; @property (nonatomic, readwrite) BufferNode *next; @end @interface BufferChain : NSObject - (instancetype)initWithDevice:(id<MTLDevice>)device blockLen:(NSUInteger)blockLen; - (bool)allocRange:(BufferRange *)range length:(NSUInteger)length; - (void)commitRanges; - (void)discard; @end @interface Texture() @property (nonatomic, readwrite) id<MTLTexture> texture; @property (nonatomic, readwrite) id<MTLSamplerState> sampler; @end @interface Context() - (bool)_initConversionFilters; @end @implementation Context { dispatch_semaphore_t _inflightSemaphore; id<MTLCommandQueue> _commandQueue; CAMetalLayer *_layer; id<CAMetalDrawable> _drawable; video_viewport_t _viewport; id<MTLSamplerState> _samplers[TEXTURE_FILTER_MIPMAP_NEAREST + 1]; Filter *_filters[RPixelFormatCount]; // convert to bgra8888 // main render pass state id<MTLRenderCommandEncoder> _rce; id<MTLCommandBuffer> _blitCommandBuffer; NSUInteger _currentChain; BufferChain *_chain[CHAIN_LENGTH]; MTLClearColor _clearColor; id<MTLRenderPipelineState> _states[GFX_MAX_SHADERS][2]; id<MTLRenderPipelineState> _clearState; bool _captureEnabled; id<MTLTexture> _backBuffer; unsigned _rotation; matrix_float4x4 _mvp_no_rot; matrix_float4x4 _mvp; Uniforms _uniforms; Uniforms _uniformsNoRotate; } - (instancetype)initWithDevice:(id<MTLDevice>)d layer:(CAMetalLayer *)layer library:(id<MTLLibrary>)l { if (self = [super init]) { _inflightSemaphore = dispatch_semaphore_create(MAX_INFLIGHT); _device = d; _layer = layer; #if TARGET_OS_OSX _layer.framebufferOnly = NO; _layer.displaySyncEnabled = YES; #endif _library = l; _commandQueue = [_device newCommandQueue]; _clearColor = MTLClearColorMake(0, 0, 0, 1); _uniforms.projectionMatrix = matrix_proj_ortho(0, 1, 0, 1); _rotation = 0; [self setRotation:0]; _mvp_no_rot = matrix_proj_ortho(0, 1, 0, 1); _mvp = matrix_proj_ortho(0, 1, 0, 1); { MTLSamplerDescriptor *sd = [MTLSamplerDescriptor new]; sd.label = @"NEAREST"; _samplers[TEXTURE_FILTER_NEAREST] = [d newSamplerStateWithDescriptor:sd]; sd.mipFilter = MTLSamplerMipFilterNearest; sd.label = @"MIPMAP_NEAREST"; _samplers[TEXTURE_FILTER_MIPMAP_NEAREST] = [d newSamplerStateWithDescriptor:sd]; sd.mipFilter = MTLSamplerMipFilterNotMipmapped; sd.minFilter = MTLSamplerMinMagFilterLinear; sd.magFilter = MTLSamplerMinMagFilterLinear; sd.label = @"LINEAR"; _samplers[TEXTURE_FILTER_LINEAR] = [d newSamplerStateWithDescriptor:sd]; sd.mipFilter = MTLSamplerMipFilterLinear; sd.label = @"MIPMAP_LINEAR"; _samplers[TEXTURE_FILTER_MIPMAP_LINEAR] = [d newSamplerStateWithDescriptor:sd]; } if (![self _initConversionFilters]) return nil; if (![self _initClearState]) return nil; if (![self _initMenuStates]) return nil; for (int i = 0; i < CHAIN_LENGTH; i++) { _chain[i] = [[BufferChain alloc] initWithDevice:_device blockLen:65536]; } } return self; } - (video_viewport_t *)viewport { return &_viewport; } - (void)setViewport:(video_viewport_t *)viewport { _viewport = *viewport; _uniforms.outputSize = simd_make_float2(_viewport.full_width, _viewport.full_height); } - (Uniforms *)uniforms { return &_uniforms; } - (void)setRotation:(unsigned)rotation { _rotation = 270 * rotation; /* Calculate projection. */ _mvp_no_rot = matrix_proj_ortho(0, 1, 0, 1); bool allow_rotate = true; if (!allow_rotate) { _mvp = _mvp_no_rot; return; } matrix_float4x4 rot = matrix_rotate_z((float)(M_PI * _rotation / 180.0f)); _mvp = simd_mul(rot, _mvp_no_rot); _uniforms.projectionMatrix = _mvp; _uniformsNoRotate.projectionMatrix = _mvp_no_rot; } - (void)setDisplaySyncEnabled:(bool)displaySyncEnabled { #if TARGET_OS_OSX _layer.displaySyncEnabled = displaySyncEnabled; #endif } - (bool)displaySyncEnabled { #if TARGET_OS_OSX return _layer.displaySyncEnabled; #else return NO; #endif } #pragma mark - shaders - (id<MTLRenderPipelineState>)getStockShader:(int)index blend:(bool)blend { assert(index > 0 && index < GFX_MAX_SHADERS); switch (index) { case VIDEO_SHADER_STOCK_BLEND: case VIDEO_SHADER_MENU: case VIDEO_SHADER_MENU_2: case VIDEO_SHADER_MENU_3: case VIDEO_SHADER_MENU_4: case VIDEO_SHADER_MENU_5: case VIDEO_SHADER_MENU_6: break; default: index = VIDEO_SHADER_STOCK_BLEND; break; } return _states[index][blend ? 1 : 0]; } - (MTLVertexDescriptor *)_spriteVertexDescriptor { MTLVertexDescriptor *vd = [MTLVertexDescriptor new]; vd.attributes[0].offset = 0; vd.attributes[0].format = MTLVertexFormatFloat2; vd.attributes[1].offset = offsetof(SpriteVertex, texCoord); vd.attributes[1].format = MTLVertexFormatFloat2; vd.attributes[2].offset = offsetof(SpriteVertex, color); vd.attributes[2].format = MTLVertexFormatFloat4; vd.layouts[0].stride = sizeof(SpriteVertex); return vd; } - (bool)_initClearState { MTLVertexDescriptor *vd = [self _spriteVertexDescriptor]; MTLRenderPipelineDescriptor *psd = [MTLRenderPipelineDescriptor new]; psd.label = @"clear_state"; MTLRenderPipelineColorAttachmentDescriptor *ca = psd.colorAttachments[0]; ca.pixelFormat = _layer.pixelFormat; psd.vertexDescriptor = vd; psd.vertexFunction = [_library newFunctionWithName:@"stock_vertex"]; psd.fragmentFunction = [_library newFunctionWithName:@"stock_fragment_color"]; NSError *err; _clearState = [_device newRenderPipelineStateWithDescriptor:psd error:&err]; if (err != nil) { RARCH_ERR("[Metal]: error creating clear pipeline state %s\n", err.localizedDescription.UTF8String); return NO; } return YES; } - (bool)_initMenuStates { MTLVertexDescriptor *vd = [self _spriteVertexDescriptor]; MTLRenderPipelineDescriptor *psd = [MTLRenderPipelineDescriptor new]; psd.label = @"stock"; MTLRenderPipelineColorAttachmentDescriptor *ca = psd.colorAttachments[0]; ca.pixelFormat = _layer.pixelFormat; ca.blendingEnabled = NO; ca.sourceRGBBlendFactor = MTLBlendFactorSourceAlpha; ca.destinationRGBBlendFactor = MTLBlendFactorOneMinusSourceAlpha; ca.sourceAlphaBlendFactor = MTLBlendFactorSourceAlpha; ca.destinationAlphaBlendFactor = MTLBlendFactorOneMinusSourceAlpha; psd.sampleCount = 1; psd.vertexDescriptor = vd; psd.vertexFunction = [_library newFunctionWithName:@"stock_vertex"]; psd.fragmentFunction = [_library newFunctionWithName:@"stock_fragment"]; NSError *err; _states[VIDEO_SHADER_STOCK_BLEND][0] = [_device newRenderPipelineStateWithDescriptor:psd error:&err]; if (err != nil) { RARCH_ERR("[Metal]: error creating pipeline state %s\n", err.localizedDescription.UTF8String); return NO; } psd.label = @"stock_blend"; ca.blendingEnabled = YES; _states[VIDEO_SHADER_STOCK_BLEND][1] = [_device newRenderPipelineStateWithDescriptor:psd error:&err]; if (err != nil) { RARCH_ERR("[Metal]: error creating pipeline state %s\n", err.localizedDescription.UTF8String); return NO; } MTLFunctionConstantValues *vals; psd.label = @"snow_simple"; ca.blendingEnabled = YES; { vals = [MTLFunctionConstantValues new]; float values[3] = { 1.25f, // baseScale 0.50f, // density 0.15f, // speed }; [vals setConstantValue:&values[0] type:MTLDataTypeFloat withName:@"snowBaseScale"]; [vals setConstantValue:&values[1] type:MTLDataTypeFloat withName:@"snowDensity"]; [vals setConstantValue:&values[2] type:MTLDataTypeFloat withName:@"snowSpeed"]; } psd.fragmentFunction = [_library newFunctionWithName:@"snow_fragment" constantValues:vals error:&err]; _states[VIDEO_SHADER_MENU_3][1] = [_device newRenderPipelineStateWithDescriptor:psd error:&err]; if (err != nil) { RARCH_ERR("[Metal]: error creating pipeline state %s\n", err.localizedDescription.UTF8String); return NO; } psd.label = @"snow"; ca.blendingEnabled = YES; { vals = [MTLFunctionConstantValues new]; float values[3] = { 3.50f, // baseScale 0.70f, // density 0.25f, // speed }; [vals setConstantValue:&values[0] type:MTLDataTypeFloat withName:@"snowBaseScale"]; [vals setConstantValue:&values[1] type:MTLDataTypeFloat withName:@"snowDensity"]; [vals setConstantValue:&values[2] type:MTLDataTypeFloat withName:@"snowSpeed"]; } psd.fragmentFunction = [_library newFunctionWithName:@"snow_fragment" constantValues:vals error:&err]; _states[VIDEO_SHADER_MENU_4][1] = [_device newRenderPipelineStateWithDescriptor:psd error:&err]; if (err != nil) { RARCH_ERR("[Metal]: error creating pipeline state %s\n", err.localizedDescription.UTF8String); return NO; } psd.label = @"bokeh"; ca.blendingEnabled = YES; psd.fragmentFunction = [_library newFunctionWithName:@"bokeh_fragment"]; _states[VIDEO_SHADER_MENU_5][1] = [_device newRenderPipelineStateWithDescriptor:psd error:&err]; if (err != nil) { RARCH_ERR("[Metal]: error creating pipeline state %s\n", err.localizedDescription.UTF8String); return NO; } psd.label = @"snowflake"; ca.blendingEnabled = YES; psd.fragmentFunction = [_library newFunctionWithName:@"snowflake_fragment"]; _states[VIDEO_SHADER_MENU_6][1] = [_device newRenderPipelineStateWithDescriptor:psd error:&err]; if (err != nil) { RARCH_ERR("[Metal]: error creating pipeline state %s\n", err.localizedDescription.UTF8String); return NO; } psd.label = @"ribbon"; ca.blendingEnabled = NO; psd.vertexFunction = [_library newFunctionWithName:@"ribbon_vertex"]; psd.fragmentFunction = [_library newFunctionWithName:@"ribbon_fragment"]; _states[VIDEO_SHADER_MENU][0] = [_device newRenderPipelineStateWithDescriptor:psd error:&err]; if (err != nil) { RARCH_ERR("[Metal]: error creating pipeline state %s\n", err.localizedDescription.UTF8String); return NO; } psd.label = @"ribbon_blend"; ca.blendingEnabled = YES; ca.sourceRGBBlendFactor = MTLBlendFactorOne; ca.destinationRGBBlendFactor = MTLBlendFactorOne; _states[VIDEO_SHADER_MENU][1] = [_device newRenderPipelineStateWithDescriptor:psd error:&err]; if (err != nil) { RARCH_ERR("[Metal]: error creating pipeline state %s\n", err.localizedDescription.UTF8String); return NO; } psd.label = @"ribbon_simple"; ca.blendingEnabled = NO; psd.vertexFunction = [_library newFunctionWithName:@"ribbon_simple_vertex"]; psd.fragmentFunction = [_library newFunctionWithName:@"ribbon_simple_fragment"]; _states[VIDEO_SHADER_MENU_2][0] = [_device newRenderPipelineStateWithDescriptor:psd error:&err]; if (err != nil) { RARCH_ERR("[Metal]: error creating pipeline state %s\n", err.localizedDescription.UTF8String); return NO; } psd.label = @"ribbon_simple_blend"; ca.blendingEnabled = YES; ca.sourceRGBBlendFactor = MTLBlendFactorOne; ca.destinationRGBBlendFactor = MTLBlendFactorOne; _states[VIDEO_SHADER_MENU_2][1] = [_device newRenderPipelineStateWithDescriptor:psd error:&err]; if (err != nil) { RARCH_ERR("[Metal]: error creating pipeline state %s\n", err.localizedDescription.UTF8String); return NO; } return YES; } - (bool)_initConversionFilters { NSError *err = nil; _filters[RPixelFormatBGRA4Unorm] = [Filter newFilterWithFunctionName:@"convert_bgra4444_to_bgra8888" device:_device library:_library error:&err]; if (err) { RARCH_LOG("[Metal]: unable to create 'convert_bgra4444_to_bgra8888' conversion filter: %s\n", err.localizedDescription.UTF8String); return NO; } _filters[RPixelFormatB5G6R5Unorm] = [Filter newFilterWithFunctionName:@"convert_rgb565_to_bgra8888" device:_device library:_library error:&err]; if (err) { RARCH_LOG("[Metal]: unable to create 'convert_rgb565_to_bgra8888' conversion filter: %s\n", err.localizedDescription.UTF8String); return NO; } return YES; } - (Texture *)newTexture:(struct texture_image)image filter:(enum texture_filter_type)filter { assert(filter >= TEXTURE_FILTER_LINEAR && filter <= TEXTURE_FILTER_MIPMAP_NEAREST); if (!image.pixels || !image.width || !image.height) { /* Create a dummy texture instead. */ #define T0 0xff000000u #define T1 0xffffffffu static const uint32_t checkerboard[] = { T0, T1, T0, T1, T0, T1, T0, T1, T1, T0, T1, T0, T1, T0, T1, T0, T0, T1, T0, T1, T0, T1, T0, T1, T1, T0, T1, T0, T1, T0, T1, T0, T0, T1, T0, T1, T0, T1, T0, T1, T1, T0, T1, T0, T1, T0, T1, T0, T0, T1, T0, T1, T0, T1, T0, T1, T1, T0, T1, T0, T1, T0, T1, T0, }; #undef T0 #undef T1 image.pixels = (uint32_t *)checkerboard; image.width = 8; image.height = 8; filter = TEXTURE_FILTER_MIPMAP_NEAREST; } BOOL mipmapped = filter == TEXTURE_FILTER_MIPMAP_LINEAR || filter == TEXTURE_FILTER_MIPMAP_NEAREST; Texture *tex = [Texture new]; tex.texture = [self newTexture:image mipmapped:mipmapped]; tex.sampler = _samplers[filter]; return tex; } - (id<MTLTexture>)newTexture:(struct texture_image)image mipmapped:(bool)mipmapped { MTLTextureDescriptor *td = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm width:image.width height:image.height mipmapped:mipmapped]; id<MTLTexture> t = [_device newTextureWithDescriptor:td]; [t replaceRegion:MTLRegionMake2D(0, 0, image.width, image.height) mipmapLevel:0 withBytes:image.pixels bytesPerRow:4 * image.width]; if (mipmapped) { id<MTLCommandBuffer> cb = self.blitCommandBuffer; id<MTLBlitCommandEncoder> bce = [cb blitCommandEncoder]; [bce generateMipmapsForTexture:t]; [bce endEncoding]; } return t; } - (id<CAMetalDrawable>)nextDrawable { if (_drawable == nil) { _drawable = _layer.nextDrawable; } return _drawable; } - (void)convertFormat:(RPixelFormat)fmt from:(id<MTLTexture>)src to:(id<MTLTexture>)dst { assert(src.width == dst.width && src.height == dst.height); assert(fmt >= 0 && fmt < RPixelFormatCount); Filter *conv = _filters[fmt]; assert(conv != nil); [conv apply:self.blitCommandBuffer in:src out:dst]; } - (id<MTLCommandBuffer>)blitCommandBuffer { if (!_blitCommandBuffer) { _blitCommandBuffer = [_commandQueue commandBuffer]; _blitCommandBuffer.label = @"Blit command buffer"; } return _blitCommandBuffer; } - (void)_nextChain { _currentChain = (_currentChain + 1) % CHAIN_LENGTH; [_chain[_currentChain] discard]; } - (void)setCaptureEnabled:(bool)captureEnabled { if (_captureEnabled == captureEnabled) return; _captureEnabled = captureEnabled; //_layer.framebufferOnly = !captureEnabled; } - (bool)captureEnabled { return _captureEnabled; } - (bool)readBackBuffer:(uint8_t *)buffer { if (!_captureEnabled || _backBuffer == nil) return NO; if (_backBuffer.pixelFormat != MTLPixelFormatBGRA8Unorm) { RARCH_WARN("[Metal]: unexpected pixel format %d\n", _backBuffer.pixelFormat); return NO; } uint8_t *tmp = malloc(_backBuffer.width * _backBuffer.height * 4); [_backBuffer getBytes:tmp bytesPerRow:4 * _backBuffer.width fromRegion:MTLRegionMake2D(0, 0, _backBuffer.width, _backBuffer.height) mipmapLevel:0]; NSUInteger srcStride = _backBuffer.width * 4; uint8_t const *src = tmp + (_viewport.y * srcStride); NSUInteger dstStride = _viewport.width * 3; uint8_t *dst = buffer + (_viewport.height - 1) * dstStride; for (int y = 0; y < _viewport.height; y++, src += srcStride, dst -= dstStride) { for (int x = 0; x < _viewport.width; x++) { dst[3 * x + 0] = src[4 * (_viewport.x + x) + 0]; dst[3 * x + 1] = src[4 * (_viewport.x + x) + 1]; dst[3 * x + 2] = src[4 * (_viewport.x + x) + 2]; } } free(tmp); return YES; } - (void)begin { assert(_commandBuffer == nil); dispatch_semaphore_wait(_inflightSemaphore, DISPATCH_TIME_FOREVER); _commandBuffer = [_commandQueue commandBuffer]; _commandBuffer.label = @"Frame command buffer"; _backBuffer = nil; } - (id<MTLRenderCommandEncoder>)rce { assert(_commandBuffer != nil); if (_rce == nil) { MTLRenderPassDescriptor *rpd = [MTLRenderPassDescriptor new]; rpd.colorAttachments[0].clearColor = _clearColor; rpd.colorAttachments[0].loadAction = MTLLoadActionClear; rpd.colorAttachments[0].texture = self.nextDrawable.texture; if (_captureEnabled) { _backBuffer = self.nextDrawable.texture; } _rce = [_commandBuffer renderCommandEncoderWithDescriptor:rpd]; _rce.label = @"Frame command encoder"; } return _rce; } - (void)resetRenderViewport:(ViewportResetMode)mode { bool fullscreen = mode == kFullscreenViewport; MTLViewport vp = { .originX = fullscreen ? 0 : _viewport.x, .originY = fullscreen ? 0 : _viewport.y, .width = fullscreen ? _viewport.full_width : _viewport.width, .height = fullscreen ? _viewport.full_height : _viewport.height, .znear = 0, .zfar = 1, }; [self.rce setViewport:vp]; } - (void)resetScissorRect { MTLScissorRect sr = { .x = 0, .y = 0, .width = _viewport.full_width, .height = _viewport.full_height, }; [self.rce setScissorRect:sr]; } - (void)drawQuadX:(float)x y:(float)y w:(float)w h:(float)h r:(float)r g:(float)g b:(float)b a:(float)a { SpriteVertex v[4]; v[0].position = simd_make_float2(x, y); v[1].position = simd_make_float2(x + w, y); v[2].position = simd_make_float2(x, y + h); v[3].position = simd_make_float2(x + w, y + h); simd_float4 color = simd_make_float4(r, g, b, a); v[0].color = color; v[1].color = color; v[2].color = color; v[3].color = color; id<MTLRenderCommandEncoder> rce = self.rce; [rce setRenderPipelineState:_clearState]; [rce setVertexBytes:&v length:sizeof(v) atIndex:BufferIndexPositions]; [rce setVertexBytes:&_uniforms length:sizeof(_uniforms) atIndex:BufferIndexUniforms]; [rce drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4]; } - (void)end { assert(_commandBuffer != nil); [_chain[_currentChain] commitRanges]; if (_blitCommandBuffer) { #if TARGET_OS_OSX if (_captureEnabled) { id<MTLBlitCommandEncoder> bce = [_blitCommandBuffer blitCommandEncoder]; [bce synchronizeResource:_backBuffer]; [bce endEncoding]; } #endif // pending blits for mipmaps or render passes for slang shaders [_blitCommandBuffer commit]; [_blitCommandBuffer waitUntilCompleted]; _blitCommandBuffer = nil; } if (_rce) { [_rce endEncoding]; _rce = nil; } __block dispatch_semaphore_t inflight = _inflightSemaphore; [_commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _) { dispatch_semaphore_signal(inflight); }]; if (self.nextDrawable) { [_commandBuffer presentDrawable:self.nextDrawable]; } [_commandBuffer commit]; _commandBuffer = nil; _drawable = nil; [self _nextChain]; } - (bool)allocRange:(BufferRange *)range length:(NSUInteger)length { return [_chain[_currentChain] allocRange:range length:length]; } @end @implementation Texture @end @implementation BufferNode - (instancetype)initWithBuffer:(id<MTLBuffer>)src { if (self = [super init]) { _src = src; } return self; } @end @implementation BufferChain { id<MTLDevice> _device; NSUInteger _blockLen; BufferNode *_head; NSUInteger _offset; // offset into _current BufferNode *_current; NSUInteger _length; NSUInteger _allocated; } /* macOS requires constants in a buffer to have a 256 byte alignment. */ #ifdef TARGET_OS_MAC static const NSUInteger kConstantAlignment = 256; #else static const NSUInteger kConstantAlignment = 4; #endif - (instancetype)initWithDevice:(id<MTLDevice>)device blockLen:(NSUInteger)blockLen { if (self = [super init]) { _device = device; _blockLen = blockLen; } return self; } - (NSString *)debugDescription { return [NSString stringWithFormat:@"length=%ld, allocated=%ld", _length, _allocated]; } - (void)commitRanges { #if TARGET_OS_OSX for (BufferNode *n = _head; n != nil; n = n.next) { if (n.allocated > 0) { [n.src didModifyRange:NSMakeRange(0, n.allocated)]; } } #endif } - (void)discard { _current = _head; _offset = 0; _allocated = 0; } - (bool)allocRange:(BufferRange *)range length:(NSUInteger)length { MTLResourceOptions opts; opts = PLATFORM_METAL_RESOURCE_STORAGE_MODE; memset(range, 0, sizeof(*range)); if (!_head) { _head = [[BufferNode alloc] initWithBuffer:[_device newBufferWithLength:_blockLen options:opts]]; _length += _blockLen; _current = _head; _offset = 0; } if ([self _subAllocRange:range length:length]) return YES; while (_current.next) { [self _nextNode]; if ([self _subAllocRange:range length:length]) return YES; } NSUInteger blockLen = _blockLen; if (length > blockLen) { blockLen = length; } _current.next = [[BufferNode alloc] initWithBuffer:[_device newBufferWithLength:blockLen options:opts]]; if (!_current.next) return NO; _length += blockLen; [self _nextNode]; retro_assert([self _subAllocRange:range length:length]); return YES; } - (void)_nextNode { _current = _current.next; _offset = 0; } - (BOOL)_subAllocRange:(BufferRange *)range length:(NSUInteger)length { NSUInteger nextOffset = _offset + length; if (nextOffset <= _current.src.length) { _current.allocated = nextOffset; _allocated += length; range->data = _current.src.contents + _offset; range->buffer = _current.src; range->offset = _offset; _offset = MTL_ALIGN_BUFFER(nextOffset); return YES; } return NO; } @end
{ "pile_set_name": "Github" }
<!--- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <head> <title>Tutorial - Task Throttling</title> </head> ## [Helix Tutorial](./Tutorial.html): Task Throttling In this chapter, we\'ll learn how to control the parallel execution of tasks in the task framework. ### Task Throttling Configuration Helix can control the number of tasks that are executed in parallel according to multiple thresholds. Applications can set these thresholds in the following configuration items: * JobConfig.ConcurrentTasksPerInstance The number of concurrent tasks in this job that are allowed to run on an instance. * InstanceConfig.MAX_CONCURRENT_TASK The number of total concurrent tasks that are allowed to run on an instance. Also see [WorkflowConfig.ParallelJobs](./tutorial_task_framework.html). ### Job Priority for Task Throttling Whenever there are too many tasks to be scheduled according to the threshold, Helix will prioritize the older jobs. The age of a job is calculated based on the job start time.
{ "pile_set_name": "Github" }
; SPIR-V ; Version: 1.0 ; Generator: Khronos Glslang Reference Front End; 3 ; Bound: 47 ; Schema: 0 OpCapability Shader %1 = OpExtInstImport "GLSL.std.450" OpMemoryModel Logical GLSL450 OpEntryPoint Fragment %main "main" %counter %FragColor OpExecutionMode %main OriginUpperLeft OpSource GLSL 450 OpName %main "main" OpName %counter "counter" OpName %FragColor "FragColor" OpDecorate %counter Flat OpDecorate %counter Location 0 OpDecorate %FragColor Location 0 %void = OpTypeVoid %3 = OpTypeFunction %void %float = OpTypeFloat 32 %v4float = OpTypeVector %float 4 %8 = OpTypeFunction %v4float %int = OpTypeInt 32 1 %_ptr_Input_int = OpTypePointer Input %int %counter = OpVariable %_ptr_Input_int Input %int_10 = OpConstant %int 10 %bool = OpTypeBool %float_10 = OpConstant %float 10 %21 = OpConstantComposite %v4float %float_10 %float_10 %float_10 %float_10 %float_30 = OpConstant %float 30 %25 = OpConstantComposite %v4float %float_30 %float_30 %float_30 %float_30 %_ptr_Output_v4float = OpTypePointer Output %v4float %FragColor = OpVariable %_ptr_Output_v4float Output %_ptr_Function_v4float = OpTypePointer Function %v4float %false = OpConstantFalse %bool %44 = OpUndef %v4float %main = OpFunction %void None %3 %5 = OpLabel OpBranch %33 %33 = OpLabel %45 = OpPhi %v4float %44 %5 %44 %35 OpLoopMerge %34 %35 None OpBranch %36 %36 = OpLabel %37 = OpLoad %int %counter %38 = OpIEqual %bool %37 %int_10 OpSelectionMerge %39 None OpBranchConditional %38 %40 %41 %40 = OpLabel OpBranch %34 %41 = OpLabel OpBranch %34 %39 = OpLabel OpUnreachable %35 = OpLabel OpBranchConditional %false %33 %34 %34 = OpLabel %46 = OpPhi %v4float %21 %40 %25 %41 %44 %35 OpStore %FragColor %46 OpReturn OpFunctionEnd
{ "pile_set_name": "Github" }
/* Copyright 2015 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package leaderelection import ( "net/http" "sync" "time" ) // HealthzAdaptor associates the /healthz endpoint with the LeaderElection object. // It helps deal with the /healthz endpoint being set up prior to the LeaderElection. // This contains the code needed to act as an adaptor between the leader // election code the health check code. It allows us to provide health // status about the leader election. Most specifically about if the leader // has failed to renew without exiting the process. In that case we should // report not healthy and rely on the kubelet to take down the process. type HealthzAdaptor struct { pointerLock sync.Mutex le *LeaderElector timeout time.Duration } // Name returns the name of the health check we are implementing. func (l *HealthzAdaptor) Name() string { return "leaderElection" } // Check is called by the healthz endpoint handler. // It fails (returns an error) if we own the lease but had not been able to renew it. func (l *HealthzAdaptor) Check(req *http.Request) error { l.pointerLock.Lock() defer l.pointerLock.Unlock() if l.le == nil { return nil } return l.le.Check(l.timeout) } // SetLeaderElection ties a leader election object to a HealthzAdaptor func (l *HealthzAdaptor) SetLeaderElection(le *LeaderElector) { l.pointerLock.Lock() defer l.pointerLock.Unlock() l.le = le } // NewLeaderHealthzAdaptor creates a basic healthz adaptor to monitor a leader election. // timeout determines the time beyond the lease expiry to be allowed for timeout. // checks within the timeout period after the lease expires will still return healthy. func NewLeaderHealthzAdaptor(timeout time.Duration) *HealthzAdaptor { result := &HealthzAdaptor{ timeout: timeout, } return result }
{ "pile_set_name": "Github" }
8
{ "pile_set_name": "Github" }
PREFIX : <http://example.org/#> SELECT * { :x ?p ?v . FILTER langMatches(lang(?v), "en-GB") . }
{ "pile_set_name": "Github" }
package com.cundong.recyclerview.sample; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import com.cundong.recyclerview.RecyclerViewUtils; import java.util.ArrayList; /** * Created by cundong on 2015/11/10. * <p/> * Sample入口 */ public class MainActivity extends AppCompatActivity { private static final Class<?>[] ACTIVITY = {LinearLayoutActivity.class, EndlessLinearLayoutActivity.class, EndlessGridLayoutActivity.class, EndlessStaggeredGridLayoutActivity.class}; private static final String[] TITLE = {"LinearLayoutSample", "EndlessLinearLayoutActivity", "EndlessGridLayoutActivity", "EndlessStaggeredGridLayoutActivity"}; private RecyclerView mRecyclerView = null; private DataAdapter mDataAdapter = null; private ArrayList<ListItem> mDataList = null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.sample_activity); mRecyclerView = (RecyclerView) findViewById(R.id.list); mRecyclerView.setLayoutManager(new LinearLayoutManager(this)); mDataList = new ArrayList<>(); for (int i = 0; i < TITLE.length; i++) { ListItem item = new ListItem(); item.title = TITLE[i]; item.activity = ACTIVITY[i]; mDataList.add(item); } mDataAdapter = new DataAdapter(this); mDataAdapter.setData(mDataList); mRecyclerView.setAdapter(mDataAdapter); } private static class ListItem { public String title; public Class<?> activity; } private class DataAdapter extends RecyclerView.Adapter { private LayoutInflater mLayoutInflater; private ArrayList<ListItem> mDataList = new ArrayList<>(); public DataAdapter(Context context) { mLayoutInflater = LayoutInflater.from(context); } public void setData(ArrayList<ListItem> list) { this.mDataList = list; notifyDataSetChanged(); } @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { return new ViewHolder(mLayoutInflater.inflate(R.layout.sample_item_button, parent, false)); } @Override public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { ListItem listItem = mDataList.get(position); ViewHolder viewHolder = (ViewHolder) holder; viewHolder.button.setText(listItem.title); } @Override public int getItemCount() { return mDataList.size(); } private class ViewHolder extends RecyclerView.ViewHolder { private Button button; public ViewHolder(View itemView) { super(itemView); button = (Button) itemView.findViewById(R.id.button); button.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { ListItem listItem = mDataList.get(RecyclerViewUtils.getAdapterPosition(mRecyclerView, ViewHolder.this)); startActivity(new Intent(MainActivity.this, listItem.activity)); } }); } } } }
{ "pile_set_name": "Github" }
#!/bin/bash # Midnight Commander - push doc/hints/mc.hint file to Transifex # # Copyright (C) 2013 # The Free Software Foundation, Inc. # # Written by: # Slava Zanko <[email protected]>, 2013 # # This file is part of the Midnight Commander. # # The Midnight Commander is free software: you can redistribute it # and/or modify it under the terms of the GNU General Public License as # published by the Free Software Foundation, either version 3 of the License, # or (at your option) any later version. # # The Midnight Commander is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. set -e MC_SOURCE_ROOT_DIR=${MC_SOURCE_ROOT_DIR:-$(dirname $(dirname $(dirname $(pwd))))} #*** include section (source functions, for example) ******************* source "${MC_SOURCE_ROOT_DIR}/maint/utils/sync-transifex/functions" #*** file scope functions ********************************************** #*** main code ********************************************************* WORK_DIR=$(initSyncDirIfNeeded "mc.hint") convertFromTextToPo "${MC_SOURCE_ROOT_DIR}/doc/hints/mc.hint" "${WORK_DIR}/mc.doc.pot" sendSourceToTransifex "${WORK_DIR}"
{ "pile_set_name": "Github" }
//! \file /* ** Copyright (C) - Triton ** ** This program is under the terms of the Apache License 2.0. */ #if defined(__unix__) || defined(__APPLE__) #include <triton/pythonBindings.hpp> #include <triton/pythonUtils.hpp> #include <triton/pythonXFunctions.hpp> #include <triton/architecture.hpp> #include <triton/unix.hpp> /*! \page py_SYSCALL_page SYSCALL \brief [**python api**] All information about the SYSCALL Python namespace. \tableofcontents \section SYSCALL_py_description Description <hr> According to the CPU architecture, the SYSCALL namespace contains all syscall numbers. The list of syscalls depends of your kernel version. That's why the list is generated at compile time by the [extract_syscall.py](https://github.com/JonathanSalwan/Triton/blob/master/src/scripts/extract_syscall.py) script. \section SYSCALL_py_api Python API - Items of the SYSCALL namespace <hr> Dependnig on your kernel version, this list is not exhaustive. - **SYSCALL.READ** - **SYSCALL.WRITE** - **SYSCALL.OPEN** - **SYSCALL.CLOSE** - **SYSCALL.STAT** - **SYSCALL.FSTAT** - **SYSCALL.LSTAT** - **SYSCALL.POLL** - **SYSCALL.LSEEK** - **SYSCALL.MMAP** - **SYSCALL.MPROTECT** - **SYSCALL.MUNMAP** - **SYSCALL.BRK** - **SYSCALL.RT_SIGACTION** - **SYSCALL.RT_SIGPROCMASK** - **SYSCALL.RT_SIGRETURN** - **SYSCALL.IOCTL** - **SYSCALL.PREAD64** - **SYSCALL.PWRITE64** - **SYSCALL.READV** - **SYSCALL.WRITEV** - **SYSCALL.ACCESS** - **SYSCALL.PIPE** - **SYSCALL.SELECT** - **SYSCALL.SCHED_YIELD** - **SYSCALL.MREMAP** - **SYSCALL.MSYNC** - **SYSCALL.MINCORE** - **SYSCALL.MADVISE** - **SYSCALL.SHMGET** - **SYSCALL.SHMAT** - **SYSCALL.SHMCTL** - **SYSCALL.DUP** - **SYSCALL.DUP2** - **SYSCALL.PAUSE** - **SYSCALL.NANOSLEEP** - **SYSCALL.GETITIMER** - **SYSCALL.ALARM** - **SYSCALL.SETITIMER** - **SYSCALL.GETPID** - **SYSCALL.SENDFILE** - **SYSCALL.SOCKET** - **SYSCALL.CONNECT** - **SYSCALL.ACCEPT** - **SYSCALL.SENDTO** - **SYSCALL.RECVFROM** - **SYSCALL.SENDMSG** - **SYSCALL.RECVMSG** - **SYSCALL.SHUTDOWN** - **SYSCALL.BIND** - **SYSCALL.LISTEN** - **SYSCALL.GETSOCKNAME** - **SYSCALL.GETPEERNAME** - **SYSCALL.SOCKETPAIR** - **SYSCALL.SETSOCKOPT** - **SYSCALL.GETSOCKOPT** - **SYSCALL.CLONE** - **SYSCALL.FORK** - **SYSCALL.VFORK** - **SYSCALL.EXECVE** - **SYSCALL.EXIT** - **SYSCALL.WAIT4** - **SYSCALL.KILL** - **SYSCALL.UNAME** - **SYSCALL.SEMGET** - **SYSCALL.SEMOP** - **SYSCALL.SEMCTL** - **SYSCALL.SHMDT** - **SYSCALL.MSGGET** - **SYSCALL.MSGSND** - **SYSCALL.MSGRCV** - **SYSCALL.MSGCTL** - **SYSCALL.FCNTL** - **SYSCALL.FLOCK** - **SYSCALL.FSYNC** - **SYSCALL.FDATASYNC** - **SYSCALL.TRUNCATE** - **SYSCALL.FTRUNCATE** - **SYSCALL.GETDENTS** - **SYSCALL.GETCWD** - **SYSCALL.CHDIR** - **SYSCALL.FCHDIR** - **SYSCALL.RENAME** - **SYSCALL.MKDIR** - **SYSCALL.RMDIR** - **SYSCALL.CREAT** - **SYSCALL.LINK** - **SYSCALL.UNLINK** - **SYSCALL.SYMLINK** - **SYSCALL.READLINK** - **SYSCALL.CHMOD** - **SYSCALL.FCHMOD** - **SYSCALL.CHOWN** - **SYSCALL.FCHOWN** - **SYSCALL.LCHOWN** - **SYSCALL.UMASK** - **SYSCALL.GETTIMEOFDAY** - **SYSCALL.GETRLIMIT** - **SYSCALL.GETRUSAGE** - **SYSCALL.SYSINFO** - **SYSCALL.TIMES** - **SYSCALL.PTRACE** - **SYSCALL.GETUID** - **SYSCALL.SYSLOG** - **SYSCALL.GETGID** - **SYSCALL.SETUID** - **SYSCALL.SETGID** - **SYSCALL.GETEUID** - **SYSCALL.GETEGID** - **SYSCALL.SETPGID** - **SYSCALL.GETPPID** - **SYSCALL.GETPGRP** - **SYSCALL.SETSID** - **SYSCALL.SETREUID** - **SYSCALL.SETREGID** - **SYSCALL.GETGROUPS** - **SYSCALL.SETGROUPS** - **SYSCALL.SETRESUID** - **SYSCALL.GETRESUID** - **SYSCALL.SETRESGID** - **SYSCALL.GETRESGID** - **SYSCALL.GETPGID** - **SYSCALL.SETFSUID** - **SYSCALL.SETFSGID** - **SYSCALL.GETSID** - **SYSCALL.CAPGET** - **SYSCALL.CAPSET** - **SYSCALL.RT_SIGPENDING** - **SYSCALL.RT_SIGTIMEDWAIT** - **SYSCALL.RT_SIGQUEUEINFO** - **SYSCALL.RT_SIGSUSPEND** - **SYSCALL.SIGALTSTACK** - **SYSCALL.UTIME** - **SYSCALL.MKNOD** - **SYSCALL.USELIB** - **SYSCALL.PERSONALITY** - **SYSCALL.USTAT** - **SYSCALL.STATFS** - **SYSCALL.FSTATFS** - **SYSCALL.SYSFS** - **SYSCALL.GETPRIORITY** - **SYSCALL.SETPRIORITY** - **SYSCALL.SCHED_SETPARAM** - **SYSCALL.SCHED_GETPARAM** - **SYSCALL.SCHED_SETSCHEDULER** - **SYSCALL.SCHED_GETSCHEDULER** - **SYSCALL.SCHED_GET_PRIORITY_MAX** - **SYSCALL.SCHED_GET_PRIORITY_MIN** - **SYSCALL.SCHED_RR_GET_INTERVAL** - **SYSCALL.MLOCK** - **SYSCALL.MUNLOCK** - **SYSCALL.MLOCKALL** - **SYSCALL.MUNLOCKALL** - **SYSCALL.VHANGUP** - **SYSCALL.MODIFY_LDT** - **SYSCALL.PIVOT_ROOT** - **SYSCALL._SYSCTL** - **SYSCALL.PRCTL** - **SYSCALL.ARCH_PRCTL** - **SYSCALL.ADJTIMEX** - **SYSCALL.SETRLIMIT** - **SYSCALL.CHROOT** - **SYSCALL.SYNC** - **SYSCALL.ACCT** - **SYSCALL.SETTIMEOFDAY** - **SYSCALL.MOUNT** - **SYSCALL.UMOUNT2** - **SYSCALL.SWAPON** - **SYSCALL.SWAPOFF** - **SYSCALL.REBOOT** - **SYSCALL.SETHOSTNAME** - **SYSCALL.SETDOMAINNAME** - **SYSCALL.IOPL** - **SYSCALL.IOPERM** - **SYSCALL.CREATE_MODULE** - **SYSCALL.INIT_MODULE** - **SYSCALL.DELETE_MODULE** - **SYSCALL.GET_KERNEL_SYMS** - **SYSCALL.QUERY_MODULE** - **SYSCALL.QUOTACTL** - **SYSCALL.NFSSERVCTL** - **SYSCALL.GETPMSG** - **SYSCALL.PUTPMSG** - **SYSCALL.AFS_SYSCALL** - **SYSCALL.TUXCALL** - **SYSCALL.SECURITY** - **SYSCALL.GETTID** - **SYSCALL.READAHEAD** - **SYSCALL.SETXATTR** - **SYSCALL.LSETXATTR** - **SYSCALL.FSETXATTR** - **SYSCALL.GETXATTR** - **SYSCALL.LGETXATTR** - **SYSCALL.FGETXATTR** - **SYSCALL.LISTXATTR** - **SYSCALL.LLISTXATTR** - **SYSCALL.FLISTXATTR** - **SYSCALL.REMOVEXATTR** - **SYSCALL.LREMOVEXATTR** - **SYSCALL.FREMOVEXATTR** - **SYSCALL.TKILL** - **SYSCALL.TIME** - **SYSCALL.FUTEX** - **SYSCALL.SCHED_SETAFFINITY** - **SYSCALL.SCHED_GETAFFINITY** - **SYSCALL.SET_THREAD_AREA** - **SYSCALL.IO_SETUP** - **SYSCALL.IO_DESTROY** - **SYSCALL.IO_GETEVENTS** - **SYSCALL.IO_SUBMIT** - **SYSCALL.IO_CANCEL** - **SYSCALL.GET_THREAD_AREA** - **SYSCALL.LOOKUP_DCOOKIE** - **SYSCALL.EPOLL_CREATE** - **SYSCALL.EPOLL_CTL_OLD** - **SYSCALL.EPOLL_WAIT_OLD** - **SYSCALL.REMAP_FILE_PAGES** - **SYSCALL.GETDENTS64** - **SYSCALL.SET_TID_ADDRESS** - **SYSCALL.RESTART_SYSCALL** - **SYSCALL.SEMTIMEDOP** - **SYSCALL.FADVISE64** - **SYSCALL.TIMER_CREATE** - **SYSCALL.TIMER_SETTIME** - **SYSCALL.TIMER_GETTIME** - **SYSCALL.TIMER_GETOVERRUN** - **SYSCALL.TIMER_DELETE** - **SYSCALL.CLOCK_SETTIME** - **SYSCALL.CLOCK_GETTIME** - **SYSCALL.CLOCK_GETRES** - **SYSCALL.CLOCK_NANOSLEEP** - **SYSCALL.EXIT_GROUP** - **SYSCALL.EPOLL_WAIT** - **SYSCALL.EPOLL_CTL** - **SYSCALL.TGKILL** - **SYSCALL.UTIMES** - **SYSCALL.VSERVER** - **SYSCALL.MBIND** - **SYSCALL.SET_MEMPOLICY** - **SYSCALL.GET_MEMPOLICY** - **SYSCALL.MQ_OPEN** - **SYSCALL.MQ_UNLINK** - **SYSCALL.MQ_TIMEDSEND** - **SYSCALL.MQ_TIMEDRECEIVE** - **SYSCALL.MQ_NOTIFY** - **SYSCALL.MQ_GETSETATTR** - **SYSCALL.KEXEC_LOAD** - **SYSCALL.WAITID** - **SYSCALL.ADD_KEY** - **SYSCALL.REQUEST_KEY** - **SYSCALL.KEYCTL** - **SYSCALL.IOPRIO_SET** - **SYSCALL.IOPRIO_GET** - **SYSCALL.INOTIFY_INIT** - **SYSCALL.INOTIFY_ADD_WATCH** - **SYSCALL.INOTIFY_RM_WATCH** - **SYSCALL.MIGRATE_PAGES** - **SYSCALL.OPENAT** - **SYSCALL.MKDIRAT** - **SYSCALL.MKNODAT** - **SYSCALL.FCHOWNAT** - **SYSCALL.FUTIMESAT** - **SYSCALL.NEWFSTATAT** - **SYSCALL.UNLINKAT** - **SYSCALL.RENAMEAT** - **SYSCALL.LINKAT** - **SYSCALL.SYMLINKAT** - **SYSCALL.READLINKAT** - **SYSCALL.FCHMODAT** - **SYSCALL.FACCESSAT** - **SYSCALL.PSELECT6** - **SYSCALL.PPOLL** - **SYSCALL.UNSHARE** - **SYSCALL.SET_ROBUST_LIST** - **SYSCALL.GET_ROBUST_LIST** - **SYSCALL.SPLICE** - **SYSCALL.TEE** - **SYSCALL.SYNC_FILE_RANGE** - **SYSCALL.VMSPLICE** - **SYSCALL.MOVE_PAGES** - **SYSCALL.UTIMENSAT** - **SYSCALL.EPOLL_PWAIT** - **SYSCALL.SIGNALFD** - **SYSCALL.TIMERFD_CREATE** - **SYSCALL.EVENTFD** - **SYSCALL.FALLOCATE** - **SYSCALL.TIMERFD_SETTIME** - **SYSCALL.TIMERFD_GETTIME** - **SYSCALL.ACCEPT4** - **SYSCALL.SIGNALFD4** - **SYSCALL.EVENTFD2** - **SYSCALL.EPOLL_CREATE1** - **SYSCALL.DUP3** - **SYSCALL.PIPE2** - **SYSCALL.INOTIFY_INIT1** - **SYSCALL.PREADV** - **SYSCALL.PWRITEV** - **SYSCALL.RT_TGSIGQUEUEINFO** - **SYSCALL.PERF_EVENT_OPEN** - **SYSCALL.RECVMMSG** - **SYSCALL.FANOTIFY_INIT** - **SYSCALL.FANOTIFY_MARK** - **SYSCALL.PRLIMIT64** - **SYSCALL.NAME_TO_HANDLE_AT** - **SYSCALL.OPEN_BY_HANDLE_AT** - **SYSCALL.CLOCK_ADJTIME** - **SYSCALL.SYNCFS** - **SYSCALL.SENDMMSG** - **SYSCALL.SETNS** - **SYSCALL.GETCPU** - **SYSCALL.PROCESS_VM_READV** - **SYSCALL.PROCESS_VM_WRITEV** - **SYSCALL.KCMP** - **SYSCALL.FINIT_MODULE** - **SYSCALL.SCHED_SETATTR** - **SYSCALL.SCHED_GETATTR** - **SYSCALL.RENAMEAT2** - **SYSCALL.SECCOMP** - **SYSCALL.GETRANDOM** - **SYSCALL.MEMFD_CREATE** - **SYSCALL.KEXEC_FILE_LOAD** - **SYSCALL.BPF** */ namespace triton { namespace bindings { namespace python { void initSyscall64Namespace(PyObject* syscallsDict64) { PyDict_Clear(syscallsDict64); for (triton::uint32 i = 0; i < triton::os::unix::NB_SYSCALL64; ++i) xPyDict_SetItemString(syscallsDict64, triton::os::unix::syscallmap64[i], PyLong_FromUint32(i)); } void initSyscall32Namespace(PyObject* syscallsDict32) { PyDict_Clear(syscallsDict32); #if defined(__unix__) for (triton::uint32 i = 0; i < triton::os::unix::NB_SYSCALL32; ++i) xPyDict_SetItemString(syscallsDict32, triton::os::unix::syscallmap32[i], PyLong_FromUint32(i)); #endif } }; /* python namespace */ }; /* bindings namespace */ }; /* triton namespace */ #endif /* __unix__ || __APPLE__ */
{ "pile_set_name": "Github" }
name = "StochasticDiffEq" uuid = "789caeaf-c7a9-5a7d-9973-96adeb23e2a0" repo = "https://github.com/SciML/StochasticDiffEq.jl.git"
{ "pile_set_name": "Github" }
# DESCRIPTION # The Multiplication Rule - Cards # ENDDESCRIPTION ## DBsubject(Probability) ## DBchapter(Sample Space) ## DBsection(Probability: direct computation, inclusion/exclusion) ## Institution(Piedmont) ## Author(Doug Torrance) ## Level(2) ## MO(1) ## TitleText1('Introduction to Statistics: Think & Do') ## AuthorText1('Stevens') ## EditionText1('4.1') ## Section1('4.4') ## Problem1('19 20') ## KEYWORDS('probability') DOCUMENT(); loadMacros( "PGstandard.pl", "MathObjects.pl", "PGchoicemacros.pl", ); TEXT(beginproblem()); Context("Numeric"); @ranks = (2, 3, 4, 5, 6, 7, 8, 9, 10, 'Jack', 'Queen', 'King', 'Ace'); ($rank1, $rank2) = @ranks[NchooseK(13, 2)]; $suit = list_random('Spade', 'Club', 'Heart', 'Diamond'); if ($rank1 == 8 || $rank1 eq 'Ace') { $article1 = 'an'; } else { $article1 = 'a'; } if ($rank2 == 8 || $rank2 eq 'Ace') { $article2 = 'an'; } else { $article2 = 'a'; } $with_jack_jack = Compute(4/52 * 4/52); $with_jack_queen = Compute(4/52 * 4/52); $with_jack_heart = Compute(4/52 * 13/52); $without_jack_jack = Compute(4/52 * 3/51); $without_jack_queen = Compute(4/52 * 4/51); BEGIN_TEXT Suppose you draw two cards $BBOLD with replacement$EBOLD from a standard deck. $PAR (a) What is the probability of getting $article1 $rank1 then $article1 $rank1 again? $BR \{ans_rule(20)\} $PAR (b) What is the probability of getting $article1 $rank1 then $article2 $rank2 ? $BR \{ans_rule(20)\} $PAR (c) What is the probability of getting $article1 $rank1 then a $suit? $BR \{ans_rule(20)\} $PAR Suppose you draw two cards $BBOLD without replacement$EBOLD from a standard deck. $PAR (d) What is the probability of getting $article1 $rank1 then $article1 $rank1 again? $BR \{ans_rule(20)\} $PAR (e) What is the probability of getting $article1 $rank1 then $article2 $rank2? $BR \{ans_rule(20)\} END_TEXT ANS($with_jack_jack->cmp); ANS($with_jack_queen->cmp); ANS($with_jack_heart->cmp); ANS($without_jack_jack->cmp); ANS($without_jack_queen->cmp); ENDDOCUMENT();
{ "pile_set_name": "Github" }
--[[ Copyright (c) 2014 Team Sparkle Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ]] argv = argv or {...} -- Important! We need to check the version! -- Because we do weird things with the engine, we have to block out vanilla Iceball clients. if not common.fork_marikiri_ver then error("This requires the Iceball-MK fork to run.") end -- And of course, we need to enable said "weird things". common.mk_compat_disable() client.mk_set_title("The Hole") -- Start the loader up. LOADER_FILE = "pkg/iceball/altgame/client_start.lua" dofile("pkg/iceball/lib/loader.lua")
{ "pile_set_name": "Github" }
/* * Copyright (c) 1992, 1993 * The Regents of the University of California. All rights reserved. * * This software was developed by the Computer Systems Engineering group * at Lawrence Berkeley Laboratory under DARPA contract BG 91-66 and * contributed to Berkeley. * * All advertising materials mentioning features or use of this software * must display the following acknowledgement: * This product includes software developed by the University of * California, Lawrence Berkeley Laboratory. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * This product includes software developed by the University of * California, Berkeley and its contributors. * 4. Neither the name of the University nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * @(#)intr.c 8.3 (Berkeley) 11/11/93 * * from: $Header: intr.c,v 1.22 93/09/26 19:48:06 torek Exp $ (LBL) */ #include <sys/param.h> #include <sys/kernel.h> #include <vm/vm.h> #include <net/netisr.h> #include <machine/cpu.h> #include <machine/ctlreg.h> #include <machine/instr.h> #include <machine/trap.h> #include <sparc/sparc/clockreg.h> /* * Stray interrupt handler. Clear it if possible. * If not, and if we get 10 interrupts in 10 seconds, panic. */ void strayintr(fp) struct clockframe *fp; { static int straytime, nstray; int timesince; printf("stray interrupt ipl %x pc=%x npc=%x psr=%b\n", fp->ipl, fp->pc, fp->npc, fp->psr, PSR_BITS); timesince = time.tv_sec - straytime; if (timesince <= 10) { if (++nstray > 9) panic("crazy interrupts"); } else { straytime = time.tv_sec; nstray = 1; } } extern int clockintr(); /* level 10 (clock) interrupt code */ static struct intrhand level10 = { clockintr }; extern int statintr(); /* level 14 (statclock) interrupt code */ static struct intrhand level14 = { statintr }; /* * Level 1 software interrupt (could also be Sbus level 1 interrupt). * Three possible reasons: * ROM console input needed * Network software interrupt * Soft clock interrupt */ int soft01intr(fp) void *fp; { extern int rom_console_input; if (rom_console_input && cnrom()) cnrint(); if (sir.sir_any) { /* * XXX this is bogus: should just have a list of * routines to call, a la timeouts. Mods to * netisr are not atomic and must be protected (gah). */ if (sir.sir_which[SIR_NET]) { int n, s; s = splhigh(); n = netisr; netisr = 0; splx(s); sir.sir_which[SIR_NET] = 0; #ifdef INET if (n & (1 << NETISR_ARP)) arpintr(); if (n & (1 << NETISR_IP)) ipintr(); #endif #ifdef NS if (n & (1 << NETISR_NS)) nsintr(); #endif #ifdef ISO if (n & (1 << NETISR_ISO)) clnlintr(); #endif } if (sir.sir_which[SIR_CLOCK]) { sir.sir_which[SIR_CLOCK] = 0; softclock(); } } return (1); } static struct intrhand level01 = { soft01intr }; /* * Level 15 interrupts are special, and not vectored here. * Only `prewired' interrupts appear here; boot-time configured devices * are attached via intr_establish() below. */ struct intrhand *intrhand[15] = { NULL, /* 0 = error */ &level01, /* 1 = software level 1 + Sbus */ NULL, /* 2 = Sbus level 2 */ NULL, /* 3 = SCSI + DMA + Sbus level 3 */ NULL, /* 4 = software level 4 (tty softint) */ NULL, /* 5 = Ethernet + Sbus level 4 */ NULL, /* 6 = software level 6 (not used) */ NULL, /* 7 = video + Sbus level 5 */ NULL, /* 8 = Sbus level 6 */ NULL, /* 9 = Sbus level 7 */ &level10, /* 10 = counter 0 = clock */ NULL, /* 11 = floppy */ NULL, /* 12 = zs hardware interrupt */ NULL, /* 13 = audio chip */ &level14, /* 14 = counter 1 = profiling timer */ }; static int fastvec; /* marks fast vectors (see below) */ #ifdef DIAGNOSTIC extern int sparc_interrupt[]; #endif /* * Attach an interrupt handler to the vector chain for the given level. * This is not possible if it has been taken away as a fast vector. */ void intr_establish(level, ih) int level; struct intrhand *ih; { register struct intrhand **p, *q; #ifdef DIAGNOSTIC register struct trapvec *tv; register int displ; #endif int s; s = splhigh(); if (fastvec & (1 << level)) panic("intr_establish: level %d interrupt tied to fast vector", level); #ifdef DIAGNOSTIC /* double check for legal hardware interrupt */ if (level != 1 && level != 4 && level != 6) { tv = &trapbase[T_L1INT - 1 + level]; displ = &sparc_interrupt[0] - &tv->tv_instr[1]; /* has to be `mov level,%l3; ba _sparc_interrupt; rdpsr %l0' */ if (tv->tv_instr[0] != I_MOVi(I_L3, level) || tv->tv_instr[1] != I_BA(0, displ) || tv->tv_instr[2] != I_RDPSR(I_L0)) panic("intr_establish(%d, %x)\n%x %x %x != %x %x %x", level, ih, tv->tv_instr[0], tv->tv_instr[1], tv->tv_instr[2], I_MOVi(I_L3, level), I_BA(0, displ), I_RDPSR(I_L0)); } #endif /* * This is O(N^2) for long chains, but chains are never long * and we do want to preserve order. */ for (p = &intrhand[level]; (q = *p) != NULL; p = &q->ih_next) continue; *p = ih; ih->ih_next = NULL; splx(s); } /* * Like intr_establish, but wires a fast trap vector. Only one such fast * trap is legal for any interrupt, and it must be a hardware interrupt. */ void intr_fasttrap(level, vec) int level; void (*vec) __P((void)); { register struct trapvec *tv; register u_long hi22, lo10; #ifdef DIAGNOSTIC register int displ; /* suspenders, belt, and buttons too */ #endif int s; tv = &trapbase[T_L1INT - 1 + level]; hi22 = ((u_long)vec) >> 10; lo10 = ((u_long)vec) & 0x3ff; s = splhigh(); if ((fastvec & (1 << level)) != 0 || intrhand[level] != NULL) panic("intr_fasttrap: already handling level %d interrupts", level); #ifdef DIAGNOSTIC displ = &sparc_interrupt[0] - &tv->tv_instr[1]; /* has to be `mov level,%l3; ba _sparc_interrupt; rdpsr %l0' */ if (tv->tv_instr[0] != I_MOVi(I_L3, level) || tv->tv_instr[1] != I_BA(0, displ) || tv->tv_instr[2] != I_RDPSR(I_L0)) panic("intr_fasttrap(%d, %x)\n%x %x %x != %x %x %x", level, vec, tv->tv_instr[0], tv->tv_instr[1], tv->tv_instr[2], I_MOVi(I_L3, level), I_BA(0, displ), I_RDPSR(I_L0)); #endif /* kernel text is write protected -- let us in for a moment */ pmap_changeprot(kernel_pmap, (vm_offset_t)tv, VM_PROT_READ|VM_PROT_WRITE, 1); tv->tv_instr[0] = I_SETHI(I_L3, hi22); /* sethi %hi(vec),%l3 */ tv->tv_instr[1] = I_JMPLri(I_G0, I_L3, lo10);/* jmpl %l3+%lo(vec),%g0 */ tv->tv_instr[2] = I_RDPSR(I_L0); /* mov %psr, %l0 */ pmap_changeprot(kernel_pmap, (vm_offset_t)tv, VM_PROT_READ, 1); fastvec |= 1 << level; splx(s); }
{ "pile_set_name": "Github" }
// Copyright 2013 Dolphin Emulator Project // Licensed under GPLv2+ // Refer to the license.txt file included. // Originally written by Sven Peter <[email protected]> for anergistic. #include <signal.h> #include <stdio.h> #include <string.h> #include <unistd.h> #ifdef _WIN32 #include <iphlpapi.h> #include <iphlpapi.h> #include <ws2tcpip.h> #else #include <netinet/in.h> #include <sys/select.h> #include <sys/socket.h> #include <sys/un.h> #endif #include "Common/Logging/Log.h" #include "Core/HW/CPU.h" #include "Core/HW/Memmap.h" #include "Core/Host.h" #include "Core/PowerPC/GDBStub.h" #include "Core/PowerPC/Gekko.h" #include "Core/PowerPC/PPCCache.h" #include "Core/PowerPC/PowerPC.h" #define GDB_BFR_MAX 10000 #define GDB_MAX_BP 10 #define GDB_STUB_START '$' #define GDB_STUB_END '#' #define GDB_STUB_ACK '+' #define GDB_STUB_NAK '-' static int tmpsock = -1; static int sock = -1; static u8 cmd_bfr[GDB_BFR_MAX]; static u32 cmd_len; static u32 sig = 0; static u32 send_signal = 0; static u32 step_break = 0; typedef struct { u32 active; u32 addr; u32 len; } gdb_bp_t; static gdb_bp_t bp_x[GDB_MAX_BP]; static gdb_bp_t bp_r[GDB_MAX_BP]; static gdb_bp_t bp_w[GDB_MAX_BP]; static gdb_bp_t bp_a[GDB_MAX_BP]; // private helpers static u8 hex2char(u8 hex) { if (hex >= '0' && hex <= '9') return hex - '0'; else if (hex >= 'a' && hex <= 'f') return hex - 'a' + 0xa; else if (hex >= 'A' && hex <= 'F') return hex - 'A' + 0xa; ERROR_LOG(GDB_STUB, "Invalid nibble: %c (%02x)", hex, hex); return 0; } static u8 nibble2hex(u8 n) { n &= 0xf; if (n < 0xa) return '0' + n; else return 'A' + n - 0xa; } static void mem2hex(u8* dst, u8* src, u32 len) { u8 tmp; while (len-- > 0) { tmp = *src++; *dst++ = nibble2hex(tmp >> 4); *dst++ = nibble2hex(tmp); } } static void hex2mem(u8* dst, u8* src, u32 len) { while (len-- > 0) { *dst++ = (hex2char(*src) << 4) | hex2char(*(src + 1)); src += 2; } } static u8 gdb_read_byte() { ssize_t res; u8 c = '+'; res = recv(sock, &c, 1, MSG_WAITALL); if (res != 1) { ERROR_LOG(GDB_STUB, "recv failed : %ld", res); gdb_deinit(); } return c; } static u8 gdb_calc_chksum() { u32 len = cmd_len; u8* ptr = cmd_bfr; u8 c = 0; while (len-- > 0) c += *ptr++; return c; } static gdb_bp_t* gdb_bp_ptr(u32 type) { switch (type) { case GDB_BP_TYPE_X: return bp_x; case GDB_BP_TYPE_R: return bp_x; case GDB_BP_TYPE_W: return bp_x; case GDB_BP_TYPE_A: return bp_x; default: return nullptr; } } static gdb_bp_t* gdb_bp_empty_slot(u32 type) { gdb_bp_t* p; u32 i; p = gdb_bp_ptr(type); if (p == nullptr) return nullptr; for (i = 0; i < GDB_MAX_BP; i++) { if (p[i].active == 0) return &p[i]; } return nullptr; } static gdb_bp_t* gdb_bp_find(u32 type, u32 addr, u32 len) { gdb_bp_t* p; u32 i; p = gdb_bp_ptr(type); if (p == nullptr) return nullptr; for (i = 0; i < GDB_MAX_BP; i++) { if (p[i].active == 1 && p[i].addr == addr && p[i].len == len) return &p[i]; } return nullptr; } static void gdb_bp_remove(u32 type, u32 addr, u32 len) { gdb_bp_t* p; do { p = gdb_bp_find(type, addr, len); if (p != nullptr) { DEBUG_LOG(GDB_STUB, "gdb: removed a breakpoint: %08x bytes at %08x", len, addr); p->active = 0; memset(p, 0, sizeof(gdb_bp_t)); } } while (p != nullptr); } static int gdb_bp_check(u32 addr, u32 type) { gdb_bp_t* p; u32 i; p = gdb_bp_ptr(type); if (p == nullptr) return 0; for (i = 0; i < GDB_MAX_BP; i++) { if (p[i].active == 1 && (addr >= p[i].addr && addr < p[i].addr + p[i].len)) return 1; } return 0; } static void gdb_nak() { const char nak = GDB_STUB_NAK; ssize_t res; res = send(sock, &nak, 1, 0); if (res != 1) ERROR_LOG(GDB_STUB, "send failed"); } static void gdb_ack() { const char ack = GDB_STUB_ACK; ssize_t res; res = send(sock, &ack, 1, 0); if (res != 1) ERROR_LOG(GDB_STUB, "send failed"); } static void gdb_read_command() { u8 c; u8 chk_read, chk_calc; cmd_len = 0; memset(cmd_bfr, 0, sizeof cmd_bfr); c = gdb_read_byte(); if (c == '+') { // ignore ack return; } else if (c == 0x03) { CPU::Break(); gdb_signal(SIGTRAP); return; } else if (c != GDB_STUB_START) { DEBUG_LOG(GDB_STUB, "gdb: read invalid byte %02x", c); return; } while ((c = gdb_read_byte()) != GDB_STUB_END) { cmd_bfr[cmd_len++] = c; if (cmd_len == sizeof cmd_bfr) { ERROR_LOG(GDB_STUB, "gdb: cmd_bfr overflow"); gdb_nak(); return; } } chk_read = hex2char(gdb_read_byte()) << 4; chk_read |= hex2char(gdb_read_byte()); chk_calc = gdb_calc_chksum(); if (chk_calc != chk_read) { ERROR_LOG(GDB_STUB, "gdb: invalid checksum: calculated %02x and read %02x for $%s# (length: %d)", chk_calc, chk_read, cmd_bfr, cmd_len); cmd_len = 0; gdb_nak(); return; } DEBUG_LOG(GDB_STUB, "gdb: read command %c with a length of %d: %s", cmd_bfr[0], cmd_len, cmd_bfr); gdb_ack(); } static int gdb_data_available() { struct timeval t; fd_set _fds, *fds = &_fds; FD_ZERO(fds); FD_SET(sock, fds); t.tv_sec = 0; t.tv_usec = 20; if (select(sock + 1, fds, nullptr, nullptr, &t) < 0) { ERROR_LOG(GDB_STUB, "select failed"); return 0; } if (FD_ISSET(sock, fds)) return 1; return 0; } static void gdb_reply(const char* reply) { u8 chk; u32 left; u8* ptr; int n; if (!gdb_active()) return; memset(cmd_bfr, 0, sizeof cmd_bfr); cmd_len = strlen(reply); if (cmd_len + 4 > sizeof cmd_bfr) ERROR_LOG(GDB_STUB, "cmd_bfr overflow in gdb_reply"); memcpy(cmd_bfr + 1, reply, cmd_len); cmd_len++; chk = gdb_calc_chksum(); cmd_len--; cmd_bfr[0] = GDB_STUB_START; cmd_bfr[cmd_len + 1] = GDB_STUB_END; cmd_bfr[cmd_len + 2] = nibble2hex(chk >> 4); cmd_bfr[cmd_len + 3] = nibble2hex(chk); DEBUG_LOG(GDB_STUB, "gdb: reply (len: %d): %s", cmd_len, cmd_bfr); ptr = cmd_bfr; left = cmd_len + 4; while (left > 0) { n = send(sock, ptr, left, 0); if (n < 0) { ERROR_LOG(GDB_STUB, "gdb: send failed"); return gdb_deinit(); } left -= n; ptr += n; } } static void gdb_handle_query() { DEBUG_LOG(GDB_STUB, "gdb: query '%s'", cmd_bfr + 1); if (!strcmp((const char*)(cmd_bfr + 1), "TStatus")) { return gdb_reply("T0"); } gdb_reply(""); } static void gdb_handle_set_thread() { if (memcmp(cmd_bfr, "Hg0", 3) == 0 || memcmp(cmd_bfr, "Hc-1", 4) == 0 || memcmp(cmd_bfr, "Hc0", 4) == 0 || memcmp(cmd_bfr, "Hc1", 4) == 0) return gdb_reply("OK"); gdb_reply("E01"); } static void gdb_handle_signal() { char bfr[128]; memset(bfr, 0, sizeof bfr); sprintf(bfr, "T%02x%02x:%08x;%02x:%08x;", sig, 64, PC, 1, GPR(1)); gdb_reply(bfr); } static void wbe32hex(u8* p, u32 v) { u32 i; for (i = 0; i < 8; i++) p[i] = nibble2hex(v >> (28 - 4 * i)); } static void wbe64hex(u8* p, u64 v) { u32 i; for (i = 0; i < 16; i++) p[i] = nibble2hex(v >> (60 - 4 * i)); } static u32 re32hex(u8* p) { u32 i; u32 res = 0; for (i = 0; i < 8; i++) res = (res << 4) | hex2char(p[i]); return res; } static u64 re64hex(u8* p) { u32 i; u64 res = 0; for (i = 0; i < 16; i++) res = (res << 4) | hex2char(p[i]); return res; } static void gdb_read_register() { static u8 reply[64]; u32 id; memset(reply, 0, sizeof reply); id = hex2char(cmd_bfr[1]); if (cmd_bfr[2] != '\0') { id <<= 4; id |= hex2char(cmd_bfr[2]); } switch (id) { case 0 ... 31: wbe32hex(reply, GPR(id)); break; case 32 ... 63: wbe64hex(reply, riPS0(id - 32)); break; case 64: wbe32hex(reply, PC); break; case 65: wbe32hex(reply, MSR); break; case 66: wbe32hex(reply, PowerPC::GetCR()); break; case 67: wbe32hex(reply, LR); break; case 68: wbe32hex(reply, CTR); break; case 69: wbe32hex(reply, PowerPC::ppcState.spr[SPR_XER]); break; case 70: wbe32hex(reply, 0x0BADC0DE); break; case 71: wbe32hex(reply, FPSCR.Hex); break; default: return gdb_reply("E01"); break; } gdb_reply((char*)reply); } static void gdb_read_registers() { static u8 bfr[GDB_BFR_MAX - 4]; u8* bufptr = bfr; u32 i; memset(bfr, 0, sizeof bfr); for (i = 0; i < 32; i++) { wbe32hex(bufptr + i * 8, GPR(i)); } bufptr += 32 * 8; /* for (i = 0; i < 32; i++) { wbe32hex(bufptr + i*8, riPS0(i)); } bufptr += 32 * 8; wbe32hex(bufptr, PC); bufptr += 4; wbe32hex(bufptr, MSR); bufptr += 4; wbe32hex(bufptr, PowerPC::GetCR()); bufptr += 4; wbe32hex(bufptr, LR); bufptr += 4; wbe32hex(bufptr, CTR); bufptr += 4; wbe32hex(bufptr, PowerPC::ppcState.spr[SPR_XER]); bufptr += 4; // MQ register not used. wbe32hex(bufptr, 0x0BADC0DE); bufptr += 4; */ gdb_reply((char*)bfr); } static void gdb_write_registers() { u32 i; u8* bufptr = cmd_bfr; for (i = 0; i < 32; i++) { GPR(i) = re32hex(bufptr + i * 8); } bufptr += 32 * 8; gdb_reply("OK"); } static void gdb_write_register() { u32 id; u8* bufptr = cmd_bfr + 3; id = hex2char(cmd_bfr[1]); if (cmd_bfr[2] != '=') { ++bufptr; id <<= 4; id |= hex2char(cmd_bfr[2]); } switch (id) { case 0 ... 31: GPR(id) = re32hex(bufptr); break; case 32 ... 63: riPS0(id - 32) = re64hex(bufptr); break; case 64: PC = re32hex(bufptr); break; case 65: MSR = re32hex(bufptr); break; case 66: PowerPC::SetCR(re32hex(bufptr)); break; case 67: LR = re32hex(bufptr); break; case 68: CTR = re32hex(bufptr); break; case 69: PowerPC::ppcState.spr[SPR_XER] = re32hex(bufptr); break; case 70: // do nothing, we dont have MQ break; case 71: FPSCR.Hex = re32hex(bufptr); break; default: return gdb_reply("E01"); break; } gdb_reply("OK"); } static void gdb_read_mem() { static u8 reply[GDB_BFR_MAX - 4]; u32 addr, len; u32 i; i = 1; addr = 0; while (cmd_bfr[i] != ',') addr = (addr << 4) | hex2char(cmd_bfr[i++]); i++; len = 0; while (i < cmd_len) len = (len << 4) | hex2char(cmd_bfr[i++]); DEBUG_LOG(GDB_STUB, "gdb: read memory: %08x bytes from %08x", len, addr); if (len * 2 > sizeof reply) gdb_reply("E01"); u8* data = Memory::GetPointer(addr); if (!data) return gdb_reply("E0"); mem2hex(reply, data, len); reply[len * 2] = '\0'; gdb_reply((char*)reply); } static void gdb_write_mem() { u32 addr, len; u32 i; i = 1; addr = 0; while (cmd_bfr[i] != ',') addr = (addr << 4) | hex2char(cmd_bfr[i++]); i++; len = 0; while (cmd_bfr[i] != ':') len = (len << 4) | hex2char(cmd_bfr[i++]); DEBUG_LOG(GDB_STUB, "gdb: write memory: %08x bytes to %08x", len, addr); u8* dst = Memory::GetPointer(addr); if (!dst) return gdb_reply("E00"); hex2mem(dst, cmd_bfr + i + 1, len); gdb_reply("OK"); } // forces a break on next instruction check void gdb_break() { step_break = 1; send_signal = 1; } static void gdb_step() { gdb_break(); } static void gdb_continue() { send_signal = 1; } bool gdb_add_bp(u32 type, u32 addr, u32 len) { gdb_bp_t* bp; bp = gdb_bp_empty_slot(type); if (bp == nullptr) return false; bp->active = 1; bp->addr = addr; bp->len = len; DEBUG_LOG(GDB_STUB, "gdb: added %d breakpoint: %08x bytes at %08x", type, bp->len, bp->addr); return true; } static void _gdb_add_bp() { u32 type; u32 i, addr = 0, len = 0; type = hex2char(cmd_bfr[1]); switch (type) { case 0: case 1: type = GDB_BP_TYPE_X; break; case 2: type = GDB_BP_TYPE_W; break; case 3: type = GDB_BP_TYPE_R; break; case 4: type = GDB_BP_TYPE_A; break; default: return gdb_reply("E01"); } i = 3; while (cmd_bfr[i] != ',') addr = addr << 4 | hex2char(cmd_bfr[i++]); i++; while (i < cmd_len) len = len << 4 | hex2char(cmd_bfr[i++]); if (!gdb_add_bp(type, addr, len)) return gdb_reply("E02"); gdb_reply("OK"); } static void gdb_remove_bp() { u32 type, addr, len, i; type = hex2char(cmd_bfr[1]); switch (type) { case 0: case 1: type = GDB_BP_TYPE_X; break; case 2: type = GDB_BP_TYPE_W; break; case 3: type = GDB_BP_TYPE_R; break; case 4: type = GDB_BP_TYPE_A; break; default: return gdb_reply("E01"); } addr = 0; len = 0; i = 3; while (cmd_bfr[i] != ',') addr = (addr << 4) | hex2char(cmd_bfr[i++]); i++; while (i < cmd_len) len = (len << 4) | hex2char(cmd_bfr[i++]); gdb_bp_remove(type, addr, len); gdb_reply("OK"); } void gdb_handle_exception() { while (gdb_active()) { if (!gdb_data_available()) continue; gdb_read_command(); if (cmd_len == 0) continue; switch (cmd_bfr[0]) { case 'q': gdb_handle_query(); break; case 'H': gdb_handle_set_thread(); break; case '?': gdb_handle_signal(); break; case 'k': gdb_deinit(); INFO_LOG(GDB_STUB, "killed by gdb"); return; case 'g': gdb_read_registers(); break; case 'G': gdb_write_registers(); break; case 'p': gdb_read_register(); break; case 'P': gdb_write_register(); break; case 'm': gdb_read_mem(); break; case 'M': gdb_write_mem(); PowerPC::ppcState.iCache.Reset(); Host_UpdateDisasmDialog(); break; case 's': gdb_step(); return; case 'C': case 'c': gdb_continue(); return; case 'z': gdb_remove_bp(); break; case 'Z': _gdb_add_bp(); break; default: gdb_reply(""); break; } } } #ifdef _WIN32 WSADATA InitData; #endif // exported functions static void gdb_init_generic(int domain, const sockaddr* server_addr, socklen_t server_addrlen, sockaddr* client_addr, socklen_t* client_addrlen); #ifndef _WIN32 void gdb_init_local(const char* socket) { unlink(socket); sockaddr_un addr = {}; addr.sun_family = AF_UNIX; strcpy(addr.sun_path, socket); gdb_init_generic(PF_LOCAL, (const sockaddr*)&addr, sizeof(addr), NULL, NULL); } #endif void gdb_init(u32 port) { sockaddr_in saddr_server = {}; sockaddr_in saddr_client; saddr_server.sin_family = AF_INET; saddr_server.sin_port = htons(port); saddr_server.sin_addr.s_addr = INADDR_ANY; socklen_t client_addrlen = sizeof(saddr_client); gdb_init_generic(PF_INET, (const sockaddr*)&saddr_server, sizeof(saddr_server), (sockaddr*)&saddr_client, &client_addrlen); saddr_client.sin_addr.s_addr = ntohl(saddr_client.sin_addr.s_addr); /*if (((saddr_client.sin_addr.s_addr >> 24) & 0xff) != 127 || * ((saddr_client.sin_addr.s_addr >> 16) & 0xff) != 0 || * ((saddr_client.sin_addr.s_addr >> 8) & 0xff) != 0 || * ((saddr_client.sin_addr.s_addr >> 0) & 0xff) != 1) * ERROR_LOG(GDB_STUB, "gdb: incoming connection not from localhost"); */ } static void gdb_init_generic(int domain, const sockaddr* server_addr, socklen_t server_addrlen, sockaddr* client_addr, socklen_t* client_addrlen) { int on; #ifdef _WIN32 WSAStartup(MAKEWORD(2, 2), &InitData); #endif memset(bp_x, 0, sizeof bp_x); memset(bp_r, 0, sizeof bp_r); memset(bp_w, 0, sizeof bp_w); memset(bp_a, 0, sizeof bp_a); tmpsock = socket(domain, SOCK_STREAM, 0); if (tmpsock == -1) ERROR_LOG(GDB_STUB, "Failed to create gdb socket"); on = 1; if (setsockopt(tmpsock, SOL_SOCKET, SO_REUSEADDR, &on, sizeof on) < 0) ERROR_LOG(GDB_STUB, "Failed to setsockopt"); if (bind(tmpsock, server_addr, server_addrlen) < 0) ERROR_LOG(GDB_STUB, "Failed to bind gdb socket"); if (listen(tmpsock, 1) < 0) ERROR_LOG(GDB_STUB, "Failed to listen to gdb socket"); INFO_LOG(GDB_STUB, "Waiting for gdb to connect..."); sock = accept(tmpsock, client_addr, client_addrlen); if (sock < 0) ERROR_LOG(GDB_STUB, "Failed to accept gdb client"); INFO_LOG(GDB_STUB, "Client connected."); close(tmpsock); tmpsock = -1; } void gdb_deinit() { if (tmpsock != -1) { shutdown(tmpsock, SHUT_RDWR); tmpsock = -1; } if (sock != -1) { shutdown(sock, SHUT_RDWR); sock = -1; } #ifdef _WIN32 WSACleanup(); #endif } bool gdb_active() { return tmpsock != -1 || sock != -1; } int gdb_signal(u32 s) { if (sock == -1) return 1; sig = s; if (send_signal) { gdb_handle_signal(); send_signal = 0; } return 0; } int gdb_bp_x(u32 addr) { if (sock == -1) return 0; if (step_break) { step_break = 0; DEBUG_LOG(GDB_STUB, "Step was successful."); return 1; } return gdb_bp_check(addr, GDB_BP_TYPE_X); } int gdb_bp_r(u32 addr) { if (sock == -1) return 0; return gdb_bp_check(addr, GDB_BP_TYPE_R); } int gdb_bp_w(u32 addr) { if (sock == -1) return 0; return gdb_bp_check(addr, GDB_BP_TYPE_W); } int gdb_bp_a(u32 addr) { if (sock == -1) return 0; return gdb_bp_check(addr, GDB_BP_TYPE_A); }
{ "pile_set_name": "Github" }
// // detail/base_from_completion_cond.hpp // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // // Copyright (c) 2003-2016 Christopher M. Kohlhoff (chris at kohlhoff dot com) // // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // #ifndef BOOST_ASIO_DETAIL_BASE_FROM_COMPLETION_COND_HPP #define BOOST_ASIO_DETAIL_BASE_FROM_COMPLETION_COND_HPP #if defined(_MSC_VER) && (_MSC_VER >= 1200) # pragma once #endif // defined(_MSC_VER) && (_MSC_VER >= 1200) #include <boost/asio/detail/config.hpp> #include <boost/asio/completion_condition.hpp> #include <boost/asio/detail/push_options.hpp> namespace boost { namespace asio { namespace detail { template <typename CompletionCondition> class base_from_completion_cond { protected: explicit base_from_completion_cond(CompletionCondition completion_condition) : completion_condition_(completion_condition) { } std::size_t check_for_completion( const boost::system::error_code& ec, std::size_t total_transferred) { return detail::adapt_completion_condition_result( completion_condition_(ec, total_transferred)); } private: CompletionCondition completion_condition_; }; template <> class base_from_completion_cond<transfer_all_t> { protected: explicit base_from_completion_cond(transfer_all_t) { } static std::size_t check_for_completion( const boost::system::error_code& ec, std::size_t total_transferred) { return transfer_all_t()(ec, total_transferred); } }; } // namespace detail } // namespace asio } // namespace boost #include <boost/asio/detail/pop_options.hpp> #endif // BOOST_ASIO_DETAIL_BASE_FROM_COMPLETION_COND_HPP
{ "pile_set_name": "Github" }
--TEST-- parallel bootstrap fail --SKIPIF-- <?php if (!extension_loaded('parallel')) { echo 'skip'; } ?> --FILE-- <?php try { $parallel = new \parallel\Runtime(sprintf("%s/nope.php", __DIR__)); } catch (\parallel\Runtime\Error\Bootstrap $ex) { var_dump($ex->getMessage()); } ?> --EXPECTF-- Warning: Unknown: failed to open stream: No such file or directory in Unknown on line 0 string(%d) "bootstrapping failed with %snope.php"
{ "pile_set_name": "Github" }
set(CPACK_PACKAGE_NAME "${CMAKE_PROJECT_NAME}") set(CPACK_PACKAGE_CONTACT "${PROJECT_MAINTAINER}") set(CPACK_PACKAGE_VERSION ${iowow_VERSION}) set(CPACK_PACKAGE_VERSION_MAJOR ${iowow_VERSION_MAJOR}) set(CPACK_PACKAGE_VERSION_MINOR ${iowow_VERSION_MINOR}) set(CPACK_PACKAGE_VERSION_PATCH ${iowow_VERSION_PATCH}) set(CPACK_PACKAGE_VENDOR ${PROJECT_VENDOR}) set(CPACK_PACKAGE_DESCRIPTION_SUMMARY ${PROJECT_DESCRIPTION_SUMMARY}) set(CPACK_PACKAGE_DESCRIPTION ${PROJECT_DESCRIPTION}) set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_SOURCE_DIR}/LICENSE") set(CPACK_RESOURCE_FILE_README "${CMAKE_SOURCE_DIR}/README.md") set(CPACK_RESOURCE_FILE_WELCOME "${CMAKE_SOURCE_DIR}/README.md") set(CPACK_PACKAGE_FILE_NAME "${PROJECT_NAME}-${CPACK_PACKAGE_VERSION}-${CMAKE_BUILD_TYPE}-${CMAKE_SYSTEM_NAME}-${PROJECT_ARCH}") if (CMAKE_BUILD_TYPE STREQUAL "Release") set(CPACK_STRIP_FILES ON) endif() if (PACKAGE_DEB) execute_process ( COMMAND /usr/bin/dpkg --print-architecture OUTPUT_VARIABLE CPACK_DEBIAN_PACKAGE_ARCHITECTURE RESULT_VARIABLE EXECUTE_RESULT OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_QUIET ) if (EXECUTE_RESULT) message(FATAL_ERROR "dpkg not found: No package generation.") endif() set(CPACK_DEBIAN_PACKAGE_DESCRIPTION ${PROJECT_DESCRIPTION}) set(CPACK_DEBIAN_PACKAGE_HOMEPAGE ${PROJECT_WEBSITE}) set(CPACK_DEBIAN_PACKAGE_SECTION libs) set(CPACK_DEBIAN_PACKAGE_PRIORITY optional) #set(CPACK_DEBIAN_PACKAGE_DEPENDS zlib1g) set(CPACK_DEBIAN_PACKAGE_BUILD_DEPENDS pkg-config git devscripts dh-make) if (NOT PPA_DEBIAN_VERSION) set(PPA_DEBIAN_VERSION ppa1) endif() if (PROJECT_PPA) set(DPUT_HOST ${PROJECT_PPA}) endif() #set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "any") set(CPACK_DEBIAN_RESOURCE_FILE_CHANGELOG ${CMAKE_SOURCE_DIR}/Changelog) set(CPACK_DEBIAN_UPDATE_CHANGELOG ON) endif(PACKAGE_DEB) if (PACKAGE_TGZ) set(CPACK_INCLUDE_TOPLEVEL_DIRECTORY OFF) endif(PACKAGE_TGZ) include(CPack) if (ENABLE_PPA) if (NOT PROJECT_PPA_DISTRIB_TARGET) set(PROJECT_PPA_DISTRIB_TARGET xenial bionic) endif() set(DPUT_CONFIG_IN ${CMAKE_CURRENT_SOURCE_DIR}/debian/dput.cf.in) include(UploadPPA) endif()
{ "pile_set_name": "Github" }
--- jupyter: jupytext: notebook_metadata_filter: all text_representation: extension: .md format_name: markdown format_version: '1.1' jupytext_version: 1.1.7 kernelspec: display_name: Python 3 language: python name: python3 language_info: codemirror_mode: name: ipython version: 3 file_extension: .py mimetype: text/x-python name: python nbconvert_exporter: python pygments_lexer: ipython3 version: 3.6.5 plotly: description: How to use group by in Python with Plotly. display_as: transforms language: python layout: base name: Group By order: 2 page_type: example_index permalink: python/group-by/ thumbnail: thumbnail/groupby.jpg --- #### Basic Example ```python import plotly.io as pio subject = ['Moe','Larry','Curly','Moe','Larry','Curly','Moe','Larry','Curly','Moe','Larry','Curly'] score = [1,6,2,8,2,9,4,5,1,5,2,8] data = [dict( type = 'scatter', x = subject, y = score, mode = 'markers', transforms = [dict( type = 'groupby', groups = subject, styles = [ dict(target = 'Moe', value = dict(marker = dict(color = 'blue'))), dict(target = 'Larry', value = dict(marker = dict(color = 'red'))), dict(target = 'Curly', value = dict(marker = dict(color = 'black'))) ] )] )] fig_dict = dict(data=data) pio.show(fig_dict, validate=False) ``` #### Reference See https://plotly.com/python/reference/ for more information and chart attribute options!
{ "pile_set_name": "Github" }
// // CIImageRendererUtils.m // SCRecorder // // Created by Simon CORSIN on 13/09/14. // Copyright (c) 2014 rFlex. All rights reserved. // #import "CIImageRendererUtils.h" @implementation CIImageRendererUtils + (CGRect)processRect:(CGRect)rect withImageSize:(CGSize)imageSize contentScale:(CGFloat)contentScale contentMode:(UIViewContentMode)mode { rect = [CIImageRendererUtils rect:rect byApplyingContentScale:contentScale]; if (mode != UIViewContentModeScaleToFill) { CGFloat horizontalScale = rect.size.width / imageSize.width; CGFloat verticalScale = rect.size.height / imageSize.height; BOOL shouldResizeWidth = mode == UIViewContentModeScaleAspectFit ? horizontalScale > verticalScale : verticalScale > horizontalScale; BOOL shouldResizeHeight = mode == UIViewContentModeScaleAspectFit ? verticalScale > horizontalScale : horizontalScale > verticalScale; if (shouldResizeWidth) { CGFloat newWidth = imageSize.width * verticalScale; rect.origin.x = (rect.size.width / 2 - newWidth / 2); rect.size.width = newWidth; } else if (shouldResizeHeight) { CGFloat newHeight = imageSize.height * horizontalScale; rect.origin.y = (rect.size.height / 2 - newHeight / 2); rect.size.height = newHeight; } } return rect; } + (CGRect)rect:(CGRect)rect byApplyingContentScale:(CGFloat)scale { rect.origin.x *= scale; rect.origin.y *= scale; rect.size.width *= scale; rect.size.height *= scale; return rect; } + (CIImage *)generateImageFromSampleBufferHolder:(SCSampleBufferHolder *)sampleBufferHolder { CIImage *image = nil; CMSampleBufferRef sampleBuffer = sampleBufferHolder.sampleBuffer; if (sampleBuffer != nil) { image = [CIImage imageWithCVPixelBuffer:CMSampleBufferGetImageBuffer(sampleBuffer)]; sampleBufferHolder.sampleBuffer = nil; } return image; } + (CGAffineTransform)preferredCIImageTransformFromUIImage:(UIImage *)image { if (image.imageOrientation == UIImageOrientationUp) { return CGAffineTransformIdentity; } CGAffineTransform transform = CGAffineTransformIdentity; switch (image.imageOrientation) { case UIImageOrientationDown: case UIImageOrientationDownMirrored: transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height); transform = CGAffineTransformRotate(transform, M_PI); break; case UIImageOrientationLeft: case UIImageOrientationLeftMirrored: transform = CGAffineTransformTranslate(transform, image.size.width, 0); transform = CGAffineTransformRotate(transform, M_PI_2); break; case UIImageOrientationRight: case UIImageOrientationRightMirrored: transform = CGAffineTransformTranslate(transform, 0, image.size.height); transform = CGAffineTransformRotate(transform, -M_PI_2); break; case UIImageOrientationUp: case UIImageOrientationUpMirrored: break; } switch (image.imageOrientation) { case UIImageOrientationUpMirrored: case UIImageOrientationDownMirrored: transform = CGAffineTransformTranslate(transform, image.size.width, 0); transform = CGAffineTransformScale(transform, -1, 1); break; case UIImageOrientationLeftMirrored: case UIImageOrientationRightMirrored: transform = CGAffineTransformTranslate(transform, image.size.height, 0); transform = CGAffineTransformScale(transform, -1, 1); break; case UIImageOrientationUp: case UIImageOrientationDown: case UIImageOrientationLeft: case UIImageOrientationRight: break; } return transform; } + (void)putUIImage:(UIImage *)image toRenderer:(id<CIImageRenderer>)renderer { if (image == nil) { renderer.CIImage = nil; } else { renderer.preferredCIImageTransform = [CIImageRendererUtils preferredCIImageTransformFromUIImage:image]; renderer.CIImage = [CIImage imageWithCGImage:image.CGImage]; } } @end
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <vector xmlns:android="http://schemas.android.com/apk/res/android" android:width="24dp" android:height="24dp" android:viewportHeight="24" android:viewportWidth="24"> <path android:fillColor="#ff000000" android:pathData="M11,15h2v2h-2zm0,-8h2v6h-2zm0.99,-5C6.47,2 2,6.48 2,12s4.47,10 9.99,10C17.52,22 22,17.52 22,12S17.52,2 11.99,2zM12,20c-4.42,0 -8,-3.58 -8,-8s3.58,-8 8,-8 8,3.58 8,8 -3.58,8 -8,8z"/> </vector>
{ "pile_set_name": "Github" }
/***************************************************************************** * Open LiteSpeed is an open source HTTP server. * * Copyright (C) 2013 - 2020 LiteSpeed Technologies, Inc. * * * * This program is free software: you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation, either version 3 of the License, or * * (at your option) any later version. * * * * This program is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU General Public License for more details. * * * * You should have received a copy of the GNU General Public License * * along with this program. If not, see http://www.gnu.org/licenses/. * *****************************************************************************/ #ifdef RUN_TEST #include <http/reqparser.h> #include <unistd.h> #include <stdio.h> #include "unittest-cpp/UnitTest++.h" TEST(Reqparser_TEST1) { ReqParser parser; parser.testAll(); } #endif
{ "pile_set_name": "Github" }
/* * ConnectBot: simple, powerful, open-source SSH client for Android * Copyright 2017 Kenny Root, Jeffrey Sharkey * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.connectbot; import org.connectbot.util.VolumePreference; import org.connectbot.util.VolumePreferenceFragment; import com.takisoft.preferencex.PreferenceFragmentCompat; import android.os.Bundle; import androidx.fragment.app.DialogFragment; import androidx.preference.Preference; /** * Created by kenny on 2/20/17. */ public class SettingsFragment extends PreferenceFragmentCompat { public SettingsFragment() { } /** * Called when a preference in the tree requests to display a dialog. Subclasses should * override this method to display custom dialogs or to handle dialogs for custom preference * classes. * * @param preference The Preference object requesting the dialog. */ @Override public void onDisplayPreferenceDialog(Preference preference) { if (preference instanceof VolumePreference) { DialogFragment fragment = VolumePreferenceFragment.newInstance(preference); fragment.setTargetFragment(this, 0); fragment.show(getFragmentManager(), "android.support.v7.preference.PreferenceFragment.DIALOG"); } else { super.onDisplayPreferenceDialog(preference); } } @Override public void onCreatePreferencesFix(Bundle bundle, String rootKey) { setPreferencesFromResource(R.xml.preferences, rootKey); } }
{ "pile_set_name": "Github" }
21 51 36 46 21 33 50 54 45 25 42 17 22 2 11 44 17 31 41 28 24 22 18 36 11 41 33 22 22 27 15 40 33 49 29 85 28 40 22 49 39 39 29 39 37 77 40 58 61 24 31 18 9 51 72 43 56 53 72 74 76 42 33 50 43 34 65 90 26 71 74 90 35 73 58 49 48 69 56 75 38 53 78 62 80 78 36 34 64 19 22 73 45 91 60 69 74 67 14 30 16 66 60 65 92 24 69 71 78 35 25 29 36 23 51 18 64 11 25 75 57 65 51 45 32 35 31 54 65 64 39 82 30 67 35 45 38 63 68 46 53 58 24 88 77 16 23 47 69 54 39 63 79 61 81 58 66 27 20 48 49 49 58 57 39 42 58 39 68 33 65 49 45 58 17 61 39 64 54 61 49 48 51 29 52 67 56 49 56 24 28 52 40 61 54 66 33 53 27 49 51 45 62 58 68 53 41 56 64 44 47 50 62 57 46 60 70 58 48 44 43 59 30 60 51 28 53 2 55 35 13 42 64 33 56 33 11 9 34 39 62 23 64 34 18 45 37 55 39 21 64 52 56 17 25 42 30 19 26 56 48 60 60 32 46 30 29 16 36 12 35 49 42 44 30 38 77 56 51 37 54 49 61 67 54 0 59 82
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <string xmlns="http://tempuri.org/">{ "Info": [ { "IsSuccess": "True", "InAddress": "屏東縣屏東市中正路191號", "InSRS": "EPSG:4326", "InFuzzyType": "[單雙號機制]+[最近門牌號機制]", "InFuzzyBuffer": "0", "InIsOnlyFullMatch": "False", "InIsLockCounty": "True", "InIsLockTown": "False", "InIsLockVillage": "False", "InIsLockRoadSection": "False", "InIsLockLane": "False", "InIsLockAlley": "False", "InIsLockArea": "False", "InIsSameNumber_SubNumber": "True", "InCanIgnoreVillage": "True", "InCanIgnoreNeighborhood": "True", "InReturnMaxCount": "0", "OutTotal": "1", "OutMatchType": "完全比對", "OutMatchCode": "[屏東縣]\tFULL:1", "OutTraceInfo": "[屏東縣]\t { 完全比對 } 找到符合的門牌地址" } ], "AddressList": [ { "FULL_ADDR": "屏東縣屏東市斯文里19鄰中正路191號", "COUNTY": "屏東縣", "TOWN": "屏東市", "VILLAGE": "斯文里", "NEIGHBORHOOD": "19鄰", "ROAD": "中正路", "SECTION": "", "LANE": "", "ALLEY": "", "SUB_ALLEY": "", "TONG": "", "NUMBER": "191號", "X": 120.491081, "Y": 22.679777 } ] }</string>
{ "pile_set_name": "Github" }
/* * Copyright (c) 2011-2018, Meituan Dianping. All Rights Reserved. * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * *    http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dianping.zebra.group.config; import com.dianping.zebra.group.config.system.entity.SqlFlowControl; import com.dianping.zebra.group.config.system.entity.SystemConfig; import java.beans.PropertyChangeListener; import java.util.Map; public interface SystemConfigManager { void init(); void addListerner(PropertyChangeListener listener); SystemConfig getSystemConfig(); Map<String, SqlFlowControl> getSqlFlowControlMap(); }
{ "pile_set_name": "Github" }
eclipse.preferences.version=1 org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6 org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve org.eclipse.jdt.core.compiler.compliance=1.6 org.eclipse.jdt.core.compiler.debug.lineNumber=generate org.eclipse.jdt.core.compiler.debug.localVariable=generate org.eclipse.jdt.core.compiler.debug.sourceFile=generate org.eclipse.jdt.core.compiler.problem.assertIdentifier=error org.eclipse.jdt.core.compiler.problem.enumIdentifier=error org.eclipse.jdt.core.compiler.source=1.6
{ "pile_set_name": "Github" }
/* * Copyright (c) 2017 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 */ package org.eclipse.ditto.services.things.persistence.actors; import java.util.function.BiFunction; import java.util.regex.Pattern; import org.eclipse.ditto.model.base.entity.id.EntityId; import org.eclipse.ditto.model.things.ThingId; import org.eclipse.ditto.services.models.streaming.EntityIdWithRevision; import org.eclipse.ditto.services.models.things.ThingTag; import org.eclipse.ditto.services.utils.persistence.mongo.DefaultPersistenceStreamingActor; import org.eclipse.ditto.services.utils.persistence.mongo.SnapshotStreamingActor; import org.eclipse.ditto.services.utils.persistence.mongo.streaming.PidWithSeqNr; import akka.actor.ActorRef; import akka.actor.Props; /** * Creates an actor which streams information about persisted things. */ public final class ThingsPersistenceStreamingActorCreator { /** * The name of the event streaming actor. Must agree with * {@link org.eclipse.ditto.services.models.things.ThingsMessagingConstants#THINGS_STREAM_PROVIDER_ACTOR_PATH}. */ public static final String EVENT_STREAMING_ACTOR_NAME = "persistenceStreamingActor"; /** * The name of the snapshot streaming actor. Must agree with * {@link org.eclipse.ditto.services.models.things.ThingsMessagingConstants#THINGS_SNAPSHOT_STREAMING_ACTOR_PATH}. */ public static final String SNAPSHOT_STREAMING_ACTOR_NAME = "snapshotStreamingActor"; private static final Pattern PERSISTENCE_ID_PATTERN = Pattern.compile(ThingPersistenceActor.PERSISTENCE_ID_PREFIX); private ThingsPersistenceStreamingActorCreator() { throw new AssertionError(); } /** * Create an actor for streaming from the event journal. * * @param streamingCacheSize the size of the streaming cache. * @param actorCreator function to create a named actor with. * @return a reference of the created actor. */ public static ActorRef startEventStreamingActor(final int streamingCacheSize, final BiFunction<String, Props, ActorRef> actorCreator) { final Props props = DefaultPersistenceStreamingActor.props(ThingTag.class, ThingsPersistenceStreamingActorCreator::createElement, ThingsPersistenceStreamingActorCreator::createPidWithSeqNr); return actorCreator.apply(EVENT_STREAMING_ACTOR_NAME, props); } /** * Create an actor that streams from the snapshot store. * * @param actorCreator function to create a named actor with. * @return a reference of the created actor. */ public static ActorRef startSnapshotStreamingActor(final BiFunction<String, Props, ActorRef> actorCreator) { final Props props = SnapshotStreamingActor.props(ThingsPersistenceStreamingActorCreator::pid2EntityId, ThingsPersistenceStreamingActorCreator::entityId2Pid); return actorCreator.apply(SNAPSHOT_STREAMING_ACTOR_NAME, props); } private static ThingTag createElement(final PidWithSeqNr pidWithSeqNr) { return ThingTag.of(pid2EntityId(pidWithSeqNr.getPersistenceId()), pidWithSeqNr.getSequenceNr()); } private static PidWithSeqNr createPidWithSeqNr(final EntityIdWithRevision thingTag) { return new PidWithSeqNr(entityId2Pid(thingTag.getEntityId()), thingTag.getRevision()); } private static ThingId pid2EntityId(final String pid) { final String id = PERSISTENCE_ID_PATTERN.matcher(pid).replaceFirst(""); return ThingId.of(id); } private static String entityId2Pid(final EntityId entityId) { return ThingPersistenceActor.PERSISTENCE_ID_PREFIX + entityId; } }
{ "pile_set_name": "Github" }
function Out-SCF { <# .SYNOPSIS Nishang script useful for creating SCF files which could be used to capture NTLM hashes. .DESCRIPTION The script generates a SCF file. The file (default name "SystemCatalog.scf") needs to be put on a share. Whenever a user opens the file on the share, his credentials are sent to the specifed capture server. The IP address of the capture server is specifed in the icon field. There are various good servers to capture hashes in this way, a PowerShell one is Inveigh (https://github.com/Kevin-Robertson/Inveigh) The script is based on a blog by Rob Fuller (@mubix) .PARAMETER IPAddress IPAddress of the capture server. .PARAMETER OutputPath Path to the .scf file to be generated. Default is with the name SystemCatalog.scf in the current directory. .EXAMPLE PS > Out-SCF IPAddress 192.168.230.1 Put the generated scf file in a shared folder. When a user opens the share (it is not required to open the scf file), his NTLM hashes can be captured on the capture server running on the specified IP. .LINK https://room362.com/post/2016/smb-http-auth-capture-via-scf https://github.com/samratashok/nishang #> [CmdletBinding()] Param( [Parameter(Position = 0, Mandatory = $False)] [String] $IPAddress, [Parameter(Position = 3, Mandatory = $False)] [String] $OutputPath = "$pwd\SystemCatalog.scf" ) $scf = @" [Shell] Command=2 IconFile=\\$IPAddress\share\test.ico [Taskbar] Command=ToggleDesktop "@ Out-File -InputObject $scf -FilePath $OutputPath -Encoding default Write-Output "SCF file written to $OutputPath" Write-Output "Put $OutputPath on a share." }
{ "pile_set_name": "Github" }
--- title: "Artifacts on NFS" meta_title: "NFS" meta_description: "Using NFS to organize your jobs' outputs and experiments' artifacts. Polyaxon allows users to connect to one or multiple NFS servers to store job outputs and experiment artifacts." custom_excerpt: "The Network File System (NFS) is a client/server application that lets a computer user view and optionally store and update files on a remote computer as though they were on the user's own computer." image: "../../content/images/integrations/nfs.png" author: name: "Polyaxon" slug: "Polyaxon" website: "https://polyaxon.com" twitter: "polyaxonAI" github: "polyaxon" tags: - artifacts - storage featured: false popularity: 0 visibility: public status: published --- You can use one or multiple NFS servers to store logs, job outputs, and experiment artifacts. ## Overview This guide shows how to use an NFS server to mount a volume to collect artifacts and artifacts of your jobs and experiments. This guide uses the [click-to-deploy single-node file server](https://console.cloud.google.com/marketplace/details/click-to-deploy-images/singlefs) on Google Cloud Platform to create a ZFS file server running on a single Google Compute Engine instance, but the same principle applies to an NFS server running on any platform. ## Create a Single Node Filer Using [click-to-deploy single-node file server](https://console.cloud.google.com/marketplace/details/click-to-deploy-images/singlefs), you need to create a filer: `polyaxon-nfs`, and keep the default value `data`, and check `enable NFS sharing`. You can set the storage to 50GB for example. ## Create a folder for hosting your artifacts Use ssh to create a folder for your artifacts `plx-artifacts` under `/data`: ```bash gcloud --project "polyaxon-test" compute ssh --ssh-flag=-L3000:localhost:3000 --zone=us-central1-b polyaxon-nfs-vm ``` ```bash cd /data ``` ```bash mkdir -m 777 plx-artifacts ``` ## Get the ip address of the filer ```bash gcloud --project "polyaxon-test" compute instances describe polyaxon-nfs-vm --zone=us-central1-b --format='value(networkInterfaces[0].networkIP)' ``` > You might need to use the correct project name and zone. ## Create a PVC with the correct ip addresses Create `artifacts-pvc.yml` containing the following PVS definition: ```yaml apiVersion: v1 kind: PersistentVolume metadata: name: polyaxon-pv-artifacts spec: capacity: storage: 45Gi accessModes: - ReadWriteMany nfs: server: 10.138.0.3 # Use the right IP path: "/data/plx-artifacts" claimRef: namespace: polyaxon name: polyaxon-pvc-artifacts --- kind: PersistentVolumeClaim apiVersion: v1 metadata: name: polyaxon-pvc-artifacts spec: accessModes: - ReadWriteMany resources: requests: storage: 45Gi ``` ## Use kubectl to create the PVC based on the nfs server Under the same namespace where you are deploying Polyaxon, e.g. `polyaxon`, create the PVC using kubectl ```bash kubectl create -f artifacts-pvc.yml -n polyaxon ``` ## Now you can use this PVC to mount the artifacts volume to your experiments and jobs in Polyaxon In order to use the PVC with Polyaxon, you can follow the [artifacts on Persistent Volume Claim](/integrations/artifacts-on-pvc/).
{ "pile_set_name": "Github" }
using System; using System.Collections.Generic; using System.Data; using System.Linq.Expressions; using System.Threading.Tasks; namespace RepoDb { /// <summary> /// A base object for all entity-based repositories. /// </summary> public abstract partial class BaseRepository<TEntity, TDbConnection> : IDisposable { #region Update<TEntity> /// <summary> /// Updates an existing row in the table. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public int Update(TEntity entity, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.Update<TEntity>(entity: entity, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression. /// </summary> /// <typeparam name="TWhat">The type of the expression or the key value.</typeparam> /// <param name="entity">The data entity object to be updated.</param> /// <param name="what">The dynamic expression or the primary/identity key value to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public int Update<TWhat>(TEntity entity, TWhat what, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.Update<TEntity, TWhat>(entity: entity, what: what, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="what">The dynamic expression or the primary/identity key value to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public int Update(TEntity entity, object what, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.Update<TEntity>(entity: entity, what: what, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="where">The query expression to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public int Update(TEntity entity, Expression<Func<TEntity, bool>> where, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.Update<TEntity>(entity: entity, where: where, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="where">The query expression to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public int Update(TEntity entity, QueryField where, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.Update<TEntity>(entity: entity, where: where, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="where">The query expression to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public int Update(TEntity entity, IEnumerable<QueryField> where, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.Update<TEntity>(entity: entity, where: where, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="where">The query expression to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public int Update(TEntity entity, QueryGroup where, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.Update<TEntity>(entity: entity, where: where, fields: fields, hints: hints, transaction: transaction); } #endregion #region UpdateAsync<TEntity> /// <summary> /// Updates an existing row in the table in an asynchronous way. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public Task<int> UpdateAsync(TEntity entity, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.UpdateAsync<TEntity>(entity: entity, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression in an asynchronous way. /// </summary> /// <typeparam name="TWhat">The type of the expression or the key value.</typeparam> /// <param name="entity">The data entity object to be updated.</param> /// <param name="what">The dynamic expression or the primary/identity key value to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public Task<int> UpdateAsync<TWhat>(TEntity entity, TWhat what, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.UpdateAsync<TEntity, TWhat>(entity: entity, what: what, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression in an asynchronous way. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="what">The dynamic expression or the primary/identity key value to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public Task<int> UpdateAsync(TEntity entity, object what, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.UpdateAsync<TEntity>(entity: entity, what: what, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression in an asynchronous way. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="where">The query expression to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public Task<int> UpdateAsync(TEntity entity, Expression<Func<TEntity, bool>> where, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.UpdateAsync<TEntity>(entity: entity, where: where, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression in an asynchronous way. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="where">The query expression to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public Task<int> UpdateAsync(TEntity entity, QueryField where, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.UpdateAsync<TEntity>(entity: entity, where: where, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression in an asynchronous way. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="where">The query expression to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public Task<int> UpdateAsync(TEntity entity, IEnumerable<QueryField> where, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.UpdateAsync<TEntity>(entity: entity, where: where, fields: fields, hints: hints, transaction: transaction); } /// <summary> /// Updates an existing row in the table based on the given query expression in an asynchronous way. /// </summary> /// <param name="entity">The data entity object to be updated.</param> /// <param name="where">The query expression to be used.</param> /// <param name="fields">The mapping list of <see cref="Field"/> objects to be used.</param> /// <param name="hints">The table hints to be used.</param> /// <param name="transaction">The transaction to be used.</param> /// <returns>The number of affected rows during the update process.</returns> public Task<int> UpdateAsync(TEntity entity, QueryGroup where, IEnumerable<Field> fields = null, string hints = null, IDbTransaction transaction = null) { return DbRepository.UpdateAsync<TEntity>(entity: entity, where: where, fields: fields, hints: hints, transaction: transaction); } #endregion } }
{ "pile_set_name": "Github" }
// Distributed under the terms of the MIT license // Test case submitted to project by https://github.com/practicalswift (practicalswift) // Test case found by fuzzing init<T : b> { class B<T where T where T where T: P { enum b : a { func f<T { } protocol c : a { let a { } init<d { enum b { } { func f<T: a { struct d<T : b { struct S<T { } } } } } enum b { protocol c : P { } } } var b { func c, class case c,
{ "pile_set_name": "Github" }
/****************************************************************************** * Copyright 2018 The Apollo Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *****************************************************************************/ #ifndef CYBER_SCHEDULER_POLICY_SCHEDULER_CHOREOGRAPHY_H_ #define CYBER_SCHEDULER_POLICY_SCHEDULER_CHOREOGRAPHY_H_ #include <memory> #include <string> #include <unordered_map> #include <vector> #include "cyber/croutine/croutine.h" #include "cyber/proto/choreography_conf.pb.h" #include "cyber/scheduler/scheduler.h" namespace apollo { namespace cyber { namespace scheduler { using apollo::cyber::croutine::CRoutine; using apollo::cyber::proto::ChoreographyTask; class SchedulerChoreography : public Scheduler { public: bool RemoveCRoutine(uint64_t crid) override; bool RemoveTask(const std::string& name) override; bool DispatchTask(const std::shared_ptr<CRoutine>&) override; private: friend Scheduler* Instance(); SchedulerChoreography(); void CreateProcessor(); bool NotifyProcessor(uint64_t crid) override; std::unordered_map<std::string, ChoreographyTask> cr_confs_; int32_t choreography_processor_prio_; int32_t pool_processor_prio_; std::string choreography_affinity_; std::string pool_affinity_; std::string choreography_processor_policy_; std::string pool_processor_policy_; std::vector<int> choreography_cpuset_; std::vector<int> pool_cpuset_; }; } // namespace scheduler } // namespace cyber } // namespace apollo #endif // CYBER_SCHEDULER_POLICY_SCHEDULER_CHOREOGRAPHY_H_
{ "pile_set_name": "Github" }
'use strict'; const prettyPrintTypes = (types) => { const addArticle = (str) => { const vowels = ['a', 'e', 'i', 'o', 'u']; if (vowels.indexOf(str[0]) !== -1) { return `an ${str}`; } return `a ${str}`; }; return types.map(addArticle).join(' or '); }; const isArrayOfNotation = (typeDefinition) => { return /array of /.test(typeDefinition); }; const extractTypeFromArrayOfNotation = (typeDefinition) => { // The notation is e.g. 'array of string' return typeDefinition.split(' of ')[1]; }; const isValidTypeDefinition = (typeStr) => { if (isArrayOfNotation(typeStr)) { return isValidTypeDefinition(extractTypeFromArrayOfNotation(typeStr)); } return [ 'string', 'number', 'boolean', 'array', 'object', 'buffer', 'null', 'undefined', 'function', ].some((validType) => { return validType === typeStr; }); }; const detectType = (value) => { if (value === null) { return 'null'; } if (Array.isArray(value)) { return 'array'; } if (Buffer.isBuffer(value)) { return 'buffer'; } return typeof value; }; const onlyUniqueValuesInArrayFilter = (value, index, self) => { return self.indexOf(value) === index; }; const detectTypeDeep = (value) => { let type = detectType(value); let typesInArray; if (type === 'array') { typesInArray = value .map((element) => { return detectType(element); }) .filter(onlyUniqueValuesInArrayFilter); type += ` of ${typesInArray.join(', ')}`; } return type; }; const validateArray = (argumentValue, typeToCheck) => { const allowedTypeInArray = extractTypeFromArrayOfNotation(typeToCheck); if (detectType(argumentValue) !== 'array') { return false; } return argumentValue.every((element) => { return detectType(element) === allowedTypeInArray; }); }; const validateArgument = (methodName, argumentName, argumentValue, argumentMustBe) => { const isOneOfAllowedTypes = argumentMustBe.some((type) => { if (!isValidTypeDefinition(type)) { throw new Error(`Unknown type "${type}"`); } if (isArrayOfNotation(type)) { return validateArray(argumentValue, type); } return type === detectType(argumentValue); }); if (!isOneOfAllowedTypes) { throw new Error(`Argument "${argumentName}" passed to ${methodName} must be ${prettyPrintTypes(argumentMustBe)}. Received ${detectTypeDeep(argumentValue)}`); } }; const validateOptions = (methodName, optionsObjName, obj, allowedOptions) => { if (obj !== undefined) { validateArgument(methodName, optionsObjName, obj, ['object']); Object.keys(obj).forEach((key) => { const argName = `${optionsObjName}.${key}`; if (allowedOptions[key] !== undefined) { validateArgument(methodName, argName, obj[key], allowedOptions[key]); } else { throw new Error(`Unknown argument "${argName}" passed to ${methodName}`); } }); } }; module.exports = { argument: validateArgument, options: validateOptions, };
{ "pile_set_name": "Github" }
package pflag import "strconv" // -- int32 Value type int32Value int32 func newInt32Value(val int32, p *int32) *int32Value { *p = val return (*int32Value)(p) } func (i *int32Value) Set(s string) error { v, err := strconv.ParseInt(s, 0, 32) *i = int32Value(v) return err } func (i *int32Value) Type() string { return "int32" } func (i *int32Value) String() string { return strconv.FormatInt(int64(*i), 10) } func int32Conv(sval string) (interface{}, error) { v, err := strconv.ParseInt(sval, 0, 32) if err != nil { return 0, err } return int32(v), nil } // GetInt32 return the int32 value of a flag with the given name func (f *FlagSet) GetInt32(name string) (int32, error) { val, err := f.getFlagType(name, "int32", int32Conv) if err != nil { return 0, err } return val.(int32), nil } // Int32Var defines an int32 flag with specified name, default value, and usage string. // The argument p points to an int32 variable in which to store the value of the flag. func (f *FlagSet) Int32Var(p *int32, name string, value int32, usage string) { f.VarP(newInt32Value(value, p), name, "", usage) } // Int32VarP is like Int32Var, but accepts a shorthand letter that can be used after a single dash. func (f *FlagSet) Int32VarP(p *int32, name, shorthand string, value int32, usage string) { f.VarP(newInt32Value(value, p), name, shorthand, usage) } // Int32Var defines an int32 flag with specified name, default value, and usage string. // The argument p points to an int32 variable in which to store the value of the flag. func Int32Var(p *int32, name string, value int32, usage string) { CommandLine.VarP(newInt32Value(value, p), name, "", usage) } // Int32VarP is like Int32Var, but accepts a shorthand letter that can be used after a single dash. func Int32VarP(p *int32, name, shorthand string, value int32, usage string) { CommandLine.VarP(newInt32Value(value, p), name, shorthand, usage) } // Int32 defines an int32 flag with specified name, default value, and usage string. // The return value is the address of an int32 variable that stores the value of the flag. func (f *FlagSet) Int32(name string, value int32, usage string) *int32 { p := new(int32) f.Int32VarP(p, name, "", value, usage) return p } // Int32P is like Int32, but accepts a shorthand letter that can be used after a single dash. func (f *FlagSet) Int32P(name, shorthand string, value int32, usage string) *int32 { p := new(int32) f.Int32VarP(p, name, shorthand, value, usage) return p } // Int32 defines an int32 flag with specified name, default value, and usage string. // The return value is the address of an int32 variable that stores the value of the flag. func Int32(name string, value int32, usage string) *int32 { return CommandLine.Int32P(name, "", value, usage) } // Int32P is like Int32, but accepts a shorthand letter that can be used after a single dash. func Int32P(name, shorthand string, value int32, usage string) *int32 { return CommandLine.Int32P(name, shorthand, value, usage) }
{ "pile_set_name": "Github" }
// SPDX-License-Identifier: GPL-2.0-only /* * Copyright (c) 2012-2014, The Linux Foundation. All rights reserved. */ #include <linux/gpio/driver.h> #include <linux/module.h> #include <linux/of.h> #include <linux/of_irq.h> #include <linux/pinctrl/pinconf-generic.h> #include <linux/pinctrl/pinconf.h> #include <linux/pinctrl/pinmux.h> #include <linux/platform_device.h> #include <linux/regmap.h> #include <linux/slab.h> #include <linux/types.h> #include <dt-bindings/pinctrl/qcom,pmic-mpp.h> #include "../core.h" #include "../pinctrl-utils.h" #define PMIC_MPP_ADDRESS_RANGE 0x100 /* * Pull Up Values - it indicates whether a pull-up should be * applied for bidirectional mode only. The hardware ignores the * configuration when operating in other modes. */ #define PMIC_MPP_PULL_UP_0P6KOHM 0 #define PMIC_MPP_PULL_UP_10KOHM 1 #define PMIC_MPP_PULL_UP_30KOHM 2 #define PMIC_MPP_PULL_UP_OPEN 3 /* type registers base address bases */ #define PMIC_MPP_REG_TYPE 0x4 #define PMIC_MPP_REG_SUBTYPE 0x5 /* mpp peripheral type and subtype values */ #define PMIC_MPP_TYPE 0x11 #define PMIC_MPP_SUBTYPE_4CH_NO_ANA_OUT 0x3 #define PMIC_MPP_SUBTYPE_ULT_4CH_NO_ANA_OUT 0x4 #define PMIC_MPP_SUBTYPE_4CH_NO_SINK 0x5 #define PMIC_MPP_SUBTYPE_ULT_4CH_NO_SINK 0x6 #define PMIC_MPP_SUBTYPE_4CH_FULL_FUNC 0x7 #define PMIC_MPP_SUBTYPE_8CH_FULL_FUNC 0xf #define PMIC_MPP_REG_RT_STS 0x10 #define PMIC_MPP_REG_RT_STS_VAL_MASK 0x1 /* control register base address bases */ #define PMIC_MPP_REG_MODE_CTL 0x40 #define PMIC_MPP_REG_DIG_VIN_CTL 0x41 #define PMIC_MPP_REG_DIG_PULL_CTL 0x42 #define PMIC_MPP_REG_DIG_IN_CTL 0x43 #define PMIC_MPP_REG_EN_CTL 0x46 #define PMIC_MPP_REG_AOUT_CTL 0x48 #define PMIC_MPP_REG_AIN_CTL 0x4a #define PMIC_MPP_REG_SINK_CTL 0x4c /* PMIC_MPP_REG_MODE_CTL */ #define PMIC_MPP_REG_MODE_VALUE_MASK 0x1 #define PMIC_MPP_REG_MODE_FUNCTION_SHIFT 1 #define PMIC_MPP_REG_MODE_FUNCTION_MASK 0x7 #define PMIC_MPP_REG_MODE_DIR_SHIFT 4 #define PMIC_MPP_REG_MODE_DIR_MASK 0x7 /* PMIC_MPP_REG_DIG_VIN_CTL */ #define PMIC_MPP_REG_VIN_SHIFT 0 #define PMIC_MPP_REG_VIN_MASK 0x7 /* PMIC_MPP_REG_DIG_PULL_CTL */ #define PMIC_MPP_REG_PULL_SHIFT 0 #define PMIC_MPP_REG_PULL_MASK 0x7 /* PMIC_MPP_REG_EN_CTL */ #define PMIC_MPP_REG_MASTER_EN_SHIFT 7 /* PMIC_MPP_REG_AIN_CTL */ #define PMIC_MPP_REG_AIN_ROUTE_SHIFT 0 #define PMIC_MPP_REG_AIN_ROUTE_MASK 0x7 #define PMIC_MPP_MODE_DIGITAL_INPUT 0 #define PMIC_MPP_MODE_DIGITAL_OUTPUT 1 #define PMIC_MPP_MODE_DIGITAL_BIDIR 2 #define PMIC_MPP_MODE_ANALOG_BIDIR 3 #define PMIC_MPP_MODE_ANALOG_INPUT 4 #define PMIC_MPP_MODE_ANALOG_OUTPUT 5 #define PMIC_MPP_MODE_CURRENT_SINK 6 #define PMIC_MPP_SELECTOR_NORMAL 0 #define PMIC_MPP_SELECTOR_PAIRED 1 #define PMIC_MPP_SELECTOR_DTEST_FIRST 4 #define PMIC_MPP_PHYSICAL_OFFSET 1 /* Qualcomm specific pin configurations */ #define PMIC_MPP_CONF_AMUX_ROUTE (PIN_CONFIG_END + 1) #define PMIC_MPP_CONF_ANALOG_LEVEL (PIN_CONFIG_END + 2) #define PMIC_MPP_CONF_DTEST_SELECTOR (PIN_CONFIG_END + 3) #define PMIC_MPP_CONF_PAIRED (PIN_CONFIG_END + 4) /** * struct pmic_mpp_pad - keep current MPP settings * @base: Address base in SPMI device. * @irq: IRQ number which this MPP generate. * @is_enabled: Set to false when MPP should be put in high Z state. * @out_value: Cached pin output value. * @output_enabled: Set to true if MPP output logic is enabled. * @input_enabled: Set to true if MPP input buffer logic is enabled. * @paired: Pin operates in paired mode * @has_pullup: Pin has support to configure pullup * @num_sources: Number of power-sources supported by this MPP. * @power_source: Current power-source used. * @amux_input: Set the source for analog input. * @aout_level: Analog output level * @pullup: Pullup resistor value. Valid in Bidirectional mode only. * @function: See pmic_mpp_functions[]. * @drive_strength: Amount of current in sink mode * @dtest: DTEST route selector */ struct pmic_mpp_pad { u16 base; int irq; bool is_enabled; bool out_value; bool output_enabled; bool input_enabled; bool paired; bool has_pullup; unsigned int num_sources; unsigned int power_source; unsigned int amux_input; unsigned int aout_level; unsigned int pullup; unsigned int function; unsigned int drive_strength; unsigned int dtest; }; struct pmic_mpp_state { struct device *dev; struct regmap *map; struct pinctrl_dev *ctrl; struct gpio_chip chip; }; static const struct pinconf_generic_params pmic_mpp_bindings[] = { {"qcom,amux-route", PMIC_MPP_CONF_AMUX_ROUTE, 0}, {"qcom,analog-level", PMIC_MPP_CONF_ANALOG_LEVEL, 0}, {"qcom,dtest", PMIC_MPP_CONF_DTEST_SELECTOR, 0}, {"qcom,paired", PMIC_MPP_CONF_PAIRED, 0}, }; #ifdef CONFIG_DEBUG_FS static const struct pin_config_item pmic_conf_items[] = { PCONFDUMP(PMIC_MPP_CONF_AMUX_ROUTE, "analog mux", NULL, true), PCONFDUMP(PMIC_MPP_CONF_ANALOG_LEVEL, "analog level", NULL, true), PCONFDUMP(PMIC_MPP_CONF_DTEST_SELECTOR, "dtest", NULL, true), PCONFDUMP(PMIC_MPP_CONF_PAIRED, "paired", NULL, false), }; #endif static const char *const pmic_mpp_groups[] = { "mpp1", "mpp2", "mpp3", "mpp4", "mpp5", "mpp6", "mpp7", "mpp8", }; #define PMIC_MPP_DIGITAL 0 #define PMIC_MPP_ANALOG 1 #define PMIC_MPP_SINK 2 static const char *const pmic_mpp_functions[] = { "digital", "analog", "sink" }; static int pmic_mpp_read(struct pmic_mpp_state *state, struct pmic_mpp_pad *pad, unsigned int addr) { unsigned int val; int ret; ret = regmap_read(state->map, pad->base + addr, &val); if (ret < 0) dev_err(state->dev, "read 0x%x failed\n", addr); else ret = val; return ret; } static int pmic_mpp_write(struct pmic_mpp_state *state, struct pmic_mpp_pad *pad, unsigned int addr, unsigned int val) { int ret; ret = regmap_write(state->map, pad->base + addr, val); if (ret < 0) dev_err(state->dev, "write 0x%x failed\n", addr); return ret; } static int pmic_mpp_get_groups_count(struct pinctrl_dev *pctldev) { /* Every PIN is a group */ return pctldev->desc->npins; } static const char *pmic_mpp_get_group_name(struct pinctrl_dev *pctldev, unsigned pin) { return pctldev->desc->pins[pin].name; } static int pmic_mpp_get_group_pins(struct pinctrl_dev *pctldev, unsigned pin, const unsigned **pins, unsigned *num_pins) { *pins = &pctldev->desc->pins[pin].number; *num_pins = 1; return 0; } static const struct pinctrl_ops pmic_mpp_pinctrl_ops = { .get_groups_count = pmic_mpp_get_groups_count, .get_group_name = pmic_mpp_get_group_name, .get_group_pins = pmic_mpp_get_group_pins, .dt_node_to_map = pinconf_generic_dt_node_to_map_group, .dt_free_map = pinctrl_utils_free_map, }; static int pmic_mpp_get_functions_count(struct pinctrl_dev *pctldev) { return ARRAY_SIZE(pmic_mpp_functions); } static const char *pmic_mpp_get_function_name(struct pinctrl_dev *pctldev, unsigned function) { return pmic_mpp_functions[function]; } static int pmic_mpp_get_function_groups(struct pinctrl_dev *pctldev, unsigned function, const char *const **groups, unsigned *const num_qgroups) { *groups = pmic_mpp_groups; *num_qgroups = pctldev->desc->npins; return 0; } static int pmic_mpp_write_mode_ctl(struct pmic_mpp_state *state, struct pmic_mpp_pad *pad) { unsigned int mode; unsigned int sel; unsigned int val; unsigned int en; switch (pad->function) { case PMIC_MPP_ANALOG: if (pad->input_enabled && pad->output_enabled) mode = PMIC_MPP_MODE_ANALOG_BIDIR; else if (pad->input_enabled) mode = PMIC_MPP_MODE_ANALOG_INPUT; else mode = PMIC_MPP_MODE_ANALOG_OUTPUT; break; case PMIC_MPP_DIGITAL: if (pad->input_enabled && pad->output_enabled) mode = PMIC_MPP_MODE_DIGITAL_BIDIR; else if (pad->input_enabled) mode = PMIC_MPP_MODE_DIGITAL_INPUT; else mode = PMIC_MPP_MODE_DIGITAL_OUTPUT; break; case PMIC_MPP_SINK: default: mode = PMIC_MPP_MODE_CURRENT_SINK; break; } if (pad->dtest) sel = PMIC_MPP_SELECTOR_DTEST_FIRST + pad->dtest - 1; else if (pad->paired) sel = PMIC_MPP_SELECTOR_PAIRED; else sel = PMIC_MPP_SELECTOR_NORMAL; en = !!pad->out_value; val = mode << PMIC_MPP_REG_MODE_DIR_SHIFT | sel << PMIC_MPP_REG_MODE_FUNCTION_SHIFT | en; return pmic_mpp_write(state, pad, PMIC_MPP_REG_MODE_CTL, val); } static int pmic_mpp_set_mux(struct pinctrl_dev *pctldev, unsigned function, unsigned pin) { struct pmic_mpp_state *state = pinctrl_dev_get_drvdata(pctldev); struct pmic_mpp_pad *pad; unsigned int val; int ret; pad = pctldev->desc->pins[pin].drv_data; pad->function = function; ret = pmic_mpp_write_mode_ctl(state, pad); if (ret < 0) return ret; val = pad->is_enabled << PMIC_MPP_REG_MASTER_EN_SHIFT; return pmic_mpp_write(state, pad, PMIC_MPP_REG_EN_CTL, val); } static const struct pinmux_ops pmic_mpp_pinmux_ops = { .get_functions_count = pmic_mpp_get_functions_count, .get_function_name = pmic_mpp_get_function_name, .get_function_groups = pmic_mpp_get_function_groups, .set_mux = pmic_mpp_set_mux, }; static int pmic_mpp_config_get(struct pinctrl_dev *pctldev, unsigned int pin, unsigned long *config) { unsigned param = pinconf_to_config_param(*config); struct pmic_mpp_pad *pad; unsigned arg = 0; pad = pctldev->desc->pins[pin].drv_data; switch (param) { case PIN_CONFIG_BIAS_DISABLE: if (pad->pullup != PMIC_MPP_PULL_UP_OPEN) return -EINVAL; arg = 1; break; case PIN_CONFIG_BIAS_PULL_UP: switch (pad->pullup) { case PMIC_MPP_PULL_UP_0P6KOHM: arg = 600; break; case PMIC_MPP_PULL_UP_10KOHM: arg = 10000; break; case PMIC_MPP_PULL_UP_30KOHM: arg = 30000; break; default: return -EINVAL; } break; case PIN_CONFIG_BIAS_HIGH_IMPEDANCE: if (pad->is_enabled) return -EINVAL; arg = 1; break; case PIN_CONFIG_POWER_SOURCE: arg = pad->power_source; break; case PIN_CONFIG_INPUT_ENABLE: if (!pad->input_enabled) return -EINVAL; arg = 1; break; case PIN_CONFIG_OUTPUT: arg = pad->out_value; break; case PMIC_MPP_CONF_DTEST_SELECTOR: arg = pad->dtest; break; case PMIC_MPP_CONF_AMUX_ROUTE: arg = pad->amux_input; break; case PMIC_MPP_CONF_PAIRED: if (!pad->paired) return -EINVAL; arg = 1; break; case PIN_CONFIG_DRIVE_STRENGTH: arg = pad->drive_strength; break; case PMIC_MPP_CONF_ANALOG_LEVEL: arg = pad->aout_level; break; default: return -EINVAL; } /* Convert register value to pinconf value */ *config = pinconf_to_config_packed(param, arg); return 0; } static int pmic_mpp_config_set(struct pinctrl_dev *pctldev, unsigned int pin, unsigned long *configs, unsigned nconfs) { struct pmic_mpp_state *state = pinctrl_dev_get_drvdata(pctldev); struct pmic_mpp_pad *pad; unsigned param, arg; unsigned int val; int i, ret; pad = pctldev->desc->pins[pin].drv_data; /* Make it possible to enable the pin, by not setting high impedance */ pad->is_enabled = true; for (i = 0; i < nconfs; i++) { param = pinconf_to_config_param(configs[i]); arg = pinconf_to_config_argument(configs[i]); switch (param) { case PIN_CONFIG_BIAS_DISABLE: pad->pullup = PMIC_MPP_PULL_UP_OPEN; break; case PIN_CONFIG_BIAS_PULL_UP: switch (arg) { case 600: pad->pullup = PMIC_MPP_PULL_UP_0P6KOHM; break; case 10000: pad->pullup = PMIC_MPP_PULL_UP_10KOHM; break; case 30000: pad->pullup = PMIC_MPP_PULL_UP_30KOHM; break; default: return -EINVAL; } break; case PIN_CONFIG_BIAS_HIGH_IMPEDANCE: pad->is_enabled = false; break; case PIN_CONFIG_POWER_SOURCE: if (arg >= pad->num_sources) return -EINVAL; pad->power_source = arg; break; case PIN_CONFIG_INPUT_ENABLE: pad->input_enabled = arg ? true : false; break; case PIN_CONFIG_OUTPUT: pad->output_enabled = true; pad->out_value = arg; break; case PMIC_MPP_CONF_DTEST_SELECTOR: pad->dtest = arg; break; case PIN_CONFIG_DRIVE_STRENGTH: pad->drive_strength = arg; break; case PMIC_MPP_CONF_AMUX_ROUTE: if (arg >= PMIC_MPP_AMUX_ROUTE_ABUS4) return -EINVAL; pad->amux_input = arg; break; case PMIC_MPP_CONF_ANALOG_LEVEL: pad->aout_level = arg; break; case PMIC_MPP_CONF_PAIRED: pad->paired = !!arg; break; default: return -EINVAL; } } val = pad->power_source << PMIC_MPP_REG_VIN_SHIFT; ret = pmic_mpp_write(state, pad, PMIC_MPP_REG_DIG_VIN_CTL, val); if (ret < 0) return ret; if (pad->has_pullup) { val = pad->pullup << PMIC_MPP_REG_PULL_SHIFT; ret = pmic_mpp_write(state, pad, PMIC_MPP_REG_DIG_PULL_CTL, val); if (ret < 0) return ret; } val = pad->amux_input & PMIC_MPP_REG_AIN_ROUTE_MASK; ret = pmic_mpp_write(state, pad, PMIC_MPP_REG_AIN_CTL, val); if (ret < 0) return ret; ret = pmic_mpp_write(state, pad, PMIC_MPP_REG_AOUT_CTL, pad->aout_level); if (ret < 0) return ret; ret = pmic_mpp_write_mode_ctl(state, pad); if (ret < 0) return ret; ret = pmic_mpp_write(state, pad, PMIC_MPP_REG_SINK_CTL, pad->drive_strength); if (ret < 0) return ret; val = pad->is_enabled << PMIC_MPP_REG_MASTER_EN_SHIFT; return pmic_mpp_write(state, pad, PMIC_MPP_REG_EN_CTL, val); } static void pmic_mpp_config_dbg_show(struct pinctrl_dev *pctldev, struct seq_file *s, unsigned pin) { struct pmic_mpp_state *state = pinctrl_dev_get_drvdata(pctldev); struct pmic_mpp_pad *pad; int ret; static const char *const biases[] = { "0.6kOhm", "10kOhm", "30kOhm", "Disabled" }; pad = pctldev->desc->pins[pin].drv_data; seq_printf(s, " mpp%-2d:", pin + PMIC_MPP_PHYSICAL_OFFSET); if (!pad->is_enabled) { seq_puts(s, " ---"); } else { if (pad->input_enabled) { ret = pmic_mpp_read(state, pad, PMIC_MPP_REG_RT_STS); if (ret < 0) return; ret &= PMIC_MPP_REG_RT_STS_VAL_MASK; pad->out_value = ret; } seq_printf(s, " %-4s", pad->output_enabled ? "out" : "in"); seq_printf(s, " %-7s", pmic_mpp_functions[pad->function]); seq_printf(s, " vin-%d", pad->power_source); seq_printf(s, " %d", pad->aout_level); if (pad->has_pullup) seq_printf(s, " %-8s", biases[pad->pullup]); seq_printf(s, " %-4s", pad->out_value ? "high" : "low"); if (pad->dtest) seq_printf(s, " dtest%d", pad->dtest); if (pad->paired) seq_puts(s, " paired"); } } static const struct pinconf_ops pmic_mpp_pinconf_ops = { .is_generic = true, .pin_config_group_get = pmic_mpp_config_get, .pin_config_group_set = pmic_mpp_config_set, .pin_config_group_dbg_show = pmic_mpp_config_dbg_show, }; static int pmic_mpp_direction_input(struct gpio_chip *chip, unsigned pin) { struct pmic_mpp_state *state = gpiochip_get_data(chip); unsigned long config; config = pinconf_to_config_packed(PIN_CONFIG_INPUT_ENABLE, 1); return pmic_mpp_config_set(state->ctrl, pin, &config, 1); } static int pmic_mpp_direction_output(struct gpio_chip *chip, unsigned pin, int val) { struct pmic_mpp_state *state = gpiochip_get_data(chip); unsigned long config; config = pinconf_to_config_packed(PIN_CONFIG_OUTPUT, val); return pmic_mpp_config_set(state->ctrl, pin, &config, 1); } static int pmic_mpp_get(struct gpio_chip *chip, unsigned pin) { struct pmic_mpp_state *state = gpiochip_get_data(chip); struct pmic_mpp_pad *pad; int ret; pad = state->ctrl->desc->pins[pin].drv_data; if (pad->input_enabled) { ret = pmic_mpp_read(state, pad, PMIC_MPP_REG_RT_STS); if (ret < 0) return ret; pad->out_value = ret & PMIC_MPP_REG_RT_STS_VAL_MASK; } return !!pad->out_value; } static void pmic_mpp_set(struct gpio_chip *chip, unsigned pin, int value) { struct pmic_mpp_state *state = gpiochip_get_data(chip); unsigned long config; config = pinconf_to_config_packed(PIN_CONFIG_OUTPUT, value); pmic_mpp_config_set(state->ctrl, pin, &config, 1); } static int pmic_mpp_of_xlate(struct gpio_chip *chip, const struct of_phandle_args *gpio_desc, u32 *flags) { if (chip->of_gpio_n_cells < 2) return -EINVAL; if (flags) *flags = gpio_desc->args[1]; return gpio_desc->args[0] - PMIC_MPP_PHYSICAL_OFFSET; } static int pmic_mpp_to_irq(struct gpio_chip *chip, unsigned pin) { struct pmic_mpp_state *state = gpiochip_get_data(chip); struct pmic_mpp_pad *pad; pad = state->ctrl->desc->pins[pin].drv_data; return pad->irq; } static void pmic_mpp_dbg_show(struct seq_file *s, struct gpio_chip *chip) { struct pmic_mpp_state *state = gpiochip_get_data(chip); unsigned i; for (i = 0; i < chip->ngpio; i++) { pmic_mpp_config_dbg_show(state->ctrl, s, i); seq_puts(s, "\n"); } } static const struct gpio_chip pmic_mpp_gpio_template = { .direction_input = pmic_mpp_direction_input, .direction_output = pmic_mpp_direction_output, .get = pmic_mpp_get, .set = pmic_mpp_set, .request = gpiochip_generic_request, .free = gpiochip_generic_free, .of_xlate = pmic_mpp_of_xlate, .to_irq = pmic_mpp_to_irq, .dbg_show = pmic_mpp_dbg_show, }; static int pmic_mpp_populate(struct pmic_mpp_state *state, struct pmic_mpp_pad *pad) { int type, subtype, val, dir; unsigned int sel; type = pmic_mpp_read(state, pad, PMIC_MPP_REG_TYPE); if (type < 0) return type; if (type != PMIC_MPP_TYPE) { dev_err(state->dev, "incorrect block type 0x%x at 0x%x\n", type, pad->base); return -ENODEV; } subtype = pmic_mpp_read(state, pad, PMIC_MPP_REG_SUBTYPE); if (subtype < 0) return subtype; switch (subtype) { case PMIC_MPP_SUBTYPE_4CH_NO_ANA_OUT: case PMIC_MPP_SUBTYPE_ULT_4CH_NO_ANA_OUT: case PMIC_MPP_SUBTYPE_4CH_NO_SINK: case PMIC_MPP_SUBTYPE_ULT_4CH_NO_SINK: case PMIC_MPP_SUBTYPE_4CH_FULL_FUNC: pad->num_sources = 4; break; case PMIC_MPP_SUBTYPE_8CH_FULL_FUNC: pad->num_sources = 8; break; default: dev_err(state->dev, "unknown MPP type 0x%x at 0x%x\n", subtype, pad->base); return -ENODEV; } val = pmic_mpp_read(state, pad, PMIC_MPP_REG_MODE_CTL); if (val < 0) return val; pad->out_value = val & PMIC_MPP_REG_MODE_VALUE_MASK; dir = val >> PMIC_MPP_REG_MODE_DIR_SHIFT; dir &= PMIC_MPP_REG_MODE_DIR_MASK; switch (dir) { case PMIC_MPP_MODE_DIGITAL_INPUT: pad->input_enabled = true; pad->output_enabled = false; pad->function = PMIC_MPP_DIGITAL; break; case PMIC_MPP_MODE_DIGITAL_OUTPUT: pad->input_enabled = false; pad->output_enabled = true; pad->function = PMIC_MPP_DIGITAL; break; case PMIC_MPP_MODE_DIGITAL_BIDIR: pad->input_enabled = true; pad->output_enabled = true; pad->function = PMIC_MPP_DIGITAL; break; case PMIC_MPP_MODE_ANALOG_BIDIR: pad->input_enabled = true; pad->output_enabled = true; pad->function = PMIC_MPP_ANALOG; break; case PMIC_MPP_MODE_ANALOG_INPUT: pad->input_enabled = true; pad->output_enabled = false; pad->function = PMIC_MPP_ANALOG; break; case PMIC_MPP_MODE_ANALOG_OUTPUT: pad->input_enabled = false; pad->output_enabled = true; pad->function = PMIC_MPP_ANALOG; break; case PMIC_MPP_MODE_CURRENT_SINK: pad->input_enabled = false; pad->output_enabled = true; pad->function = PMIC_MPP_SINK; break; default: dev_err(state->dev, "unknown MPP direction\n"); return -ENODEV; } sel = val >> PMIC_MPP_REG_MODE_FUNCTION_SHIFT; sel &= PMIC_MPP_REG_MODE_FUNCTION_MASK; if (sel >= PMIC_MPP_SELECTOR_DTEST_FIRST) pad->dtest = sel + 1; else if (sel == PMIC_MPP_SELECTOR_PAIRED) pad->paired = true; val = pmic_mpp_read(state, pad, PMIC_MPP_REG_DIG_VIN_CTL); if (val < 0) return val; pad->power_source = val >> PMIC_MPP_REG_VIN_SHIFT; pad->power_source &= PMIC_MPP_REG_VIN_MASK; if (subtype != PMIC_MPP_SUBTYPE_ULT_4CH_NO_ANA_OUT && subtype != PMIC_MPP_SUBTYPE_ULT_4CH_NO_SINK) { val = pmic_mpp_read(state, pad, PMIC_MPP_REG_DIG_PULL_CTL); if (val < 0) return val; pad->pullup = val >> PMIC_MPP_REG_PULL_SHIFT; pad->pullup &= PMIC_MPP_REG_PULL_MASK; pad->has_pullup = true; } val = pmic_mpp_read(state, pad, PMIC_MPP_REG_AIN_CTL); if (val < 0) return val; pad->amux_input = val >> PMIC_MPP_REG_AIN_ROUTE_SHIFT; pad->amux_input &= PMIC_MPP_REG_AIN_ROUTE_MASK; val = pmic_mpp_read(state, pad, PMIC_MPP_REG_SINK_CTL); if (val < 0) return val; pad->drive_strength = val; val = pmic_mpp_read(state, pad, PMIC_MPP_REG_AOUT_CTL); if (val < 0) return val; pad->aout_level = val; val = pmic_mpp_read(state, pad, PMIC_MPP_REG_EN_CTL); if (val < 0) return val; pad->is_enabled = !!val; return 0; } static int pmic_mpp_probe(struct platform_device *pdev) { struct device *dev = &pdev->dev; struct pinctrl_pin_desc *pindesc; struct pinctrl_desc *pctrldesc; struct pmic_mpp_pad *pad, *pads; struct pmic_mpp_state *state; int ret, npins, i; u32 reg; ret = of_property_read_u32(dev->of_node, "reg", &reg); if (ret < 0) { dev_err(dev, "missing base address"); return ret; } npins = platform_irq_count(pdev); if (!npins) return -EINVAL; if (npins < 0) return npins; BUG_ON(npins > ARRAY_SIZE(pmic_mpp_groups)); state = devm_kzalloc(dev, sizeof(*state), GFP_KERNEL); if (!state) return -ENOMEM; platform_set_drvdata(pdev, state); state->dev = &pdev->dev; state->map = dev_get_regmap(dev->parent, NULL); pindesc = devm_kcalloc(dev, npins, sizeof(*pindesc), GFP_KERNEL); if (!pindesc) return -ENOMEM; pads = devm_kcalloc(dev, npins, sizeof(*pads), GFP_KERNEL); if (!pads) return -ENOMEM; pctrldesc = devm_kzalloc(dev, sizeof(*pctrldesc), GFP_KERNEL); if (!pctrldesc) return -ENOMEM; pctrldesc->pctlops = &pmic_mpp_pinctrl_ops; pctrldesc->pmxops = &pmic_mpp_pinmux_ops; pctrldesc->confops = &pmic_mpp_pinconf_ops; pctrldesc->owner = THIS_MODULE; pctrldesc->name = dev_name(dev); pctrldesc->pins = pindesc; pctrldesc->npins = npins; pctrldesc->num_custom_params = ARRAY_SIZE(pmic_mpp_bindings); pctrldesc->custom_params = pmic_mpp_bindings; #ifdef CONFIG_DEBUG_FS pctrldesc->custom_conf_items = pmic_conf_items; #endif for (i = 0; i < npins; i++, pindesc++) { pad = &pads[i]; pindesc->drv_data = pad; pindesc->number = i; pindesc->name = pmic_mpp_groups[i]; pad->irq = platform_get_irq(pdev, i); if (pad->irq < 0) return pad->irq; pad->base = reg + i * PMIC_MPP_ADDRESS_RANGE; ret = pmic_mpp_populate(state, pad); if (ret < 0) return ret; } state->chip = pmic_mpp_gpio_template; state->chip.parent = dev; state->chip.base = -1; state->chip.ngpio = npins; state->chip.label = dev_name(dev); state->chip.of_gpio_n_cells = 2; state->chip.can_sleep = false; state->ctrl = devm_pinctrl_register(dev, pctrldesc, state); if (IS_ERR(state->ctrl)) return PTR_ERR(state->ctrl); ret = gpiochip_add_data(&state->chip, state); if (ret) { dev_err(state->dev, "can't add gpio chip\n"); return ret; } ret = gpiochip_add_pin_range(&state->chip, dev_name(dev), 0, 0, npins); if (ret) { dev_err(dev, "failed to add pin range\n"); goto err_range; } return 0; err_range: gpiochip_remove(&state->chip); return ret; } static int pmic_mpp_remove(struct platform_device *pdev) { struct pmic_mpp_state *state = platform_get_drvdata(pdev); gpiochip_remove(&state->chip); return 0; } static const struct of_device_id pmic_mpp_of_match[] = { { .compatible = "qcom,pm8841-mpp" }, /* 4 MPP's */ { .compatible = "qcom,pm8916-mpp" }, /* 4 MPP's */ { .compatible = "qcom,pm8941-mpp" }, /* 8 MPP's */ { .compatible = "qcom,pm8950-mpp" }, /* 4 MPP's */ { .compatible = "qcom,pmi8950-mpp" }, /* 4 MPP's */ { .compatible = "qcom,pm8994-mpp" }, /* 8 MPP's */ { .compatible = "qcom,pma8084-mpp" }, /* 8 MPP's */ { .compatible = "qcom,spmi-mpp" }, /* Generic */ { }, }; MODULE_DEVICE_TABLE(of, pmic_mpp_of_match); static struct platform_driver pmic_mpp_driver = { .driver = { .name = "qcom-spmi-mpp", .of_match_table = pmic_mpp_of_match, }, .probe = pmic_mpp_probe, .remove = pmic_mpp_remove, }; module_platform_driver(pmic_mpp_driver); MODULE_AUTHOR("Ivan T. Ivanov <[email protected]>"); MODULE_DESCRIPTION("Qualcomm SPMI PMIC MPP pin control driver"); MODULE_ALIAS("platform:qcom-spmi-mpp"); MODULE_LICENSE("GPL v2");
{ "pile_set_name": "Github" }
<?xml version="1.0" standalone="no" ?> <!DOCTYPE pov SYSTEM "/usr/share/cgc-docs/replay.dtd"> <pov> <cbid>service</cbid> <replay> <read><delim>\x0a</delim><match><data>2D Particle Simulator\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Enter the number of particles to simulate \x281-10\x29:\x0a</data></match></read> <write><data>8\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Position (x,y):\x0a</data></match></read> <write><data>121.41717,141.67660\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Velocity (x,y):\x0a</data></match></read> <write><data>8.49637,6.17148\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Mass:\x0a</data></match></read> <write><data>1.48435\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Radius:\x0a</data></match></read> <write><data>9.07339\x0a</data></write> <read><delim>\x0a</delim><match><data>Particle #1 added at (121.41717,141.67660) velocity(8.49637,6.17148) mass(1.48435) radius(9.07339).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Enter Position (x,y):\x0a</data></match></read> <write><data>133.51814,172.57804\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Velocity (x,y):\x0a</data></match></read> <write><data>0.65282,6.34017\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Mass:\x0a</data></match></read> <write><data>9.98650\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Radius:\x0a</data></match></read> <write><data>9.82363\x0a</data></write> <read><delim>\x0a</delim><match><data>Particle #2 added at (133.51814,172.57804) velocity(0.65282,6.34017) mass(9.98650) radius(9.82363).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Enter Position (x,y):\x0a</data></match></read> <write><data>176.72654,117.36771\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Velocity (x,y):\x0a</data></match></read> <write><data>6.67108,9.06932\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Mass:\x0a</data></match></read> <write><data>8.82026\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Radius:\x0a</data></match></read> <write><data>7.34462\x0a</data></write> <read><delim>\x0a</delim><match><data>Particle #3 added at (176.72654,117.36771) velocity(6.67108,9.06932) mass(8.82026) radius(7.34462).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Enter Position (x,y):\x0a</data></match></read> <write><data>185.61964,175.93530\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Velocity (x,y):\x0a</data></match></read> <write><data>7.65806,1.54625\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Mass:\x0a</data></match></read> <write><data>5.86677\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Radius:\x0a</data></match></read> <write><data>8.37198\x0a</data></write> <read><delim>\x0a</delim><match><data>Particle #4 added at (185.61964,175.93530) velocity(7.65806,1.54625) mass(5.86677) radius(8.37198).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Enter Position (x,y):\x0a</data></match></read> <write><data>105.99641,172.09624\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Velocity (x,y):\x0a</data></match></read> <write><data>0.30226,3.53534\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Mass:\x0a</data></match></read> <write><data>1.15629\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Radius:\x0a</data></match></read> <write><data>2.95464\x0a</data></write> <read><delim>\x0a</delim><match><data>Particle #5 added at (105.99641,172.09624) velocity(0.30226,3.53534) mass(1.15629) radius(2.95464).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Enter Position (x,y):\x0a</data></match></read> <write><data>115.65415,193.36213\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Velocity (x,y):\x0a</data></match></read> <write><data>1.02618,6.66458\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Mass:\x0a</data></match></read> <write><data>5.91258\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Radius:\x0a</data></match></read> <write><data>5.14644\x0a</data></write> <read><delim>\x0a</delim><match><data>Particle #6 added at (115.65415,193.36213) velocity(1.02618,6.66458) mass(5.91258) radius(5.14644).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Enter Position (x,y):\x0a</data></match></read> <write><data>172.90458,138.09336\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Velocity (x,y):\x0a</data></match></read> <write><data>6.24204,5.53686\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Mass:\x0a</data></match></read> <write><data>3.19674\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Radius:\x0a</data></match></read> <write><data>2.63793\x0a</data></write> <read><delim>\x0a</delim><match><data>Particle #7 added at (172.90458,138.09336) velocity(6.24204,5.53686) mass(3.19674) radius(2.63793).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Enter Position (x,y):\x0a</data></match></read> <write><data>428.93487,165.75142\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Velocity (x,y):\x0a</data></match></read> <write><data>-9.94966,-32.77771\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Mass:\x0a</data></match></read> <write><data>95.18667\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Radius:\x0a</data></match></read> <write><data>0.83351\x0a</data></write> <read><delim>\x0a</delim><match><data>Invalid simulation data. Try again.\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Enter Position (x,y):\x0a</data></match></read> <write><data>161.78378,168.59290\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Velocity (x,y):\x0a</data></match></read> <write><data>2.64092,3.42055\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Mass:\x0a</data></match></read> <write><data>7.95167\x0a</data></write> <read><delim>\x0a</delim><match><data>Enter Radius:\x0a</data></match></read> <write><data>2.95477\x0a</data></write> <read><delim>\x0a</delim><match><data>Particle #8 added at (161.78378,168.59290) velocity(2.64092,3.42055) mass(7.95167) radius(2.95477).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Running simulation with...\x0a</data></match></read> <read><delim>\x0a</delim><match><data>8 total particles:\x0a</data></match></read> <read><delim>\x0a</delim><match><data>0: Position (121.41000,141.67000) Velocity (8.49000,6.17000) mass (1.48000) radius (9.07000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>1: Position (133.51000,172.57000) Velocity (0.65000,6.34000) mass (9.98000) radius (9.82000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>2: Position (176.72000,117.36000) Velocity (6.67000,9.06000) mass (8.82000) radius (7.34000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>3: Position (185.61000,175.93000) Velocity (7.65000,1.54000) mass (5.86000) radius (8.37000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>4: Position (105.99000,172.09000) Velocity (0.30000,3.53000) mass (1.15000) radius (2.95000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>5: Position (115.65000,193.36000) Velocity (1.02000,6.66000) mass (5.91000) radius (5.14000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>6: Position (172.90000,138.09000) Velocity (6.24000,5.53000) mass (3.19000) radius (2.63000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>7: Position (161.78000,168.59000) Velocity (2.64000,3.42000) mass (7.95000) radius (2.95000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>--------------------\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00010000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000100\x0a</data></match></read> <read><delim>\x0a</delim><match><data>01000010000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000010000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00001000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000100000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000010000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>--------------------\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Simulation complete, 12 collisions simulated over 10 seconds in 40 frames.\x0a</data></match></read> <read><delim>\x0a</delim><match><data>8 total particles:\x0a</data></match></read> <read><delim>\x0a</delim><match><data>0: Position (140.73810,111.01089) Velocity (-2.04548,13.87615) mass (1.48000) radius (9.07000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>1: Position (118.63413,159.41287) Velocity (7.67715,-3.07530) mass (9.98000) radius (9.82000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>2: Position (154.05143,174.26519) Velocity (-7.26790,0.26222) mass (8.82000) radius (7.34000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>3: Position (172.33685,167.45887) Velocity (-4.53159,-9.11759) mass (5.86000) radius (8.37000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>4: Position (108.99000,186.71000) Velocity (0.30000,-3.53000) mass (1.15000) radius (2.95000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>5: Position (125.85000,129.76000) Velocity (1.02000,-6.66000) mass (5.91000) radius (5.14000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>6: Position (186.90488,173.38035) Velocity (-1.69149,-2.24929) mass (3.19000) radius (2.63000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>7: Position (123.09685,194.74197) Velocity (-5.95094,2.68271) mass (7.95000) radius (2.95000).\x0a</data></match></read> <read><delim>\x0a</delim><match><data>--------------------\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00001000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>01000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000001000000100\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000100000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00010000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000100000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000100000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>00000000000000000000\x0a</data></match></read> <read><delim>\x0a</delim><match><data>--------------------\x0a</data></match></read> <read><delim>\x0a</delim><match><data>Goodbye\x0a</data></match></read> </replay> </pov>
{ "pile_set_name": "Github" }
%% %% wings_proxy.erl -- %% %% This module implements the smooth proxy. %% %% Copyright (c) 2001-2011 Bjorn Gustavsson & Dan Gudmundsson %% %% See the file "license.terms" for information on usage and redistribution %% of this file, and for a DISCLAIMER OF ALL WARRANTIES. %% %% $Id$ %% -module(wings_proxy). -export([setup/1,quick_preview/1,update/2,draw_smooth_edges/3, smooth/2, smooth_dl/1, flat_dl/1, invalidate/2, split_proxy/3, update_dynamic/3, reset_dynamic/1]). -export_type([sp/0]). -define(NEED_OPENGL, 1). -include("wings.hrl"). -import(lists, [foldl/3,reverse/1]). -record(split, {upd_fs, % Update only these faces dyn, % Update tables info % proxy drag info }). -record(sp, {src_we=#we{}, % Previous source we. we=none, % Previous smoothed we. split, type = ?MODULE, %% Display Lists faces = none, smooth = none, proxy_edges = none, vab = none %% Note: face_map is a list ordered in face appearance order in bins above }). -type sp() :: #sp{}. quick_preview(_St) -> case any_proxy() of false -> setup_all(true), wings_wm:set_prop(workmode, false); true -> setup_all(false), wings_wm:set_prop(workmode, true) end. -spec invalidate('none'|#sp{}, 'vab'|'dl'|'edges'|'maybe'|'all') -> 'none'|#sp{}. invalidate(none, _) -> none; invalidate(#sp{}=Pd, 'vab') -> %% Invalidate vertex buffers - implies invalidating displays lists. %% Used when there are changes having to do with UV coordinates and/or %% vertex colors (including toggling their visibility using %% View|Show Colors or View|Show Textures). Pd#sp{faces=none,smooth=none,vab=none,proxy_edges=none}; invalidate(#sp{}=Pd, 'dl') -> %% Invalidate displays lists. Pd#sp{faces=none,smooth=none}; invalidate(#sp{faces=none}=Pd, maybe) -> Pd; invalidate(#sp{faces=FL}=Pd, maybe) -> Pd#sp{faces=[FL]}; invalidate(#sp{}=Pd, edges) -> Pd#sp{proxy_edges=none}; invalidate(#sp{}, all) -> none. smooth_dl(#sp{smooth=Smooth}) when Smooth =/= none -> Smooth; smooth_dl(#sp{smooth=none, faces=FL}) when FL =/= none -> {[FL,[]], false}; smooth_dl(_) -> {none, false}. flat_dl(#sp{faces=FL}) -> FL. setup(#st{sel=OrigSel}=St) -> wings_dl:map(fun(D, Sel) -> setup_1(D, Sel) end, OrigSel), {save_state,wings_sel:reset(St)}. setup_1(#dlo{src_we=#we{id=Id}=We}=D, [{Id,_}|Sel]) when ?IS_ANY_LIGHT(We) -> %% Never use proxies on lights. {D#dlo{proxy=false},Sel}; setup_1(#dlo{src_we=#we{id=Id},proxy=false, proxy_data=Pd}=D, [{Id,_}|Sel]) -> case Pd of none -> Wire0 = wings_wm:get_prop(wings_wm:this(), wireframed_objects), Wire = gb_sets:add(Id, Wire0), wings_wm:set_prop(wings_wm:this(), wireframed_objects, Wire), {D#dlo{proxy=true, proxy_data=#sp{}},Sel}; _ -> Wire0 = wings_wm:get_prop(wings_wm:this(), wireframed_objects), Wire = gb_sets:add(Id, Wire0), wings_wm:set_prop(wings_wm:this(), wireframed_objects, Wire), {D#dlo{proxy=true},Sel} end; setup_1(#dlo{src_we=#we{id=Id},proxy=true}=D, [{Id,_}|Sel]) -> Wire0 = wings_wm:get_prop(wings_wm:this(), wireframed_objects), Wire = gb_sets:delete_any(Id, Wire0), wings_wm:set_prop(wings_wm:this(), wireframed_objects, Wire), {D#dlo{proxy=false},Sel}; setup_1(D, Sel) -> {D,Sel}. setup_all(Activate) -> wings_dl:map(fun(D, _) -> setup_all(D, Activate) end, []). setup_all(#dlo{src_we=We}=D, _) when ?IS_ANY_LIGHT(We) -> %% Never use proxies on lights. D#dlo{proxy=false}; setup_all(#dlo{src_we=#we{id=Id},proxy_data=none}=D, true) -> Wire0 = wings_wm:get_prop(wings_wm:this(), wireframed_objects), Wire = gb_sets:add(Id, Wire0), wings_wm:set_prop(wings_wm:this(), wireframed_objects, Wire), D#dlo{proxy=true, proxy_data=#sp{}}; setup_all(#dlo{src_we=#we{id=Id}}=D, true) -> Wire0 = wings_wm:get_prop(wings_wm:this(), wireframed_objects), Wire = gb_sets:add(Id, Wire0), wings_wm:set_prop(wings_wm:this(), wireframed_objects, Wire), D#dlo{proxy=true}; setup_all(#dlo{proxy=false}=D, false) -> D; setup_all(#dlo{src_we=#we{id=Id}}=D, false) -> Wire0 = wings_wm:get_prop(wings_wm:this(), wireframed_objects), Wire = gb_sets:delete_any(Id, Wire0), wings_wm:set_prop(wings_wm:this(), wireframed_objects, Wire), D#dlo{proxy=false}; setup_all(D, _) -> D. update(#dlo{proxy=false}=D, _) -> D; update(#dlo{src_we=We}=D, _) when ?IS_ANY_LIGHT(We) -> D#dlo{proxy=false}; %% Never use proxies on lights. %% Proxy data is not up to date. Recalculate! update(#dlo{proxy_data=#sp{faces=[_]}=Pd0}=D, St) -> update(D#dlo{proxy_data=Pd0#sp{faces=none}},St); update(#dlo{src_we=We0,proxy_data=#sp{faces=none}=Pd0}=D, St) -> Pd = proxy_smooth(We0, Pd0, St), Faces = wings_draw:draw_flat_faces(Pd#sp.vab, St), ProxyEdges = update_edges(D, Pd), D#dlo{proxy_data=Pd#sp{faces=Faces,proxy_edges=ProxyEdges}}; update(#dlo{proxy_data=#sp{proxy_edges=none}=Pd}=D, _) -> ProxyEdges = update_edges(D, Pd), D#dlo{proxy_data=Pd#sp{proxy_edges=ProxyEdges}}; update(#dlo{src_we=We0,proxy_data=none}=D, St) -> Pd = proxy_smooth(We0, #sp{}, St), Faces = wings_draw:draw_flat_faces(Pd#sp.vab, St), ProxyEdges = update_edges(D, Pd), D#dlo{proxy_data=Pd#sp{faces=Faces,proxy_edges=ProxyEdges}}; update(D, _) -> D. update_edges(D, Pd) -> update_edges_1(D, Pd, wings_pref:get_value(proxy_shaded_edge_style)). update_edges_1(_, _, cage) -> none; update_edges_1(_, #sp{vab=#vab{mat_map=MatMap}=Vab}, all) -> %% expects the materials to be reversed Count = case MatMap of [{_Mat,_Type,0,MCount}] -> MCount; [{_Mat,_Type,Start,MCount}|_] when Start =/= 0 -> %% Assert order of material faces Start + MCount end, fun(RS0) -> Extra = [face_normals], RS = wings_draw_setup:enable_pointers(Vab, Extra,RS0), gl:drawArrays(?GL_QUADS, 0, Count), wings_draw_setup:disable_pointers(Vab,RS) end; update_edges_1(#dlo{}, #sp{type={wings_cc,_}, vab=#vab{face_es={0,Bin}}}, some) -> vbo_draw_arrays(?GL_LINES, Bin); update_edges_1(#dlo{src_we=#we{vp=OldVtab}}, #sp{we=#we{vp=Vtab,es=Etab}=We}, some) -> Edges0 = wings_edge:from_vs(wings_util:array_keys(OldVtab), We), case wings_we:is_open(We) of true -> Visible = wings_we:visible_edges(gb_sets:from_list(Edges0), We), Edges = gb_sets:to_list(Visible); false -> Edges = Edges0 end, Bin = foldl(fun(E, Bin) -> #edge{vs=Va,ve=Vb} = array:get(E, Etab), {X1,Y1,Z1} = array:get(Va,Vtab), {X2,Y2,Z2} = array:get(Vb,Vtab), <<Bin/binary,X1:?F32,Y1:?F32,Z1:?F32, X2:?F32,Y2:?F32,Z2:?F32>> end, <<>>, Edges), vbo_draw_arrays(?GL_LINES, Bin). vbo_draw_arrays(Type, Data) -> N = byte_size(Data) div 12, D = fun(RS) -> gl:drawArrays(Type, 0, N), RS end, wings_vbo:new(D, Data). smooth(D=#dlo{proxy=false},_) -> D; smooth(D=#dlo{drag=Active},_) when Active =/= none -> D; smooth(D=#dlo{src_we=We},_) when ?IS_ANY_LIGHT(We) -> D; smooth(D=#dlo{proxy_data=#sp{smooth=none, vab=#vab{face_map=FN}=Vab0, type=Type, we=We}=Pd0, mirror=MM},St) -> Vab = case Type of ?MODULE -> PartialNs = lists:sort(FN), Flist = wings_we:normals(PartialNs, We, MM), Ftab = array:from_orddict(Flist), SN = setup_smooth_normals(FN, Ftab, <<>>), [Vbo] = gl:genBuffers(1), gl:bindBuffer(?GL_ARRAY_BUFFER, Vbo), gl:bufferData(?GL_ARRAY_BUFFER, byte_size(SN), SN, ?GL_STATIC_DRAW), gl:bindBuffer(?GL_ARRAY_BUFFER, 0), Vab0#vab{face_sn={vbo,Vbo}}; _ -> Vab0 end, DL = wings_draw:draw_smooth_faces(Vab, St), D#dlo{proxy_data=Pd0#sp{smooth=DL, vab=Vab}}; smooth(D,_) -> D. setup_smooth_normals([{Face,_Normal}|Fs], Ftab, SN0) -> [{X1,Y1,Z1},{X2,Y2,Z2},{X3,Y3,Z3},{X4,Y4,Z4}] = array:get(Face, Ftab), SN = <<SN0/binary, X1:?F32,Y1:?F32,Z1:?F32, X2:?F32,Y2:?F32,Z2:?F32, X3:?F32,Y3:?F32,Z3:?F32, X4:?F32,Y4:?F32,Z4:?F32>>, setup_smooth_normals(Fs,Ftab,SN); setup_smooth_normals([], _, SN) -> SN. any_proxy() -> wings_dl:fold(fun(#dlo{proxy=false}, A) -> A; (#dlo{}, _) -> true end, false). draw_smooth_edges(#dlo{drag=none}=D, Style, RS) -> draw_edges(D, Style, RS); draw_smooth_edges(D, _, RS) -> draw_edges(D, cage, RS). draw_edges(#dlo{edges=Edges}, cage, RS) -> wings_dl:call(Edges, RS); draw_edges(#dlo{proxy_data=#sp{proxy_edges=ProxyEdges}}, _, RS) -> wings_dl:call(ProxyEdges, RS). proxy_smooth(We0, Pd0, St) -> Level = wings_pref:get_value(proxy_opencl_level), Impl = if is_integer(Level),Level > 0 -> wings_cc; true ->?MODULE end, case proxy_needs_update(We0, Pd0) of {false,_} -> Pd0; {_, _} = Info when Impl =:= ?MODULE -> create_proxy_subdiv(Info, We0, St); {Op, _} -> case Pd0 of #sp{type={wings_cc,Data}} when Op =:= update -> update_proxy_cc(We0, Data); _ -> try create_proxy_cc(We0, Level, St) catch to_large -> %% Fallback if we can't allocate memory create_proxy_subdiv({smooth,We0}, We0, St) end end end. proxy_needs_update(We, #sp{we=SWe,src_we=We,vab=#vab{face_vs=Bin}}) when Bin =/= none -> %% Nothing important changed - just recreate the display lists {false, SWe}; proxy_needs_update(#we{es=Etab,he=Hard,mat=M,next_id=Next, lv=Lv,rv=Rv,mirror=Mirror}, #sp{we=OldWe,src_we= #we{es=Etab,he=Hard,mat=M,next_id=Next, lv=Lv,rv=Rv,mirror=Mirror}}) -> {update,OldWe}; proxy_needs_update(We0, #sp{}) -> if ?IS_ANY_LIGHT(We0) -> {false,We0}; true -> {smooth,We0} end. update_proxy_subdiv({false, We}, _) -> We; update_proxy_subdiv({update, OldWe}, We0) -> wings_subdiv:inc_smooth(We0, OldWe); update_proxy_subdiv({smooth, We}, _) -> wings_subdiv:smooth(We). create_proxy_subdiv(Info, We0, St) -> We = update_proxy_subdiv(Info, We0), Plan = wings_draw_setup:prepare(gb_trees:to_list(We#we.fs), We, St), flat_faces(Plan, #sp{src_we=We0,we=We}). update_proxy_cc(We0, Data0) -> Data = wings_cc:update(We0, Data0), Vab = wings_cc:gen_vab(Data), #sp{src_we=We0,we=We0,vab=Vab,type={wings_cc,Data}}. create_proxy_cc(We = #we{fs=Ftab}, Level, St) -> Plan = wings_draw_setup:prepare(gb_trees:keys(Ftab), We, St), Data = wings_cc:init(Plan, Level, We), Vab = wings_cc:gen_vab(Data), #sp{src_we=We,we=We,vab=Vab,type={wings_cc,Data}}. split_proxy(#dlo{proxy=true, src_we=We=#we{fs=Ftab}, proxy_data=Pd=#sp{type={wings_cc,Data0}}}, DynVs0, St) -> Fs0 = gb_trees:keys(Ftab), DynFs0 = wings_face:from_vs(DynVs0, We), %% Expand once (to get the split drawing faces) DynVs1 = wings_face:to_vertices(DynFs0, We), DynFs = wings_face:from_vs(DynVs1, We), Data = case proxy_needs_update(We, Pd) of {false, _} -> Data0; {update, _} -> wings_cc:update(DynVs0, Data0); {_, _} -> Plan = wings_draw_setup:prepare(Fs0, We, St), wings_cc:init(Plan, Data0, We) end, StaticFsSet = gb_sets:subtract(gb_sets:from_ordset(Fs0), gb_sets:from_ordset(DynFs)), StaticFs = gb_sets:to_list(StaticFsSet), StaticPlan = wings_draw_setup:prepare(StaticFs, We, St), StaticVab = wings_cc:gen_vab(StaticPlan, Data), StaticDL = wings_draw:draw_flat_faces(StaticVab, St), %% To get the subdiv correct we need outer layer of faces during calc SubdivVs = wings_face:to_vertices(DynFs, We), SubdivFs = wings_face:from_vs(SubdivVs, We), SubdivPlan = wings_draw_setup:prepare(SubdivFs, We, St), SubdivData = wings_cc:init(SubdivPlan, Data0, We), DynPlan = wings_draw_setup:prepare(DynFs, We, St), DynVab = wings_cc:gen_vab(DynPlan, SubdivData), DynDL = wings_draw:draw_flat_faces(DynVab, St), Split = #split{dyn=DynPlan, info=SubdivData}, #sp{we=We,src_we=We,type={wings_cc,Data}, faces=[StaticDL,DynDL],split=Split}; split_proxy(#dlo{proxy=true,proxy_data=Pd0,src_we=SrcWe}, DynVs0, St) -> DynFs0 = wings_face:from_vs(DynVs0, SrcWe), #we{mirror=Mirror,holes=Holes} = SrcWe, DynFs = ordsets:subtract(DynFs0, ordsets:union([Mirror], Holes)), DynVs = wings_vertex:from_faces(DynFs, SrcWe), #we{fs=Ftab0}=We0 = update_proxy_subdiv(proxy_needs_update(SrcWe, Pd0),SrcWe), Fs0 = wings_face:from_vs(DynVs, We0), OutEs = wings_face:outer_edges(Fs0, We0), UpdateVs0 = gb_sets:from_ordset(wings_face:to_vertices(Fs0, We0)), OuterVs = gb_sets:from_ordset(wings_edge:to_vertices(OutEs, We0)), UpdateVs = gb_sets:subtract(UpdateVs0,OuterVs), Ftab = sofs:from_external(gb_trees:to_list(Ftab0), [{face,data}]), Fs = sofs:from_external(Fs0, [face]), {DynFtab0,StaticFtab0} = sofs:partition(1, Ftab, Fs), DynFtab = sofs:to_external(DynFtab0), StaticFtab = sofs:to_external(StaticFtab0), StaticPlan = wings_draw_setup:prepare(StaticFtab, We0, St), #sp{vab=StaticVab} = flat_faces(StaticPlan, #sp{we=We0}), StaticDL = wings_draw:draw_flat_faces(StaticVab, St), DynPlan = wings_draw_setup:prepare(DynFtab, We0, St), Info = wings_subdiv:get_proxy_info(DynVs, UpdateVs, SrcWe), Split = #split{upd_fs=DynFs,dyn=DynPlan,info=Info}, Sp = #sp{we=We0,src_we=SrcWe,split=Split}, DynD = flat_faces(DynPlan, Sp), Temp = wings_draw:draw_flat_faces(DynD#sp.vab, St), DynD#sp{faces=[StaticDL,Temp]}; split_proxy(#dlo{proxy_data=PD},_, _St) -> PD. update_dynamic(ChangedVs, St, #dlo{proxy=true,proxy_data=#sp{type=?MODULE}=Pd0}=D0) -> #sp{faces=[SDL|_],we=SmoothedWe,split=Split, src_we=SrcWe0=#we{vp=Vtab0}}=Pd0, #split{upd_fs=Upd,dyn=DynPlan,info=Info} = Split, Vtab = foldl(fun({V,Pos},Acc) -> array:set(V,Pos,Acc) end, Vtab0, ChangedVs), SrcWe = SrcWe0#we{vp=Vtab}, We = wings_subdiv:inc_smooth(SrcWe, Upd, Info, SmoothedWe), Pd1 = flat_faces(DynPlan, Pd0#sp{we=We, src_we=SrcWe}), Temp = wings_draw:draw_flat_faces(Pd1#sp.vab, St), D0#dlo{proxy_data=Pd1#sp{faces=[SDL,Temp]}}; update_dynamic(ChangedVs, St, D0=#dlo{proxy=true,proxy_data=#sp{type={wings_cc,_}}=Pd0}) -> #sp{faces=[SDL|_],split=SP=#split{dyn=DynPlan, info=Data0}}=Pd0, Data = wings_cc:update(ChangedVs, Data0), Vab = wings_cc:gen_vab(DynPlan, Data), DL = wings_draw:draw_flat_faces(Vab, St), D0#dlo{proxy_data=Pd0#sp{faces=[SDL,DL], split=SP#split{info=Data}}}; update_dynamic(_, _, D) -> D. reset_dynamic(#sp{we=We, src_we=We0, type=Type}) -> #sp{we=We,src_we=We0,type=Type}; reset_dynamic(D) -> D. %%% Setup binaries and meta info flat_faces({plain,MatFaces}, Pd) -> plain_flat_faces(MatFaces, Pd, 0, <<>>, [], []); flat_faces({uv,MatFaces}, Pd) -> uv_flat_faces(MatFaces, Pd, 0, <<>>, [], []); flat_faces({uv_tangent,MatFaces}, Pd) -> Z = e3d_vec:zero(), Array = array:new([{default, {Z,Z}}]), tangent_flat_faces(MatFaces, Pd, 0, <<>>, [], [], {Array, []}); flat_faces({color,MatFaces}, Pd) -> col_flat_faces(MatFaces, Pd, 0, <<>>, [], []); flat_faces({color_uv,MatFaces}, Pd) -> col_uv_faces(MatFaces, Pd, 0, <<>>, [], []); flat_faces({color_uv_tangent,MatFaces}, Pd) -> Z = e3d_vec:zero(), Array = array:new([{default, {Z,Z}}]), col_tangent_faces(MatFaces, Pd, 0, <<>>, [], [], {Array, []}). plain_flat_faces([{Mat,Fs}|T], #sp{we=We}=Pd, Start0, Vs0, Fmap0, MatInfo0) -> {Start,Vs,FaceMap} = flat_faces_1(Fs, We, Start0, Vs0, Fmap0), MatInfo = [{Mat,?GL_QUADS,Start0,Start-Start0}|MatInfo0], plain_flat_faces(T, Pd, Start, Vs, FaceMap, MatInfo); plain_flat_faces([], Pd, _Start, Vs, FaceMap0, MatInfo) -> FaceMap = reverse(FaceMap0), What = [vertices,face_normals], Vab = wings_draw_setup:create_vab(What, Vs, FaceMap, MatInfo), Pd#sp{vab=Vab}. flat_faces_1([{Face,Edge}|Fs], We, Start, Vs, FaceMap) -> VsPos = wings_face:vertex_positions(Face, Edge, We), Normal = e3d_vec:normal(VsPos), flat_faces_1(Fs,We,Start+4,add_quad(Vs,Normal,VsPos),[{Face,Normal}|FaceMap]); flat_faces_1([], _, Start, Vs, FaceMap) -> {Start,Vs,FaceMap}. uv_flat_faces([{Mat,Fs}|T], #sp{we=We}=Pd, Start0, Vs0, Fmap0, MatInfo0) -> {Start,Vs,FaceMap} = uv_flat_faces_1(Fs, We, Start0, Vs0, Fmap0), MatInfo = [{Mat,?GL_QUADS,Start0,Start-Start0}|MatInfo0], uv_flat_faces(T, Pd, Start, Vs, FaceMap, MatInfo); uv_flat_faces([], Pd, _Start, Vs, FaceMap0, MatInfo) -> FaceMap = reverse(FaceMap0), What = [vertices,face_normals,uvs], Vab = wings_draw_setup:create_vab(What, Vs, FaceMap, MatInfo), Pd#sp{vab=Vab}. uv_flat_faces_1([{Face,Edge}|Fs], We, Start, Vs, FaceMap) -> {VsPos,UV} = wings_va:face_pos_attr(uv, Face, Edge, We), Normal = e3d_vec:normal(VsPos), uv_flat_faces_1(Fs, We, Start+4, add_quad_uv(Vs, Normal, VsPos, UV), [{Face,Normal}|FaceMap]); uv_flat_faces_1([], _, Start, Vs, FaceMap) -> {Start,Vs,FaceMap}. tangent_flat_faces([{Mat,Fs}|T], #sp{we=We}=Pd, Start0, Vs0, Fmap0, MatInfo0, Ts0) -> {Start,Vs,FaceMap, Ts} = tangent_flat_faces_1(Fs, We, Start0, Vs0, Fmap0, Ts0), MatInfo = [{Mat,?GL_QUADS,Start0,Start-Start0}|MatInfo0], tangent_flat_faces(T, Pd, Start, Vs, FaceMap, MatInfo, Ts); tangent_flat_faces([], Pd, _Start, Vs, FaceMap0, MatInfo, {VsTs0, RevF2V}) -> FaceMap = reverse(FaceMap0), VsTs = array:map(fun(_V, {T, BT}) -> {e3d_vec:norm(T), e3d_vec:norm(BT)} end, VsTs0), Data = wings_draw_setup:add_tangents(reverse(RevF2V), VsTs, Vs), What = [vertices,face_normals,uvs], Vab = wings_draw_setup:create_tangent_vab(What, Vs, Data, FaceMap, MatInfo), Pd#sp{vab=Vab}. tangent_flat_faces_1([{Face,Edge}|Fs], We, Start, Vs, FaceMap, Ts0) -> {VsPos,UV} = wings_va:face_pos_attr(uv, Face, Edge, We), Normal = e3d_vec:normal(VsPos), tangent_flat_faces_1(Fs, We, Start+4, add_quad_uv(Vs, Normal, VsPos, UV), [{Face,Normal}|FaceMap], add_ts(VsPos, UV, Normal,wings_face:vertices_ccw(Face, We), Ts0)); tangent_flat_faces_1([], _, Start, Vs, FaceMap, Ts) -> {Start,Vs,FaceMap, Ts}. col_flat_faces([{Mat,Fs}|T], #sp{we=We}=Pd, Start0, Vs0, Fmap0, MatInfo0) -> {Start,Vs,FaceMap} = col_flat_faces_1(Fs, We, Start0, Vs0, Fmap0), MatInfo = [{Mat,?GL_QUADS,Start0,Start-Start0}|MatInfo0], col_flat_faces(T, Pd, Start, Vs, FaceMap, MatInfo); col_flat_faces([], Pd, _Start, Vs, FaceMap0, MatInfo) -> FaceMap = reverse(FaceMap0), What = [vertices,face_normals,colors], Vab = wings_draw_setup:create_vab(What, Vs, FaceMap, MatInfo), Pd#sp{vab=Vab}. col_flat_faces_1([{Face,Edge}|T], We, Start, Vs, Fmap) -> {VsPos,Col} = wings_va:face_pos_attr(color, Face, Edge, We), Normal = e3d_vec:normal(VsPos), col_flat_faces_1(T, We, Start+4, add_quad_col(Vs, Normal, VsPos, Col), [{Face,Normal}|Fmap]); col_flat_faces_1([], _, Start, Vs, Fmap) -> {Start,Vs,Fmap}. col_uv_faces([{Mat,Fs}|T], #sp{we=We}=Pd, Start0, Vs0, Fmap0, MatInfo0) -> {Start,Vs,FaceMap} = col_uv_faces_1(Fs, We, Start0, Vs0, Fmap0), MatInfo = [{Mat,?GL_QUADS,Start0,Start-Start0}|MatInfo0], col_uv_faces(T, Pd, Start, Vs, FaceMap, MatInfo); col_uv_faces([], Pd, _Start, Vs, FaceMap0, MatInfo) -> FaceMap = reverse(FaceMap0), What = [vertices,face_normals,colors,uvs], Vab = wings_draw_setup:create_vab(What, Vs, FaceMap, MatInfo), Pd#sp{vab=Vab}. col_uv_faces_1([{Face,Edge}|Fs], We, Start, Vs, FaceMap) -> {VsPos,UV} = wings_va:face_pos_attr([color|uv], Face, Edge, We), Normal = e3d_vec:normal(VsPos), col_uv_faces_1(Fs, We, Start+4, add_quad_col_uv(Vs, Normal, VsPos, UV), [{Face,Normal}|FaceMap]); col_uv_faces_1([], _, Start, Vs, FaceMap) -> {Start,Vs,FaceMap}. col_tangent_faces([{Mat,Fs}|T], #sp{we=We}=Pd, Start0, Vs0, Fmap0, MatInfo0, Ts0) -> {Start,Vs,FaceMap, Ts} = col_tangent_faces_1(Fs, We, Start0, Vs0, Fmap0, Ts0), MatInfo = [{Mat,?GL_QUADS,Start0,Start-Start0}|MatInfo0], col_tangent_faces(T, Pd, Start, Vs, FaceMap, MatInfo, Ts); col_tangent_faces([], Pd, _Start, Vs, FaceMap0, MatInfo, {VsTs0, RevF2V}) -> FaceMap = reverse(FaceMap0), VsTs = array:map(fun(_V, {T,BT}) -> {e3d_vec:norm(T),e3d_vec:norm(BT)} end, VsTs0), Data = wings_draw_setup:add_tangents(reverse(RevF2V), VsTs, Vs), What = [vertices,face_normals,colors,uvs], Vab = wings_draw_setup:create_tangent_vab(What, Vs, Data, FaceMap, MatInfo), Pd#sp{vab=Vab}. col_tangent_faces_1([{Face,Edge}|Fs], We, Start, Vs, FaceMap, Ts0) -> {VsPos,ColUV} = wings_va:face_pos_attr([color|uv], Face, Edge, We), Normal = e3d_vec:normal(VsPos), col_tangent_faces_1(Fs, We, Start+4, add_quad_col_uv(Vs, Normal, VsPos, ColUV), [{Face,Normal}|FaceMap], add_ts(VsPos, [UV || [_|UV] <- ColUV], Normal, wings_face:vertices_ccw(Face, We), Ts0)); col_tangent_faces_1([], _, Start, Vs, FaceMap, Ts) -> {Start,Vs,FaceMap, Ts}. add_quad(Bin, {NX,NY,NZ}, [{X1,Y1,Z1},{X2,Y2,Z2},{X3,Y3,Z3},{X4,Y4,Z4}]) -> <<Bin/binary, X1:?F32,Y1:?F32,Z1:?F32, NX:?F32,NY:?F32,NZ:?F32, X2:?F32,Y2:?F32,Z2:?F32, NX:?F32,NY:?F32,NZ:?F32, X3:?F32,Y3:?F32,Z3:?F32, NX:?F32,NY:?F32,NZ:?F32, X4:?F32,Y4:?F32,Z4:?F32, NX:?F32,NY:?F32,NZ:?F32>>. add_quad_uv(Bin, {NX,NY,NZ}, [{X1,Y1,Z1},{X2,Y2,Z2},{X3,Y3,Z3},{X4,Y4,Z4}], [{U1,V1},{U2,V2},{U3,V3},{U4,V4}]) -> <<Bin/binary, X1:?F32,Y1:?F32,Z1:?F32, NX:?F32,NY:?F32,NZ:?F32, U1:?F32,V1:?F32, X2:?F32,Y2:?F32,Z2:?F32, NX:?F32,NY:?F32,NZ:?F32, U2:?F32,V2:?F32, X3:?F32,Y3:?F32,Z3:?F32, NX:?F32,NY:?F32,NZ:?F32, U3:?F32,V3:?F32, X4:?F32,Y4:?F32,Z4:?F32, NX:?F32,NY:?F32,NZ:?F32, U4:?F32,V4:?F32>>; add_quad_uv(Bin, N, Pos, _) -> Z = {0.0,0.0}, add_quad_uv(Bin, N, Pos, [Z,Z,Z,Z]). add_quad_col(Bin, {NX,NY,NZ}, [{X1,Y1,Z1},{X2,Y2,Z2},{X3,Y3,Z3},{X4,Y4,Z4}], [{R1,G1,B1},{R2,G2,B2},{R3,G3,B3},{R4,G4,B4}]) -> <<Bin/binary, X1:?F32,Y1:?F32,Z1:?F32, NX:?F32,NY:?F32,NZ:?F32, R1:?F32,G1:?F32,B1:?F32, X2:?F32,Y2:?F32,Z2:?F32, NX:?F32,NY:?F32,NZ:?F32, R2:?F32,G2:?F32,B2:?F32, X3:?F32,Y3:?F32,Z3:?F32, NX:?F32,NY:?F32,NZ:?F32, R3:?F32,G3:?F32,B3:?F32, X4:?F32,Y4:?F32,Z4:?F32, NX:?F32,NY:?F32,NZ:?F32, R4:?F32,G4:?F32,B4:?F32>>; add_quad_col(Bin, N, Pos, Cols0) -> Cols = [def_color(C) || C <- Cols0], add_quad_col(Bin, N, Pos, Cols). add_quad_col_uv(Bin, {NX,NY,NZ}, [{X1,Y1,Z1},{X2,Y2,Z2},{X3,Y3,Z3},{X4,Y4,Z4}], [[{R1,G1,B1}|{U1,V1}], [{R2,G2,B2}|{U2,V2}], [{R3,G3,B3}|{U3,V3}], [{R4,G4,B4}|{U4,V4}]]) -> <<Bin/binary, X1:?F32,Y1:?F32,Z1:?F32, NX:?F32,NY:?F32,NZ:?F32, R1:?F32,G1:?F32,B1:?F32, U1:?F32,V1:?F32, X2:?F32,Y2:?F32,Z2:?F32, NX:?F32,NY:?F32,NZ:?F32, R2:?F32,G2:?F32,B2:?F32, U2:?F32,V2:?F32, X3:?F32,Y3:?F32,Z3:?F32, NX:?F32,NY:?F32,NZ:?F32, R3:?F32,G3:?F32,B3:?F32, U3:?F32,V3:?F32, X4:?F32,Y4:?F32,Z4:?F32, NX:?F32,NY:?F32,NZ:?F32, R4:?F32,G4:?F32,B4:?F32, U4:?F32,V4:?F32>>; add_quad_col_uv(Bin, N, Pos, Attrs0) -> Attrs = fix_color_uv(Attrs0), add_quad_col_uv(Bin, N, Pos, Attrs). add_ts([P1,P2,P3,_P4], [UV1,UV2,UV3,_UV4], N, Vs, Ts0) -> %% Quads, subdivided so rougly the same tangents for both tris. wings_draw_setup:add_ts([P1,P2,P3],[UV1,UV2,UV3], N, Vs, Ts0). fix_color_uv(Attrs) -> case good_uvs(Attrs) of false -> %% Bad UVs, possibly bad vertex colors too. Fix both. Zuv = {0.0,0.0}, [[def_color(C)|Zuv] || [C|_] <- Attrs]; true -> %% Good UVs, bad vertex colors. [[def_color(C)|UV] || [C|UV] <- Attrs] end. good_uvs([[_|{_,_}]|T]) -> good_uvs(T); good_uvs([_|_]) -> false; good_uvs([]) -> true. def_color({_,_,_}=C) -> C; def_color(_) -> {1.0,1.0,1.0}.
{ "pile_set_name": "Github" }
CREATE TABLE list (id VARCHAR(64) NOT NULL, value VARCHAR(64) NOT NULL, PRIMARY KEY(id)) DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci ENGINE = InnoDB; INSERT INTO `list` (`id`, `value`) VALUES ('AF', 'Afghanistan'); INSERT INTO `list` (`id`, `value`) VALUES ('ZA', 'Afrique du Sud'); INSERT INTO `list` (`id`, `value`) VALUES ('AL', 'Albanie'); INSERT INTO `list` (`id`, `value`) VALUES ('DZ', 'Algérie'); INSERT INTO `list` (`id`, `value`) VALUES ('DE', 'Allemagne'); INSERT INTO `list` (`id`, `value`) VALUES ('AD', 'Andorre'); INSERT INTO `list` (`id`, `value`) VALUES ('AO', 'Angola'); INSERT INTO `list` (`id`, `value`) VALUES ('AI', 'Anguilla'); INSERT INTO `list` (`id`, `value`) VALUES ('AQ', 'Antarctique'); INSERT INTO `list` (`id`, `value`) VALUES ('AG', 'Antigua-et-Barbuda'); INSERT INTO `list` (`id`, `value`) VALUES ('SA', 'Arabie saoudite'); INSERT INTO `list` (`id`, `value`) VALUES ('AR', 'Argentine'); INSERT INTO `list` (`id`, `value`) VALUES ('AM', 'Arménie'); INSERT INTO `list` (`id`, `value`) VALUES ('AW', 'Aruba'); INSERT INTO `list` (`id`, `value`) VALUES ('AU', 'Australie'); INSERT INTO `list` (`id`, `value`) VALUES ('AT', 'Autriche'); INSERT INTO `list` (`id`, `value`) VALUES ('AZ', 'Azerbaïdjan'); INSERT INTO `list` (`id`, `value`) VALUES ('BS', 'Bahamas'); INSERT INTO `list` (`id`, `value`) VALUES ('BH', 'Bahreïn'); INSERT INTO `list` (`id`, `value`) VALUES ('BD', 'Bangladesh'); INSERT INTO `list` (`id`, `value`) VALUES ('BB', 'Barbade'); INSERT INTO `list` (`id`, `value`) VALUES ('BE', 'Belgique'); INSERT INTO `list` (`id`, `value`) VALUES ('BZ', 'Belize'); INSERT INTO `list` (`id`, `value`) VALUES ('BJ', 'Bénin'); INSERT INTO `list` (`id`, `value`) VALUES ('BM', 'Bermudes'); INSERT INTO `list` (`id`, `value`) VALUES ('BT', 'Bhoutan'); INSERT INTO `list` (`id`, `value`) VALUES ('BY', 'Biélorussie'); INSERT INTO `list` (`id`, `value`) VALUES ('BO', 'Bolivie'); INSERT INTO `list` (`id`, `value`) VALUES ('BA', 'Bosnie-Herzégovine'); INSERT INTO `list` (`id`, `value`) VALUES ('BW', 'Botswana'); INSERT INTO `list` (`id`, `value`) VALUES ('BR', 'Brésil'); INSERT INTO `list` (`id`, `value`) VALUES ('BN', 'Brunéi Darussalam'); INSERT INTO `list` (`id`, `value`) VALUES ('BG', 'Bulgarie'); INSERT INTO `list` (`id`, `value`) VALUES ('BF', 'Burkina Faso'); INSERT INTO `list` (`id`, `value`) VALUES ('BI', 'Burundi'); INSERT INTO `list` (`id`, `value`) VALUES ('KH', 'Cambodge'); INSERT INTO `list` (`id`, `value`) VALUES ('CM', 'Cameroun'); INSERT INTO `list` (`id`, `value`) VALUES ('CA', 'Canada'); INSERT INTO `list` (`id`, `value`) VALUES ('CV', 'Cap-Vert'); INSERT INTO `list` (`id`, `value`) VALUES ('CL', 'Chili'); INSERT INTO `list` (`id`, `value`) VALUES ('CN', 'Chine'); INSERT INTO `list` (`id`, `value`) VALUES ('CY', 'Chypre'); INSERT INTO `list` (`id`, `value`) VALUES ('CO', 'Colombie'); INSERT INTO `list` (`id`, `value`) VALUES ('KM', 'Comores'); INSERT INTO `list` (`id`, `value`) VALUES ('CG', 'Congo-Brazzaville'); INSERT INTO `list` (`id`, `value`) VALUES ('CD', 'Congo-Kinshasa'); INSERT INTO `list` (`id`, `value`) VALUES ('KP', 'Corée du Nord'); INSERT INTO `list` (`id`, `value`) VALUES ('KR', 'Corée du Sud'); INSERT INTO `list` (`id`, `value`) VALUES ('CR', 'Costa Rica'); INSERT INTO `list` (`id`, `value`) VALUES ('CI', 'Côte d’Ivoire'); INSERT INTO `list` (`id`, `value`) VALUES ('HR', 'Croatie'); INSERT INTO `list` (`id`, `value`) VALUES ('CU', 'Cuba'); INSERT INTO `list` (`id`, `value`) VALUES ('CW', 'Curaçao'); INSERT INTO `list` (`id`, `value`) VALUES ('DK', 'Danemark'); INSERT INTO `list` (`id`, `value`) VALUES ('DJ', 'Djibouti'); INSERT INTO `list` (`id`, `value`) VALUES ('DM', 'Dominique'); INSERT INTO `list` (`id`, `value`) VALUES ('EG', 'Égypte'); INSERT INTO `list` (`id`, `value`) VALUES ('AE', 'Émirats arabes unis'); INSERT INTO `list` (`id`, `value`) VALUES ('EC', 'Équateur'); INSERT INTO `list` (`id`, `value`) VALUES ('ER', 'Érythrée'); INSERT INTO `list` (`id`, `value`) VALUES ('ES', 'Espagne'); INSERT INTO `list` (`id`, `value`) VALUES ('EE', 'Estonie'); INSERT INTO `list` (`id`, `value`) VALUES ('SZ', 'Eswatini'); INSERT INTO `list` (`id`, `value`) VALUES ('VA', 'État de la Cité du Vatican'); INSERT INTO `list` (`id`, `value`) VALUES ('FM', 'États fédérés de Micronésie'); INSERT INTO `list` (`id`, `value`) VALUES ('US', 'États-Unis'); INSERT INTO `list` (`id`, `value`) VALUES ('ET', 'Éthiopie'); INSERT INTO `list` (`id`, `value`) VALUES ('FJ', 'Fidji'); INSERT INTO `list` (`id`, `value`) VALUES ('FI', 'Finlande'); INSERT INTO `list` (`id`, `value`) VALUES ('FR', 'France'); INSERT INTO `list` (`id`, `value`) VALUES ('GA', 'Gabon'); INSERT INTO `list` (`id`, `value`) VALUES ('GM', 'Gambie'); INSERT INTO `list` (`id`, `value`) VALUES ('GE', 'Géorgie'); INSERT INTO `list` (`id`, `value`) VALUES ('GS', 'Géorgie du Sud et îles Sandwich du Sud'); INSERT INTO `list` (`id`, `value`) VALUES ('GH', 'Ghana'); INSERT INTO `list` (`id`, `value`) VALUES ('GI', 'Gibraltar'); INSERT INTO `list` (`id`, `value`) VALUES ('GR', 'Grèce'); INSERT INTO `list` (`id`, `value`) VALUES ('GD', 'Grenade'); INSERT INTO `list` (`id`, `value`) VALUES ('GL', 'Groenland'); INSERT INTO `list` (`id`, `value`) VALUES ('GP', 'Guadeloupe'); INSERT INTO `list` (`id`, `value`) VALUES ('GU', 'Guam'); INSERT INTO `list` (`id`, `value`) VALUES ('GT', 'Guatemala'); INSERT INTO `list` (`id`, `value`) VALUES ('GG', 'Guernesey'); INSERT INTO `list` (`id`, `value`) VALUES ('GN', 'Guinée'); INSERT INTO `list` (`id`, `value`) VALUES ('GQ', 'Guinée équatoriale'); INSERT INTO `list` (`id`, `value`) VALUES ('GW', 'Guinée-Bissau'); INSERT INTO `list` (`id`, `value`) VALUES ('GY', 'Guyana'); INSERT INTO `list` (`id`, `value`) VALUES ('GF', 'Guyane française'); INSERT INTO `list` (`id`, `value`) VALUES ('HT', 'Haïti'); INSERT INTO `list` (`id`, `value`) VALUES ('HN', 'Honduras'); INSERT INTO `list` (`id`, `value`) VALUES ('HU', 'Hongrie'); INSERT INTO `list` (`id`, `value`) VALUES ('BV', 'Île Bouvet'); INSERT INTO `list` (`id`, `value`) VALUES ('CX', 'Île Christmas'); INSERT INTO `list` (`id`, `value`) VALUES ('IM', 'Île de Man'); INSERT INTO `list` (`id`, `value`) VALUES ('NF', 'Île Norfolk'); INSERT INTO `list` (`id`, `value`) VALUES ('AX', 'Îles Åland'); INSERT INTO `list` (`id`, `value`) VALUES ('KY', 'Îles Caïmans'); INSERT INTO `list` (`id`, `value`) VALUES ('CC', 'Îles Cocos'); INSERT INTO `list` (`id`, `value`) VALUES ('CK', 'Îles Cook'); INSERT INTO `list` (`id`, `value`) VALUES ('FO', 'Îles Féroé'); INSERT INTO `list` (`id`, `value`) VALUES ('HM', 'Îles Heard et McDonald'); INSERT INTO `list` (`id`, `value`) VALUES ('FK', 'Îles Malouines'); INSERT INTO `list` (`id`, `value`) VALUES ('MP', 'Îles Mariannes du Nord'); INSERT INTO `list` (`id`, `value`) VALUES ('MH', 'Îles Marshall'); INSERT INTO `list` (`id`, `value`) VALUES ('UM', 'Îles mineures éloignées des États-Unis'); INSERT INTO `list` (`id`, `value`) VALUES ('PN', 'Îles Pitcairn'); INSERT INTO `list` (`id`, `value`) VALUES ('SB', 'Îles Salomon'); INSERT INTO `list` (`id`, `value`) VALUES ('TC', 'Îles Turques-et-Caïques'); INSERT INTO `list` (`id`, `value`) VALUES ('VG', 'Îles Vierges britanniques'); INSERT INTO `list` (`id`, `value`) VALUES ('VI', 'Îles Vierges des États-Unis'); INSERT INTO `list` (`id`, `value`) VALUES ('IN', 'Inde'); INSERT INTO `list` (`id`, `value`) VALUES ('ID', 'Indonésie'); INSERT INTO `list` (`id`, `value`) VALUES ('IQ', 'Irak'); INSERT INTO `list` (`id`, `value`) VALUES ('IR', 'Iran'); INSERT INTO `list` (`id`, `value`) VALUES ('IE', 'Irlande'); INSERT INTO `list` (`id`, `value`) VALUES ('IS', 'Islande'); INSERT INTO `list` (`id`, `value`) VALUES ('IL', 'Israël'); INSERT INTO `list` (`id`, `value`) VALUES ('IT', 'Italie'); INSERT INTO `list` (`id`, `value`) VALUES ('JM', 'Jamaïque'); INSERT INTO `list` (`id`, `value`) VALUES ('JP', 'Japon'); INSERT INTO `list` (`id`, `value`) VALUES ('JE', 'Jersey'); INSERT INTO `list` (`id`, `value`) VALUES ('JO', 'Jordanie'); INSERT INTO `list` (`id`, `value`) VALUES ('KZ', 'Kazakhstan'); INSERT INTO `list` (`id`, `value`) VALUES ('KE', 'Kenya'); INSERT INTO `list` (`id`, `value`) VALUES ('KG', 'Kirghizistan'); INSERT INTO `list` (`id`, `value`) VALUES ('KI', 'Kiribati'); INSERT INTO `list` (`id`, `value`) VALUES ('KW', 'Koweït'); INSERT INTO `list` (`id`, `value`) VALUES ('RE', 'La Réunion'); INSERT INTO `list` (`id`, `value`) VALUES ('LA', 'Laos'); INSERT INTO `list` (`id`, `value`) VALUES ('LS', 'Lesotho'); INSERT INTO `list` (`id`, `value`) VALUES ('LV', 'Lettonie'); INSERT INTO `list` (`id`, `value`) VALUES ('LB', 'Liban'); INSERT INTO `list` (`id`, `value`) VALUES ('LR', 'Libéria'); INSERT INTO `list` (`id`, `value`) VALUES ('LY', 'Libye'); INSERT INTO `list` (`id`, `value`) VALUES ('LI', 'Liechtenstein'); INSERT INTO `list` (`id`, `value`) VALUES ('LT', 'Lituanie'); INSERT INTO `list` (`id`, `value`) VALUES ('LU', 'Luxembourg'); INSERT INTO `list` (`id`, `value`) VALUES ('MK', 'Macédoine du Nord'); INSERT INTO `list` (`id`, `value`) VALUES ('MG', 'Madagascar'); INSERT INTO `list` (`id`, `value`) VALUES ('MY', 'Malaisie'); INSERT INTO `list` (`id`, `value`) VALUES ('MW', 'Malawi'); INSERT INTO `list` (`id`, `value`) VALUES ('MV', 'Maldives'); INSERT INTO `list` (`id`, `value`) VALUES ('ML', 'Mali'); INSERT INTO `list` (`id`, `value`) VALUES ('MT', 'Malte'); INSERT INTO `list` (`id`, `value`) VALUES ('MA', 'Maroc'); INSERT INTO `list` (`id`, `value`) VALUES ('MQ', 'Martinique'); INSERT INTO `list` (`id`, `value`) VALUES ('MU', 'Maurice'); INSERT INTO `list` (`id`, `value`) VALUES ('MR', 'Mauritanie'); INSERT INTO `list` (`id`, `value`) VALUES ('YT', 'Mayotte'); INSERT INTO `list` (`id`, `value`) VALUES ('MX', 'Mexique'); INSERT INTO `list` (`id`, `value`) VALUES ('MD', 'Moldavie'); INSERT INTO `list` (`id`, `value`) VALUES ('MC', 'Monaco'); INSERT INTO `list` (`id`, `value`) VALUES ('MN', 'Mongolie'); INSERT INTO `list` (`id`, `value`) VALUES ('ME', 'Monténégro'); INSERT INTO `list` (`id`, `value`) VALUES ('MS', 'Montserrat'); INSERT INTO `list` (`id`, `value`) VALUES ('MZ', 'Mozambique'); INSERT INTO `list` (`id`, `value`) VALUES ('MM', 'Myanmar (Birmanie)'); INSERT INTO `list` (`id`, `value`) VALUES ('NA', 'Namibie'); INSERT INTO `list` (`id`, `value`) VALUES ('NR', 'Nauru'); INSERT INTO `list` (`id`, `value`) VALUES ('NP', 'Népal'); INSERT INTO `list` (`id`, `value`) VALUES ('NI', 'Nicaragua'); INSERT INTO `list` (`id`, `value`) VALUES ('NE', 'Niger'); INSERT INTO `list` (`id`, `value`) VALUES ('NG', 'Nigéria'); INSERT INTO `list` (`id`, `value`) VALUES ('NU', 'Niue'); INSERT INTO `list` (`id`, `value`) VALUES ('NO', 'Norvège'); INSERT INTO `list` (`id`, `value`) VALUES ('NC', 'Nouvelle-Calédonie'); INSERT INTO `list` (`id`, `value`) VALUES ('NZ', 'Nouvelle-Zélande'); INSERT INTO `list` (`id`, `value`) VALUES ('OM', 'Oman'); INSERT INTO `list` (`id`, `value`) VALUES ('UG', 'Ouganda'); INSERT INTO `list` (`id`, `value`) VALUES ('UZ', 'Ouzbékistan'); INSERT INTO `list` (`id`, `value`) VALUES ('PK', 'Pakistan'); INSERT INTO `list` (`id`, `value`) VALUES ('PW', 'Palaos'); INSERT INTO `list` (`id`, `value`) VALUES ('PA', 'Panama'); INSERT INTO `list` (`id`, `value`) VALUES ('PG', 'Papouasie-Nouvelle-Guinée'); INSERT INTO `list` (`id`, `value`) VALUES ('PY', 'Paraguay'); INSERT INTO `list` (`id`, `value`) VALUES ('NL', 'Pays-Bas'); INSERT INTO `list` (`id`, `value`) VALUES ('BQ', 'Pays-Bas caribéens'); INSERT INTO `list` (`id`, `value`) VALUES ('PE', 'Pérou'); INSERT INTO `list` (`id`, `value`) VALUES ('PH', 'Philippines'); INSERT INTO `list` (`id`, `value`) VALUES ('PL', 'Pologne'); INSERT INTO `list` (`id`, `value`) VALUES ('PF', 'Polynésie française'); INSERT INTO `list` (`id`, `value`) VALUES ('PR', 'Porto Rico'); INSERT INTO `list` (`id`, `value`) VALUES ('PT', 'Portugal'); INSERT INTO `list` (`id`, `value`) VALUES ('QA', 'Qatar'); INSERT INTO `list` (`id`, `value`) VALUES ('HK', 'R.A.S. chinoise de Hong Kong'); INSERT INTO `list` (`id`, `value`) VALUES ('MO', 'R.A.S. chinoise de Macao'); INSERT INTO `list` (`id`, `value`) VALUES ('CF', 'République centrafricaine'); INSERT INTO `list` (`id`, `value`) VALUES ('DO', 'République dominicaine'); INSERT INTO `list` (`id`, `value`) VALUES ('RO', 'Roumanie'); INSERT INTO `list` (`id`, `value`) VALUES ('GB', 'Royaume-Uni'); INSERT INTO `list` (`id`, `value`) VALUES ('RU', 'Russie'); INSERT INTO `list` (`id`, `value`) VALUES ('RW', 'Rwanda'); INSERT INTO `list` (`id`, `value`) VALUES ('EH', 'Sahara occidental'); INSERT INTO `list` (`id`, `value`) VALUES ('BL', 'Saint-Barthélemy'); INSERT INTO `list` (`id`, `value`) VALUES ('KN', 'Saint-Christophe-et-Niévès'); INSERT INTO `list` (`id`, `value`) VALUES ('SM', 'Saint-Marin'); INSERT INTO `list` (`id`, `value`) VALUES ('MF', 'Saint-Martin'); INSERT INTO `list` (`id`, `value`) VALUES ('SX', 'Saint-Martin (partie néerlandaise)'); INSERT INTO `list` (`id`, `value`) VALUES ('PM', 'Saint-Pierre-et-Miquelon'); INSERT INTO `list` (`id`, `value`) VALUES ('VC', 'Saint-Vincent-et-les-Grenadines'); INSERT INTO `list` (`id`, `value`) VALUES ('SH', 'Sainte-Hélène'); INSERT INTO `list` (`id`, `value`) VALUES ('LC', 'Sainte-Lucie'); INSERT INTO `list` (`id`, `value`) VALUES ('SV', 'Salvador'); INSERT INTO `list` (`id`, `value`) VALUES ('WS', 'Samoa'); INSERT INTO `list` (`id`, `value`) VALUES ('AS', 'Samoa américaines'); INSERT INTO `list` (`id`, `value`) VALUES ('ST', 'Sao Tomé-et-Principe'); INSERT INTO `list` (`id`, `value`) VALUES ('SN', 'Sénégal'); INSERT INTO `list` (`id`, `value`) VALUES ('RS', 'Serbie'); INSERT INTO `list` (`id`, `value`) VALUES ('SC', 'Seychelles'); INSERT INTO `list` (`id`, `value`) VALUES ('SL', 'Sierra Leone'); INSERT INTO `list` (`id`, `value`) VALUES ('SG', 'Singapour'); INSERT INTO `list` (`id`, `value`) VALUES ('SK', 'Slovaquie'); INSERT INTO `list` (`id`, `value`) VALUES ('SI', 'Slovénie'); INSERT INTO `list` (`id`, `value`) VALUES ('SO', 'Somalie'); INSERT INTO `list` (`id`, `value`) VALUES ('SD', 'Soudan'); INSERT INTO `list` (`id`, `value`) VALUES ('SS', 'Soudan du Sud'); INSERT INTO `list` (`id`, `value`) VALUES ('LK', 'Sri Lanka'); INSERT INTO `list` (`id`, `value`) VALUES ('SE', 'Suède'); INSERT INTO `list` (`id`, `value`) VALUES ('CH', 'Suisse'); INSERT INTO `list` (`id`, `value`) VALUES ('SR', 'Suriname'); INSERT INTO `list` (`id`, `value`) VALUES ('SJ', 'Svalbard et Jan Mayen'); INSERT INTO `list` (`id`, `value`) VALUES ('SY', 'Syrie'); INSERT INTO `list` (`id`, `value`) VALUES ('TJ', 'Tadjikistan'); INSERT INTO `list` (`id`, `value`) VALUES ('TW', 'Taïwan'); INSERT INTO `list` (`id`, `value`) VALUES ('TZ', 'Tanzanie'); INSERT INTO `list` (`id`, `value`) VALUES ('TD', 'Tchad'); INSERT INTO `list` (`id`, `value`) VALUES ('CZ', 'Tchéquie'); INSERT INTO `list` (`id`, `value`) VALUES ('TF', 'Terres australes françaises'); INSERT INTO `list` (`id`, `value`) VALUES ('IO', 'Territoire britannique de l’océan Indien'); INSERT INTO `list` (`id`, `value`) VALUES ('PS', 'Territoires palestiniens'); INSERT INTO `list` (`id`, `value`) VALUES ('TH', 'Thaïlande'); INSERT INTO `list` (`id`, `value`) VALUES ('TL', 'Timor oriental'); INSERT INTO `list` (`id`, `value`) VALUES ('TG', 'Togo'); INSERT INTO `list` (`id`, `value`) VALUES ('TK', 'Tokelau'); INSERT INTO `list` (`id`, `value`) VALUES ('TO', 'Tonga'); INSERT INTO `list` (`id`, `value`) VALUES ('TT', 'Trinité-et-Tobago'); INSERT INTO `list` (`id`, `value`) VALUES ('TN', 'Tunisie'); INSERT INTO `list` (`id`, `value`) VALUES ('TM', 'Turkménistan'); INSERT INTO `list` (`id`, `value`) VALUES ('TR', 'Turquie'); INSERT INTO `list` (`id`, `value`) VALUES ('TV', 'Tuvalu'); INSERT INTO `list` (`id`, `value`) VALUES ('UA', 'Ukraine'); INSERT INTO `list` (`id`, `value`) VALUES ('UY', 'Uruguay'); INSERT INTO `list` (`id`, `value`) VALUES ('VU', 'Vanuatu'); INSERT INTO `list` (`id`, `value`) VALUES ('VE', 'Venezuela'); INSERT INTO `list` (`id`, `value`) VALUES ('VN', 'Vietnam'); INSERT INTO `list` (`id`, `value`) VALUES ('WF', 'Wallis-et-Futuna'); INSERT INTO `list` (`id`, `value`) VALUES ('YE', 'Yémen'); INSERT INTO `list` (`id`, `value`) VALUES ('ZM', 'Zambie'); INSERT INTO `list` (`id`, `value`) VALUES ('ZW', 'Zimbabwe');
{ "pile_set_name": "Github" }
from unittest.mock import patch from mindmeld.components.nlp import NaturalLanguageProcessor def test_intent_classifier_svm(kwik_e_mart_app_path): nlp = NaturalLanguageProcessor(app_path=kwik_e_mart_app_path) search_grid = { "C": [0.1, 0.5, 1, 5, 10, 50, 100, 1000, 5000], "kernel": ["linear", "rbf", "poly"], } param_selection_settings = {"grid": search_grid, "type": "k-fold", "k": 10} ic = nlp.domains["store_info"].intent_classifier ic.fit( model_settings={"classifier_type": "svm"}, param_selection=param_selection_settings, ) def test_intent_classifier_logreg(kwik_e_mart_app_path): nlp = NaturalLanguageProcessor(app_path=kwik_e_mart_app_path) features = { "bag-of-words": {"lengths": [1]}, "freq": {"bins": 5}, "in-gaz": {}, "length": {}, } ic = nlp.domains["store_info"].intent_classifier ic.fit(model_settings={"classifier_type": "logreg"}, features=features) features = { "bag-of-words": {"lengths": [1, 2]}, "freq": {"bins": 5}, "in-gaz": {}, "length": {}, } ic.fit(model_settings={"classifier_type": "logreg"}, features=features) def test_intent_classifier_random_forest(kwik_e_mart_app_path, caplog): nlp = NaturalLanguageProcessor(app_path=kwik_e_mart_app_path) search_grid = { "n_estimators": [5, 10, 15, 20], "criterion": ["gini", "entropy"], "warm_start": [True, False], } param_selection_settings = {"grid": search_grid, "type": "k-fold", "k": 10} ic = nlp.domains["store_info"].intent_classifier ic.fit( model_settings={"classifier_type": "rforest"}, param_selection=param_selection_settings, ) ic.fit( model_settings={"classifier_type": "rforest"}, param_selection={ "type": "k-fold", "k": 10, "grid": {"class_bias": [0.7, 0.3, 0]}, }, ) features = { "bag-of-words": {"lengths": [1, 2]}, "freq": {"bins": 5}, "in-gaz": {}, "length": {}, } with patch("logging.Logger.warning") as mock: ic.fit(model_settings={"classifier_type": "rforest"}, features=features) mock.assert_any_call("Unexpected param `C`, dropping it from model config.") mock.assert_any_call( "Unexpected param `fit_intercept`, dropping it from model config." )
{ "pile_set_name": "Github" }
SELECT * FROM KNOWLEDGE_TAGS WHERE KNOWLEDGE_ID = ? AND TAG_ID = ? ;
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.5.0_22) on Mon May 24 23:03:19 CEST 2010 --> <TITLE> org.mockito.internal.matchers (Mockito API) </TITLE> <META NAME="keywords" CONTENT="org.mockito.internal.matchers package"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../stylesheet.css" TITLE="Style"> </HEAD> <BODY BGCOLOR="white"> <FONT size="+1" CLASS="FrameTitleFont"> <A HREF="../../../../org/mockito/internal/matchers/package-summary.html" target="classFrame">org.mockito.internal.matchers</A></FONT> <TABLE BORDER="0" WIDTH="100%" SUMMARY=""> <TR> <TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont"> Interfaces</FONT>&nbsp; <FONT CLASS="FrameItemFont"> <BR> <A HREF="CapturesArguments.html" title="interface in org.mockito.internal.matchers" target="classFrame"><I>CapturesArguments</I></A> <BR> <A HREF="ContainsExtraTypeInformation.html" title="interface in org.mockito.internal.matchers" target="classFrame"><I>ContainsExtraTypeInformation</I></A> <BR> <A HREF="MatcherDecorator.html" title="interface in org.mockito.internal.matchers" target="classFrame"><I>MatcherDecorator</I></A> <BR> <A HREF="VarargMatcher.html" title="interface in org.mockito.internal.matchers" target="classFrame"><I>VarargMatcher</I></A></FONT></TD> </TR> </TABLE> <TABLE BORDER="0" WIDTH="100%" SUMMARY=""> <TR> <TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont"> Classes</FONT>&nbsp; <FONT CLASS="FrameItemFont"> <BR> <A HREF="And.html" title="class in org.mockito.internal.matchers" target="classFrame">And</A> <BR> <A HREF="Any.html" title="class in org.mockito.internal.matchers" target="classFrame">Any</A> <BR> <A HREF="AnyVararg.html" title="class in org.mockito.internal.matchers" target="classFrame">AnyVararg</A> <BR> <A HREF="ArrayEquals.html" title="class in org.mockito.internal.matchers" target="classFrame">ArrayEquals</A> <BR> <A HREF="CapturingMatcher.html" title="class in org.mockito.internal.matchers" target="classFrame">CapturingMatcher</A> <BR> <A HREF="CompareEqual.html" title="class in org.mockito.internal.matchers" target="classFrame">CompareEqual</A> <BR> <A HREF="CompareTo.html" title="class in org.mockito.internal.matchers" target="classFrame">CompareTo</A> <BR> <A HREF="Contains.html" title="class in org.mockito.internal.matchers" target="classFrame">Contains</A> <BR> <A HREF="EndsWith.html" title="class in org.mockito.internal.matchers" target="classFrame">EndsWith</A> <BR> <A HREF="Equality.html" title="class in org.mockito.internal.matchers" target="classFrame">Equality</A> <BR> <A HREF="Equals.html" title="class in org.mockito.internal.matchers" target="classFrame">Equals</A> <BR> <A HREF="EqualsWithDelta.html" title="class in org.mockito.internal.matchers" target="classFrame">EqualsWithDelta</A> <BR> <A HREF="Find.html" title="class in org.mockito.internal.matchers" target="classFrame">Find</A> <BR> <A HREF="GreaterOrEqual.html" title="class in org.mockito.internal.matchers" target="classFrame">GreaterOrEqual</A> <BR> <A HREF="GreaterThan.html" title="class in org.mockito.internal.matchers" target="classFrame">GreaterThan</A> <BR> <A HREF="InstanceOf.html" title="class in org.mockito.internal.matchers" target="classFrame">InstanceOf</A> <BR> <A HREF="LessOrEqual.html" title="class in org.mockito.internal.matchers" target="classFrame">LessOrEqual</A> <BR> <A HREF="LessThan.html" title="class in org.mockito.internal.matchers" target="classFrame">LessThan</A> <BR> <A HREF="LocalizedMatcher.html" title="class in org.mockito.internal.matchers" target="classFrame">LocalizedMatcher</A> <BR> <A HREF="MatchersPrinter.html" title="class in org.mockito.internal.matchers" target="classFrame">MatchersPrinter</A> <BR> <A HREF="Matches.html" title="class in org.mockito.internal.matchers" target="classFrame">Matches</A> <BR> <A HREF="Not.html" title="class in org.mockito.internal.matchers" target="classFrame">Not</A> <BR> <A HREF="NotNull.html" title="class in org.mockito.internal.matchers" target="classFrame">NotNull</A> <BR> <A HREF="Null.html" title="class in org.mockito.internal.matchers" target="classFrame">Null</A> <BR> <A HREF="Or.html" title="class in org.mockito.internal.matchers" target="classFrame">Or</A> <BR> <A HREF="Same.html" title="class in org.mockito.internal.matchers" target="classFrame">Same</A> <BR> <A HREF="StartsWith.html" title="class in org.mockito.internal.matchers" target="classFrame">StartsWith</A></FONT></TD> </TR> </TABLE> </BODY> </HTML>
{ "pile_set_name": "Github" }
/* Derby - Class org.apache.derby.client.am.ProductLevel Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.client.am; public class ProductLevel { String databaseProductName_; int versionLevel_; int releaseLevel_; private int modificationLevel_; // The following value is sent in the srvrlslv level String databaseProductVersion_; // The productID is set by the constructors. // dabaseProductVersion added by derby to include srvrlslv public ProductLevel(String productID, String databaseProductName, String srvrlslv) { // this.productID has the following format // CSS for Derby // vv = version id // rr = release id // m = modification level versionLevel_ = Integer.parseInt(productID.substring(3, 5)); releaseLevel_ = Integer.parseInt(productID.substring(5, 7)); modificationLevel_ = Integer.parseInt(productID.substring(7, 8)); databaseProductName_ = (databaseProductName == null) ? "Derby" : databaseProductName; // This is the srvclsnm in PROTOCOL. // databaseProductVersion - extracted from the srvrlslv. // srvrlslv has the format <PRDID>/<ALTERNATE VERSION FORMAT> // for example Derby has a four part verison number so might send // CSS10000/10.0.1.1 beta. If the alternate version format is not // specified, // databaseProductVersion_ will just be set to the srvrlslvl. // final fallback will be the product id. // this is the value returned with the getDatabaseProductVersion() // metadata call int dbVersionOffset = 0; if (srvrlslv != null) { dbVersionOffset = srvrlslv.indexOf('/') + 1; // if there was no '/' dbVersionOffset will just be 0 databaseProductVersion_ = srvrlslv.substring(dbVersionOffset); } if (databaseProductVersion_ == null) { databaseProductVersion_ = productID; } } boolean greaterThanOrEqualTo(int versionLevel, int releaseLevel, int modificationLevel) { if (versionLevel_ > versionLevel) { return true; } else if (versionLevel_ == versionLevel) { if (releaseLevel_ > releaseLevel) { return true; } else if (releaseLevel_ == releaseLevel) { if (modificationLevel_ >= modificationLevel) { return true; } } } return false; } boolean lessThan(int versionLevel, int releaseLevel, int modificationLevel) { if (versionLevel_ < versionLevel) { return true; } else if (versionLevel_ == versionLevel) { if (releaseLevel_ < releaseLevel) { return true; } else if (releaseLevel_ == releaseLevel) { if (modificationLevel_ < modificationLevel) { return true; } } } return false; } }
{ "pile_set_name": "Github" }
#if 0 #include "defs.h" /*->atcmd.h (uchar)*/ #endif typedef enum { ATDA_NUM, ATDA_STR } AtdAType; typedef enum { ATDP_NUL, ATDP_NUM, ATDP_STR } AtdPType; #define ADDR_MAX 63 #define PORT_MAX 63 #define PT_MAX 40 #define SREG_MAX 12 typedef struct { struct { struct { char str[ADDR_MAX+1]; AtdAType type; } addr; struct { char str[PORT_MAX+1]; AtdPType type; } port; } d; int f; uchar s[SREG_MAX+1]; int pb[2]; int pd; int pl; int pr; struct { uchar str[PT_MAX+1]; int len; int wont; } pt; int pv; } Atcmd; extern Atcmd atcmd, atcmdNV; #define CHAR_ESC (atcmd.s[2]) #define CHAR_CR (atcmd.s[3]) #define CHAR_LF (atcmd.s[4]) #define CHAR_BS (atcmd.s[5]) void atcmdInit(void); void atcmdD(const char *s, AtdAType at, AtdPType pt); int atcmdFake(const char *s, const char *vals); int atcmdH(const char *s); int atcmdI(const char *s); int atcmdSQuery(const char *s); int atcmdSSet(const char *s); void atcmdZ(void); void atcmdAW(void); int atcmdPB(const char *s); int atcmdPD(const char *s); int atcmdPL(const char *s); void atcmdPQ(void); int atcmdPR(const char *s); int atcmdPT(const char *s); int atcmdPTSet(const char *s); int atcmdPV(const char *s);
{ "pile_set_name": "Github" }
{ "ctm": { "ctm_version": 1, "type": "pillar", "layer": "TRANSLUCENT", "textures": [ "chisel:blocks/icepillar/plaingreek-ctmv" ] } }
{ "pile_set_name": "Github" }
{ "version" : "5.5.0", "timestamp" : 1581572093660, "path" : "query-validation-tests/joins.json", "schemas" : { "CSAS_INNER_JOIN_0.INNER_JOIN" : { "schema" : "`ROWKEY` BIGINT KEY, `T_ROWTIME` BIGINT, `T_ROWKEY` BIGINT, `T_ID` BIGINT, `T_NAME` STRING, `T_VALUE` BIGINT, `F1` STRING", "serdeOptions" : [ ] }, "CSAS_INNER_JOIN_0.Join.Left" : { "schema" : "`ROWKEY` BIGINT KEY, `T_ID` BIGINT, `T_NAME` STRING, `T_VALUE` BIGINT, `T_ROWTIME` BIGINT, `T_ROWKEY` BIGINT", "serdeOptions" : [ ] }, "CSAS_INNER_JOIN_0.KafkaTopic_Right.Source" : { "schema" : "`ROWKEY` BIGINT KEY, `ID` BIGINT, `F1` STRING, `F2` BIGINT", "serdeOptions" : [ ] }, "CSAS_INNER_JOIN_0.KafkaTopic_Left.Source" : { "schema" : "`ROWKEY` BIGINT KEY, `ID` BIGINT, `NAME` STRING, `VALUE` BIGINT", "serdeOptions" : [ ] }, "CSAS_INNER_JOIN_0.Join.Right" : { "schema" : "`ROWKEY` BIGINT KEY, `TT_ID` BIGINT, `TT_F1` STRING, `TT_F2` BIGINT, `TT_ROWTIME` BIGINT, `TT_ROWKEY` BIGINT", "serdeOptions" : [ ] } }, "testCase" : { "name" : "stream stream inner join all left fields some right - JSON", "inputs" : [ { "topic" : "left_topic", "key" : 0, "value" : { "ID" : 0, "NAME" : "zero", "VALUE" : 0 }, "timestamp" : 0 }, { "topic" : "right_topic", "key" : 0, "value" : { "ID" : 0, "F1" : "blah", "F2" : 50 }, "timestamp" : 10000 }, { "topic" : "left_topic", "key" : 10, "value" : { "ID" : 10, "NAME" : "100", "VALUE" : 5 }, "timestamp" : 11000 }, { "topic" : "left_topic", "key" : 0, "value" : { "ID" : 0, "NAME" : "foo", "VALUE" : 100 }, "timestamp" : 13000 }, { "topic" : "right_topic", "key" : 0, "value" : { "ID" : 0, "F1" : "a", "F2" : 10 }, "timestamp" : 15000 }, { "topic" : "right_topic", "key" : 100, "value" : { "ID" : 100, "F1" : "newblah", "F2" : 150 }, "timestamp" : 16000 }, { "topic" : "left_topic", "key" : 90, "value" : { "ID" : 90, "NAME" : "ninety", "VALUE" : 90 }, "timestamp" : 17000 }, { "topic" : "left_topic", "key" : 0, "value" : { "ID" : 0, "NAME" : "bar", "VALUE" : 99 }, "timestamp" : 30000 } ], "outputs" : [ { "topic" : "INNER_JOIN", "key" : 0, "value" : { "T_ID" : 0, "T_NAME" : "zero", "T_VALUE" : 0, "F1" : "blah", "T_ROWKEY" : 0, "T_ROWTIME" : 0 }, "timestamp" : 10000 }, { "topic" : "INNER_JOIN", "key" : 0, "value" : { "T_ID" : 0, "T_NAME" : "foo", "T_VALUE" : 100, "F1" : "blah", "T_ROWKEY" : 0, "T_ROWTIME" : 13000 }, "timestamp" : 13000 }, { "topic" : "INNER_JOIN", "key" : 0, "value" : { "T_ID" : 0, "T_NAME" : "foo", "T_VALUE" : 100, "F1" : "a", "T_ROWKEY" : 0, "T_ROWTIME" : 13000 }, "timestamp" : 15000 } ], "topics" : [ { "name" : "right_topic", "replicas" : 1, "numPartitions" : 4 }, { "name" : "INNER_JOIN", "replicas" : 1, "numPartitions" : 4 }, { "name" : "left_topic", "replicas" : 1, "numPartitions" : 4 } ], "statements" : [ "CREATE STREAM TEST (ROWKEY BIGINT KEY, ID BIGINT, NAME STRING, VALUE BIGINT) WITH (KAFKA_TOPIC='left_topic', KEY='ID', VALUE_FORMAT='JSON');", "CREATE STREAM TEST_STREAM (ROWKEY BIGINT KEY, ID BIGINT, F1 STRING, F2 BIGINT) WITH (KAFKA_TOPIC='right_topic', KEY='ID', VALUE_FORMAT='JSON');", "CREATE STREAM INNER_JOIN AS SELECT\n T.*,\n TT.F1 F1\nFROM TEST T\nINNER JOIN TEST_STREAM TT WITHIN 11 SECONDS ON ((T.ID = TT.ID))\nEMIT CHANGES;" ], "post" : { "sources" : [ { "name" : "INNER_JOIN", "type" : "STREAM", "schema" : "`ROWKEY` BIGINT KEY, `T_ROWTIME` BIGINT, `T_ROWKEY` BIGINT, `T_ID` BIGINT, `T_NAME` STRING, `T_VALUE` BIGINT, `F1` STRING", "keyFormat" : { "format" : "KAFKA" }, "serdeOptions" : [ ] }, { "name" : "TEST", "type" : "STREAM", "schema" : "`ROWKEY` BIGINT KEY, `ID` BIGINT, `NAME` STRING, `VALUE` BIGINT", "keyFormat" : { "format" : "KAFKA" }, "serdeOptions" : [ ] }, { "name" : "TEST_STREAM", "type" : "STREAM", "schema" : "`ROWKEY` BIGINT KEY, `ID` BIGINT, `F1` STRING, `F2` BIGINT", "keyFormat" : { "format" : "KAFKA" }, "serdeOptions" : [ ] } ], "topics" : { "topics" : [ { "name" : "_confluent-ksql-some.ksql.service.idquery_CSAS_INNER_JOIN_0-KSTREAM-JOINTHIS-0000000008-store-changelog", "keyFormat" : { "formatInfo" : { "format" : "KAFKA" } }, "valueFormat" : { "format" : "JSON" } }, { "name" : "INNER_JOIN", "keyFormat" : { "formatInfo" : { "format" : "KAFKA" } }, "valueFormat" : { "format" : "JSON" }, "partitions" : 4 }, { "name" : "_confluent-ksql-some.ksql.service.idquery_CSAS_INNER_JOIN_0-KSTREAM-JOINOTHER-0000000009-store-changelog", "keyFormat" : { "formatInfo" : { "format" : "KAFKA" } }, "valueFormat" : { "format" : "JSON" } }, { "name" : "left_topic", "keyFormat" : { "formatInfo" : { "format" : "KAFKA" } }, "valueFormat" : { "format" : "JSON" }, "partitions" : 4 }, { "name" : "right_topic", "keyFormat" : { "formatInfo" : { "format" : "KAFKA" } }, "valueFormat" : { "format" : "JSON" }, "partitions" : 4 } ] } } } }
{ "pile_set_name": "Github" }
{ "$schema": "http://json-schema.org/draft-04/schema#", "_id": "kapi.blackhole.module_resp", "description": "AMQP API for blackhole.module_resp", "properties": { "Error": { "type": "string" }, "Event-Category": { "enum": [ "blackhole" ], "type": "string" }, "Event-Name": { "enum": [ "module_resp" ], "type": "string" }, "Persisted": { "type": "boolean" }, "Started": { "type": "boolean" } }, "required": [ "Persisted", "Started" ], "type": "object" }
{ "pile_set_name": "Github" }
""" Lightnet Data Module |br| This module contains everything related to pre- and post-processing of your data. It also has functionality to create datasets from images and annotations that are parseable with brambox_. """ from ._dataloading import * from . import transform # Lightnet from ._dataset_brambox import * from ._dataset_darknet import *
{ "pile_set_name": "Github" }
// ---- Created with 3Dmigoto v1.3.14 on Sat Feb 23 16:40:28 2019 // 3Dmigoto declarations #define cmp - void main( float4 v0 : POSITION0, float4 v1 : COLOR0, float2 v2 : TEXCOORD0, out float4 o0 : SV_Position0, out float4 o1 : COLOR0, out float2 o2 : TEXCOORD0) { o0.xyzw = v0.xyzw; o1.xyzw = v1.xyzw; o2.xy = v2.xy; return; }
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html> <head> <meta content="text/html; charset=UTF-8" http-equiv="Content-Type"> <title>module SafeYAML - jekyll-1.4.0 Documentation</title> <link type="text/css" media="screen" href="./rdoc.css" rel="stylesheet"> <script type="text/javascript"> var rdoc_rel_prefix = "./"; </script> <script type="text/javascript" charset="utf-8" src="./js/jquery.js"></script> <script type="text/javascript" charset="utf-8" src="./js/navigation.js"></script> <script type="text/javascript" charset="utf-8" src="./js/search_index.js"></script> <script type="text/javascript" charset="utf-8" src="./js/search.js"></script> <script type="text/javascript" charset="utf-8" src="./js/searcher.js"></script> <script type="text/javascript" charset="utf-8" src="./js/darkfish.js"></script> <body id="top" class="module"> <nav id="metadata"> <nav id="home-section" class="section"> <h3 class="section-header"> <a href="./index.html">Home</a> <a href="./table_of_contents.html#classes">Classes</a> <a href="./table_of_contents.html#methods">Methods</a> </h3> </nav> <nav id="search-section" class="section project-section" class="initially-hidden"> <form action="#" method="get" accept-charset="utf-8"> <h3 class="section-header"> <input type="text" name="search" placeholder="Search" id="search-field" title="Type to search, Up and Down to navigate, Enter to load"> </h3> </form> <ul id="search-results" class="initially-hidden"></ul> </nav> <div id="file-metadata"> <nav id="file-list-section" class="section"> <h3 class="section-header">Defined In</h3> <ul> <li>lib/jekyll.rb </ul> </nav> </div> <div id="class-metadata"> </div> <div id="project-metadata"> <nav id="fileindex-section" class="section project-section"> <h3 class="section-header">Pages</h3> <ul> <li class="file"><a href="./LICENSE.html">LICENSE</a> <li class="file"><a href="./README_markdown.html">README.markdown</a> <li class="file"><a href="./lib/jekyll/mime_types.html">mime.types</a> <li class="file"><a href="./lib/site_template/_layouts/default_html.html">default.html</a> <li class="file"><a href="./lib/site_template/_layouts/post_html.html">post.html</a> <li class="file"><a href="./lib/site_template/css/main_css.html">main.css</a> <li class="file"><a href="./lib/site_template/css/syntax_css.html">syntax.css</a> <li class="file"><a href="./lib/site_template/index_html.html">index.html</a> </ul> </nav> <nav id="classindex-section" class="section project-section"> <h3 class="section-header">Class and Module Index</h3> <ul class="link-list"> <li><a href="./Date.html">Date</a> <li><a href="./EntryFilter.html">EntryFilter</a> <li><a href="./Enumerable.html">Enumerable</a> <li><a href="./File.html">File</a> <li><a href="./Hash.html">Hash</a> <li><a href="./Jekyll.html">Jekyll</a> <li><a href="./Jekyll/Command.html">Jekyll::Command</a> <li><a href="./Jekyll/Commands.html">Jekyll::Commands</a> <li><a href="./Jekyll/Commands/Build.html">Jekyll::Commands::Build</a> <li><a href="./Jekyll/Commands/Doctor.html">Jekyll::Commands::Doctor</a> <li><a href="./Jekyll/Commands/New.html">Jekyll::Commands::New</a> <li><a href="./Jekyll/Commands/Serve.html">Jekyll::Commands::Serve</a> <li><a href="./Jekyll/Configuration.html">Jekyll::Configuration</a> <li><a href="./Jekyll/Converter.html">Jekyll::Converter</a> <li><a href="./Jekyll/Converters.html">Jekyll::Converters</a> <li><a href="./Jekyll/Converters/Identity.html">Jekyll::Converters::Identity</a> <li><a href="./Jekyll/Converters/Markdown.html">Jekyll::Converters::Markdown</a> <li><a href="./Jekyll/Converters/Markdown/KramdownParser.html">Jekyll::Converters::Markdown::KramdownParser</a> <li><a href="./Jekyll/Converters/Markdown/MarukuParser.html">Jekyll::Converters::Markdown::MarukuParser</a> <li><a href="./Jekyll/Converters/Markdown/RDiscountParser.html">Jekyll::Converters::Markdown::RDiscountParser</a> <li><a href="./Jekyll/Converters/Markdown/RedcarpetParser.html">Jekyll::Converters::Markdown::RedcarpetParser</a> <li><a href="./Jekyll/Converters/Markdown/RedcarpetParser/CommonMethods.html">Jekyll::Converters::Markdown::RedcarpetParser::CommonMethods</a> <li><a href="./Jekyll/Converters/Markdown/RedcarpetParser/WithPygments.html">Jekyll::Converters::Markdown::RedcarpetParser::WithPygments</a> <li><a href="./Jekyll/Converters/Markdown/RedcarpetParser/WithoutPygments.html">Jekyll::Converters::Markdown::RedcarpetParser::WithoutPygments</a> <li><a href="./Jekyll/Converters/Textile.html">Jekyll::Converters::Textile</a> <li><a href="./Jekyll/Convertible.html">Jekyll::Convertible</a> <li><a href="./Jekyll/Deprecator.html">Jekyll::Deprecator</a> <li><a href="./Jekyll/Draft.html">Jekyll::Draft</a> <li><a href="./Jekyll/Excerpt.html">Jekyll::Excerpt</a> <li><a href="./Jekyll/FatalException.html">Jekyll::FatalException</a> <li><a href="./Jekyll/Filters.html">Jekyll::Filters</a> <li><a href="./Jekyll/Generator.html">Jekyll::Generator</a> <li><a href="./Jekyll/Generators.html">Jekyll::Generators</a> <li><a href="./Jekyll/Generators/Pagination.html">Jekyll::Generators::Pagination</a> <li><a href="./Jekyll/GistTag.html">Jekyll::GistTag</a> <li><a href="./Jekyll/Layout.html">Jekyll::Layout</a> <li><a href="./Jekyll/Page.html">Jekyll::Page</a> <li><a href="./Jekyll/Pager.html">Jekyll::Pager</a> <li><a href="./Jekyll/Plugin.html">Jekyll::Plugin</a> <li><a href="./Jekyll/Post.html">Jekyll::Post</a> <li><a href="./Jekyll/RelatedPosts.html">Jekyll::RelatedPosts</a> <li><a href="./Jekyll/Site.html">Jekyll::Site</a> <li><a href="./Jekyll/Site/Cleaner.html">Jekyll::Site::Cleaner</a> <li><a href="./Jekyll/StaticFile.html">Jekyll::StaticFile</a> <li><a href="./Jekyll/Stevenson.html">Jekyll::Stevenson</a> <li><a href="./Jekyll/Tags.html">Jekyll::Tags</a> <li><a href="./Jekyll/Tags/HighlightBlock.html">Jekyll::Tags::HighlightBlock</a> <li><a href="./Jekyll/Tags/IncludeTag.html">Jekyll::Tags::IncludeTag</a> <li><a href="./Jekyll/Tags/IncludeTagError.html">Jekyll::Tags::IncludeTagError</a> <li><a href="./Jekyll/Tags/PostComparer.html">Jekyll::Tags::PostComparer</a> <li><a href="./Jekyll/Tags/PostUrl.html">Jekyll::Tags::PostUrl</a> <li><a href="./Jekyll/URL.html">Jekyll::URL</a> <li><a href="./Liquid.html">Liquid</a> <li><a href="./Object.html">Object</a> <li><a href="./SafeYAML.html">SafeYAML</a> </ul> </nav> </div> </nav> <div id="documentation"> <h1 class="module">module SafeYAML</h1> <div id="description" class="description"> </div><!-- description --> <section id="5Buntitled-5D" class="documentation-section"> <!-- Methods --> </section><!-- 5Buntitled-5D --> </div><!-- documentation --> <footer id="validator-badges"> <p><a href="http://validator.w3.org/check/referer">[Validate]</a> <p>Generated by <a href="https://github.com/rdoc/rdoc">RDoc</a> 4.0.0. <p>Generated with the <a href="http://deveiate.org/projects/Darkfish-Rdoc/">Darkfish Rdoc Generator</a> 3. </footer>
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: f6315a9155238724c9b221c155c73f68 timeCreated: 1515601872 licenseType: Free MonoImporter: externalObjects: {} serializedVersion: 2 defaultReferences: [] executionOrder: 0 icon: {instanceID: 0} userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
/* * Copyright (C) 2004 Manuel Novoa III <[email protected]> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ /* July 29, 2004 * * This is a hacked replacement for the 'trx' utility used to create * wrt54g .trx firmware files. It isn't pretty, but it does the job * for me. * * As an extension, you can specify a larger maximum length for the * .trx file using '-m'. It will be rounded up to be a multiple of 4K. * NOTE: This space will be malloc()'d. * * August 16, 2004 * * Sigh... Make it endian-neutral. * * TODO: Support '-b' option to specify offsets for each file. * * February 19, 2005 - mbm * * Add -a (align offset) and -b (absolute offset) * * March 24, 2010 - markus * * extend trx header struct for new version * assume v1 for as default * Add option -2 to allow v2 header */ #include <stdio.h> #include <stdlib.h> #include <stddef.h> #include <stdint.h> #include <string.h> #include <errno.h> #include <unistd.h> #if __BYTE_ORDER == __BIG_ENDIAN #define STORE32_LE(X) bswap_32(X) #define LOAD32_LE(X) bswap_32(X) #elif __BYTE_ORDER == __LITTLE_ENDIAN #define STORE32_LE(X) (X) #define LOAD32_LE(X) (X) #else #error unkown endianness! #endif uint32_t crc32buf(char *buf, size_t len); /**********************************************************************/ /* from trxhdr.h */ #define TRX_MAGIC 0x30524448 /* "HDR0" */ #define TRX_MAX_LEN 0x720000 #define TRX_NO_HEADER 1 /* Do not write TRX header */ struct trx_header { uint32_t magic; /* "HDR0" */ uint32_t len; /* Length of file including header */ uint32_t crc32; /* 32-bit CRC from flag_version to end of file */ uint32_t flag_version; /* 0:15 flags, 16:31 version */ uint32_t offsets[4]; /* Offsets of partitions from start of header */ }; /**********************************************************************/ void usage(void) __attribute__ (( __noreturn__ )); void usage(void) { fprintf(stderr, "Usage:\n"); fprintf(stderr, " trx [-2] [-o outfile] [-m maxlen] [-a align] [-b absolute offset] [-x relative offset]\n"); fprintf(stderr, " [-f file] [-f file [-f file [-f file (v2 only)]]]\n"); exit(EXIT_FAILURE); } int main(int argc, char **argv) { FILE *out = stdout; FILE *in; char *ofn = NULL; char *buf; char *e; int c, i, append = 0; size_t n; ssize_t n2; uint32_t cur_len, fsmark=0; unsigned long maxlen = TRX_MAX_LEN; struct trx_header *p; char trx_version = 1; unsigned char binheader[32]; fprintf(stderr, "mjn3's trx replacement - v0.81.1\n"); if (!(buf = malloc(maxlen))) { fprintf(stderr, "malloc failed\n"); return EXIT_FAILURE; } p = (struct trx_header *) buf; p->magic = STORE32_LE(TRX_MAGIC); cur_len = sizeof(struct trx_header) - 4; /* assume v1 header */ in = NULL; i = 0; while ((c = getopt(argc, argv, "-:2o:m:a:x:b:f:A:F:")) != -1) { switch (c) { case '2': /* take care that nothing was written to buf so far */ if (cur_len != sizeof(struct trx_header) - 4) { fprintf(stderr, "-2 has to be used before any other argument!\n"); } else { trx_version = 2; cur_len += 4; } break; case 'F': fsmark = cur_len; case 'A': append = 1; /* fall through */ case 'f': case 1: if (!append) p->offsets[i++] = STORE32_LE(cur_len); if (!(in = fopen(optarg, "r"))) { fprintf(stderr, "can not open \"%s\" for reading\n", optarg); usage(); } n = fread(buf + cur_len, 1, maxlen - cur_len, in); if (!feof(in)) { fprintf(stderr, "fread failure or file \"%s\" too large\n",optarg); fclose(in); return EXIT_FAILURE; } fclose(in); #undef ROUND #define ROUND 4 if (n & (ROUND-1)) { memset(buf + cur_len + n, 0, ROUND - (n & (ROUND-1))); n += ROUND - (n & (ROUND-1)); } cur_len += n; append = 0; break; case 'o': ofn = optarg; if (ofn && !(out = fopen(ofn, "w"))) { fprintf(stderr, "can not open \"%s\" for writing\n", ofn); usage(); } break; case 'm': errno = 0; maxlen = strtoul(optarg, &e, 0); if (errno || (e == optarg) || *e) { fprintf(stderr, "illegal numeric string\n"); usage(); } #undef ROUND #define ROUND 0x1000 if (maxlen & (ROUND-1)) { maxlen += (ROUND - (maxlen & (ROUND-1))); } if (maxlen < ROUND) { fprintf(stderr, "maxlen too small (or wrapped)\n"); usage(); } if (maxlen > TRX_MAX_LEN) { fprintf(stderr, "WARNING: maxlen exceeds default maximum! Beware of overwriting nvram!\n"); } if (!(buf = realloc(buf,maxlen))) { fprintf(stderr, "realloc failed"); return EXIT_FAILURE; } p = (struct trx_header *) buf; break; case 'a': errno = 0; n = strtoul(optarg, &e, 0); if (errno || (e == optarg) || *e) { fprintf(stderr, "illegal numeric string\n"); usage(); } if (cur_len & (n-1)) { n = n - (cur_len & (n-1)); memset(buf + cur_len, 0, n); cur_len += n; } break; case 'b': errno = 0; n = strtoul(optarg, &e, 0); if (errno || (e == optarg) || *e) { fprintf(stderr, "illegal numeric string\n"); usage(); } if (n < cur_len) { fprintf(stderr, "WARNING: current length exceeds -b %d offset\n",(int) n); } else { memset(buf + cur_len, 0, n - cur_len); cur_len = n; } break; case 'x': errno = 0; n2 = strtol(optarg, &e, 0); if (errno || (e == optarg) || *e) { fprintf(stderr, "illegal numeric string\n"); usage(); } if (n2 < 0) { if (-n2 > cur_len) { fprintf(stderr, "WARNING: current length smaller then -x %d offset\n",(int) n2); cur_len = 0; } else cur_len += n2; } else { memset(buf + cur_len, 0, n2); cur_len += n2; } break; default: usage(); } } p->flag_version = STORE32_LE((trx_version << 16)); if (!in) { fprintf(stderr, "we require atleast one filename\n"); usage(); } #undef ROUND #define ROUND 0x1000 n = cur_len & (ROUND-1); if (n) { memset(buf + cur_len, 0, ROUND - n); cur_len += ROUND - n; } /* for TRXv2 set bin-header Flags to 0xFF for CRC calculation like CFE does */ if (trx_version == 2) { if(cur_len - LOAD32_LE(p->offsets[3]) < sizeof(binheader)) { fprintf(stderr, "TRXv2 binheader too small!\n"); return EXIT_FAILURE; } memcpy(binheader, buf + LOAD32_LE(p->offsets[3]), sizeof(binheader)); /* save header */ memset(buf + LOAD32_LE(p->offsets[3]) + 22, 0xFF, 8); /* set stable and try1-3 to 0xFF */ } p->crc32 = crc32buf((char *) &p->flag_version, ((fsmark)?fsmark:cur_len) - offsetof(struct trx_header, flag_version)); p->crc32 = STORE32_LE(p->crc32); p->len = STORE32_LE((fsmark) ? fsmark : cur_len); /* restore TRXv2 bin-header */ if (trx_version == 2) { memcpy(buf + LOAD32_LE(p->offsets[3]), binheader, sizeof(binheader)); } if (!fwrite(buf, cur_len, 1, out) || fflush(out)) { fprintf(stderr, "fwrite failed\n"); return EXIT_FAILURE; } fclose(out); return EXIT_SUCCESS; } /**********************************************************************/ /* The following was grabbed and tweaked from the old snippets collection * of public domain C code. */ /**********************************************************************\ |* Demonstration program to compute the 32-bit CRC used as the frame *| |* check sequence in ADCCP (ANSI X3.66, also known as FIPS PUB 71 *| |* and FED-STD-1003, the U.S. versions of CCITT's X.25 link-level *| |* protocol). The 32-bit FCS was added via the Federal Register, *| |* 1 June 1982, p.23798. I presume but don't know for certain that *| |* this polynomial is or will be included in CCITT V.41, which *| |* defines the 16-bit CRC (often called CRC-CCITT) polynomial. FIPS *| |* PUB 78 says that the 32-bit FCS reduces otherwise undetected *| |* errors by a factor of 10^-5 over 16-bit FCS. *| \**********************************************************************/ /* Copyright (C) 1986 Gary S. Brown. You may use this program, or code or tables extracted from it, as desired without restriction.*/ /* First, the polynomial itself and its table of feedback terms. The */ /* polynomial is */ /* X^32+X^26+X^23+X^22+X^16+X^12+X^11+X^10+X^8+X^7+X^5+X^4+X^2+X^1+X^0 */ /* Note that we take it "backwards" and put the highest-order term in */ /* the lowest-order bit. The X^32 term is "implied"; the LSB is the */ /* X^31 term, etc. The X^0 term (usually shown as "+1") results in */ /* the MSB being 1. */ /* Note that the usual hardware shift register implementation, which */ /* is what we're using (we're merely optimizing it by doing eight-bit */ /* chunks at a time) shifts bits into the lowest-order term. In our */ /* implementation, that means shifting towards the right. Why do we */ /* do it this way? Because the calculated CRC must be transmitted in */ /* order from highest-order term to lowest-order term. UARTs transmit */ /* characters in order from LSB to MSB. By storing the CRC this way, */ /* we hand it to the UART in the order low-byte to high-byte; the UART */ /* sends each low-bit to hight-bit; and the result is transmission bit */ /* by bit from highest- to lowest-order term without requiring any bit */ /* shuffling on our part. Reception works similarly. */ /* The feedback terms table consists of 256, 32-bit entries. Notes: */ /* */ /* 1. The table can be generated at runtime if desired; code to do so */ /* is shown later. It might not be obvious, but the feedback */ /* terms simply represent the results of eight shift/xor opera- */ /* tions for all combinations of data and CRC register values. */ /* */ /* 2. The CRC accumulation logic is the same for all CRC polynomials, */ /* be they sixteen or thirty-two bits wide. You simply choose the */ /* appropriate table. Alternatively, because the table can be */ /* generated at runtime, you can start by generating the table for */ /* the polynomial in question and use exactly the same "updcrc", */ /* if your application needn't simultaneously handle two CRC */ /* polynomials. (Note, however, that XMODEM is strange.) */ /* */ /* 3. For 16-bit CRCs, the table entries need be only 16 bits wide; */ /* of course, 32-bit entries work OK if the high 16 bits are zero. */ /* */ /* 4. The values must be right-shifted by eight bits by the "updcrc" */ /* logic; the shift must be unsigned (bring in zeroes). On some */ /* hardware you could probably optimize the shift in assembler by */ /* using byte-swap instructions. */ static const uint32_t crc_32_tab[] = { /* CRC polynomial 0xedb88320 */ 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f, 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9, 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d }; #define UPDC32(octet,crc) (crc_32_tab[((crc) ^ (octet)) & 0xff] ^ ((crc) >> 8)) uint32_t crc32buf(char *buf, size_t len) { uint32_t crc; crc = 0xFFFFFFFF; for ( ; len; --len, ++buf) { crc = UPDC32(*buf, crc); } return crc; }
{ "pile_set_name": "Github" }
/* SPDX-License-Identifier: GPL-2.0 */ /* * mtu3.h - MediaTek USB3 DRD header * * Copyright (C) 2016 MediaTek Inc. * * Author: Chunfeng Yun <[email protected]> */ #ifndef __MTU3_H__ #define __MTU3_H__ #include <linux/device.h> #include <linux/dmapool.h> #include <linux/extcon.h> #include <linux/interrupt.h> #include <linux/list.h> #include <linux/phy/phy.h> #include <linux/regulator/consumer.h> #include <linux/usb.h> #include <linux/usb/ch9.h> #include <linux/usb/gadget.h> #include <linux/usb/otg.h> struct mtu3; struct mtu3_ep; struct mtu3_request; #include "mtu3_hw_regs.h" #include "mtu3_qmu.h" #define MU3D_EP_TXCR0(epnum) (U3D_TX1CSR0 + (((epnum) - 1) * 0x10)) #define MU3D_EP_TXCR1(epnum) (U3D_TX1CSR1 + (((epnum) - 1) * 0x10)) #define MU3D_EP_TXCR2(epnum) (U3D_TX1CSR2 + (((epnum) - 1) * 0x10)) #define MU3D_EP_RXCR0(epnum) (U3D_RX1CSR0 + (((epnum) - 1) * 0x10)) #define MU3D_EP_RXCR1(epnum) (U3D_RX1CSR1 + (((epnum) - 1) * 0x10)) #define MU3D_EP_RXCR2(epnum) (U3D_RX1CSR2 + (((epnum) - 1) * 0x10)) #define USB_QMU_TQHIAR(epnum) (U3D_TXQHIAR1 + (((epnum) - 1) * 0x4)) #define USB_QMU_RQHIAR(epnum) (U3D_RXQHIAR1 + (((epnum) - 1) * 0x4)) #define USB_QMU_RQCSR(epnum) (U3D_RXQCSR1 + (((epnum) - 1) * 0x10)) #define USB_QMU_RQSAR(epnum) (U3D_RXQSAR1 + (((epnum) - 1) * 0x10)) #define USB_QMU_RQCPR(epnum) (U3D_RXQCPR1 + (((epnum) - 1) * 0x10)) #define USB_QMU_TQCSR(epnum) (U3D_TXQCSR1 + (((epnum) - 1) * 0x10)) #define USB_QMU_TQSAR(epnum) (U3D_TXQSAR1 + (((epnum) - 1) * 0x10)) #define USB_QMU_TQCPR(epnum) (U3D_TXQCPR1 + (((epnum) - 1) * 0x10)) #define SSUSB_U3_CTRL(p) (U3D_SSUSB_U3_CTRL_0P + ((p) * 0x08)) #define SSUSB_U2_CTRL(p) (U3D_SSUSB_U2_CTRL_0P + ((p) * 0x08)) #define MTU3_DRIVER_NAME "mtu3" #define DMA_ADDR_INVALID (~(dma_addr_t)0) #define MTU3_EP_ENABLED BIT(0) #define MTU3_EP_STALL BIT(1) #define MTU3_EP_WEDGE BIT(2) #define MTU3_EP_BUSY BIT(3) #define MTU3_U3_IP_SLOT_DEFAULT 2 #define MTU3_U2_IP_SLOT_DEFAULT 1 /** * IP TRUNK version * from 0x1003 version, USB3 Gen2 is supported, two changes affect driver: * 1. MAXPKT and MULTI bits layout of TXCSR1 and RXCSR1 are adjusted, * but not backward compatible * 2. QMU extend buffer length supported */ #define MTU3_TRUNK_VERS_1003 0x1003 /** * Normally the device works on HS or SS, to simplify fifo management, * devide fifo into some 512B parts, use bitmap to manage it; And * 128 bits size of bitmap is large enough, that means it can manage * up to 64KB fifo size. * NOTE: MTU3_EP_FIFO_UNIT should be power of two */ #define MTU3_EP_FIFO_UNIT (1 << 9) #define MTU3_FIFO_BIT_SIZE 128 #define MTU3_U2_IP_EP0_FIFO_SIZE 64 /** * Maximum size of ep0 response buffer for ch9 requests, * the SET_SEL request uses 6 so far, and GET_STATUS is 2 */ #define EP0_RESPONSE_BUF 6 /* device operated link and speed got from DEVICE_CONF register */ enum mtu3_speed { MTU3_SPEED_INACTIVE = 0, MTU3_SPEED_FULL = 1, MTU3_SPEED_HIGH = 3, MTU3_SPEED_SUPER = 4, MTU3_SPEED_SUPER_PLUS = 5, }; /** * @MU3D_EP0_STATE_SETUP: waits for SETUP or received a SETUP * without data stage. * @MU3D_EP0_STATE_TX: IN data stage * @MU3D_EP0_STATE_RX: OUT data stage * @MU3D_EP0_STATE_TX_END: the last IN data is transferred, and * waits for its completion interrupt * @MU3D_EP0_STATE_STALL: ep0 is in stall status, will be auto-cleared * after receives a SETUP. */ enum mtu3_g_ep0_state { MU3D_EP0_STATE_SETUP = 1, MU3D_EP0_STATE_TX, MU3D_EP0_STATE_RX, MU3D_EP0_STATE_TX_END, MU3D_EP0_STATE_STALL, }; /** * MTU3_DR_FORCE_NONE: automatically switch host and periperal mode * by IDPIN signal. * MTU3_DR_FORCE_HOST: force to enter host mode and override OTG * IDPIN signal. * MTU3_DR_FORCE_DEVICE: force to enter peripheral mode. */ enum mtu3_dr_force_mode { MTU3_DR_FORCE_NONE = 0, MTU3_DR_FORCE_HOST, MTU3_DR_FORCE_DEVICE, }; /** * @base: the base address of fifo * @limit: the bitmap size in bits * @bitmap: fifo bitmap in unit of @MTU3_EP_FIFO_UNIT */ struct mtu3_fifo_info { u32 base; u32 limit; DECLARE_BITMAP(bitmap, MTU3_FIFO_BIT_SIZE); }; /** * General Purpose Descriptor (GPD): * The format of TX GPD is a little different from RX one. * And the size of GPD is 16 bytes. * * @dw0_info: * bit0: Hardware Own (HWO) * bit1: Buffer Descriptor Present (BDP), always 0, BD is not supported * bit2: Bypass (BPS), 1: HW skips this GPD if HWO = 1 * bit6: [EL] Zero Length Packet (ZLP), moved from @dw3_info[29] * bit7: Interrupt On Completion (IOC) * bit[31:16]: ([EL] bit[31:12]) allow data buffer length (RX ONLY), * the buffer length of the data to receive * bit[23:16]: ([EL] bit[31:24]) extension address (TX ONLY), * lower 4 bits are extension bits of @buffer, * upper 4 bits are extension bits of @next_gpd * @next_gpd: Physical address of the next GPD * @buffer: Physical address of the data buffer * @dw3_info: * bit[15:0]: ([EL] bit[19:0]) data buffer length, * (TX): the buffer length of the data to transmit * (RX): The total length of data received * bit[23:16]: ([EL] bit[31:24]) extension address (RX ONLY), * lower 4 bits are extension bits of @buffer, * upper 4 bits are extension bits of @next_gpd * bit29: ([EL] abandoned) Zero Length Packet (ZLP) (TX ONLY) */ struct qmu_gpd { __le32 dw0_info; __le32 next_gpd; __le32 buffer; __le32 dw3_info; } __packed; /** * dma: physical base address of GPD segment * start: virtual base address of GPD segment * end: the last GPD element * enqueue: the first empty GPD to use * dequeue: the first completed GPD serviced by ISR * NOTE: the size of GPD ring should be >= 2 */ struct mtu3_gpd_ring { dma_addr_t dma; struct qmu_gpd *start; struct qmu_gpd *end; struct qmu_gpd *enqueue; struct qmu_gpd *dequeue; }; /** * @vbus: vbus 5V used by host mode * @edev: external connector used to detect vbus and iddig changes * @vbus_nb: notifier for vbus detection * @vbus_work : work of vbus detection notifier, used to avoid sleep in * notifier callback which is atomic context * @vbus_event : event of vbus detecion notifier * @id_nb : notifier for iddig(idpin) detection * @id_work : work of iddig detection notifier * @id_event : event of iddig detecion notifier * @role_sw : use USB Role Switch to support dual-role switch, can't use * extcon at the same time, and extcon is deprecated. * @role_sw_used : true when the USB Role Switch is used. * @is_u3_drd: whether port0 supports usb3.0 dual-role device or not * @manual_drd_enabled: it's true when supports dual-role device by debugfs * to switch host/device modes depending on user input. */ struct otg_switch_mtk { struct regulator *vbus; struct extcon_dev *edev; struct notifier_block vbus_nb; struct work_struct vbus_work; unsigned long vbus_event; struct notifier_block id_nb; struct work_struct id_work; unsigned long id_event; struct usb_role_switch *role_sw; bool role_sw_used; bool is_u3_drd; bool manual_drd_enabled; }; /** * @mac_base: register base address of device MAC, exclude xHCI's * @ippc_base: register base address of IP Power and Clock interface (IPPC) * @vusb33: usb3.3V shared by device/host IP * @sys_clk: system clock of mtu3, shared by device/host IP * @ref_clk: reference clock * @mcu_clk: mcu_bus_ck clock for AHB bus etc * @dma_clk: dma_bus_ck clock for AXI bus etc * @dr_mode: works in which mode: * host only, device only or dual-role mode * @u2_ports: number of usb2.0 host ports * @u3_ports: number of usb3.0 host ports * @u3p_dis_msk: mask of disabling usb3 ports, for example, bit0==1 to * disable u3port0, bit1==1 to disable u3port1,... etc * @dbgfs_root: only used when supports manual dual-role switch via debugfs * @uwk_en: it's true when supports remote wakeup in host mode * @uwk: syscon including usb wakeup glue layer between SSUSB IP and SPM * @uwk_reg_base: the base address of the wakeup glue layer in @uwk * @uwk_vers: the version of the wakeup glue layer */ struct ssusb_mtk { struct device *dev; struct mtu3 *u3d; void __iomem *mac_base; void __iomem *ippc_base; struct phy **phys; int num_phys; /* common power & clock */ struct regulator *vusb33; struct clk *sys_clk; struct clk *ref_clk; struct clk *mcu_clk; struct clk *dma_clk; /* otg */ struct otg_switch_mtk otg_switch; enum usb_dr_mode dr_mode; bool is_host; int u2_ports; int u3_ports; int u3p_dis_msk; struct dentry *dbgfs_root; /* usb wakeup for host mode */ bool uwk_en; struct regmap *uwk; u32 uwk_reg_base; u32 uwk_vers; }; /** * @fifo_size: it is (@slot + 1) * @fifo_seg_size * @fifo_seg_size: it is roundup_pow_of_two(@maxp) */ struct mtu3_ep { struct usb_ep ep; char name[12]; struct mtu3 *mtu; u8 epnum; u8 type; u8 is_in; u16 maxp; int slot; u32 fifo_size; u32 fifo_addr; u32 fifo_seg_size; struct mtu3_fifo_info *fifo; struct list_head req_list; struct mtu3_gpd_ring gpd_ring; const struct usb_ss_ep_comp_descriptor *comp_desc; const struct usb_endpoint_descriptor *desc; int flags; }; struct mtu3_request { struct usb_request request; struct list_head list; struct mtu3_ep *mep; struct mtu3 *mtu; struct qmu_gpd *gpd; int epnum; }; static inline struct ssusb_mtk *dev_to_ssusb(struct device *dev) { return dev_get_drvdata(dev); } /** * struct mtu3 - device driver instance data. * @slot: MTU3_U2_IP_SLOT_DEFAULT for U2 IP only, * MTU3_U3_IP_SLOT_DEFAULT for U3 IP * @may_wakeup: means device's remote wakeup is enabled * @is_self_powered: is reported in device status and the config descriptor * @delayed_status: true when function drivers ask for delayed status * @gen2cp: compatible with USB3 Gen2 IP * @ep0_req: dummy request used while handling standard USB requests * for GET_STATUS and SET_SEL * @setup_buf: ep0 response buffer for GET_STATUS and SET_SEL requests */ struct mtu3 { spinlock_t lock; struct ssusb_mtk *ssusb; struct device *dev; void __iomem *mac_base; void __iomem *ippc_base; int irq; struct mtu3_fifo_info tx_fifo; struct mtu3_fifo_info rx_fifo; struct mtu3_ep *ep_array; struct mtu3_ep *in_eps; struct mtu3_ep *out_eps; struct mtu3_ep *ep0; int num_eps; int slot; int active_ep; struct dma_pool *qmu_gpd_pool; enum mtu3_g_ep0_state ep0_state; struct usb_gadget g; /* the gadget */ struct usb_gadget_driver *gadget_driver; struct mtu3_request ep0_req; u8 setup_buf[EP0_RESPONSE_BUF]; enum usb_device_speed max_speed; enum usb_device_speed speed; unsigned is_active:1; unsigned may_wakeup:1; unsigned is_self_powered:1; unsigned test_mode:1; unsigned softconnect:1; unsigned u1_enable:1; unsigned u2_enable:1; unsigned is_u3_ip:1; unsigned delayed_status:1; unsigned gen2cp:1; u8 address; u8 test_mode_nr; u32 hw_version; }; static inline struct mtu3 *gadget_to_mtu3(struct usb_gadget *g) { return container_of(g, struct mtu3, g); } static inline int is_first_entry(const struct list_head *list, const struct list_head *head) { return list_is_last(head, list); } static inline struct mtu3_request *to_mtu3_request(struct usb_request *req) { return req ? container_of(req, struct mtu3_request, request) : NULL; } static inline struct mtu3_ep *to_mtu3_ep(struct usb_ep *ep) { return ep ? container_of(ep, struct mtu3_ep, ep) : NULL; } static inline struct mtu3_request *next_request(struct mtu3_ep *mep) { return list_first_entry_or_null(&mep->req_list, struct mtu3_request, list); } static inline void mtu3_writel(void __iomem *base, u32 offset, u32 data) { writel(data, base + offset); } static inline u32 mtu3_readl(void __iomem *base, u32 offset) { return readl(base + offset); } static inline void mtu3_setbits(void __iomem *base, u32 offset, u32 bits) { void __iomem *addr = base + offset; u32 tmp = readl(addr); writel((tmp | (bits)), addr); } static inline void mtu3_clrbits(void __iomem *base, u32 offset, u32 bits) { void __iomem *addr = base + offset; u32 tmp = readl(addr); writel((tmp & ~(bits)), addr); } int ssusb_check_clocks(struct ssusb_mtk *ssusb, u32 ex_clks); struct usb_request *mtu3_alloc_request(struct usb_ep *ep, gfp_t gfp_flags); void mtu3_free_request(struct usb_ep *ep, struct usb_request *req); void mtu3_req_complete(struct mtu3_ep *mep, struct usb_request *req, int status); int mtu3_config_ep(struct mtu3 *mtu, struct mtu3_ep *mep, int interval, int burst, int mult); void mtu3_deconfig_ep(struct mtu3 *mtu, struct mtu3_ep *mep); void mtu3_ep_stall_set(struct mtu3_ep *mep, bool set); void mtu3_ep0_setup(struct mtu3 *mtu); void mtu3_start(struct mtu3 *mtu); void mtu3_stop(struct mtu3 *mtu); void mtu3_dev_on_off(struct mtu3 *mtu, int is_on); void mtu3_set_speed(struct mtu3 *mtu, enum usb_device_speed speed); int mtu3_gadget_setup(struct mtu3 *mtu); void mtu3_gadget_cleanup(struct mtu3 *mtu); void mtu3_gadget_reset(struct mtu3 *mtu); void mtu3_gadget_suspend(struct mtu3 *mtu); void mtu3_gadget_resume(struct mtu3 *mtu); void mtu3_gadget_disconnect(struct mtu3 *mtu); irqreturn_t mtu3_ep0_isr(struct mtu3 *mtu); extern const struct usb_ep_ops mtu3_ep0_ops; #endif
{ "pile_set_name": "Github" }
executable: gradle args: clean instrumentClasses expected-output-file: incrementalBuildInputFilesConfig.out allow-additional-output: true allow-disordered-output: true
{ "pile_set_name": "Github" }
use specs::prelude::*; use super::{WantsToPickupItem, Name, InBackpack, Position, gamelog::GameLog, WantsToUseItem, Consumable, ProvidesHealing, CombatStats, WantsToDropItem, InflictsDamage, Map, SufferDamage, AreaOfEffect, Confusion, Equippable, Equipped, WantsToRemoveItem, particle_system::ParticleBuilder, ProvidesFood, HungerClock, HungerState, MagicMapper, RunState}; pub struct ItemCollectionSystem {} impl<'a> System<'a> for ItemCollectionSystem { #[allow(clippy::type_complexity)] type SystemData = ( ReadExpect<'a, Entity>, WriteExpect<'a, GameLog>, WriteStorage<'a, WantsToPickupItem>, WriteStorage<'a, Position>, ReadStorage<'a, Name>, WriteStorage<'a, InBackpack> ); fn run(&mut self, data : Self::SystemData) { let (player_entity, mut gamelog, mut wants_pickup, mut positions, names, mut backpack) = data; for pickup in wants_pickup.join() { positions.remove(pickup.item); backpack.insert(pickup.item, InBackpack{ owner: pickup.collected_by }).expect("Unable to insert backpack entry"); if pickup.collected_by == *player_entity { gamelog.entries.push(format!("You pick up the {}.", names.get(pickup.item).unwrap().name)); } } wants_pickup.clear(); } } pub struct ItemUseSystem {} impl<'a> System<'a> for ItemUseSystem { #[allow(clippy::type_complexity)] type SystemData = ( ReadExpect<'a, Entity>, WriteExpect<'a, GameLog>, WriteExpect<'a, Map>, Entities<'a>, WriteStorage<'a, WantsToUseItem>, ReadStorage<'a, Name>, ReadStorage<'a, Consumable>, ReadStorage<'a, ProvidesHealing>, ReadStorage<'a, InflictsDamage>, WriteStorage<'a, CombatStats>, WriteStorage<'a, SufferDamage>, ReadStorage<'a, AreaOfEffect>, WriteStorage<'a, Confusion>, ReadStorage<'a, Equippable>, WriteStorage<'a, Equipped>, WriteStorage<'a, InBackpack>, WriteExpect<'a, ParticleBuilder>, ReadStorage<'a, Position>, ReadStorage<'a, ProvidesFood>, WriteStorage<'a, HungerClock>, ReadStorage<'a, MagicMapper>, WriteExpect<'a, RunState> ); #[allow(clippy::cognitive_complexity)] fn run(&mut self, data : Self::SystemData) { let (player_entity, mut gamelog, map, entities, mut wants_use, names, consumables, healing, inflict_damage, mut combat_stats, mut suffer_damage, aoe, mut confused, equippable, mut equipped, mut backpack, mut particle_builder, positions, provides_food, mut hunger_clocks, magic_mapper, mut runstate) = data; for (entity, useitem) in (&entities, &wants_use).join() { let mut used_item = true; // Targeting let mut targets : Vec<Entity> = Vec::new(); match useitem.target { None => { targets.push( *player_entity ); } Some(target) => { let area_effect = aoe.get(useitem.item); match area_effect { None => { // Single target in tile let idx = map.xy_idx(target.x, target.y); for mob in map.tile_content[idx].iter() { targets.push(*mob); } } Some(area_effect) => { // AoE let mut blast_tiles = rltk::field_of_view(target, area_effect.radius, &*map); blast_tiles.retain(|p| p.x > 0 && p.x < map.width-1 && p.y > 0 && p.y < map.height-1 ); for tile_idx in blast_tiles.iter() { let idx = map.xy_idx(tile_idx.x, tile_idx.y); for mob in map.tile_content[idx].iter() { targets.push(*mob); } particle_builder.request(tile_idx.x, tile_idx.y, rltk::RGB::named(rltk::ORANGE), rltk::RGB::named(rltk::BLACK), rltk::to_cp437('░'), 200.0); } } } } } // If it is equippable, then we want to equip it - and unequip whatever else was in that slot let item_equippable = equippable.get(useitem.item); match item_equippable { None => {} Some(can_equip) => { let target_slot = can_equip.slot; let target = targets[0]; // Remove any items the target has in the item's slot let mut to_unequip : Vec<Entity> = Vec::new(); for (item_entity, already_equipped, name) in (&entities, &equipped, &names).join() { if already_equipped.owner == target && already_equipped.slot == target_slot { to_unequip.push(item_entity); if target == *player_entity { gamelog.entries.push(format!("You unequip {}.", name.name)); } } } for item in to_unequip.iter() { equipped.remove(*item); backpack.insert(*item, InBackpack{ owner: target }).expect("Unable to insert backpack entry"); } // Wield the item equipped.insert(useitem.item, Equipped{ owner: target, slot: target_slot }).expect("Unable to insert equipped component"); backpack.remove(useitem.item); if target == *player_entity { gamelog.entries.push(format!("You equip {}.", names.get(useitem.item).unwrap().name)); } } } // It it is edible, eat it! let item_edible = provides_food.get(useitem.item); match item_edible { None => {} Some(_) => { used_item = true; let target = targets[0]; let hc = hunger_clocks.get_mut(target); if let Some(hc) = hc { hc.state = HungerState::WellFed; hc.duration = 20; gamelog.entries.push(format!("You eat the {}.", names.get(useitem.item).unwrap().name)); } } } // If its a magic mapper... let is_mapper = magic_mapper.get(useitem.item); match is_mapper { None => {} Some(_) => { used_item = true; gamelog.entries.push("The map is revealed to you!".to_string()); *runstate = RunState::MagicMapReveal{ row : 0}; } } // If it heals, apply the healing let item_heals = healing.get(useitem.item); match item_heals { None => {} Some(healer) => { used_item = false; for target in targets.iter() { let stats = combat_stats.get_mut(*target); if let Some(stats) = stats { stats.hp = i32::min(stats.max_hp, stats.hp + healer.heal_amount); if entity == *player_entity { gamelog.entries.push(format!("You use the {}, healing {} hp.", names.get(useitem.item).unwrap().name, healer.heal_amount)); } used_item = true; let pos = positions.get(*target); if let Some(pos) = pos { particle_builder.request(pos.x, pos.y, rltk::RGB::named(rltk::GREEN), rltk::RGB::named(rltk::BLACK), rltk::to_cp437('♥'), 200.0); } } } } } // If it inflicts damage, apply it to the target cell let item_damages = inflict_damage.get(useitem.item); match item_damages { None => {} Some(damage) => { used_item = false; for mob in targets.iter() { SufferDamage::new_damage(&mut suffer_damage, *mob, damage.damage); if entity == *player_entity { let mob_name = names.get(*mob).unwrap(); let item_name = names.get(useitem.item).unwrap(); gamelog.entries.push(format!("You use {} on {}, inflicting {} hp.", item_name.name, mob_name.name, damage.damage)); let pos = positions.get(*mob); if let Some(pos) = pos { particle_builder.request(pos.x, pos.y, rltk::RGB::named(rltk::RED), rltk::RGB::named(rltk::BLACK), rltk::to_cp437('‼'), 200.0); } } used_item = true; } } } // Can it pass along confusion? Note the use of scopes to escape from the borrow checker! let mut add_confusion = Vec::new(); { let causes_confusion = confused.get(useitem.item); match causes_confusion { None => {} Some(confusion) => { used_item = false; for mob in targets.iter() { add_confusion.push((*mob, confusion.turns )); if entity == *player_entity { let mob_name = names.get(*mob).unwrap(); let item_name = names.get(useitem.item).unwrap(); gamelog.entries.push(format!("You use {} on {}, confusing them.", item_name.name, mob_name.name)); let pos = positions.get(*mob); if let Some(pos) = pos { particle_builder.request(pos.x, pos.y, rltk::RGB::named(rltk::MAGENTA), rltk::RGB::named(rltk::BLACK), rltk::to_cp437('?'), 200.0); } } } } } } for mob in add_confusion.iter() { confused.insert(mob.0, Confusion{ turns: mob.1 }).expect("Unable to insert status"); } // If its a consumable, we delete it on use if used_item { let consumable = consumables.get(useitem.item); match consumable { None => {} Some(_) => { entities.delete(useitem.item).expect("Delete failed"); } } } } wants_use.clear(); } } pub struct ItemDropSystem {} impl<'a> System<'a> for ItemDropSystem { #[allow(clippy::type_complexity)] type SystemData = ( ReadExpect<'a, Entity>, WriteExpect<'a, GameLog>, Entities<'a>, WriteStorage<'a, WantsToDropItem>, ReadStorage<'a, Name>, WriteStorage<'a, Position>, WriteStorage<'a, InBackpack> ); fn run(&mut self, data : Self::SystemData) { let (player_entity, mut gamelog, entities, mut wants_drop, names, mut positions, mut backpack) = data; for (entity, to_drop) in (&entities, &wants_drop).join() { let mut dropper_pos : Position = Position{x:0, y:0}; { let dropped_pos = positions.get(entity).unwrap(); dropper_pos.x = dropped_pos.x; dropper_pos.y = dropped_pos.y; } positions.insert(to_drop.item, Position{ x : dropper_pos.x, y : dropper_pos.y }).expect("Unable to insert position"); backpack.remove(to_drop.item); if entity == *player_entity { gamelog.entries.push(format!("You drop the {}.", names.get(to_drop.item).unwrap().name)); } } wants_drop.clear(); } } pub struct ItemRemoveSystem {} impl<'a> System<'a> for ItemRemoveSystem { #[allow(clippy::type_complexity)] type SystemData = ( Entities<'a>, WriteStorage<'a, WantsToRemoveItem>, WriteStorage<'a, Equipped>, WriteStorage<'a, InBackpack> ); fn run(&mut self, data : Self::SystemData) { let (entities, mut wants_remove, mut equipped, mut backpack) = data; for (entity, to_remove) in (&entities, &wants_remove).join() { equipped.remove(to_remove.item); backpack.insert(to_remove.item, InBackpack{ owner: entity }).expect("Unable to insert backpack"); } wants_remove.clear(); } }
{ "pile_set_name": "Github" }
package com.mimieye.odd.srp.service; import com.mimieye.odd.srp.util.FileReadUtil; import java.io.IOException; import java.util.*; /** * Created by Pierreluo on 2017/6/15. */ public interface PromotionInfoService { List<Map<String, String>> listPromotions(String filePath) throws Exception; }
{ "pile_set_name": "Github" }
{ "extends": ["tslint-config-airbnb"], "rules": { /* modifications to base config */ // adds statements, members, and elements to the base config "align": [true, "parameters", "arguments", "statements", "members", "elements"], // adds number of spaces so auto-fixing will work "indent": [true, "spaces", 2], // increase value from 100 in base config to 120 "max-line-length": [true, 120], // adds avoid-escape and avoid-template "quotemark": [true, "single", "avoid-escape", "avoid-template"], // adds ban-keywords and allow-leading-underscores // once this gets implemented, we should incorporate it: https://github.com/palantir/tslint/issues/3442 "variable-name": [true, "ban-keywords", "check-format", "allow-leading-underscore"], // adds check-module, check-type, check-rest-spread, check-typecast, check-type-operator "whitespace": [true, "check-branch", "check-decl", "check-operator", "check-preblock", "check-type", "check-module", "check-separator", "check-rest-spread", "check-typecast", "check-type-operator" ], /* not used in base config */ "await-promise": true, "ban-comma-operator": true, // Disabling the following rule because of https://github.com/palantir/tslint/issues/4493 // "completed-docs": true, "interface-over-type-literal": true, "jsdoc-format": [true, "check-multiline-start"], "member-access": [true, "check-accessor"], "no-duplicate-imports": true, "no-duplicate-switch-case": true, "no-duplicate-variable": true, "no-dynamic-delete": true, "no-empty": true, "no-floating-promises": true, "no-for-in-array": true, "no-implicit-dependencies": true, "no-object-literal-type-assertion": true, "no-redundant-jsdoc": true, "no-require-imports": true, "no-return-await": true, "no-submodule-imports": true, "no-this-assignment": true, "no-unused-expression": true, "no-var-requires": true, "one-line": [true, "check-else", "check-whitespace", "check-open-brace", "check-catch", "check-finally"], "strict-boolean-expressions": [true, "allow-boolean-or-undefined"], "typedef": [true, "call-signature"], "typedef-whitespace": [true, { "call-signature": "nospace", "index-signature": "nospace", "parameter": "nospace", "property-declaration": "nospace", "variable-declaration": "nospace" }] // TODO: find a rule similar to https://palantir.github.io/tslint/rules/no-construct/, except it bans those types // from interfaces (e.g. a function that returns Boolean is an error, it should return boolean) }, "linterOptions": { "format": "verbose" } }
{ "pile_set_name": "Github" }
// Copyright (c) 2013 Christopher Kormanyos // Use, modification and distribution are subject to the // Boost Software License, Version 1.0. (See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // // This work is based on an earlier work: // "Algorithm 910: A Portable C++ Multiple-Precision System for Special-Function Calculations", // in ACM TOMS, {VOL 37, ISSUE 4, (February 2011)} (C) ACM, 2011. http://doi.acm.org/10.1145/1916461.1916469 // // This header contains implementation details for estimating the zeros // of the Airy functions airy_ai and airy_bi on the negative real axis. // #ifndef _AIRY_AI_BI_ZERO_2013_01_20_HPP_ #define _AIRY_AI_BI_ZERO_2013_01_20_HPP_ #include <boost/math/constants/constants.hpp> #include <boost/math/special_functions/cbrt.hpp> namespace boost { namespace math { namespace detail { // Forward declarations of the needed Airy function implementations. template <class T, class Policy> T airy_ai_imp(T x, const Policy& pol); template <class T, class Policy> T airy_bi_imp(T x, const Policy& pol); template <class T, class Policy> T airy_ai_prime_imp(T x, const Policy& pol); template <class T, class Policy> T airy_bi_prime_imp(T x, const Policy& pol); namespace airy_zero { template<class T> T equation_as_10_4_105(const T& z) { const T one_over_z (T(1) / z); const T one_over_z_squared(one_over_z * one_over_z); const T z_pow_third (boost::math::cbrt(z)); const T z_pow_two_thirds(z_pow_third * z_pow_third); // Implement the top line of Eq. 10.4.105. const T fz(z_pow_two_thirds * ((((( + (T(162375596875.0) / 334430208UL) * one_over_z_squared - ( T(108056875.0) / 6967296UL)) * one_over_z_squared + ( T(77125UL) / 82944UL)) * one_over_z_squared - ( T(5U) / 36U)) * one_over_z_squared + ( T(5U) / 48U)) * one_over_z_squared + (1))); return fz; } namespace airy_ai_zero_detail { template<class T> T initial_guess(const int m) { T guess; switch(m) { case 0: { guess = T(0); break; } case 1: { guess = T(-2.33810741045976703849); break; } case 2: { guess = T(-4.08794944413097061664); break; } case 3: { guess = T(-5.52055982809555105913); break; } case 4: { guess = T(-6.78670809007175899878); break; } case 5: { guess = T(-7.94413358712085312314); break; } case 6: { guess = T(-9.02265085334098038016); break; } case 7: { guess = T(-10.0401743415580859306); break; } case 8: { guess = T(-11.0085243037332628932); break; } case 9: { guess = T(-11.9360155632362625170); break; } case 10:{ guess = T(-12.8287767528657572004); break; } default: { const T t(((boost::math::constants::pi<T>() * 3) * ((T(m) * 4) - 1)) / 8); guess = -boost::math::detail::airy_zero::equation_as_10_4_105(t); break; } } return guess; } template<class T, class Policy> class function_object_ai_and_ai_prime { public: function_object_ai_and_ai_prime(const Policy& pol) : my_pol(pol) { } boost::math::tuple<T, T> operator()(const T& x) const { // Return a tuple containing both Ai(x) and Ai'(x). return boost::math::make_tuple( boost::math::detail::airy_ai_imp (x, my_pol), boost::math::detail::airy_ai_prime_imp(x, my_pol)); } private: const Policy& my_pol; const function_object_ai_and_ai_prime& operator=(const function_object_ai_and_ai_prime&); }; } // namespace airy_ai_zero_detail namespace airy_bi_zero_detail { template<class T> T initial_guess(const int m) { T guess; switch(m) { case 0: { guess = T(0); break; } case 1: { guess = T(-1.17371322270912792492); break; } case 2: { guess = T(-3.27109330283635271568); break; } case 3: { guess = T(-4.83073784166201593267); break; } case 4: { guess = T(-6.16985212831025125983); break; } case 5: { guess = T(-7.37676207936776371360); break; } case 6: { guess = T(-8.49194884650938801345); break; } case 7: { guess = T(-9.53819437934623888663); break; } case 8: { guess = T(-10.5299135067053579244); break; } case 9: { guess = T(-11.4769535512787794379); break; } case 10: { guess = T(-12.3864171385827387456); break; } default: { const T t(((boost::math::constants::pi<T>() * 3) * ((T(m) * 4) - 3)) / 8); guess = -boost::math::detail::airy_zero::equation_as_10_4_105(t); break; } } return guess; } template<class T, class Policy> class function_object_bi_and_bi_prime { public: function_object_bi_and_bi_prime(const Policy& pol) : my_pol(pol) { } boost::math::tuple<T, T> operator()(const T& x) const { // Return a tuple containing both Bi(x) and Bi'(x). return boost::math::make_tuple( boost::math::detail::airy_bi_imp (x, my_pol), boost::math::detail::airy_bi_prime_imp(x, my_pol)); } private: const Policy& my_pol; const function_object_bi_and_bi_prime& operator=(const function_object_bi_and_bi_prime&); }; } // namespace airy_bi_zero_detail } // namespace airy_zero } // namespace detail } // namespace math } // namespaces boost #endif // _AIRY_AI_BI_ZERO_2013_01_20_HPP_
{ "pile_set_name": "Github" }
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Author: [email protected] (Kenton Varda) // Based on original Protocol Buffers design by // Sanjay Ghemawat, Jeff Dean, and others. // // Implements parsing of .proto files to FileDescriptorProtos. #ifndef GOOGLE_PROTOBUF_COMPILER_PARSER_H__ #define GOOGLE_PROTOBUF_COMPILER_PARSER_H__ #include <map> #include <string> #include <utility> #include <google/protobuf/descriptor.pb.h> #include <google/protobuf/io/tokenizer.h> #include <google/protobuf/descriptor.h> #include <google/protobuf/repeated_field.h> #include <google/protobuf/port_def.inc> namespace google { namespace protobuf { class Message; namespace compiler { // Defined in this file. class Parser; class SourceLocationTable; // Implements parsing of protocol definitions (such as .proto files). // // Note that most users will be more interested in the Importer class. // Parser is a lower-level class which simply converts a single .proto file // to a FileDescriptorProto. It does not resolve import directives or perform // many other kinds of validation needed to construct a complete // FileDescriptor. class PROTOBUF_EXPORT Parser { public: Parser(); ~Parser(); // Parse the entire input and construct a FileDescriptorProto representing // it. Returns true if no errors occurred, false otherwise. bool Parse(io::Tokenizer* input, FileDescriptorProto* file); // Optional features: // DEPRECATED: New code should use the SourceCodeInfo embedded in the // FileDescriptorProto. // // Requests that locations of certain definitions be recorded to the given // SourceLocationTable while parsing. This can be used to look up exact line // and column numbers for errors reported by DescriptorPool during validation. // Set to NULL (the default) to discard source location information. void RecordSourceLocationsTo(SourceLocationTable* location_table) { source_location_table_ = location_table; } // Requests that errors be recorded to the given ErrorCollector while // parsing. Set to NULL (the default) to discard error messages. void RecordErrorsTo(io::ErrorCollector* error_collector) { error_collector_ = error_collector; } // Returns the identifier used in the "syntax = " declaration, if one was // seen during the last call to Parse(), or the empty string otherwise. const std::string& GetSyntaxIdentifier() { return syntax_identifier_; } // If set true, input files will be required to begin with a syntax // identifier. Otherwise, files may omit this. If a syntax identifier // is provided, it must be 'syntax = "proto2";' and must appear at the // top of this file regardless of whether or not it was required. void SetRequireSyntaxIdentifier(bool value) { require_syntax_identifier_ = value; } // Call SetStopAfterSyntaxIdentifier(true) to tell the parser to stop // parsing as soon as it has seen the syntax identifier, or lack thereof. // This is useful for quickly identifying the syntax of the file without // parsing the whole thing. If this is enabled, no error will be recorded // if the syntax identifier is something other than "proto2" (since // presumably the caller intends to deal with that), but other kinds of // errors (e.g. parse errors) will still be reported. When this is enabled, // you may pass a NULL FileDescriptorProto to Parse(). void SetStopAfterSyntaxIdentifier(bool value) { stop_after_syntax_identifier_ = value; } private: class LocationRecorder; // ================================================================= // Error recovery helpers // Consume the rest of the current statement. This consumes tokens // until it sees one of: // ';' Consumes the token and returns. // '{' Consumes the brace then calls SkipRestOfBlock(). // '}' Returns without consuming. // EOF Returns (can't consume). // The Parser often calls SkipStatement() after encountering a syntax // error. This allows it to go on parsing the following lines, allowing // it to report more than just one error in the file. void SkipStatement(); // Consume the rest of the current block, including nested blocks, // ending after the closing '}' is encountered and consumed, or at EOF. void SkipRestOfBlock(); // ----------------------------------------------------------------- // Single-token consuming helpers // // These make parsing code more readable. // True if the current token is TYPE_END. inline bool AtEnd(); // True if the next token matches the given text. inline bool LookingAt(const char* text); // True if the next token is of the given type. inline bool LookingAtType(io::Tokenizer::TokenType token_type); // If the next token exactly matches the text given, consume it and return // true. Otherwise, return false without logging an error. bool TryConsume(const char* text); // These attempt to read some kind of token from the input. If successful, // they return true. Otherwise they return false and add the given error // to the error list. // Consume a token with the exact text given. bool Consume(const char* text, const char* error); // Same as above, but automatically generates the error "Expected \"text\".", // where "text" is the expected token text. bool Consume(const char* text); // Consume a token of type IDENTIFIER and store its text in "output". bool ConsumeIdentifier(std::string* output, const char* error); // Consume an integer and store its value in "output". bool ConsumeInteger(int* output, const char* error); // Consume a signed integer and store its value in "output". bool ConsumeSignedInteger(int* output, const char* error); // Consume a 64-bit integer and store its value in "output". If the value // is greater than max_value, an error will be reported. bool ConsumeInteger64(uint64 max_value, uint64* output, const char* error); // Consume a number and store its value in "output". This will accept // tokens of either INTEGER or FLOAT type. bool ConsumeNumber(double* output, const char* error); // Consume a string literal and store its (unescaped) value in "output". bool ConsumeString(std::string* output, const char* error); // Consume a token representing the end of the statement. Comments between // this token and the next will be harvested for documentation. The given // LocationRecorder should refer to the declaration that was just parsed; // it will be populated with these comments. // // TODO(kenton): The LocationRecorder is const because historically locations // have been passed around by const reference, for no particularly good // reason. We should probably go through and change them all to mutable // pointer to make this more intuitive. bool TryConsumeEndOfDeclaration(const char* text, const LocationRecorder* location); bool TryConsumeEndOfDeclarationFinishScope(const char* text, const LocationRecorder* location); bool ConsumeEndOfDeclaration(const char* text, const LocationRecorder* location); // ----------------------------------------------------------------- // Error logging helpers // Invokes error_collector_->AddError(), if error_collector_ is not NULL. void AddError(int line, int column, const std::string& error); // Invokes error_collector_->AddError() with the line and column number // of the current token. void AddError(const std::string& error); // Invokes error_collector_->AddWarning() with the line and column number // of the current token. void AddWarning(const string& warning); // Records a location in the SourceCodeInfo.location table (see // descriptor.proto). We use RAII to ensure that the start and end locations // are recorded -- the constructor records the start location and the // destructor records the end location. Since the parser is // recursive-descent, this works out beautifully. class PROTOBUF_EXPORT LocationRecorder { public: // Construct the file's "root" location. LocationRecorder(Parser* parser); // Construct a location that represents a declaration nested within the // given parent. E.g. a field's location is nested within the location // for a message type. The parent's path will be copied, so you should // call AddPath() only to add the path components leading from the parent // to the child (as opposed to leading from the root to the child). LocationRecorder(const LocationRecorder& parent); // Convenience constructors that call AddPath() one or two times. LocationRecorder(const LocationRecorder& parent, int path1); LocationRecorder(const LocationRecorder& parent, int path1, int path2); // Creates a recorder that generates locations into given source code info. LocationRecorder(const LocationRecorder& parent, int path1, SourceCodeInfo* source_code_info); ~LocationRecorder(); // Add a path component. See SourceCodeInfo.Location.path in // descriptor.proto. void AddPath(int path_component); // By default the location is considered to start at the current token at // the time the LocationRecorder is created. StartAt() sets the start // location to the given token instead. void StartAt(const io::Tokenizer::Token& token); // Start at the same location as some other LocationRecorder. void StartAt(const LocationRecorder& other); // By default the location is considered to end at the previous token at // the time the LocationRecorder is destroyed. EndAt() sets the end // location to the given token instead. void EndAt(const io::Tokenizer::Token& token); // Records the start point of this location to the SourceLocationTable that // was passed to RecordSourceLocationsTo(), if any. SourceLocationTable // is an older way of keeping track of source locations which is still // used in some places. void RecordLegacyLocation( const Message* descriptor, DescriptorPool::ErrorCollector::ErrorLocation location); void RecordLegacyImportLocation(const Message* descriptor, const string& name); // Returns the number of path components in the recorder's current location. int CurrentPathSize() const; // Attaches leading and trailing comments to the location. The two strings // will be swapped into place, so after this is called *leading and // *trailing will be empty. // // TODO(kenton): See comment on TryConsumeEndOfDeclaration(), above, for // why this is const. void AttachComments(std::string* leading, std::string* trailing, std::vector<std::string>* detached_comments) const; private: // Indexes of parent and current location in the parent // SourceCodeInfo.location repeated field. For top-level elements, // parent_index_ is -1. Parser* parser_; SourceCodeInfo* source_code_info_; SourceCodeInfo::Location* location_; void Init(const LocationRecorder& parent, SourceCodeInfo* source_code_info); }; // ================================================================= // Parsers for various language constructs // Parses the "syntax = \"proto2\";" line at the top of the file. Returns // false if it failed to parse or if the syntax identifier was not // recognized. bool ParseSyntaxIdentifier(const LocationRecorder& parent); // These methods parse various individual bits of code. They return // false if they completely fail to parse the construct. In this case, // it is probably necessary to skip the rest of the statement to recover. // However, if these methods return true, it does NOT mean that there // were no errors; only that there were no *syntax* errors. For instance, // if a service method is defined using proper syntax but uses a primitive // type as its input or output, ParseMethodField() still returns true // and only reports the error by calling AddError(). In practice, this // makes logic much simpler for the caller. // Parse a top-level message, enum, service, etc. bool ParseTopLevelStatement(FileDescriptorProto* file, const LocationRecorder& root_location); // Parse various language high-level language construrcts. bool ParseMessageDefinition(DescriptorProto* message, const LocationRecorder& message_location, const FileDescriptorProto* containing_file); bool ParseEnumDefinition(EnumDescriptorProto* enum_type, const LocationRecorder& enum_location, const FileDescriptorProto* containing_file); bool ParseServiceDefinition(ServiceDescriptorProto* service, const LocationRecorder& service_location, const FileDescriptorProto* containing_file); bool ParsePackage(FileDescriptorProto* file, const LocationRecorder& root_location, const FileDescriptorProto* containing_file); bool ParseImport(RepeatedPtrField<std::string>* dependency, RepeatedField<int32>* public_dependency, RepeatedField<int32>* weak_dependency, const LocationRecorder& root_location, const FileDescriptorProto* containing_file); // These methods parse the contents of a message, enum, or service type and // add them to the given object. They consume the entire block including // the beginning and ending brace. bool ParseMessageBlock(DescriptorProto* message, const LocationRecorder& message_location, const FileDescriptorProto* containing_file); bool ParseEnumBlock(EnumDescriptorProto* enum_type, const LocationRecorder& enum_location, const FileDescriptorProto* containing_file); bool ParseServiceBlock(ServiceDescriptorProto* service, const LocationRecorder& service_location, const FileDescriptorProto* containing_file); // Parse one statement within a message, enum, or service block, including // final semicolon. bool ParseMessageStatement(DescriptorProto* message, const LocationRecorder& message_location, const FileDescriptorProto* containing_file); bool ParseEnumStatement(EnumDescriptorProto* message, const LocationRecorder& enum_location, const FileDescriptorProto* containing_file); bool ParseServiceStatement(ServiceDescriptorProto* message, const LocationRecorder& service_location, const FileDescriptorProto* containing_file); // Parse a field of a message. If the field is a group, its type will be // added to "messages". // // parent_location and location_field_number_for_nested_type are needed when // parsing groups -- we need to generate a nested message type within the // parent and record its location accordingly. Since the parent could be // either a FileDescriptorProto or a DescriptorProto, we must pass in the // correct field number to use. bool ParseMessageField(FieldDescriptorProto* field, RepeatedPtrField<DescriptorProto>* messages, const LocationRecorder& parent_location, int location_field_number_for_nested_type, const LocationRecorder& field_location, const FileDescriptorProto* containing_file); // Like ParseMessageField() but expects the label has already been filled in // by the caller. bool ParseMessageFieldNoLabel(FieldDescriptorProto* field, RepeatedPtrField<DescriptorProto>* messages, const LocationRecorder& parent_location, int location_field_number_for_nested_type, const LocationRecorder& field_location, const FileDescriptorProto* containing_file); // Parse an "extensions" declaration. bool ParseExtensions(DescriptorProto* message, const LocationRecorder& extensions_location, const FileDescriptorProto* containing_file); // Parse a "reserved" declaration. bool ParseReserved(DescriptorProto* message, const LocationRecorder& message_location); bool ParseReservedNames(DescriptorProto* message, const LocationRecorder& parent_location); bool ParseReservedNumbers(DescriptorProto* message, const LocationRecorder& parent_location); bool ParseReserved(EnumDescriptorProto* message, const LocationRecorder& message_location); bool ParseReservedNames(EnumDescriptorProto* message, const LocationRecorder& parent_location); bool ParseReservedNumbers(EnumDescriptorProto* message, const LocationRecorder& parent_location); // Parse an "extend" declaration. (See also comments for // ParseMessageField().) bool ParseExtend(RepeatedPtrField<FieldDescriptorProto>* extensions, RepeatedPtrField<DescriptorProto>* messages, const LocationRecorder& parent_location, int location_field_number_for_nested_type, const LocationRecorder& extend_location, const FileDescriptorProto* containing_file); // Parse a "oneof" declaration. The caller is responsible for setting // oneof_decl->label() since it will have had to parse the label before it // knew it was parsing a oneof. bool ParseOneof(OneofDescriptorProto* oneof_decl, DescriptorProto* containing_type, int oneof_index, const LocationRecorder& oneof_location, const LocationRecorder& containing_type_location, const FileDescriptorProto* containing_file); // Parse a single enum value within an enum block. bool ParseEnumConstant(EnumValueDescriptorProto* enum_value, const LocationRecorder& enum_value_location, const FileDescriptorProto* containing_file); // Parse enum constant options, i.e. the list in square brackets at the end // of the enum constant value definition. bool ParseEnumConstantOptions(EnumValueDescriptorProto* value, const LocationRecorder& enum_value_location, const FileDescriptorProto* containing_file); // Parse a single method within a service definition. bool ParseServiceMethod(MethodDescriptorProto* method, const LocationRecorder& method_location, const FileDescriptorProto* containing_file); // Parse options of a single method or stream. bool ParseMethodOptions(const LocationRecorder& parent_location, const FileDescriptorProto* containing_file, const int optionsFieldNumber, Message* mutable_options); // Parse "required", "optional", or "repeated" and fill in "label" // with the value. Returns true if such a label is consumed. bool ParseLabel(FieldDescriptorProto::Label* label, const LocationRecorder& field_location, const FileDescriptorProto* containing_file); // Parse a type name and fill in "type" (if it is a primitive) or // "type_name" (if it is not) with the type parsed. bool ParseType(FieldDescriptorProto::Type* type, std::string* type_name); // Parse a user-defined type and fill in "type_name" with the name. // If a primitive type is named, it is treated as an error. bool ParseUserDefinedType(std::string* type_name); // Parses field options, i.e. the stuff in square brackets at the end // of a field definition. Also parses default value. bool ParseFieldOptions(FieldDescriptorProto* field, const LocationRecorder& field_location, const FileDescriptorProto* containing_file); // Parse the "default" option. This needs special handling because its // type is the field's type. bool ParseDefaultAssignment(FieldDescriptorProto* field, const LocationRecorder& field_location, const FileDescriptorProto* containing_file); bool ParseJsonName(FieldDescriptorProto* field, const LocationRecorder& field_location, const FileDescriptorProto* containing_file); enum OptionStyle { OPTION_ASSIGNMENT, // just "name = value" OPTION_STATEMENT // "option name = value;" }; // Parse a single option name/value pair, e.g. "ctype = CORD". The name // identifies a field of the given Message, and the value of that field // is set to the parsed value. bool ParseOption(Message* options, const LocationRecorder& options_location, const FileDescriptorProto* containing_file, OptionStyle style); // Parses a single part of a multipart option name. A multipart name consists // of names separated by dots. Each name is either an identifier or a series // of identifiers separated by dots and enclosed in parentheses. E.g., // "foo.(bar.baz).qux". bool ParseOptionNamePart(UninterpretedOption* uninterpreted_option, const LocationRecorder& part_location, const FileDescriptorProto* containing_file); // Parses a string surrounded by balanced braces. Strips off the outer // braces and stores the enclosed string in *value. // E.g., // { foo } *value gets 'foo' // { foo { bar: box } } *value gets 'foo { bar: box }' // {} *value gets '' // // REQUIRES: LookingAt("{") // When finished successfully, we are looking at the first token past // the ending brace. bool ParseUninterpretedBlock(std::string* value); struct MapField { // Whether the field is a map field. bool is_map_field; // The types of the key and value if they are primitive types. FieldDescriptorProto::Type key_type; FieldDescriptorProto::Type value_type; // Or the type names string if the types are customized types. std::string key_type_name; std::string value_type_name; MapField() : is_map_field(false) {} }; // Desugar the map syntax to generate a nested map entry message. void GenerateMapEntry(const MapField& map_field, FieldDescriptorProto* field, RepeatedPtrField<DescriptorProto>* messages); // Whether fields without label default to optional fields. bool DefaultToOptionalFields() const { return syntax_identifier_ == "proto3"; } bool ValidateEnum(const EnumDescriptorProto* proto); // ================================================================= io::Tokenizer* input_; io::ErrorCollector* error_collector_; SourceCodeInfo* source_code_info_; SourceLocationTable* source_location_table_; // legacy bool had_errors_; bool require_syntax_identifier_; bool stop_after_syntax_identifier_; std::string syntax_identifier_; // Leading doc comments for the next declaration. These are not complete // yet; use ConsumeEndOfDeclaration() to get the complete comments. std::string upcoming_doc_comments_; // Detached comments are not connected to any syntax entities. Elements in // this vector are paragraphs of comments separated by empty lines. The // detached comments will be put into the leading_detached_comments field for // the next element (See SourceCodeInfo.Location in descriptor.proto), when // ConsumeEndOfDeclaration() is called. std::vector<std::string> upcoming_detached_comments_; GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Parser); }; // A table mapping (descriptor, ErrorLocation) pairs -- as reported by // DescriptorPool when validating descriptors -- to line and column numbers // within the original source code. // // This is semi-obsolete: FileDescriptorProto.source_code_info now contains // far more complete information about source locations. However, as of this // writing you still need to use SourceLocationTable when integrating with // DescriptorPool. class PROTOBUF_EXPORT SourceLocationTable { public: SourceLocationTable(); ~SourceLocationTable(); // Finds the precise location of the given error and fills in *line and // *column with the line and column numbers. If not found, sets *line to // -1 and *column to 0 (since line = -1 is used to mean "error has no exact // location" in the ErrorCollector interface). Returns true if found, false // otherwise. bool Find(const Message* descriptor, DescriptorPool::ErrorCollector::ErrorLocation location, int* line, int* column) const; bool FindImport(const Message* descriptor, const string& name, int* line, int* column) const; // Adds a location to the table. void Add(const Message* descriptor, DescriptorPool::ErrorCollector::ErrorLocation location, int line, int column); void AddImport(const Message* descriptor, const string& name, int line, int column); // Clears the contents of the table. void Clear(); private: typedef std::map< std::pair<const Message*, DescriptorPool::ErrorCollector::ErrorLocation>, std::pair<int, int> > LocationMap; LocationMap location_map_; std::map<std::pair<const Message*, string>, std::pair<int, int> > import_location_map_; }; } // namespace compiler } // namespace protobuf } // namespace google #include <google/protobuf/port_undef.inc> #endif // GOOGLE_PROTOBUF_COMPILER_PARSER_H__
{ "pile_set_name": "Github" }
const path = require('path'); const rimraf = require('rimraf'); const webpack = require('webpack'); const CommonConfigWebpackPlugin = require('../src/CommonConfigWebpackPlugin'); // Allow tests to run 30s jest.setTimeout(30000); // AppVeyor on Node6 will fail if the fork-ts-checker is not limited to 512 MB memory require('fork-ts-checker-webpack-plugin').DEFAULT_MEMORY_LIMIT = 512; beforeAll((done) => { rimraf(path.join(__dirname, 'fixtures/dist'), done); }); beforeEach((done) => { process.chdir(path.join(__dirname, 'fixtures')); rimraf(path.join(__dirname, 'fixtures/dist'), done); }); describe('CommonConfigWebpackPlugin standalone', () => { it('should be creatable without options', () => { new CommonConfigWebpackPlugin(); }); it('should be creatable with options', () => { new CommonConfigWebpackPlugin({}); }); }); describe('CommonConfigWebpackPlugin inside webpack context', () => { it('should compile without errors', (done) => { const compiler = webpack({ context: path.join(__dirname, 'fixtures/simple'), plugins: [new CommonConfigWebpackPlugin()], }); compiler.run((err, stats) => { expect(stats.compilation.errors).toEqual([]); done(); }); }); it('should compile without errors in development mode', (done) => { const compiler = webpack({ mode: 'development', context: path.join(__dirname, 'fixtures/simple'), plugins: [new CommonConfigWebpackPlugin()], }); compiler.run((err, stats) => { expect(stats.compilation.errors).toEqual([]); done(); }); }); it('should compile without errors in production mode', (done) => { const compiler = webpack({ mode: 'production', context: path.join(__dirname, 'fixtures/simple'), plugins: [new CommonConfigWebpackPlugin()], }); compiler.run((err, stats) => { expect(stats.compilation.errors).toEqual([]); done(); }); }); });
{ "pile_set_name": "Github" }
/* Locale-independent implementations of string <-> double conversions. */ #include "jsi.h" #if defined(_MSC_VER) && (_MSC_VER < 1700) /* VS2012 has stdint.h */ typedef unsigned int uint32_t; typedef unsigned __int64 uint64_t; #else #include <stdint.h> #endif #include <errno.h> #include <assert.h> #ifndef TRUE #define TRUE 1 #define FALSE 0 #endif /* * format exponent like sprintf(p, "e%+d", e) */ void js_fmtexp(char *p, int e) { char se[9]; int i; *p++ = 'e'; if(e < 0) { *p++ = '-'; e = -e; } else *p++ = '+'; i = 0; while(e) { se[i++] = e % 10 + '0'; e /= 10; } while(i < 1) se[i++] = '0'; while(i > 0) *p++ = se[--i]; *p++ = '\0'; } /* * grisu2_59_56.c * * Grisu prints the optimal decimal representation of floating-point numbers. * * Copyright (c) 2009 Florian Loitsch * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to * deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. */ typedef struct diy_fp_t { uint64_t f; int e; } diy_fp_t; #define DIY_SIGNIFICAND_SIZE 64 #define D_1_LOG2_10 0.30102999566398114 /* 1 / lg(10) */ static const uint64_t powers_ten[] = { 0xbf29dcaba82fdeae, 0xeef453d6923bd65a, 0x9558b4661b6565f8, 0xbaaee17fa23ebf76, 0xe95a99df8ace6f54, 0x91d8a02bb6c10594, 0xb64ec836a47146fa, 0xe3e27a444d8d98b8, 0x8e6d8c6ab0787f73, 0xb208ef855c969f50, 0xde8b2b66b3bc4724, 0x8b16fb203055ac76, 0xaddcb9e83c6b1794, 0xd953e8624b85dd79, 0x87d4713d6f33aa6c, 0xa9c98d8ccb009506, 0xd43bf0effdc0ba48, 0x84a57695fe98746d, 0xa5ced43b7e3e9188, 0xcf42894a5dce35ea, 0x818995ce7aa0e1b2, 0xa1ebfb4219491a1f, 0xca66fa129f9b60a7, 0xfd00b897478238d1, 0x9e20735e8cb16382, 0xc5a890362fddbc63, 0xf712b443bbd52b7c, 0x9a6bb0aa55653b2d, 0xc1069cd4eabe89f9, 0xf148440a256e2c77, 0x96cd2a865764dbca, 0xbc807527ed3e12bd, 0xeba09271e88d976c, 0x93445b8731587ea3, 0xb8157268fdae9e4c, 0xe61acf033d1a45df, 0x8fd0c16206306bac, 0xb3c4f1ba87bc8697, 0xe0b62e2929aba83c, 0x8c71dcd9ba0b4926, 0xaf8e5410288e1b6f, 0xdb71e91432b1a24b, 0x892731ac9faf056f, 0xab70fe17c79ac6ca, 0xd64d3d9db981787d, 0x85f0468293f0eb4e, 0xa76c582338ed2622, 0xd1476e2c07286faa, 0x82cca4db847945ca, 0xa37fce126597973d, 0xcc5fc196fefd7d0c, 0xff77b1fcbebcdc4f, 0x9faacf3df73609b1, 0xc795830d75038c1e, 0xf97ae3d0d2446f25, 0x9becce62836ac577, 0xc2e801fb244576d5, 0xf3a20279ed56d48a, 0x9845418c345644d7, 0xbe5691ef416bd60c, 0xedec366b11c6cb8f, 0x94b3a202eb1c3f39, 0xb9e08a83a5e34f08, 0xe858ad248f5c22ca, 0x91376c36d99995be, 0xb58547448ffffb2e, 0xe2e69915b3fff9f9, 0x8dd01fad907ffc3c, 0xb1442798f49ffb4b, 0xdd95317f31c7fa1d, 0x8a7d3eef7f1cfc52, 0xad1c8eab5ee43b67, 0xd863b256369d4a41, 0x873e4f75e2224e68, 0xa90de3535aaae202, 0xd3515c2831559a83, 0x8412d9991ed58092, 0xa5178fff668ae0b6, 0xce5d73ff402d98e4, 0x80fa687f881c7f8e, 0xa139029f6a239f72, 0xc987434744ac874f, 0xfbe9141915d7a922, 0x9d71ac8fada6c9b5, 0xc4ce17b399107c23, 0xf6019da07f549b2b, 0x99c102844f94e0fb, 0xc0314325637a193a, 0xf03d93eebc589f88, 0x96267c7535b763b5, 0xbbb01b9283253ca3, 0xea9c227723ee8bcb, 0x92a1958a7675175f, 0xb749faed14125d37, 0xe51c79a85916f485, 0x8f31cc0937ae58d3, 0xb2fe3f0b8599ef08, 0xdfbdcece67006ac9, 0x8bd6a141006042be, 0xaecc49914078536d, 0xda7f5bf590966849, 0x888f99797a5e012d, 0xaab37fd7d8f58179, 0xd5605fcdcf32e1d7, 0x855c3be0a17fcd26, 0xa6b34ad8c9dfc070, 0xd0601d8efc57b08c, 0x823c12795db6ce57, 0xa2cb1717b52481ed, 0xcb7ddcdda26da269, 0xfe5d54150b090b03, 0x9efa548d26e5a6e2, 0xc6b8e9b0709f109a, 0xf867241c8cc6d4c1, 0x9b407691d7fc44f8, 0xc21094364dfb5637, 0xf294b943e17a2bc4, 0x979cf3ca6cec5b5b, 0xbd8430bd08277231, 0xece53cec4a314ebe, 0x940f4613ae5ed137, 0xb913179899f68584, 0xe757dd7ec07426e5, 0x9096ea6f3848984f, 0xb4bca50b065abe63, 0xe1ebce4dc7f16dfc, 0x8d3360f09cf6e4bd, 0xb080392cc4349ded, 0xdca04777f541c568, 0x89e42caaf9491b61, 0xac5d37d5b79b6239, 0xd77485cb25823ac7, 0x86a8d39ef77164bd, 0xa8530886b54dbdec, 0xd267caa862a12d67, 0x8380dea93da4bc60, 0xa46116538d0deb78, 0xcd795be870516656, 0x806bd9714632dff6, 0xa086cfcd97bf97f4, 0xc8a883c0fdaf7df0, 0xfad2a4b13d1b5d6c, 0x9cc3a6eec6311a64, 0xc3f490aa77bd60fd, 0xf4f1b4d515acb93c, 0x991711052d8bf3c5, 0xbf5cd54678eef0b7, 0xef340a98172aace5, 0x9580869f0e7aac0f, 0xbae0a846d2195713, 0xe998d258869facd7, 0x91ff83775423cc06, 0xb67f6455292cbf08, 0xe41f3d6a7377eeca, 0x8e938662882af53e, 0xb23867fb2a35b28e, 0xdec681f9f4c31f31, 0x8b3c113c38f9f37f, 0xae0b158b4738705f, 0xd98ddaee19068c76, 0x87f8a8d4cfa417ca, 0xa9f6d30a038d1dbc, 0xd47487cc8470652b, 0x84c8d4dfd2c63f3b, 0xa5fb0a17c777cf0a, 0xcf79cc9db955c2cc, 0x81ac1fe293d599c0, 0xa21727db38cb0030, 0xca9cf1d206fdc03c, 0xfd442e4688bd304b, 0x9e4a9cec15763e2f, 0xc5dd44271ad3cdba, 0xf7549530e188c129, 0x9a94dd3e8cf578ba, 0xc13a148e3032d6e8, 0xf18899b1bc3f8ca2, 0x96f5600f15a7b7e5, 0xbcb2b812db11a5de, 0xebdf661791d60f56, 0x936b9fcebb25c996, 0xb84687c269ef3bfb, 0xe65829b3046b0afa, 0x8ff71a0fe2c2e6dc, 0xb3f4e093db73a093, 0xe0f218b8d25088b8, 0x8c974f7383725573, 0xafbd2350644eead0, 0xdbac6c247d62a584, 0x894bc396ce5da772, 0xab9eb47c81f5114f, 0xd686619ba27255a3, 0x8613fd0145877586, 0xa798fc4196e952e7, 0xd17f3b51fca3a7a1, 0x82ef85133de648c5, 0xa3ab66580d5fdaf6, 0xcc963fee10b7d1b3, 0xffbbcfe994e5c620, 0x9fd561f1fd0f9bd4, 0xc7caba6e7c5382c9, 0xf9bd690a1b68637b, 0x9c1661a651213e2d, 0xc31bfa0fe5698db8, 0xf3e2f893dec3f126, 0x986ddb5c6b3a76b8, 0xbe89523386091466, 0xee2ba6c0678b597f, 0x94db483840b717f0, 0xba121a4650e4ddec, 0xe896a0d7e51e1566, 0x915e2486ef32cd60, 0xb5b5ada8aaff80b8, 0xe3231912d5bf60e6, 0x8df5efabc5979c90, 0xb1736b96b6fd83b4, 0xddd0467c64bce4a1, 0x8aa22c0dbef60ee4, 0xad4ab7112eb3929e, 0xd89d64d57a607745, 0x87625f056c7c4a8b, 0xa93af6c6c79b5d2e, 0xd389b47879823479, 0x843610cb4bf160cc, 0xa54394fe1eedb8ff, 0xce947a3da6a9273e, 0x811ccc668829b887, 0xa163ff802a3426a9, 0xc9bcff6034c13053, 0xfc2c3f3841f17c68, 0x9d9ba7832936edc1, 0xc5029163f384a931, 0xf64335bcf065d37d, 0x99ea0196163fa42e, 0xc06481fb9bcf8d3a, 0xf07da27a82c37088, 0x964e858c91ba2655, 0xbbe226efb628afeb, 0xeadab0aba3b2dbe5, 0x92c8ae6b464fc96f, 0xb77ada0617e3bbcb, 0xe55990879ddcaabe, 0x8f57fa54c2a9eab7, 0xb32df8e9f3546564, 0xdff9772470297ebd, 0x8bfbea76c619ef36, 0xaefae51477a06b04, 0xdab99e59958885c5, 0x88b402f7fd75539b, 0xaae103b5fcd2a882, 0xd59944a37c0752a2, 0x857fcae62d8493a5, 0xa6dfbd9fb8e5b88f, 0xd097ad07a71f26b2, 0x825ecc24c8737830, 0xa2f67f2dfa90563b, 0xcbb41ef979346bca, 0xfea126b7d78186bd, 0x9f24b832e6b0f436, 0xc6ede63fa05d3144, 0xf8a95fcf88747d94, 0x9b69dbe1b548ce7d, 0xc24452da229b021c, 0xf2d56790ab41c2a3, 0x97c560ba6b0919a6, 0xbdb6b8e905cb600f, 0xed246723473e3813, 0x9436c0760c86e30c, 0xb94470938fa89bcf, 0xe7958cb87392c2c3, 0x90bd77f3483bb9ba, 0xb4ecd5f01a4aa828, 0xe2280b6c20dd5232, 0x8d590723948a535f, 0xb0af48ec79ace837, 0xdcdb1b2798182245, 0x8a08f0f8bf0f156b, 0xac8b2d36eed2dac6, 0xd7adf884aa879177, 0x86ccbb52ea94baeb, 0xa87fea27a539e9a5, 0xd29fe4b18e88640f, 0x83a3eeeef9153e89, 0xa48ceaaab75a8e2b, 0xcdb02555653131b6, 0x808e17555f3ebf12, 0xa0b19d2ab70e6ed6, 0xc8de047564d20a8c, 0xfb158592be068d2f, 0x9ced737bb6c4183d, 0xc428d05aa4751e4d, 0xf53304714d9265e0, 0x993fe2c6d07b7fac, 0xbf8fdb78849a5f97, 0xef73d256a5c0f77d, 0x95a8637627989aae, 0xbb127c53b17ec159, 0xe9d71b689dde71b0, 0x9226712162ab070e, 0xb6b00d69bb55c8d1, 0xe45c10c42a2b3b06, 0x8eb98a7a9a5b04e3, 0xb267ed1940f1c61c, 0xdf01e85f912e37a3, 0x8b61313bbabce2c6, 0xae397d8aa96c1b78, 0xd9c7dced53c72256, 0x881cea14545c7575, 0xaa242499697392d3, 0xd4ad2dbfc3d07788, 0x84ec3c97da624ab5, 0xa6274bbdd0fadd62, 0xcfb11ead453994ba, 0x81ceb32c4b43fcf5, 0xa2425ff75e14fc32, 0xcad2f7f5359a3b3e, 0xfd87b5f28300ca0e, 0x9e74d1b791e07e48, 0xc612062576589ddb, 0xf79687aed3eec551, 0x9abe14cd44753b53, 0xc16d9a0095928a27, 0xf1c90080baf72cb1, 0x971da05074da7bef, 0xbce5086492111aeb, 0xec1e4a7db69561a5, 0x9392ee8e921d5d07, 0xb877aa3236a4b449, 0xe69594bec44de15b, 0x901d7cf73ab0acd9, 0xb424dc35095cd80f, 0xe12e13424bb40e13, 0x8cbccc096f5088cc, 0xafebff0bcb24aaff, 0xdbe6fecebdedd5bf, 0x89705f4136b4a597, 0xabcc77118461cefd, 0xd6bf94d5e57a42bc, 0x8637bd05af6c69b6, 0xa7c5ac471b478423, 0xd1b71758e219652c, 0x83126e978d4fdf3b, 0xa3d70a3d70a3d70a, 0xcccccccccccccccd, 0x8000000000000000, 0xa000000000000000, 0xc800000000000000, 0xfa00000000000000, 0x9c40000000000000, 0xc350000000000000, 0xf424000000000000, 0x9896800000000000, 0xbebc200000000000, 0xee6b280000000000, 0x9502f90000000000, 0xba43b74000000000, 0xe8d4a51000000000, 0x9184e72a00000000, 0xb5e620f480000000, 0xe35fa931a0000000, 0x8e1bc9bf04000000, 0xb1a2bc2ec5000000, 0xde0b6b3a76400000, 0x8ac7230489e80000, 0xad78ebc5ac620000, 0xd8d726b7177a8000, 0x878678326eac9000, 0xa968163f0a57b400, 0xd3c21bcecceda100, 0x84595161401484a0, 0xa56fa5b99019a5c8, 0xcecb8f27f4200f3a, 0x813f3978f8940984, 0xa18f07d736b90be5, 0xc9f2c9cd04674edf, 0xfc6f7c4045812296, 0x9dc5ada82b70b59e, 0xc5371912364ce305, 0xf684df56c3e01bc7, 0x9a130b963a6c115c, 0xc097ce7bc90715b3, 0xf0bdc21abb48db20, 0x96769950b50d88f4, 0xbc143fa4e250eb31, 0xeb194f8e1ae525fd, 0x92efd1b8d0cf37be, 0xb7abc627050305ae, 0xe596b7b0c643c719, 0x8f7e32ce7bea5c70, 0xb35dbf821ae4f38c, 0xe0352f62a19e306f, 0x8c213d9da502de45, 0xaf298d050e4395d7, 0xdaf3f04651d47b4c, 0x88d8762bf324cd10, 0xab0e93b6efee0054, 0xd5d238a4abe98068, 0x85a36366eb71f041, 0xa70c3c40a64e6c52, 0xd0cf4b50cfe20766, 0x82818f1281ed44a0, 0xa321f2d7226895c8, 0xcbea6f8ceb02bb3a, 0xfee50b7025c36a08, 0x9f4f2726179a2245, 0xc722f0ef9d80aad6, 0xf8ebad2b84e0d58c, 0x9b934c3b330c8577, 0xc2781f49ffcfa6d5, 0xf316271c7fc3908b, 0x97edd871cfda3a57, 0xbde94e8e43d0c8ec, 0xed63a231d4c4fb27, 0x945e455f24fb1cf9, 0xb975d6b6ee39e437, 0xe7d34c64a9c85d44, 0x90e40fbeea1d3a4b, 0xb51d13aea4a488dd, 0xe264589a4dcdab15, 0x8d7eb76070a08aed, 0xb0de65388cc8ada8, 0xdd15fe86affad912, 0x8a2dbf142dfcc7ab, 0xacb92ed9397bf996, 0xd7e77a8f87daf7fc, 0x86f0ac99b4e8dafd, 0xa8acd7c0222311bd, 0xd2d80db02aabd62c, 0x83c7088e1aab65db, 0xa4b8cab1a1563f52, 0xcde6fd5e09abcf27, 0x80b05e5ac60b6178, 0xa0dc75f1778e39d6, 0xc913936dd571c84c, 0xfb5878494ace3a5f, 0x9d174b2dcec0e47b, 0xc45d1df942711d9a, 0xf5746577930d6501, 0x9968bf6abbe85f20, 0xbfc2ef456ae276e9, 0xefb3ab16c59b14a3, 0x95d04aee3b80ece6, 0xbb445da9ca61281f, 0xea1575143cf97227, 0x924d692ca61be758, 0xb6e0c377cfa2e12e, 0xe498f455c38b997a, 0x8edf98b59a373fec, 0xb2977ee300c50fe7, 0xdf3d5e9bc0f653e1, 0x8b865b215899f46d, 0xae67f1e9aec07188, 0xda01ee641a708dea, 0x884134fe908658b2, 0xaa51823e34a7eedf, 0xd4e5e2cdc1d1ea96, 0x850fadc09923329e, 0xa6539930bf6bff46, 0xcfe87f7cef46ff17, 0x81f14fae158c5f6e, 0xa26da3999aef774a, 0xcb090c8001ab551c, 0xfdcb4fa002162a63, 0x9e9f11c4014dda7e, 0xc646d63501a1511e, 0xf7d88bc24209a565, 0x9ae757596946075f, 0xc1a12d2fc3978937, 0xf209787bb47d6b85, 0x9745eb4d50ce6333, 0xbd176620a501fc00, 0xec5d3fa8ce427b00, 0x93ba47c980e98ce0, 0xb8a8d9bbe123f018, 0xe6d3102ad96cec1e, 0x9043ea1ac7e41393, 0xb454e4a179dd1877, 0xe16a1dc9d8545e95, 0x8ce2529e2734bb1d, 0xb01ae745b101e9e4, 0xdc21a1171d42645d, 0x899504ae72497eba, 0xabfa45da0edbde69, 0xd6f8d7509292d603, 0x865b86925b9bc5c2, 0xa7f26836f282b733, 0xd1ef0244af2364ff, 0x8335616aed761f1f, 0xa402b9c5a8d3a6e7, 0xcd036837130890a1, 0x802221226be55a65, 0xa02aa96b06deb0fe, 0xc83553c5c8965d3d, 0xfa42a8b73abbf48d, 0x9c69a97284b578d8, 0xc38413cf25e2d70e, 0xf46518c2ef5b8cd1, 0x98bf2f79d5993803, 0xbeeefb584aff8604, 0xeeaaba2e5dbf6785, 0x952ab45cfa97a0b3, 0xba756174393d88e0, 0xe912b9d1478ceb17, 0x91abb422ccb812ef, 0xb616a12b7fe617aa, 0xe39c49765fdf9d95, 0x8e41ade9fbebc27d, 0xb1d219647ae6b31c, 0xde469fbd99a05fe3, 0x8aec23d680043bee, 0xada72ccc20054aea, 0xd910f7ff28069da4, 0x87aa9aff79042287, 0xa99541bf57452b28, 0xd3fa922f2d1675f2, 0x847c9b5d7c2e09b7, 0xa59bc234db398c25, 0xcf02b2c21207ef2f, 0x8161afb94b44f57d, 0xa1ba1ba79e1632dc, 0xca28a291859bbf93, 0xfcb2cb35e702af78, 0x9defbf01b061adab, 0xc56baec21c7a1916, 0xf6c69a72a3989f5c, 0x9a3c2087a63f6399, 0xc0cb28a98fcf3c80, 0xf0fdf2d3f3c30b9f, 0x969eb7c47859e744, 0xbc4665b596706115, 0xeb57ff22fc0c795a, 0x9316ff75dd87cbd8, 0xb7dcbf5354e9bece, 0xe5d3ef282a242e82, 0x8fa475791a569d11, 0xb38d92d760ec4455, 0xe070f78d3927556b, 0x8c469ab843b89563, 0xaf58416654a6babb, 0xdb2e51bfe9d0696a, 0x88fcf317f22241e2, 0xab3c2fddeeaad25b, 0xd60b3bd56a5586f2, 0x85c7056562757457, 0xa738c6bebb12d16d, 0xd106f86e69d785c8, 0x82a45b450226b39d, 0xa34d721642b06084, 0xcc20ce9bd35c78a5, 0xff290242c83396ce, 0x9f79a169bd203e41, 0xc75809c42c684dd1, 0xf92e0c3537826146, 0x9bbcc7a142b17ccc, 0xc2abf989935ddbfe, 0xf356f7ebf83552fe, 0x98165af37b2153df, 0xbe1bf1b059e9a8d6, 0xeda2ee1c7064130c, 0x9485d4d1c63e8be8, 0xb9a74a0637ce2ee1, 0xe8111c87c5c1ba9a, 0x910ab1d4db9914a0, 0xb54d5e4a127f59c8, 0xe2a0b5dc971f303a, 0x8da471a9de737e24, 0xb10d8e1456105dad, 0xdd50f1996b947519, 0x8a5296ffe33cc930, 0xace73cbfdc0bfb7b, 0xd8210befd30efa5a, 0x8714a775e3e95c78, 0xa8d9d1535ce3b396, 0xd31045a8341ca07c, 0x83ea2b892091e44e, 0xa4e4b66b68b65d61, 0xce1de40642e3f4b9, 0x80d2ae83e9ce78f4, 0xa1075a24e4421731, 0xc94930ae1d529cfd, 0xfb9b7cd9a4a7443c, 0x9d412e0806e88aa6, 0xc491798a08a2ad4f, 0xf5b5d7ec8acb58a3, 0x9991a6f3d6bf1766, 0xbff610b0cc6edd3f, 0xeff394dcff8a948f, 0x95f83d0a1fb69cd9, 0xbb764c4ca7a44410, 0xea53df5fd18d5514, 0x92746b9be2f8552c, 0xb7118682dbb66a77, 0xe4d5e82392a40515, 0x8f05b1163ba6832d, 0xb2c71d5bca9023f8, 0xdf78e4b2bd342cf7, 0x8bab8eefb6409c1a, 0xae9672aba3d0c321, 0xda3c0f568cc4f3e9, 0x8865899617fb1871, 0xaa7eebfb9df9de8e, 0xd51ea6fa85785631, 0x8533285c936b35df, 0xa67ff273b8460357, 0xd01fef10a657842c, 0x8213f56a67f6b29c, 0xa298f2c501f45f43, 0xcb3f2f7642717713, 0xfe0efb53d30dd4d8, 0x9ec95d1463e8a507, 0xc67bb4597ce2ce49, 0xf81aa16fdc1b81db, 0x9b10a4e5e9913129, 0xc1d4ce1f63f57d73, 0xf24a01a73cf2dcd0, 0x976e41088617ca02, 0xbd49d14aa79dbc82, 0xec9c459d51852ba3, 0x93e1ab8252f33b46, 0xb8da1662e7b00a17, 0xe7109bfba19c0c9d, 0x906a617d450187e2, 0xb484f9dc9641e9db, 0xe1a63853bbd26451, 0x8d07e33455637eb3, 0xb049dc016abc5e60, 0xdc5c5301c56b75f7, 0x89b9b3e11b6329bb, 0xac2820d9623bf429, 0xd732290fbacaf134, 0x867f59a9d4bed6c0, 0xa81f301449ee8c70, 0xd226fc195c6a2f8c, 0x83585d8fd9c25db8, 0xa42e74f3d032f526, 0xcd3a1230c43fb26f, 0x80444b5e7aa7cf85, 0xa0555e361951c367, 0xc86ab5c39fa63441, 0xfa856334878fc151, 0x9c935e00d4b9d8d2, 0xc3b8358109e84f07, 0xf4a642e14c6262c9, 0x98e7e9cccfbd7dbe, 0xbf21e44003acdd2d, 0xeeea5d5004981478, 0x95527a5202df0ccb, 0xbaa718e68396cffe, 0xe950df20247c83fd, 0x91d28b7416cdd27e, 0xb6472e511c81471e, 0xe3d8f9e563a198e5, 0x8e679c2f5e44ff8f, 0xb201833b35d63f73, 0xde81e40a034bcf50, 0x8b112e86420f6192, 0xadd57a27d29339f6, 0xd94ad8b1c7380874, 0x87cec76f1c830549, 0xa9c2794ae3a3c69b, 0xd433179d9c8cb841, 0x849feec281d7f329, 0xa5c7ea73224deff3, 0xcf39e50feae16bf0, 0x81842f29f2cce376, 0xa1e53af46f801c53, 0xca5e89b18b602368, 0xfcf62c1dee382c42, 0x9e19db92b4e31ba9, 0xc5a05277621be294, 0xf70867153aa2db39, 0x9a65406d44a5c903, 0xc0fe908895cf3b44, 0xf13e34aabb430a15, 0x96c6e0eab509e64d, 0xbc789925624c5fe1, 0xeb96bf6ebadf77d9, 0x933e37a534cbaae8, 0xb80dc58e81fe95a1, 0xe61136f2227e3b0a, 0x8fcac257558ee4e6, 0xb3bd72ed2af29e20, 0xe0accfa875af45a8, 0x8c6c01c9498d8b89, 0xaf87023b9bf0ee6b, 0xdb68c2ca82ed2a06, 0x892179be91d43a44, 0xab69d82e364948d4 }; static const int powers_ten_e[] = { -1203, -1200, -1196, -1193, -1190, -1186, -1183, -1180, -1176, -1173, -1170, -1166, -1163, -1160, -1156, -1153, -1150, -1146, -1143, -1140, -1136, -1133, -1130, -1127, -1123, -1120, -1117, -1113, -1110, -1107, -1103, -1100, -1097, -1093, -1090, -1087, -1083, -1080, -1077, -1073, -1070, -1067, -1063, -1060, -1057, -1053, -1050, -1047, -1043, -1040, -1037, -1034, -1030, -1027, -1024, -1020, -1017, -1014, -1010, -1007, -1004, -1000, -997, -994, -990, -987, -984, -980, -977, -974, -970, -967, -964, -960, -957, -954, -950, -947, -944, -940, -937, -934, -931, -927, -924, -921, -917, -914, -911, -907, -904, -901, -897, -894, -891, -887, -884, -881, -877, -874, -871, -867, -864, -861, -857, -854, -851, -847, -844, -841, -838, -834, -831, -828, -824, -821, -818, -814, -811, -808, -804, -801, -798, -794, -791, -788, -784, -781, -778, -774, -771, -768, -764, -761, -758, -754, -751, -748, -744, -741, -738, -735, -731, -728, -725, -721, -718, -715, -711, -708, -705, -701, -698, -695, -691, -688, -685, -681, -678, -675, -671, -668, -665, -661, -658, -655, -651, -648, -645, -642, -638, -635, -632, -628, -625, -622, -618, -615, -612, -608, -605, -602, -598, -595, -592, -588, -585, -582, -578, -575, -572, -568, -565, -562, -558, -555, -552, -549, -545, -542, -539, -535, -532, -529, -525, -522, -519, -515, -512, -509, -505, -502, -499, -495, -492, -489, -485, -482, -479, -475, -472, -469, -465, -462, -459, -455, -452, -449, -446, -442, -439, -436, -432, -429, -426, -422, -419, -416, -412, -409, -406, -402, -399, -396, -392, -389, -386, -382, -379, -376, -372, -369, -366, -362, -359, -356, -353, -349, -346, -343, -339, -336, -333, -329, -326, -323, -319, -316, -313, -309, -306, -303, -299, -296, -293, -289, -286, -283, -279, -276, -273, -269, -266, -263, -259, -256, -253, -250, -246, -243, -240, -236, -233, -230, -226, -223, -220, -216, -213, -210, -206, -203, -200, -196, -193, -190, -186, -183, -180, -176, -173, -170, -166, -163, -160, -157, -153, -150, -147, -143, -140, -137, -133, -130, -127, -123, -120, -117, -113, -110, -107, -103, -100, -97, -93, -90, -87, -83, -80, -77, -73, -70, -67, -63, -60, -57, -54, -50, -47, -44, -40, -37, -34, -30, -27, -24, -20, -17, -14, -10, -7, -4, 0, 3, 6, 10, 13, 16, 20, 23, 26, 30, 33, 36, 39, 43, 46, 49, 53, 56, 59, 63, 66, 69, 73, 76, 79, 83, 86, 89, 93, 96, 99, 103, 106, 109, 113, 116, 119, 123, 126, 129, 132, 136, 139, 142, 146, 149, 152, 156, 159, 162, 166, 169, 172, 176, 179, 182, 186, 189, 192, 196, 199, 202, 206, 209, 212, 216, 219, 222, 226, 229, 232, 235, 239, 242, 245, 249, 252, 255, 259, 262, 265, 269, 272, 275, 279, 282, 285, 289, 292, 295, 299, 302, 305, 309, 312, 315, 319, 322, 325, 328, 332, 335, 338, 342, 345, 348, 352, 355, 358, 362, 365, 368, 372, 375, 378, 382, 385, 388, 392, 395, 398, 402, 405, 408, 412, 415, 418, 422, 425, 428, 431, 435, 438, 441, 445, 448, 451, 455, 458, 461, 465, 468, 471, 475, 478, 481, 485, 488, 491, 495, 498, 501, 505, 508, 511, 515, 518, 521, 524, 528, 531, 534, 538, 541, 544, 548, 551, 554, 558, 561, 564, 568, 571, 574, 578, 581, 584, 588, 591, 594, 598, 601, 604, 608, 611, 614, 617, 621, 624, 627, 631, 634, 637, 641, 644, 647, 651, 654, 657, 661, 664, 667, 671, 674, 677, 681, 684, 687, 691, 694, 697, 701, 704, 707, 711, 714, 717, 720, 724, 727, 730, 734, 737, 740, 744, 747, 750, 754, 757, 760, 764, 767, 770, 774, 777, 780, 784, 787, 790, 794, 797, 800, 804, 807, 810, 813, 817, 820, 823, 827, 830, 833, 837, 840, 843, 847, 850, 853, 857, 860, 863, 867, 870, 873, 877, 880, 883, 887, 890, 893, 897, 900, 903, 907, 910, 913, 916, 920, 923, 926, 930, 933, 936, 940, 943, 946, 950, 953, 956, 960, 963, 966, 970, 973, 976, 980, 983, 986, 990, 993, 996, 1000, 1003, 1006, 1009, 1013, 1016, 1019, 1023, 1026, 1029, 1033, 1036, 1039, 1043, 1046, 1049, 1053, 1056, 1059, 1063, 1066, 1069, 1073, 1076 }; static diy_fp_t cached_power(int k) { diy_fp_t res; int index = 343 + k; res.f = powers_ten[index]; res.e = powers_ten_e[index]; return res; } static int k_comp(int e, int alpha, int gamma) { return ceil((alpha-e+63) * D_1_LOG2_10); } static diy_fp_t minus(diy_fp_t x, diy_fp_t y) { diy_fp_t r; assert(x.e == y.e); assert(x.f >= y.f); r.f = x.f - y.f; r.e = x.e; return r; } static diy_fp_t multiply(diy_fp_t x, diy_fp_t y) { uint64_t a,b,c,d,ac,bc,ad,bd,tmp; diy_fp_t r; uint64_t M32 = 0xFFFFFFFF; a = x.f >> 32; b = x.f & M32; c = y.f >> 32; d = y.f & M32; ac = a*c; bc = b*c; ad = a*d; bd = b*d; tmp = (bd>>32) + (ad&M32) + (bc&M32); tmp += 1U << 31; r.f = ac+(ad>>32)+(bc>>32)+(tmp >>32); r.e = x.e + y.e + 64; return r; } static uint64_t double_to_uint64(double d) { uint64_t n; memcpy(&n, &d, 8); return n; } #define DP_SIGNIFICAND_SIZE 52 #define DP_EXPONENT_BIAS (0x3FF + DP_SIGNIFICAND_SIZE) #define DP_MIN_EXPONENT (-DP_EXPONENT_BIAS) #define DP_EXPONENT_MASK 0x7FF0000000000000 #define DP_SIGNIFICAND_MASK 0x000FFFFFFFFFFFFF #define DP_HIDDEN_BIT 0x0010000000000000 static diy_fp_t double2diy_fp(double d) { uint64_t d64 = double_to_uint64(d); int biased_e = (d64 & DP_EXPONENT_MASK) >> DP_SIGNIFICAND_SIZE; uint64_t significand = (d64 & DP_SIGNIFICAND_MASK); diy_fp_t res; if (biased_e != 0) { res.f = significand + DP_HIDDEN_BIT; res.e = biased_e - DP_EXPONENT_BIAS; } else { res.f = significand; res.e = DP_MIN_EXPONENT + 1; } return res; } static diy_fp_t normalize_boundary(diy_fp_t in) { diy_fp_t res = in; /* Normalize now */ /* the original number could have been a denormal. */ while (! (res.f & (DP_HIDDEN_BIT << 1))) { res.f <<= 1; res.e--; } /* do the final shifts in one go. Don't forget the hidden bit (the '-1') */ res.f <<= (DIY_SIGNIFICAND_SIZE - DP_SIGNIFICAND_SIZE - 2); res.e = res.e - (DIY_SIGNIFICAND_SIZE - DP_SIGNIFICAND_SIZE - 2); return res; } static void normalized_boundaries(double d, diy_fp_t* out_m_minus, diy_fp_t* out_m_plus) { diy_fp_t v = double2diy_fp(d); diy_fp_t pl, mi; int significand_is_zero = v.f == DP_HIDDEN_BIT; pl.f = (v.f << 1) + 1; pl.e = v.e - 1; pl = normalize_boundary(pl); if (significand_is_zero) { mi.f = (v.f << 2) - 1; mi.e = v.e - 2; } else { mi.f = (v.f << 1) - 1; mi.e = v.e - 1; } mi.f <<= mi.e - pl.e; mi.e = pl.e; *out_m_plus = pl; *out_m_minus = mi; } #define TEN2 100 static void digit_gen(diy_fp_t Mp, diy_fp_t delta, char* buffer, int* len, int* K) { uint32_t div, p1; uint64_t p2; int d,kappa; diy_fp_t one; one.f = ((uint64_t) 1) << -Mp.e; one.e = Mp.e; p1 = Mp.f >> -one.e; p2 = Mp.f & (one.f - 1); *len = 0; kappa = 3; div = TEN2; while (kappa > 0) { d = p1 / div; if (d || *len) buffer[(*len)++] = '0' + d; p1 %= div; kappa--; div /= 10; if ((((uint64_t)p1)<<-one.e)+p2 <= delta.f) { *K += kappa; return; } } do { p2 *= 10; d = p2 >> -one.e; if (d || *len) buffer[(*len)++] = '0' + d; p2 &= one.f - 1; kappa--; delta.f *= 10; } while (p2 > delta.f); *K += kappa; } int js_grisu2(double v, char *buffer, int *K) { int length, mk; diy_fp_t w_m, w_p, c_mk, Wp, Wm, delta; int q = 64, alpha = -59, gamma = -56; normalized_boundaries(v, &w_m, &w_p); mk = k_comp(w_p.e + q, alpha, gamma); c_mk = cached_power(mk); Wp = multiply(w_p, c_mk); Wm = multiply(w_m, c_mk); Wm.f++; Wp.f--; delta = minus(Wp, Wm); *K = -mk; digit_gen(Wp, delta, buffer, &length, K); return length; } /* * strtod.c * * Copyright (c) 1988-1993 The Regents of the University of California. * Copyright (c) 1994 Sun Microsystems, Inc. * * Permission to use, copy, modify, and distribute this software and its * documentation for any purpose and without fee is hereby granted, provided * that the above copyright notice appear in all copies. The University of * California makes no representations about the suitability of this software * for any purpose. It is provided "as is" without express or implied warranty. */ /* Largest possible base 10 exponent. Any exponent larger than this will * already produce underflow or overflow, so there's no need to worry about * additional digits. */ static int maxExponent = 511; /* Table giving binary powers of 10. Entry * is 10^2^i. Used to convert decimal * exponents into floating-point numbers. */ static double powersOf10[] = { 10., 100., 1.0e4, 1.0e8, 1.0e16, 1.0e32, 1.0e64, 1.0e128, 1.0e256 }; /* Parse a decimal ASCII floating-point number, optionally preceded by white * space. Must have form "-I.FE-X", where I is the integer part of the * mantissa, F is the fractional part of the mantissa, and X is the exponent. * Either of the signs may be "+", "-", or omitted. Either I or F may be * omitted, or both. The decimal point isn't necessary unless F is present. * The "E" may actually be an "e". E and X may both be omitted (but not just * one). */ double js_strtod(const char *string, char **endPtr) { int sign, expSign = FALSE; double fraction, dblExp, *d; register const char *p; register int c; /* Exponent read from "EX" field. */ int exp = 0; /* Exponent that derives from the fractional part. Under normal * circumstances, it is the negative of the number of digits in F. * However, if I is very long, the last digits of I get dropped * (otherwise a long I with a large negative exponent could cause an * unnecessary overflow on I alone). In this case, fracExp is * incremented one for each dropped digit. */ int fracExp = 0; /* Number of digits in mantissa. */ int mantSize; /* Number of mantissa digits BEFORE decimal point. */ int decPt; /* Temporarily holds location of exponent in string. */ const char *pExp; /* * Strip off leading blanks and check for a sign. */ p = string; while (*p == ' ' || *p == '\t' || *p == '\n' || *p == '\r') { p += 1; } if (*p == '-') { sign = TRUE; p += 1; } else { if (*p == '+') { p += 1; } sign = FALSE; } /* * Count the number of digits in the mantissa (including the decimal * point), and also locate the decimal point. */ decPt = -1; for (mantSize = 0; ; mantSize += 1) { c = *p; if (!(c>='0'&&c<='9')) { if ((c != '.') || (decPt >= 0)) { break; } decPt = mantSize; } p += 1; } /* * Now suck up the digits in the mantissa. Use two integers to * collect 9 digits each (this is faster than using floating-point). * If the mantissa has more than 18 digits, ignore the extras, since * they can't affect the value anyway. */ pExp = p; p -= mantSize; if (decPt < 0) { decPt = mantSize; } else { mantSize -= 1; /* One of the digits was the point. */ } if (mantSize > 18) { fracExp = decPt - 18; mantSize = 18; } else { fracExp = decPt - mantSize; } if (mantSize == 0) { fraction = 0.0; p = string; goto done; } else { int frac1, frac2; frac1 = 0; for ( ; mantSize > 9; mantSize -= 1) { c = *p; p += 1; if (c == '.') { c = *p; p += 1; } frac1 = 10*frac1 + (c - '0'); } frac2 = 0; for (; mantSize > 0; mantSize -= 1) { c = *p; p += 1; if (c == '.') { c = *p; p += 1; } frac2 = 10*frac2 + (c - '0'); } fraction = (1.0e9 * frac1) + frac2; } /* * Skim off the exponent. */ p = pExp; if ((*p == 'E') || (*p == 'e')) { p += 1; if (*p == '-') { expSign = TRUE; p += 1; } else { if (*p == '+') { p += 1; } expSign = FALSE; } while ((*p >= '0') && (*p <= '9')) { exp = exp * 10 + (*p - '0'); p += 1; } } if (expSign) { exp = fracExp - exp; } else { exp = fracExp + exp; } /* * Generate a floating-point number that represents the exponent. * Do this by processing the exponent one bit at a time to combine * many powers of 2 of 10. Then combine the exponent with the * fraction. */ if (exp < -maxExponent) { exp = maxExponent; expSign = TRUE; errno = ERANGE; } else if (exp > maxExponent) { exp = maxExponent; expSign = FALSE; errno = ERANGE; } else if (exp < 0) { expSign = TRUE; exp = -exp; } else { expSign = FALSE; } dblExp = 1.0; for (d = powersOf10; exp != 0; exp >>= 1, d += 1) { if (exp & 01) { dblExp *= *d; } } if (expSign) { fraction /= dblExp; } else { fraction *= dblExp; } done: if (endPtr != NULL) { *endPtr = (char *) p; } if (sign) { return -fraction; } return fraction; }
{ "pile_set_name": "Github" }
-- -- -- delimiter // drop procedure if exists _expand_statement_variables // create procedure _expand_statement_variables( in id_from int unsigned, in id_to int unsigned, out expanded_statement text charset utf8, out expanded_variables_found tinyint unsigned, in should_execute_statement tinyint unsigned ) comment 'Returns an expanded script statement' language SQL deterministic modifies sql data sql security invoker main_body: begin declare expanded_variables TEXT CHARSET utf8; SELECT GROUP_CONCAT(DISTINCT _extract_expanded_query_script_variable_name(token)) from _sql_tokens where (id between id_from and id_to) and (state = 'expanded query_script variable') INTO @_expanded_variables; set expanded_variables=@_expanded_variables; set expanded_variables_found := (expanded_variables IS NOT NULL); if expanded_variables_found and should_execute_statement then call _take_local_variables_snapshot(expanded_variables); end if; SELECT GROUP_CONCAT( case when _qs_variables_this_function.mapped_user_defined_variable_name IS NOT NULL then case when state = 'expanded query_script variable' then _qs_variables_this_function.value_snapshot /* expanded */ else _qs_variables_this_function.mapped_user_defined_variable_name /* non-expanded */ end when _qs_variables_global.mapped_user_defined_variable_name IS NOT NULL then case when state = 'expanded query_script variable' then _qs_variables_global.value_snapshot /* expanded */ else _qs_variables_global.mapped_user_defined_variable_name /* non-expanded */ end else token /* not a query script variable at all */ end ORDER BY id ASC SEPARATOR '' ) FROM _sql_tokens LEFT JOIN _qs_variables AS _qs_variables_this_function ON ( /* Try to match a query script variable, or an expanded query script variable */ ( (state = 'expanded query_script variable' AND _extract_expanded_query_script_variable_name(token) = _qs_variables_this_function.variable_name) /* expanded */ or (state = 'query_script variable' AND token = _qs_variables_this_function.variable_name) /* non-expanded */ ) and (id_from between _qs_variables_this_function.declaration_id and _qs_variables_this_function.scope_end_id) and _qs_variables_this_function.function_scope = _get_current_variables_function_scope() ) LEFT JOIN _qs_variables AS _qs_variables_global ON ( /* Try to match a query script variable, or an expanded query script variable */ ( (state = 'expanded query_script variable' AND _extract_expanded_query_script_variable_name(token) = _qs_variables_global.variable_name) /* expanded */ or (state = 'query_script variable' AND token = _qs_variables_global.variable_name) /* non-expanded */ ) and (id_from between _qs_variables_global.declaration_id and _qs_variables_global.scope_end_id) and _qs_variables_global.function_scope = '' ) where (id between id_from and id_to) into @_expanded_statement; set expanded_statement=@_expanded_statement; set expanded_statement := trim_wspace(expanded_statement); end; // delimiter ;
{ "pile_set_name": "Github" }
FC011: Missing README in markdown format: examples/README.md:1 FC031: Cookbook without metadata.rb file: examples/metadata.rb:1 FC064: Ensure issues_url is set in metadata: ./metadata.rb:1 FC065: Ensure source_url is set in metadata: ./metadata.rb:1 FC066: Ensure chef_version is set in metadata: ./metadata.rb:1 FC069: Ensure standardized license defined in metadata: ./metadata.rb:1 FC071: Missing LICENSE file: examples/LICENSE:1 FC078: Ensure cookbook shared under an OSI-approved open source license: ./metadata.rb:1 FC085: Resource using new_resource.updated_by_last_action to converge resource: ./providers/serve.rb:62
{ "pile_set_name": "Github" }
/** * @license * Copyright (c) 2014 The Polymer Project Authors. All rights reserved. * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt * Code distributed by Google as part of the polymer project is also * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt */ suite('MutationObserver', function() { 'use strict'; suite('childList', function() { var NodeList = ShadowDOMPolyfill.wrappers.NodeList; function makeNodeList(/* ...args */) { var nodeList = new NodeList; for (var i = 0; i < arguments.length; i++) { nodeList[i] = arguments[i]; } nodeList.length = i; return nodeList; } test('appendChild', function() { var div = document.createElement('div'); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); var a = document.createElement('a'); var b = document.createElement('b'); div.appendChild(a); div.appendChild(b); var records = observer.takeRecords(); assert.equal(records.length, 2); expectMutationRecord(records[0], { type: 'childList', target: div, addedNodes: [a] }); expectMutationRecord(records[1], { type: 'childList', target: div, addedNodes: [b], previousSibling: a }); }); test('insertBefore', function() { var div = document.createElement('div'); var a = document.createElement('a'); var b = document.createElement('b'); var c = document.createElement('c'); div.appendChild(a); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); div.insertBefore(b, a); div.insertBefore(c, a); var records = observer.takeRecords(); assert.equal(records.length, 2); expectMutationRecord(records[0], { type: 'childList', target: div, addedNodes: [b], nextSibling: a }); expectMutationRecord(records[1], { type: 'childList', target: div, addedNodes: [c], nextSibling: a, previousSibling: b }); }); test('replaceChild', function() { var div = document.createElement('div'); var a = document.createElement('a'); var b = document.createElement('b'); div.appendChild(a); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); div.replaceChild(b, a); var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: div, addedNodes: [b], removedNodes: [a] }); }); test('removeChild', function() { var div = document.createElement('div'); var a = div.appendChild(document.createElement('a')); var b = div.appendChild(document.createElement('b')); var c = div.appendChild(document.createElement('c')); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); div.removeChild(b); div.removeChild(a); var records = observer.takeRecords(); assert.equal(records.length, 2); expectMutationRecord(records[0], { type: 'childList', target: div, removedNodes: [b], nextSibling: c, previousSibling: a }); expectMutationRecord(records[1], { type: 'childList', target: div, removedNodes: [a], nextSibling: c }); }); test('Direct children', function() { var div = document.createElement('div'); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); var a = document.createElement('a'); var b = document.createElement('b'); div.appendChild(a); div.insertBefore(b, a); div.removeChild(b); var records = observer.takeRecords(); assert.equal(records.length, 3); expectMutationRecord(records[0], { type: 'childList', target: div, addedNodes: [a] }); expectMutationRecord(records[1], { type: 'childList', target: div, nextSibling: a, addedNodes: [b] }); expectMutationRecord(records[2], { type: 'childList', target: div, nextSibling: a, removedNodes: [b] }); }); test('subtree', function() { var div = document.createElement('div'); var child = div.appendChild(document.createElement('div')); var observer = new MutationObserver(function() {}); observer.observe(child, { childList: true }); var a = document.createTextNode('a'); var b = document.createTextNode('b'); child.appendChild(a); child.insertBefore(b, a); child.removeChild(b); var records = observer.takeRecords(); assert.equal(records.length, 3); expectMutationRecord(records[0], { type: 'childList', target: child, addedNodes: [a] }); expectMutationRecord(records[1], { type: 'childList', target: child, nextSibling: a, addedNodes: [b] }); expectMutationRecord(records[2], { type: 'childList', target: child, nextSibling: a, removedNodes: [b] }); }); test('both direct and subtree', function() { var div = document.createElement('div'); var child = div.appendChild(document.createElement('div')); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true, subtree: true }); observer.observe(child, { childList: true }); var a = document.createTextNode('a'); var b = document.createTextNode('b'); child.appendChild(a); div.appendChild(b); var records = observer.takeRecords(); assert.equal(records.length, 2); expectMutationRecord(records[0], { type: 'childList', target: child, addedNodes: [a] }); expectMutationRecord(records[1], { type: 'childList', target: div, addedNodes: [b], previousSibling: child }); }); test('Append multiple at once at the end', function() { var div = document.createElement('div'); var a = div.appendChild(document.createTextNode('a')); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); var df = document.createDocumentFragment(); var b = df.appendChild(document.createTextNode('b')); var c = df.appendChild(document.createTextNode('c')); var d = df.appendChild(document.createTextNode('d')); div.appendChild(df); var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: div, addedNodes: makeNodeList(b, c, d), removedNodes: makeNodeList(), previousSibling: a, nextSibling: null }); }); test('Append multiple at once at the front', function() { var div = document.createElement('div'); var a = div.appendChild(document.createTextNode('a')); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); var df = document.createDocumentFragment(); var b = df.appendChild(document.createTextNode('b')); var c = df.appendChild(document.createTextNode('c')); var d = df.appendChild(document.createTextNode('d')); div.insertBefore(df, a); var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: div, addedNodes: makeNodeList(b, c, d), removedNodes: makeNodeList(), previousSibling: null, nextSibling: a }); }); test('Append multiple at once in the middle', function() { var div = document.createElement('div'); var a = div.appendChild(document.createTextNode('a')); var b = div.appendChild(document.createTextNode('b')); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); var df = document.createDocumentFragment(); var c = df.appendChild(document.createTextNode('c')); var d = df.appendChild(document.createTextNode('d')); div.insertBefore(df, b); var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: div, addedNodes: makeNodeList(c, d), removedNodes: makeNodeList(), previousSibling: a, nextSibling: b }); }); test('Remove all children using innerHTML', function() { var div = document.createElement('div'); var a = div.appendChild(document.createTextNode('a')); var b = div.appendChild(document.createTextNode('b')); var c = div.appendChild(document.createTextNode('c')); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); div.innerHTML = ''; var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: div, addedNodes: makeNodeList(), removedNodes: makeNodeList(a, b, c), previousSibling: null, nextSibling: null }); }); test('Replace all children using innerHTML', function() { var div = document.createElement('div'); var a = div.appendChild(document.createTextNode('a')); var b = div.appendChild(document.createTextNode('b')); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); div.innerHTML = '<c></c><d></d>'; var c = div.firstChild; var d = div.lastChild; var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: div, addedNodes: makeNodeList(c, d), removedNodes: makeNodeList(a, b), previousSibling: null, nextSibling: null }); }); test('Remove all children using textContent', function() { var div = document.createElement('div'); var a = div.appendChild(document.createTextNode('a')); var b = div.appendChild(document.createTextNode('b')); var c = div.appendChild(document.createTextNode('c')); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); div.textContent = ''; var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: div, addedNodes: makeNodeList(), removedNodes: makeNodeList(a, b, c), previousSibling: null, nextSibling: null }); }); test('Replace all children using textContent', function() { var div = document.createElement('div'); var a = div.appendChild(document.createTextNode('a')); var b = div.appendChild(document.createTextNode('b')); var observer = new MutationObserver(function() {}); observer.observe(div, { childList: true }); div.textContent = 'text'; var text = div.firstChild; var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: div, addedNodes: makeNodeList(text), removedNodes: makeNodeList(a, b), previousSibling: null, nextSibling: null }); }); test('appendChild removal', function() { var a = document.createElement('a'); var b = document.createElement('b'); var c = document.createElement('c'); a.appendChild(c); var observerA = new MutationObserver(function() {}); observerA.observe(a, { childList: true }); var observerB = new MutationObserver(function() {}); observerB.observe(b, { childList: true }); b.appendChild(c); var recordsA = observerA.takeRecords(); assert.equal(recordsA.length, 1); expectMutationRecord(recordsA[0], { type: 'childList', target: a, removedNodes: [c] }); var recordsB = observerB.takeRecords(); assert.equal(recordsB.length, 1); expectMutationRecord(recordsB[0], { type: 'childList', target: b, addedNodes: [c] }); }); test('insertBefore removal', function() { var a = document.createElement('a'); var b = document.createElement('b'); var c = document.createElement('c'); var d = document.createElement('d'); var e = document.createElement('e'); a.appendChild(c); a.appendChild(d); b.appendChild(e); var observerA = new MutationObserver(function() {}); observerA.observe(a, { childList: true }); var observerB = new MutationObserver(function() {}); observerB.observe(b, { childList: true }); b.insertBefore(d, e); var recordsA = observerA.takeRecords(); assert.equal(recordsA.length, 1); expectMutationRecord(recordsA[0], { type: 'childList', target: a, removedNodes: [d], previousSibling: c }); var recordsB = observerB.takeRecords(); assert.equal(recordsB.length, 1); expectMutationRecord(recordsB[0], { type: 'childList', target: b, addedNodes: [d], nextSibling: e }); }); test('insertBefore removal document fragment', function() { var df = document.createDocumentFragment(); var a = df.appendChild(document.createElement('a')); var b = df.appendChild(document.createElement('b')); var c = df.appendChild(document.createElement('c')); var d = document.createElement('d'); var e = d.appendChild(document.createElement('e')); var f = d.appendChild(document.createElement('f')); var observerDf = new MutationObserver(function() {}); observerDf.observe(df, { childList: true }); var observerD = new MutationObserver(function() {}); observerD.observe(d, { childList: true }); d.insertBefore(df, f); var recordsDf = observerDf.takeRecords(); assert.equal(recordsDf.length, 1); expectMutationRecord(recordsDf[0], { type: 'childList', target: df, removedNodes: [a, b, c] }); var recordsD = observerD.takeRecords(); assert.equal(recordsD.length, 1); expectMutationRecord(recordsD[0], { type: 'childList', target: d, addedNodes: [a, b, c], previousSibling: e, nextSibling: f }); }); test('insertBefore removal document fragment (with shadow roots)', function() { var df = document.createDocumentFragment(); var a = df.appendChild(document.createElement('a')); var b = df.appendChild(document.createElement('b')); var c = df.appendChild(document.createElement('c')); var d = document.createElement('d'); var sr = d.createShadowRoot(); var e = sr.appendChild(document.createElement('e')); var f = sr.appendChild(document.createElement('f')); var observerDf = new MutationObserver(function() {}); observerDf.observe(df, { childList: true }); var observerSr = new MutationObserver(function() {}); observerSr.observe(sr, { childList: true }); sr.insertBefore(df, f); var recordsDf = observerDf.takeRecords(); assert.equal(recordsDf.length, 1); expectMutationRecord(recordsDf[0], { type: 'childList', target: df, removedNodes: [a, b, c] }); var recordsSr = observerSr.takeRecords(); assert.equal(recordsSr.length, 1); expectMutationRecord(recordsSr[0], { type: 'childList', target: sr, addedNodes: [a, b, c], previousSibling: e, nextSibling: f }); }); test('Check old siblings', function() { var a = document.createElement('a'); a.innerHTML = '<b></b><c></c>'; var b = a.firstChild; var c = a.lastChild; var d = document.createElement('d'); d.innerHTML = '<e></e><f></f><g></g>'; var e = d.firstChild; var f = e.nextSibling; var g = d.lastChild; var observer = new MutationObserver(function() {}); observer.observe(a, { childList: true }); var observer2 = new MutationObserver(function() {}); observer2.observe(d, { childList: true }); a.insertBefore(f, c); var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: a, addedNodes: [f], previousSibling: b, nextSibling: c }); records = observer2.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: d, removedNodes: [f], previousSibling: e, nextSibling: g }); }); test('insertAdjacentHTML beforebegin', function() { var a = document.createElement('a'); a.innerHTML = '<b></b><c></c>'; var b = a.firstChild; var c = a.lastChild; var observer = new MutationObserver(function() {}); observer.observe(a, { childList: true }); c.insertAdjacentHTML('beforebegin', '<d></d><e></e>'); assert.equal(a.innerHTML, '<b></b><d></d><e></e><c></c>'); var d = b.nextSibling; var e = d.nextSibling; var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: a, addedNodes: [d, e], previousSibling: b, nextSibling: c }); }); test('insertAdjacentHTML afterbegin', function() { var a = document.createElement('a'); a.innerHTML = '<b></b><c></c>'; var b = a.firstChild; var c = a.lastChild; var observer = new MutationObserver(function() {}); observer.observe(a, { childList: true }); a.insertAdjacentHTML('afterbegin', '<d></d><e></e>'); assert.equal(a.innerHTML, '<d></d><e></e><b></b><c></c>'); var d = a.firstChild; var e = d.nextSibling; var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: a, addedNodes: [d, e], previousSibling: null, nextSibling: b }); }); test('insertAdjacentHTML beforeend', function() { var a = document.createElement('a'); a.innerHTML = '<b></b><c></c>'; var b = a.firstChild; var c = a.lastChild; var observer = new MutationObserver(function() {}); observer.observe(a, { childList: true }); a.insertAdjacentHTML('beforeend', '<d></d><e></e>'); assert.equal(a.innerHTML, '<b></b><c></c><d></d><e></e>'); var d = c.nextSibling; var e = d.nextSibling; var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: a, addedNodes: [d, e], previousSibling: c, nextSibling: null }); }); test('insertAdjacentHTML afterend', function() { var a = document.createElement('a'); a.innerHTML = '<b></b><c></c>'; var b = a.firstChild; var c = a.lastChild; var observer = new MutationObserver(function() {}); observer.observe(a, { childList: true }); b.insertAdjacentHTML('afterend', '<d></d><e></e>'); assert.equal(a.innerHTML, '<b></b><d></d><e></e><c></c>'); var d = b.nextSibling; var e = d.nextSibling; var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: a, addedNodes: [d, e], previousSibling: b, nextSibling: c }); }); test('outerHTML', function() { var a = document.createElement('a'); a.innerHTML = '<b></b><c></c><d></d>'; var b = a.firstChild; var c = b.nextSibling; var d = a.lastChild; var sr = a.createShadowRoot(); a.offsetHeight; var observer = new MutationObserver(function() {}); observer.observe(a, { childList: true }); c.outerHTML = '<e></e><f></f>'; assert.equal(a.innerHTML, '<b></b><e></e><f></f><d></d>'); var e = b.nextSibling; var f = e.nextSibling; var records = observer.takeRecords(); assert.equal(records.length, 1); expectMutationRecord(records[0], { type: 'childList', target: a, addedNodes: [e, f], removedNodes: [c], previousSibling: b, nextSibling: d }); }); }); });
{ "pile_set_name": "Github" }
// -*- C++ -*- //===-------------------------- cerrno ------------------------------------===// // // The LLVM Compiler Infrastructure // // This file is dual licensed under the MIT and the University of Illinois Open // Source Licenses. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// #ifndef _LIBCPP_CERRNO #define _LIBCPP_CERRNO /* cerrno synopsis Macros: EDOM EILSEQ // C99 ERANGE errno */ #include <__config> #include <errno.h> #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER) #pragma GCC system_header #endif #if !defined(EOWNERDEAD) || !defined(ENOTRECOVERABLE) #ifdef ELAST const int __elast1 = ELAST+1; const int __elast2 = ELAST+2; #else const int __elast1 = 104; const int __elast2 = 105; #endif #ifdef ENOTRECOVERABLE #define EOWNERDEAD __elast1 #ifdef ELAST #undef ELAST #define ELAST EOWNERDEAD #endif #elif defined(EOWNERDEAD) #define ENOTRECOVERABLE __elast1 #ifdef ELAST #undef ELAST #define ELAST ENOTRECOVERABLE #endif #else // defined(EOWNERDEAD) #define EOWNERDEAD __elast1 #define ENOTRECOVERABLE __elast2 #ifdef ELAST #undef ELAST #define ELAST ENOTRECOVERABLE #endif #endif // defined(EOWNERDEAD) #endif // !defined(EOWNERDEAD) || !defined(ENOTRECOVERABLE) // supply errno values likely to be missing, particularly on Windows #ifndef EAFNOSUPPORT #define EAFNOSUPPORT 9901 #endif #ifndef EADDRINUSE #define EADDRINUSE 9902 #endif #ifndef EADDRNOTAVAIL #define EADDRNOTAVAIL 9903 #endif #ifndef EISCONN #define EISCONN 9904 #endif #ifndef EBADMSG #define EBADMSG 9905 #endif #ifndef ECONNABORTED #define ECONNABORTED 9906 #endif #ifndef EALREADY #define EALREADY 9907 #endif #ifndef ECONNREFUSED #define ECONNREFUSED 9908 #endif #ifndef ECONNRESET #define ECONNRESET 9909 #endif #ifndef EDESTADDRREQ #define EDESTADDRREQ 9910 #endif #ifndef EHOSTUNREACH #define EHOSTUNREACH 9911 #endif #ifndef EIDRM #define EIDRM 9912 #endif #ifndef EMSGSIZE #define EMSGSIZE 9913 #endif #ifndef ENETDOWN #define ENETDOWN 9914 #endif #ifndef ENETRESET #define ENETRESET 9915 #endif #ifndef ENETUNREACH #define ENETUNREACH 9916 #endif #ifndef ENOBUFS #define ENOBUFS 9917 #endif #ifndef ENOLINK #define ENOLINK 9918 #endif #ifndef ENODATA #define ENODATA 9919 #endif #ifndef ENOMSG #define ENOMSG 9920 #endif #ifndef ENOPROTOOPT #define ENOPROTOOPT 9921 #endif #ifndef ENOSR #define ENOSR 9922 #endif #ifndef ENOTSOCK #define ENOTSOCK 9923 #endif #ifndef ENOSTR #define ENOSTR 9924 #endif #ifndef ENOTCONN #define ENOTCONN 9925 #endif #ifndef ENOTSUP #define ENOTSUP 9926 #endif #ifndef ECANCELED #define ECANCELED 9927 #endif #ifndef EINPROGRESS #define EINPROGRESS 9928 #endif #ifndef EOPNOTSUPP #define EOPNOTSUPP 9929 #endif #ifndef EWOULDBLOCK #define EWOULDBLOCK 9930 #endif #ifndef EOWNERDEAD #define EOWNERDEAD 9931 #endif #ifndef EPROTO #define EPROTO 9932 #endif #ifndef EPROTONOSUPPORT #define EPROTONOSUPPORT 9933 #endif #ifndef ENOTRECOVERABLE #define ENOTRECOVERABLE 9934 #endif #ifndef ETIME #define ETIME 9935 #endif #ifndef ETXTBSY #define ETXTBSY 9936 #endif #ifndef ETIMEDOUT #define ETIMEDOUT 9938 #endif #ifndef ELOOP #define ELOOP 9939 #endif #ifndef EOVERFLOW #define EOVERFLOW 9940 #endif #ifndef EPROTOTYPE #define EPROTOTYPE 9941 #endif #ifndef ENOSYS #define ENOSYS 9942 #endif #ifndef EINVAL #define EINVAL 9943 #endif #ifndef ERANGE #define ERANGE 9944 #endif #ifndef EILSEQ #define EILSEQ 9945 #endif // Windows Mobile doesn't appear to define these: #ifndef E2BIG #define E2BIG 9946 #endif #ifndef EDOM #define EDOM 9947 #endif #ifndef EFAULT #define EFAULT 9948 #endif #ifndef EBADF #define EBADF 9949 #endif #ifndef EPIPE #define EPIPE 9950 #endif #ifndef EXDEV #define EXDEV 9951 #endif #ifndef EBUSY #define EBUSY 9952 #endif #ifndef ENOTEMPTY #define ENOTEMPTY 9953 #endif #ifndef ENOEXEC #define ENOEXEC 9954 #endif #ifndef EEXIST #define EEXIST 9955 #endif #ifndef EFBIG #define EFBIG 9956 #endif #ifndef ENAMETOOLONG #define ENAMETOOLONG 9957 #endif #ifndef ENOTTY #define ENOTTY 9958 #endif #ifndef EINTR #define EINTR 9959 #endif #ifndef ESPIPE #define ESPIPE 9960 #endif #ifndef EIO #define EIO 9961 #endif #ifndef EISDIR #define EISDIR 9962 #endif #ifndef ECHILD #define ECHILD 9963 #endif #ifndef ENOLCK #define ENOLCK 9964 #endif #ifndef ENOSPC #define ENOSPC 9965 #endif #ifndef ENXIO #define ENXIO 9966 #endif #ifndef ENODEV #define ENODEV 9967 #endif #ifndef ENOENT #define ENOENT 9968 #endif #ifndef ESRCH #define ESRCH 9969 #endif #ifndef ENOTDIR #define ENOTDIR 9970 #endif #ifndef ENOMEM #define ENOMEM 9971 #endif #ifndef EPERM #define EPERM 9972 #endif #ifndef EACCES #define EACCES 9973 #endif #ifndef EROFS #define EROFS 9974 #endif #ifndef EDEADLK #define EDEADLK 9975 #endif #ifndef EAGAIN #define EAGAIN 9976 #endif #ifndef ENFILE #define ENFILE 9977 #endif #ifndef EMFILE #define EMFILE 9978 #endif #ifndef EMLINK #define EMLINK 9979 #endif #endif // _LIBCPP_CERRNO
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-07-07 03:51 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('activity', '0018_auto_20180706_1708'), ] operations = [ migrations.AlterField( model_name='activityimagesmodel', name='image', field=models.ImageField(blank=True, null=True, upload_to='ActivityImagesModel/%y/%d/c2a097c6e09641179ce7cfc80bace63e', verbose_name='活动图片'), ), migrations.AlterField( model_name='activitymodel', name='audit', field=models.CharField(choices=[('0', '审核中'), ('1', '审核通过')], default=0, max_length=1, verbose_name='审核状态'), ), migrations.AlterField( model_name='activitymodel', name='cover_image', field=models.ImageField(blank=True, null=True, upload_to='Activity/%y/%d/c2a097c6e09641179ce7cfc80bace63e', verbose_name='封面图片'), ), migrations.AlterField( model_name='activitymodel', name='groupcode', field=models.ImageField(blank=True, null=True, upload_to='Activity/qr/%y/%d/c2a097c6e09641179ce7cfc80bace63e', verbose_name='群二维码'), ), migrations.AlterField( model_name='activitytypemodel', name='cover_image', field=models.ImageField(blank=True, null=True, upload_to='ActivityTypeModel/%y/%d/c2a097c6e09641179ce7cfc80bace63e', verbose_name='类别图片'), ), migrations.AlterField( model_name='slidemodels', name='image', field=models.ImageField(blank=True, null=True, upload_to='SlideModels/%y/%d/c2a097c6e09641179ce7cfc80bace63e', verbose_name='幻灯片图片'), ), ]
{ "pile_set_name": "Github" }
import Vue from 'vue'; import { resetStyled, expectCSSMatches } from './utils' let styled describe('with styles', () => { /** * Make sure the setup is the same for every test */ beforeEach(() => { styled = resetStyled() }) it('should append a style', () => { const rule = 'color: blue;' const Comp = styled.div` ${rule} ` const vm = new Vue(Comp).$mount() expectCSSMatches('.a {color: blue;}') }) it('should append multiple styles', () => { const rule1 = 'color: blue;' const rule2 = 'background: red;' const Comp = styled.div` ${rule1} ${rule2} ` const vm = new Vue(Comp).$mount() expectCSSMatches('.a {color: blue;background: red;}') }) it('should handle inline style objects', () => { const rule1 = { backgroundColor: 'blue', } const Comp = styled.div` ${rule1} ` const vm = new Vue(Comp).$mount() expectCSSMatches('.a {background-color: blue;}') }) it('should handle inline style objects with media queries', () => { const rule1 = { backgroundColor: 'blue', '@media screen and (min-width: 250px)': { backgroundColor: 'red', }, } const Comp = styled.div` ${rule1} ` const vm = new Vue(Comp).$mount() expectCSSMatches('.a {background-color: blue;}@media screen and (min-width: 250px) {.a {background-color: red;}}') }) it('should handle inline style objects with pseudo selectors', () => { const rule1 = { backgroundColor: 'blue', '&:hover': { textDecoration: 'underline', }, } const Comp = styled.div` ${rule1} ` const vm = new Vue(Comp).$mount() expectCSSMatches('.a {background-color: blue;}.a:hover {-webkit-text-decoration: underline;text-decoration: underline;}') }) it('should handle inline style objects with pseudo selectors', () => { const rule1 = { backgroundColor: 'blue', '&:hover': { textDecoration: 'underline', }, } const Comp = styled.div` ${rule1} ` const vm = new Vue(Comp).$mount() expectCSSMatches('.a {background-color: blue;}.a:hover {-webkit-text-decoration: underline;text-decoration: underline;}') }) it('should handle inline style objects with nesting', () => { const rule1 = { backgroundColor: 'blue', '> h1': { color: 'white', }, } const Comp = styled.div` ${rule1} ` const vm = new Vue(Comp).$mount() expectCSSMatches('.a {background-color: blue;}.a > h1 {color: white;}') }) it('should handle inline style objects with contextual selectors', () => { const rule1 = { backgroundColor: 'blue', 'html.something &': { color: 'white', }, } const Comp = styled.div` ${rule1} ` const vm = new Vue(Comp).$mount() expectCSSMatches('.a {background-color: blue;}html.something .a {color: white;}') }) it('should inject styles of multiple components', () => { const firstRule = 'background: blue;' const secondRule = 'background: red;' const FirstComp = styled.div` ${firstRule} ` const SecondComp = styled.div` ${secondRule} ` const vm1 = new Vue(FirstComp).$mount() const vm2 = new Vue(SecondComp).$mount() expectCSSMatches('.a {background: blue;} .b {background: red;}') }) it('should inject styles of multiple components based on creation, not rendering order', () => { const firstRule = 'content: "first rule";' const secondRule = 'content: "second rule";' const FirstComp = styled.div` ${firstRule} ` const SecondComp = styled.div` ${secondRule} ` // Switch rendering order, shouldn't change injection order const vm2 = new Vue(SecondComp).$mount() const vm1 = new Vue(FirstComp).$mount() // Classes _do_ get generated in the order of rendering but that's ok expectCSSMatches(` .b {content: "first rule";} .a {content: "second rule";} `) }) it('should strip a JS-style (invalid) comment in the styles', () => { const comment = '// This is an invalid comment' const rule = 'color: blue;' const Comp = styled.div` ${comment} ${rule} ` const vm = new Vue(Comp).$mount() expectCSSMatches(` .a {color: blue;} `) }) })
{ "pile_set_name": "Github" }
# glibc2.m4 serial 2 dnl Copyright (C) 2000-2002, 2004, 2008-2010 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. # Test for the GNU C Library, version 2.0 or newer. # From Bruno Haible. AC_DEFUN([gt_GLIBC2], [ AC_CACHE_CHECK([whether we are using the GNU C Library 2 or newer], [ac_cv_gnu_library_2], [AC_EGREP_CPP([Lucky GNU user], [ #include <features.h> #ifdef __GNU_LIBRARY__ #if (__GLIBC__ >= 2) Lucky GNU user #endif #endif ], [ac_cv_gnu_library_2=yes], [ac_cv_gnu_library_2=no]) ] ) AC_SUBST([GLIBC2]) GLIBC2="$ac_cv_gnu_library_2" ] )
{ "pile_set_name": "Github" }
// RUN: %clang_cc1 -verify -fopenmp %s -Wuninitialized // RUN: %clang_cc1 -verify -fopenmp-simd %s -Wuninitialized typedef void **omp_allocator_handle_t; extern const omp_allocator_handle_t omp_default_mem_alloc; extern const omp_allocator_handle_t omp_large_cap_mem_alloc; extern const omp_allocator_handle_t omp_const_mem_alloc; extern const omp_allocator_handle_t omp_high_bw_mem_alloc; extern const omp_allocator_handle_t omp_low_lat_mem_alloc; extern const omp_allocator_handle_t omp_cgroup_mem_alloc; extern const omp_allocator_handle_t omp_pteam_mem_alloc; extern const omp_allocator_handle_t omp_thread_mem_alloc; void foo() { } bool foobool(int argc) { return argc; } void xxx(int argc) { int fp; // expected-note {{initialize the variable 'fp' to silence this warning}} #pragma omp parallel master taskloop simd firstprivate(fp) // expected-warning {{variable 'fp' is uninitialized when used here}} for (int i = 0; i < 10; ++i) ; } struct S1; // expected-note 2 {{declared here}} expected-note 2 {{forward declaration of 'S1'}} extern S1 a; class S2 { mutable int a; public: S2() : a(0) {} S2(const S2 &s2) : a(s2.a) {} static float S2s; static const float S2sc; }; const float S2::S2sc = 0; const S2 b; const S2 ba[5]; class S3 { int a; S3 &operator=(const S3 &s3); public: S3() : a(0) {} // expected-note 2 {{candidate constructor not viable: requires 0 arguments, but 1 was provided}} S3(S3 &s3) : a(s3.a) {} // expected-note 2 {{candidate constructor not viable: 1st argument ('const S3') would lose const qualifier}} }; const S3 c; const S3 ca[5]; extern const int f; class S4 { int a; S4(); S4(const S4 &s4); // expected-note 2 {{implicitly declared private here}} public: S4(int v) : a(v) {} }; class S5 { int a; S5(const S5 &s5) : a(s5.a) {} // expected-note 4 {{implicitly declared private here}} public: S5() : a(0) {} S5(int v) : a(v) {} }; class S6 { int a; S6() : a(0) {} public: S6(const S6 &s6) : a(s6.a) {} S6(int v) : a(v) {} }; S3 h; #pragma omp threadprivate(h) // expected-note 2 {{defined as threadprivate or thread local}} template <class I, class C> int foomain(int argc, char **argv) { I e(4); C g(5); int i, z; int &j = i; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate // expected-error {{expected '(' after 'firstprivate'}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate( // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate() // expected-error {{expected expression}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(argc // expected-error {{expected ')'}} expected-note {{to match this '('}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(argc, // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(argc > 0 ? argv[1] : argv[2]) // expected-error {{expected variable name}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd allocate(omp_thread_mem_alloc: argc) firstprivate(argc) // expected-warning {{allocator with the 'thread' trait access has unspecified behavior on 'parallel master taskloop simd' directive}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(S1) // expected-error {{'S1' does not refer to a value}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(a, b) // expected-error {{firstprivate variable with incomplete type 'S1'}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(argv[1]) // expected-error {{expected variable name}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(z, e, g) // expected-error {{calling a private constructor of class 'S4'}} expected-error {{calling a private constructor of class 'S5'}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(h) // expected-error {{threadprivate or thread local variable cannot be firstprivate}} for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel { int v = 0; int i; #pragma omp parallel master taskloop simd firstprivate(i) for (int k = 0; k < argc; ++k) { i = k; v += i; } } #pragma omp parallel shared(i) #pragma omp parallel private(i) #pragma omp parallel master taskloop simd firstprivate(j) for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(i) for (int k = 0; k < argc; ++k) ++k; #pragma omp parallel #pragma omp parallel master taskloop simd lastprivate(g) firstprivate(g) // expected-error {{calling a private constructor of class 'S5'}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel private(i) #pragma omp parallel master taskloop simd firstprivate(i) // expected-note 2 {{defined as firstprivate}} for (i = 0; i < argc; ++i) // expected-error 2 {{loop iteration variable in the associated loop of 'omp parallel master taskloop simd' directive may not be firstprivate, predetermined as linear}} foo(); #pragma omp parallel reduction(+ : i) // expected-note {{defined as reduction}} #pragma omp parallel master taskloop simd firstprivate(i) // expected-note {{defined as firstprivate}} expected-error {{argument of a reduction clause of a parallel construct must not appear in a firstprivate clause on a task construct}} for (i = 0; i < argc; ++i) // expected-error {{loop iteration variable in the associated loop of 'omp parallel master taskloop simd' directive may not be firstprivate, predetermined as linear}} foo(); return 0; } void bar(S4 a[2]) { #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(a) for (int i = 0; i < 2; ++i) foo(); } namespace A { double x; #pragma omp threadprivate(x) // expected-note {{defined as threadprivate or thread local}} } namespace B { using A::x; } int main(int argc, char **argv) { const int d = 5; const int da[5] = {0}; S4 e(4); S5 g(5); S3 m; S6 n(2); int i; int &j = i; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate // expected-error {{expected '(' after 'firstprivate'}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate( // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate() // expected-error {{expected expression}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(argc // expected-error {{expected ')'}} expected-note {{to match this '('}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(argc, // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(argc > 0 ? argv[1] : argv[2]) // expected-error {{expected variable name}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(argc) allocate , allocate(, allocate(omp_default , allocate(omp_default_mem_alloc, allocate(omp_default_mem_alloc:, allocate(omp_default_mem_alloc: argc, allocate(omp_default_mem_alloc: argv), allocate(argv) // expected-error {{expected '(' after 'allocate'}} expected-error 2 {{expected expression}} expected-error 2 {{expected ')'}} expected-error {{use of undeclared identifier 'omp_default'}} expected-note 2 {{to match this '('}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(S1) // expected-error {{'S1' does not refer to a value}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(a, b, c, d, f) // expected-error {{firstprivate variable with incomplete type 'S1'}} expected-error {{no matching constructor for initialization of 'S3'}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(argv[1]) // expected-error {{expected variable name}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(2 * 2) // expected-error {{expected variable name}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(ba) // OK for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(ca) // expected-error {{no matching constructor for initialization of 'S3'}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(da) // OK for (i = 0; i < argc; ++i) foo(); int xa; #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(xa) // OK for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(S2::S2s) // OK for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(S2::S2sc) // OK for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd safelen(5) for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(e, g) // expected-error {{calling a private constructor of class 'S4'}} expected-error {{calling a private constructor of class 'S5'}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(m) // OK for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(h) // expected-error {{threadprivate or thread local variable cannot be firstprivate}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd private(xa), firstprivate(xa) // expected-error {{private variable cannot be firstprivate}} expected-note {{defined as private}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(i) // expected-note {{defined as firstprivate}} for (i = 0; i < argc; ++i) // expected-error {{loop iteration variable in the associated loop of 'omp parallel master taskloop simd' directive may not be firstprivate, predetermined as linear}} foo(); #pragma omp parallel shared(xa) #pragma omp parallel master taskloop simd firstprivate(xa) // OK: may be firstprivate for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(j) for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd lastprivate(g) firstprivate(g) // expected-error {{calling a private constructor of class 'S5'}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel #pragma omp parallel master taskloop simd lastprivate(n) firstprivate(n) // OK for (i = 0; i < argc; ++i) foo(); #pragma omp parallel { int v = 0; int i; #pragma omp parallel master taskloop simd firstprivate(i) for (int k = 0; k < argc; ++k) { i = k; v += i; } } #pragma omp parallel private(i) #pragma omp parallel master taskloop simd firstprivate(i) // expected-note {{defined as firstprivate}} for (i = 0; i < argc; ++i) // expected-error {{loop iteration variable in the associated loop of 'omp parallel master taskloop simd' directive may not be firstprivate, predetermined as linear}} foo(); #pragma omp parallel reduction(+ : i) // expected-note {{defined as reduction}} #pragma omp parallel master taskloop simd firstprivate(i) //expected-error {{argument of a reduction clause of a parallel construct must not appear in a firstprivate clause on a task construct}} for (i = 0; i < argc; ++i) foo(); #pragma omp parallel master taskloop simd firstprivate(i) //expected-note {{defined as firstprivate}} for (i = 0; i < argc; ++i) // expected-error {{loop iteration variable in the associated loop of 'omp parallel master taskloop simd' directive may not be firstprivate, predetermined as linear}} foo(); #pragma omp parallel #pragma omp parallel master taskloop simd firstprivate(B::x) // expected-error {{threadprivate or thread local variable cannot be firstprivate}} for (i = 0; i < argc; ++i) foo(); static int si; #pragma omp parallel master taskloop simd firstprivate(si) // OK for (i = 0; i < argc; ++i) si = i + 1; return foomain<S4, S5>(argc, argv); // expected-note {{in instantiation of function template specialization 'foomain<S4, S5>' requested here}} }
{ "pile_set_name": "Github" }
// RUN: %target-swift-ide-test -print-usrs -source-filename %s | %FileCheck %s -strict-whitespace // CHECK: [[@LINE+1]]:6 s:14swift_ide_test11InvalidEnumO{{$}} enum InvalidEnum { case } func qualifiedModuleName() { let a = Swift.Array<Int>() _ = a } func amazingCode() { let a = Swift let b = (Swift, Swift) _ = a, _ = b }
{ "pile_set_name": "Github" }
# Add your own tasks in files placed in lib/tasks ending in .rake, # for example lib/tasks/capistrano.rake, and they will automatically be available to Rake. require File.expand_path('../config/application', __FILE__) Rails.application.load_tasks
{ "pile_set_name": "Github" }
<?php namespace Jane\OpenApi2\Tests\Expected\Normalizer; use Jane\JsonSchemaRuntime\Reference; use Jane\OpenApi2\Tests\Expected\Runtime\Normalizer\CheckArray; use Symfony\Component\Serializer\Exception\InvalidArgumentException; use Symfony\Component\Serializer\Normalizer\DenormalizerAwareInterface; use Symfony\Component\Serializer\Normalizer\DenormalizerAwareTrait; use Symfony\Component\Serializer\Normalizer\DenormalizerInterface; use Symfony\Component\Serializer\Normalizer\NormalizerAwareInterface; use Symfony\Component\Serializer\Normalizer\NormalizerAwareTrait; use Symfony\Component\Serializer\Normalizer\NormalizerInterface; class ProjectAssignmentNormalizer implements DenormalizerInterface, NormalizerInterface, DenormalizerAwareInterface, NormalizerAwareInterface { use DenormalizerAwareTrait; use NormalizerAwareTrait; use CheckArray; public function supportsDenormalization($data, $type, $format = null) { return $type === 'Jane\\OpenApi2\\Tests\\Expected\\Model\\ProjectAssignment'; } public function supportsNormalization($data, $format = null) { return is_object($data) && get_class($data) === 'Jane\\OpenApi2\\Tests\\Expected\\Model\\ProjectAssignment'; } public function denormalize($data, $class, $format = null, array $context = array()) { if (isset($data['$ref'])) { return new Reference($data['$ref'], $context['document-origin']); } if (isset($data['$recursiveRef'])) { return new Reference($data['$recursiveRef'], $context['document-origin']); } $object = new \Jane\OpenApi2\Tests\Expected\Model\ProjectAssignment(); if (null === $data) { return $object; } if (\array_key_exists('id', $data)) { $object->setId($data['id']); } if (\array_key_exists('is_active', $data)) { $object->setIsActive($data['is_active']); } if (\array_key_exists('is_project_manager', $data)) { $object->setIsProjectManager($data['is_project_manager']); } if (\array_key_exists('use_default_rates', $data)) { $object->setUseDefaultRates($data['use_default_rates']); } if (\array_key_exists('hourly_rate', $data)) { $object->setHourlyRate($data['hourly_rate']); } if (\array_key_exists('budget', $data)) { $object->setBudget($data['budget']); } if (\array_key_exists('created_at', $data)) { $object->setCreatedAt(\DateTime::createFromFormat('Y-m-d\\TH:i:sP', $data['created_at'])); } if (\array_key_exists('updated_at', $data)) { $object->setUpdatedAt(\DateTime::createFromFormat('Y-m-d\\TH:i:sP', $data['updated_at'])); } if (\array_key_exists('project', $data)) { $object->setProject($this->denormalizer->denormalize($data['project'], 'Jane\\OpenApi2\\Tests\\Expected\\Model\\ProjectAssignmentProject', 'json', $context)); } if (\array_key_exists('client', $data)) { $object->setClient($this->denormalizer->denormalize($data['client'], 'Jane\\OpenApi2\\Tests\\Expected\\Model\\ProjectAssignmentClient', 'json', $context)); } if (\array_key_exists('task_assignments', $data)) { $values = array(); foreach ($data['task_assignments'] as $value) { $values[] = $this->denormalizer->denormalize($value, 'Jane\\OpenApi2\\Tests\\Expected\\Model\\TaskAssignment', 'json', $context); } $object->setTaskAssignments($values); } return $object; } public function normalize($object, $format = null, array $context = array()) { $data = array(); if (null !== $object->getId()) { $data['id'] = $object->getId(); } if (null !== $object->getIsActive()) { $data['is_active'] = $object->getIsActive(); } if (null !== $object->getIsProjectManager()) { $data['is_project_manager'] = $object->getIsProjectManager(); } if (null !== $object->getUseDefaultRates()) { $data['use_default_rates'] = $object->getUseDefaultRates(); } if (null !== $object->getHourlyRate()) { $data['hourly_rate'] = $object->getHourlyRate(); } if (null !== $object->getBudget()) { $data['budget'] = $object->getBudget(); } if (null !== $object->getCreatedAt()) { $data['created_at'] = $object->getCreatedAt()->format('Y-m-d\\TH:i:sP'); } if (null !== $object->getUpdatedAt()) { $data['updated_at'] = $object->getUpdatedAt()->format('Y-m-d\\TH:i:sP'); } if (null !== $object->getProject()) { $data['project'] = $this->normalizer->normalize($object->getProject(), 'json', $context); } if (null !== $object->getClient()) { $data['client'] = $this->normalizer->normalize($object->getClient(), 'json', $context); } if (null !== $object->getTaskAssignments()) { $values = array(); foreach ($object->getTaskAssignments() as $value) { $values[] = $this->normalizer->normalize($value, 'json', $context); } $data['task_assignments'] = $values; } return $data; } }
{ "pile_set_name": "Github" }
# SPDX-License-Identifier: GPL-2.0 # EP93xx Platform Support snd-soc-ep93xx-objs := ep93xx-pcm.o snd-soc-ep93xx-i2s-objs := ep93xx-i2s.o snd-soc-ep93xx-ac97-objs := ep93xx-ac97.o obj-$(CONFIG_SND_EP93XX_SOC) += snd-soc-ep93xx.o obj-$(CONFIG_SND_EP93XX_SOC_I2S) += snd-soc-ep93xx-i2s.o obj-$(CONFIG_SND_EP93XX_SOC_AC97) += snd-soc-ep93xx-ac97.o # EP93XX Machine Support snd-soc-snappercl15-objs := snappercl15.o snd-soc-simone-objs := simone.o snd-soc-edb93xx-objs := edb93xx.o obj-$(CONFIG_SND_EP93XX_SOC_SNAPPERCL15) += snd-soc-snappercl15.o obj-$(CONFIG_SND_EP93XX_SOC_SIMONE) += snd-soc-simone.o obj-$(CONFIG_SND_EP93XX_SOC_EDB93XX) += snd-soc-edb93xx.o
{ "pile_set_name": "Github" }
namespace Fulma.Elmish.DatePicker open Fable.React open Fable.React.Props open Fulma open Fable.FontAwesome open Fulma.Extensions.Wikiki open Types open System module View = let isCalendarDisplayed state = state.InputFocused && not (state.AutoClose && state.ForceClose) let onFocus (config : Config<'Msg>) state currentDate dispatch = // If the calendar is already displayed don't dispatch a new onFocus message // This is needed because we register to both onClick and onFocus event if not(isCalendarDisplayed state) then config.OnChange ({ state with InputFocused = true ForceClose = false }, currentDate) |> dispatch let onChange (config : Config<'Msg>) state currentDate dispatch = config.OnChange (state, currentDate) |> dispatch let onDeleteClick (config : Config<'Msg>) state (currentDate : DateTime option) dispatch = if currentDate.IsSome then config.OnChange (state, None) |> dispatch let calendar (config : Config<'Msg>) state (currentDate : DateTime option) dispatch = let isCurrentMonth (date : DateTime) = state.ReferenceDate.Month = date.Month let isToday (dateToCompare : DateTime) = let d = DateTime.UtcNow dateToCompare.Day = d.Day && dateToCompare.Month = d.Month && dateToCompare.Year = d.Year let isSelected (dateToCompare : DateTime) = match currentDate with | Some date -> date.Day = dateToCompare.Day && date.Month = dateToCompare.Month && date.Year = dateToCompare.Year | None -> false let firstDateCalendar = let firstOfMonth = DateTime(state.ReferenceDate.Year, state.ReferenceDate.Month, 1) let weekOffset = (7 + (int firstOfMonth.DayOfWeek) - (int config.Local.Date.FirstDayOfTheWeek)) % 7 firstOfMonth.AddDays(float -weekOffset) let header = [0..6] |> List.splitAt (int firstDateCalendar.DayOfWeek) |> (fun (first, second) -> second @ first) |> List.map (fun intDayOfWeek -> let dayOfWeek = enum<System.DayOfWeek> intDayOfWeek let name = match dayOfWeek with | DayOfWeek.Monday -> config.Local.Date.AbbreviatedDays.Monday | DayOfWeek.Tuesday -> config.Local.Date.AbbreviatedDays.Tuesday | DayOfWeek.Wednesday -> config.Local.Date.AbbreviatedDays.Wednesday | DayOfWeek.Thursday -> config.Local.Date.AbbreviatedDays.Thursday | DayOfWeek.Friday -> config.Local.Date.AbbreviatedDays.Friday | DayOfWeek.Saturday -> config.Local.Date.AbbreviatedDays.Saturday | DayOfWeek.Sunday -> config.Local.Date.AbbreviatedDays.Sunday | x -> failwithf "not a valid day of week: %A" x Calendar.Date.date [ ] [ str name ]) let body = seq { for dayRank = 0 to 41 do // We have 42 dates to show let date = firstDateCalendar.AddDays(float dayRank) yield Calendar.Date.date [ Calendar.Date.IsDisabled (not (isCurrentMonth date)) ] [ Calendar.Date.item [ Calendar.Date.Item.IsToday (isToday date) Calendar.Date.Item.IsActive (isSelected date) Calendar.Date.Item.Props [ OnClick (fun _ -> let newState = { state with ForceClose = true } onChange config newState (Some date) dispatch) ] ] [ str (date.Day.ToString()) ] ] } |> Seq.toList Box.box' [ Common.Props [ Style config.DatePickerStyle ] ] [ Calendar.calendar [ Calendar.Props [ OnMouseDown (fun ev -> ev.preventDefault()) ] ] [ Calendar.Nav.nav [ ] [ Calendar.Nav.left [ ] [ Button.button [ Button.IsLink Button.OnClick (fun _ -> let newState = { state with ReferenceDate = state.ReferenceDate.AddMonths(-1) ForceClose = false } onChange config newState currentDate dispatch) ] [ Icon.icon [ ] [ Fa.i [ Fa.Solid.ChevronLeft ] [ ] ] ] ] str (Date.Format.localFormat config.Local "MMMM yyyy" state.ReferenceDate) Calendar.Nav.right [ ] [ Button.button [ Button.IsLink Button.OnClick (fun _ -> let newState = { state with ReferenceDate = state.ReferenceDate.AddMonths(1) ForceClose = false } onChange config newState currentDate dispatch) ] [ Icon.icon [ ] [ Fa.i [ Fa.Solid.ChevronRight ] [ ] ] ] ] ] div [ ] [ Calendar.header [ ] header Calendar.body [ ] body ] ] ] let root<'Msg> (config: Config<'Msg>) (state: State) (currentDate: DateTime option) dispatch = let dateTxt = match currentDate with | Some date -> Date.Format.localFormat config.Local config.Local.Date.DefaultFormat date | None -> "" div [ ] [ yield Field.body [] [ Field.div (if state.ShowDeleteButton then [Field.HasAddons] else []) [ yield Control.p [ Control.IsExpanded ] [ Input.text [ Input.Props [ Value dateTxt OnFocus (fun _ -> onFocus config state currentDate dispatch) OnClick (fun _ -> onFocus config state currentDate dispatch) // TODO: Implement something to trigger onChange only if the value actually change OnBlur (fun _ -> let newState = { state with InputFocused = false } onChange config newState currentDate dispatch) ]; ] ] if state.ShowDeleteButton then yield Control.p [] [ Button.a [ Button.OnClick(fun _ -> onDeleteClick config state currentDate dispatch) ] [ Icon.icon [ ] [ Fa.i [ Fa.Solid.Times ] [ ] ] ] ] ] ] if isCalendarDisplayed state then yield calendar config state currentDate dispatch ]
{ "pile_set_name": "Github" }
#summary About the MetaCat extension to Cat. #labels Glossary = MetaCat = MetaCat is the name of the extension of Cat language with [Macros macros]. MetaCat is based on the paper [http://www.latrobe.edu.au/philosophy/phimvt/joy/j07rrs.html A Rewriting System for Joy] by Manfred von Thun. [something] More information is available at http://www.cat-language.com/metacat.html
{ "pile_set_name": "Github" }
package main import ( "io/ioutil" "os" "path/filepath" "testing" "github.com/ahmetb/kubectx/internal/testutil" ) func Test_readLastContext_nonExistingFile(t *testing.T) { s, err := readLastContext(filepath.FromSlash("/non/existing/file")) if err != nil { t.Fatal(err) } if s != "" { t.Fatalf("expected empty string; got=%q", s) } } func Test_readLastContext(t *testing.T) { path, cleanup := testutil.TempFile(t, "foo") defer cleanup() s, err := readLastContext(path) if err != nil { t.Fatal(err) } if expected := "foo"; s != expected { t.Fatalf("expected=%q; got=%q", expected, s) } } func Test_writeLastContext_err(t *testing.T) { path := filepath.Join(os.DevNull, "foo", "bar") err := writeLastContext(path, "foo") if err == nil { t.Fatal("got empty error") } } func Test_writeLastContext(t *testing.T) { dir, err := ioutil.TempDir(os.TempDir(), "state-file-test") if err != nil { t.Fatal(err) } path := filepath.Join(dir, "foo", "bar") if err := writeLastContext(path, "ctx1"); err != nil { t.Fatal(err) } v, err := readLastContext(path) if err != nil { t.Fatal(err) } if expected := "ctx1"; v != expected { t.Fatalf("read wrong value=%q; expected=%q", v, expected) } } func Test_kubectxFilePath(t *testing.T) { origHome := os.Getenv("HOME") os.Setenv("HOME", filepath.FromSlash("/foo/bar")) defer os.Setenv("HOME", origHome) expected := filepath.Join(filepath.FromSlash("/foo/bar"), ".kube", "kubectx") v, err := kubectxPrevCtxFile() if err != nil { t.Fatal(err) } if v != expected { t.Fatalf("expected=%q got=%q", expected, v) } } func Test_kubectxFilePath_error(t *testing.T) { origHome := os.Getenv("HOME") origUserprofile := os.Getenv("USERPROFILE") os.Unsetenv("HOME") os.Unsetenv("USERPROFILE") defer os.Setenv("HOME", origHome) defer os.Setenv("USERPROFILE", origUserprofile) _, err := kubectxPrevCtxFile() if err == nil { t.Fatal(err) } }
{ "pile_set_name": "Github" }
<?php namespace Hamcrest\Arrays; /* Copyright (c) 2009 hamcrest.org */ // NOTE: This class is not exactly a direct port of Java's since Java handles // arrays quite differently than PHP // TODO: Allow this to take matchers or values within the array use Hamcrest\Description; use Hamcrest\TypeSafeMatcher; use Hamcrest\Util; /** * Matcher for array whose elements satisfy a sequence of matchers. * The array size must equal the number of element matchers. */ class IsArray extends TypeSafeMatcher { private $_elementMatchers; public function __construct(array $elementMatchers) { parent::__construct(self::TYPE_ARRAY); Util::checkAllAreMatchers($elementMatchers); $this->_elementMatchers = $elementMatchers; } protected function matchesSafely($array) { if (array_keys($array) != array_keys($this->_elementMatchers)) { return false; } /** @var $matcher \Hamcrest\Matcher */ foreach ($this->_elementMatchers as $k => $matcher) { if (!$matcher->matches($array[$k])) { return false; } } return true; } protected function describeMismatchSafely($actual, Description $mismatchDescription) { if (count($actual) != count($this->_elementMatchers)) { $mismatchDescription->appendText('array length was ' . count($actual)); return; } elseif (array_keys($actual) != array_keys($this->_elementMatchers)) { $mismatchDescription->appendText('array keys were ') ->appendValueList( $this->descriptionStart(), $this->descriptionSeparator(), $this->descriptionEnd(), array_keys($actual) ) ; return; } /** @var $matcher \Hamcrest\Matcher */ foreach ($this->_elementMatchers as $k => $matcher) { if (!$matcher->matches($actual[$k])) { $mismatchDescription->appendText('element ')->appendValue($k) ->appendText(' was ')->appendValue($actual[$k]); return; } } } public function describeTo(Description $description) { $description->appendList( $this->descriptionStart(), $this->descriptionSeparator(), $this->descriptionEnd(), $this->_elementMatchers ); } /** * Evaluates to true only if each $matcher[$i] is satisfied by $array[$i]. * * @factory ... */ public static function anArray(/* args... */) { $args = func_get_args(); return new self(Util::createMatcherArray($args)); } // -- Protected Methods protected function descriptionStart() { return '['; } protected function descriptionSeparator() { return ', '; } protected function descriptionEnd() { return ']'; } }
{ "pile_set_name": "Github" }
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2020 DBeaver Corp and others * Copyright (C) 2011-2012 Eugene Fradkin ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.dialogs.driver; import org.eclipse.jface.dialogs.Dialog; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.*; import org.jkiss.dbeaver.model.connection.DBPDriverLibrary; import org.jkiss.dbeaver.registry.driver.DriverDescriptor; import org.jkiss.dbeaver.registry.driver.DriverLibraryMavenArtifact; import org.jkiss.dbeaver.registry.maven.MavenArtifactReference; import org.jkiss.dbeaver.ui.UIUtils; import org.jkiss.dbeaver.ui.internal.UIConnectionMessages; import org.jkiss.utils.CommonUtils; /** * EditMavenArtifactDialog */ class EditMavenArtifactDialog extends Dialog { private DriverLibraryMavenArtifact library; private Text groupText; private Text artifactText; private Text classifierText; private Combo versionText; private boolean ignoreDependencies; public EditMavenArtifactDialog(Shell shell, DriverDescriptor driver, DriverLibraryMavenArtifact library) { super(shell); this.library = library == null ? new DriverLibraryMavenArtifact(driver, DBPDriverLibrary.FileType.jar, "", MavenArtifactReference.VERSION_PATTERN_RELEASE) : library; } public DriverLibraryMavenArtifact getLibrary() { return library; } @Override protected boolean isResizable() { return true; } @Override protected Control createDialogArea(Composite parent) { getShell().setText(UIConnectionMessages.dialog_edit_driver_edit_maven_title); Composite composite = (Composite) super.createDialogArea(parent); ((GridLayout)composite.getLayout()).numColumns = 2; GridData gd = new GridData(GridData.FILL_HORIZONTAL); gd.widthHint = 200; groupText = UIUtils.createLabelText(composite, UIConnectionMessages.dialog_edit_driver_edit_maven_group_id_label, library.getReference().getGroupId()); groupText.setLayoutData(gd); artifactText = UIUtils.createLabelText(composite, UIConnectionMessages.dialog_edit_driver_edit_maven_artifact_id_label, library.getReference().getArtifactId()); artifactText.setLayoutData(gd); classifierText = UIUtils.createLabelText(composite, UIConnectionMessages.dialog_edit_driver_edit_maven_classfier_label, CommonUtils.notEmpty(library.getReference().getClassifier())); classifierText.setLayoutData(gd); versionText = UIUtils.createLabelCombo(composite, UIConnectionMessages.dialog_edit_driver_edit_maven_version_label, SWT.DROP_DOWN | SWT.BORDER); versionText.setLayoutData(gd); versionText.setText(library.getVersion()); versionText.add(MavenArtifactReference.VERSION_PATTERN_RELEASE); versionText.add(MavenArtifactReference.VERSION_PATTERN_LATEST); Button ignoreDependenciesCheckbox = UIUtils.createCheckbox(composite, "Ignore transient dependencies", "Do not include library dependencies", library.isIgnoreDependencies(), 2); ignoreDependenciesCheckbox.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { ignoreDependencies = ignoreDependenciesCheckbox.getSelection(); } }); ModifyListener ml = e -> updateButtons(); groupText.addModifyListener(ml); artifactText.addModifyListener(ml); classifierText.addModifyListener(ml); versionText.addModifyListener(ml); return composite; } @Override protected void createButtonsForButtonBar(Composite parent) { super.createButtonsForButtonBar(parent); updateButtons(); } private void updateButtons() { getButton(IDialogConstants.OK_ID).setEnabled( !CommonUtils.isEmpty(groupText.getText()) && !CommonUtils.isEmpty(artifactText.getText()) && !CommonUtils.isEmpty(versionText.getText()) ); } @Override protected void okPressed() { String classifier = classifierText.getText(); library.setReference( new MavenArtifactReference( groupText.getText(), artifactText.getText(), CommonUtils.isEmpty(classifier) ? null : classifier, versionText.getText())); library.setIgnoreDependencies(ignoreDependencies); super.okPressed(); } }
{ "pile_set_name": "Github" }
name: "Ta Safo" url: "https://tasafo.org" twitter: "tasafo"
{ "pile_set_name": "Github" }
package com.akkafun.order.api.dtos; import org.hibernate.validator.constraints.NotEmpty; import javax.validation.Valid; import javax.validation.constraints.NotNull; import java.util.ArrayList; import java.util.List; /** * Created by liubin on 2016/5/6. */ public class PlaceOrderDto { @NotNull(message = "用户ID不能为空") private Long userId; private List<Long> couponIdList = new ArrayList<>(); @NotEmpty @Valid private List<PlaceOrderItemDto> placeOrderItemList = new ArrayList<>(0); public Long getUserId() { return userId; } public void setUserId(Long userId) { this.userId = userId; } public List<Long> getCouponIdList() { return couponIdList; } public void setCouponIdList(List<Long> couponIdList) { this.couponIdList = couponIdList; } public List<PlaceOrderItemDto> getPlaceOrderItemList() { return placeOrderItemList; } public void setPlaceOrderItemList(List<PlaceOrderItemDto> placeOrderItemList) { this.placeOrderItemList = placeOrderItemList; } }
{ "pile_set_name": "Github" }
<app-header></app-header> <ion-content> <div id="page-not-found-container"> <div [fivCenter] class="ion-text-center"> <lottie-animation-view [options]="lottieConfig" [width]="200" [height]="200" (animCreated)="handleAnimation($event)"> </lottie-animation-view> <h3 class="ion-text-center">Oops, we couldn't find the page you are looking for.</h3> <ion-button routerLink="/" shape="round"> Go to homepage </ion-button> </div> </div> </ion-content>
{ "pile_set_name": "Github" }
package p1.p2; public class JavaHighlighting1 { public void f() { int n = R.<error>xml</error>.main; } }
{ "pile_set_name": "Github" }
/* ** c_bind.h ** **--------------------------------------------------------------------------- ** Copyright 1998-2006 Randy Heit ** All rights reserved. ** ** Redistribution and use in source and binary forms, with or without ** modification, are permitted provided that the following conditions ** are met: ** ** 1. Redistributions of source code must retain the above copyright ** notice, this list of conditions and the following disclaimer. ** 2. Redistributions in binary form must reproduce the above copyright ** notice, this list of conditions and the following disclaimer in the ** documentation and/or other materials provided with the distribution. ** 3. The name of the author may not be used to endorse or promote products ** derived from this software without specific prior written permission. ** ** THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ** IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES ** OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. ** IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, ** INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT ** NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, ** DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY ** THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT ** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF ** THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. **--------------------------------------------------------------------------- ** */ #ifndef __C_BINDINGS_H__ #define __C_BINDINGS_H__ #include "doomdef.h" struct event_t; class FConfigFile; class FCommandLine; void C_NameKeys (char *str, int first, int second); class FKeyBindings { FString Binds[NUM_KEYS]; public: void PerformBind(FCommandLine &argv, const char *msg); bool DoKey(event_t *ev); void ArchiveBindings(FConfigFile *F, const char *matchcmd = NULL); int GetKeysForCommand (const char *cmd, int *first, int *second); void UnbindACommand (const char *str); void UnbindAll (); void UnbindKey(const char *key); void DoBind (const char *key, const char *bind); void DefaultBind(const char *keyname, const char *cmd); void SetBind(unsigned int key, const char *bind) { if (key < NUM_KEYS) Binds[key] = bind; } const FString &GetBinding(unsigned int index) const { return Binds[index]; } const char *GetBind(unsigned int index) const { if (index < NUM_KEYS) return Binds[index]; else return NULL; } }; extern FKeyBindings Bindings; extern FKeyBindings DoubleBindings; extern FKeyBindings AutomapBindings; extern FKeyBindings MenuBindings; bool C_DoKey (event_t *ev, FKeyBindings *binds, FKeyBindings *doublebinds); // Stuff used by the customize controls menu void C_SetDefaultBindings (); void C_UnbindAll (); extern const char *KeyNames[]; struct FKeyAction { FString mTitle; FString mAction; }; struct FKeySection { FString mTitle; FString mSection; TArray<FKeyAction> mActions; }; extern TArray<FKeySection> KeySections; #endif //__C_BINDINGS_H__
{ "pile_set_name": "Github" }
{{# def.definitions }} {{# def.errors }} {{# def.setupKeyword }} {{# def.$data }} {{? ($schema || $isData) && it.opts.uniqueItems !== false }} {{? $isData }} var {{=$valid}}; if ({{=$schemaValue}} === false || {{=$schemaValue}} === undefined) {{=$valid}} = true; else if (typeof {{=$schemaValue}} != 'boolean') {{=$valid}} = false; else { {{?}} var {{=$valid}} = true; if ({{=$data}}.length > 1) { var i = {{=$data}}.length, j; outer: for (;i--;) { for (j = i; j--;) { if (equal({{=$data}}[i], {{=$data}}[j])) { {{=$valid}} = false; break outer; } } } } {{? $isData }} } {{?}} if (!{{=$valid}}) { {{# def.error:'uniqueItems' }} } {{? $breakOnError }} else { {{?}} {{??}} {{? $breakOnError }} if (true) { {{?}} {{?}}
{ "pile_set_name": "Github" }
T1830_1.hs:5:29: error: Can't make a derived instance of ‘Lift (Foo a)’: You need DeriveLift to derive an instance for this class In the data declaration for ‘Foo’
{ "pile_set_name": "Github" }
// @flow import * as React from "react"; import Slider from "../index"; export default { Example: () => <Slider label="Volume" minValue={0} maxValue={100} defaultValue={33} />, info: { title: "Default slider", description: "A default slider presents a range in between given minimum and maximum values. Always include a label.", }, };
{ "pile_set_name": "Github" }
CREATE TABLE IF NOT EXISTS "carddav_server" ( "carddav_server_id" serial, "user_id" int NOT NULL REFERENCES "users" ON DELETE CASCADE, "url" varchar(255) NOT NULL, "username" varchar(128) NOT NULL, "password" varchar(128) NOT NULL, "label" varchar(128) NOT NULL, "read_only" int NOT NULL, PRIMARY KEY ("carddav_server_id") ); CREATE TABLE IF NOT EXISTS "carddav_contacts" ( "carddav_contact_id" serial, "carddav_server_id" int REFERENCES "carddav_server" ON DELETE CASCADE, "user_id" int, "etag" varchar(64) NOT NULL, "last_modified" varchar(128) NOT NULL, "vcard_id" varchar(64), "vcard" text NOT NULL, "words" text, "firstname" varchar(128) DEFAULT NULL, "surname" varchar(128) DEFAULT NULL, "name" varchar(255) DEFAULT NULL, "email" varchar(255) DEFAULT NULL, PRIMARY KEY ("carddav_server_id","user_id","vcard_id") ); CREATE INDEX "user_id" ON "carddav_contacts" ("user_id");
{ "pile_set_name": "Github" }
``` package vultrbaremetal import "github.com/cloudlibz/gocloud/baremetal/vultrbaremetal" ``` ### TYPES ``` type BareMetalInfo struct { SUBID string OS string `json:"os"` RAM string `json:"ram"` Disk string `json:"disk"` MainIP string `json:"main_ip"` CPUCount float64 `json:"cpu_count"` Location string `json:"location"` DCID string DefaultPassword string `json:"default_password"` DateCreated string `json:"date_created"` Status string `json:"status"` NetmaskV4 string `json:"netmask_v4"` GatewayV4 string `json:"gateway_v4"` METALPLANID float64 V6Networks []V6Network `json:"v6_networks"` Label string `json:"label"` Tag string `json:"tag"` OSID string APPID string } type CreateBareMetal struct { DCID int // Location in which to create the server. See v1/regions/list. METALPLANID int // Plan to use when creating this server. See v1/plans/list_baremetal. OSID int // Operating system to use. See v1/os/list. SCRIPTID int // (optional) The SCRIPTID of a startup script to execute on boot. This only works when using a Vultr supplied operating system. See v1/startupscript/list. SNAPSHOTID string // (optional) If you've selected the 'snapshot' operating system, this should be the SNAPSHOTID (see v1/snapshot/list) to restore for the initial installation. SSHKEYID string // (optional) List of SSH keys to apply to this server on install (only valid for Linux/FreeBSD). See v1/sshkey/list. Separate keys with commas. APPID int // (optional) If launching an application (OSID 186), this is the APPID to launch. See v1/app/list. // contains filtered or unexported fields } type CreateBareMetalBuilder struct { // contains filtered or unexported fields } CreateBareMetal builder pattern code func NewCreateBareMetalBuilder() *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) APPID(aPPID int) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) Build() (map[string]interface{}, error) func (b *CreateBareMetalBuilder) DCID(dCID int) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) EnableIpv6(enable_ipv6 string) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) Hostname(hostname string) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) Label(label string) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) METALPLANID(mETALPLANID int) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) NotifyActivate(notify_activate string) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) OSID(oSID int) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) SCRIPTID(sCRIPTID int) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) SNAPSHOTID(sNAPSHOTID string) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) SSHKEYID(sSHKEYID string) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) Tag(tag string) *CreateBareMetalBuilder func (b *CreateBareMetalBuilder) UserData(userdata string) *CreateBareMetalBuilder type CreateBareMetalResp struct { StatusCode int SUBID string } func ParseCreateBareMetalResp(resp interface{}) (createBareMetalResp CreateBareMetalResp, err error) type DeleteBareMetal struct { SUBID int // Unique identifier for this subscription. } type DeleteBareMetalBuilder struct { // contains filtered or unexported fields } DeleteBareMetal builder pattern code func NewDeleteBareMetalBuilder() *DeleteBareMetalBuilder func (b *DeleteBareMetalBuilder) Build() (map[string]interface{}, error) func (b *DeleteBareMetalBuilder) SUBID(sUBID int) *DeleteBareMetalBuilder type HaltBareMetal struct { SUBID int // Unique identifier for this subscription. } type HaltBareMetalBuilder struct { // contains filtered or unexported fields } HaltBareMetal builder pattern code func NewHaltBareMetalBuilder() *HaltBareMetalBuilder func (b *HaltBareMetalBuilder) Build() (map[string]interface{}, error) func (b *HaltBareMetalBuilder) SUBID(sUBID int) *HaltBareMetalBuilder type ListBareMetal struct { SUBID int // (optional) Unique identifier of a subscription. Only the subscription object will be returned. // contains filtered or unexported fields } type ListBareMetalBuilder struct { // contains filtered or unexported fields } ListBareMetal builder pattern code func NewListBareMetalBuilder() *ListBareMetalBuilder func (b *ListBareMetalBuilder) Build() (map[string]interface{}, error) func (b *ListBareMetalBuilder) Label(label string) *ListBareMetalBuilder func (b *ListBareMetalBuilder) MainIp(main_ip string) *ListBareMetalBuilder func (b *ListBareMetalBuilder) SUBID(sUBID int) *ListBareMetalBuilder func (b *ListBareMetalBuilder) Tag(tag string) *ListBareMetalBuilder type ListBareMetalResp struct { StatusCode int BareMetalSlice []BareMetalInfo } func ParseListBareMetalResp(resp interface{}) (listBareMetalResp ListBareMetalResp, err error) type RebootBareMetal struct { SUBID int // Unique identifier for this subscription. } type RebootBareMetalBuilder struct { // contains filtered or unexported fields } RebootBareMetal builder pattern code func NewRebootBareMetalBuilder() *RebootBareMetalBuilder func (b *RebootBareMetalBuilder) Build() (map[string]interface{}, error) func (b *RebootBareMetalBuilder) SUBID(sUBID int) *RebootBareMetalBuilder type ReinstallBareMetal struct { SUBID int // Unique identifier for this subscription. } type ReinstallBareMetalBuilder struct { // contains filtered or unexported fields } ReinstallBareMetal builder pattern code func NewReinstallBareMetalBuilder() *ReinstallBareMetalBuilder func (b *ReinstallBareMetalBuilder) Build() (map[string]interface{}, error) func (b *ReinstallBareMetalBuilder) SUBID(sUBID int) *ReinstallBareMetalBuilder type V6Network struct { V6Network string `json:"v6_network"` V6MainIP string `json:"v6_main_ip"` V6NetworkSize float64 `json:"v6_network_size"` } type VultrBareMetal struct { } func (*VultrBareMetal) CreateBareMetal(request interface{}) (resp interface{}, err error) CreateBareMetal function creates a new Vultr bare metal machine. func (*VultrBareMetal) DeleteBareMetal(request interface{}) (resp interface{}, err error) DeleteBareMetal function deletes a Vultr bare metal machine. func (*VultrBareMetal) HaltBareMetal(request interface{}) (resp interface{}, err error) HaltBareMetal function halt a Vultr bare metal machine. func (*VultrBareMetal) ListBareMetal(request interface{}) (resp interface{}, err error) ListBareMetal function list Vultr bare metal machines. func (*VultrBareMetal) RebootBareMetal(request interface{}) (resp interface{}, err error) RebootBareMetal function reboots a Vultr bare metal machine. func (*VultrBareMetal) ReinstallBareMetal(request interface{}) (resp interface{}, err error) ReinstallBareMetal function reinstall a Vultr bare metal machine. ```
{ "pile_set_name": "Github" }
package com.twitter.scrooge.ast import scala.collection.mutable import com.twitter.scrooge.frontend.ScroogeInternalException sealed abstract class Identifier extends IdNode { // It was intentional not to override toString. Instead, use // "fullName" to indicate its purpose. def fullName: String def toCamelCase: Identifier def toTitleCase: Identifier def toUpperCase: Identifier def toLowerCase: Identifier // to prevent accidental use of Identifier as String private[scrooge] def +(str: String): String = throw new ScroogeInternalException("do not use \"+\" operation on Identifiers") } object Identifier { // constructor def apply(str: String): Identifier = { assert(!str.isEmpty) val ids = str.split("\\.") if (ids.size == 1) SimpleID(ids.head) else QualifiedID(ids.toIndexedSeq) } def toTitleCase(str: String): String = toCamelCase(str, true) /** * convert string to camel case, with the following fine print: * - leading underscores are preserved * - internal underscores are removed. Character following an underscore * is converted to upper case. * - first character (non underscore char) is upper case if * firstCharUp is true, lower case if false * - first character of the second and following parts (text between underscores) * is always in upper case * - if a part is all upper case it is converted to lower case (except for first character), * in other cases case is preserved * * Examples: (original, camel case, title case) * (gen_html_report, genHtmlReport, GenHtmlReport) * (GEN_HTML_REPORT, genHtmlReport, GenHtmlReport) * (Gen_HTMLReport, genHTMLReport, GenHTMLReport) * (Gen_HTML_Report, genHtmlReport, GenHtmlReport) * (GENHTMLREPORT, genhtmlreport, Genhtmlreport) * (genhtmlreport, genhtmlreport, Genhtmlreport) * (genHtmlReport, genHtmlReport, GenHtmlReport) * (genHTMLReport, genHTMLReport, GenHtmlReport) * (_genHtmlReport, _genHtmlReport, _GenHtmlReport) */ def toCamelCase(str: String, firstCharUp: Boolean = false): String = { str.takeWhile(_ == '_') + str .split('_') .filterNot(_.isEmpty) .zipWithIndex .map { case (part, ind) => val first = if (ind == 0 && !firstCharUp) part(0).toLower else part(0).toUpper val isAllUpperCase = part.forall { c => c.isUpper || !c.isLetter } val rest = if (isAllUpperCase) part.drop(1).toLowerCase else part.drop(1) new mutable.StringBuilder(part.size).append(first).append(rest) } .mkString } } case class SimpleID(name: String, origName: Option[String] = None) extends Identifier { assert( !name.contains(".") && !name.isEmpty, s"'$name' is not a valid SimpleID" ) // name is a simple string val fullName: String = name val originalName: String = origName.getOrElse(fullName) def toCamelCase: SimpleID = SimpleID(Identifier.toCamelCase(name), origName = Some(originalName)) def toTitleCase: SimpleID = SimpleID(Identifier.toTitleCase(name), origName = Some(originalName)) def toUpperCase: SimpleID = SimpleID(name.toUpperCase, origName = Some(originalName)) def toLowerCase: SimpleID = SimpleID(name.toLowerCase, origName = Some(originalName)) // append and prepend only available for SimpleID // To encourage correct usage of SimpleID, we intentionally don't use implicit // string conversions def append(other: String): SimpleID = { assert(!other.isEmpty && !other.contains(".")) SimpleID(name + other) } def prepend(other: String): SimpleID = { assert(!other.isEmpty && !other.contains(".")) SimpleID(other + name) } def addScope(scope: Identifier): QualifiedID = QualifiedID(scope match { case SimpleID(s, _) => Seq(s, this.name) case QualifiedID(names) => names :+ name }) } case class QualifiedID(names: Seq[String]) extends Identifier { assert(names.size >= 2) // at least a scope and a name assert(!names.exists(_.isEmpty)) val fullName: String = names.mkString(".") // case conversion only happens on the last id def toCamelCase: QualifiedID = QualifiedID(names.dropRight(1) :+ Identifier.toCamelCase(names.last)) def toTitleCase: QualifiedID = QualifiedID(names.dropRight(1) :+ Identifier.toTitleCase(names.last)) def toUpperCase: QualifiedID = QualifiedID(names.dropRight(1) :+ names.last.toUpperCase) def toLowerCase: QualifiedID = QualifiedID(names.dropRight(1) :+ names.last.toLowerCase) def head: SimpleID = SimpleID(names.head) def tail: Identifier = Identifier(names.tail.mkString(".")) def qualifier: Identifier = Identifier(names.dropRight(1).mkString(".")) def name: SimpleID = SimpleID(names.last) }
{ "pile_set_name": "Github" }
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** // *** Do not edit by hand unless you're certain you know what you are doing! *** using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Threading.Tasks; using Pulumi.Serialization; namespace Pulumi.Aws.WafRegional.Inputs { public sealed class WebAclRuleOverrideActionArgs : Pulumi.ResourceArgs { /// <summary> /// Specifies how you want AWS WAF Regional to respond to requests that match the settings in a rule. e.g. `ALLOW`, `BLOCK` or `COUNT` /// </summary> [Input("type", required: true)] public Input<string> Type { get; set; } = null!; public WebAclRuleOverrideActionArgs() { } } }
{ "pile_set_name": "Github" }
#import <UIKit/UIKit.h> @class TGCameraPreviewView; @class TGMenuSheetPallete; @interface TGAttachmentCameraView : UIView @property (nonatomic, copy) void (^pressed)(void); @property (nonatomic, strong) TGMenuSheetPallete *pallete; - (instancetype)initForSelfPortrait:(bool)selfPortrait; @property (nonatomic, readonly) bool previewViewAttached; - (void)detachPreviewView; - (void)attachPreviewViewAnimated:(bool)animated; - (void)willAttachPreviewView; - (void)startPreview; - (void)stopPreview; - (void)resumePreview; - (void)pausePreview; - (void)setZoomedProgress:(CGFloat)progress; - (TGCameraPreviewView *)previewView; @end
{ "pile_set_name": "Github" }
#if defined( __VMS) && !defined( OPENSSL_NO_DECC_INIT) && \ defined( __DECC) && !defined( __VAX) && (__CRTL_VER >= 70301000) # define USE_DECC_INIT 1 #endif #ifdef USE_DECC_INIT /*- * 2010-04-26 SMS. * *---------------------------------------------------------------------- * * decc_init() * * On non-VAX systems, uses LIB$INITIALIZE to set a collection of C * RTL features without using the DECC$* logical name method. * *---------------------------------------------------------------------- */ # include <stdio.h> # include <stdlib.h> # include <unixlib.h> /* Global storage. */ /* Flag to sense if decc_init() was called. */ int decc_init_done = -1; /* Structure to hold a DECC$* feature name and its desired value. */ typedef struct { char *name; int value; } decc_feat_t; /* * Array of DECC$* feature names and their desired values. Note: * DECC$ARGV_PARSE_STYLE is the urgent one. */ decc_feat_t decc_feat_array[] = { /* Preserve command-line case with SET PROCESS/PARSE_STYLE=EXTENDED */ {"DECC$ARGV_PARSE_STYLE", 1}, /* Preserve case for file names on ODS5 disks. */ {"DECC$EFS_CASE_PRESERVE", 1}, /* * Enable multiple dots (and most characters) in ODS5 file names, while * preserving VMS-ness of ";version". */ {"DECC$EFS_CHARSET", 1}, /* List terminator. */ {(char *)NULL, 0} }; /* LIB$INITIALIZE initialization function. */ static void decc_init(void) { char *openssl_debug_decc_init; int verbose = 0; int feat_index; int feat_value; int feat_value_max; int feat_value_min; int i; int sts; /* Get debug option. */ openssl_debug_decc_init = getenv("OPENSSL_DEBUG_DECC_INIT"); if (openssl_debug_decc_init != NULL) { verbose = strtol(openssl_debug_decc_init, NULL, 10); if (verbose <= 0) { verbose = 1; } } /* Set the global flag to indicate that LIB$INITIALIZE worked. */ decc_init_done = 1; /* Loop through all items in the decc_feat_array[]. */ for (i = 0; decc_feat_array[i].name != NULL; i++) { /* Get the feature index. */ feat_index = decc$feature_get_index(decc_feat_array[i].name); if (feat_index >= 0) { /* Valid item. Collect its properties. */ feat_value = decc$feature_get_value(feat_index, 1); feat_value_min = decc$feature_get_value(feat_index, 2); feat_value_max = decc$feature_get_value(feat_index, 3); /* Check the validity of our desired value. */ if ((decc_feat_array[i].value >= feat_value_min) && (decc_feat_array[i].value <= feat_value_max)) { /* Valid value. Set it if necessary. */ if (feat_value != decc_feat_array[i].value) { sts = decc$feature_set_value(feat_index, 1, decc_feat_array[i].value); if (verbose > 1) { fprintf(stderr, " %s = %d, sts = %d.\n", decc_feat_array[i].name, decc_feat_array[i].value, sts); } } } else { /* Invalid DECC feature value. */ fprintf(stderr, " INVALID DECC$FEATURE VALUE, %d: %d <= %s <= %d.\n", feat_value, feat_value_min, decc_feat_array[i].name, feat_value_max); } } else { /* Invalid DECC feature name. */ fprintf(stderr, " UNKNOWN DECC$FEATURE: %s.\n", decc_feat_array[i].name); } } if (verbose > 0) { fprintf(stderr, " DECC_INIT complete.\n"); } } /* Get "decc_init()" into a valid, loaded LIB$INITIALIZE PSECT. */ # pragma nostandard /* * Establish the LIB$INITIALIZE PSECTs, with proper alignment and other * attributes. Note that "nopic" is significant only on VAX. */ # pragma extern_model save # if __INITIAL_POINTER_SIZE == 64 # define PSECT_ALIGN 3 # else # define PSECT_ALIGN 2 # endif # pragma extern_model strict_refdef "LIB$INITIALIZ" PSECT_ALIGN, nopic, nowrt const int spare[8] = { 0 }; # pragma extern_model strict_refdef "LIB$INITIALIZE" PSECT_ALIGN, nopic, nowrt void (*const x_decc_init) () = decc_init; # pragma extern_model restore /* Fake reference to ensure loading the LIB$INITIALIZE PSECT. */ # pragma extern_model save int LIB$INITIALIZE(void); # pragma extern_model strict_refdef int dmy_lib$initialize = (int)LIB$INITIALIZE; # pragma extern_model restore # pragma standard #else /* def USE_DECC_INIT */ /* Dummy code to avoid a %CC-W-EMPTYFILE complaint. */ int decc_init_dummy(void); #endif /* def USE_DECC_INIT */
{ "pile_set_name": "Github" }
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT. package kinesisvideomedia import ( "io" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awsutil" "github.com/aws/aws-sdk-go/aws/request" ) const opGetMedia = "GetMedia" // GetMediaRequest generates a "aws/request.Request" representing the // client's request for the GetMedia operation. The "output" return // value will be populated with the request's response once the request completes // successfuly. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See GetMedia for more information on using the GetMedia // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the GetMediaRequest method. // req, resp := client.GetMediaRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/kinesis-video-media-2017-09-30/GetMedia func (c *KinesisVideoMedia) GetMediaRequest(input *GetMediaInput) (req *request.Request, output *GetMediaOutput) { op := &request.Operation{ Name: opGetMedia, HTTPMethod: "POST", HTTPPath: "/getMedia", } if input == nil { input = &GetMediaInput{} } output = &GetMediaOutput{} req = c.newRequest(op, input, output) return } // GetMedia API operation for Amazon Kinesis Video Streams Media. // // Use this API to retrieve media content from a Kinesis video stream. In the // request, you identify stream name or stream Amazon Resource Name (ARN), and // the starting chunk. Kinesis Video Streams then returns a stream of chunks // in order by fragment number. // // You must first call the GetDataEndpoint API to get an endpoint to which you // can then send the GetMedia requests. // // When you put media data (fragments) on a stream, Kinesis Video Streams stores // each incoming fragment and related metadata in what is called a "chunk." // For more information, see . The GetMedia API returns a stream of these chunks // starting from the chunk that you specify in the request. // // The following limits apply when using the GetMedia API: // // * A client can call GetMedia up to five times per second per stream. // // * Kinesis Video Streams sends media data at a rate of up to 25 megabytes // per second (or 200 megabits per second) during a GetMedia session. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Kinesis Video Streams Media's // API operation GetMedia for usage and error information. // // Returned Error Codes: // * ErrCodeResourceNotFoundException "ResourceNotFoundException" // Status Code: 404, The stream with the given name does not exist. // // * ErrCodeNotAuthorizedException "NotAuthorizedException" // Status Code: 403, The caller is not authorized to perform an operation on // the given stream, or the token has expired. // // * ErrCodeInvalidEndpointException "InvalidEndpointException" // Status Code: 400, Caller used wrong endpoint to write data to a stream. On // receiving such an exception, the user must call GetDataEndpoint with AccessMode // set to "READ" and use the endpoint Kinesis Video returns in the next GetMedia // call. // // * ErrCodeClientLimitExceededException "ClientLimitExceededException" // Kinesis Video Streams has throttled the request because you have exceeded // the limit of allowed client calls. Try making the call later. // // * ErrCodeConnectionLimitExceededException "ConnectionLimitExceededException" // Kinesis Video Streams has throttled the request because you have exceeded // the limit of allowed client connections. // // * ErrCodeInvalidArgumentException "InvalidArgumentException" // The value for this input parameter is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/kinesis-video-media-2017-09-30/GetMedia func (c *KinesisVideoMedia) GetMedia(input *GetMediaInput) (*GetMediaOutput, error) { req, out := c.GetMediaRequest(input) return out, req.Send() } // GetMediaWithContext is the same as GetMedia with the addition of // the ability to pass a context and additional request options. // // See GetMedia for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *KinesisVideoMedia) GetMediaWithContext(ctx aws.Context, input *GetMediaInput, opts ...request.Option) (*GetMediaOutput, error) { req, out := c.GetMediaRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } type GetMediaInput struct { _ struct{} `type:"structure"` // Identifies the starting chunk to get from the specified stream. // // StartSelector is a required field StartSelector *StartSelector `type:"structure" required:"true"` // The ARN of the stream from where you want to get the media content. If you // don't specify the streamARN, you must specify the streamName. StreamARN *string `min:"1" type:"string"` // The Kinesis video stream name from where you want to get the media content. // If you don't specify the streamName, you must specify the streamARN. StreamName *string `min:"1" type:"string"` } // String returns the string representation func (s GetMediaInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetMediaInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *GetMediaInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "GetMediaInput"} if s.StartSelector == nil { invalidParams.Add(request.NewErrParamRequired("StartSelector")) } if s.StreamARN != nil && len(*s.StreamARN) < 1 { invalidParams.Add(request.NewErrParamMinLen("StreamARN", 1)) } if s.StreamName != nil && len(*s.StreamName) < 1 { invalidParams.Add(request.NewErrParamMinLen("StreamName", 1)) } if s.StartSelector != nil { if err := s.StartSelector.Validate(); err != nil { invalidParams.AddNested("StartSelector", err.(request.ErrInvalidParams)) } } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetStartSelector sets the StartSelector field's value. func (s *GetMediaInput) SetStartSelector(v *StartSelector) *GetMediaInput { s.StartSelector = v return s } // SetStreamARN sets the StreamARN field's value. func (s *GetMediaInput) SetStreamARN(v string) *GetMediaInput { s.StreamARN = &v return s } // SetStreamName sets the StreamName field's value. func (s *GetMediaInput) SetStreamName(v string) *GetMediaInput { s.StreamName = &v return s } type GetMediaOutput struct { _ struct{} `type:"structure" payload:"Payload"` // The content type of the requested media. ContentType *string `location:"header" locationName:"Content-Type" min:"1" type:"string"` // The payload Kinesis Video Streams returns is a sequence of chunks from the // specified stream. For information about the chunks, see . The chunks that // Kinesis Video Streams returns in the GetMedia call also include the following // additional Matroska (MKV) tags: // // * AWS_KINESISVIDEO_CONTINUATION_TOKEN (UTF-8 string) - In the event your // GetMedia call terminates, you can use this continuation token in your // next request to get the next chunk where the last request terminated. // // * AWS_KINESISVIDEO_MILLIS_BEHIND_NOW (UTF-8 string) - Client applications // can use this tag value to determine how far behind the chunk returned // in the response is from the latest chunk on the stream. // // * AWS_KINESISVIDEO_FRAGMENT_NUMBER - Fragment number returned in the chunk. // // * AWS_KINESISVIDEO_SERVER_TIMESTAMP - Server time stamp of the fragment. // // * AWS_KINESISVIDEO_PRODUCER_TIMESTAMP - Producer time stamp of the fragment. // // The following tags will be present if an error occurs: // // * AWS_KINESISVIDEO_ERROR_CODE - String description of an error that caused // GetMedia to stop. // // * AWS_KINESISVIDEO_ERROR_ID: Integer code of the error. // // The error codes are as follows: // // * 3002 - Error writing to the stream // // * 4000 - Requested fragment is not found // // * 4500 - Access denied for the stream's KMS key // // * 4501 - Stream's KMS key is disabled // // * 4502 - Validation error on the Stream's KMS key // // * 4503 - KMS key specified in the stream is unavailable // // * 4504 - Invalid usage of the KMS key specified in the stream // // * 4505 - Invalid state of the KMS key specified in the stream // // * 4506 - Unable to find the KMS key specified in the stream // // * 5000 - Internal error Payload io.ReadCloser `type:"blob"` } // String returns the string representation func (s GetMediaOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetMediaOutput) GoString() string { return s.String() } // SetContentType sets the ContentType field's value. func (s *GetMediaOutput) SetContentType(v string) *GetMediaOutput { s.ContentType = &v return s } // SetPayload sets the Payload field's value. func (s *GetMediaOutput) SetPayload(v io.ReadCloser) *GetMediaOutput { s.Payload = v return s } // Identifies the chunk on the Kinesis video stream where you want the GetMedia // API to start returning media data. You have the following options to identify // the starting chunk: // // * Choose the latest (or oldest) chunk. // // * Identify a specific chunk. You can identify a specific chunk either // by providing a fragment number or time stamp (server or producer). // // * Each chunk's metadata includes a continuation token as a Matroska (MKV) // tag (AWS_KINESISVIDEO_CONTINUATION_TOKEN). If your previous GetMedia request // terminated, you can use this tag value in your next GetMedia request. // The API then starts returning chunks starting where the last API ended. type StartSelector struct { _ struct{} `type:"structure"` // Specifies the fragment number from where you want the GetMedia API to start // returning the fragments. AfterFragmentNumber *string `min:"1" type:"string"` // Continuation token that Kinesis Video Streams returned in the previous GetMedia // response. The GetMedia API then starts with the chunk identified by the continuation // token. ContinuationToken *string `min:"1" type:"string"` // Identifies the fragment on the Kinesis video stream where you want to start // getting the data from. // // * NOW - Start with the latest chunk on the stream. // // * EARLIEST - Start with earliest available chunk on the stream. // // * FRAGMENT_NUMBER - Start with the chunk containing the specific fragment. // You must also specify the StartFragmentNumber. // // * PRODUCER_TIMESTAMP or SERVER_TIMESTAMP - Start with the chunk containing // a fragment with the specified producer or server time stamp. You specify // the time stamp by adding StartTimestamp. // // * CONTINUATION_TOKEN - Read using the specified continuation token. // // If you choose the NOW, EARLIEST, or CONTINUATION_TOKEN as the startSelectorType, // you don't provide any additional information in the startSelector. // // StartSelectorType is a required field StartSelectorType *string `type:"string" required:"true" enum:"StartSelectorType"` // A time stamp value. This value is required if you choose the PRODUCER_TIMESTAMP // or the SERVER_TIMESTAMP as the startSelectorType. The GetMedia API then starts // with the chunk containing the fragment that has the specified time stamp. StartTimestamp *time.Time `type:"timestamp" timestampFormat:"unix"` } // String returns the string representation func (s StartSelector) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s StartSelector) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *StartSelector) Validate() error { invalidParams := request.ErrInvalidParams{Context: "StartSelector"} if s.AfterFragmentNumber != nil && len(*s.AfterFragmentNumber) < 1 { invalidParams.Add(request.NewErrParamMinLen("AfterFragmentNumber", 1)) } if s.ContinuationToken != nil && len(*s.ContinuationToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("ContinuationToken", 1)) } if s.StartSelectorType == nil { invalidParams.Add(request.NewErrParamRequired("StartSelectorType")) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAfterFragmentNumber sets the AfterFragmentNumber field's value. func (s *StartSelector) SetAfterFragmentNumber(v string) *StartSelector { s.AfterFragmentNumber = &v return s } // SetContinuationToken sets the ContinuationToken field's value. func (s *StartSelector) SetContinuationToken(v string) *StartSelector { s.ContinuationToken = &v return s } // SetStartSelectorType sets the StartSelectorType field's value. func (s *StartSelector) SetStartSelectorType(v string) *StartSelector { s.StartSelectorType = &v return s } // SetStartTimestamp sets the StartTimestamp field's value. func (s *StartSelector) SetStartTimestamp(v time.Time) *StartSelector { s.StartTimestamp = &v return s } const ( // StartSelectorTypeFragmentNumber is a StartSelectorType enum value StartSelectorTypeFragmentNumber = "FRAGMENT_NUMBER" // StartSelectorTypeServerTimestamp is a StartSelectorType enum value StartSelectorTypeServerTimestamp = "SERVER_TIMESTAMP" // StartSelectorTypeProducerTimestamp is a StartSelectorType enum value StartSelectorTypeProducerTimestamp = "PRODUCER_TIMESTAMP" // StartSelectorTypeNow is a StartSelectorType enum value StartSelectorTypeNow = "NOW" // StartSelectorTypeEarliest is a StartSelectorType enum value StartSelectorTypeEarliest = "EARLIEST" // StartSelectorTypeContinuationToken is a StartSelectorType enum value StartSelectorTypeContinuationToken = "CONTINUATION_TOKEN" )
{ "pile_set_name": "Github" }
include_directories( ${CMAKE_CURRENT_BINARY_DIR}/.. ${CMAKE_CURRENT_SOURCE_DIR}/.. ) add_llvm_library(LLVMAArch64AsmParser AArch64AsmParser.cpp )
{ "pile_set_name": "Github" }
// This file contains changes that are only compatible with go 1.10 and onwards. // +build go1.10 package yaml import "encoding/json" // DisallowUnknownFields configures the JSON decoder to error out if unknown // fields come along, instead of dropping them by default. func DisallowUnknownFields(d *json.Decoder) *json.Decoder { d.DisallowUnknownFields() return d }
{ "pile_set_name": "Github" }
.charsheet { background-color: #fff; border: 1px solid; font-family: 'Oswald'; } .charsheet h1 { background-color: #000; color: #fff; font-variant: small-caps; padding: 0.1em; text-align: center; text-shadow: 3px 2px gray; } .charsheet input { background: transparent; height: 0.9em; } .charsheet input.sheet-short,.charsheet input.sheet-long,charsheet input.sheet-very_long { background-color: #fff; } .charsheet input.sheet-short { text-align: center; width: 4em; } .charsheet input.sheet-medium { width: 6em; } .charsheet input.sheet-long { width: 23em; } .charsheet input.sheet-very_long { width: 100%; } .charsheet hr.style-base { border: 1px; height: 0.25em; } .charsheet .sheet-veteran,.charsheet .sheet-exorcist,.charsheet .sheet-hunter,.charsheet .sheet-scoundrel,.charsheet .sheet-elementalist,.charsheet .sheet-occultist,.charsheet .sheet-outlander,.charsheet { display: none; } .charsheet .sheet-veteran-activator:checked ~ .sheet-veteran,.charsheet .sheet-exorcist-activator:checked ~ .sheet-exorcist,.charsheet .sheet-hunter-activator:checked ~ .sheet-hunter,.charsheet .sheet-scoundrel-activator:checked ~ .sheet-scoundrel,.charsheet .sheet-elementalist-activator:checked ~ .sheet-elementalist,.charsheet .sheet-occultist-activator:checked ~ .sheet-occultist,.charsheet .sheet-outlander-activator:checked ~ .sheet-outlander,.charsheet { display: block; } .sheet-section-stuff textarea { height: 20px; width: 99%; } .sheet-lib0 { color: #000; font-size: 26px; font-weight:bold; } .sheet-lib1 { color: #000; font-size: 18px; font-weight:bold; } .sheet-lib2 { color: #000; font-weight:bold; } .sheet-lib3 { font-style: italic; } .sheet-lib4 { font-style: italic; text-align: center; width: 4em; } .sheet-lib5 { font-style: italic; text-align: center; width: 6em; } .sheet-lib6 { font-style: italic; text-align: center; width: 24em; } button[type=roll].sheet-light-button::before { content: "a"; font-family: 'dicefontd6'; font-size: 24px; } button[type=roll].sheet-dark-button::before { content: "F"; font-family: 'dicefontd6'; font-size: 24px; }
{ "pile_set_name": "Github" }
/* * Copyright (C) 2008 Apple Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef Parser_h #define Parser_h #include <wtf/Platform.h> #if ENABLE(WREC) #include "Escapes.h" #include "Quantifier.h" #include "UString.h" #include "WRECGenerator.h" #include <wtf/ASCIICType.h> namespace JSC { namespace WREC { struct CharacterClass; class Parser { typedef Generator::JumpList JumpList; typedef Generator::ParenthesesType ParenthesesType; friend class SavedState; public: Parser(const UString& pattern, bool ignoreCase, bool multiline) : m_generator(*this) , m_data(pattern.data()) , m_size(pattern.size()) , m_ignoreCase(ignoreCase) , m_multiline(multiline) { reset(); } Generator& generator() { return m_generator; } bool ignoreCase() const { return m_ignoreCase; } bool multiline() const { return m_multiline; } void recordSubpattern() { ++m_numSubpatterns; } unsigned numSubpatterns() const { return m_numSubpatterns; } const char* error() const { return m_error; } const char* syntaxError() const { return m_error == ParenthesesNotSupported ? 0 : m_error; } void parsePattern(JumpList& failures) { reset(); parseDisjunction(failures); if (peek() != EndOfPattern) setError(ParenthesesUnmatched); // Parsing the pattern should fully consume it. } void parseDisjunction(JumpList& failures); void parseAlternative(JumpList& failures); bool parseTerm(JumpList& failures); bool parseNonCharacterEscape(JumpList& failures, const Escape&); bool parseParentheses(JumpList& failures); bool parseCharacterClass(JumpList& failures); bool parseCharacterClassQuantifier(JumpList& failures, const CharacterClass& charClass, bool invert); bool parseBackreferenceQuantifier(JumpList& failures, unsigned subpatternId); private: class SavedState { public: SavedState(Parser& parser) : m_parser(parser) , m_index(parser.m_index) { } void restore() { m_parser.m_index = m_index; } private: Parser& m_parser; unsigned m_index; }; void reset() { m_index = 0; m_numSubpatterns = 0; m_error = 0; } void setError(const char* error) { if (m_error) return; m_error = error; } int peek() { if (m_index >= m_size) return EndOfPattern; return m_data[m_index]; } int consume() { if (m_index >= m_size) return EndOfPattern; return m_data[m_index++]; } bool peekIsDigit() { return WTF::isASCIIDigit(peek()); } unsigned peekDigit() { ASSERT(peekIsDigit()); return peek() - '0'; } unsigned consumeDigit() { ASSERT(peekIsDigit()); return consume() - '0'; } unsigned consumeNumber() { int n = consumeDigit(); while (peekIsDigit()) { n *= 10; n += consumeDigit(); } return n; } int consumeHex(int count) { int n = 0; while (count--) { if (!WTF::isASCIIHexDigit(peek())) return -1; n = (n << 4) | WTF::toASCIIHexValue(consume()); } return n; } unsigned consumeOctal() { unsigned n = 0; while (n < 32 && WTF::isASCIIOctalDigit(peek())) n = n * 8 + consumeDigit(); return n; } ALWAYS_INLINE Quantifier consumeGreedyQuantifier(); Quantifier consumeQuantifier(); Escape consumeEscape(bool inCharacterClass); ParenthesesType consumeParenthesesType(); static const int EndOfPattern = -1; // Error messages. static const char* QuantifierOutOfOrder; static const char* QuantifierWithoutAtom; static const char* ParenthesesUnmatched; static const char* ParenthesesTypeInvalid; static const char* ParenthesesNotSupported; static const char* CharacterClassUnmatched; static const char* CharacterClassOutOfOrder; static const char* EscapeUnterminated; Generator m_generator; const UChar* m_data; unsigned m_size; unsigned m_index; bool m_ignoreCase; bool m_multiline; unsigned m_numSubpatterns; const char* m_error; }; } } // namespace JSC::WREC #endif // ENABLE(WREC) #endif // Parser_h
{ "pile_set_name": "Github" }
Format: 1.51 # This is the Natural Docs topics file for this project. If you change anything # here, it will apply to THIS PROJECT ONLY. If you'd like to change something # for all your projects, edit the Topics.txt in Natural Docs' Config directory # instead. # If you'd like to prevent keywords from being recognized by Natural Docs, you # can do it like this: # Ignore Keywords: [keyword], [keyword], ... # # Or you can use the list syntax like how they are defined: # Ignore Keywords: # [keyword] # [keyword], [plural keyword] # ... #------------------------------------------------------------------------------- # SYNTAX: # # Topic Type: [name] # Alter Topic Type: [name] # Creates a new topic type or alters one from the main file. Each type gets # its own index and behavior settings. Its name can have letters, numbers, # spaces, and these charaters: - / . ' # # Plural: [name] # Sets the plural name of the topic type, if different. # # Keywords: # [keyword] # [keyword], [plural keyword] # ... # Defines or adds to the list of keywords for the topic type. They may only # contain letters, numbers, and spaces and are not case sensitive. Plural # keywords are used for list topics. You can redefine keywords found in the # main topics file. # # Index: [yes|no] # Whether the topics get their own index. Defaults to yes. Everything is # included in the general index regardless of this setting. # # Scope: [normal|start|end|always global] # How the topics affects scope. Defaults to normal. # normal - Topics stay within the current scope. # start - Topics start a new scope for all the topics beneath it, # like class topics. # end - Topics reset the scope back to global for all the topics # beneath it. # always global - Topics are defined as global, but do not change the scope # for any other topics. # # Class Hierarchy: [yes|no] # Whether the topics are part of the class hierarchy. Defaults to no. # # Page Title If First: [yes|no] # Whether the topic's title becomes the page title if it's the first one in # a file. Defaults to no. # # Break Lists: [yes|no] # Whether list topics should be broken into individual topics in the output. # Defaults to no. # # Can Group With: [type], [type], ... # Defines a list of topic types that this one can possibly be grouped with. # Defaults to none. #------------------------------------------------------------------------------- # The following topics are defined in the main file, if you'd like to alter # their behavior or add keywords: # # Generic, Class, Interface, Section, File, Group, Function, Variable, # Property, Type, Constant, Enumeration, Event, Delegate, Macro, # Database, Database Table, Database View, Database Index, Database # Cursor, Database Trigger, Cookie, Build Target # If you add something that you think would be useful to other developers # and should be included in Natural Docs by default, please e-mail it to # topics [at] naturaldocs [dot] org.
{ "pile_set_name": "Github" }
// DO NOT EDIT THIS FILE - it is machine generated -*- c++ -*- #ifndef __org_omg_PortableInterceptor_IORInterceptor_3_0Holder__ #define __org_omg_PortableInterceptor_IORInterceptor_3_0Holder__ #pragma interface #include <java/lang/Object.h> extern "Java" { namespace org { namespace omg { namespace CORBA { class TypeCode; namespace portable { class InputStream; class OutputStream; } } namespace PortableInterceptor { class IORInterceptor_3_0; class IORInterceptor_3_0Holder; } } } } class org::omg::PortableInterceptor::IORInterceptor_3_0Holder : public ::java::lang::Object { public: IORInterceptor_3_0Holder(); IORInterceptor_3_0Holder(::org::omg::PortableInterceptor::IORInterceptor_3_0 *); void _read(::org::omg::CORBA::portable::InputStream *); void _write(::org::omg::CORBA::portable::OutputStream *); ::org::omg::CORBA::TypeCode * _type(); ::org::omg::PortableInterceptor::IORInterceptor_3_0 * __attribute__((aligned(__alignof__( ::java::lang::Object)))) value; static ::java::lang::Class class$; }; #endif // __org_omg_PortableInterceptor_IORInterceptor_3_0Holder__
{ "pile_set_name": "Github" }
// martin 1-3-2002: it seems there is a problem with the way Serializable is loaded. object test { def f() = "hello".concat("world"); } // #1000 object A { println("""This a "raw" string ending with a "double quote"""") } -----
{ "pile_set_name": "Github" }