code (stringlengths, 5 to 1.03M) | repo_name (stringlengths, 5 to 90) | path (stringlengths, 4 to 158) | license (stringclasses, 15 values) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.Core40
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
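-- A minimal usage sketch (assumes an OpenGL context has already been created
-- and made current, e.g. via GLUT or GLFW; the import and calls below are
-- illustrative only):
--
-- > import Graphics.Rendering.OpenGL.Raw.Core40
-- >
-- > clearFrame :: IO ()
-- > clearFrame = do
-- >   glClearColor 0 0 0 1
-- >   glClear gl_COLOR_BUFFER_BIT
--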
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.Core40 (
-- * Types
GLbitfield,
GLboolean,
GLbyte,
GLchar,
GLclampd,
GLclampf,
GLdouble,
GLenum,
GLfloat,
GLhalf,
GLint,
GLint64,
GLintptr,
GLshort,
GLsizei,
GLsizeiptr,
GLsync,
GLubyte,
GLuint,
GLuint64,
GLushort,
GLvoid,
-- * Enums
gl_ACTIVE_ATTRIBUTES,
gl_ACTIVE_ATTRIBUTE_MAX_LENGTH,
gl_ACTIVE_SUBROUTINES,
gl_ACTIVE_SUBROUTINE_MAX_LENGTH,
gl_ACTIVE_SUBROUTINE_UNIFORMS,
gl_ACTIVE_SUBROUTINE_UNIFORM_LOCATIONS,
gl_ACTIVE_SUBROUTINE_UNIFORM_MAX_LENGTH,
gl_ACTIVE_TEXTURE,
gl_ACTIVE_UNIFORMS,
gl_ACTIVE_UNIFORM_BLOCKS,
gl_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH,
gl_ACTIVE_UNIFORM_MAX_LENGTH,
gl_ALIASED_LINE_WIDTH_RANGE,
gl_ALPHA,
gl_ALREADY_SIGNALED,
gl_ALWAYS,
gl_AND,
gl_AND_INVERTED,
gl_AND_REVERSE,
gl_ANY_SAMPLES_PASSED,
gl_ARRAY_BUFFER,
gl_ARRAY_BUFFER_BINDING,
gl_ATTACHED_SHADERS,
gl_BACK,
gl_BACK_LEFT,
gl_BACK_RIGHT,
gl_BGR,
gl_BGRA,
gl_BGRA_INTEGER,
gl_BGR_INTEGER,
gl_BLEND,
gl_BLEND_DST,
gl_BLEND_DST_ALPHA,
gl_BLEND_DST_RGB,
gl_BLEND_EQUATION_ALPHA,
gl_BLEND_EQUATION_RGB,
gl_BLEND_SRC,
gl_BLEND_SRC_ALPHA,
gl_BLEND_SRC_RGB,
gl_BLUE,
gl_BLUE_INTEGER,
gl_BOOL,
gl_BOOL_VEC2,
gl_BOOL_VEC3,
gl_BOOL_VEC4,
gl_BUFFER_ACCESS,
gl_BUFFER_ACCESS_FLAGS,
gl_BUFFER_MAPPED,
gl_BUFFER_MAP_LENGTH,
gl_BUFFER_MAP_OFFSET,
gl_BUFFER_MAP_POINTER,
gl_BUFFER_SIZE,
gl_BUFFER_USAGE,
gl_BYTE,
gl_CCW,
gl_CLAMP_READ_COLOR,
gl_CLAMP_TO_BORDER,
gl_CLAMP_TO_EDGE,
gl_CLEAR,
gl_CLIP_DISTANCE0,
gl_CLIP_DISTANCE1,
gl_CLIP_DISTANCE2,
gl_CLIP_DISTANCE3,
gl_CLIP_DISTANCE4,
gl_CLIP_DISTANCE5,
gl_CLIP_DISTANCE6,
gl_CLIP_DISTANCE7,
gl_COLOR,
gl_COLOR_ATTACHMENT0,
gl_COLOR_ATTACHMENT1,
gl_COLOR_ATTACHMENT10,
gl_COLOR_ATTACHMENT11,
gl_COLOR_ATTACHMENT12,
gl_COLOR_ATTACHMENT13,
gl_COLOR_ATTACHMENT14,
gl_COLOR_ATTACHMENT15,
gl_COLOR_ATTACHMENT2,
gl_COLOR_ATTACHMENT3,
gl_COLOR_ATTACHMENT4,
gl_COLOR_ATTACHMENT5,
gl_COLOR_ATTACHMENT6,
gl_COLOR_ATTACHMENT7,
gl_COLOR_ATTACHMENT8,
gl_COLOR_ATTACHMENT9,
gl_COLOR_BUFFER_BIT,
gl_COLOR_CLEAR_VALUE,
gl_COLOR_LOGIC_OP,
gl_COLOR_WRITEMASK,
gl_COMPARE_REF_TO_TEXTURE,
gl_COMPATIBLE_SUBROUTINES,
gl_COMPILE_STATUS,
gl_COMPRESSED_RED,
gl_COMPRESSED_RED_RGTC1,
gl_COMPRESSED_RG,
gl_COMPRESSED_RGB,
gl_COMPRESSED_RGBA,
gl_COMPRESSED_RG_RGTC2,
gl_COMPRESSED_SIGNED_RED_RGTC1,
gl_COMPRESSED_SIGNED_RG_RGTC2,
gl_COMPRESSED_SRGB,
gl_COMPRESSED_SRGB_ALPHA,
gl_COMPRESSED_TEXTURE_FORMATS,
gl_CONDITION_SATISFIED,
gl_CONSTANT_ALPHA,
gl_CONSTANT_COLOR,
gl_CONTEXT_COMPATIBILITY_PROFILE_BIT,
gl_CONTEXT_CORE_PROFILE_BIT,
gl_CONTEXT_FLAGS,
gl_CONTEXT_FLAG_FORWARD_COMPATIBLE_BIT,
gl_CONTEXT_PROFILE_MASK,
gl_COPY,
gl_COPY_INVERTED,
gl_COPY_READ_BUFFER,
gl_COPY_WRITE_BUFFER,
gl_CULL_FACE,
gl_CULL_FACE_MODE,
gl_CURRENT_PROGRAM,
gl_CURRENT_QUERY,
gl_CURRENT_VERTEX_ATTRIB,
gl_CW,
gl_DECR,
gl_DECR_WRAP,
gl_DELETE_STATUS,
gl_DEPTH,
gl_DEPTH24_STENCIL8,
gl_DEPTH32F_STENCIL8,
gl_DEPTH_ATTACHMENT,
gl_DEPTH_BUFFER_BIT,
gl_DEPTH_CLAMP,
gl_DEPTH_CLEAR_VALUE,
gl_DEPTH_COMPONENT,
gl_DEPTH_COMPONENT16,
gl_DEPTH_COMPONENT24,
gl_DEPTH_COMPONENT32,
gl_DEPTH_COMPONENT32F,
gl_DEPTH_FUNC,
gl_DEPTH_RANGE,
gl_DEPTH_STENCIL,
gl_DEPTH_STENCIL_ATTACHMENT,
gl_DEPTH_TEST,
gl_DEPTH_WRITEMASK,
gl_DITHER,
gl_DONT_CARE,
gl_DOUBLE,
gl_DOUBLEBUFFER,
gl_DOUBLE_MAT2,
gl_DOUBLE_MAT2x3,
gl_DOUBLE_MAT2x4,
gl_DOUBLE_MAT3,
gl_DOUBLE_MAT3x2,
gl_DOUBLE_MAT3x4,
gl_DOUBLE_MAT4,
gl_DOUBLE_MAT4x2,
gl_DOUBLE_MAT4x3,
gl_DOUBLE_VEC2,
gl_DOUBLE_VEC3,
gl_DOUBLE_VEC4,
gl_DRAW_BUFFER,
gl_DRAW_BUFFER0,
gl_DRAW_BUFFER1,
gl_DRAW_BUFFER10,
gl_DRAW_BUFFER11,
gl_DRAW_BUFFER12,
gl_DRAW_BUFFER13,
gl_DRAW_BUFFER14,
gl_DRAW_BUFFER15,
gl_DRAW_BUFFER2,
gl_DRAW_BUFFER3,
gl_DRAW_BUFFER4,
gl_DRAW_BUFFER5,
gl_DRAW_BUFFER6,
gl_DRAW_BUFFER7,
gl_DRAW_BUFFER8,
gl_DRAW_BUFFER9,
gl_DRAW_FRAMEBUFFER,
gl_DRAW_FRAMEBUFFER_BINDING,
gl_DRAW_INDIRECT_BUFFER,
gl_DRAW_INDIRECT_BUFFER_BINDING,
gl_DST_ALPHA,
gl_DST_COLOR,
gl_DYNAMIC_COPY,
gl_DYNAMIC_DRAW,
gl_DYNAMIC_READ,
gl_ELEMENT_ARRAY_BUFFER,
gl_ELEMENT_ARRAY_BUFFER_BINDING,
gl_EQUAL,
gl_EQUIV,
gl_EXTENSIONS,
gl_FALSE,
gl_FASTEST,
gl_FILL,
gl_FIRST_VERTEX_CONVENTION,
gl_FIXED_ONLY,
gl_FLOAT,
gl_FLOAT_32_UNSIGNED_INT_24_8_REV,
gl_FLOAT_MAT2,
gl_FLOAT_MAT2x3,
gl_FLOAT_MAT2x4,
gl_FLOAT_MAT3,
gl_FLOAT_MAT3x2,
gl_FLOAT_MAT3x4,
gl_FLOAT_MAT4,
gl_FLOAT_MAT4x2,
gl_FLOAT_MAT4x3,
gl_FLOAT_VEC2,
gl_FLOAT_VEC3,
gl_FLOAT_VEC4,
gl_FRACTIONAL_EVEN,
gl_FRACTIONAL_ODD,
gl_FRAGMENT_INTERPOLATION_OFFSET_BITS,
gl_FRAGMENT_SHADER,
gl_FRAGMENT_SHADER_DERIVATIVE_HINT,
gl_FRAMEBUFFER,
gl_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE,
gl_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE,
gl_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING,
gl_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE,
gl_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE,
gl_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE,
gl_FRAMEBUFFER_ATTACHMENT_LAYERED,
gl_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME,
gl_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE,
gl_FRAMEBUFFER_ATTACHMENT_RED_SIZE,
gl_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE,
gl_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE,
gl_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER,
gl_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL,
gl_FRAMEBUFFER_BINDING,
gl_FRAMEBUFFER_COMPLETE,
gl_FRAMEBUFFER_DEFAULT,
gl_FRAMEBUFFER_INCOMPLETE_ATTACHMENT,
gl_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER,
gl_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS,
gl_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT,
gl_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE,
gl_FRAMEBUFFER_INCOMPLETE_READ_BUFFER,
gl_FRAMEBUFFER_SRGB,
gl_FRAMEBUFFER_UNDEFINED,
gl_FRAMEBUFFER_UNSUPPORTED,
gl_FRONT,
gl_FRONT_AND_BACK,
gl_FRONT_FACE,
gl_FRONT_LEFT,
gl_FRONT_RIGHT,
gl_FUNC_ADD,
gl_FUNC_REVERSE_SUBTRACT,
gl_FUNC_SUBTRACT,
gl_GEOMETRY_INPUT_TYPE,
gl_GEOMETRY_OUTPUT_TYPE,
gl_GEOMETRY_SHADER,
gl_GEOMETRY_SHADER_INVOCATIONS,
gl_GEOMETRY_VERTICES_OUT,
gl_GEQUAL,
gl_GREATER,
gl_GREEN,
gl_GREEN_INTEGER,
gl_HALF_FLOAT,
gl_INCR,
gl_INCR_WRAP,
gl_INFO_LOG_LENGTH,
gl_INT,
gl_INTERLEAVED_ATTRIBS,
gl_INT_2_10_10_10_REV,
gl_INT_SAMPLER_1D,
gl_INT_SAMPLER_1D_ARRAY,
gl_INT_SAMPLER_2D,
gl_INT_SAMPLER_2D_ARRAY,
gl_INT_SAMPLER_2D_MULTISAMPLE,
gl_INT_SAMPLER_2D_MULTISAMPLE_ARRAY,
gl_INT_SAMPLER_2D_RECT,
gl_INT_SAMPLER_3D,
gl_INT_SAMPLER_BUFFER,
gl_INT_SAMPLER_CUBE,
gl_INT_SAMPLER_CUBE_MAP_ARRAY,
gl_INT_VEC2,
gl_INT_VEC3,
gl_INT_VEC4,
gl_INVALID_ENUM,
gl_INVALID_FRAMEBUFFER_OPERATION,
gl_INVALID_INDEX,
gl_INVALID_OPERATION,
gl_INVALID_VALUE,
gl_INVERT,
gl_ISOLINES,
gl_KEEP,
gl_LAST_VERTEX_CONVENTION,
gl_LEFT,
gl_LEQUAL,
gl_LESS,
gl_LINE,
gl_LINEAR,
gl_LINEAR_MIPMAP_LINEAR,
gl_LINEAR_MIPMAP_NEAREST,
gl_LINES,
gl_LINES_ADJACENCY,
gl_LINE_LOOP,
gl_LINE_SMOOTH,
gl_LINE_SMOOTH_HINT,
gl_LINE_STRIP,
gl_LINE_STRIP_ADJACENCY,
gl_LINE_WIDTH,
gl_LINE_WIDTH_GRANULARITY,
gl_LINE_WIDTH_RANGE,
gl_LINK_STATUS,
gl_LOGIC_OP_MODE,
gl_LOWER_LEFT,
gl_MAJOR_VERSION,
gl_MAP_FLUSH_EXPLICIT_BIT,
gl_MAP_INVALIDATE_BUFFER_BIT,
gl_MAP_INVALIDATE_RANGE_BIT,
gl_MAP_READ_BIT,
gl_MAP_UNSYNCHRONIZED_BIT,
gl_MAP_WRITE_BIT,
gl_MAX,
gl_MAX_3D_TEXTURE_SIZE,
gl_MAX_ARRAY_TEXTURE_LAYERS,
gl_MAX_CLIP_DISTANCES,
gl_MAX_COLOR_ATTACHMENTS,
gl_MAX_COLOR_TEXTURE_SAMPLES,
gl_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS,
gl_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS,
gl_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS,
gl_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS,
gl_MAX_COMBINED_TEXTURE_IMAGE_UNITS,
gl_MAX_COMBINED_UNIFORM_BLOCKS,
gl_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS,
gl_MAX_CUBE_MAP_TEXTURE_SIZE,
gl_MAX_DEPTH_TEXTURE_SAMPLES,
gl_MAX_DRAW_BUFFERS,
gl_MAX_DUAL_SOURCE_DRAW_BUFFERS,
gl_MAX_ELEMENTS_INDICES,
gl_MAX_ELEMENTS_VERTICES,
gl_MAX_FRAGMENT_INPUT_COMPONENTS,
gl_MAX_FRAGMENT_INTERPOLATION_OFFSET,
gl_MAX_FRAGMENT_UNIFORM_BLOCKS,
gl_MAX_FRAGMENT_UNIFORM_COMPONENTS,
gl_MAX_GEOMETRY_INPUT_COMPONENTS,
gl_MAX_GEOMETRY_OUTPUT_COMPONENTS,
gl_MAX_GEOMETRY_OUTPUT_VERTICES,
gl_MAX_GEOMETRY_SHADER_INVOCATIONS,
gl_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS,
gl_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS,
gl_MAX_GEOMETRY_UNIFORM_BLOCKS,
gl_MAX_GEOMETRY_UNIFORM_COMPONENTS,
gl_MAX_INTEGER_SAMPLES,
gl_MAX_PATCH_VERTICES,
gl_MAX_PROGRAM_TEXEL_OFFSET,
gl_MAX_PROGRAM_TEXTURE_GATHER_OFFSET,
gl_MAX_RECTANGLE_TEXTURE_SIZE,
gl_MAX_RENDERBUFFER_SIZE,
gl_MAX_SAMPLES,
gl_MAX_SAMPLE_MASK_WORDS,
gl_MAX_SERVER_WAIT_TIMEOUT,
gl_MAX_SUBROUTINES,
gl_MAX_SUBROUTINE_UNIFORM_LOCATIONS,
gl_MAX_TESS_CONTROL_INPUT_COMPONENTS,
gl_MAX_TESS_CONTROL_OUTPUT_COMPONENTS,
gl_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS,
gl_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS,
gl_MAX_TESS_CONTROL_UNIFORM_BLOCKS,
gl_MAX_TESS_CONTROL_UNIFORM_COMPONENTS,
gl_MAX_TESS_EVALUATION_INPUT_COMPONENTS,
gl_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS,
gl_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS,
gl_MAX_TESS_EVALUATION_UNIFORM_BLOCKS,
gl_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS,
gl_MAX_TESS_GEN_LEVEL,
gl_MAX_TESS_PATCH_COMPONENTS,
gl_MAX_TEXTURE_BUFFER_SIZE,
gl_MAX_TEXTURE_IMAGE_UNITS,
gl_MAX_TEXTURE_LOD_BIAS,
gl_MAX_TEXTURE_SIZE,
gl_MAX_TRANSFORM_FEEDBACK_BUFFERS,
gl_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS,
gl_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS,
gl_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS,
gl_MAX_UNIFORM_BLOCK_SIZE,
gl_MAX_UNIFORM_BUFFER_BINDINGS,
gl_MAX_VARYING_COMPONENTS,
gl_MAX_VARYING_FLOATS,
gl_MAX_VERTEX_ATTRIBS,
gl_MAX_VERTEX_OUTPUT_COMPONENTS,
gl_MAX_VERTEX_STREAMS,
gl_MAX_VERTEX_TEXTURE_IMAGE_UNITS,
gl_MAX_VERTEX_UNIFORM_BLOCKS,
gl_MAX_VERTEX_UNIFORM_COMPONENTS,
gl_MAX_VIEWPORT_DIMS,
gl_MIN,
gl_MINOR_VERSION,
gl_MIN_FRAGMENT_INTERPOLATION_OFFSET,
gl_MIN_PROGRAM_TEXEL_OFFSET,
gl_MIN_PROGRAM_TEXTURE_GATHER_OFFSET,
gl_MIN_SAMPLE_SHADING_VALUE,
gl_MIRRORED_REPEAT,
gl_MULTISAMPLE,
gl_NAND,
gl_NEAREST,
gl_NEAREST_MIPMAP_LINEAR,
gl_NEAREST_MIPMAP_NEAREST,
gl_NEVER,
gl_NICEST,
gl_NONE,
gl_NOOP,
gl_NOR,
gl_NOTEQUAL,
gl_NO_ERROR,
gl_NUM_COMPATIBLE_SUBROUTINES,
gl_NUM_COMPRESSED_TEXTURE_FORMATS,
gl_NUM_EXTENSIONS,
gl_OBJECT_TYPE,
gl_ONE,
gl_ONE_MINUS_CONSTANT_ALPHA,
gl_ONE_MINUS_CONSTANT_COLOR,
gl_ONE_MINUS_DST_ALPHA,
gl_ONE_MINUS_DST_COLOR,
gl_ONE_MINUS_SRC1_ALPHA,
gl_ONE_MINUS_SRC1_COLOR,
gl_ONE_MINUS_SRC_ALPHA,
gl_ONE_MINUS_SRC_COLOR,
gl_OR,
gl_OR_INVERTED,
gl_OR_REVERSE,
gl_OUT_OF_MEMORY,
gl_PACK_ALIGNMENT,
gl_PACK_IMAGE_HEIGHT,
gl_PACK_LSB_FIRST,
gl_PACK_ROW_LENGTH,
gl_PACK_SKIP_IMAGES,
gl_PACK_SKIP_PIXELS,
gl_PACK_SKIP_ROWS,
gl_PACK_SWAP_BYTES,
gl_PATCHES,
gl_PATCH_DEFAULT_INNER_LEVEL,
gl_PATCH_DEFAULT_OUTER_LEVEL,
gl_PATCH_VERTICES,
gl_PIXEL_PACK_BUFFER,
gl_PIXEL_PACK_BUFFER_BINDING,
gl_PIXEL_UNPACK_BUFFER,
gl_PIXEL_UNPACK_BUFFER_BINDING,
gl_POINT,
gl_POINTS,
gl_POINT_FADE_THRESHOLD_SIZE,
gl_POINT_SIZE,
gl_POINT_SIZE_GRANULARITY,
gl_POINT_SIZE_RANGE,
gl_POINT_SPRITE_COORD_ORIGIN,
gl_POLYGON_MODE,
gl_POLYGON_OFFSET_FACTOR,
gl_POLYGON_OFFSET_FILL,
gl_POLYGON_OFFSET_LINE,
gl_POLYGON_OFFSET_POINT,
gl_POLYGON_OFFSET_UNITS,
gl_POLYGON_SMOOTH,
gl_POLYGON_SMOOTH_HINT,
gl_PRIMITIVES_GENERATED,
gl_PRIMITIVE_RESTART,
gl_PRIMITIVE_RESTART_INDEX,
gl_PROGRAM_POINT_SIZE,
gl_PROVOKING_VERTEX,
gl_PROXY_TEXTURE_1D,
gl_PROXY_TEXTURE_1D_ARRAY,
gl_PROXY_TEXTURE_2D,
gl_PROXY_TEXTURE_2D_ARRAY,
gl_PROXY_TEXTURE_2D_MULTISAMPLE,
gl_PROXY_TEXTURE_2D_MULTISAMPLE_ARRAY,
gl_PROXY_TEXTURE_3D,
gl_PROXY_TEXTURE_CUBE_MAP,
gl_PROXY_TEXTURE_CUBE_MAP_ARRAY,
gl_PROXY_TEXTURE_RECTANGLE,
gl_QUADS,
gl_QUADS_FOLLOW_PROVOKING_VERTEX_CONVENTION,
gl_QUERY_BY_REGION_NO_WAIT,
gl_QUERY_BY_REGION_WAIT,
gl_QUERY_COUNTER_BITS,
gl_QUERY_NO_WAIT,
gl_QUERY_RESULT,
gl_QUERY_RESULT_AVAILABLE,
gl_QUERY_WAIT,
gl_R11F_G11F_B10F,
gl_R16,
gl_R16F,
gl_R16I,
gl_R16UI,
gl_R16_SNORM,
gl_R32F,
gl_R32I,
gl_R32UI,
gl_R3_G3_B2,
gl_R8,
gl_R8I,
gl_R8UI,
gl_R8_SNORM,
gl_RASTERIZER_DISCARD,
gl_READ_BUFFER,
gl_READ_FRAMEBUFFER,
gl_READ_FRAMEBUFFER_BINDING,
gl_READ_ONLY,
gl_READ_WRITE,
gl_RED,
gl_RED_INTEGER,
gl_RENDERBUFFER,
gl_RENDERBUFFER_ALPHA_SIZE,
gl_RENDERBUFFER_BINDING,
gl_RENDERBUFFER_BLUE_SIZE,
gl_RENDERBUFFER_DEPTH_SIZE,
gl_RENDERBUFFER_GREEN_SIZE,
gl_RENDERBUFFER_HEIGHT,
gl_RENDERBUFFER_INTERNAL_FORMAT,
gl_RENDERBUFFER_RED_SIZE,
gl_RENDERBUFFER_SAMPLES,
gl_RENDERBUFFER_STENCIL_SIZE,
gl_RENDERBUFFER_WIDTH,
gl_RENDERER,
gl_REPEAT,
gl_REPLACE,
gl_RG,
gl_RG16,
gl_RG16F,
gl_RG16I,
gl_RG16UI,
gl_RG16_SNORM,
gl_RG32F,
gl_RG32I,
gl_RG32UI,
gl_RG8,
gl_RG8I,
gl_RG8UI,
gl_RG8_SNORM,
gl_RGB,
gl_RGB10,
gl_RGB10_A2,
gl_RGB10_A2UI,
gl_RGB12,
gl_RGB16,
gl_RGB16F,
gl_RGB16I,
gl_RGB16UI,
gl_RGB16_SNORM,
gl_RGB32F,
gl_RGB32I,
gl_RGB32UI,
gl_RGB4,
gl_RGB5,
gl_RGB5_A1,
gl_RGB8,
gl_RGB8I,
gl_RGB8UI,
gl_RGB8_SNORM,
gl_RGB9_E5,
gl_RGBA,
gl_RGBA12,
gl_RGBA16,
gl_RGBA16F,
gl_RGBA16I,
gl_RGBA16UI,
gl_RGBA16_SNORM,
gl_RGBA2,
gl_RGBA32F,
gl_RGBA32I,
gl_RGBA32UI,
gl_RGBA4,
gl_RGBA8,
gl_RGBA8I,
gl_RGBA8UI,
gl_RGBA8_SNORM,
gl_RGBA_INTEGER,
gl_RGB_INTEGER,
gl_RG_INTEGER,
gl_RIGHT,
gl_SAMPLER_1D,
gl_SAMPLER_1D_ARRAY,
gl_SAMPLER_1D_ARRAY_SHADOW,
gl_SAMPLER_1D_SHADOW,
gl_SAMPLER_2D,
gl_SAMPLER_2D_ARRAY,
gl_SAMPLER_2D_ARRAY_SHADOW,
gl_SAMPLER_2D_MULTISAMPLE,
gl_SAMPLER_2D_MULTISAMPLE_ARRAY,
gl_SAMPLER_2D_RECT,
gl_SAMPLER_2D_RECT_SHADOW,
gl_SAMPLER_2D_SHADOW,
gl_SAMPLER_3D,
gl_SAMPLER_BINDING,
gl_SAMPLER_BUFFER,
gl_SAMPLER_CUBE,
gl_SAMPLER_CUBE_MAP_ARRAY,
gl_SAMPLER_CUBE_MAP_ARRAY_SHADOW,
gl_SAMPLER_CUBE_SHADOW,
gl_SAMPLES,
gl_SAMPLES_PASSED,
gl_SAMPLE_ALPHA_TO_COVERAGE,
gl_SAMPLE_ALPHA_TO_ONE,
gl_SAMPLE_BUFFERS,
gl_SAMPLE_COVERAGE,
gl_SAMPLE_COVERAGE_INVERT,
gl_SAMPLE_COVERAGE_VALUE,
gl_SAMPLE_MASK,
gl_SAMPLE_MASK_VALUE,
gl_SAMPLE_POSITION,
gl_SAMPLE_SHADING,
gl_SCISSOR_BOX,
gl_SCISSOR_TEST,
gl_SEPARATE_ATTRIBS,
gl_SET,
gl_SHADER_SOURCE_LENGTH,
gl_SHADER_TYPE,
gl_SHADING_LANGUAGE_VERSION,
gl_SHORT,
gl_SIGNALED,
gl_SIGNED_NORMALIZED,
gl_SMOOTH_LINE_WIDTH_GRANULARITY,
gl_SMOOTH_LINE_WIDTH_RANGE,
gl_SMOOTH_POINT_SIZE_GRANULARITY,
gl_SMOOTH_POINT_SIZE_RANGE,
gl_SRC1_ALPHA,
gl_SRC1_COLOR,
gl_SRC_ALPHA,
gl_SRC_ALPHA_SATURATE,
gl_SRC_COLOR,
gl_SRGB,
gl_SRGB8,
gl_SRGB8_ALPHA8,
gl_SRGB_ALPHA,
gl_STATIC_COPY,
gl_STATIC_DRAW,
gl_STATIC_READ,
gl_STENCIL,
gl_STENCIL_ATTACHMENT,
gl_STENCIL_BACK_FAIL,
gl_STENCIL_BACK_FUNC,
gl_STENCIL_BACK_PASS_DEPTH_FAIL,
gl_STENCIL_BACK_PASS_DEPTH_PASS,
gl_STENCIL_BACK_REF,
gl_STENCIL_BACK_VALUE_MASK,
gl_STENCIL_BACK_WRITEMASK,
gl_STENCIL_BUFFER_BIT,
gl_STENCIL_CLEAR_VALUE,
gl_STENCIL_FAIL,
gl_STENCIL_FUNC,
gl_STENCIL_INDEX,
gl_STENCIL_INDEX1,
gl_STENCIL_INDEX16,
gl_STENCIL_INDEX4,
gl_STENCIL_INDEX8,
gl_STENCIL_PASS_DEPTH_FAIL,
gl_STENCIL_PASS_DEPTH_PASS,
gl_STENCIL_REF,
gl_STENCIL_TEST,
gl_STENCIL_VALUE_MASK,
gl_STENCIL_WRITEMASK,
gl_STEREO,
gl_STREAM_COPY,
gl_STREAM_DRAW,
gl_STREAM_READ,
gl_SUBPIXEL_BITS,
gl_SYNC_CONDITION,
gl_SYNC_FENCE,
gl_SYNC_FLAGS,
gl_SYNC_FLUSH_COMMANDS_BIT,
gl_SYNC_GPU_COMMANDS_COMPLETE,
gl_SYNC_STATUS,
gl_TESS_CONTROL_OUTPUT_VERTICES,
gl_TESS_CONTROL_SHADER,
gl_TESS_EVALUATION_SHADER,
gl_TESS_GEN_MODE,
gl_TESS_GEN_POINT_MODE,
gl_TESS_GEN_SPACING,
gl_TESS_GEN_VERTEX_ORDER,
gl_TEXTURE,
gl_TEXTURE0,
gl_TEXTURE1,
gl_TEXTURE10,
gl_TEXTURE11,
gl_TEXTURE12,
gl_TEXTURE13,
gl_TEXTURE14,
gl_TEXTURE15,
gl_TEXTURE16,
gl_TEXTURE17,
gl_TEXTURE18,
gl_TEXTURE19,
gl_TEXTURE2,
gl_TEXTURE20,
gl_TEXTURE21,
gl_TEXTURE22,
gl_TEXTURE23,
gl_TEXTURE24,
gl_TEXTURE25,
gl_TEXTURE26,
gl_TEXTURE27,
gl_TEXTURE28,
gl_TEXTURE29,
gl_TEXTURE3,
gl_TEXTURE30,
gl_TEXTURE31,
gl_TEXTURE4,
gl_TEXTURE5,
gl_TEXTURE6,
gl_TEXTURE7,
gl_TEXTURE8,
gl_TEXTURE9,
gl_TEXTURE_1D,
gl_TEXTURE_1D_ARRAY,
gl_TEXTURE_2D,
gl_TEXTURE_2D_ARRAY,
gl_TEXTURE_2D_MULTISAMPLE,
gl_TEXTURE_2D_MULTISAMPLE_ARRAY,
gl_TEXTURE_3D,
gl_TEXTURE_ALPHA_SIZE,
gl_TEXTURE_ALPHA_TYPE,
gl_TEXTURE_BASE_LEVEL,
gl_TEXTURE_BINDING_1D,
gl_TEXTURE_BINDING_1D_ARRAY,
gl_TEXTURE_BINDING_2D,
gl_TEXTURE_BINDING_2D_ARRAY,
gl_TEXTURE_BINDING_2D_MULTISAMPLE,
gl_TEXTURE_BINDING_2D_MULTISAMPLE_ARRAY,
gl_TEXTURE_BINDING_3D,
gl_TEXTURE_BINDING_BUFFER,
gl_TEXTURE_BINDING_CUBE_MAP,
gl_TEXTURE_BINDING_CUBE_MAP_ARRAY,
gl_TEXTURE_BINDING_RECTANGLE,
gl_TEXTURE_BLUE_SIZE,
gl_TEXTURE_BLUE_TYPE,
gl_TEXTURE_BORDER_COLOR,
gl_TEXTURE_BUFFER,
gl_TEXTURE_BUFFER_DATA_STORE_BINDING,
gl_TEXTURE_COMPARE_FUNC,
gl_TEXTURE_COMPARE_MODE,
gl_TEXTURE_COMPRESSED,
gl_TEXTURE_COMPRESSED_IMAGE_SIZE,
gl_TEXTURE_COMPRESSION_HINT,
gl_TEXTURE_CUBE_MAP,
gl_TEXTURE_CUBE_MAP_ARRAY,
gl_TEXTURE_CUBE_MAP_NEGATIVE_X,
gl_TEXTURE_CUBE_MAP_NEGATIVE_Y,
gl_TEXTURE_CUBE_MAP_NEGATIVE_Z,
gl_TEXTURE_CUBE_MAP_POSITIVE_X,
gl_TEXTURE_CUBE_MAP_POSITIVE_Y,
gl_TEXTURE_CUBE_MAP_POSITIVE_Z,
gl_TEXTURE_CUBE_MAP_SEAMLESS,
gl_TEXTURE_DEPTH,
gl_TEXTURE_DEPTH_SIZE,
gl_TEXTURE_DEPTH_TYPE,
gl_TEXTURE_FIXED_SAMPLE_LOCATIONS,
gl_TEXTURE_GREEN_SIZE,
gl_TEXTURE_GREEN_TYPE,
gl_TEXTURE_HEIGHT,
gl_TEXTURE_INTERNAL_FORMAT,
gl_TEXTURE_LOD_BIAS,
gl_TEXTURE_MAG_FILTER,
gl_TEXTURE_MAX_LEVEL,
gl_TEXTURE_MAX_LOD,
gl_TEXTURE_MIN_FILTER,
gl_TEXTURE_MIN_LOD,
gl_TEXTURE_RECTANGLE,
gl_TEXTURE_RED_SIZE,
gl_TEXTURE_RED_TYPE,
gl_TEXTURE_SAMPLES,
gl_TEXTURE_SHARED_SIZE,
gl_TEXTURE_STENCIL_SIZE,
gl_TEXTURE_SWIZZLE_A,
gl_TEXTURE_SWIZZLE_B,
gl_TEXTURE_SWIZZLE_G,
gl_TEXTURE_SWIZZLE_R,
gl_TEXTURE_SWIZZLE_RGBA,
gl_TEXTURE_WIDTH,
gl_TEXTURE_WRAP_R,
gl_TEXTURE_WRAP_S,
gl_TEXTURE_WRAP_T,
gl_TIMEOUT_EXPIRED,
gl_TIMEOUT_IGNORED,
gl_TIMESTAMP,
gl_TIME_ELAPSED,
gl_TRANSFORM_FEEDBACK,
gl_TRANSFORM_FEEDBACK_BINDING,
gl_TRANSFORM_FEEDBACK_BUFFER,
gl_TRANSFORM_FEEDBACK_BUFFER_ACTIVE,
gl_TRANSFORM_FEEDBACK_BUFFER_BINDING,
gl_TRANSFORM_FEEDBACK_BUFFER_MODE,
gl_TRANSFORM_FEEDBACK_BUFFER_PAUSED,
gl_TRANSFORM_FEEDBACK_BUFFER_SIZE,
gl_TRANSFORM_FEEDBACK_BUFFER_START,
gl_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN,
gl_TRANSFORM_FEEDBACK_VARYINGS,
gl_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH,
gl_TRIANGLES,
gl_TRIANGLES_ADJACENCY,
gl_TRIANGLE_FAN,
gl_TRIANGLE_STRIP,
gl_TRIANGLE_STRIP_ADJACENCY,
gl_TRUE,
gl_UNIFORM_ARRAY_STRIDE,
gl_UNIFORM_BLOCK_ACTIVE_UNIFORMS,
gl_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES,
gl_UNIFORM_BLOCK_BINDING,
gl_UNIFORM_BLOCK_DATA_SIZE,
gl_UNIFORM_BLOCK_INDEX,
gl_UNIFORM_BLOCK_NAME_LENGTH,
gl_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER,
gl_UNIFORM_BLOCK_REFERENCED_BY_GEOMETRY_SHADER,
gl_UNIFORM_BLOCK_REFERENCED_BY_TESS_CONTROL_SHADER,
gl_UNIFORM_BLOCK_REFERENCED_BY_TESS_EVALUATION_SHADER,
gl_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER,
gl_UNIFORM_BUFFER,
gl_UNIFORM_BUFFER_BINDING,
gl_UNIFORM_BUFFER_OFFSET_ALIGNMENT,
gl_UNIFORM_BUFFER_SIZE,
gl_UNIFORM_BUFFER_START,
gl_UNIFORM_IS_ROW_MAJOR,
gl_UNIFORM_MATRIX_STRIDE,
gl_UNIFORM_NAME_LENGTH,
gl_UNIFORM_OFFSET,
gl_UNIFORM_SIZE,
gl_UNIFORM_TYPE,
gl_UNPACK_ALIGNMENT,
gl_UNPACK_IMAGE_HEIGHT,
gl_UNPACK_LSB_FIRST,
gl_UNPACK_ROW_LENGTH,
gl_UNPACK_SKIP_IMAGES,
gl_UNPACK_SKIP_PIXELS,
gl_UNPACK_SKIP_ROWS,
gl_UNPACK_SWAP_BYTES,
gl_UNSIGNALED,
gl_UNSIGNED_BYTE,
gl_UNSIGNED_BYTE_2_3_3_REV,
gl_UNSIGNED_BYTE_3_3_2,
gl_UNSIGNED_INT,
gl_UNSIGNED_INT_10F_11F_11F_REV,
gl_UNSIGNED_INT_10_10_10_2,
gl_UNSIGNED_INT_24_8,
gl_UNSIGNED_INT_2_10_10_10_REV,
gl_UNSIGNED_INT_5_9_9_9_REV,
gl_UNSIGNED_INT_8_8_8_8,
gl_UNSIGNED_INT_8_8_8_8_REV,
gl_UNSIGNED_INT_SAMPLER_1D,
gl_UNSIGNED_INT_SAMPLER_1D_ARRAY,
gl_UNSIGNED_INT_SAMPLER_2D,
gl_UNSIGNED_INT_SAMPLER_2D_ARRAY,
gl_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE,
gl_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY,
gl_UNSIGNED_INT_SAMPLER_2D_RECT,
gl_UNSIGNED_INT_SAMPLER_3D,
gl_UNSIGNED_INT_SAMPLER_BUFFER,
gl_UNSIGNED_INT_SAMPLER_CUBE,
gl_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY,
gl_UNSIGNED_INT_VEC2,
gl_UNSIGNED_INT_VEC3,
gl_UNSIGNED_INT_VEC4,
gl_UNSIGNED_NORMALIZED,
gl_UNSIGNED_SHORT,
gl_UNSIGNED_SHORT_1_5_5_5_REV,
gl_UNSIGNED_SHORT_4_4_4_4,
gl_UNSIGNED_SHORT_4_4_4_4_REV,
gl_UNSIGNED_SHORT_5_5_5_1,
gl_UNSIGNED_SHORT_5_6_5,
gl_UNSIGNED_SHORT_5_6_5_REV,
gl_UPPER_LEFT,
gl_VALIDATE_STATUS,
gl_VENDOR,
gl_VERSION,
gl_VERTEX_ARRAY_BINDING,
gl_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING,
gl_VERTEX_ATTRIB_ARRAY_DIVISOR,
gl_VERTEX_ATTRIB_ARRAY_ENABLED,
gl_VERTEX_ATTRIB_ARRAY_INTEGER,
gl_VERTEX_ATTRIB_ARRAY_NORMALIZED,
gl_VERTEX_ATTRIB_ARRAY_POINTER,
gl_VERTEX_ATTRIB_ARRAY_SIZE,
gl_VERTEX_ATTRIB_ARRAY_STRIDE,
gl_VERTEX_ATTRIB_ARRAY_TYPE,
gl_VERTEX_PROGRAM_POINT_SIZE,
gl_VERTEX_SHADER,
gl_VIEWPORT,
gl_WAIT_FAILED,
gl_WRITE_ONLY,
gl_XOR,
gl_ZERO,
-- * Functions
glActiveTexture,
glAttachShader,
glBeginConditionalRender,
glBeginQuery,
glBeginQueryIndexed,
glBeginTransformFeedback,
glBindAttribLocation,
glBindBuffer,
glBindBufferBase,
glBindBufferRange,
glBindFragDataLocation,
glBindFragDataLocationIndexed,
glBindFramebuffer,
glBindRenderbuffer,
glBindSampler,
glBindTexture,
glBindTransformFeedback,
glBindVertexArray,
glBlendColor,
glBlendEquation,
glBlendEquationSeparate,
glBlendEquationSeparatei,
glBlendEquationi,
glBlendFunc,
glBlendFuncSeparate,
glBlendFuncSeparatei,
glBlendFunci,
glBlitFramebuffer,
glBufferData,
glBufferSubData,
glCheckFramebufferStatus,
glClampColor,
glClear,
glClearBufferfi,
glClearBufferfv,
glClearBufferiv,
glClearBufferuiv,
glClearColor,
glClearDepth,
glClearStencil,
glClientWaitSync,
glColorMask,
glColorMaski,
glCompileShader,
glCompressedTexImage1D,
glCompressedTexImage2D,
glCompressedTexImage3D,
glCompressedTexSubImage1D,
glCompressedTexSubImage2D,
glCompressedTexSubImage3D,
glCopyBufferSubData,
glCopyTexImage1D,
glCopyTexImage2D,
glCopyTexSubImage1D,
glCopyTexSubImage2D,
glCopyTexSubImage3D,
glCreateProgram,
glCreateShader,
glCullFace,
glDeleteBuffers,
glDeleteFramebuffers,
glDeleteProgram,
glDeleteQueries,
glDeleteRenderbuffers,
glDeleteSamplers,
glDeleteShader,
glDeleteSync,
glDeleteTextures,
glDeleteTransformFeedbacks,
glDeleteVertexArrays,
glDepthFunc,
glDepthMask,
glDepthRange,
glDetachShader,
glDisable,
glDisableVertexAttribArray,
glDisablei,
glDrawArrays,
glDrawArraysIndirect,
glDrawArraysInstanced,
glDrawBuffer,
glDrawBuffers,
glDrawElements,
glDrawElementsBaseVertex,
glDrawElementsIndirect,
glDrawElementsInstanced,
glDrawElementsInstancedBaseVertex,
glDrawRangeElements,
glDrawRangeElementsBaseVertex,
glDrawTransformFeedback,
glDrawTransformFeedbackStream,
glEnable,
glEnableVertexAttribArray,
glEnablei,
glEndConditionalRender,
glEndQuery,
glEndQueryIndexed,
glEndTransformFeedback,
glFenceSync,
glFinish,
glFlush,
glFlushMappedBufferRange,
glFramebufferRenderbuffer,
glFramebufferTexture,
glFramebufferTexture1D,
glFramebufferTexture2D,
glFramebufferTexture3D,
glFramebufferTextureLayer,
glFrontFace,
glGenBuffers,
glGenFramebuffers,
glGenQueries,
glGenRenderbuffers,
glGenSamplers,
glGenTextures,
glGenTransformFeedbacks,
glGenVertexArrays,
glGenerateMipmap,
glGetActiveAttrib,
glGetActiveSubroutineName,
glGetActiveSubroutineUniformName,
glGetActiveSubroutineUniformiv,
glGetActiveUniform,
glGetActiveUniformBlockName,
glGetActiveUniformBlockiv,
glGetActiveUniformName,
glGetActiveUniformsiv,
glGetAttachedShaders,
glGetAttribLocation,
glGetBooleani_v,
glGetBooleanv,
glGetBufferParameteri64v,
glGetBufferParameteriv,
glGetBufferPointerv,
glGetBufferSubData,
glGetCompressedTexImage,
glGetDoublev,
glGetError,
glGetFloatv,
glGetFragDataIndex,
glGetFragDataLocation,
glGetFramebufferAttachmentParameteriv,
glGetInteger64i_v,
glGetInteger64v,
glGetIntegeri_v,
glGetIntegerv,
glGetMultisamplefv,
glGetProgramInfoLog,
glGetProgramStageiv,
glGetProgramiv,
glGetQueryIndexediv,
glGetQueryObjecti64v,
glGetQueryObjectiv,
glGetQueryObjectui64v,
glGetQueryObjectuiv,
glGetQueryiv,
glGetRenderbufferParameteriv,
glGetSamplerParameterIiv,
glGetSamplerParameterIuiv,
glGetSamplerParameterfv,
glGetSamplerParameteriv,
glGetShaderInfoLog,
glGetShaderSource,
glGetShaderiv,
glGetString,
glGetStringi,
glGetSubroutineIndex,
glGetSubroutineUniformLocation,
glGetSynciv,
glGetTexImage,
glGetTexLevelParameterfv,
glGetTexLevelParameteriv,
glGetTexParameterIiv,
glGetTexParameterIuiv,
glGetTexParameterfv,
glGetTexParameteriv,
glGetTransformFeedbackVarying,
glGetUniformBlockIndex,
glGetUniformIndices,
glGetUniformLocation,
glGetUniformSubroutineuiv,
glGetUniformdv,
glGetUniformfv,
glGetUniformiv,
glGetUniformuiv,
glGetVertexAttribIiv,
glGetVertexAttribIuiv,
glGetVertexAttribPointerv,
glGetVertexAttribdv,
glGetVertexAttribfv,
glGetVertexAttribiv,
glHint,
glIsBuffer,
glIsEnabled,
glIsEnabledi,
glIsFramebuffer,
glIsProgram,
glIsQuery,
glIsRenderbuffer,
glIsSampler,
glIsShader,
glIsSync,
glIsTexture,
glIsTransformFeedback,
glIsVertexArray,
glLineWidth,
glLinkProgram,
glLogicOp,
glMapBuffer,
glMapBufferRange,
glMinSampleShading,
glMultiDrawArrays,
glMultiDrawElements,
glMultiDrawElementsBaseVertex,
glPatchParameterfv,
glPatchParameteri,
glPauseTransformFeedback,
glPixelStoref,
glPixelStorei,
glPointParameterf,
glPointParameterfv,
glPointParameteri,
glPointParameteriv,
glPointSize,
glPolygonMode,
glPolygonOffset,
glPrimitiveRestartIndex,
glProvokingVertex,
glQueryCounter,
glReadBuffer,
glReadPixels,
glRenderbufferStorage,
glRenderbufferStorageMultisample,
glResumeTransformFeedback,
glSampleCoverage,
glSampleMaski,
glSamplerParameterIiv,
glSamplerParameterIuiv,
glSamplerParameterf,
glSamplerParameterfv,
glSamplerParameteri,
glSamplerParameteriv,
glScissor,
glShaderSource,
glStencilFunc,
glStencilFuncSeparate,
glStencilMask,
glStencilMaskSeparate,
glStencilOp,
glStencilOpSeparate,
glTexBuffer,
glTexImage1D,
glTexImage2D,
glTexImage2DMultisample,
glTexImage3D,
glTexImage3DMultisample,
glTexParameterIiv,
glTexParameterIuiv,
glTexParameterf,
glTexParameterfv,
glTexParameteri,
glTexParameteriv,
glTexSubImage1D,
glTexSubImage2D,
glTexSubImage3D,
glTransformFeedbackVaryings,
glUniform1d,
glUniform1dv,
glUniform1f,
glUniform1fv,
glUniform1i,
glUniform1iv,
glUniform1ui,
glUniform1uiv,
glUniform2d,
glUniform2dv,
glUniform2f,
glUniform2fv,
glUniform2i,
glUniform2iv,
glUniform2ui,
glUniform2uiv,
glUniform3d,
glUniform3dv,
glUniform3f,
glUniform3fv,
glUniform3i,
glUniform3iv,
glUniform3ui,
glUniform3uiv,
glUniform4d,
glUniform4dv,
glUniform4f,
glUniform4fv,
glUniform4i,
glUniform4iv,
glUniform4ui,
glUniform4uiv,
glUniformBlockBinding,
glUniformMatrix2dv,
glUniformMatrix2fv,
glUniformMatrix2x3dv,
glUniformMatrix2x3fv,
glUniformMatrix2x4dv,
glUniformMatrix2x4fv,
glUniformMatrix3dv,
glUniformMatrix3fv,
glUniformMatrix3x2dv,
glUniformMatrix3x2fv,
glUniformMatrix3x4dv,
glUniformMatrix3x4fv,
glUniformMatrix4dv,
glUniformMatrix4fv,
glUniformMatrix4x2dv,
glUniformMatrix4x2fv,
glUniformMatrix4x3dv,
glUniformMatrix4x3fv,
glUniformSubroutinesuiv,
glUnmapBuffer,
glUseProgram,
glValidateProgram,
glVertexAttrib1d,
glVertexAttrib1dv,
glVertexAttrib1f,
glVertexAttrib1fv,
glVertexAttrib1s,
glVertexAttrib1sv,
glVertexAttrib2d,
glVertexAttrib2dv,
glVertexAttrib2f,
glVertexAttrib2fv,
glVertexAttrib2s,
glVertexAttrib2sv,
glVertexAttrib3d,
glVertexAttrib3dv,
glVertexAttrib3f,
glVertexAttrib3fv,
glVertexAttrib3s,
glVertexAttrib3sv,
glVertexAttrib4Nbv,
glVertexAttrib4Niv,
glVertexAttrib4Nsv,
glVertexAttrib4Nub,
glVertexAttrib4Nubv,
glVertexAttrib4Nuiv,
glVertexAttrib4Nusv,
glVertexAttrib4bv,
glVertexAttrib4d,
glVertexAttrib4dv,
glVertexAttrib4f,
glVertexAttrib4fv,
glVertexAttrib4iv,
glVertexAttrib4s,
glVertexAttrib4sv,
glVertexAttrib4ubv,
glVertexAttrib4uiv,
glVertexAttrib4usv,
glVertexAttribDivisor,
glVertexAttribI1i,
glVertexAttribI1iv,
glVertexAttribI1ui,
glVertexAttribI1uiv,
glVertexAttribI2i,
glVertexAttribI2iv,
glVertexAttribI2ui,
glVertexAttribI2uiv,
glVertexAttribI3i,
glVertexAttribI3iv,
glVertexAttribI3ui,
glVertexAttribI3uiv,
glVertexAttribI4bv,
glVertexAttribI4i,
glVertexAttribI4iv,
glVertexAttribI4sv,
glVertexAttribI4ubv,
glVertexAttribI4ui,
glVertexAttribI4uiv,
glVertexAttribI4usv,
glVertexAttribIPointer,
glVertexAttribP1ui,
glVertexAttribP1uiv,
glVertexAttribP2ui,
glVertexAttribP2uiv,
glVertexAttribP3ui,
glVertexAttribP3uiv,
glVertexAttribP4ui,
glVertexAttribP4uiv,
glVertexAttribPointer,
glViewport,
glWaitSync
) where
import Graphics.Rendering.OpenGL.Raw.Types
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/Core40.hs | bsd-3-clause | 30,197 | 0 | 4 | 3,953 | 3,919 | 2,620 | 1,299 | 1,294 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
module LJI.Music.Composer (
melody
, rootProgression
, fifths
, rootsFifths1
, rootsFifths2
, guideToneLine1
, guideToneLine2
, compose
, mordent
, target
, mixMelodies
, permutations
) where
import Prelude hiding (mapM, minimum)
import Control.Arrow
import Control.Applicative
import Control.Monad hiding (mapM)
import Control.Monad.Error hiding (mapM)
import Control.Monad.Error.Class
import Control.Monad.Random
import Control.Monad.Reader hiding (mapM)
import Control.Monad.State hiding (mapM)
import Control.Monad.Trans
import Control.Newtype
import Data.Monoid
import Data.Foldable
import Data.Traversable
import Data.List hiding (minimum)
import Debug.Trace
import LJI.Music (Tie(..), Class, Pitch, Dur, Note(..), Chord(..), (%),
minPitch, maxPitch, pitchClass, classDist)
import LJI.Music.Notation (Staff, Section, Chart(..))
import Data.Pointed
import qualified Data.Pointed as P (Pointed(..))
melody, rootProgression, fifths, guideToneLine1, guideToneLine2, rootsFifths1, rootsFifths2
:: Chart -> Staff (Class, Dur)
melody = chrtMelody
rootProgression = fmap ((,) <$> chdRoot <*> chdDur) . chrtStaff
fifths = fmap ((,) <$> chdFifth <*> chdDur) . chrtStaff
guideToneLine1 = fmap (\(gt, d) -> (fst gt, d)) . guideToneLines chdThird chdSeventh
guideToneLine2 = fmap (\(gt, d) -> (snd gt, d)) . guideToneLines chdThird chdSeventh
rootsFifths1 = fmap (\(gt, d) -> (fst gt, d)) . guideToneLines chdRoot chdFifth
rootsFifths2 = fmap (\(gt, d) -> (snd gt, d)) . guideToneLines chdRoot chdFifth
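-- guideToneLines threads a pair of voices through the chord progression: for
-- each chord it takes the two chord tones selected by the accessors (e.g.
-- third and seventh), and keeps whichever assignment of the pair minimises
-- the total voice-leading distance from the previously chosen pair.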
guideToneLines :: (Chord -> Class) -> (Chord -> Class) -> Chart -> Staff ((Class, Class), Dur)
guideToneLines f g = pack . fmap (flip evalState Nothing
. mapM (guideToneLines' f g)) . unpack . chrtStaff
guideToneLines' :: (Chord -> Class) -> (Chord -> Class) -> Chord -> State (Maybe (Class, Class)) ((Class, Class), Dur)
guideToneLines' f g ch =
do s <- get
let d = chdDur ch
case s of
Nothing ->
do let t = f ch
s = g ch
put $ Just (t, s)
return ((t, s), d)
Just (t', s') ->
do let t = f ch
s = g ch
if (classDist t t' + classDist s s')
<= (classDist t s' + classDist t' s)
then put (Just (t, s)) >> return ((t, s), d)
else put (Just (s, t)) >> return ((s, t), d)
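-- target - approach the pitch of the second note chromatically: the first
-- note is shortened (or dropped if nothing remains) and followed by approach
-- notes of duration dt at the given semitone offsets from the target pitch.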
target :: Dur -> [Int] -> Note -> Note -> [Note]
target dt ts (Note p d) (Note p' _) =
let (ts', d') = targeting dt ts d
in (if d' > 0 then [Note p d'] else [])
++ map (flip Note dt . (p'+)) ts'
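-- mordent - ornament a note in place: notes of duration dt at the given
-- semitone offsets from the note's own pitch lead into the (shortened) note.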
mordent :: Dur -> [Int] -> Note -> [Note]
mordent dt ts (Note p d) =
let (ts', d') = targeting dt ts d
in map (flip Note dt . (p+)) ts'
++ (if d' > 0 then [Note p d'] else [])
-- targeting - Given a targeting spec (duration and chromatics) return the
-- targeting that can be applied within the given duration along with what
-- remains
--
-- If the target note is not long enough return the longest possible tail of
-- the target notes
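--
-- For example, on an eighth-note grid: targeting (1%8) [-1,1] (1%4)
-- gives ([1], 1%8), since the full two-note approach would need the whole
-- quarter note, so only the last chromatic step is kept and an eighth note
-- of the original duration remains.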
targeting :: Dur -> [Int] -> Dur -> ([Int], Dur)
targeting _  [] d           = ([], d) -- subsumed by the general case below, but stated explicitly
targeting dt ts d | dt >= d = ([], d)
| otherwise =
let (ts', d') = head . dropWhile ((>=d) . snd) . map (flip (,) =<< dur)
. tails $ ts
in (ts', d - d')
where
dur = (*dt) . (%1) . fromIntegral . length
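-- compose - realise a (pitch class, duration) pair as a concrete note: choose
-- a random pitch of that class within the given range and, after the first
-- note, less than an octave away from the previously chosen pitch, which is
-- threaded through the state.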
compose :: RandomGen g =>
(Pitch, Pitch)
-> (Class, Dur)
-> StateT (Maybe Pitch) (Rand g) Note
compose (l, h) (c, d) = do
p <- get
p' <- case p of
Nothing ->
lift
. randomChoice
. filter (\x -> pitchClass x == c
&& l <= x
&& x <= h
)
$ [l..h]
Just p ->
lift
. randomChoice
. filter (\x -> pitchClass x == c
&& l <= x
&& x <= h
&& abs (x - p) < 12
)
$ [l..h]
put . Just $ p'
return $ Note p' d
randomChoice :: RandomGen g => [a] -> Rand g a
randomChoice [] = fail "LJI: Internal error (randomChoice: emptyList)"
randomChoice as = do (n :: Int) <- getRandomR (0, length as - 1)
return $ as !! n
-- mixMelodies - deterministically mix two or more melodies, producing one
-- mixed melody per permutation of the inputs. Each mix is built from segments
-- of duration d taken from the melodies in turn: the first segment from the
-- first melody, the second from the second, and so on, section by section,
-- until there is nothing left.
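--
-- For example, mixing two melodies A and B with segment duration d yields
-- (among the permutations) a line consisting of A's first d, B's second d,
-- A's third d, B's fourth d, and so on.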
mixMelodies :: Dur -> [Staff (a, Dur)] -> [Staff (a, Dur)]
mixMelodies d = map (mixMelodies' d) . permutations
where
mixMelodies' :: Dur -> [Staff (a, Dur)] -> Staff (a, Dur)
mixMelodies' _ [] = pack []
mixMelodies' d xs =
pack . map (mixSections d) . transpose . map unpack $ xs
mixSections :: Dur -> [Section (a, Dur)] -> Section (a, Dur)
mixSections d = pack . mixSections' d . map unpack
where
mixSections' :: Dur -> [[(a, Dur)]] -> [(a, Dur)]
mixSections' _ [] = []
mixSections' _ ([]:_) = []
mixSections' d (xs:xss) =
let (xs', rest) = takeMelody d xs
rests = map (snd . takeMelody d) xss
in xs' ++ mixSections' d (rests ++ [rest])
takeMelody :: Dur -> [(a, Dur)] -> ([(a, Dur)], [(a, Dur)])
takeMelody _ [] = ([], [])
takeMelody d ((a,da):rest) =
case da `compare` d of
EQ -> ([(a, da)], rest)
LT -> let (as, rest') = takeMelody (d-da) rest
in ((a, da):as, rest')
GT -> ([(a, d)], (a, da-d):rest)
| mrehayden1/lji | src/LJI/Music/Composer.hs | bsd-3-clause | 5,653 | 0 | 23 | 1,639 | 2,274 | 1,251 | 1,023 | 134 | 3 |
module Language.Interpreter.StdLib.BlockHandling
( addBlockHandlingStdLib
) where
import Language.Ast ( Value(Null) )
import Language.Interpreter.Types ( InterpreterProcess
, setBuiltIn
, withGfxCtx
)
import Gfx.Context ( popScope
, pushScope
)
addBlockHandlingStdLib :: InterpreterProcess ()
addBlockHandlingStdLib = do
setBuiltIn "pushScope" pushGfxScope
setBuiltIn "popScope" popGfxScope
pushGfxScope :: [Value] -> InterpreterProcess Value
pushGfxScope _ = withGfxCtx pushScope >> return Null
popGfxScope :: [Value] -> InterpreterProcess Value
popGfxScope _ = withGfxCtx popScope >> return Null
| rumblesan/improviz | src/Language/Interpreter/StdLib/BlockHandling.hs | bsd-3-clause | 923 | 0 | 7 | 393 | 154 | 83 | 71 | 16 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Concurrent
import Control.Concurrent.STM
import qualified Data.Text as Text
import Graphics.Blank
import Paths_blank_canvas_examples (getDataDir)
import Debug.Trace
-- a play bar with play/pause functionality
main :: IO ()
main = do
dat <- getDataDir
blankCanvas 3000 { events = ["mousedown"], root = dat } $ \context ->
startLoop context "music/sonata.ogg"
data Play = Playing | Paused
deriving (Eq,Ord,Show)
-- switch Play to opposite value
swap :: TVar Play -> STM ()
swap play = do
play' <- readTVar play
if (play' == Playing)
then writeTVar play (Paused)
else writeTVar play (Playing)
-- starts the loop.
-- divided into three threads:
-- getCurTime: writes the current playback time into the curTime TVar; it runs
-- in its own thread so that the latency of querying the current time stays
-- separate from updating the frame
-- loopInput: get the mouse input
-- loopBar: draw the frame
startLoop :: DeviceContext -> Text.Text -> IO ()
startLoop context filename = do
music <- send context $ newAudio filename -- make new Audio
play <- newTVarIO (Paused) -- The audio is paused when the page loads
curTime <- newTVarIO 0 -- The audio starts at the beginning
forkIO $ getCurTime context music curTime
forkIO $ loopInput context music play
loopBar context music play curTime
-- Draw the pause symbol, ||
pauseDraw :: Double -> Double -> Canvas ()
pauseDraw x y = do
lineWidth 5
lineCap "round"
-- left line
beginPath()
moveTo(x+4,y)
lineTo(x+4,y+31)
stroke()
-- right line
beginPath()
moveTo(x+17,y)
lineTo(x+17,y+31)
stroke()
-- The play symbol |>
playDraw :: Double -> Double -> Canvas ()
playDraw x y = do
lineWidth 5
lineCap "round"
beginPath()
moveTo(x,y)
lineTo(x,y+31)
stroke()
beginPath()
moveTo(x,y+31)
lineTo(x+23,y+16)
stroke()
beginPath()
moveTo(x+23,y+16)
lineTo(x,y)
stroke()
-- draws the progress bar and fills it in as
-- audio progresses
playbarBox :: Double -> Double -> Double -> Double -> Canvas ()
playbarBox x y curTime time = do
  -- empty bar
beginPath ()
rect(x+32,y,160,31)
lineWidth 5
stroke()
-- fill in
beginPath ()
let proportion = curTime / time
rect(x+32,y,160*proportion,30)
fill()
lineWidth 0
stroke()
-- draws the entire playbar with play/pause
-- symbol and progress bar
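playbarDraw :: DeviceContext -> CanvasAudio -> Play -> Double -> IO ()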
playbarDraw context audio play curTime = do
send context $ do
-- clear frame
clearRect (0,0,200,45)
-- get full duration of audio file
let time = durationAudio audio
playbarBox 5 10 curTime time
-- draw play/pause depending on current state
if (play == Playing)
then do
pauseDraw 5 10
else do
playDraw 5 10
-- controls the drawing/redrawing of the frames
loopBar :: DeviceContext -> CanvasAudio -> TVar Play -> TVar Double -> IO ()
loopBar context audio play curTime = do
play' <- readTVarIO play
curTime' <- readTVarIO curTime
playbarDraw context audio play' curTime'
threadDelay (40 * 1000)
loopBar context audio play curTime
-- continuously updates the TVar Double with the current time
getCurTime :: DeviceContext -> CanvasAudio -> TVar Double -> IO ()
getCurTime context audio curTime = do
curTime' <- send context $ currentTimeAudio audio
atomically $ writeTVar curTime curTime'
getCurTime context audio curTime
-- reads the mouse input
loopInput :: DeviceContext -> CanvasAudio -> TVar Play -> IO ()
loopInput context audio play = do
play' <- readTVarIO play
event <- wait context
case ePageXY event of
-- if no mouse location, ignore, and loop again
Nothing -> loopInput context audio play
-- rework to get proper clicking range
Just (w,h) -> do
-- checks to see if the mouse is being clicked on top of the play/pause button
if (w >= 5 && w <= 28 && h >= 10 && h <= 41) then
send context $ do
if (play' == Playing)
then do
pauseAudio audio
else do
playAudio audio
else loopInput context audio play
-- update play
atomically $ swap play
loopInput context audio play
| ku-fpg/blank-canvas | examples/playbar/Main.hs | bsd-3-clause | 4,200 | 0 | 20 | 984 | 1,298 | 630 | 668 | 108 | 4 |
-- | Output RSS feeds.
module Snap.App.RSS where
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time
import Snap.App
import Snap.App.XML
import System.Locale
import Text.Feed.Export
import Text.Feed.Types
import Text.RSS.Syntax
import Text.XML.Light
-- | Output the given feed items as an RSS feed.
outputRSS :: String -> String -> [(UTCTime,Text,Text,Text)] -> Controller c s ()
outputRSS title link = outputXML . makeFeed title link
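-- For example, inside a controller action (values are hypothetical; the Text
-- fields assume OverloadedStrings):
--
-- > outputRSS "My Blog" "http://example.com/blog"
-- >           [(postTime, "Post title", "Post summary", "http://example.com/posts/1")]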
-- | Make a simple RSS feed.
makeFeed :: String -> String -> [(UTCTime,Text,Text,Text)] -> Element
makeFeed title link = xmlFeed . RSSFeed . makeRSS where
makeRSS qs = (nullRSS title link)
{ rssChannel = makeChannel qs }
makeChannel qs = (nullChannel title link)
{ rssItems = map makeItem qs }
makeItem (time,itemtitle,desc,itemlink) =
(nullItem (T.unpack itemtitle))
{ rssItemPubDate = return (toPubDate time)
, rssItemDescription = return (T.unpack desc)
, rssItemLink = return (T.unpack itemlink)
}
toPubDate = formatTime defaultTimeLocale "%a, %d %b %Y %H:%M:%S UT"
| chrisdone/snap-app | src/Snap/App/RSS.hs | bsd-3-clause | 1,168 | 0 | 12 | 302 | 336 | 190 | 146 | 25 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
-- |
-- Module : Data.Array.Nikola.Backend.CUDA.Nvcc
-- Copyright : (c) Geoffrey Mainland 2012
-- License : BSD-style
--
-- Maintainer : Geoffrey Mainland <[email protected]>
-- Stability : experimental
-- Portability : non-portable
module Data.Array.Nikola.Backend.CUDA.Nvcc (
compile
) where
import Control.Monad.State
import qualified Data.ByteString as B
import qualified Language.C as C
import System.Exit
import Text.PrettyPrint.Mainland
#if MIN_VERSION_process(1,1,0)
import System.Process (readProcessWithExitCode)
#else /* !MIN_VERSION_process(1,1,0) */
import Control.Concurrent (forkIO,
newEmptyMVar,
putMVar,
takeMVar)
import qualified Control.Exception as E
import System.Process (StdStream(..),
proc,
createProcess,
waitForProcess,
std_in,
std_out,
std_err)
import System.IO (hClose,
hFlush,
hGetContents,
hPutStr)
#endif /* !MIN_VERSION_process(1,1,0) */
#include "Nikola.h"
data NvccOpt = Ptx
| Cubin
| Fatbin
| Gencode10
| Gencode11
| Gencode20
| Gencode30
| Debug
| Freeform String
deriving (Eq, Ord)
opts2args :: NvccOpt -> [String]
opts2args Ptx = ["--ptx"]
opts2args Fatbin = ["--fatbin"]
opts2args Cubin = ["--cubin"]
opts2args Gencode10 = ["-gencode", "arch=compute_10,code=sm_10"]
opts2args Gencode11 = ["-gencode", "arch=compute_11,code=sm_11"]
opts2args Gencode20 = ["-gencode", "arch=compute_20,code=sm_20"]
opts2args Gencode30 = ["-gencode", "arch=compute_30,code=sm_30"]
opts2args Debug = ["-G"]
opts2args (Freeform opt) = [opt]
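-- compileEx pretty-prints the C definitions to a temporary .cu file, invokes
-- nvcc with the given options, and returns the resulting object code (PTX,
-- cubin, or fatbin, depending on the options).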
compileEx :: [NvccOpt] -> [C.Definition] -> IO B.ByteString
compileEx opts cdefs = do
writeFile cupath (show (stack (map ppr cdefs)))
(exitCode, _, err) <- readProcessWithExitCode NVCC
(["--compiler-bindir", NVCC_CC] ++
concatMap opts2args opts ++
[cupath, "-o", objpath])
""
when (exitCode /= ExitSuccess) $
fail $ "nvcc failed: " ++ err
B.readFile objpath
where
pathRoot :: FilePath
pathRoot = "temp"
cupath :: FilePath
cupath = pathRoot ++ ".cu"
objpath :: FilePath
objpath | Fatbin `elem` opts = pathRoot ++ ".fatbin"
| Cubin `elem` opts = pathRoot ++ ".cubin"
| otherwise = pathRoot ++ ".ptx"
-- Starting with version 4.0, nvcc can compile a fat binary.
compile :: [C.Definition] -> IO B.ByteString
compile = compileEx
#if NVCC_VERSION < 40
#error "nvcc < 4 not supported"
#elif NVCC_VERSION < 42
[Fatbin, Gencode11, Gencode20]
#else /* NVCC_VERSION >= 42 */
[Fatbin, Gencode11, Gencode20, Gencode30]
#endif /* NVCC_VERSION >= 42 */
#if !MIN_VERSION_process(1,1,0)
readProcessWithExitCode
:: FilePath -- ^ command to run
-> [String] -- ^ any arguments
-> String -- ^ standard input
-> IO (ExitCode,String,String) -- ^ exitcode, stdout, stderr
readProcessWithExitCode cmd args input = do
(Just inh, Just outh, Just errh, pid) <-
createProcess (proc cmd args){ std_in = CreatePipe,
std_out = CreatePipe,
std_err = CreatePipe }
outMVar <- newEmptyMVar
-- fork off a thread to start consuming stdout
out <- hGetContents outh
_ <- forkIO $ E.evaluate (length out) >> putMVar outMVar ()
-- fork off a thread to start consuming stderr
err <- hGetContents errh
_ <- forkIO $ E.evaluate (length err) >> putMVar outMVar ()
-- now write and flush any input
when (not (null input)) $ do hPutStr inh input; hFlush inh
hClose inh -- done with stdin
-- wait on the output
takeMVar outMVar
takeMVar outMVar
hClose outh
hClose errh
-- wait on the process
ex <- waitForProcess pid
return (ex, out, err)
#endif /* !MIN_VERSION_process(1,1,0) */
| mainland/nikola | src/Data/Array/Nikola/Backend/CUDA/Nvcc.hs | bsd-3-clause | 4,615 | 0 | 13 | 1,352 | 818 | 449 | 369 | -1 | -1 |
{-| Careless conversion of @ByteString@s to numbers, ignoring bytes that
aren't hex or decimal digits.
-}
module Data.ByteString.Nums.Careless
( module Data.ByteString.Nums.Careless.Int
, module Data.ByteString.Nums.Careless.Hex
, module Data.ByteString.Nums.Careless.Float
) where
import Data.ByteString.Nums.Careless.Int (Intable(..))
import Data.ByteString.Nums.Careless.Hex (Hexable(..))
import Data.ByteString.Nums.Careless.Float (Floatable(..))
| solidsnack/bytestring-nums | Data/ByteString/Nums/Careless.hs | bsd-3-clause | 471 | 0 | 6 | 55 | 89 | 65 | 24 | 7 | 0 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Numeral.EN.Tests
( tests
) where
import Data.String
import Prelude
import Test.Tasty
import Test.Tasty.HUnit
import Duckling.Dimensions.Types
import Duckling.Numeral.EN.Corpus
import Duckling.Numeral.Types
import Duckling.Testing.Asserts
import Duckling.Testing.Types
tests :: TestTree
tests = testGroup "EN Tests"
[ makeCorpusTest [This Numeral] corpus
, surroundTests
]
surroundTests :: TestTree
surroundTests = testCase "Surround Tests" $
mapM_ (analyzedFirstTest testContext . withTargets [This Numeral]) xs
where
xs = concat
[ examples (NumeralValue 3)
[ "3km"
]
, examples (NumeralValue 100000)
[ "100k€"
, "100k\x20ac"
]
, examples (NumeralValue 10.99)
[ "10.99$"
]
]
| rfranek/duckling | tests/Duckling/Numeral/EN/Tests.hs | bsd-3-clause | 1,197 | 0 | 11 | 310 | 207 | 120 | 87 | 27 | 1 |
module Text.Highlighter.Lexers.ActionScript3 (lexer) where
import Text.Regex.PCRE.Light
import Text.Highlighter.Types
lexer :: Lexer
lexer = Lexer
{ lName = "ActionScript 3"
, lAliases = ["as3", "actionscript3"]
, lExtensions = [".as"]
, lMimetypes = ["application/x-actionscript", "text/x-actionscript", "text/actionscript"]
, lStart = root'
, lFlags = [multiline, dotall]
}
defval' :: TokenMatcher
defval' =
[ tokNext "(=)(\\s*)([^(),]+)(\\s*)(,?)" (ByGroups [(Arbitrary "Operator"), (Arbitrary "Text"), (Using lexer), (Arbitrary "Text"), (Arbitrary "Operator")]) Pop
, tokNext ",?" (Arbitrary "Operator") Pop
]
type' :: TokenMatcher
type' =
[ tokNext "(\\s*)(:)(\\s*)([$a-zA-Z_][a-zA-Z0-9_]*|\\*)" (ByGroups [(Arbitrary "Text"), (Arbitrary "Operator"), (Arbitrary "Text"), (Arbitrary "Keyword" :. Arbitrary "Type")]) (PopNum 2)
, tokNext "\\s*" (Arbitrary "Text") (PopNum 2)
]
root' :: TokenMatcher
root' =
[ tok "\\s+" (Arbitrary "Text")
, tokNext "(function\\s+)([$a-zA-Z_][a-zA-Z0-9_]*)(\\s*)(\\()" (ByGroups [(Arbitrary "Keyword" :. Arbitrary "Declaration"), (Arbitrary "Name" :. Arbitrary "Function"), (Arbitrary "Text"), (Arbitrary "Operator")]) (GoTo funcparams')
, tok "(var|const)(\\s+)([$a-zA-Z_][a-zA-Z0-9_]*)(\\s*)(:)(\\s*)([$a-zA-Z_][a-zA-Z0-9_]*)" (ByGroups [(Arbitrary "Keyword" :. Arbitrary "Declaration"), (Arbitrary "Text"), (Arbitrary "Name"), (Arbitrary "Text"), (Arbitrary "Punctuation"), (Arbitrary "Text"), (Arbitrary "Keyword" :. Arbitrary "Type")])
, tok "(import|package)(\\s+)((?:[$a-zA-Z_][a-zA-Z0-9_]*|\\.)+)(\\s*)" (ByGroups [(Arbitrary "Keyword"), (Arbitrary "Text"), (Arbitrary "Name" :. Arbitrary "Namespace"), (Arbitrary "Text")])
, tok "(new)(\\s+)([$a-zA-Z_][a-zA-Z0-9_]*)(\\s*)(\\()" (ByGroups [(Arbitrary "Keyword"), (Arbitrary "Text"), (Arbitrary "Keyword" :. Arbitrary "Type"), (Arbitrary "Text"), (Arbitrary "Operator")])
, tok "//.*?\\n" (Arbitrary "Comment" :. Arbitrary "Single")
, tok "/\\*.*?\\*/" (Arbitrary "Comment" :. Arbitrary "Multiline")
, tok "/(\\\\\\\\|\\\\/|[^\\n])*/[gisx]*" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Regex")
, tok "(\\.)([$a-zA-Z_][a-zA-Z0-9_]*)" (ByGroups [(Arbitrary "Operator"), (Arbitrary "Name" :. Arbitrary "Attribute")])
, tok "(case|default|for|each|in|while|do|break|return|continue|if|else|throw|try|catch|with|new|typeof|arguments|instanceof|this|switch|import|include|as|is)\\b" (Arbitrary "Keyword")
, tok "(class|public|final|internal|native|override|private|protected|static|import|extends|implements|interface|intrinsic|return|super|dynamic|function|const|get|namespace|package|set)\\b" (Arbitrary "Keyword" :. Arbitrary "Declaration")
, tok "(true|false|null|NaN|Infinity|-Infinity|undefined|void)\\b" (Arbitrary "Keyword" :. Arbitrary "Constant")
, tok "(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|unescape)\\b" (Arbitrary "Name" :. Arbitrary "Function")
, tok "[$a-zA-Z_][a-zA-Z0-9_]*" (Arbitrary "Name")
, tok "[0-9][0-9]*\\.[0-9]+([eE][0-9]+)?[fd]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Float")
, tok "0x[0-9a-f]+" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Hex")
, tok "[0-9]+" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Integer")
, tok "\"(\\\\\\\\|\\\\\"|[^\"])*\"" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Double")
, tok "'(\\\\\\\\|\\\\'|[^'])*'" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Single")
, tok "[\126\\^\\*!%&<>\\|+=:;,/?\\\\{}\\[\\]();.-]+" (Arbitrary "Operator")
]
funcparams' :: TokenMatcher
funcparams' =
[ tok "\\s+" (Arbitrary "Text")
, tokNext "(\\s*)(\\.\\.\\.)?([$a-zA-Z_][a-zA-Z0-9_]*)(\\s*)(:)(\\s*)([$a-zA-Z_][a-zA-Z0-9_]*|\\*)(\\s*)" (ByGroups [(Arbitrary "Text"), (Arbitrary "Punctuation"), (Arbitrary "Name"), (Arbitrary "Text"), (Arbitrary "Operator"), (Arbitrary "Text"), (Arbitrary "Keyword" :. Arbitrary "Type"), (Arbitrary "Text")]) (GoTo defval')
, tokNext "\\)" (Arbitrary "Operator") (GoTo type')
]
| chemist/highlighter | src/Text/Highlighter/Lexers/ActionScript3.hs | bsd-3-clause | 4,237 | 0 | 12 | 522 | 1,093 | 573 | 520 | 46 | 1 |
#ifdef IncludedmakeNewIndicesNullLeft
#else
#include "../Proofs/makeNewIndicesNullLeft.hs"
#endif
#ifdef IncludedmakeNewIndicesNullRight
#else
#include "../Proofs/makeNewIndicesNullRight.hs"
#endif
#ifdef IncludedmapShiftZero
#else
#include "../Proofs/mapShiftZero.hs"
#endif
#ifdef IncludedmakeIndicesNull
#else
#include "../Proofs/makeIndicesNull.hs"
#endif
#ifdef IncludedcatIndices
#else
#include "../Proofs/catIndices.hs"
#endif
#ifdef IncludedmergeIndices
#else
#include "../Proofs/mergeIndices.hs"
#endif
#ifdef IncludedmapCastId
#else
#include "../Proofs/mapCastId.hs"
#endif
-------------------------------------------------------------------------------
---------- Lemmata on Shifting Indices ---------------------------------------
-------------------------------------------------------------------------------
{-@ shiftNewIndices
:: xi:RString
-> yi:RString
-> zi:RString
-> tg:{RString | stringLen yi < stringLen tg }
-> { append (makeNewIndices xi (yi <+> zi) tg) (map (shiftStringRight tg xi (yi <+> zi)) (makeNewIndices yi zi tg))
== append (map (castGoodIndexRight tg (xi <+> yi) zi) (makeNewIndices xi yi tg)) (makeNewIndices (xi <+> yi) zi tg)
}
@-}
shiftNewIndices :: RString -> RString -> RString -> RString -> Proof
shiftNewIndices xi yi zi tg
| stringLen tg < 2
= append (makeNewIndices xi yzi tg) (map (shiftStringRight tg xi yzi) (makeNewIndices yi zi tg))
==. append N (map (shiftStringRight tg xi yzi) N)
==. map (shiftStringRight tg xi yzi) N
==. N
==. append N N
==. append (makeNewIndices xi yi tg) (makeNewIndices xyi zi tg)
==. append (map (castGoodIndexRight tg xyi zi) (makeNewIndices xi yi tg)) (makeNewIndices xyi zi tg)
? mapCastId tg xyi zi (makeNewIndices xi yi tg)
*** QED
where
yzi = yi <+> zi
xyi = xi <+> yi
xyziL = xyi <+> zi
shiftNewIndices xi yi zi tg
| stringLen xi == 0
= append (makeNewIndices xi yzi tg)
(map (shiftStringRight tg xi yzi) (makeNewIndices yi zi tg))
==. append (makeNewIndices stringEmp yzi tg)
(map (shiftStringRight tg xi yzi) (makeNewIndices yi zi tg))
? stringEmpProp xi
==. append (makeNewIndices stringEmp yzi tg)
(map (shiftStringRight tg xi yzi) (makeNewIndices yi zi tg))
? makeNewIndicesNullRight yzi tg
==. append N
(map (shiftStringRight tg xi yzi) (makeNewIndices yi zi tg))
==. map (shiftStringRight tg xi yzi) (makeNewIndices yi zi tg)
? stringEmpProp xi
==. map (shiftStringRight tg stringEmp yzi) (makeNewIndices yi zi tg)
? mapShiftZero tg yzi (makeNewIndices yi zi tg)
==. makeNewIndices yi zi tg
==. makeNewIndices xyi zi tg
? concatEmpLeft xi yi
==. append N (makeNewIndices xyi zi tg)
==. append (makeNewIndices stringEmp yi tg) (makeNewIndices xyi zi tg)
? makeNewIndicesNullRight yi tg
==. append (makeNewIndices xi yi tg) (makeNewIndices xyi zi tg)
? stringEmpProp xi
==. append (map (castGoodIndexRight tg xyi zi) (makeNewIndices xi yi tg)) (makeNewIndices xyi zi tg)
? mapCastId tg xyi zi (makeNewIndices xi yi tg)
*** QED
| stringLen yi == 0
= append (makeNewIndices xi yzi tg)
(map (shiftStringRight tg xi yzi) (makeNewIndices yi zi tg))
==. append (makeNewIndices xi zi tg)
(map (shiftStringRight tg xi yzi) (makeNewIndices yi zi tg))
?(stringEmpProp yi &&& concatEmpLeft yi zi)
==. append (makeNewIndices xi zi tg)
(map (shiftStringRight tg xi zi) (makeNewIndices stringEmp zi tg))
==. append (makeNewIndices xi zi tg)
(map (shiftStringRight tg xi (stringEmp <+> zi)) N)
?makeNewIndicesNullRight zi tg
==. append (makeNewIndices xi zi tg) N
==. makeNewIndices xi zi tg
?listLeftId (makeNewIndices xi zi tg)
==. makeNewIndices xyi zi tg
?concatEmpRight xi yi
==. append N (makeNewIndices xyi zi tg)
==. append (makeNewIndices xi stringEmp tg) (makeNewIndices xyi zi tg)
?makeNewIndicesNullLeft xi tg
==. append (makeNewIndices xi yi tg) (makeNewIndices xyi zi tg)
==. append (map (castGoodIndexRight tg xyi zi) (makeNewIndices xi yi tg)) (makeNewIndices xyi zi tg)
? (stringEmpProp yi &&& mapCastId tg xyi zi (makeNewIndices xi yi tg))
*** QED
| stringLen yi - stringLen tg == - 1
= append (makeNewIndices xi yzi tg)
(map (shiftStringRight tg xi yzi) (makeNewIndices yi zi tg))
==. append (makeIndices xyziR tg loxi hixi)
(map (shiftStringRight tg xi yzi) (makeIndices yzi tg loyi hiyi))
==. append (makeIndices xyziR tg loxi hixi)
(makeIndices xyziR tg midxyi hixyi)
?shiftIndicesRight loyi hiyi xi yzi tg
==. append (makeIndices xyziL tg loxi hixi)
(makeIndices xyziL tg midxyi hixyi)
?concatStringAssoc xi yi zi
==. append (append (makeIndices xyziR tg loxi midxi)
(makeIndices xyziR tg (midxi+1) hixi))
(makeIndices xyziR tg midxyi hixyi)
?mergeIndices xyziL tg loxi midxi hixi
==. append (append (makeIndices xyziR tg loxi midxi) N)
(makeIndices xyziR tg midxyi hixyi)
==. append (makeIndices xyziR tg loxi midxi)
(makeIndices xyziR tg midxyi hixyi)
?listLeftId (makeIndices xyziR tg loxi midxi)
==. append (makeIndices xyi tg loxi hixi)
(makeIndices xyziR tg midxyi hixyi)
?catIndices xyi zi tg loxi hixi
==. append (makeIndices xyi tg loxi hixi)
(makeIndices xyziL tg loxyi hixyi)
==. append (makeNewIndices xi yi tg) (makeNewIndices xyi zi tg)
==. append (map (castGoodIndexRight tg xyi zi) (makeNewIndices xi yi tg)) (makeNewIndices xyi zi tg)
?mapCastId tg xyi zi (makeNewIndices xi yi tg)
*** QED
| 0 <= stringLen xi + stringLen yi - stringLen tg
= append (makeNewIndices xi yzi tg)
(map (shiftStringRight tg xi yzi) (makeNewIndices yi zi tg))
==. append (makeIndices xyziR tg loxi hixi)
(map (shiftStringRight tg xi yzi) (makeIndices yzi tg loyi hiyi))
==. append (makeIndices xyziR tg loxi hixi)
(makeIndices xyziR tg midxyi hixyi)
?shiftIndicesRight loyi hiyi xi yzi tg
==. append (makeIndices xyziL tg loxi hixi)
(makeIndices xyziL tg midxyi hixyi)
?concatStringAssoc xi yi zi
==. append (append (makeIndices xyziR tg loxi midxi)
(makeIndices xyziR tg (midxi+1) hixi))
(makeIndices xyziR tg midxyi hixyi)
?mergeIndices xyziL tg loxi midxi hixi
==. append (makeIndices xyziL tg loxi midxi)
(append (makeIndices xyziL tg (midxi+1) hixi)
(makeIndices xyziL tg midxyi hixyi))
?listAssoc (makeIndices xyziR tg loxi midxi) (makeIndices xyziR tg (midxi+1) hixi) (makeIndices xyziR tg midxyi hixyi)
==. append (makeIndices xyziL tg loxi midxi)
(makeIndices xyziL tg (midxi+1) hixyi)
?mergeIndices xyziL tg (midxi+1) hixi hixyi
==. append (makeIndices xyi tg loxi hixi)
(makeIndices xyziL tg (midxi+1) hixyi)
?catIndices xyi zi tg loxi hixi
==. append (makeIndices xyi tg loxi hixi)
(makeIndices xyziL tg loxyi hixyi)
==. append (makeNewIndices xi yi tg) (makeNewIndices xyi zi tg)
==. append (map (castGoodIndexRight tg xyi zi) (makeNewIndices xi yi tg)) (makeNewIndices xyi zi tg)
?mapCastId tg xyi zi (makeNewIndices xi yi tg)
*** QED
| stringLen xi + stringLen yi < stringLen tg
= append (makeNewIndices xi yzi tg)
(map (shiftStringRight tg xi yzi) (makeNewIndices yi zi tg))
==. append (makeIndices xyziR tg loxi hixi)
(map (shiftStringRight tg xi yzi) (makeIndices yzi tg loyi hiyi))
==. append (makeIndices xyziR tg loxi hixi)
(makeIndices xyziR tg midxyi hixyi)
?shiftIndicesRight loyi hiyi xi yzi tg
==. append (makeIndices xyziL tg loxi hixi)
(makeIndices xyziL tg midxyi hixyi)
?concatStringAssoc xi yi zi
==. makeIndices xyziL tg 0 (stringLen xyi - 1)
?mergeIndices xyziL tg loxi hixi hixyi
==. append N (makeIndices xyziL tg 0 hixyi)
==. append (makeIndices xyi tg loxi hixi)
(makeIndices xyziL tg loxyi hixyi)
? makeIndicesNull xyi tg 0 (stringLen xi -1)
==. append (makeNewIndices xi yi tg) (makeNewIndices xyi zi tg)
==. append (map (castGoodIndexRight tg xyi zi) (makeNewIndices xi yi tg)) (makeNewIndices xyi zi tg)
? mapCastId tg xyi zi (makeNewIndices xi yi tg)
*** QED
where
xyziR = xi <+> (yi <+> zi)
xyziL = xyi <+> zi
yzi = yi <+> zi
xyi = xi <+> yi
midxyi = maxInt (stringLen xi + stringLen yi - stringLen tg + 1) (stringLen xi)
midxi = stringLen xi + stringLen yi - stringLen tg
loyi = maxInt (stringLen yi - stringLen tg + 1) 0
loxi = maxInt (stringLen xi - stringLen tg + 1) 0
loxyi = maxInt (stringLen xyi - stringLen tg + 1) 0
hiyi = stringLen yi - 1
hixi = stringLen xi - 1
hixyi = stringLen xi + hiyi
| nikivazou/verified_string_matching | src/Proofs/shiftNewIndices.hs | bsd-3-clause | 9,092 | 0 | 30 | 2,278 | 3,086 | 1,514 | 1,572 | 158 | 1 |
module Tisp.Expr (fromAST, toAST, Expr(..), ExprVal(..), emptyEnv, errors, normalize, Subst(..), subst, eval, infer) where
import Data.Text (Text)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Control.Lens
import Data.Maybe (fromMaybe)
import Data.Foldable
import Data.Monoid
import Control.Monad.Reader
import Control.Monad.Except
import Tisp.Tokenize
import Tisp.Value
import Tisp.AST (AST, Equiv, equiv, Pattern(..))
import qualified Tisp.AST as A
data Expr a = Expr { exprLabel :: a, exprVal :: (ExprVal a) }
deriving instance Eq a => Eq (Expr a)
deriving instance Show a => Show (Expr a)
data ExprVal a = Var Var
| Lambda Symbol (Expr a) (Expr a)
| Pi Symbol (Expr a) (Expr a)
| App (Expr a) (Expr a)
| Case (Expr a) [(Pattern, Expr a)]
| Data Symbol [Expr a]
| Literal Literal
| ExprError SourceLoc Text
deriving instance Eq a => Eq (ExprVal a)
deriving instance Show a => Show (ExprVal a)
instance Plated (Expr a) where
plate _ e@(Expr _ (Var _)) = pure e
plate f (Expr l (Lambda n t e)) = Expr l <$> (Lambda n <$> f t <*> f e)
plate f (Expr l (Pi n t e)) = Expr l <$> (Pi n <$> f t <*> f e)
plate f (Expr l (App g x)) = Expr l <$> (App <$> f g <*> f x)
plate f (Expr l (Case e cs)) = Expr l <$> (Case <$> f e <*> traverseOf (traverse._2) f cs)
plate f (Expr l (Data s xs)) = Expr l <$> (Data s <$> traverse f xs)
plate _ e@(Expr _ (Literal _)) = pure e
plate _ e@(Expr _ (ExprError _ _)) = pure e
data Subst a = Shift Int | Dot a (Subst a)
deriving (Eq, Show)
instance Functor Subst where
fmap _ (Shift s) = Shift s
fmap f (Dot x s) = Dot (f x) (fmap f s)
class CanSubst a where
subst :: Subst a -> a -> a
instance CanSubst a => Monoid (Subst a) where
mempty = Shift 0
mappend s (Shift 0) = s
mappend (Dot _ s) (Shift m) = s <> (Shift (pred m))
mappend (Shift n) (Shift m) = Shift (n + m)
mappend s (Dot e t) = Dot (subst s e) (s <> t)
instance CanSubst (Expr a) where
subst s e@(Expr label e') =
case (s, e') of
(Shift m, Var (Local k)) -> Expr label $ Var (Local (k+m))
(Dot x _, Var (Local 0)) -> x
(Dot _ s', Var (Local k)) -> subst s' (Expr label (Var (Local (pred k))))
(_, Var (Global _)) -> e
(_, Literal _) -> e
(_, ExprError _ _) -> e
(_, Pi n t x) -> Expr label $ Pi n (subst s t) (subst (Dot (Expr label (Var (Local 0))) (Shift 1 <> s)) x)
(_, Lambda n t x) -> Expr label $ Lambda n (subst s t) (subst (Dot (Expr label (Var (Local 0))) (Shift 1 <> s)) x)
(_, App f x) -> Expr label $ App (subst s f) (subst s x)
(_, Case x cs) -> Expr label $
Case (subst s x)
(map (\(pat, c) ->
(pat
,case pat of
PLit _ -> subst s c
PAny _ -> subst (Shift 1 <> s) c
PData _ vs -> subst (foldl' (\s' _ -> Dot (Expr label (Var (Local 0))) (Shift 1 <> s')) s vs) c))
cs)
(_, Data c xs) -> Expr label $ Data c (map (subst s) xs)
shift :: CanSubst a => Int -> a -> a
shift k e = subst (Shift k) e
singleton :: a -> Subst a
singleton x = Dot x (Shift 0)
errors :: Expr a -> [(a, SourceLoc, Text)]
errors e = [(label, loc, msg) | (Expr label (ExprError loc msg)) <- universe e]
instance Equiv (Expr a) where
-- Alpha equivalence
-- Assumes case branches are sorted.
equiv a b =
case (exprVal a, exprVal b) of
(Var x, Var y) -> x == y
(Lambda _ t1 x1, Lambda _ t2 x2) -> equiv t1 t2 && equiv x1 x2
(Pi _ t1 x1, Pi _ t2 x2) -> equiv t1 t2 && equiv x1 x2
(App f1 x1, App f2 x2) -> equiv f1 f2 && equiv x1 x2
(Case x1 cs1, Case x2 cs2) -> equiv x1 x2 && all (\((p1, c1), (p2, c2)) -> equiv p1 p2 && equiv c1 c2) (zip cs1 cs2)
(Literal x, Literal y) -> x == y
(ExprError _ _, ExprError _ _) -> True
_ -> False
data Constructor a = Constructor { _ctorResult :: Expr a, _ctorArgs :: [(Symbol, Expr a)] }
makeLenses ''Constructor
data Env a = Env { _envGlobals :: Map Symbol (Expr a, Expr a)
, _envCtors :: Map Symbol (Constructor a)
, _envLocals :: [(Symbol, Expr a, Maybe (Expr a))]
}
makeLenses ''Env
emptyEnv :: Env a
emptyEnv = Env M.empty M.empty []
lookupTy :: Var -> Reader (Env a) (Expr a)
lookupTy (Local i) = do
ls <- view envLocals
pure $ shift (succ i) (view _2 (ls !! i))
lookupTy (Global g) = do
gs <- view envGlobals
case M.lookup g gs of
Just x -> pure . fst $ x
Nothing -> error "undefined global"
lookupVal :: Var -> Reader (Env a) (Maybe (Expr a))
lookupVal (Local i) = do
ls <- view envLocals
pure $ shift (succ i) <$> (view _3 (ls !! i))
lookupVal (Global g) = do
gs <- view envGlobals
case M.lookup g gs of
Just x -> pure . Just . snd $ x
Nothing -> pure $ Nothing
normalize :: Env a -> Expr a -> Expr a
normalize e x = runReader (normalize' x) e
patternMatch :: Expr a -> Pattern -> Bool
patternMatch _ (PAny _) = True
patternMatch (Expr _ (Data s' _)) (PData s _) = s == s'
patternMatch (Expr _ (Literal l')) (PLit l) = l == l'
patternMatch _ _ = False
normalize' :: Expr a -> Reader (Env a) (Expr a)
normalize' (Expr l e) =
case e of
Var v -> fromMaybe (Expr l e) <$> lookupVal v
Lambda n t x -> do
t' <- normalize' t
x' <- local (envLocals %~ ((n, t', Nothing):)) $ normalize' x
pure . Expr l $ Lambda n t' x'
Pi n t x -> do
t' <- normalize' t
x' <- local (envLocals %~ ((n, t', Nothing):)) $ normalize' x
pure . Expr l $ Pi n t' x'
App f x -> do
x' <- normalize' x
f' <- normalize' f
case f' of
Expr _ (Lambda _ _ body) -> normalize' (subst (singleton x') body)
_ -> pure . Expr l $ App f' x'
Case x cs -> do
x' <- normalize' x
case find (patternMatch x' . fst) cs of
Just (_, c) ->
case exprVal x' of
Literal _ -> normalize' (subst (singleton x') c)
Data _ xs -> normalize' (subst (foldr Dot (Shift 0) xs) c)
_ -> error "normalize': impossible"
Nothing -> pure $ Expr l e
Data c xs -> Expr l . Data c <$> (mapM normalize' xs)
Literal _ -> pure $ Expr l e
ExprError _ _ -> pure $ Expr l e
eval :: HasRange a => Expr a -> Value a
eval = eval' []
eval' :: HasRange a => [Value a] -> Expr a -> Value a
eval' env (Expr label exprVal) =
case exprVal of
ExprError l m -> Value label $ VError l m
Var v@(Local i) -> fromMaybe (Value label (VNeutral (NVar v))) (env ^? ix i)
Var v@(Global _) -> Value label $ VNeutral (NVar v)
Literal l -> Value label $ VLiteral l
Lambda n t v -> Value label $ VLambda n (eval' env t) (\x -> eval' (x:env) v)
Pi n t v -> Value label $ VPi n (eval' env t) (\x -> eval' (x:env) v)
Data s xs -> Value label $ VData s (map (eval' env) xs)
App f x ->
let x' = eval' env x in
case eval' env f of
Value _ (VLambda _ _ f') -> f' x'
Value _ (VNeutral n) -> Value label $ VNeutral (NApp n x')
_ -> Value label $ VError (label ^. sourceRange.start) "applied non-function"
Case x cs ->
case eval' env x of
v@(Value _ (VData _ _)) -> findClause label env cs v
v@(Value _ (VLiteral _)) -> findClause label env cs v
_ -> Value label $ VError (exprLabel x ^. sourceRange.start) "case on non-data"
where
findClause :: HasRange a => a -> [Value a] -> [(Pattern, Expr a)] -> Value a -> Value a
findClause l _ [] _ = Value l $ VError (l ^. sourceRange.start) "no matching case for value"
findClause _ e ((PAny _, v):_) x = eval' (x:e) v
findClause label' e ((PLit l, v):vs) x@(Value _ (VLiteral l')) =
if l == l'
then eval' e v
else findClause label' e vs x
findClause label' e ((PData n syms, v):vs) x@(Value _ (VData n' vals)) =
if n == n'
then if length syms /= length vals
then error "eval': impossible"
else eval' (reverse vals ++ e) v
else findClause label' e vs x
findClause l _ _ _ = Value l $ VError (l ^. sourceRange.start) "pattern type mismatch"
-- BROKEN: Every variable ends up being 0
-- reify :: Value a -> Expr a
-- reify (Value label (VLiteral l)) = Expr label $ Literal l
-- reify (Value label (VNeutral n)) = helper label n
-- where
-- helper :: a -> Neutral a -> Expr a
-- helper l (NVar v) = Expr l $ Var v
-- helper l (NApp f v) = Expr l $ App (helper l f) (reify v)
-- reify (Value l (VData s vs)) = Expr l $ Data s (map reify vs)
-- reify (Value l (VLambda n t f)) = Expr l $ Lambda n (reify t) (reify (f (Value l (VNeutral (NVar (Local 0))))))
-- reify (Value l (VPi n t f)) = Expr l $ Pi n (reify t) (reify (f (Value l (VNeutral (NVar (Local 0))))))
-- reify (Value l (VError loc m)) = Expr l $ ExprError loc m
type Untyped = Expr SourceRange
toAST :: HasRange a => Expr a -> AST
toAST = toAST' []
fromAST :: AST -> Untyped
fromAST = fromAST' M.empty
toAST' :: HasRange a => [Symbol] -> Expr a -> AST
toAST' env (Expr label exprVal) =
let range = label ^. sourceRange in
A.AST range $
case exprVal of
ExprError loc msg -> A.ASTError loc msg
Lambda n t x -> uncurry A.Lambda (flattenLambda env n t x)
Pi n t x -> uncurry A.Pi (flattenPi env n t x)
App f x -> uncurry A.App (flattenApp env [] f x)
Literal l -> A.Literal l
Var (Local i) -> A.Var (env !! i)
Var (Global s) -> A.Var s
Case x cs -> A.Case (toAST' env x) (cs & traverse._2 %~ toAST' env)
Data s args -> A.App (A.AST range (A.Var s)) (map (toAST' env) args)
flattenLambda :: HasRange a => [Symbol] -> Symbol -> Expr a -> Expr a -> ([(Symbol, AST)], AST)
flattenLambda env n t (Expr _ (Lambda n' t' x)) = flattenLambda (n:env) n' t' x & _1 %~ ((n, toAST' env t):)
flattenLambda env n t x = ([(n, toAST' env t)], toAST' (n:env) x)
flattenPi :: HasRange a => [Symbol] -> Symbol -> Expr a -> Expr a -> ([(Symbol, AST)], AST)
flattenPi env n t (Expr _ (Pi n' t' x)) = flattenPi (n:env) n' t' x & _1 %~ ((n, toAST' env t):)
flattenPi env n t x = ([(n, toAST' env t)], toAST' (n:env) x)
flattenApp :: HasRange a => [Symbol] ->[AST] -> Expr a -> Expr a -> (AST, [AST])
flattenApp env accum (Expr _ (App f x')) x = flattenApp env (toAST' env x : accum) f x'
flattenApp env accum f x = (toAST' env f, toAST' env x : accum)
-- Could this use a better data structure than Map?
fromAST' :: Map Symbol Int -> AST -> Untyped
fromAST' locals (A.AST range astVal) =
case astVal of
A.ASTError loc msg -> Expr range $ ExprError loc msg
A.Lambda [] _ -> Expr range $ ExprError (range ^. start) "nullary lambda"
A.Lambda args body -> fromAbs Lambda locals args body
A.Pi [] _ -> Expr range $ ExprError (range ^. start) "nullary Pi"
A.Pi args body -> fromAbs Pi locals args body
A.App _ [] -> Expr range $ ExprError (range ^. start) "nullary application"
A.App f xs -> foldl (\f' x -> Expr range $ App f' (fromAST' locals x)) (fromAST' locals f) xs
A.Literal l -> Expr range $ Literal l
A.Var s -> Expr range $
case M.lookup s locals of
Nothing -> Var (Global s)
Just i -> Var (Local i)
A.Case expr cases -> Expr range $ Case (fromAST' locals expr) (map caseFromAST cases)
where
fromAbs :: (Symbol -> Untyped -> Untyped -> ExprVal SourceRange) -> Map Symbol Int -> [(Symbol, AST)] -> AST -> Untyped
fromAbs _ locals' [] body = fromAST' locals' body
fromAbs ctor locals' ((x, ty):xs) body = Expr range (ctor x (fromAST' locals' ty) (fromAbs ctor (bind x locals') xs body))
caseFromAST :: (Pattern, AST) -> (Pattern, Untyped)
caseFromAST (p@(PLit _), expr) = (p, fromAST' locals expr)
caseFromAST (p@(PAny sym), expr) = (p, fromAST' (bind sym locals) expr)
caseFromAST (p@(PData _ vars), expr) = (p, fromAST' (foldr bind locals vars) expr)
bind :: Symbol -> Map Symbol Int -> Map Symbol Int
bind s ls = M.insert s 0 (M.map succ ls)
literalTy :: Literal -> ExprVal a
literalTy (LitNum _) = Var (Global "Rational")
literalTy (LitUniverse i) = Literal (LitUniverse (succ i))
literalTy (LitText _) = Var (Global "Text")
literalTy (LitForeign _) = Var (Global "Foreign")
infer :: HasRange a => Env a -> Expr a -> Expr a
infer e x = runReader (infer' x) e
infer' :: HasRange a => Expr a -> Reader (Env a) (Expr a)
infer' (Expr label exprVal) =
case exprVal of
Var v -> lookupTy v
Literal x -> pure $ Expr label (literalTy x)
Pi n t x -> do
u <- runExceptT $ do
u1 <- inferUniverse t
u2 <- local (envLocals %~ ((n,t,Nothing):)) $ inferUniverse x
pure (max u1 u2)
case u of
Left (l, m) -> pure $ Expr label $ ExprError l m
Right u' -> pure $ Expr label . Literal $ LitUniverse u'
Lambda n t x -> do
u <- runExceptT $ inferUniverse t
case u of
Left (l, m) -> pure . Expr label $ ExprError l m
Right _ -> do
xty <- local (envLocals %~ ((n,t,Nothing):)) $ infer' x
pure . Expr label $ Pi n t xty
App f x -> do
fty <- inferPi f
xty <- normalize' =<< infer' x
case fty of
Left (l, m) -> pure . Expr label $ ExprError l m
Right (_, argty, retty) -> do
argty' <- normalize' argty
pure $ if equiv xty argty'
then subst (singleton x) retty
else Expr label $ ExprError (label ^. sourceRange.start) "argument type mismatch"
Case _ [] -> pure . Expr label $ Var (Global "Void")
Case x cs -> undefined -- TODO: Bind variables to correct types in case bodies
-- xty <- normalize' =<< infer' x
-- ptys <- map (fromMaybe xty) <$> mapM (inferPattern . fst) cs
-- if any (not . equiv xty) ptys
-- then pure . Expr label $ ExprError (label ^. sourceRange.start) "pattern/interrogand type mismatch"
-- else if any (not . equiv (head ctys)) (tail ctys)
-- then pure . Expr label $ ExprError (label ^. sourceRange.start) "result type mismatch"
-- else head ctys
Data s xs -> do
cts <- view envCtors
case M.lookup s cts of
Nothing -> pure . Expr label $ ExprError (label ^. sourceRange.start) "undefined data constructor"
Just ct -> pure $ subst (foldr Dot (Shift 0) xs) (ct ^. ctorResult)
ExprError _ _ -> pure . Expr label $ Var (Global "Void")
inferUniverse :: HasRange a => Expr a -> ExceptT (SourceLoc, Text) (Reader (Env a)) Integer
inferUniverse ty = do
ty' <- lift $ normalize' =<< infer' ty
case ty' of
Expr _ (Literal (LitUniverse i)) -> pure $ i
Expr l _ -> throwError (l ^. sourceRange.start, "expected a type")
inferPi :: HasRange a => Expr a -> Reader (Env a) (Either (SourceLoc, Text) (Symbol, Expr a, Expr a))
inferPi f = do
ty <- normalize' =<< infer' f
case ty of
Expr _ (Pi n t x) -> pure $ Right (n, t, x)
Expr l _ -> pure $ Left (l ^. sourceRange.start, "expected a function")
data PatternTy a = PTData Symbol | PTLit (ExprVal a) | PTAny
deriving (Eq)
-- instance Equiv (PatternTy a) where
-- equiv PTAny _ = True
-- equiv _ PTAny = True
-- equiv (PTLit x) (PTLit y) = x == y
-- equiv (PTData x) (PTData y) = x == y
-- inferPattern :: Pattern -> PatternTy a
-- inferPattern (PAny _) = PTAny
-- inferPattern (PLit l) = PTLit (literalTy l)
-- inferPattern (PData s _) = PTData s
| Ralith/tisp | src/Tisp/Expr.hs | bsd-3-clause | 15,459 | 0 | 30 | 4,360 | 7,121 | 3,541 | 3,580 | -1 | -1 |
import System.IO
import System.Environment
import System.Exit
import System.Random
import Data.List
import Data.Maybe
import Control.Monad
import Text.Printf
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lex.Double as B
import Equation
import qualified TDMA
import qualified Unstable
import OptionsParser
-- | Reads a Double from a string
readD :: B.ByteString -> Double
readD = fst . fromJust . B.readDouble
-- | Reads a list of space-separated real numbers from a string
readDoubleList :: B.ByteString -> [Double]
readDoubleList = map readD . B.words
-- | Reads the next vector of real numbers from stdin
nextVector :: IO [Double]
nextVector = liftM readDoubleList B.getLine
randomVector :: (Random a) => Int -> (a, a) -> IO [a]
randomVector n r = replicateM n (randomRIO r)
-- | Reads the equation coefficients from stdin
getEqDataStdIn :: IO (EquationData Double)
getEqDataStdIn = do [a, b, c, d] <- replicateM 4 nextVector
return $ EquationData a b c d
-- | Generates a random set of coefficient values
getEqDataRandom :: Int -> IO (EquationData Double)
getEqDataRandom n = do [a, c] <- replicateM 2 (randomVector (n-1) (-1000.0, 1000.0))
[b, d] <- replicateM 2 (randomVector n (-1000.0, 1000.0))
return $ EquationData (0:a) b (c ++ [0]) d
-- | The argument corresponds to optRandom
getEqData :: Maybe Int -> IO (EquationData Double)
getEqData Nothing = getEqDataStdIn
getEqData (Just n) = getEqDataRandom n
-- | Show the program usage message and exit if the argument is True
maybeShowUsage :: Bool -> IO ()
maybeShowUsage True = putStr usageMessage >> exitSuccess
maybeShowUsage _ = return ()
-- | Prints a list to stdout, followed by a newline
printListLn :: (Show a) => [a] -> IO ()
printListLn xs = mapM_ (\x -> putStr (show x ++ " ")) xs >> putStr "\n"
testingMode :: Bool -> Maybe (EquationSolution Double) -> IO ()
testingMode True (Just sol) = do x <- nextVector
printListLn x
print $ dist x sol
testingMode _ _ = return ()
printEqData :: Bool -> EquationData Double -> IO ()
printEqData True (EquationData a b c d) = mapM_ printListLn [a, b, c, d]
printEqData _ _ = return ()
printSolution :: Maybe (EquationSolution Double) -> IO ()
printSolution (Just sol) = printListLn sol
printSolution _ = noSolutionExit
noSolutionExit :: IO ()
noSolutionExit = hPutStrLn stderr "Failed to find a solution" >> exitFailure
-- | Computes the max-norm of the difference between the vectors a and b
dist :: (Real a) => [a] -> [a] -> a
dist a b = foldl1' max $ map abs $ zipWith (-) a b
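-- A small worked example (illustrative): dist [1,2,3] [1,4,2] == 2,
-- the largest absolute componentwise difference.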
solveEquation :: Method -> EquationData Double -> Maybe (EquationSolution Double)
solveEquation MethodTDMA = TDMA.getSolution
solveEquation MethodUnstable = Unstable.getSolution
main :: IO ()
main = do opts <- getArgs >>= programOpts
maybeShowUsage (optHelp opts)
eqd <- getEqData (optRandom opts)
printEqData (isJust $ optRandom opts) eqd
let sol = solveEquation (optMethod opts) eqd
printSolution sol
testingMode (optTesting opts && isNothing (optRandom opts)) sol
| kharvd/tridiagonal | Tridiagonal.hs | bsd-3-clause | 3,615 | 0 | 12 | 735 | 1,025 | 526 | 499 | 63 | 1 |
module Data.Functor.ComonadHoist where
import Data.Functor.Comonad
import Data.Functor.Id
class ComonadHoist t where
cohoist ::
Comonad f =>
t f a
-> t Id a | tonymorris/type-class | src/Data/Functor/ComonadHoist.hs | bsd-3-clause | 172 | 0 | 9 | 38 | 54 | 29 | 25 | 8 | 0 |
module NinetyNine.Problem22 where
import Test.Hspec
range :: Int -> Int -> [Int]
range x y = [x..y]
range2 :: Int -> Int -> [Int]
range2 x y = take (y-x + 1) $ iterate (+1) x
range3 :: Int -> Int -> [Int]
range3 = enumFromTo
range4 :: Int -> Int -> [Int]
range4 x y
| x > y = []
| x == y = [x]
| otherwise = [z | i <- [(x-1)..(y-1)], let z = i + 1]
rangeAtSpec :: Spec
rangeAtSpec = do
describe "Create a list containing all integers within a given range." $ do
it "Create a range between two integers." $ do
range 4 9 `shouldBe` [4,5,6,7,8,9]
it "[range2] Create a range between two integers." $ do
range2 4 9 `shouldBe` [4,5,6,7,8,9]
it "[range3] Create a range between two integers." $ do
range3 4 9 `shouldBe` [4,5,6,7,8,9]
it "[range4] Create a range between two integers." $ do
range4 4 9 `shouldBe` [4,5,6,7,8,9]
| chemouna/99Haskell | src/problem22.hs | bsd-3-clause | 892 | 0 | 14 | 232 | 411 | 221 | 190 | 24 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TupleSections #-}
-- |
module Main where
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async
import Control.Monad (forever, unless, forM_)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Loops (whileM_)
import qualified Control.Monad.Trans.Writer as W
import Data.Default
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.IO as TL
import Formatting
import Options.Applicative
import System.IO (hFlush, hPutStr, stdout)
import Network.Libtorrent
data Config = Config {
_source :: !Text
}
deriving Show
instance Default Config where
def = Config {
_source = ""
}
main :: IO ()
main = do
Config{..} <- execParser opts
ses <- newSession
fsets <- newFeedSettings
setFeedSettingsUrl fsets _source
fh <- addFeed ses fsets
liftIO $ race_
(forever $ spinner 100000)
(whileM_ (checkFeed fh) $ threadDelay 100000)
txt <- W.execWriterT $ do
fs <- getFeedStatus fh
url <- getFeedStatusUrl fs
ec <- getFeedStatusError fs
title <- getFeedStatusTitle fs
desc <- getFeedStatusDescription fs
ttl <- getFeedStatusTtl fs
ecv <- errorCodeValue ec
items <- getFeedStatusItems fs >>= liftIO . toList
outln $ format ("\n\nFEED:" % stext) url
unless (ecv == 0) $ do
outln $ format ("Error:" % shown) ec
outln $ format (" " % stext) title
outln $ format (" " % stext) desc
outln $ format (" ttl: " % int % " minutes") ttl
forM_ items $ \item -> do
ititle <- getFeedItemTitle item
iurl <- getFeedItemUrl item
isize <- getFeedItemSize item
iuuid <- getFeedItemUuid item
idesc <- getFeedItemDescription item
icom <- getFeedItemComment item
icat <- getFeedItemCategory item
outln $ format (stext % "\n------------------------------------------------------") ititle
outln $ format (" url: " % stext % "\n size: " % int %
"\n uuid: " % stext % "\n description: " % stext)
iurl isize iuuid idesc
outln $ format (" comment: " % stext % "\n category: " % stext)
icom icat
TL.putStrLn txt
hFlush stdout
where
opts = info (helper <*> optParser)
( fullDesc
<> progDesc "Torrent RSS feed reader"
<> header "rss-reader - torrent rss feed url reader")
checkFeed fh = do
fs <- getFeedStatus fh
getFeedStatusUpdating fs
spinner tm =
go ['|', '/', '-', '\\']
where
go [] = pure ()
go (c:rst) = do
hPutStr stdout ['\b', c]
hFlush stdout
threadDelay tm
go rst
optParser :: Parser Config
optParser = Config
<$> (T.pack <$> (argument str
(metavar "RSS-FEED-URL"
<> help "RSS feed url")))
outln :: MonadIO m => TL.Text -> W.WriterT TL.Text m ()
outln txt = W.tell txt >> W.tell "\n"
| eryx67/haskell-libtorrent | examples/RssReader.hs | bsd-3-clause | 3,309 | 11 | 24 | 996 | 826 | 434 | 392 | 93 | 2 |
--------------------------------------------------------------------------------
-- |
-- Module : GalFld.Sandbox.SerializeSandbox
-- Note       :  Examples and a place to play and experiment
--
-- Diese Sandbox ist zum testen der serialisierung gedacht.
--
--------------------------------------------------------------------------------
module GalFld.Sandbox.SerializeSandbox
where
import Prelude hiding (writeFile, readFile)
import GalFld.Core
import GalFld.Algorithmen
import Data.List
import Data.Binary
import Data.ByteString.Lazy
--import Control.Monad
{----------------------------------------------------------------------------------}
{--- Examples -}
e2f2Mipo = pList[1::F2,1,1] -- x²+x+1
e2f2 = FFElem (pList[0,1::F2]) e2f2Mipo
{- F16=E2(E2)
 - as a degree-2 extension of E2 via the minimal polynomial x²+x+e2f2,
 - with a root: e2e2f2
 -}
e2e2f2Mipo = pList[e2f2,one,one] -- x²+x+e2f2
e2e2f2 = FFElem (pList[0,one]) e2e2f2Mipo
--e2e2f2 = FFElem (pList[0,e2f2]) e2e2f2Mipo
{- F16=E4
 - as a degree-4 extension of F2 via the minimal polynomial x⁴+x²+1,
 - with a root: e4f2
 -}
e4f2Mipo = pList[1::F2,1::F2,0,0,1::F2] -- x⁴+x²+1
e4f2 = FFElem (pList[0,1::F2]) e4f2Mipo
{-
 - Example in F3[x]:
 - f = x¹¹+2x⁹+2x⁸+x⁶+x⁵+2x³+2x²+1
 -   = (x+1)(x²+1)³(x+2)⁴
 -}
f=pList[1::F3,0,2,2,0,1,1,0,2,2,0,1]
testPoly1 = pList $ listFFElem e4f2Mipo [ pList[0::F2,0,1,1]
, 1
, pList[1::F2,1,1]
, pList[0::F2,1]
, pList[1::F2,1,0,1] ]
testPoly2 = pList $ listFFElem e4f2Mipo [ pList[0::F2,0,1,1]
, 1
, pList[1::F2,1,0,1] ]
testPoly3 = pList $ listFFElem e4f2Mipo [ pList[0::F2,0,1,1]
, 1
, 1
, pList[1::F2,1,0,1] ]
testPoly = testPoly1 * testPoly2
testSerializeWrite = writeFile "/tmp/serialize" (encode testPoly)
testSerializeRead = readFile "/tmp/serialize" >>= return . decode :: IO (Polynom (FFElem F2))
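-- Illustrative round-trip expectation (assuming the Binary instances for the
-- field and polynomial types are lawful): reading back what was just written
-- should reproduce the original, i.e. decode (encode testPoly) == testPoly.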
main :: IO ()
{-main = print $ map fst $ sffAndBerlekamp testPoly-}
main = do
testSerializeWrite
r <- testSerializeRead
print r
| maximilianhuber/softwareProjekt | src/GalFld/Sandbox/SerializeSandbox.hs | bsd-3-clause | 2,303 | 0 | 9 | 608 | 555 | 331 | 224 | 34 | 1 |
-- | purely-functional queues (taken from Okasaki)
module Q (Q, empty, isEmpty, maybeRev, snoc,head,tail,pop) where
import Prelude hiding (head,tail)
newtype Q a = Q ([a],[a])
deriving Show
empty :: Q a
empty = Q ([],[])
isEmpty :: Q a -> Bool
isEmpty (Q (f,_)) = null f
-- internal use only
maybeRev :: ([a], [a]) -> Q a
maybeRev ([],r) = Q (reverse r, [])
maybeRev q = Q q
snoc :: Q a -> a -> Q a
snoc (Q (f,r)) x = maybeRev (f, x:r)
head :: Q t -> t
head (Q ([],_)) = error "head: empty queue"
head (Q (x:_,_)) = x
tail :: Q a -> Q a
tail (Q ([], _)) = error "tail: empty queue"
tail (Q (_:f',r)) = maybeRev (f',r)
pop :: Q a -> (a, Q a)
pop (Q ([],_)) = error "pop: empty queue"
pop (Q (x:f',r)) = (x, maybeRev (f',r))
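-- A minimal usage sketch (illustrative; 'demoQ' and 'drain' are not part of
-- the exported API, they only exercise the functions above): elements
-- enqueued with 'snoc' come back out of 'pop' in FIFO order.
demoQ :: Q Int
demoQ = foldl snoc empty [1, 2, 3]

drain :: Q a -> [a]
drain q
  | isEmpty q = []
  | otherwise = let (x, q') = pop q in x : drain q'
-- drain demoQ == [1,2,3]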
| abstools/abs-haskell-formal | src/Q.hs | bsd-3-clause | 736 | 0 | 9 | 164 | 467 | 257 | 210 | 22 | 1 |
cONTROL_GROUP_CONST_291 :: DynFlags -> Int
cONTROL_GROUP_CONST_291 dflags = pc_CONTROL_GROUP_CONST_291 (sPlatformConstants (settings dflags))
sTD_HDR_SIZE :: DynFlags -> Int
sTD_HDR_SIZE dflags = pc_STD_HDR_SIZE (sPlatformConstants (settings dflags))
pROF_HDR_SIZE :: DynFlags -> Int
pROF_HDR_SIZE dflags = pc_PROF_HDR_SIZE (sPlatformConstants (settings dflags))
bLOCK_SIZE :: DynFlags -> Int
bLOCK_SIZE dflags = pc_BLOCK_SIZE (sPlatformConstants (settings dflags))
bLOCKS_PER_MBLOCK :: DynFlags -> Int
bLOCKS_PER_MBLOCK dflags = pc_BLOCKS_PER_MBLOCK (sPlatformConstants (settings dflags))
tICKY_BIN_COUNT :: DynFlags -> Int
tICKY_BIN_COUNT dflags = pc_TICKY_BIN_COUNT (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rR1 :: DynFlags -> Int
oFFSET_StgRegTable_rR1 dflags = pc_OFFSET_StgRegTable_rR1 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rR2 :: DynFlags -> Int
oFFSET_StgRegTable_rR2 dflags = pc_OFFSET_StgRegTable_rR2 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rR3 :: DynFlags -> Int
oFFSET_StgRegTable_rR3 dflags = pc_OFFSET_StgRegTable_rR3 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rR4 :: DynFlags -> Int
oFFSET_StgRegTable_rR4 dflags = pc_OFFSET_StgRegTable_rR4 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rR5 :: DynFlags -> Int
oFFSET_StgRegTable_rR5 dflags = pc_OFFSET_StgRegTable_rR5 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rR6 :: DynFlags -> Int
oFFSET_StgRegTable_rR6 dflags = pc_OFFSET_StgRegTable_rR6 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rR7 :: DynFlags -> Int
oFFSET_StgRegTable_rR7 dflags = pc_OFFSET_StgRegTable_rR7 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rR8 :: DynFlags -> Int
oFFSET_StgRegTable_rR8 dflags = pc_OFFSET_StgRegTable_rR8 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rR9 :: DynFlags -> Int
oFFSET_StgRegTable_rR9 dflags = pc_OFFSET_StgRegTable_rR9 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rR10 :: DynFlags -> Int
oFFSET_StgRegTable_rR10 dflags = pc_OFFSET_StgRegTable_rR10 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rF1 :: DynFlags -> Int
oFFSET_StgRegTable_rF1 dflags = pc_OFFSET_StgRegTable_rF1 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rF2 :: DynFlags -> Int
oFFSET_StgRegTable_rF2 dflags = pc_OFFSET_StgRegTable_rF2 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rF3 :: DynFlags -> Int
oFFSET_StgRegTable_rF3 dflags = pc_OFFSET_StgRegTable_rF3 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rF4 :: DynFlags -> Int
oFFSET_StgRegTable_rF4 dflags = pc_OFFSET_StgRegTable_rF4 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rF5 :: DynFlags -> Int
oFFSET_StgRegTable_rF5 dflags = pc_OFFSET_StgRegTable_rF5 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rF6 :: DynFlags -> Int
oFFSET_StgRegTable_rF6 dflags = pc_OFFSET_StgRegTable_rF6 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rD1 :: DynFlags -> Int
oFFSET_StgRegTable_rD1 dflags = pc_OFFSET_StgRegTable_rD1 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rD2 :: DynFlags -> Int
oFFSET_StgRegTable_rD2 dflags = pc_OFFSET_StgRegTable_rD2 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rD3 :: DynFlags -> Int
oFFSET_StgRegTable_rD3 dflags = pc_OFFSET_StgRegTable_rD3 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rD4 :: DynFlags -> Int
oFFSET_StgRegTable_rD4 dflags = pc_OFFSET_StgRegTable_rD4 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rD5 :: DynFlags -> Int
oFFSET_StgRegTable_rD5 dflags = pc_OFFSET_StgRegTable_rD5 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rD6 :: DynFlags -> Int
oFFSET_StgRegTable_rD6 dflags = pc_OFFSET_StgRegTable_rD6 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rXMM1 :: DynFlags -> Int
oFFSET_StgRegTable_rXMM1 dflags = pc_OFFSET_StgRegTable_rXMM1 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rXMM2 :: DynFlags -> Int
oFFSET_StgRegTable_rXMM2 dflags = pc_OFFSET_StgRegTable_rXMM2 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rXMM3 :: DynFlags -> Int
oFFSET_StgRegTable_rXMM3 dflags = pc_OFFSET_StgRegTable_rXMM3 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rXMM4 :: DynFlags -> Int
oFFSET_StgRegTable_rXMM4 dflags = pc_OFFSET_StgRegTable_rXMM4 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rXMM5 :: DynFlags -> Int
oFFSET_StgRegTable_rXMM5 dflags = pc_OFFSET_StgRegTable_rXMM5 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rXMM6 :: DynFlags -> Int
oFFSET_StgRegTable_rXMM6 dflags = pc_OFFSET_StgRegTable_rXMM6 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rYMM1 :: DynFlags -> Int
oFFSET_StgRegTable_rYMM1 dflags = pc_OFFSET_StgRegTable_rYMM1 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rYMM2 :: DynFlags -> Int
oFFSET_StgRegTable_rYMM2 dflags = pc_OFFSET_StgRegTable_rYMM2 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rYMM3 :: DynFlags -> Int
oFFSET_StgRegTable_rYMM3 dflags = pc_OFFSET_StgRegTable_rYMM3 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rYMM4 :: DynFlags -> Int
oFFSET_StgRegTable_rYMM4 dflags = pc_OFFSET_StgRegTable_rYMM4 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rYMM5 :: DynFlags -> Int
oFFSET_StgRegTable_rYMM5 dflags = pc_OFFSET_StgRegTable_rYMM5 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rYMM6 :: DynFlags -> Int
oFFSET_StgRegTable_rYMM6 dflags = pc_OFFSET_StgRegTable_rYMM6 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rZMM1 :: DynFlags -> Int
oFFSET_StgRegTable_rZMM1 dflags = pc_OFFSET_StgRegTable_rZMM1 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rZMM2 :: DynFlags -> Int
oFFSET_StgRegTable_rZMM2 dflags = pc_OFFSET_StgRegTable_rZMM2 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rZMM3 :: DynFlags -> Int
oFFSET_StgRegTable_rZMM3 dflags = pc_OFFSET_StgRegTable_rZMM3 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rZMM4 :: DynFlags -> Int
oFFSET_StgRegTable_rZMM4 dflags = pc_OFFSET_StgRegTable_rZMM4 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rZMM5 :: DynFlags -> Int
oFFSET_StgRegTable_rZMM5 dflags = pc_OFFSET_StgRegTable_rZMM5 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rZMM6 :: DynFlags -> Int
oFFSET_StgRegTable_rZMM6 dflags = pc_OFFSET_StgRegTable_rZMM6 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rL1 :: DynFlags -> Int
oFFSET_StgRegTable_rL1 dflags = pc_OFFSET_StgRegTable_rL1 (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rSp :: DynFlags -> Int
oFFSET_StgRegTable_rSp dflags = pc_OFFSET_StgRegTable_rSp (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rSpLim :: DynFlags -> Int
oFFSET_StgRegTable_rSpLim dflags = pc_OFFSET_StgRegTable_rSpLim (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rHp :: DynFlags -> Int
oFFSET_StgRegTable_rHp dflags = pc_OFFSET_StgRegTable_rHp (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rHpLim :: DynFlags -> Int
oFFSET_StgRegTable_rHpLim dflags = pc_OFFSET_StgRegTable_rHpLim (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rCCCS :: DynFlags -> Int
oFFSET_StgRegTable_rCCCS dflags = pc_OFFSET_StgRegTable_rCCCS (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rCurrentTSO :: DynFlags -> Int
oFFSET_StgRegTable_rCurrentTSO dflags = pc_OFFSET_StgRegTable_rCurrentTSO (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rCurrentNursery :: DynFlags -> Int
oFFSET_StgRegTable_rCurrentNursery dflags = pc_OFFSET_StgRegTable_rCurrentNursery (sPlatformConstants (settings dflags))
oFFSET_StgRegTable_rHpAlloc :: DynFlags -> Int
oFFSET_StgRegTable_rHpAlloc dflags = pc_OFFSET_StgRegTable_rHpAlloc (sPlatformConstants (settings dflags))
oFFSET_stgEagerBlackholeInfo :: DynFlags -> Int
oFFSET_stgEagerBlackholeInfo dflags = pc_OFFSET_stgEagerBlackholeInfo (sPlatformConstants (settings dflags))
oFFSET_stgGCEnter1 :: DynFlags -> Int
oFFSET_stgGCEnter1 dflags = pc_OFFSET_stgGCEnter1 (sPlatformConstants (settings dflags))
oFFSET_stgGCFun :: DynFlags -> Int
oFFSET_stgGCFun dflags = pc_OFFSET_stgGCFun (sPlatformConstants (settings dflags))
oFFSET_Capability_r :: DynFlags -> Int
oFFSET_Capability_r dflags = pc_OFFSET_Capability_r (sPlatformConstants (settings dflags))
oFFSET_bdescr_start :: DynFlags -> Int
oFFSET_bdescr_start dflags = pc_OFFSET_bdescr_start (sPlatformConstants (settings dflags))
oFFSET_bdescr_free :: DynFlags -> Int
oFFSET_bdescr_free dflags = pc_OFFSET_bdescr_free (sPlatformConstants (settings dflags))
oFFSET_bdescr_blocks :: DynFlags -> Int
oFFSET_bdescr_blocks dflags = pc_OFFSET_bdescr_blocks (sPlatformConstants (settings dflags))
oFFSET_bdescr_flags :: DynFlags -> Int
oFFSET_bdescr_flags dflags = pc_OFFSET_bdescr_flags (sPlatformConstants (settings dflags))
sIZEOF_CostCentreStack :: DynFlags -> Int
sIZEOF_CostCentreStack dflags = pc_SIZEOF_CostCentreStack (sPlatformConstants (settings dflags))
oFFSET_CostCentreStack_mem_alloc :: DynFlags -> Int
oFFSET_CostCentreStack_mem_alloc dflags = pc_OFFSET_CostCentreStack_mem_alloc (sPlatformConstants (settings dflags))
oFFSET_CostCentreStack_scc_count :: DynFlags -> Int
oFFSET_CostCentreStack_scc_count dflags = pc_OFFSET_CostCentreStack_scc_count (sPlatformConstants (settings dflags))
oFFSET_StgHeader_ccs :: DynFlags -> Int
oFFSET_StgHeader_ccs dflags = pc_OFFSET_StgHeader_ccs (sPlatformConstants (settings dflags))
oFFSET_StgHeader_ldvw :: DynFlags -> Int
oFFSET_StgHeader_ldvw dflags = pc_OFFSET_StgHeader_ldvw (sPlatformConstants (settings dflags))
sIZEOF_StgSMPThunkHeader :: DynFlags -> Int
sIZEOF_StgSMPThunkHeader dflags = pc_SIZEOF_StgSMPThunkHeader (sPlatformConstants (settings dflags))
oFFSET_StgEntCounter_allocs :: DynFlags -> Int
oFFSET_StgEntCounter_allocs dflags = pc_OFFSET_StgEntCounter_allocs (sPlatformConstants (settings dflags))
oFFSET_StgEntCounter_allocd :: DynFlags -> Int
oFFSET_StgEntCounter_allocd dflags = pc_OFFSET_StgEntCounter_allocd (sPlatformConstants (settings dflags))
oFFSET_StgEntCounter_registeredp :: DynFlags -> Int
oFFSET_StgEntCounter_registeredp dflags = pc_OFFSET_StgEntCounter_registeredp (sPlatformConstants (settings dflags))
oFFSET_StgEntCounter_link :: DynFlags -> Int
oFFSET_StgEntCounter_link dflags = pc_OFFSET_StgEntCounter_link (sPlatformConstants (settings dflags))
oFFSET_StgEntCounter_entry_count :: DynFlags -> Int
oFFSET_StgEntCounter_entry_count dflags = pc_OFFSET_StgEntCounter_entry_count (sPlatformConstants (settings dflags))
sIZEOF_StgUpdateFrame_NoHdr :: DynFlags -> Int
sIZEOF_StgUpdateFrame_NoHdr dflags = pc_SIZEOF_StgUpdateFrame_NoHdr (sPlatformConstants (settings dflags))
sIZEOF_StgMutArrPtrs_NoHdr :: DynFlags -> Int
sIZEOF_StgMutArrPtrs_NoHdr dflags = pc_SIZEOF_StgMutArrPtrs_NoHdr (sPlatformConstants (settings dflags))
oFFSET_StgMutArrPtrs_ptrs :: DynFlags -> Int
oFFSET_StgMutArrPtrs_ptrs dflags = pc_OFFSET_StgMutArrPtrs_ptrs (sPlatformConstants (settings dflags))
oFFSET_StgMutArrPtrs_size :: DynFlags -> Int
oFFSET_StgMutArrPtrs_size dflags = pc_OFFSET_StgMutArrPtrs_size (sPlatformConstants (settings dflags))
sIZEOF_StgSmallMutArrPtrs_NoHdr :: DynFlags -> Int
sIZEOF_StgSmallMutArrPtrs_NoHdr dflags = pc_SIZEOF_StgSmallMutArrPtrs_NoHdr (sPlatformConstants (settings dflags))
oFFSET_StgSmallMutArrPtrs_ptrs :: DynFlags -> Int
oFFSET_StgSmallMutArrPtrs_ptrs dflags = pc_OFFSET_StgSmallMutArrPtrs_ptrs (sPlatformConstants (settings dflags))
sIZEOF_StgArrBytes_NoHdr :: DynFlags -> Int
sIZEOF_StgArrBytes_NoHdr dflags = pc_SIZEOF_StgArrBytes_NoHdr (sPlatformConstants (settings dflags))
oFFSET_StgArrBytes_bytes :: DynFlags -> Int
oFFSET_StgArrBytes_bytes dflags = pc_OFFSET_StgArrBytes_bytes (sPlatformConstants (settings dflags))
oFFSET_StgTSO_alloc_limit :: DynFlags -> Int
oFFSET_StgTSO_alloc_limit dflags = pc_OFFSET_StgTSO_alloc_limit (sPlatformConstants (settings dflags))
oFFSET_StgTSO_cccs :: DynFlags -> Int
oFFSET_StgTSO_cccs dflags = pc_OFFSET_StgTSO_cccs (sPlatformConstants (settings dflags))
oFFSET_StgTSO_stackobj :: DynFlags -> Int
oFFSET_StgTSO_stackobj dflags = pc_OFFSET_StgTSO_stackobj (sPlatformConstants (settings dflags))
oFFSET_StgStack_sp :: DynFlags -> Int
oFFSET_StgStack_sp dflags = pc_OFFSET_StgStack_sp (sPlatformConstants (settings dflags))
oFFSET_StgStack_stack :: DynFlags -> Int
oFFSET_StgStack_stack dflags = pc_OFFSET_StgStack_stack (sPlatformConstants (settings dflags))
oFFSET_StgUpdateFrame_updatee :: DynFlags -> Int
oFFSET_StgUpdateFrame_updatee dflags = pc_OFFSET_StgUpdateFrame_updatee (sPlatformConstants (settings dflags))
oFFSET_StgFunInfoExtraFwd_arity :: DynFlags -> Int
oFFSET_StgFunInfoExtraFwd_arity dflags = pc_OFFSET_StgFunInfoExtraFwd_arity (sPlatformConstants (settings dflags))
sIZEOF_StgFunInfoExtraRev :: DynFlags -> Int
sIZEOF_StgFunInfoExtraRev dflags = pc_SIZEOF_StgFunInfoExtraRev (sPlatformConstants (settings dflags))
oFFSET_StgFunInfoExtraRev_arity :: DynFlags -> Int
oFFSET_StgFunInfoExtraRev_arity dflags = pc_OFFSET_StgFunInfoExtraRev_arity (sPlatformConstants (settings dflags))
mAX_SPEC_SELECTEE_SIZE :: DynFlags -> Int
mAX_SPEC_SELECTEE_SIZE dflags = pc_MAX_SPEC_SELECTEE_SIZE (sPlatformConstants (settings dflags))
mAX_SPEC_AP_SIZE :: DynFlags -> Int
mAX_SPEC_AP_SIZE dflags = pc_MAX_SPEC_AP_SIZE (sPlatformConstants (settings dflags))
mIN_PAYLOAD_SIZE :: DynFlags -> Int
mIN_PAYLOAD_SIZE dflags = pc_MIN_PAYLOAD_SIZE (sPlatformConstants (settings dflags))
mIN_INTLIKE :: DynFlags -> Int
mIN_INTLIKE dflags = pc_MIN_INTLIKE (sPlatformConstants (settings dflags))
mAX_INTLIKE :: DynFlags -> Int
mAX_INTLIKE dflags = pc_MAX_INTLIKE (sPlatformConstants (settings dflags))
mIN_CHARLIKE :: DynFlags -> Int
mIN_CHARLIKE dflags = pc_MIN_CHARLIKE (sPlatformConstants (settings dflags))
mAX_CHARLIKE :: DynFlags -> Int
mAX_CHARLIKE dflags = pc_MAX_CHARLIKE (sPlatformConstants (settings dflags))
mUT_ARR_PTRS_CARD_BITS :: DynFlags -> Int
mUT_ARR_PTRS_CARD_BITS dflags = pc_MUT_ARR_PTRS_CARD_BITS (sPlatformConstants (settings dflags))
mAX_Vanilla_REG :: DynFlags -> Int
mAX_Vanilla_REG dflags = pc_MAX_Vanilla_REG (sPlatformConstants (settings dflags))
mAX_Float_REG :: DynFlags -> Int
mAX_Float_REG dflags = pc_MAX_Float_REG (sPlatformConstants (settings dflags))
mAX_Double_REG :: DynFlags -> Int
mAX_Double_REG dflags = pc_MAX_Double_REG (sPlatformConstants (settings dflags))
mAX_Long_REG :: DynFlags -> Int
mAX_Long_REG dflags = pc_MAX_Long_REG (sPlatformConstants (settings dflags))
mAX_XMM_REG :: DynFlags -> Int
mAX_XMM_REG dflags = pc_MAX_XMM_REG (sPlatformConstants (settings dflags))
mAX_Real_Vanilla_REG :: DynFlags -> Int
mAX_Real_Vanilla_REG dflags = pc_MAX_Real_Vanilla_REG (sPlatformConstants (settings dflags))
mAX_Real_Float_REG :: DynFlags -> Int
mAX_Real_Float_REG dflags = pc_MAX_Real_Float_REG (sPlatformConstants (settings dflags))
mAX_Real_Double_REG :: DynFlags -> Int
mAX_Real_Double_REG dflags = pc_MAX_Real_Double_REG (sPlatformConstants (settings dflags))
mAX_Real_XMM_REG :: DynFlags -> Int
mAX_Real_XMM_REG dflags = pc_MAX_Real_XMM_REG (sPlatformConstants (settings dflags))
mAX_Real_Long_REG :: DynFlags -> Int
mAX_Real_Long_REG dflags = pc_MAX_Real_Long_REG (sPlatformConstants (settings dflags))
rESERVED_C_STACK_BYTES :: DynFlags -> Int
rESERVED_C_STACK_BYTES dflags = pc_RESERVED_C_STACK_BYTES (sPlatformConstants (settings dflags))
rESERVED_STACK_WORDS :: DynFlags -> Int
rESERVED_STACK_WORDS dflags = pc_RESERVED_STACK_WORDS (sPlatformConstants (settings dflags))
aP_STACK_SPLIM :: DynFlags -> Int
aP_STACK_SPLIM dflags = pc_AP_STACK_SPLIM (sPlatformConstants (settings dflags))
wORD_SIZE :: DynFlags -> Int
wORD_SIZE dflags = pc_WORD_SIZE (sPlatformConstants (settings dflags))
dOUBLE_SIZE :: DynFlags -> Int
dOUBLE_SIZE dflags = pc_DOUBLE_SIZE (sPlatformConstants (settings dflags))
cINT_SIZE :: DynFlags -> Int
cINT_SIZE dflags = pc_CINT_SIZE (sPlatformConstants (settings dflags))
cLONG_SIZE :: DynFlags -> Int
cLONG_SIZE dflags = pc_CLONG_SIZE (sPlatformConstants (settings dflags))
cLONG_LONG_SIZE :: DynFlags -> Int
cLONG_LONG_SIZE dflags = pc_CLONG_LONG_SIZE (sPlatformConstants (settings dflags))
bITMAP_BITS_SHIFT :: DynFlags -> Int
bITMAP_BITS_SHIFT dflags = pc_BITMAP_BITS_SHIFT (sPlatformConstants (settings dflags))
tAG_BITS :: DynFlags -> Int
tAG_BITS dflags = pc_TAG_BITS (sPlatformConstants (settings dflags))
wORDS_BIGENDIAN :: DynFlags -> Bool
wORDS_BIGENDIAN dflags = pc_WORDS_BIGENDIAN (sPlatformConstants (settings dflags))
dYNAMIC_BY_DEFAULT :: DynFlags -> Bool
dYNAMIC_BY_DEFAULT dflags = pc_DYNAMIC_BY_DEFAULT (sPlatformConstants (settings dflags))
lDV_SHIFT :: DynFlags -> Int
lDV_SHIFT dflags = pc_LDV_SHIFT (sPlatformConstants (settings dflags))
iLDV_CREATE_MASK :: DynFlags -> Integer
iLDV_CREATE_MASK dflags = pc_ILDV_CREATE_MASK (sPlatformConstants (settings dflags))
iLDV_STATE_CREATE :: DynFlags -> Integer
iLDV_STATE_CREATE dflags = pc_ILDV_STATE_CREATE (sPlatformConstants (settings dflags))
iLDV_STATE_USE :: DynFlags -> Integer
iLDV_STATE_USE dflags = pc_ILDV_STATE_USE (sPlatformConstants (settings dflags))
| ghcjs/ghcjs | lib/ghc/includes/GHCConstantsHaskellWrappers.hs | mit | 17,002 | 0 | 9 | 1,500 | 4,001 | 2,000 | 2,001 | 250 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Network.HTTP.Conduit
import Data.Aeson
import Data.Aeson.Types
import Data.Char(toLower)
import qualified Data.Text as T
import qualified Data.Vector as V
import Data.Maybe
import Data.List(find, intercalate)
import Network.HTTP(urlEncode)
------------------------------------------------------------------------
-- Endpoint
------------------------------------------------------------------------
class Endpoint a where
buildURI :: a -> String
callJsonEndpoint :: (FromJSON j, Endpoint e) => e -> IO j
callJsonEndpoint e =
do responseBody <- simpleHttp (buildURI e)
case eitherDecode responseBody of
Left err -> fail err
Right res -> return res
------------------------------------------------------------------------
-- GeocoderEndpoint
------------------------------------------------------------------------
-- https://developers.google.com/maps/documentation/geocoding/
data GeocoderEndpoint =
GeocodeEndpoint { address :: String, sensor :: Bool }
-- This instance converts a data structure into a URI, passing the record's
-- parameter values along as query-string variables
instance Endpoint GeocoderEndpoint where
buildURI GeocodeEndpoint { address = address, sensor = sensor } =
let params = [("address", Just address), ("sensor", Just $ map toLower $ show sensor)]
in "http://maps.googleapis.com/maps/api/geocode/json" ++ renderQuery True params
------------------------------------------------------------------------
-- GeocoderModel
------------------------------------------------------------------------
data GeocodeResponse = GeocodeResponse LatLng deriving Show
instance FromJSON GeocodeResponse where
parseJSON val =
do let Object obj = val
(Array results) <- obj .: "results"
(Object location) <- navigateJson (results V.! 0) ["geometry", "location"]
(Number lat) <- location .: "lat"
(Number lng) <- location .: "lng"
return $ GeocodeResponse (realToFrac lat, realToFrac lng)
------------------------------------------------------------------------
-- FoursquareEndpoint
------------------------------------------------------------------------
foursquareApiVersion = "20130721"
data FoursquareCredentials = FoursquareCredentials { clientId :: String, clientSecret :: String }
data FoursquareEndpoint =
VenuesTrendingEndpoint { ll :: LatLng, limit :: Maybe Int, radius :: Maybe Double }
instance Endpoint FoursquareEndpoint where
buildURI VenuesTrendingEndpoint {ll = ll, limit = limit, radius = radius} =
let params = [("ll", Just $ renderLatLng ll), ("limit", fmap show limit), ("radius", fmap show radius)]
in "https://api.foursquare.com/v2/venues/trending" ++ renderQuery True params
data AuthorizedFoursquareEndpoint = AuthorizedFoursquareEndpoint FoursquareCredentials FoursquareEndpoint
instance Endpoint AuthorizedFoursquareEndpoint where
buildURI (AuthorizedFoursquareEndpoint creds e) = appendParams originalUri authorizationParams
where originalUri = buildURI e
authorizationParams = [("client_id", Just $ clientId creds),
("client_secret", Just $ clientSecret creds),
("v", Just foursquareApiVersion)]
authorizeWith :: FoursquareEndpoint -> FoursquareCredentials -> AuthorizedFoursquareEndpoint
authorizeWith = flip AuthorizedFoursquareEndpoint
------------------------------------------------------------------------
-- FoursquareModel
------------------------------------------------------------------------
withFoursquareResponse :: (Object -> Parser a) -> Value -> Parser a
withFoursquareResponse func val = do let Object obj = val
response <- obj .: "response"
func response
data Venue = Venue { venueId :: String, name :: String } deriving Show
data VenuesTrendingResponse = VenuesTrendingResponse { venues :: [Venue] } deriving Show
instance FromJSON VenuesTrendingResponse where
parseJSON = withFoursquareResponse parseResponse
where parseResponse :: Object -> Parser VenuesTrendingResponse
parseResponse obj = do (Array venues) <- obj .: "venues"
parsedVenues <- V.mapM (\(Object o) -> parseVenue o) venues
return $ VenuesTrendingResponse $ V.toList parsedVenues
parseVenue :: Object -> Parser Venue
parseVenue obj = do (String idText) <- obj .: "id"
(String nameText) <- obj .: "name"
return $ Venue { venueId = T.unpack idText, name = T.unpack nameText }
------------------------------------------------------------------------
-- Core
------------------------------------------------------------------------
type LatLng = (Double, Double)
renderLatLng :: LatLng -> String
renderLatLng (lat, lng) = show lat ++ "," ++ show lng
navigateJson :: Value -> [T.Text] -> Parser Value
navigateJson (Object obj) (first : second : rest) =
do next <- obj .: first
navigateJson next (second : rest)
navigateJson (Object obj) [last] = obj .: last
renderQuery :: Bool -> [(String, Maybe String)] -> String
renderQuery b params = (if b then "?" else "") ++ intercalate "&" serializedParams
where serializedParams = catMaybes $ map renderParam params
renderParam (key, Just val) = Just $ key ++ "=" ++ (urlEncode val)
renderParam (_, Nothing) = Nothing
appendParams :: String -> [(String, Maybe String)] -> String
appendParams uri params
| isJust (find (=='?') uri) = uri ++ "&" ++ renderQuery False params
| otherwise = uri ++ renderQuery True params
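-- For instance (illustrative):
--   renderQuery True [("a", Just "1"), ("b", Nothing)]  evaluates to "?a=1"
--   appendParams "http://x/y?a=1" [("b", Just "2")]     evaluates to "http://x/y?a=1&b=2"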
------------------------------------------------------------------------
-- Main
------------------------------------------------------------------------
-- | The street address whose nearby trending venues we look up.
targetAddress = "568 Broadway, New York, NY"
-- | The main entry point.
main :: IO ()
main =
do putStrLn "API key?"
apiKey <- getLine
putStrLn "API secret?"
apiSecret <- getLine
let creds = FoursquareCredentials apiKey apiSecret
(GeocodeResponse latLng) <- callJsonEndpoint $ GeocodeEndpoint targetAddress False
let venuesTrendingEndpoint = VenuesTrendingEndpoint latLng Nothing Nothing `authorizeWith` creds
(VenuesTrendingResponse venues) <- callJsonEndpoint venuesTrendingEndpoint
let printVenue v = putStrLn $ "- " ++ name v
mapM_ printVenue venues
------------------------------------------------------------------------
-- REFERENCE & LINKS --
-- This file comes from a tutorial published in Aug 2013.
-- The original version is published at: https://www.fpcomplete.com/school/to-infinity-and-beyond/pick-of-the-week/foursquare-api-example
-- In order to use this code, you'll need to install http-conduit and aeson.
-- They can be installed with the following command:
-- cabal install http-conduit aeson
| wavelets/getting-started-with-haskell | tutorials/foursquaretrends.hs | mit | 6,962 | 0 | 15 | 1,276 | 1,551 | 815 | 736 | 97 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module ExceptionSpec (main, spec) where
import Control.Applicative
import Control.Monad
import Control.Concurrent (forkIO, threadDelay)
import Network.HTTP
import Network.Stream
import Network.HTTP.Types hiding (Header)
import Network.Wai hiding (Response)
import Network.Wai.Internal (Request(..))
import Network.Wai.Handler.Warp
import System.IO.Unsafe (unsafePerformIO)
import Test.Hspec
import Control.Exception
import qualified Data.Streaming.Network as N
import Control.Concurrent.Async (withAsync)
import Network.Socket (sClose)
main :: IO ()
main = hspec spec
withTestServer :: (Int -> IO a) -> IO a
withTestServer inner = bracket
(N.bindRandomPortTCP "*4")
(sClose . snd)
$ \(port, lsocket) -> do
withAsync (runSettingsSocket defaultSettings lsocket testApp)
$ \_ -> inner port
testApp :: Application
testApp (Network.Wai.Internal.Request {pathInfo = [x]}) f
| x == "statusError" =
f $ responseLBS undefined [] "foo"
| x == "headersError" =
f $ responseLBS ok200 undefined "foo"
| x == "headerError" =
f $ responseLBS ok200 [undefined] "foo"
| x == "bodyError" =
f $ responseLBS ok200 [] undefined
| x == "ioException" = do
void $ fail "ioException"
f $ responseLBS ok200 [] "foo"
testApp _ f =
f $ responseLBS ok200 [] "foo"
spec :: Spec
spec = describe "responds even if there is an exception" $ do
{- Disabling these tests. We can consider forcing evaluation in Warp.
it "statusError" $ do
sc <- rspCode <$> sendGET "http://localhost:2345/statusError"
sc `shouldBe` (5,0,0)
it "headersError" $ do
sc <- rspCode <$> sendGET "http://localhost:2345/headersError"
sc `shouldBe` (5,0,0)
it "headerError" $ do
sc <- rspCode <$> sendGET "http://localhost:2345/headerError"
sc `shouldBe` (5,0,0)
it "bodyError" $ do
sc <- rspCode <$> sendGET "http://localhost:2345/bodyError"
sc `shouldBe` (5,0,0)
-}
it "ioException" $ withTestServer $ \port -> do
sc <- rspCode <$> sendGET (concat $ ["http://localhost:", show port, "/ioException"])
sc `shouldBe` (5,0,0)
----------------------------------------------------------------
sendGET :: String -> IO (Response String)
sendGET url = sendGETwH url []
sendGETwH :: String -> [Header] -> IO (Response String)
sendGETwH url hdr = unResult $ simpleHTTP $ (getRequest url) { rqHeaders = hdr }
unResult :: IO (Result (Response String)) -> IO (Response String)
unResult action = do
res <- action
case res of
Right rsp -> return rsp
Left _ -> error "Connection error"
| sol/wai | warp/test/ExceptionSpec.hs | mit | 2,761 | 0 | 18 | 660 | 707 | 372 | 335 | 56 | 2 |
--
-- Chapter 3, definitions from the book.
--
module B'C'3 where
-- From subchapter 3.2, Relational operators (example).
threeEqual :: Integer -> Integer -> Integer -> Bool
threeEqual a b c
= a == b
&& b == c
| pascal-knodel/haskell-craft | _/links/B'C'3.hs | mit | 231 | 0 | 7 | 61 | 49 | 28 | 21 | 5 | 1 |
{-# LANGUAGE CPP, MultiParamTypeClasses, TypeSynonymInstances
, FlexibleInstances #-}
{- |
Module : $Header$
Description : Instance of class Logic for SoftFOL.
Copyright : (c) Rene Wagner, Klaus Luettich, Uni Bremen 2005-2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (imports Logic)
Instance of class Logic for SoftFOL.
-}
module SoftFOL.Logic_SoftFOL where
import Common.DefaultMorphism
import Common.DocUtils
import Common.ProofTree
import ATC.ProofTree ()
import Logic.Logic
import SoftFOL.ATC_SoftFOL ()
import SoftFOL.Sign
import SoftFOL.StatAna
import SoftFOL.Print
import SoftFOL.Conversions
import SoftFOL.Morphism
import SoftFOL.PrintTPTP ()
import SoftFOL.ProveSPASS
import SoftFOL.ProveHyperHyper
#ifndef NOHTTP
import SoftFOL.ProveMathServ
import SoftFOL.ProveVampire
#endif
import SoftFOL.ProveDarwin
import SoftFOL.ProveMetis
instance Pretty Sign where
pretty = pretty . signToSPLogicalPart
{- |
A dummy datatype for the LogicGraph and for identifying the right
instances
-}
data SoftFOL = SoftFOL deriving (Show)
instance Language SoftFOL where
description _ =
"SoftFOL - Softly typed First Order Logic for " ++
"Automated Theorem Proving Systems\n\n" ++
"This logic corresponds to the logic of SPASS, \n" ++
"but the generation of TPTP is also possible.\n" ++
"See http://spass.mpi-sb.mpg.de/\n" ++
"and http://www.cs.miami.edu/~tptp/TPTP/SyntaxBNF.html"
instance Logic.Logic.Syntax SoftFOL [TPTP] SFSymbol () ()
-- default implementation is fine!
instance Sentences SoftFOL Sentence Sign
SoftFOLMorphism SFSymbol where
map_sen SoftFOL _ = return
sym_of SoftFOL = singletonList . symOf
sym_name SoftFOL = symbolToId
symKind SoftFOL = sfSymbKind . sym_type
print_named SoftFOL = printFormula
negation _ = negateSentence
-- other default implementations are fine
instance StaticAnalysis SoftFOL [TPTP] Sentence
() ()
Sign
SoftFOLMorphism SFSymbol () where
empty_signature SoftFOL = emptySign
is_subsig SoftFOL _ _ = True
subsig_inclusion SoftFOL = defaultInclusion
basic_analysis SoftFOL = Just basicAnalysis
instance Logic SoftFOL () [TPTP] Sentence () ()
Sign
SoftFOLMorphism SFSymbol () ProofTree where
stability _ = Testing
provers SoftFOL = [spassProver]
#ifndef NOHTTP
++ [mathServBroker, vampire]
#endif
++ map darwinProver tptpProvers
++ [metisProver, hyperProver]
cons_checkers SoftFOL = map darwinConsChecker tptpProvers
++ [hyperConsChecker]
| mariefarrell/Hets | SoftFOL/Logic_SoftFOL.hs | gpl-2.0 | 2,780 | 0 | 10 | 621 | 459 | 250 | 209 | 59 | 0 |
module PatConArity2 where
data Tree a = Leaf a | Bin (Tree a) (Tree a) | (:+) (Tree a)
main = firstLeaf (Bin (Leaf 3) (Leaf 4))
firstLeaf (l :+ r) = 4
firstLeaf (Leaf x y) = x
firstLeaf (Bin t) = firstLeaf t
| roberth/uu-helium | test/staticerrors/PatConArity2.hs | gpl-3.0 | 213 | 5 | 9 | 50 | 121 | 65 | 56 | 6 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.OpsWorks.DescribeTimeBasedAutoScaling
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Describes time-based auto scaling configurations for specified
-- instances.
--
-- You must specify at least one of the parameters.
--
-- __Required Permissions__: To use this action, an IAM user must have a
-- Show, Deploy, or Manage permissions level for the stack, or an attached
-- policy that explicitly grants permissions. For more information on user
-- permissions, see
-- <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing User Permissions>.
--
-- /See:/ <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_DescribeTimeBasedAutoScaling.html AWS API Reference> for DescribeTimeBasedAutoScaling.
module Network.AWS.OpsWorks.DescribeTimeBasedAutoScaling
(
-- * Creating a Request
describeTimeBasedAutoScaling
, DescribeTimeBasedAutoScaling
-- * Request Lenses
, dtbasInstanceIds
-- * Destructuring the Response
, describeTimeBasedAutoScalingResponse
, DescribeTimeBasedAutoScalingResponse
-- * Response Lenses
, dtbasrsTimeBasedAutoScalingConfigurations
, dtbasrsResponseStatus
) where
import Network.AWS.OpsWorks.Types
import Network.AWS.OpsWorks.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'describeTimeBasedAutoScaling' smart constructor.
newtype DescribeTimeBasedAutoScaling = DescribeTimeBasedAutoScaling'
{ _dtbasInstanceIds :: [Text]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeTimeBasedAutoScaling' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dtbasInstanceIds'
describeTimeBasedAutoScaling
:: DescribeTimeBasedAutoScaling
describeTimeBasedAutoScaling =
DescribeTimeBasedAutoScaling'
{ _dtbasInstanceIds = mempty
}
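-- For example (illustrative only; '&' and '.~' are the lens operators
-- re-exported via Network.AWS.Prelude):
--   describeTimeBasedAutoScaling & dtbasInstanceIds .~ ["my-instance-id"]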
-- | An array of instance IDs.
dtbasInstanceIds :: Lens' DescribeTimeBasedAutoScaling [Text]
dtbasInstanceIds = lens _dtbasInstanceIds (\ s a -> s{_dtbasInstanceIds = a}) . _Coerce;
instance AWSRequest DescribeTimeBasedAutoScaling
where
type Rs DescribeTimeBasedAutoScaling =
DescribeTimeBasedAutoScalingResponse
request = postJSON opsWorks
response
= receiveJSON
(\ s h x ->
DescribeTimeBasedAutoScalingResponse' <$>
(x .?> "TimeBasedAutoScalingConfigurations" .!@
mempty)
<*> (pure (fromEnum s)))
instance ToHeaders DescribeTimeBasedAutoScaling where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("OpsWorks_20130218.DescribeTimeBasedAutoScaling" ::
ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON DescribeTimeBasedAutoScaling where
toJSON DescribeTimeBasedAutoScaling'{..}
= object
(catMaybes
[Just ("InstanceIds" .= _dtbasInstanceIds)])
instance ToPath DescribeTimeBasedAutoScaling where
toPath = const "/"
instance ToQuery DescribeTimeBasedAutoScaling where
toQuery = const mempty
-- | Contains the response to a 'DescribeTimeBasedAutoScaling' request.
--
-- /See:/ 'describeTimeBasedAutoScalingResponse' smart constructor.
data DescribeTimeBasedAutoScalingResponse = DescribeTimeBasedAutoScalingResponse'
{ _dtbasrsTimeBasedAutoScalingConfigurations :: !(Maybe [TimeBasedAutoScalingConfiguration])
, _dtbasrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeTimeBasedAutoScalingResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dtbasrsTimeBasedAutoScalingConfigurations'
--
-- * 'dtbasrsResponseStatus'
describeTimeBasedAutoScalingResponse
:: Int -- ^ 'dtbasrsResponseStatus'
-> DescribeTimeBasedAutoScalingResponse
describeTimeBasedAutoScalingResponse pResponseStatus_ =
DescribeTimeBasedAutoScalingResponse'
{ _dtbasrsTimeBasedAutoScalingConfigurations = Nothing
, _dtbasrsResponseStatus = pResponseStatus_
}
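-- A hand-built response value is mainly useful in tests; the status code
-- below is arbitrary:
--
-- > describeTimeBasedAutoScalingResponse 200
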
-- | An array of 'TimeBasedAutoScalingConfiguration' objects that describe
-- the configuration for the specified instances.
dtbasrsTimeBasedAutoScalingConfigurations :: Lens' DescribeTimeBasedAutoScalingResponse [TimeBasedAutoScalingConfiguration]
dtbasrsTimeBasedAutoScalingConfigurations = lens _dtbasrsTimeBasedAutoScalingConfigurations (\ s a -> s{_dtbasrsTimeBasedAutoScalingConfigurations = a}) . _Default . _Coerce;
-- | The response status code.
dtbasrsResponseStatus :: Lens' DescribeTimeBasedAutoScalingResponse Int
dtbasrsResponseStatus = lens _dtbasrsResponseStatus (\ s a -> s{_dtbasrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks/DescribeTimeBasedAutoScaling.hs | mpl-2.0 | 5,634 | 0 | 13 | 1,092 | 604 | 365 | 239 | 80 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module View.Header (header) where
import Text.Blaze.Html5.Attributes (href, rel)
import qualified Text.Blaze.Html5 as H
header :: H.Html
header =
H.head $ do
H.title "Pirate Gold"
H.link H.! rel "stylesheet" H.! href "css/style.css"
| codemiller/pirate-gold | src/View/Header.hs | apache-2.0 | 283 | 0 | 10 | 50 | 85 | 48 | 37 | 9 | 1 |
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{- |
Module : SrcLoc
Description : This module contains types and utility functions for tagging things with locations
Copyright : (c) 2014—2015 The F2J Project Developers (given in AUTHORS.txt)
License : BSD3
Maintainer : Weixin Zhang <[email protected]>
Stability : experimental
Portability : portable
-}
module SrcLoc where
import Text.PrettyPrint.ANSI.Leijen
type Located a = GenLocated Loc a
data GenLocated l e = L l e
deriving (Eq, Ord, Show) -- Typeable, Data)
instance Functor (GenLocated l) where
fmap f (L l e) = L l (f e)
deriving instance Foldable (GenLocated l)
deriving instance Traversable (GenLocated l)
data Loc = Loc { line :: !Int, column :: !Int }
| NoLoc
deriving (Eq, Ord, Show)
instance Pretty Loc where
pretty (Loc l c) = int l <> colon <> int c <> colon
pretty NoLoc = empty
unLoc :: Located a -> a
unLoc (L _ x) = x
withLoc :: b -> Located a -> Located b
x `withLoc` (L loc _) = L loc x
noLoc :: a -> Located a
noLoc = L NoLoc
withLocs :: b -> [Located a] -> Located b
withLocs x [] = noLoc x
withLocs x (l:_) = x `withLoc` l
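-- A small usage sketch of the tagging helpers above (the payload type is
-- arbitrary):
--
-- > ex :: Located String
-- > ex = L (Loc 3 14) "x"
-- >
-- > -- unLoc ex == "x"
-- > -- unLoc (True `withLoc` ex) == True
-- > -- noLoc "y" == L NoLoc "y"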
| zhiyuanshi/fcore | frontend/abstractsyntax/SrcLoc.hs | bsd-2-clause | 1,253 | 0 | 9 | 301 | 371 | 195 | 176 | 31 | 1 |
module BrownPLT.Flapjax.DevServer
( flapjaxDevServer
) where
import qualified Data.ByteString.Lazy.Char8 as BC
import qualified Data.Map as M
import System.FilePath
import Control.Monad.Trans
import Network.WebServer
import Network.WebServer.Files
import Network.WebServer.HTTP.Listen
import Text.XHtml (showHtml,toHtml,HTML)
import BrownPLT.Html
import Flapjax.Compiler
serveFxFiles :: MonadIO m
=> FilePath -- ^root path
-> FilePath -- ^path to Flapjax, relative to the server root
-> ServerT m Response
serveFxFiles localRootPath fxPath = do
rq <- getRequest
liftIO $ do
maybePath <- uriToLocalPath ["index.fx", "index.html"] localRootPath
(rqPaths rq)
case maybePath of
Nothing
| rqPaths rq == ["flapjax.js"] -> do
file <- getFile fxPath
case file of
Nothing -> do
putStrLn $ "Could not open " ++ fxPath
fail "permission denied accessing flapjax.js"
Just (body, size, modifiedTime) -> do
renderFile "text/javascript" size modifiedTime body rq
| otherwise -> do
putStrLn $ "File not found: " ++ (show $ rqPaths rq)
fail "file not found"
Just path
| takeExtension path == ".fx" -> do
parseResult <- parseHtmlFromFile path
case parseResult of
Left err -> return $ ok (showHtml (toHtml err))
Right (fxHtml, warnings) -> do
mapM_ (putStrLn.show) warnings
(msgs, html) <- compilePage
(defaults { flapjaxPath = "/flapjax.js" })
fxHtml
mapM_ (putStrLn.show) msgs
renderFile "text/html" 0 undefined (BC.pack $ renderHtml html)
rq
| otherwise -> do
file <- getFile path
case file of
Nothing -> fail "permission denied"
Just (body, size, modifiedTime) -> do
let mimeType = M.findWithDefault "application/octet-stream"
(takeExtension path) mimeTypes
renderFile mimeType size modifiedTime body rq
flapjaxDevServer :: Int -> FilePath -> FilePath -> IO ()
flapjaxDevServer port fxPath rootPath =
runServer port (serveFxFiles rootPath fxPath)
| ducis/flapjax-fixed | flapjax/trunk/compiler/src/BrownPLT/Flapjax/DevServer.hs | bsd-3-clause | 2,424 | 0 | 26 | 846 | 600 | 299 | 301 | 58 | 5 |
{-# language CPP #-}
{-# language MultiParamTypeClasses #-}
#ifndef ENABLE_INTERNAL_DOCUMENTATION
{-# OPTIONS_HADDOCK hide #-}
#endif
module OpenCV.Internal.Core.Types.Vec
( Vec(..)
, VecDim
, IsVec(..)
) where
import "base" Foreign.ForeignPtr ( ForeignPtr, withForeignPtr )
import "base" GHC.TypeLits
import "linear" Linear ( V2, V3, V4 )
import "this" OpenCV.Internal.C.Types
--------------------------------------------------------------------------------
newtype Vec (dim :: Nat) (depth :: *)
= Vec {unVec :: ForeignPtr (C'Vec dim depth)}
type instance C (Vec dim depth) = C'Vec dim depth
instance WithPtr (Vec dim depth) where
withPtr = withForeignPtr . unVec
type family VecDim (v :: * -> *) :: Nat
type instance VecDim (Vec dim) = dim
type instance VecDim V2 = 2
type instance VecDim V3 = 3
type instance VecDim V4 = 4
class IsVec (v :: * -> *) (depth :: *) where
toVec :: v depth -> Vec (VecDim v) depth
fromVec :: Vec (VecDim v) depth -> v depth
toVecIO :: v depth -> IO (Vec (VecDim v) depth)
toVecIO = pure . toVec
--------------------------------------------------------------------------------
instance (IsVec V2 a, Show a)
=> Show (Vec 2 a) where
showsPrec prec vec =
showParen (prec >= 10)
$ showString "toVec "
. showParen True (shows v2)
where
v2 :: V2 a
v2 = fromVec vec
instance (IsVec V3 a, Show a)
=> Show (Vec 3 a) where
showsPrec prec vec =
showParen (prec >= 10)
$ showString "toVec "
. showParen True (shows v3)
where
v3 :: V3 a
v3 = fromVec vec
instance (IsVec V4 a, Show a)
=> Show (Vec 4 a) where
showsPrec prec vec =
showParen (prec >= 10)
$ showString "toVec "
. showParen True (shows v4)
where
v4 :: V4 a
v4 = fromVec vec
| lukexi/haskell-opencv | src/OpenCV/Internal/Core/Types/Vec.hs | bsd-3-clause | 1,882 | 0 | 12 | 507 | 619 | 338 | 281 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses, TypeSynonymInstances, PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.WorkspaceDir
-- Copyright : (c) 2007 David Roundy <[email protected]>
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : none
-- Stability : unstable
-- Portability : unportable
--
-- WorkspaceDir is an extension to set the current directory in a workspace.
--
-- Actually, it sets the current directory in a layout, since there's no way I
-- know of to attach a behavior to a workspace. This means that any terminals
-- (or other programs) pulled up in that workspace (with that layout) will
-- execute in that working directory. Sort of handy, I think.
--
-- Note this extension requires the 'directory' package to be installed.
--
-----------------------------------------------------------------------------
module XMonad.Layout.WorkspaceDir (
-- * Usage
-- $usage
workspaceDir,
changeDir,
WorkspaceDir,
) where
import Prelude hiding (catch)
import System.Directory ( setCurrentDirectory, getCurrentDirectory )
import Control.Monad ( when )
import XMonad hiding ( focus )
import XMonad.Prompt ( XPConfig )
import XMonad.Prompt.Directory ( directoryPrompt )
import XMonad.Layout.LayoutModifier
import XMonad.StackSet ( tag, currentTag )
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Layout.WorkspaceDir
--
-- Then edit your @layoutHook@ by adding the Workspace layout modifier
-- to some layout:
--
-- > myLayout = workspaceDir "~" (Tall 1 (3/100) (1/2)) ||| Full ||| etc..
-- > main = xmonad defaultConfig { layoutHook = myLayout }
--
-- For more detailed instructions on editing the layoutHook see:
--
-- "XMonad.Doc.Extending#Editing_the_layout_hook"
--
-- WorkspaceDir provides also a prompt. To use it you need to import
-- "XMonad.Prompt" and add something like this to your key bindings:
--
-- > , ((modm .|. shiftMask, xK_x ), changeDir defaultXPConfig)
--
-- For detailed instruction on editing the key binding see:
--
-- "XMonad.Doc.Extending#Editing_key_bindings".
data Chdir = Chdir String deriving ( Typeable )
instance Message Chdir
data WorkspaceDir a = WorkspaceDir String deriving ( Read, Show )
instance LayoutModifier WorkspaceDir Window where
modifyLayout (WorkspaceDir d) w r = do tc <- gets (currentTag.windowset)
when (tc == tag w) $ scd d
runLayout w r
handleMess (WorkspaceDir _) m
| Just (Chdir wd) <- fromMessage m = do wd' <- cleanDir wd
return $ Just $ WorkspaceDir wd'
| otherwise = return Nothing
workspaceDir :: LayoutClass l a => String -> l a
-> ModifiedLayout WorkspaceDir l a
workspaceDir s = ModifiedLayout (WorkspaceDir s)
cleanDir :: String -> X String
cleanDir x = scd x >> io getCurrentDirectory
scd :: String -> X ()
scd x = catchIO $ setCurrentDirectory x
changeDir :: XPConfig -> X ()
changeDir c = directoryPrompt c "Set working directory: " (sendMessage . Chdir)
| kmels/xmonad-launcher | XMonad/Layout/WorkspaceDir.hs | bsd-3-clause | 3,428 | 0 | 12 | 867 | 482 | 269 | 213 | 33 | 1 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "dist/dist-sandbox-261cd265/build/System/Posix/Resource.hs" #-}
{-# LINE 1 "System/Posix/Resource.hsc" #-}
{-# LANGUAGE CApiFFI #-}
{-# LINE 2 "System/Posix/Resource.hsc" #-}
{-# LINE 3 "System/Posix/Resource.hsc" #-}
{-# LANGUAGE Safe #-}
{-# LINE 7 "System/Posix/Resource.hsc" #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Posix.Resource
-- Copyright : (c) The University of Glasgow 2003
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : non-portable (requires POSIX)
--
-- POSIX resource support
--
-----------------------------------------------------------------------------
module System.Posix.Resource (
-- * Resource Limits
ResourceLimit(..), ResourceLimits(..), Resource(..),
getResourceLimit,
setResourceLimit,
) where
{-# LINE 29 "System/Posix/Resource.hsc" #-}
import System.Posix.Types
import Foreign
import Foreign.C
-- -----------------------------------------------------------------------------
-- Resource limits
data Resource
= ResourceCoreFileSize
| ResourceCPUTime
| ResourceDataSize
| ResourceFileSize
| ResourceOpenFiles
| ResourceStackSize
{-# LINE 45 "System/Posix/Resource.hsc" #-}
| ResourceTotalMemory
{-# LINE 47 "System/Posix/Resource.hsc" #-}
deriving Eq
data ResourceLimits
= ResourceLimits { softLimit, hardLimit :: ResourceLimit }
deriving Eq
data ResourceLimit
= ResourceLimitInfinity
| ResourceLimitUnknown
| ResourceLimit Integer
deriving Eq
data {-# CTYPE "struct rlimit" #-} RLimit
foreign import capi unsafe "HsUnix.h getrlimit"
c_getrlimit :: CInt -> Ptr RLimit -> IO CInt
foreign import capi unsafe "HsUnix.h setrlimit"
c_setrlimit :: CInt -> Ptr RLimit -> IO CInt
getResourceLimit :: Resource -> IO ResourceLimits
getResourceLimit res = do
allocaBytes (16) $ \p_rlimit -> do
{-# LINE 70 "System/Posix/Resource.hsc" #-}
throwErrnoIfMinus1_ "getResourceLimit" $
c_getrlimit (packResource res) p_rlimit
soft <- ((\hsc_ptr -> peekByteOff hsc_ptr 0)) p_rlimit
{-# LINE 73 "System/Posix/Resource.hsc" #-}
hard <- ((\hsc_ptr -> peekByteOff hsc_ptr 8)) p_rlimit
{-# LINE 74 "System/Posix/Resource.hsc" #-}
return (ResourceLimits {
softLimit = unpackRLimit soft,
hardLimit = unpackRLimit hard
})
setResourceLimit :: Resource -> ResourceLimits -> IO ()
setResourceLimit res ResourceLimits{softLimit=soft,hardLimit=hard} = do
allocaBytes (16) $ \p_rlimit -> do
{-# LINE 82 "System/Posix/Resource.hsc" #-}
((\hsc_ptr -> pokeByteOff hsc_ptr 0)) p_rlimit (packRLimit soft True)
{-# LINE 83 "System/Posix/Resource.hsc" #-}
((\hsc_ptr -> pokeByteOff hsc_ptr 8)) p_rlimit (packRLimit hard False)
{-# LINE 84 "System/Posix/Resource.hsc" #-}
throwErrnoIfMinus1_ "setResourceLimit" $
c_setrlimit (packResource res) p_rlimit
return ()
packResource :: Resource -> CInt
packResource ResourceCoreFileSize = (4)
{-# LINE 90 "System/Posix/Resource.hsc" #-}
packResource ResourceCPUTime = (0)
{-# LINE 91 "System/Posix/Resource.hsc" #-}
packResource ResourceDataSize = (2)
{-# LINE 92 "System/Posix/Resource.hsc" #-}
packResource ResourceFileSize = (1)
{-# LINE 93 "System/Posix/Resource.hsc" #-}
packResource ResourceOpenFiles = (7)
{-# LINE 94 "System/Posix/Resource.hsc" #-}
packResource ResourceStackSize = (3)
{-# LINE 95 "System/Posix/Resource.hsc" #-}
{-# LINE 96 "System/Posix/Resource.hsc" #-}
packResource ResourceTotalMemory = (9)
{-# LINE 97 "System/Posix/Resource.hsc" #-}
{-# LINE 98 "System/Posix/Resource.hsc" #-}
unpackRLimit :: CRLim -> ResourceLimit
unpackRLimit (18446744073709551615) = ResourceLimitInfinity
{-# LINE 101 "System/Posix/Resource.hsc" #-}
unpackRLimit other
{-# LINE 103 "System/Posix/Resource.hsc" #-}
| ((18446744073709551615) :: CRLim) /= (18446744073709551615) &&
{-# LINE 104 "System/Posix/Resource.hsc" #-}
other == (18446744073709551615) = ResourceLimitUnknown
{-# LINE 105 "System/Posix/Resource.hsc" #-}
{-# LINE 106 "System/Posix/Resource.hsc" #-}
{-# LINE 107 "System/Posix/Resource.hsc" #-}
| ((18446744073709551615) :: CRLim) /= (18446744073709551615) &&
{-# LINE 108 "System/Posix/Resource.hsc" #-}
other == (18446744073709551615) = ResourceLimitUnknown
{-# LINE 109 "System/Posix/Resource.hsc" #-}
{-# LINE 110 "System/Posix/Resource.hsc" #-}
| otherwise = ResourceLimit (fromIntegral other)
packRLimit :: ResourceLimit -> Bool -> CRLim
packRLimit ResourceLimitInfinity _ = (18446744073709551615)
{-# LINE 114 "System/Posix/Resource.hsc" #-}
{-# LINE 115 "System/Posix/Resource.hsc" #-}
packRLimit ResourceLimitUnknown True = (18446744073709551615)
{-# LINE 116 "System/Posix/Resource.hsc" #-}
{-# LINE 117 "System/Posix/Resource.hsc" #-}
{-# LINE 118 "System/Posix/Resource.hsc" #-}
packRLimit ResourceLimitUnknown False = (18446744073709551615)
{-# LINE 119 "System/Posix/Resource.hsc" #-}
{-# LINE 120 "System/Posix/Resource.hsc" #-}
packRLimit (ResourceLimit other) _ = fromIntegral other
-- -----------------------------------------------------------------------------
-- Test code
{-
import System.Posix
import Control.Monad
main = do
zipWithM_ (\r n -> setResourceLimit r ResourceLimits{
hardLimit = ResourceLimit n,
softLimit = ResourceLimit n })
allResources [1..]
showAll
mapM_ (\r -> setResourceLimit r ResourceLimits{
hardLimit = ResourceLimit 1,
softLimit = ResourceLimitInfinity })
allResources
-- should fail
showAll =
mapM_ (\r -> getResourceLimit r >>= (putStrLn . showRLims)) allResources
allResources =
[ResourceCoreFileSize, ResourceCPUTime, ResourceDataSize,
ResourceFileSize, ResourceOpenFiles, ResourceStackSize
#ifdef RLIMIT_AS
, ResourceTotalMemory
#endif
]
showRLims ResourceLimits{hardLimit=h,softLimit=s}
= "hard: " ++ showRLim h ++ ", soft: " ++ showRLim s
showRLim ResourceLimitInfinity = "infinity"
showRLim ResourceLimitUnknown = "unknown"
showRLim (ResourceLimit other) = show other
-}
| phischu/fragnix | tests/packages/scotty/System.Posix.Resource.hs | bsd-3-clause | 6,362 | 6 | 16 | 1,143 | 805 | 459 | 346 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
module Facebook.TestUsers
( TestUser(..)
, CreateTestUser(..)
, CreateTestUserInstalled(..)
, getTestUsers
, removeTestUser
, createTestUser
, makeFriendConn
, incompleteTestUserAccessToken
) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad (unless, mzero)
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Default
import Data.Text
import Data.Time (UTCTime(..), Day(..))
import Data.Typeable (Typeable)
import qualified Control.Exception.Lifted as E
import qualified Control.Monad.Trans.Resource as R
import qualified Data.Aeson as A
import qualified Data.ByteString.Char8 as B
import Facebook.Auth
import Facebook.Base
import Facebook.Graph
import Facebook.Monad
import Facebook.Types
import Facebook.Pager
-- | A Facebook test user.
data TestUser =
TestUser { tuId :: UserId
, tuAccessToken :: Maybe AccessTokenData
, tuLoginUrl :: Maybe Text
, tuEmail :: Maybe Text
, tuPassword :: Maybe Text
}
deriving (Eq, Ord, Show, Read, Typeable)
instance A.FromJSON TestUser where
parseJSON (A.Object v) =
TestUser <$> v A..: "id"
<*> v A..:? "access_token"
<*> v A..:? "login_url"
<*> v A..:? "email"
<*> v A..:? "password"
parseJSON _ = mzero
-- | Data type used to hold the information of a new test user. This type
-- has a 'Data.Default' instance, so 'def' gives a sensible default value.
data CreateTestUser =
CreateTestUser
{ ctuInstalled :: CreateTestUserInstalled
, ctuName :: Maybe Text
, ctuLocale :: Maybe Text
}
-- | Specify whether the app is to be installed on the new test user. If
-- it is, then you must say which permissions should be granted.
data CreateTestUserInstalled =
CreateTestUserNotInstalled
| CreateTestUserInstalled { ctuiPermissions :: [Permission] }
| CreateTestUserFbDefault
-- ^ Uses Facebook's default. It seems that this is equivalent to
-- @CreateTestUserInstalled []@, but Facebook's documentation is
-- not clear about it.
-- | Default instance for 'CreateTestUser'.
instance Default CreateTestUser where
def = CreateTestUser def def def
-- | Default instance for 'CreateTestUserInstalled'.
instance Default CreateTestUserInstalled where
def = CreateTestUserFbDefault
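-- A minimal sketch of describing a test user to create. The permission
-- names below are only examples (see Facebook's documentation for valid
-- ones), and the literals assume OverloadedStrings together with the usual
-- IsString instance for 'Permission':
--
-- > def { ctuInstalled = CreateTestUserInstalled ["email", "user_friends"]
-- >     , ctuName      = Just "Test User"
-- >     }
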
-- | Construct a query from a 'CreateTestUser'.
createTestUserQueryArgs :: CreateTestUser -> [Argument]
createTestUserQueryArgs (CreateTestUser installed name locale) =
forInst installed ++ forField "name" name ++ forField "locale" locale
where
forInst (CreateTestUserInstalled p) = [ "installed" #= True, "permissions" #= p ]
forInst CreateTestUserNotInstalled = [ "installed" #= False ]
forInst CreateTestUserFbDefault = []
forField _ Nothing = []
forField fieldName (Just f) = [ fieldName #= f ]
-- | Create a new test user.
createTestUser :: (R.MonadResource m, MonadBaseControl IO m)
=> CreateTestUser -- ^ How the test user should be
-- created.
-> AppAccessToken -- ^ Access token for your app.
-> FacebookT Auth m TestUser
createTestUser userInfo token = do
creds <- getCreds
let query = ("method","post") : createTestUserQueryArgs userInfo
getObject ("/" <> appId creds <> "/accounts/test-users") query (Just token)
-- | Get a list of test users.
getTestUsers :: (R.MonadResource m, MonadBaseControl IO m)
=> AppAccessToken -- ^ Access token for your app.
-> FacebookT Auth m (Pager TestUser)
getTestUsers token = do
creds <- getCreds
getObject ("/" <> appId creds <> "/accounts/test-users") [] (Just token)
-- | Remove an existing test user.
removeTestUser :: (R.MonadResource m, MonadBaseControl IO m)
=> TestUser -- ^ The TestUser to be removed.
-> AppAccessToken -- ^ Access token for your app (ignored since fb 0.14.7).
-> FacebookT Auth m Bool
removeTestUser testUser _token =
getObjectBool ("/" <> idCode (tuId testUser)) [("method","delete")] token
where token = incompleteTestUserAccessToken testUser
-- | Make a friend connection between two test users.
--
-- This is how Facebook's API works: two calls must be made. The first
-- call has the format: \"\/userA_id\/friends\/userB_id\" with the
-- access token of user A as query parameter. The second call has the
-- format: \"\/userB_id\/friends\/userA_id\" with the access token of
-- user B as query parameter. The first call creates a friend request
-- and the second call accepts the friend request.
makeFriendConn :: (R.MonadResource m, MonadBaseControl IO m)
=> TestUser
-> TestUser
-> FacebookT Auth m ()
makeFriendConn (TestUser { tuAccessToken = Nothing }) _ = E.throw $
FbLibraryException "The test user passed on the first argument doesn't have\
\ a token. Both users must have a token."
makeFriendConn _ (TestUser { tuAccessToken = Nothing }) = E.throw $
FbLibraryException "The test user passed on the second argument doesn't have\
\ a token. Both users must have a token."
makeFriendConn (TestUser {tuId = id1, tuAccessToken = (Just token1)}) (TestUser {tuId = id2, tuAccessToken = (Just token2)}) = do
let friendReq userId1 userId2 token =
getObjectBool ("/" <> idCode userId1 <> "/friends/" <> idCode userId2)
[ "method" #= ("post" :: B.ByteString),
"access_token" #= token ]
Nothing
r1 <- friendReq id1 id2 token1
r2 <- friendReq id2 id1 token2
unless r1 $ E.throw $ FbLibraryException "Couldn't make friend request."
unless r2 $ E.throw $ FbLibraryException "Couldn't accept friend request."
return ()
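-- A minimal sketch of creating two test users and connecting them. The app
-- access token is assumed to be obtained elsewhere (e.g. via
-- 'getAppAccessToken'), and the app is installed for both users so that
-- they carry the access tokens 'makeFriendConn' requires:
--
-- > connectTwo appTok = do
-- >   let spec = def { ctuInstalled = CreateTestUserInstalled [] }
-- >   userA <- createTestUser spec appTok
-- >   userB <- createTestUser spec appTok
-- >   makeFriendConn userA userB
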
-- | Create an 'UserAccessToken' from a 'TestUser'. It's incomplete
-- because it will not have the right expiration time.
incompleteTestUserAccessToken :: TestUser -> Maybe UserAccessToken
incompleteTestUserAccessToken t = do
tokenData <- tuAccessToken t
let farFuture = UTCTime (ModifiedJulianDay 100000) 0
return (UserAccessToken (tuId t) tokenData farFuture)
-- | Same as 'getObject', but instead of parsing the result
-- as JSON, it tries to parse it as either "true" or "false".
-- Used only by the Test User API bindings.
getObjectBool :: (R.MonadResource m, MonadBaseControl IO m)
=> Text
-- ^ Path (should begin with a slash @\/@).
-> [Argument]
-- ^ Arguments to be passed to Facebook.
-> Maybe (AccessToken anyKind)
-- ^ Optional access token.
-> FacebookT anyAuth m Bool
getObjectBool path query mtoken =
runResourceInFb $ do
bs <- asBS =<< fbhttp =<< fbreq path mtoken query
return (bs == "true")
| prowdsponsor/fb | src/Facebook/TestUsers.hs | bsd-3-clause | 6,994 | 0 | 15 | 1,775 | 1,385 | 751 | 634 | 117 | 4 |
--
-- Haskell wrapper for Dotnet.System.Xml.XmlReader
--
module Dotnet.System.Xml.XmlReader where
import Dotnet
import qualified Dotnet.System.Object
import Dotnet.System.Xml.XmlNameTable
import Dotnet.System.Xml
data XmlReader_ a
type XmlReader a = Dotnet.System.Object.Object (XmlReader_ a)
attributeCount :: XmlReader a -> IO Int
attributeCount = invoke "get_AttributeCount" ()
baseURI :: XmlReader a -> IO String
baseURI = invoke "get_BaseURI" ()
canResolveEntity :: XmlReader a -> IO Bool
canResolveEntity = invoke "get_CanResolveEntity" ()
depth :: XmlReader a -> IO Int
depth = invoke "get_Depth" ()
eof :: XmlReader a -> IO Bool
eof = invoke "get_EOF" ()
hasAttributes :: XmlReader a -> IO Bool
hasAttributes = invoke "get_HasAttributes" ()
hasValue :: XmlReader a -> IO Bool
hasValue = invoke "get_HasValue" ()
isDefault :: XmlReader a -> IO Bool
isDefault = invoke "get_IsDefault" ()
isEmptyElement :: XmlReader a -> IO Bool
isEmptyElement = invoke "get_IsEmptyElement" ()
itemIndex :: Int -> XmlReader a -> IO String
itemIndex idx = invoke "get_Item" idx
itemName :: String -> XmlReader a -> IO String
itemName nm = invoke "get_Item" nm
itemNameURI :: String -> String -> XmlReader a -> IO String
itemNameURI nm uri = invoke "get_Item" (nm,uri)
localName :: XmlReader a -> IO String
localName = invoke "get_LocalName" ()
name :: XmlReader a -> IO String
name = invoke "get_Name" ()
namespaceURI :: XmlReader a -> IO String
namespaceURI = invoke "get_NamespaceURI" ()
nameTable :: XmlReader a -> IO (Dotnet.System.Xml.XmlNameTable.XmlNameTable b)
nameTable = invoke "get_NameTable" ()
nodeType :: XmlReader a -> IO Dotnet.System.Xml.XmlNodeType
nodeType this = do
v <- this # invoke "get_NodeType" ()
return (toEnum v)
prefix :: XmlReader a -> IO String
prefix = invoke "get_Prefix" ()
quoteChar :: XmlReader a -> IO Char
quoteChar = invoke "get_QuoteChar" ()
readState :: XmlReader a -> IO Dotnet.System.Xml.ReadState
readState this = do
v <- this # invoke "get_ReadState" ()
return (toEnum v)
value :: XmlReader a -> IO String
value = invoke "get_Value" ()
xmlLang :: XmlReader a -> IO String
xmlLang = invoke "get_XmlLang" ()
xmlSpace :: XmlReader a -> IO Dotnet.System.Xml.XmlSpace
xmlSpace this = do
v <- this # invoke "get_XmlSpace" ()
return (toEnum v)
close :: XmlReader a -> IO ()
close = invoke "Close" ()
getAttributeIndex :: Int -> XmlReader a -> IO String
getAttributeIndex idx = invoke "GetAttribute" idx
getAttributeName :: String -> XmlReader a -> IO String
getAttributeName nm = invoke "GetAttribute" nm
getAttributeNameURI :: String -> String -> XmlReader a -> IO String
getAttributeNameURI nm uri = invoke "GetAttribute" (nm,uri)
isName :: String -> XmlReader a -> IO Bool
isName str = invoke "IsName" str
isNameToken :: String -> XmlReader a -> IO Bool
isNameToken str = invoke "IsNameToken" str
isStartElement :: XmlReader a -> IO Bool
isStartElement = invoke "IsStartElement" ()
isStartElementName :: String -> XmlReader a -> IO Bool
isStartElementName str = invoke "IsStartElement" str
isStartElementNameURI :: String -> String -> XmlReader a -> IO Bool
isStartElementNameURI str uri = invoke "IsStartElement" (str,uri)
lookupNamespace :: String -> XmlReader a -> IO String
lookupNamespace str = invoke "LookupNamespace" str
moveToAttributeIndex :: Int -> XmlReader a -> IO ()
moveToAttributeIndex idx = invoke "MoveToAttribute" idx
moveToAttributeName :: String -> XmlReader a -> IO Bool
moveToAttributeName str = invoke "MoveToAttribute" str
moveToAttributeNameURI :: String -> String -> XmlReader a -> IO Bool
moveToAttributeNameURI str uri = invoke "MoveToAttribute" (str,uri)
moveToContent :: XmlReader a -> IO Dotnet.System.Xml.XmlNodeType
moveToContent this = do
v <- this # invoke "MoveToContent" ()
return (toEnum v)
moveToElement :: XmlReader a -> IO Bool
moveToElement = invoke "MoveToElement" ()
moveToFirstAttribute :: XmlReader a -> IO Bool
moveToFirstAttribute = invoke "MoveToFirstAttribute" ()
moveToNextAttribute :: XmlReader a -> IO Bool
moveToNextAttribute = invoke "MoveToNextAttribute" ()
readNext :: XmlReader a -> IO Bool
readNext = invoke "Read" ()
readAttributeValue :: XmlReader a -> IO Bool
readAttributeValue = invoke "ReadAttributeValue" ()
readElementString :: XmlReader a -> IO String
readElementString = invoke "ReadElementString" ()
readElementStringName :: String -> XmlReader a -> IO String
readElementStringName str = invoke "ReadElementString" str
readElementStringNameURI :: String -> String -> XmlReader a -> IO String
readElementStringNameURI str uri = invoke "ReadElementString" (str,uri)
readEndElement :: XmlReader a -> IO ()
readEndElement = invoke "ReadEndElement" ()
readInnerXml :: XmlReader a -> IO String
readInnerXml = invoke "ReadInnerXml" ()
readOuterXml :: XmlReader a -> IO String
readOuterXml = invoke "ReadOuterXml" ()
readStartElement :: XmlReader a -> IO ()
readStartElement = invoke "ReadStartElement" ()
readStartElementName :: String -> XmlReader a -> IO ()
readStartElementName str = invoke "ReadStartElement" str
readStartElementNameURI :: String -> String -> XmlReader a -> IO ()
readStartElementNameURI str uri = invoke "ReadStartElement" (str,uri)
readString :: XmlReader a -> IO String
readString = invoke "ReadString" ()
resolveEntity :: XmlReader a -> IO ()
resolveEntity = invoke "ResolveEntity" ()
skip :: XmlReader a -> IO ()
skip = invoke "Skip" ()
| FranklinChen/Hugs | dotnet/lib/Dotnet/System/Xml/XmlReader.hs | bsd-3-clause | 5,437 | 0 | 10 | 867 | 1,825 | 891 | 934 | -1 | -1 |
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Wiki page view.
module HL.View.Wiki where
import HL.View
import HL.View.Code
import HL.View.Template
import Control.Monad.Identity
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.List (isPrefixOf)
import Data.Monoid
import Data.Text (unpack,pack)
import Data.Text.Lazy (toStrict)
import Data.Text.Lazy.Builder
import Language.Haskell.HsColour.CSS (hscolour)
import Text.Blaze.Html.Renderer.Text (renderHtml)
import Text.HTML.TagStream.Text
import Text.Pandoc.Definition
import Text.Pandoc.Options
import Text.Pandoc.Walk
import Text.Pandoc.Writers.HTML
-- | Wiki view.
wikiV :: (Route App -> Text) -> Either Text (Text,Pandoc) -> FromLucid App
wikiV urlr result =
template
([WikiHomeR] ++
[WikiR n | Right (n,_) <- [result]])
(case result of
Left{} -> "Wiki error!"
Right (t,_) -> t)
(\_ ->
container_
(row_
(span12_ [class_ "col-md-12"]
(case result of
Left err ->
do h1_ "Wiki page retrieval problem!"
p_ (toHtml err)
Right (t,pan) ->
do h1_ (toHtml t)
toHtmlRaw (renderHtml (writeHtml writeOptions (cleanup urlr pan)))))))
where cleanup url = highlightBlock . highlightInline . relativize url
writeOptions = def { writerTableOfContents = True }
-- | Make all wiki links use the wiki route.
relativize :: (Route App -> Text) -> Pandoc -> Pandoc
relativize url = walk links
where links asis@(Link is (ref,t))
| isPrefixOf "http://" ref || isPrefixOf "https://" ref = asis
| otherwise = Link is (unpack (url (WikiR (pack ref))),t)
links x = x
-- | Highlight code blocks with a decent Haskell syntax
-- highlighter.
highlightBlock :: Pandoc -> Pandoc
highlightBlock = walk codes
where codes (CodeBlock ("",["haskell"],[]) t) =
RawBlock "html" (hscolour False 1 t)
codes x = x
-- | Highlight inline code samples with a decent Haskell syntax
-- highlighter.
highlightInline :: Pandoc -> Pandoc
highlightInline = walk codes
where codes (Code ("",["haskell"],[]) txt) =
RawInline "html" (preToCode (hscolour False 1 txt))
codes (Code x txt) = Code x (unpack (decodeEntities (pack txt)))
codes x = x
-- | Decode entities because for some reason MediaWiki syntax allows
-- entities and decodes them inside a <code></code> block.
decodeEntities :: Text -> Text
decodeEntities t =
runIdentity (fmap (toStrict . toLazyText . mconcat)
(CL.sourceList [t]
$= tokenStream
$= CL.map (showToken (\x ->x))
$$ CL.consume))
| josefs/hl | src/HL/View/Wiki.hs | bsd-3-clause | 2,991 | 0 | 25 | 901 | 836 | 453 | 383 | 66 | 3 |
{-# LANGUAGE CPP, MagicHash #-}
--
-- (c) The University of Glasgow 2002-2006
--
-- | ByteCodeGen: Generate bytecode from Core
module ByteCodeGen ( UnlinkedBCO, byteCodeGen, coreExprToBCOs ) where
#include "HsVersions.h"
import ByteCodeInstr
import ByteCodeItbls
import ByteCodeAsm
import ByteCodeLink
import LibFFI
import DynFlags
import Outputable
import Platform
import Name
import MkId
import Id
import ForeignCall
import HscTypes
import CoreUtils
import CoreSyn
import PprCore
import Literal
import PrimOp
import CoreFVs
import Type
import DataCon
import TyCon
import Util
import VarSet
import TysPrim
import ErrUtils
import Unique
import FastString
import Panic
import StgCmmLayout ( ArgRep(..), toArgRep, argRepSizeW )
import SMRep
import Bitmap
import OrdList
import Data.List
import Foreign
import Foreign.C
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative (Applicative(..))
#endif
import Control.Monad
import Data.Char
import UniqSupply
import BreakArray
import Data.Maybe
import Module
import qualified Data.ByteString as BS
import qualified Data.ByteString.Unsafe as BS
import Data.Map (Map)
import qualified Data.Map as Map
import qualified FiniteMap as Map
import Data.Ord
-- -----------------------------------------------------------------------------
-- Generating byte code for a complete module
byteCodeGen :: DynFlags
-> Module
-> CoreProgram
-> [TyCon]
-> ModBreaks
-> IO CompiledByteCode
byteCodeGen dflags this_mod binds tycs modBreaks
= do showPass dflags "ByteCodeGen"
let flatBinds = [ (bndr, freeVars rhs)
| (bndr, rhs) <- flattenBinds binds]
us <- mkSplitUniqSupply 'y'
(BcM_State _dflags _us _this_mod _final_ctr mallocd _, proto_bcos)
<- runBc dflags us this_mod modBreaks (mapM schemeTopBind flatBinds)
when (notNull mallocd)
(panic "ByteCodeGen.byteCodeGen: missing final emitBc?")
dumpIfSet_dyn dflags Opt_D_dump_BCOs
"Proto-BCOs" (vcat (intersperse (char ' ') (map ppr proto_bcos)))
assembleBCOs dflags proto_bcos tycs
-- -----------------------------------------------------------------------------
-- Generating byte code for an expression
-- Returns: (the root BCO for this expression,
--           a list of auxiliary BCOs resulting from compiling closures)
coreExprToBCOs :: DynFlags
-> Module
-> CoreExpr
-> IO UnlinkedBCO
coreExprToBCOs dflags this_mod expr
= do showPass dflags "ByteCodeGen"
-- create a totally bogus name for the top-level BCO; this
-- should be harmless, since it's never used for anything
let invented_name = mkSystemVarName (mkPseudoUniqueE 0) (fsLit "ExprTopLevel")
invented_id = Id.mkLocalId invented_name (panic "invented_id's type")
-- the uniques are needed to generate fresh variables when we introduce new
-- let bindings for ticked expressions
us <- mkSplitUniqSupply 'y'
(BcM_State _dflags _us _this_mod _final_ctr mallocd _ , proto_bco)
<- runBc dflags us this_mod emptyModBreaks $
schemeTopBind (invented_id, freeVars expr)
when (notNull mallocd)
(panic "ByteCodeGen.coreExprToBCOs: missing final emitBc?")
dumpIfSet_dyn dflags Opt_D_dump_BCOs "Proto-BCOs" (ppr proto_bco)
assembleBCO dflags proto_bco
-- -----------------------------------------------------------------------------
-- Compilation schema for the bytecode generator
type BCInstrList = OrdList BCInstr
type Sequel = Word -- back off to this depth before ENTER
-- Maps Ids to the offset from the stack _base_ so we don't have
-- to mess with it after each push/pop.
type BCEnv = Map Id Word -- To find vars on the stack
{-
ppBCEnv :: BCEnv -> SDoc
ppBCEnv p
= text "begin-env"
$$ nest 4 (vcat (map pp_one (sortBy cmp_snd (Map.toList p))))
$$ text "end-env"
where
pp_one (var, offset) = int offset <> colon <+> ppr var <+> ppr (bcIdArgRep var)
cmp_snd x y = compare (snd x) (snd y)
-}
-- Create a BCO and do a spot of peephole optimisation on the insns
-- at the same time.
mkProtoBCO
:: DynFlags
-> name
-> BCInstrList
-> Either [AnnAlt Id VarSet] (AnnExpr Id VarSet)
-> Int
-> Word16
-> [StgWord]
-> Bool -- True <=> is a return point, rather than a function
-> [BcPtr]
-> ProtoBCO name
mkProtoBCO dflags nm instrs_ordlist origin arity bitmap_size bitmap is_ret mallocd_blocks
= ProtoBCO {
protoBCOName = nm,
protoBCOInstrs = maybe_with_stack_check,
protoBCOBitmap = bitmap,
protoBCOBitmapSize = bitmap_size,
protoBCOArity = arity,
protoBCOExpr = origin,
protoBCOPtrs = mallocd_blocks
}
where
-- Overestimate the stack usage (in words) of this BCO,
-- and if >= iNTERP_STACK_CHECK_THRESH, add an explicit
-- stack check. (The interpreter always does a stack check
-- for iNTERP_STACK_CHECK_THRESH words at the start of each
-- BCO anyway, so we only need to add an explicit one in the
-- (hopefully rare) cases when the (overestimated) stack use
-- exceeds iNTERP_STACK_CHECK_THRESH.
maybe_with_stack_check
| is_ret && stack_usage < fromIntegral (aP_STACK_SPLIM dflags) = peep_d
-- don't do stack checks at return points,
-- everything is aggregated up to the top BCO
-- (which must be a function).
-- That is, unless the stack usage is >= AP_STACK_SPLIM,
-- see bug #1466.
| stack_usage >= fromIntegral iNTERP_STACK_CHECK_THRESH
= STKCHECK stack_usage : peep_d
| otherwise
= peep_d -- the supposedly common case
-- We assume that this sum doesn't wrap
stack_usage = sum (map bciStackUse peep_d)
-- Merge local pushes
peep_d = peep (fromOL instrs_ordlist)
peep (PUSH_L off1 : PUSH_L off2 : PUSH_L off3 : rest)
= PUSH_LLL off1 (off2-1) (off3-2) : peep rest
peep (PUSH_L off1 : PUSH_L off2 : rest)
= PUSH_LL off1 (off2-1) : peep rest
peep (i:rest)
= i : peep rest
peep []
= []
argBits :: DynFlags -> [ArgRep] -> [Bool]
argBits _ [] = []
argBits dflags (rep : args)
| isFollowableArg rep = False : argBits dflags args
| otherwise = take (argRepSizeW dflags rep) (repeat True) ++ argBits dflags args
-- -----------------------------------------------------------------------------
-- schemeTopBind
-- Compile code for the right-hand side of a top-level binding
schemeTopBind :: (Id, AnnExpr Id VarSet) -> BcM (ProtoBCO Name)
schemeTopBind (id, rhs)
| Just data_con <- isDataConWorkId_maybe id,
isNullaryRepDataCon data_con = do
dflags <- getDynFlags
-- Special case for the worker of a nullary data con.
-- It'll look like this: Nil = /\a -> Nil a
-- If we feed it into schemeR, we'll get
-- Nil = Nil
-- because mkConAppCode treats nullary constructor applications
-- by just re-using the single top-level definition. So
-- for the worker itself, we must allocate it directly.
-- ioToBc (putStrLn $ "top level BCO")
emitBc (mkProtoBCO dflags (getName id) (toOL [PACK data_con 0, ENTER])
(Right rhs) 0 0 [{-no bitmap-}] False{-not alts-})
| otherwise
= schemeR [{- No free variables -}] (id, rhs)
-- -----------------------------------------------------------------------------
-- schemeR
-- Compile code for a right-hand side, to give a BCO that,
-- when executed with the free variables and arguments on top of the stack,
-- will return with a pointer to the result on top of the stack, after
-- removing the free variables and arguments.
--
-- Park the resulting BCO in the monad. Also requires the
-- variable to which this value was bound, so as to give the
-- resulting BCO a name.
schemeR :: [Id] -- Free vars of the RHS, ordered as they
-- will appear in the thunk. Empty for
-- top-level things, which have no free vars.
-> (Id, AnnExpr Id VarSet)
-> BcM (ProtoBCO Name)
schemeR fvs (nm, rhs)
{-
| trace (showSDoc (
(char ' '
$$ (ppr.filter (not.isTyVar).varSetElems.fst) rhs
$$ pprCoreExpr (deAnnotate rhs)
$$ char ' '
))) False
= undefined
| otherwise
-}
= schemeR_wrk fvs nm rhs (collect rhs)
collect :: AnnExpr Id VarSet -> ([Var], AnnExpr' Id VarSet)
collect (_, e) = go [] e
where
go xs e | Just e' <- bcView e = go xs e'
go xs (AnnLam x (_,e))
| UbxTupleRep _ <- repType (idType x)
= unboxedTupleException
| otherwise
= go (x:xs) e
go xs not_lambda = (reverse xs, not_lambda)
schemeR_wrk :: [Id] -> Id -> AnnExpr Id VarSet -> ([Var], AnnExpr' Var VarSet) -> BcM (ProtoBCO Name)
schemeR_wrk fvs nm original_body (args, body)
= do
dflags <- getDynFlags
let
all_args = reverse args ++ fvs
arity = length all_args
-- all_args are the args in reverse order. We're compiling a function
-- \fv1..fvn x1..xn -> e
-- i.e. the fvs come first
szsw_args = map (fromIntegral . idSizeW dflags) all_args
szw_args = sum szsw_args
p_init = Map.fromList (zip all_args (mkStackOffsets 0 szsw_args))
-- make the arg bitmap
bits = argBits dflags (reverse (map bcIdArgRep all_args))
bitmap_size = genericLength bits
bitmap = mkBitmap dflags bits
body_code <- schemeER_wrk szw_args p_init body
emitBc (mkProtoBCO dflags (getName nm) body_code (Right original_body)
arity bitmap_size bitmap False{-not alts-})
-- introduce break instructions for ticked expressions
schemeER_wrk :: Word -> BCEnv -> AnnExpr' Id VarSet -> BcM BCInstrList
schemeER_wrk d p rhs
| AnnTick (Breakpoint tick_no fvs) (_annot, newRhs) <- rhs
= do code <- schemeE (fromIntegral d) 0 p newRhs
arr <- getBreakArray
this_mod <- getCurrentModule
let idOffSets = getVarOffSets d p fvs
let breakInfo = BreakInfo
{ breakInfo_module = this_mod
, breakInfo_number = tick_no
, breakInfo_vars = idOffSets
, breakInfo_resty = exprType (deAnnotate' newRhs)
}
let breakInstr = case arr of
BA arr# ->
BRK_FUN arr# (fromIntegral tick_no) breakInfo
return $ breakInstr `consOL` code
| otherwise = schemeE (fromIntegral d) 0 p rhs
getVarOffSets :: Word -> BCEnv -> [Id] -> [(Id, Word16)]
getVarOffSets d p = catMaybes . map (getOffSet d p)
getOffSet :: Word -> BCEnv -> Id -> Maybe (Id, Word16)
getOffSet d env id
= case lookupBCEnv_maybe id env of
Nothing -> Nothing
Just offset -> Just (id, trunc16 $ d - offset)
trunc16 :: Word -> Word16
trunc16 w
| w > fromIntegral (maxBound :: Word16)
= panic "stack depth overflow"
| otherwise
= fromIntegral w
fvsToEnv :: BCEnv -> VarSet -> [Id]
-- Takes the free variables of a right-hand side, and
-- delivers an ordered list of the local variables that will
-- be captured in the thunk for the RHS
-- The BCEnv argument tells which variables are in the local
-- environment: these are the ones that should be captured
--
-- The code that constructs the thunk, and the code that executes
-- it, have to agree about this layout
fvsToEnv p fvs = [v | v <- varSetElems fvs,
isId v, -- Could be a type variable
v `Map.member` p]
-- -----------------------------------------------------------------------------
-- schemeE
returnUnboxedAtom :: Word -> Sequel -> BCEnv
-> AnnExpr' Id VarSet -> ArgRep
-> BcM BCInstrList
-- Returning an unlifted value.
-- Heave it on the stack, SLIDE, and RETURN.
returnUnboxedAtom d s p e e_rep
= do (push, szw) <- pushAtom d p e
return (push -- value onto stack
`appOL` mkSLIDE szw (d-s) -- clear to sequel
`snocOL` RETURN_UBX e_rep) -- go
-- Compile code to apply the given expression to the remaining args
-- on the stack, returning a HNF.
schemeE :: Word -> Sequel -> BCEnv -> AnnExpr' Id VarSet -> BcM BCInstrList
schemeE d s p e
| Just e' <- bcView e
= schemeE d s p e'
-- Delegate tail-calls to schemeT.
schemeE d s p e@(AnnApp _ _) = schemeT d s p e
schemeE d s p e@(AnnLit lit) = returnUnboxedAtom d s p e (typeArgRep (literalType lit))
schemeE d s p e@(AnnCoercion {}) = returnUnboxedAtom d s p e V
schemeE d s p e@(AnnVar v)
| isUnLiftedType (idType v) = returnUnboxedAtom d s p e (bcIdArgRep v)
| otherwise = schemeT d s p e
schemeE d s p (AnnLet (AnnNonRec x (_,rhs)) (_,body))
| (AnnVar v, args_r_to_l) <- splitApp rhs,
Just data_con <- isDataConWorkId_maybe v,
dataConRepArity data_con == length args_r_to_l
= do -- Special case for a non-recursive let whose RHS is a
        -- saturated constructor application.
-- Just allocate the constructor and carry on
alloc_code <- mkConAppCode d s p data_con args_r_to_l
body_code <- schemeE (d+1) s (Map.insert x d p) body
return (alloc_code `appOL` body_code)
-- General case for let. Generates correct, if inefficient, code in
-- all situations.
schemeE d s p (AnnLet binds (_,body)) = do
dflags <- getDynFlags
let (xs,rhss) = case binds of AnnNonRec x rhs -> ([x],[rhs])
AnnRec xs_n_rhss -> unzip xs_n_rhss
n_binds = genericLength xs
fvss = map (fvsToEnv p' . fst) rhss
-- Sizes of free vars
sizes = map (\rhs_fvs -> sum (map (fromIntegral . idSizeW dflags) rhs_fvs)) fvss
-- the arity of each rhs
arities = map (genericLength . fst . collect) rhss
-- This p', d' defn is safe because all the items being pushed
-- are ptrs, so all have size 1. d' and p' reflect the stack
-- after the closures have been allocated in the heap (but not
-- filled in), and pointers to them parked on the stack.
p' = Map.insertList (zipE xs (mkStackOffsets d (genericReplicate n_binds 1))) p
d' = d + fromIntegral n_binds
zipE = zipEqual "schemeE"
-- ToDo: don't build thunks for things with no free variables
build_thunk _ [] size bco off arity
= return (PUSH_BCO bco `consOL` unitOL (mkap (off+size) size))
where
mkap | arity == 0 = MKAP
| otherwise = MKPAP
build_thunk dd (fv:fvs) size bco off arity = do
(push_code, pushed_szw) <- pushAtom dd p' (AnnVar fv)
more_push_code <- build_thunk (dd + fromIntegral pushed_szw) fvs size bco off arity
return (push_code `appOL` more_push_code)
alloc_code = toOL (zipWith mkAlloc sizes arities)
where mkAlloc sz 0
| is_tick = ALLOC_AP_NOUPD sz
| otherwise = ALLOC_AP sz
mkAlloc sz arity = ALLOC_PAP arity sz
is_tick = case binds of
AnnNonRec id _ -> occNameFS (getOccName id) == tickFS
_other -> False
compile_bind d' fvs x rhs size arity off = do
bco <- schemeR fvs (x,rhs)
build_thunk d' fvs size bco off arity
compile_binds =
[ compile_bind d' fvs x rhs size arity n
| (fvs, x, rhs, size, arity, n) <-
zip6 fvss xs rhss sizes arities [n_binds, n_binds-1 .. 1]
]
body_code <- schemeE d' s p' body
thunk_codes <- sequence compile_binds
return (alloc_code `appOL` concatOL thunk_codes `appOL` body_code)
-- introduce a let binding for a ticked case expression. This rule
-- *should* only fire when the expression was not already let-bound
-- (the code gen for let bindings should take care of that). Todo: we
-- call exprFreeVars on a deAnnotated expression, this may not be the
-- best way to calculate the free vars but it seemed like the least
-- intrusive thing to do
schemeE d s p exp@(AnnTick (Breakpoint _id _fvs) _rhs)
= if isUnLiftedType ty
then do
-- If the result type is unlifted, then we must generate
-- let f = \s . tick<n> e
-- in f realWorld#
-- When we stop at the breakpoint, _result will have an unlifted
-- type and hence won't be bound in the environment, but the
-- breakpoint will otherwise work fine.
id <- newId (mkFunTy realWorldStatePrimTy ty)
st <- newId realWorldStatePrimTy
let letExp = AnnLet (AnnNonRec id (fvs, AnnLam st (emptyVarSet, exp)))
(emptyVarSet, (AnnApp (emptyVarSet, AnnVar id)
(emptyVarSet, AnnVar realWorldPrimId)))
schemeE d s p letExp
else do
id <- newId ty
-- Todo: is emptyVarSet correct on the next line?
let letExp = AnnLet (AnnNonRec id (fvs, exp)) (emptyVarSet, AnnVar id)
schemeE d s p letExp
where exp' = deAnnotate' exp
fvs = exprFreeVars exp'
ty = exprType exp'
-- ignore other kinds of tick
schemeE d s p (AnnTick _ (_, rhs)) = schemeE d s p rhs
schemeE d s p (AnnCase (_,scrut) _ _ []) = schemeE d s p scrut
-- no alts: scrut is guaranteed to diverge
schemeE d s p (AnnCase scrut bndr _ [(DataAlt dc, [bind1, bind2], rhs)])
| isUnboxedTupleCon dc
, UnaryRep rep_ty1 <- repType (idType bind1), UnaryRep rep_ty2 <- repType (idType bind2)
-- Convert
-- case .... of x { (# V'd-thing, a #) -> ... }
-- to
-- case .... of a { DEFAULT -> ... }
  -- because the return convention for both is identical.
--
-- Note that it does not matter losing the void-rep thing from the
-- envt (it won't be bound now) because we never look such things up.
, Just res <- case () of
_ | VoidRep <- typePrimRep rep_ty1
-> Just $ doCase d s p scrut bind2 [(DEFAULT, [], rhs)] (Just bndr){-unboxed tuple-}
| VoidRep <- typePrimRep rep_ty2
-> Just $ doCase d s p scrut bind1 [(DEFAULT, [], rhs)] (Just bndr){-unboxed tuple-}
| otherwise
-> Nothing
= res
schemeE d s p (AnnCase scrut bndr _ [(DataAlt dc, [bind1], rhs)])
| isUnboxedTupleCon dc, UnaryRep _ <- repType (idType bind1)
-- Similarly, convert
-- case .... of x { (# a #) -> ... }
-- to
-- case .... of a { DEFAULT -> ... }
= --trace "automagic mashing of case alts (# a #)" $
doCase d s p scrut bind1 [(DEFAULT, [], rhs)] (Just bndr){-unboxed tuple-}
schemeE d s p (AnnCase scrut bndr _ [(DEFAULT, [], rhs)])
| Just (tc, tys) <- splitTyConApp_maybe (idType bndr)
, isUnboxedTupleTyCon tc
, Just res <- case tys of
[ty] | UnaryRep _ <- repType ty
, let bind = bndr `setIdType` ty
-> Just $ doCase d s p scrut bind [(DEFAULT, [], rhs)] (Just bndr){-unboxed tuple-}
[ty1, ty2] | UnaryRep rep_ty1 <- repType ty1
, UnaryRep rep_ty2 <- repType ty2
-> case () of
_ | VoidRep <- typePrimRep rep_ty1
, let bind2 = bndr `setIdType` ty2
-> Just $ doCase d s p scrut bind2 [(DEFAULT, [], rhs)] (Just bndr){-unboxed tuple-}
| VoidRep <- typePrimRep rep_ty2
, let bind1 = bndr `setIdType` ty1
-> Just $ doCase d s p scrut bind1 [(DEFAULT, [], rhs)] (Just bndr){-unboxed tuple-}
| otherwise
-> Nothing
_ -> Nothing
= res
schemeE d s p (AnnCase scrut bndr _ alts)
= doCase d s p scrut bndr alts Nothing{-not an unboxed tuple-}
schemeE _ _ _ expr
= pprPanic "ByteCodeGen.schemeE: unhandled case"
(pprCoreExpr (deAnnotate' expr))
{-
Ticked Expressions
------------------
The idea is that the "breakpoint<n,fvs> E" is really just an annotation on
the code. When we find such a thing, we pull out the useful information,
and then compile the code as if it was just the expression E.
-}
-- Compile code to do a tail call. Specifically, push the fn,
-- slide the on-stack app back down to the sequel depth,
-- and enter. Four cases:
--
-- 0. (Nasty hack).
-- An application "GHC.Prim.tagToEnum# <type> unboxed-int".
-- The int will be on the stack. Generate a code sequence
-- to convert it to the relevant constructor, SLIDE and ENTER.
--
-- 1. The fn denotes a ccall. Defer to generateCCall.
--
-- 2. (Another nasty hack). Spot (# a::V, b #) and treat
-- it simply as b -- since the representations are identical
-- (the V takes up zero stack space). Also, spot
-- (# b #) and treat it as b.
--
-- 3. Application of a constructor, by defn saturated.
-- Split the args into ptrs and non-ptrs, and push the nonptrs,
-- then the ptrs, and then do PACK and RETURN.
--
-- 4. Otherwise, it must be a function call. Push the args
-- right to left, SLIDE and ENTER.
schemeT :: Word -- Stack depth
-> Sequel -- Sequel depth
-> BCEnv -- stack env
-> AnnExpr' Id VarSet
-> BcM BCInstrList
schemeT d s p app
-- | trace ("schemeT: env in = \n" ++ showSDocDebug (ppBCEnv p)) False
-- = panic "schemeT ?!?!"
-- | trace ("\nschemeT\n" ++ showSDoc (pprCoreExpr (deAnnotate' app)) ++ "\n") False
-- = error "?!?!"
-- Case 0
| Just (arg, constr_names) <- maybe_is_tagToEnum_call app
= implement_tagToId d s p arg constr_names
-- Case 1
| Just (CCall ccall_spec) <- isFCallId_maybe fn
= generateCCall d s p ccall_spec fn args_r_to_l
-- Case 2: Constructor application
| Just con <- maybe_saturated_dcon,
isUnboxedTupleCon con
= case args_r_to_l of
[arg1,arg2] | isVAtom arg1 ->
unboxedTupleReturn d s p arg2
[arg1,arg2] | isVAtom arg2 ->
unboxedTupleReturn d s p arg1
_other -> unboxedTupleException
-- Case 3: Ordinary data constructor
| Just con <- maybe_saturated_dcon
= do alloc_con <- mkConAppCode d s p con args_r_to_l
return (alloc_con `appOL`
mkSLIDE 1 (d - s) `snocOL`
ENTER)
-- Case 4: Tail call of function
| otherwise
= doTailCall d s p fn args_r_to_l
where
-- Extract the args (R->L) and fn
-- The function will necessarily be a variable,
-- because we are compiling a tail call
(AnnVar fn, args_r_to_l) = splitApp app
-- Only consider this to be a constructor application iff it is
-- saturated. Otherwise, we'll call the constructor wrapper.
n_args = length args_r_to_l
maybe_saturated_dcon
= case isDataConWorkId_maybe fn of
Just con | dataConRepArity con == n_args -> Just con
_ -> Nothing
-- -----------------------------------------------------------------------------
-- Generate code to build a constructor application,
-- leaving it on top of the stack
mkConAppCode :: Word -> Sequel -> BCEnv
-> DataCon -- The data constructor
-> [AnnExpr' Id VarSet] -- Args, in *reverse* order
-> BcM BCInstrList
mkConAppCode _ _ _ con [] -- Nullary constructor
= ASSERT( isNullaryRepDataCon con )
return (unitOL (PUSH_G (getName (dataConWorkId con))))
-- Instead of doing a PACK, which would allocate a fresh
-- copy of this constructor, use the single shared version.
mkConAppCode orig_d _ p con args_r_to_l
= ASSERT( dataConRepArity con == length args_r_to_l )
do_pushery orig_d (non_ptr_args ++ ptr_args)
where
-- The args are already in reverse order, which is the way PACK
-- expects them to be. We must push the non-ptrs after the ptrs.
(ptr_args, non_ptr_args) = partition isPtrAtom args_r_to_l
do_pushery d (arg:args)
= do (push, arg_words) <- pushAtom d p arg
more_push_code <- do_pushery (d + fromIntegral arg_words) args
return (push `appOL` more_push_code)
do_pushery d []
= return (unitOL (PACK con n_arg_words))
where
n_arg_words = trunc16 $ d - orig_d
-- -----------------------------------------------------------------------------
-- Returning an unboxed tuple with one non-void component (the only
-- case we can handle).
--
-- Remember, we don't want to *evaluate* the component that is being
-- returned, even if it is a pointed type. We always just return.
unboxedTupleReturn
:: Word -> Sequel -> BCEnv
-> AnnExpr' Id VarSet -> BcM BCInstrList
unboxedTupleReturn d s p arg = returnUnboxedAtom d s p arg (atomRep arg)
-- -----------------------------------------------------------------------------
-- Generate code for a tail-call
doTailCall
:: Word -> Sequel -> BCEnv
-> Id -> [AnnExpr' Id VarSet]
-> BcM BCInstrList
doTailCall init_d s p fn args
= do_pushes init_d args (map atomRep args)
where
do_pushes d [] reps = do
ASSERT( null reps ) return ()
(push_fn, sz) <- pushAtom d p (AnnVar fn)
ASSERT( sz == 1 ) return ()
return (push_fn `appOL` (
mkSLIDE (trunc16 $ d - init_d + 1) (init_d - s) `appOL`
unitOL ENTER))
do_pushes d args reps = do
let (push_apply, n, rest_of_reps) = findPushSeq reps
(these_args, rest_of_args) = splitAt n args
(next_d, push_code) <- push_seq d these_args
instrs <- do_pushes (next_d + 1) rest_of_args rest_of_reps
-- ^^^ for the PUSH_APPLY_ instruction
return (push_code `appOL` (push_apply `consOL` instrs))
push_seq d [] = return (d, nilOL)
push_seq d (arg:args) = do
(push_code, sz) <- pushAtom d p arg
(final_d, more_push_code) <- push_seq (d + fromIntegral sz) args
return (final_d, push_code `appOL` more_push_code)
-- v. similar to CgStackery.findMatch, ToDo: merge
findPushSeq :: [ArgRep] -> (BCInstr, Int, [ArgRep])
findPushSeq (P: P: P: P: P: P: rest)
= (PUSH_APPLY_PPPPPP, 6, rest)
findPushSeq (P: P: P: P: P: rest)
= (PUSH_APPLY_PPPPP, 5, rest)
findPushSeq (P: P: P: P: rest)
= (PUSH_APPLY_PPPP, 4, rest)
findPushSeq (P: P: P: rest)
= (PUSH_APPLY_PPP, 3, rest)
findPushSeq (P: P: rest)
= (PUSH_APPLY_PP, 2, rest)
findPushSeq (P: rest)
= (PUSH_APPLY_P, 1, rest)
findPushSeq (V: rest)
= (PUSH_APPLY_V, 1, rest)
findPushSeq (N: rest)
= (PUSH_APPLY_N, 1, rest)
findPushSeq (F: rest)
= (PUSH_APPLY_F, 1, rest)
findPushSeq (D: rest)
= (PUSH_APPLY_D, 1, rest)
findPushSeq (L: rest)
= (PUSH_APPLY_L, 1, rest)
findPushSeq _
= panic "ByteCodeGen.findPushSeq"
-- -----------------------------------------------------------------------------
-- Case expressions
doCase :: Word -> Sequel -> BCEnv
-> AnnExpr Id VarSet -> Id -> [AnnAlt Id VarSet]
-> Maybe Id -- Just x <=> is an unboxed tuple case with scrut binder, don't enter the result
-> BcM BCInstrList
doCase d s p (_,scrut) bndr alts is_unboxed_tuple
| UbxTupleRep _ <- repType (idType bndr)
= unboxedTupleException
| otherwise
= do
dflags <- getDynFlags
let
-- Top of stack is the return itbl, as usual.
-- underneath it is the pointer to the alt_code BCO.
-- When an alt is entered, it assumes the returned value is
-- on top of the itbl.
ret_frame_sizeW :: Word
ret_frame_sizeW = 2
-- An unlifted value gets an extra info table pushed on top
-- when it is returned.
unlifted_itbl_sizeW :: Word
unlifted_itbl_sizeW | isAlgCase = 0
| otherwise = 1
-- depth of stack after the return value has been pushed
d_bndr = d + ret_frame_sizeW + fromIntegral (idSizeW dflags bndr)
-- depth of stack after the extra info table for an unboxed return
-- has been pushed, if any. This is the stack depth at the
-- continuation.
d_alts = d_bndr + unlifted_itbl_sizeW
-- Env in which to compile the alts, not including
-- any vars bound by the alts themselves
d_bndr' = fromIntegral d_bndr - 1
p_alts0 = Map.insert bndr d_bndr' p
p_alts = case is_unboxed_tuple of
Just ubx_bndr -> Map.insert ubx_bndr d_bndr' p_alts0
Nothing -> p_alts0
bndr_ty = idType bndr
isAlgCase = not (isUnLiftedType bndr_ty) && isNothing is_unboxed_tuple
-- given an alt, return a discr and code for it.
codeAlt (DEFAULT, _, (_,rhs))
= do rhs_code <- schemeE d_alts s p_alts rhs
return (NoDiscr, rhs_code)
codeAlt alt@(_, bndrs, (_,rhs))
-- primitive or nullary constructor alt: no need to UNPACK
| null real_bndrs = do
rhs_code <- schemeE d_alts s p_alts rhs
return (my_discr alt, rhs_code)
| any (\bndr -> case repType (idType bndr) of UbxTupleRep _ -> True; _ -> False) bndrs
= unboxedTupleException
-- algebraic alt with some binders
| otherwise =
let
(ptrs,nptrs) = partition (isFollowableArg.bcIdArgRep) real_bndrs
ptr_sizes = map (fromIntegral . idSizeW dflags) ptrs
nptrs_sizes = map (fromIntegral . idSizeW dflags) nptrs
bind_sizes = ptr_sizes ++ nptrs_sizes
size = sum ptr_sizes + sum nptrs_sizes
-- the UNPACK instruction unpacks in reverse order...
p' = Map.insertList
(zip (reverse (ptrs ++ nptrs))
(mkStackOffsets d_alts (reverse bind_sizes)))
p_alts
in do
MASSERT(isAlgCase)
rhs_code <- schemeE (d_alts + size) s p' rhs
return (my_discr alt, unitOL (UNPACK (trunc16 size)) `appOL` rhs_code)
where
real_bndrs = filterOut isTyVar bndrs
my_discr (DEFAULT, _, _) = NoDiscr {-shouldn't really happen-}
my_discr (DataAlt dc, _, _)
| isUnboxedTupleCon dc
= unboxedTupleException
| otherwise
= DiscrP (fromIntegral (dataConTag dc - fIRST_TAG))
my_discr (LitAlt l, _, _)
= case l of MachInt i -> DiscrI (fromInteger i)
MachWord w -> DiscrW (fromInteger w)
MachFloat r -> DiscrF (fromRational r)
MachDouble r -> DiscrD (fromRational r)
MachChar i -> DiscrI (ord i)
_ -> pprPanic "schemeE(AnnCase).my_discr" (ppr l)
maybe_ncons
| not isAlgCase = Nothing
| otherwise
= case [dc | (DataAlt dc, _, _) <- alts] of
[] -> Nothing
(dc:_) -> Just (tyConFamilySize (dataConTyCon dc))
-- the bitmap is relative to stack depth d, i.e. before the
-- BCO, info table and return value are pushed on.
-- This bit of code is v. similar to buildLivenessMask in CgBindery,
-- except that here we build the bitmap from the known bindings of
-- things that are pointers, whereas in CgBindery the code builds the
-- bitmap from the free slots and unboxed bindings.
-- (ToDo: merge?)
--
-- NOTE [7/12/2006] bug #1013, testcase ghci/should_run/ghci002.
-- The bitmap must cover the portion of the stack up to the sequel only.
-- Previously we were building a bitmap for the whole depth (d), but we
-- really want a bitmap up to depth (d-s). This affects compilation of
-- case-of-case expressions, which is the only time we can be compiling a
-- case expression with s /= 0.
bitmap_size = trunc16 $ d-s
bitmap_size' :: Int
bitmap_size' = fromIntegral bitmap_size
bitmap = intsToReverseBitmap dflags bitmap_size'{-size-}
(sort (filter (< bitmap_size') rel_slots))
where
binds = Map.toList p
-- NB: unboxed tuple cases bind the scrut binder to the same offset
-- as one of the alt binders, so we have to remove any duplicates here:
rel_slots = nub $ map fromIntegral $ concat (map spread binds)
spread (id, offset) | isFollowableArg (bcIdArgRep id) = [ rel_offset ]
| otherwise = []
where rel_offset = trunc16 $ d - fromIntegral offset - 1
alt_stuff <- mapM codeAlt alts
alt_final <- mkMultiBranch maybe_ncons alt_stuff
let
alt_bco_name = getName bndr
alt_bco = mkProtoBCO dflags alt_bco_name alt_final (Left alts)
0{-no arity-} bitmap_size bitmap True{-is alts-}
-- trace ("case: bndr = " ++ showSDocDebug (ppr bndr) ++ "\ndepth = " ++ show d ++ "\nenv = \n" ++ showSDocDebug (ppBCEnv p) ++
-- "\n bitmap = " ++ show bitmap) $ do
scrut_code <- schemeE (d + ret_frame_sizeW)
(d + ret_frame_sizeW)
p scrut
alt_bco' <- emitBc alt_bco
let push_alts
| isAlgCase = PUSH_ALTS alt_bco'
| otherwise = PUSH_ALTS_UNLIFTED alt_bco' (typeArgRep bndr_ty)
return (push_alts `consOL` scrut_code)
-- -----------------------------------------------------------------------------
-- Deal with a CCall.
-- Taggedly push the args onto the stack R->L,
-- dereferencing ForeignObj#s and adjusting addrs to point to
-- payloads in Ptr/Byte arrays. Then, generate the marshalling
-- (machine) code for the ccall, and create bytecodes to call that and
-- then return in the right way.
generateCCall :: Word -> Sequel -- stack and sequel depths
-> BCEnv
-> CCallSpec -- where to call
-> Id -- of target, for type info
-> [AnnExpr' Id VarSet] -- args (atoms)
-> BcM BCInstrList
generateCCall d0 s p (CCallSpec target cconv safety) fn args_r_to_l
= do
dflags <- getDynFlags
let
-- useful constants
addr_sizeW :: Word16
addr_sizeW = fromIntegral (argRepSizeW dflags N)
-- Get the args on the stack, with tags and suitably
-- dereferenced for the CCall. For each arg, return the
-- depth to the first word of the bits for that arg, and the
-- ArgRep of what was actually pushed.
pargs _ [] = return []
pargs d (a:az)
= let UnaryRep arg_ty = repType (exprType (deAnnotate' a))
in case tyConAppTyCon_maybe arg_ty of
-- Don't push the FO; instead push the Addr# it
-- contains.
Just t
| t == arrayPrimTyCon || t == mutableArrayPrimTyCon
-> do rest <- pargs (d + fromIntegral addr_sizeW) az
code <- parg_ArrayishRep (fromIntegral (arrPtrsHdrSize dflags)) d p a
return ((code,AddrRep):rest)
| t == smallArrayPrimTyCon || t == smallMutableArrayPrimTyCon
-> do rest <- pargs (d + fromIntegral addr_sizeW) az
code <- parg_ArrayishRep (fromIntegral (smallArrPtrsHdrSize dflags)) d p a
return ((code,AddrRep):rest)
| t == byteArrayPrimTyCon || t == mutableByteArrayPrimTyCon
-> do rest <- pargs (d + fromIntegral addr_sizeW) az
code <- parg_ArrayishRep (fromIntegral (arrWordsHdrSize dflags)) d p a
return ((code,AddrRep):rest)
-- Default case: push taggedly, but otherwise intact.
_
-> do (code_a, sz_a) <- pushAtom d p a
rest <- pargs (d + fromIntegral sz_a) az
return ((code_a, atomPrimRep a) : rest)
-- Do magic for Ptr/Byte arrays. Push a ptr to the array on
-- the stack but then advance it over the headers, so as to
-- point to the payload.
parg_ArrayishRep :: Word16 -> Word -> BCEnv -> AnnExpr' Id VarSet
-> BcM BCInstrList
parg_ArrayishRep hdrSize d p a
= do (push_fo, _) <- pushAtom d p a
-- The ptr points at the header. Advance it over the
-- header and then pretend this is an Addr#.
return (push_fo `snocOL` SWIZZLE 0 hdrSize)
code_n_reps <- pargs d0 args_r_to_l
let
(pushs_arg, a_reps_pushed_r_to_l) = unzip code_n_reps
a_reps_sizeW = fromIntegral (sum (map (primRepSizeW dflags) a_reps_pushed_r_to_l))
push_args = concatOL pushs_arg
d_after_args = d0 + a_reps_sizeW
a_reps_pushed_RAW
| null a_reps_pushed_r_to_l || head a_reps_pushed_r_to_l /= VoidRep
= panic "ByteCodeGen.generateCCall: missing or invalid World token?"
| otherwise
= reverse (tail a_reps_pushed_r_to_l)
-- Now: a_reps_pushed_RAW are the reps which are actually on the stack.
-- push_args is the code to do that.
-- d_after_args is the stack depth once the args are on.
-- Get the result rep.
(returns_void, r_rep)
= case maybe_getCCallReturnRep (idType fn) of
Nothing -> (True, VoidRep)
Just rr -> (False, rr)
{-
Because the Haskell stack grows down, the a_reps refer to
lowest to highest addresses in that order. The args for the call
are on the stack. Now push an unboxed Addr# indicating
the C function to call. Then push a dummy placeholder for the
result. Finally, emit a CCALL insn with an offset pointing to the
Addr# just pushed, and a literal field holding the mallocville
address of the piece of marshalling code we generate.
So, just prior to the CCALL insn, the stack looks like this
(growing down, as usual):
<arg_n>
...
<arg_1>
Addr# address_of_C_fn
<placeholder-for-result#> (must be an unboxed type)
The interpreter then calls the marshall code mentioned
in the CCALL insn, passing it (& <placeholder-for-result#>),
that is, the addr of the topmost word in the stack.
When this returns, the placeholder will have been
filled in. The placeholder is slid down to the sequel
depth, and we RETURN.
This arrangement makes it simple to do f-i-dynamic since the Addr#
value is the first arg anyway.
The marshalling code is generated specifically for this
call site, and so knows exactly the (Haskell) stack
offsets of the args, fn address and placeholder. It
copies the args to the C stack, calls the stacked addr,
and parks the result back in the placeholder. The interpreter
calls it as a normal C call, assuming it has a signature
void marshall_code ( StgWord* ptr_to_top_of_stack )
-}
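-- A rough illustration (not from the original source): for a foreign
-- import such as
--     foreign import ccall "inc" c_inc
--         :: Int# -> State# RealWorld -> (# State# RealWorld, Int# #)
-- maybe_getCCallReturnRep yields Just IntRep, the leading VoidRep for the
-- State# token is stripped off, so a_reps is [IntRep]; just before the
-- CCALL insn the stack holds the Int# argument, the Addr# of c_inc and a
-- one-word placeholder for the Int# result.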
-- resolve static address
get_target_info = do
case target of
DynamicTarget
-> return (False, panic "ByteCodeGen.generateCCall(dyn)")
StaticTarget _ _ False ->
panic "generateCCall: unexpected FFI value import"
StaticTarget target _ True
-> do res <- ioToBc (lookupStaticPtr stdcall_adj_target)
return (True, res)
where
stdcall_adj_target
| OSMinGW32 <- platformOS (targetPlatform dflags)
, StdCallConv <- cconv
= let size = fromIntegral a_reps_sizeW * wORD_SIZE dflags in
mkFastString (unpackFS target ++ '@':show size)
| otherwise
= target
(is_static, static_target_addr) <- get_target_info
let
-- Get the arg reps, zapping the leading Addr# in the dynamic case
a_reps -- | trace (showSDoc (ppr a_reps_pushed_RAW)) False = error "???"
| is_static = a_reps_pushed_RAW
| otherwise = if null a_reps_pushed_RAW
then panic "ByteCodeGen.generateCCall: dyn with no args"
else tail a_reps_pushed_RAW
-- push the Addr#
(push_Addr, d_after_Addr)
| is_static
= (toOL [PUSH_UBX (Right static_target_addr) addr_sizeW],
d_after_args + fromIntegral addr_sizeW)
| otherwise -- is already on the stack
= (nilOL, d_after_args)
-- Push the return placeholder. For a call returning nothing,
-- this is a V (tag).
r_sizeW = fromIntegral (primRepSizeW dflags r_rep)
d_after_r = d_after_Addr + fromIntegral r_sizeW
r_lit = mkDummyLiteral r_rep
push_r = (if returns_void
then nilOL
else unitOL (PUSH_UBX (Left r_lit) r_sizeW))
-- generate the marshalling code we're going to call
-- Offset of the next stack frame down the stack. The CCALL
-- instruction needs to describe the chunk of stack containing
-- the ccall args to the GC, so it needs to know how large it
-- is. See comment in Interpreter.c with the CCALL instruction.
stk_offset = trunc16 $ d_after_r - s
-- the only difference in libffi mode is that we prepare a cif
-- describing the call type by calling libffi, and we attach the
-- address of this to the CCALL instruction.
token <- ioToBc $ prepForeignCall dflags cconv a_reps r_rep
let addr_of_marshaller = castPtrToFunPtr token
recordItblMallocBc (ItblPtr (castFunPtrToPtr addr_of_marshaller))
let
-- do the call
do_call = unitOL (CCALL stk_offset (castFunPtrToPtr addr_of_marshaller)
(fromIntegral (fromEnum (playInterruptible safety))))
-- slide and return
wrapup = mkSLIDE r_sizeW (d_after_r - fromIntegral r_sizeW - s)
`snocOL` RETURN_UBX (toArgRep r_rep)
--trace (show (arg1_offW, args_offW , (map argRepSizeW a_reps) )) $
return (
push_args `appOL`
push_Addr `appOL` push_r `appOL` do_call `appOL` wrapup
)
-- Make a dummy literal, to be used as a placeholder for FFI return
-- values on the stack.
mkDummyLiteral :: PrimRep -> Literal
mkDummyLiteral pr
= case pr of
IntRep -> MachInt 0
WordRep -> MachWord 0
AddrRep -> MachNullAddr
DoubleRep -> MachDouble 0
FloatRep -> MachFloat 0
Int64Rep -> MachInt64 0
Word64Rep -> MachWord64 0
_ -> panic "mkDummyLiteral"
-- Convert (eg)
-- GHC.Prim.Char# -> GHC.Prim.State# GHC.Prim.RealWorld
-- -> (# GHC.Prim.State# GHC.Prim.RealWorld, GHC.Prim.Int# #)
--
-- to Just IntRep
-- and check that an unboxed pair is returned wherein the first arg is V'd.
--
-- Alternatively, for call-targets returning nothing, convert
--
-- GHC.Prim.Char# -> GHC.Prim.State# GHC.Prim.RealWorld
-- -> (# GHC.Prim.State# GHC.Prim.RealWorld #)
--
-- to Nothing
maybe_getCCallReturnRep :: Type -> Maybe PrimRep
maybe_getCCallReturnRep fn_ty
= let (_a_tys, r_ty) = splitFunTys (dropForAlls fn_ty)
maybe_r_rep_to_go
= if isSingleton r_reps then Nothing else Just (r_reps !! 1)
r_reps = case repType r_ty of
UbxTupleRep reps -> map typePrimRep reps
UnaryRep _ -> blargh
ok = ( ( r_reps `lengthIs` 2 && VoidRep == head r_reps)
|| r_reps == [VoidRep] )
&& case maybe_r_rep_to_go of
Nothing -> True
Just r_rep -> r_rep /= PtrRep
-- if it was, it would be impossible
-- to create a valid return value
-- placeholder on the stack
blargh :: a -- Used at more than one type
blargh = pprPanic "maybe_getCCallReturn: can't handle:"
(pprType fn_ty)
in
--trace (showSDoc (ppr (a_reps, r_reps))) $
if ok then maybe_r_rep_to_go else blargh
maybe_is_tagToEnum_call :: AnnExpr' Id VarSet -> Maybe (AnnExpr' Id VarSet, [Name])
-- Detect and extract relevant info for the tagToEnum kludge.
maybe_is_tagToEnum_call app
| AnnApp (_, AnnApp (_, AnnVar v) (_, AnnType t)) arg <- app
, Just TagToEnumOp <- isPrimOpId_maybe v
= Just (snd arg, extract_constr_Names t)
| otherwise
= Nothing
where
extract_constr_Names ty
| UnaryRep rep_ty <- repType ty
, Just tyc <- tyConAppTyCon_maybe rep_ty,
isDataTyCon tyc
= map (getName . dataConWorkId) (tyConDataCons tyc)
-- NOTE: use the worker name, not the source name of
-- the DataCon. See DataCon.lhs for details.
| otherwise
= pprPanic "maybe_is_tagToEnum_call.extract_constr_Ids" (ppr ty)
{- -----------------------------------------------------------------------------
Note [Implementing tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(implement_tagToId arg names) compiles code which takes an argument
'arg', (call it i), and enters the i'th closure in the supplied list
as a consequence. The [Name] is a list of the constructors of this
(enumeration) type.
The code we generate is this:
push arg
push bogus-word
TESTEQ_I 0 L1
PUSH_G <lbl for first data con>
JMP L_Exit
L1: TESTEQ_I 1 L2
PUSH_G <lbl for second data con>
JMP L_Exit
...etc...
Ln: TESTEQ_I n L_fail
PUSH_G <lbl for last data con>
JMP L_Exit
L_fail: CASEFAIL
L_exit: SLIDE 1 n
ENTER
The 'bogus-word' push is because TESTEQ_I expects the top of the stack
to have an info-table, and the next word to have the value to be
tested. This is very weird, but it's the way it is right now. See
Interpreter.c. We don't actually need an info-table here; we just
need to have the argument to be one-from-top on the stack, hence pushing
a 1-word null. See Trac #8383.
-}
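-- As a concrete (illustrative) instance: for (tagToEnum# i) :: Ordering the
-- supplied names are the workers of LT, EQ and GT, so the generated code is
-- three TESTEQ_I / PUSH_G / JMP triples selecting the corresponding closure.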
implement_tagToId :: Word -> Sequel -> BCEnv
-> AnnExpr' Id VarSet -> [Name] -> BcM BCInstrList
-- See Note [Implementing tagToEnum#]
implement_tagToId d s p arg names
= ASSERT( notNull names )
do (push_arg, arg_words) <- pushAtom d p arg
labels <- getLabelsBc (genericLength names)
label_fail <- getLabelBc
label_exit <- getLabelBc
let infos = zip4 labels (tail labels ++ [label_fail])
[0 ..] names
steps = map (mkStep label_exit) infos
return (push_arg
`appOL` unitOL (PUSH_UBX (Left MachNullAddr) 1)
-- Push bogus word (see Note [Implementing tagToEnum#])
`appOL` concatOL steps
`appOL` toOL [ LABEL label_fail, CASEFAIL,
LABEL label_exit ]
`appOL` mkSLIDE 1 (d - s + fromIntegral arg_words + 1)
-- "+1" to account for bogus word
-- (see Note [Implementing tagToEnum#])
`appOL` unitOL ENTER)
where
mkStep l_exit (my_label, next_label, n, name_for_n)
= toOL [LABEL my_label,
TESTEQ_I n next_label,
PUSH_G name_for_n,
JMP l_exit]
-- -----------------------------------------------------------------------------
-- pushAtom
-- Push an atom onto the stack, returning suitable code & number of
-- stack words used.
--
-- The env p must map each variable to the highest-numbered stack
-- slot for it. For example, if the stack has depth 4 and we
-- tagged-ly push (v :: Int#) on it, the value will be in stack[4],
-- the tag in stack[5], the stack will have depth 6, and p must map v
-- to 5 and not to 4. Stack locations are numbered from zero, so a
-- depth 6 stack has valid words 0 .. 5.
pushAtom :: Word -> BCEnv -> AnnExpr' Id VarSet -> BcM (BCInstrList, Word16)
pushAtom d p e
| Just e' <- bcView e
= pushAtom d p e'
pushAtom _ _ (AnnCoercion {}) -- Coercions are zero-width things,
= return (nilOL, 0) -- treated just like a variable V
pushAtom d p (AnnVar v)
| UnaryRep rep_ty <- repType (idType v)
, V <- typeArgRep rep_ty
= return (nilOL, 0)
| isFCallId v
= pprPanic "pushAtom: shouldn't get an FCallId here" (ppr v)
| Just primop <- isPrimOpId_maybe v
= return (unitOL (PUSH_PRIMOP primop), 1)
| Just d_v <- lookupBCEnv_maybe v p -- v is a local variable
= do dflags <- getDynFlags
let sz :: Word16
sz = fromIntegral (idSizeW dflags v)
l = trunc16 $ d - d_v + fromIntegral sz - 2
return (toOL (genericReplicate sz (PUSH_L l)), sz)
-- d - d_v the number of words between the TOS
-- and the 1st slot of the object
--
-- d - d_v - 1 the offset from the TOS of the 1st slot
--
-- d - d_v - 1 + sz - 1 the offset from the TOS of the last slot
-- of the object.
--
-- Having found the last slot, we proceed to copy the right number of
-- slots on to the top of the stack.
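-- A worked example (illustrative): with d = 6, d_v = 3 and sz = 2 we get
-- l = 3, so the code is two PUSH_L 3 instructions; after the first push the
-- object's remaining slot has itself moved to offset 3, so both slots end up
-- copied, in order, onto the top of the stack.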
| otherwise -- v must be a global variable
= do dflags <- getDynFlags
let sz :: Word16
sz = fromIntegral (idSizeW dflags v)
MASSERT(sz == 1)
return (unitOL (PUSH_G (getName v)), sz)
pushAtom _ _ (AnnLit lit) = do
dflags <- getDynFlags
let code rep
= let size_host_words = fromIntegral (argRepSizeW dflags rep)
in return (unitOL (PUSH_UBX (Left lit) size_host_words),
size_host_words)
case lit of
MachLabel _ _ _ -> code N
MachWord _ -> code N
MachInt _ -> code N
MachWord64 _ -> code L
MachInt64 _ -> code L
MachFloat _ -> code F
MachDouble _ -> code D
MachChar _ -> code N
MachNullAddr -> code N
MachStr s -> pushStr s
-- No LitInteger's should be left by the time this is called.
-- CorePrep should have converted them all to a real core
-- representation.
LitInteger {} -> panic "pushAtom: LitInteger"
where
pushStr s
= let getMallocvilleAddr
=
-- we could grab the Ptr from the ForeignPtr,
-- but then we have no way to control its lifetime.
-- In reality it'll probably stay alive long enough
-- by virtue of the global FastString table, but
-- to be on the safe side we copy the string into
-- a malloc'd area of memory.
do let n = BS.length s
ptr <- ioToBc (mallocBytes (n+1))
recordMallocBc ptr
ioToBc (
BS.unsafeUseAsCString s $ \p -> do
memcpy ptr p (fromIntegral n)
pokeByteOff ptr n (fromIntegral (ord '\0') :: Word8)
return ptr
)
in do
addr <- getMallocvilleAddr
-- Get the addr on the stack, untaggedly
return (unitOL (PUSH_UBX (Right addr) 1), 1)
pushAtom _ _ expr
= pprPanic "ByteCodeGen.pushAtom"
(pprCoreExpr (deAnnotate (undefined, expr)))
foreign import ccall unsafe "memcpy"
memcpy :: Ptr a -> Ptr b -> CSize -> IO ()
-- -----------------------------------------------------------------------------
-- Given a bunch of alts code and their discrs, do the donkey work
-- of making a multiway branch using a switch tree.
-- What a load of hassle!
mkMultiBranch :: Maybe Int -- # datacons in tycon, if alg alt
-- a hint; generates better code
-- Nothing is always safe
-> [(Discr, BCInstrList)]
-> BcM BCInstrList
mkMultiBranch maybe_ncons raw_ways = do
lbl_default <- getLabelBc
let
mkTree :: [(Discr, BCInstrList)] -> Discr -> Discr -> BcM BCInstrList
mkTree [] _range_lo _range_hi = return (unitOL (JMP lbl_default))
-- shouldn't happen?
mkTree [val] range_lo range_hi
| range_lo == range_hi
= return (snd val)
| null defaults -- Note [CASEFAIL]
= do lbl <- getLabelBc
return (testEQ (fst val) lbl
`consOL` (snd val
`appOL` (LABEL lbl `consOL` unitOL CASEFAIL)))
| otherwise
= return (testEQ (fst val) lbl_default `consOL` snd val)
-- Note [CASEFAIL] It may be that this case has no default
-- branch, but the alternatives are not exhaustive - this
-- happens for GADT cases for example, where the types
-- prove that certain branches are impossible. We could
-- just assume that the other cases won't occur, but if
-- this assumption was wrong (because of a bug in GHC)
-- then the result would be a segfault. So instead we
-- emit an explicit test and a CASEFAIL instruction that
-- causes the interpreter to barf() if it is ever
-- executed.
mkTree vals range_lo range_hi
= let n = length vals `div` 2
vals_lo = take n vals
vals_hi = drop n vals
v_mid = fst (head vals_hi)
in do
label_geq <- getLabelBc
code_lo <- mkTree vals_lo range_lo (dec v_mid)
code_hi <- mkTree vals_hi v_mid range_hi
return (testLT v_mid label_geq
`consOL` (code_lo
`appOL` unitOL (LABEL label_geq)
`appOL` code_hi))
the_default
= case defaults of
[] -> nilOL
[(_, def)] -> LABEL lbl_default `consOL` def
_ -> panic "mkMultiBranch/the_default"
instrs <- mkTree notd_ways init_lo init_hi
return (instrs `appOL` the_default)
where
(defaults, not_defaults) = partition (isNoDiscr.fst) raw_ways
notd_ways = sortBy (comparing fst) not_defaults
testLT (DiscrI i) fail_label = TESTLT_I i fail_label
testLT (DiscrW i) fail_label = TESTLT_W i fail_label
testLT (DiscrF i) fail_label = TESTLT_F i fail_label
testLT (DiscrD i) fail_label = TESTLT_D i fail_label
testLT (DiscrP i) fail_label = TESTLT_P i fail_label
testLT NoDiscr _ = panic "mkMultiBranch NoDiscr"
testEQ (DiscrI i) fail_label = TESTEQ_I i fail_label
testEQ (DiscrW i) fail_label = TESTEQ_W i fail_label
testEQ (DiscrF i) fail_label = TESTEQ_F i fail_label
testEQ (DiscrD i) fail_label = TESTEQ_D i fail_label
testEQ (DiscrP i) fail_label = TESTEQ_P i fail_label
testEQ NoDiscr _ = panic "mkMultiBranch NoDiscr"
-- None of these will be needed if there are no non-default alts
(init_lo, init_hi)
| null notd_ways
= panic "mkMultiBranch: awesome foursome"
| otherwise
= case fst (head notd_ways) of
DiscrI _ -> ( DiscrI minBound, DiscrI maxBound )
DiscrW _ -> ( DiscrW minBound, DiscrW maxBound )
DiscrF _ -> ( DiscrF minF, DiscrF maxF )
DiscrD _ -> ( DiscrD minD, DiscrD maxD )
DiscrP _ -> ( DiscrP algMinBound, DiscrP algMaxBound )
NoDiscr -> panic "mkMultiBranch NoDiscr"
(algMinBound, algMaxBound)
= case maybe_ncons of
-- XXX What happens when n == 0?
Just n -> (0, fromIntegral n - 1)
Nothing -> (minBound, maxBound)
isNoDiscr NoDiscr = True
isNoDiscr _ = False
dec (DiscrI i) = DiscrI (i-1)
dec (DiscrW w) = DiscrW (w-1)
dec (DiscrP i) = DiscrP (i-1)
dec other = other -- not really right, but if you
-- do cases on floating values, you'll get what you deserve
-- same snotty comment applies to the following
minF, maxF :: Float
minD, maxD :: Double
minF = -1.0e37
maxF = 1.0e37
minD = -1.0e308
maxD = 1.0e308
-- -----------------------------------------------------------------------------
-- Supporting junk for the compilation schemes
-- Describes case alts
data Discr
= DiscrI Int
| DiscrW Word
| DiscrF Float
| DiscrD Double
| DiscrP Word16
| NoDiscr
deriving (Eq, Ord)
instance Outputable Discr where
ppr (DiscrI i) = int i
ppr (DiscrW w) = text (show w)
ppr (DiscrF f) = text (show f)
ppr (DiscrD d) = text (show d)
ppr (DiscrP i) = ppr i
ppr NoDiscr = text "DEF"
lookupBCEnv_maybe :: Id -> BCEnv -> Maybe Word
lookupBCEnv_maybe = Map.lookup
idSizeW :: DynFlags -> Id -> Int
idSizeW dflags = argRepSizeW dflags . bcIdArgRep
bcIdArgRep :: Id -> ArgRep
bcIdArgRep = toArgRep . bcIdPrimRep
bcIdPrimRep :: Id -> PrimRep
bcIdPrimRep = typePrimRep . bcIdUnaryType
isFollowableArg :: ArgRep -> Bool
isFollowableArg P = True
isFollowableArg _ = False
isVoidArg :: ArgRep -> Bool
isVoidArg V = True
isVoidArg _ = False
bcIdUnaryType :: Id -> UnaryType
bcIdUnaryType x = case repType (idType x) of
UnaryRep rep_ty -> rep_ty
UbxTupleRep [rep_ty] -> rep_ty
UbxTupleRep [rep_ty1, rep_ty2]
| VoidRep <- typePrimRep rep_ty1 -> rep_ty2
| VoidRep <- typePrimRep rep_ty2 -> rep_ty1
_ -> pprPanic "bcIdUnaryType" (ppr x $$ ppr (idType x))
-- See bug #1257
unboxedTupleException :: a
unboxedTupleException
= throwGhcException
(ProgramError
("Error: bytecode compiler can't handle unboxed tuples.\n"++
" Possibly due to foreign import/export decls in source.\n"++
" Workaround: use -fobject-code, or compile this module to .o separately."))
mkSLIDE :: Word16 -> Word -> OrdList BCInstr
mkSLIDE n d
-- if the amount to slide doesn't fit in a word,
-- generate multiple slide instructions
| d > fromIntegral limit
= SLIDE n limit `consOL` mkSLIDE n (d - fromIntegral limit)
| d == 0
= nilOL
| otherwise
= unitOL (SLIDE n $ fromIntegral d)
where
limit :: Word16
limit = maxBound
splitApp :: AnnExpr' Var ann -> (AnnExpr' Var ann, [AnnExpr' Var ann])
-- The arguments are returned in *right-to-left* order
splitApp e | Just e' <- bcView e = splitApp e'
splitApp (AnnApp (_,f) (_,a)) = case splitApp f of
(f', as) -> (f', a:as)
splitApp e = (e, [])
bcView :: AnnExpr' Var ann -> Maybe (AnnExpr' Var ann)
-- The "bytecode view" of a term discards
-- a) type abstractions
-- b) type applications
-- c) casts
-- d) ticks (but not breakpoints)
-- Type lambdas *can* occur in random expressions,
-- whereas value lambdas cannot; that is why they are nuked here
bcView (AnnCast (_,e) _) = Just e
bcView (AnnLam v (_,e)) | isTyVar v = Just e
bcView (AnnApp (_,e) (_, AnnType _)) = Just e
bcView (AnnTick Breakpoint{} _) = Nothing
bcView (AnnTick _other_tick (_,e)) = Just e
bcView _ = Nothing
isVAtom :: AnnExpr' Var ann -> Bool
isVAtom e | Just e' <- bcView e = isVAtom e'
isVAtom (AnnVar v) = isVoidArg (bcIdArgRep v)
isVAtom (AnnCoercion {}) = True
isVAtom _ = False
atomPrimRep :: AnnExpr' Id ann -> PrimRep
atomPrimRep e | Just e' <- bcView e = atomPrimRep e'
atomPrimRep (AnnVar v) = bcIdPrimRep v
atomPrimRep (AnnLit l) = typePrimRep (literalType l)
atomPrimRep (AnnCoercion {}) = VoidRep
atomPrimRep other = pprPanic "atomPrimRep" (ppr (deAnnotate (undefined,other)))
atomRep :: AnnExpr' Id ann -> ArgRep
atomRep e = toArgRep (atomPrimRep e)
isPtrAtom :: AnnExpr' Id ann -> Bool
isPtrAtom e = isFollowableArg (atomRep e)
-- Let szsw be the sizes in words of some items pushed onto the stack,
-- which has initial depth d'. Return the values which the stack environment
-- should map these items to.
mkStackOffsets :: Word -> [Word] -> [Word]
mkStackOffsets original_depth szsw
= map (subtract 1) (tail (scanl (+) original_depth szsw))
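-- For instance (illustrative): mkStackOffsets 4 [1,2,1] == [4,6,7], i.e.
-- each item is mapped to its highest-numbered stack slot, matching the
-- convention described for pushAtom above.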
typeArgRep :: Type -> ArgRep
typeArgRep = toArgRep . typePrimRep
-- -----------------------------------------------------------------------------
-- The bytecode generator's monad
type BcPtr = Either ItblPtr (Ptr ())
data BcM_State
= BcM_State
{ bcm_dflags :: DynFlags
, uniqSupply :: UniqSupply -- for generating fresh variable names
, thisModule :: Module -- current module (for breakpoints)
, nextlabel :: Word16 -- for generating local labels
, malloced :: [BcPtr] -- thunks malloced for current BCO
-- Should be free()d when it is GCd
, breakArray :: BreakArray -- array of breakpoint flags
}
newtype BcM r = BcM (BcM_State -> IO (BcM_State, r))
ioToBc :: IO a -> BcM a
ioToBc io = BcM $ \st -> do
x <- io
return (st, x)
runBc :: DynFlags -> UniqSupply -> Module -> ModBreaks -> BcM r
-> IO (BcM_State, r)
runBc dflags us this_mod modBreaks (BcM m)
= m (BcM_State dflags us this_mod 0 [] breakArray)
where
breakArray = modBreaks_flags modBreaks
thenBc :: BcM a -> (a -> BcM b) -> BcM b
thenBc (BcM expr) cont = BcM $ \st0 -> do
(st1, q) <- expr st0
let BcM k = cont q
(st2, r) <- k st1
return (st2, r)
thenBc_ :: BcM a -> BcM b -> BcM b
thenBc_ (BcM expr) (BcM cont) = BcM $ \st0 -> do
(st1, _) <- expr st0
(st2, r) <- cont st1
return (st2, r)
returnBc :: a -> BcM a
returnBc result = BcM $ \st -> (return (st, result))
instance Functor BcM where
fmap = liftM
instance Applicative BcM where
pure = return
(<*>) = ap
instance Monad BcM where
(>>=) = thenBc
(>>) = thenBc_
return = returnBc
instance HasDynFlags BcM where
getDynFlags = BcM $ \st -> return (st, bcm_dflags st)
emitBc :: ([BcPtr] -> ProtoBCO Name) -> BcM (ProtoBCO Name)
emitBc bco
= BcM $ \st -> return (st{malloced=[]}, bco (malloced st))
recordMallocBc :: Ptr a -> BcM ()
recordMallocBc a
= BcM $ \st -> return (st{malloced = Right (castPtr a) : malloced st}, ())
recordItblMallocBc :: ItblPtr -> BcM ()
recordItblMallocBc a
= BcM $ \st -> return (st{malloced = Left a : malloced st}, ())
getLabelBc :: BcM Word16
getLabelBc
= BcM $ \st -> do let nl = nextlabel st
when (nl == maxBound) $
panic "getLabelBc: Ran out of labels"
return (st{nextlabel = nl + 1}, nl)
getLabelsBc :: Word16 -> BcM [Word16]
getLabelsBc n
= BcM $ \st -> let ctr = nextlabel st
in return (st{nextlabel = ctr+n}, [ctr .. ctr+n-1])
getBreakArray :: BcM BreakArray
getBreakArray = BcM $ \st -> return (st, breakArray st)
newUnique :: BcM Unique
newUnique = BcM $
\st -> case takeUniqFromSupply (uniqSupply st) of
(uniq, us) -> let newState = st { uniqSupply = us }
in return (newState, uniq)
getCurrentModule :: BcM Module
getCurrentModule = BcM $ \st -> return (st, thisModule st)
newId :: Type -> BcM Id
newId ty = do
uniq <- newUnique
return $ mkSysLocal tickFS uniq ty
tickFS :: FastString
tickFS = fsLit "ticked"
| green-haskell/ghc | compiler/ghci/ByteCodeGen.hs | bsd-3-clause | 65,516 | 0 | 24 | 21,135 | 14,213 | 7,295 | 6,918 | -1 | -1 |
{-# LANGUAGE PartialTypeSignatures #-}
module ExtraConstraintsWildcardTwice where
foo :: ((_), _) => a -> a
foo = undefined
| urbanslug/ghc | testsuite/tests/partial-sigs/should_fail/ExtraConstraintsWildcardTwice.hs | bsd-3-clause | 125 | 0 | 6 | 19 | 32 | 20 | 12 | 4 | 1 |
module Holyhaskell where
import Holyhaskell.Swallow ()
import Holyhaskell.Coconut ()
| duikboot/holyhaskell | src/Holyhaskell.hs | mit | 85 | 0 | 4 | 9 | 20 | 13 | 7 | 3 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
module Package where
import Control.Applicative
import qualified Control.Exception as E
import Control.Monad
import qualified Data.Attoparsec.Text as P
import qualified Data.List as L
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Data.Typeable
import qualified Distribution.Hackage.DB as H
import qualified Distribution.Version as V
import Github.GitData.Trees (GitTree (..), nestedTree,
treeGitTrees)
import Network.HTTP.Conduit
type Path = T.Text
type Tree = [T.Text]
type Hackage = M.Map T.Text [V.Version]
getTree :: IO Tree
getTree = do eTree <- nestedTree "gentoo-haskell" "gentoo-haskell" "master"
tree <- either (fail . show) return eTree
return $ T.pack . gitTreePath <$> filter validEbuild (treeGitTrees tree)
validEbuild :: GitTree -> Bool
validEbuild g = gitTreeType g == "blob" &&
".ebuild" `L.isSuffixOf` gitTreePath g
data Ebuild = Ebuild { ebuildCategory :: T.Text
, ebuildPackage :: T.Text
, ebuildVersion :: T.Text
} deriving (Show, Eq)
data Package = Package { packageName :: T.Text
, packageVersion :: V.Version
} deriving (Show, Eq, Ord, Typeable)
parseEbuildVersion :: P.Parser T.Text
parseEbuildVersion = do chars <- P.manyTill P.anyChar (void ".ebuild" <|> P.endOfInput)
return $ T.pack chars
parseEbuild :: P.Parser Ebuild
parseEbuild = do cat <- P.takeWhile1 (/= '/')
_ <- P.char '/'
pkg <- P.takeWhile1 (/='/')
_ <- P.char '/'
_ <- P.string pkg
_ <- P.char '-'
ver <- parseEbuildVersion
return $ Ebuild cat pkg ver
ebuildInPath :: Path -> Either String Ebuild
ebuildInPath = P.parseOnly parseEbuild
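-- For example (illustrative):
--   ebuildInPath "dev-haskell/aeson/aeson-0.7.0.4.ebuild"
--     == Right (Ebuild "dev-haskell" "aeson" "0.7.0.4")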
ebuildsInTree :: Tree -> Either String [Ebuild]
ebuildsInTree = mapM ebuildInPath
parsePackageVersion :: P.Parser V.Version
parsePackageVersion = do branches <- P.decimal `P.sepBy1` P.char '.'
return $ V.Version branches []
packageInEbuild :: Ebuild -> Either String Package
packageInEbuild ebuild = do ver <- P.parseOnly parsePackageVersion (ebuildVersion ebuild)
return $ Package (ebuildPackage ebuild) ver
packagesInEbuilds :: [Ebuild] -> Either String [Package]
packagesInEbuilds = mapM packageInEbuild
packagesInTree :: Tree -> Either String [Package]
packagesInTree = (packagesInEbuilds =<<) . ebuildsInTree
notLive :: Package -> Bool
notLive (Package _ (V.Version branches _)) = branches < [9,9,9,9]
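-- e.g. (illustrative) a live ebuild versioned 9999 gives
-- notLive (Package "foo" (V.Version [9999] [])) == False, so it is dropped
-- by the filter in readPackages.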
latest :: [Package] -> [Package]
latest ps = maximum <$> L.groupBy (\x y -> packageName x == packageName y) ps
data PackageStatus = Old | New | Unknown deriving (Show, Eq, Ord)
hackageVersion :: Hackage -> Package -> V.Version
hackageVersion hackage package =
case M.lookup (packageName package) hackage of
Just m -> maximum m
Nothing -> packageVersion package
packageStatus :: Hackage -> Package -> PackageStatus
packageStatus hackage package =
case M.lookup (packageName package) hackage of
Just m -> if L.any (packageVersion package <) m
then Old
else New
Nothing -> Unknown
showVersion :: V.Version -> String
showVersion (V.Version vs _) = L.foldl1 (\a b -> a ++ "." ++ b) $ show `fmap` vs
buildHackage :: H.Hackage -> Hackage
buildHackage = M.mapKeys T.pack . H.map H.keys
readHackage :: IO Hackage
readHackage = buildHackage <$> H.readHackage
readPackages :: IO [Package]
readPackages = do tree <- getTree
packages <- either fail return $ packagesInTree tree
let notLivePackages = filter notLive packages
latestPackages = latest notLivePackages
return latestPackages
doUpdatePackages :: IO ()
doUpdatePackages = do initReq <- parseUrl "http://gentoo-haskell.herokuapp.com/updatePackages"
let req = initReq { method = "POST" }
E.catch (void $ withManager $ httpLbs req)
ignoreException
doUpdateHackage :: IO ()
doUpdateHackage = do initReq <- parseUrl "http://gentoo-haskell.herokuapp.com/updateHackage"
let req = initReq { method = "POST" }
E.catch (void $ withManager $ httpLbs req)
ignoreException
ignoreException :: HttpException -> IO ()
ignoreException _ = return ()
| vikraman/gentoo-haskell-status | src/Package.hs | mit | 4,711 | 0 | 11 | 1,352 | 1,361 | 709 | 652 | 100 | 3 |
module Assembler(createMifFile) where
import Control.Exception
import System.Environment
import System.FilePath
import System.IO
import System.Exit
import Data.Bits
import Data.Int
import Data.Maybe
import Parser
import Syntax
createMifFile :: FilePath -> IO ()
createMifFile path = do
insts <- parseFile path
len <- evaluate $ length insts
let machineLang = assemble insts
mifPath = dropExtension path ++ ".mif"
withFile mifPath WriteMode $ \handle ->
do
mapM_ (hPutStrLn handle) mifHeader
mapM_ (hPutStrLn handle) (zipWith line [0..len-1] machineLang)
hPutStrLn handle $ concat ["[", show len, "..2047]:", replicate 16 '0', ";"]
hPutStrLn handle "END;"
where mifHeader = ["WIDTH = 16;"
,"DEPTH = 2048;"
,"ADDRESS_RADIX = DEC;"
,"DATA_RADIX = BIN;"
,"CONTENT BEGIN"]
line addr dat = concat [show addr, ":", dat, ";"]
assemble :: [Instruction] -> [String]
assemble = foldr (\i acc -> (conv i) : acc) []
conv :: Instruction -> String
conv (Prim op rd rs) = concat ["11", dec2bin rs 3, dec2bin rd 3, convArithOp op, "0000"]
conv (Shift op rd d) = concat ["11", "000", dec2bin rd 3, convShiftOp op, dec2bin d 4]
conv (Input rd) = concat ["11", "000", dec2bin rd 3, "1100", "0000"]
conv (Output rs) = concat ["11", dec2bin rs 3, "000", "1101", "0000"]
conv Nop = "1100000011101111"
conv Halt = "1100000011110000"
conv (Load ra d rb) = concat ["00", dec2bin ra 3, dec2bin rb 3, dec2bin d 8]
conv (Store ra d rb) = concat ["01", dec2bin ra 3, dec2bin rb 3, dec2bin d 8]
conv (LoadIm rb d) = concat ["10", "000", dec2bin rb 3, dec2bin d 8]
conv (AddI rb d) = concat ["10", "001", dec2bin rb 3, dec2bin d 8]
conv (UncondBr d) = concat ["10", "100", "000", dec2bin d 8]
conv (CondBr op d) = concat ["10", "111", convBrOp op, dec2bin d 8]
convArithOp :: String -> String
convArithOp "ADD" = "0000"
convArithOp "SUB" = "0001"
convArithOp "AND" = "0010"
convArithOp "OR" = "0011"
convArithOp "XOR" = "0100"
convArithOp "CMP" = "0101"
convArithOp "MOV" = "0110"
convShiftOp :: String -> String
convShiftOp "SLL" = "1000"
convShiftOp "SLR" = "1001"
convShiftOp "SRL" = "1010"
convShiftOp "SRA" = "1011"
convBrOp :: String -> String
convBrOp "BE" = "000"
convBrOp "BLT" = "001"
convBrOp "BLE" = "010"
convBrOp "BNE" = "011"
convBrOp _ = "111"
dec2bin :: Int16 -> Int -> String
dec2bin n len = lastN len $
foldr (\i acc -> (head . show $ (n `shiftR` i) .&. 1):acc) "" [15,14..0]
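-- Illustrative examples: dec2bin 5 4 == "0101" and, because shiftR on a
-- negative Int16 is an arithmetic shift, dec2bin (-1) 3 == "111".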
zipLeftover :: [a] -> [a] -> [a]
zipLeftover [] [] = []
zipLeftover xs [] = xs
zipLeftover [] ys = ys
zipLeftover (x:xs) (y:ys) = zipLeftover xs ys
lastN :: Int -> [a] -> [a]
lastN n xs = zipLeftover (drop n xs) xs
| yu-i9/HaSS | src/Assembler.hs | mit | 2,938 | 0 | 15 | 800 | 1,149 | 597 | 552 | 73 | 1 |
module PE0019 where
import Data.Time.Calendar
import Data.Time.Calendar.OrdinalDate
days :: [Day]
days = [fromGregorian y m d | y <- [1901..2000], m <- [1..12], d <- [1]]
main :: IO ()
main = do
print $ length $ filter isSunday days
where isSunday d = (snd (mondayStartWeek d) == 7)
| mvidner/projecteuler | src/PE0019.hs | mit | 292 | 0 | 11 | 58 | 134 | 73 | 61 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- Infer.hs ---
--
-- Filename: Infer.hs
-- Description:
-- Author: Manuel Schneckenreither
-- Maintainer:
-- Created: Wed Nov 2 15:34:35 2016 (+0100)
-- Version:
-- Package-Requires: ()
-- Last-Updated: Sun Oct 27 18:26:56 2019 (+0100)
-- By: Manuel Schneckenreither
-- Update #: 65
-- URL:
-- Doc URL:
-- Keywords:
-- Compatibility:
--
--
-- Commentary:
--
--
--
--
-- Change Log:
--
--
--
--
--
--
--
-- Code:
module Data.Rewriting.ARA.InferTypes
( inferTypesAndSignature
) where
import Data.Rewriting.Typed.Datatype
import Data.Rewriting.Typed.Problem
import Data.Rewriting.Typed.Rule
import Data.Rewriting.Typed.Signature
import Control.Lens
import Control.Monad.State
import Data.List
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Text.PrettyPrint.ANSI.Leijen
import Debug.Trace
type St f v s sDt = (Problem f v f String String f, M.Map f Int, [f],[f])
inferTypesAndSignature :: (Pretty f, Pretty v, Ord f, Show f, Eq f) =>
[f] -> Problem f v s sDt dt f -> Problem f v f String String f
inferTypesAndSignature mustFuns prob =
let res = execState infer (prob {datatypes = Nothing, signatures = Nothing}, M.empty, [], mustFuns)
in res ^. _1
-- trace ("RES: " ++ show mustFuns)
infer :: (Pretty f, Pretty v, Show f, Ord f) => State (St f v s sDt) ()
infer = do
inferSigs
inferTypes
getProblem :: State (St f v s sDt) (Problem f v f String String f)
getProblem = do
st <- get
return (st^._1)
getMustFunctionSymbols :: State (St f v s sDt) [f]
getMustFunctionSymbols = do
st <- get
return (st^._4)
inferSigs :: (Pretty f, Pretty v, Show f, Ord f, Eq f) => State (St f v s sDt) ()
inferSigs = do
p <- getProblem
mustFuns <- getMustFunctionSymbols
let syms = nub $ symbols p
let termColl m (Var v) = m
termColl m (Fun f ch) =
let m' = case M.lookup f m of
Nothing -> M.insert f (length ch) m
Just x -> if x == length ch
then m
else error $ "different number of parameters in function " ++ show f
in
-- trace ("ruls: " ++ show (rules p))
foldl termColl m' ch
let ruls = allRules (rules p)
let paramLen = foldl termColl M.empty (map lhs ruls ++ map rhs ruls)
let definedFuns = nub $ mustFuns ++ map ((\(Fun f _) -> f). lhs) ruls
let getSig f =
let pLen = M.findWithDefault 0 f paramLen
in Signature f (replicate pLen "A") "A"
let definedFunsSigs = map getSig definedFuns
(pr , ma, fs, mustFuns) <- get
put (pr { signatures = Just definedFunsSigs }
, paramLen
, filter (`notElem` definedFuns) syms
, mustFuns)
-- modify $ _1 %~ (\x -> x { signatures = Just definedFunsSigs })
-- modify $ _2 .~ paramLen
-- modify $ _3 .~ filter (`notElem` definedFuns) syms
-- trace ("problem: " ++ show (prettyWST' p))
-- trace ("startTerms: " ++ show (startTerms p))
-- trace ("symbols: " ++ show (symbols p))
-- trace ("paramLen: " ++ show paramLen)
-- trace ("definedFuns: " ++ show definedFuns)
-- trace ("definedFunsSigs: " ++ show definedFunsSigs)
-- undefined
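-- An illustrative run (not from the original source): for a single rule
-- f(x, s(y)) -> f(s(x), y) the defined symbol f gets Signature f ["A","A"] "A",
-- while s is left over as a constructor name for inferTypes below.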
getParamLens :: State (St f v s sDt) (M.Map f Int)
getParamLens = do
st <- get
return (st^._2)
getConstructorNames :: State (St f v s sDt) [f]
getConstructorNames = do
st <- get
return (st^._3)
inferTypes :: (Ord f) => State (St f v s sDt) ()
inferTypes = do
paramLens <- getParamLens
constrs <- getConstructorNames
let makeConstructors n =
let len = M.findWithDefault 0 n paramLens
in Constructor n (replicate len ConstructorRecursive)
let dt = Datatype "A" $ map makeConstructors constrs
modify $ _1 %~ (\x -> x { datatypes = Just [dt]})
-- trace ("paramLens: " ++ show paramLens)
-- trace ("constrs: " ++ show constrs)
-- trace ("datatypes : " ++ show dt)
-- undefined
--
-- Infer.hs ends here
| ComputationWithBoundedResources/ara-inference | src/Data/Rewriting/ARA/InferTypes.hs | mit | 4,093 | 0 | 19 | 1,117 | 1,186 | 635 | 551 | 74 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module Main(main) where
import Prelude hiding ((.), id)
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Function
import Control.FRP.Wire
import Control.FRP.Wire.Utils
import Control.Category
import Control.Arrow
import Control.Applicative
import Control.Arrow.Operations(delay)
import Control.Arrow.Signal
main :: IO ()
main = hspec $ do
describe "Wire" $ do
it "ignores its input for 'pure'" $ property $
\x -> viewWire (pure "bees") [(x :: String)] == ["bees"]
it "obeys identity" $ property $
\x -> viewWire id x == (x :: String)
it "obeys arr" $ property $
\x f -> viewWire (arr (apply f)) (x :: [String]) ==
(map (apply f) x :: [Int])
it "can form a circuit" $ property $
\x y z -> viewWire (delay 0) [x, y, z] == [0 :: Int, x, y]
describe "the utilities" $ do
it "can differentiate streams of integers" $ property $ do
\x y z -> viewWire (differentiate 0 (-)) [x, y, z] ==
[x :: Integer, y - x, z - y]
it "can integrate streams of integers" $ property $ do
\x y z -> viewWire wsum [x, y, z] ==
[x :: Integer, x + y, x + y + z]
describe "signal combinators" $ do
it "can latch signals" $ property $ do
\x y -> viewWire wlatch [Nothing, Just x, Nothing, Just y] `shouldBe`
[Nothing, Just (x :: Int), Just x, Just y]
it "can wait for initials from signals" $ property $ do
\x y -> viewWire wdecisive [Nothing, Just x, Nothing, Just y] `shouldBe`
[Nothing, Just (x :: Int), Just x, Just x]
it "can detect and latch changes" $ property $ do
\x -> viewWire (changes >>> wlatch) x `shouldBe` map Just (x :: [Int])
describe "the convenience instances" $ do
it "can handle strings" $ do
viewWire "badgers" [undefined] `shouldBe` ["badgers"]
describe "liftSignal" $ do
it "acts only on events" $ property $ do
\x -> viewWire (liftSignal $ arr (* (2 :: Int)) >>> arr Just) x `shouldBe`
map (fmap (* 2)) x
| prophile/trivial-wire | Tests.hs | mit | 2,099 | 0 | 22 | 580 | 825 | 438 | 387 | 48 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Data.FreeAgent.Types.Invoices where
import Control.Applicative (empty, (<$>), (<*>))
import Control.Lens
import Data.Aeson
import Data.Aeson.TH
import qualified Data.ByteString as BS
import Data.Data
type InvoiceTimelineItems = [ InvoiceTimelineItem ]
data InvoiceTimelineItem = InvoiceTimelineItem {
-- _dated_on :: BS.ByteString -- 2011-09-02
-- , _description :: BS.ByteString -- resras
_summary :: BS.ByteString -- Payment: 007: £14.40 received
, _amount :: BS.ByteString -- 14.4
-- , _reference :: BS.ByteString -- 007
} deriving (Show, Data, Typeable)
type InvoiceItems = [ InvoiceItem ]
data InvoiceItem = InvoiceItem {
_item_type :: BS.ByteString -- Hours
, _description :: BS.ByteString -- Test InvoiceItem
, _quantity :: BS.ByteString -- 0.0
, _price :: BS.ByteString -- 0.0
} deriving (Show, Data, Typeable)
data Invoice = Invoice {
_omit_header :: Bool
, _dated_on :: BS.ByteString -- 2001-12-12T00:00:00+00:00
, _status :: BS.ByteString -- Draft
, _contact :: BS.ByteString -- https://api.freeagent.com/v2/contacts/2
, _currency :: BS.ByteString -- GBP
, _invoice_items :: [ InvoiceItem ]
, _exchange_rate :: BS.ByteString -- 1.0
, _payment_terms_in_days :: Double
, _net_value :: BS.ByteString -- 0.0
, _reference :: BS.ByteString -- 003
, _due_on :: BS.ByteString -- 2001-12-17T00:00:00+00:00
} deriving (Show, Data, Typeable)
instance FromJSON InvoiceTimelineItem where
parseJSON (Object v) = InvoiceTimelineItem <$>
-- v .: "dated_on" <*>
-- v .: "description" <*>
v .: "summary" <*>
v .: "amount" -- <*>
-- v .: "reference"
parseJSON _ = empty
instance FromJSON InvoiceItem where
parseJSON (Object v) = InvoiceItem <$>
v .: "item_type" <*>
v .: "description" <*>
v .: "quantity" <*>
v .: "price"
parseJSON _ = empty
instance FromJSON Invoice where
parseJSON (Object v) = Invoice <$>
v .: "omit_header" <*>
v .: "dated_on" <*>
v .: "status" <*>
v .: "contact" <*>
v .: "currency" <*>
v .: "invoice_items" <*>
v .: "exchange_rate" <*>
v .: "payment_terms_in_days" <*>
v .: "net_value" <*>
v .: "reference" <*>
v .: "due_on"
parseJSON _ = empty
$(deriveToJSON tail ''InvoiceTimelineItem)
$(makeLenses ''InvoiceTimelineItem)
$(deriveToJSON tail ''InvoiceItem)
$(makeLenses ''InvoiceItem)
$(deriveToJSON tail ''Invoice)
$(makeLenses ''Invoice)
| perurbis/hfreeagent | src/Data/FreeAgent/Types/Invoices.hs | mit | 3,119 | 0 | 27 | 1,082 | 608 | 344 | 264 | 67 | 0 |
{-# LANGUAGE OverloadedStrings, DeriveGeneric, GADTs, StandaloneDeriving #-}
import GHC.Generics
import Data.Maybe
import Data.Text
import qualified Data.ByteString.Lazy as BL
import Data.Aeson
import Data.Aeson.Types
import Control.Monad.Writer.Lazy
import Control.Applicative
import Charts
import Weather
import Result
import Dice
jsonRequest :: BL.ByteString
jsonRequest = "\
\{ \"state\" : {\
\ \"altitude\":\"High\",\
\ \"weather\":\"Poor\",\
\ \"fuel\":16,\
\ \"direction\":\"Outbound\"\
\ },\
\ \"roll\":8\
\}"
main :: IO ()
main = do
-- Test the "Parse then dispatch" method
putStrLn "Does it parse?"
case decodeRequest jsonRequest of
Just (initial, roll) -> do
putStrLn "It sure does!"
putStrLn . show $ (initial :: WeatherInZone)
case d6 2 roll of
Just dice -> do
let (final, results) = runWriter $ post initial dice
BL.putStrLn . encode $ object ["newState" .= final, "results" .= results]
Nothing -> do
putStrLn "Invalid dice!"
Nothing -> do
putStrLn "That's a big ol' Negatory, Space Captain!"
putStrLn ""
-- Test the "Stupid Hacky Bullshit" method
putStrLn "Does stupid hacky bullshit get the job done?"
let maybeRequest = decode jsonRequest :: Maybe WeatherInZoneRequest
case maybeRequest of
Just request -> do
putStrLn "Damn skippy it does!"
putStrLn . show $ state request
case d6 2 $ roll request of
Just dice -> do
let (final, results) = runWriter $ post (state request) dice
BL.putStrLn . encode $ object ["newState" .= final, "results" .= results]
Nothing -> do
putStrLn "Invalid dice!"
Nothing ->
putStrLn "Nope, that doesn't work either!"
putStrLn ""
-- Attempt to separate out the parsing of JSON from the handling of requests
case decodeRequest jsonRequest of
Just (initial, roll) -> do
case d6 2 roll of
Just dice -> do
BL.putStrLn $ postChart (initial :: WeatherInZone) dice
Nothing -> do
putStrLn "Bad dice response"
Nothing -> do
putStrLn "Bad data response"
data WeatherInZoneRequest = WeatherInZoneRequest
{ state :: WeatherInZone
, roll :: Int } deriving (Show, Generic)
instance FromJSON WeatherInZoneRequest
instance ToJSON WeatherInZoneRequest
decodeRequest :: (Chart a) => BL.ByteString -> Maybe (a, Int)
decodeRequest request = do
root <- decode request
flip parseMaybe root $ \obj -> do
state <- obj .: "state"
roll <- obj .: "roll"
return (state, roll)
postChart :: (Chart a) => a -> Dice -> BL.ByteString
postChart initial dice = encode $ object ["newState" .= final, "results" .= results]
where
(final, results) = runWriter $ post initial dice
| rjackson90/b29-tools | dummy.hs | mit | 3,081 | 0 | 23 | 974 | 737 | 363 | 374 | 70 | 7 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Model where
import Data.Text
import Database.Persist.TH
import Permissions
-- N.B. discordid is their snowflake id, NOT their username
-- In the same way, PlayerId is their id number, not their username
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
User json
playerId Text
username Text
discordId Text Maybe
permissions Permissions
UniqueUser playerId
Primary playerId
deriving Show Read Eq
ApiKey json
key Text
user UserId
UniqueKey key
Primary user
deriving Show Read Eq
|]
| shiyah/lupa-bot | src/Model.hs | mit | 993 | 0 | 7 | 289 | 53 | 36 | 17 | 14 | 0 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.SVGPathElement (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.SVGPathElement
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.SVGPathElement
#else
#endif
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/SVGPathElement.hs | mit | 355 | 0 | 5 | 33 | 33 | 26 | 7 | 4 | 0 |
module Constant where
import Control.Applicative
import Data.Monoid
newtype Constant a b =
Constant { getConstant :: a }
deriving (Eq, Ord, Show)
instance Functor (Constant a) where
fmap _ (Constant a) = (Constant a)
instance Monoid a => Applicative (Constant a) where
pure _ = Constant mempty
(<*>) (Constant a) (Constant a') = Constant (a <> a')
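-- Small usage sketch (illustrative):
--   getConstant (Constant (Sum 1) <*> Constant (Sum 2)) == Sum 3
--   fmap (+1) (Constant "x") == Constant "x"   -- the mapped function is ignored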
| NickAger/LearningHaskell | HaskellProgrammingFromFirstPrinciples/Chapter17.hsproj/Constant.hs | mit | 369 | 0 | 8 | 78 | 146 | 79 | 67 | 11 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Control.Consensus.Paxos.Storage.Memory
-- Copyright : (c) Phil Hargett 2016
-- License : MIT (see LICENSE file)
--
-- Maintainer : [email protected]
-- Stability : $(Stability)
-- Portability : $(Portability)
--
--
-----------------------------------------------------------------------------
module Control.Consensus.Paxos.Storage.Memory (
storage
) where
-- local imports
import Control.Consensus.Paxos.Types
-- external imports
import Control.Concurrent.STM
import qualified Data.Map as M
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
type MemoryStore d = TVar (M.Map InstanceId (Ledger d))
storage :: (Decreeable d) => IO (Storage d)
storage = do
store <- atomically $ newTVar M.empty
return Storage {
loadLedger = memoryLoad store,
saveLedger = memorySave store
}
memoryLoad :: MemoryStore d -> InstanceId -> IO (Maybe (Ledger d))
memoryLoad store instId = atomically $ do
ledgers <- readTVar store
return $ M.lookup instId ledgers
memorySave :: MemoryStore d -> InstanceId -> Ledger d -> IO ()
memorySave store instId ledger = atomically $
modifyTVar store $ \ledgers -> M.insert instId ledger ledgers
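-- Minimal usage sketch (illustrative; instId and ledger stand for values of
-- the surrounding paxos types):
--   do s <- storage
--      saveLedger s instId ledger
--      mLedger <- loadLedger s instId   -- Just ledger once saved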
| hargettp/paxos | src/Control/Consensus/Paxos/Storage/Memory.hs | mit | 1,380 | 0 | 11 | 208 | 281 | 153 | 128 | 19 | 1 |
module DoesItTypeCheck where
import Data.List (sort)
data Person = Person Bool deriving Show
printPerson :: Person -> IO ()
printPerson person = putStrLn (show person)
-- Don't need this if you derive Show above
-- instance Show Person where
-- show (Person b) = "Person " ++ show b
data Mood =
Blah
| Woot deriving (Show, Eq)
settleDown :: Mood -> Mood
settleDown x =
if x == Woot
then Blah
else x
-- Don't need this if you are deriving Eq above
-- instance Eq Mood where
-- (==) Blah Blah = True
-- (==) Woot Woot = True
-- (==) _ _ = False
type Subject = String
type Verb = String
type Object = String
data Sentence =
Sentence Subject Verb Object deriving (Eq, Show)
s1 :: Object -> Sentence
s1 = Sentence "dogs" "drool"
s2 :: Sentence
s2 = Sentence "Julie" "loves" "dogs"
data Rocks =
Rocks String deriving (Eq, Show)
data Yeah =
Yeah Bool deriving (Eq, Show)
data Papu =
Papu Rocks Yeah deriving (Eq, Show)
-- Papu data constructor takes a Rocks and Yeah
-- phew = Papu "chases" True
truth = Papu (Rocks "chomsydoz") (Yeah True)
equalityForall :: Papu -> Papu -> Bool
equalityForall p p' = p == p'
-- Papu doesn't implement Ord typeclass
-- comparePapus :: Papu -> Papu -> Bool
-- comparePapus p p' = p > p'
i :: Num a => a
-- Can't use this because the literal 1 needs a Num constraint,
-- and a fully polymorphic a doesn't carry one
-- i :: a
i = 1
-- f :: Float
-- Can't use this because a needs to be a Fractional
-- f :: Num a => a
-- This is OK because 1.0 is a Fractional
-- f :: Fractional a => a
-- This is OK because RealFrac is a Fractional as well.
f :: RealFrac a => a
f = 1.0
-- freud :: a -> a
-- OK because it just tightens up the bounds
-- freud :: Ord a => a -> a
-- OK because it tightens up the bounds further.
freud :: Int -> Int
freud x = x
myX = 1 :: Int
sigmund :: Int -> Int
-- This doesn't work because a can't be polymorphic
-- because it returns an Int in the form of myX
-- sigmund :: a -> a
sigmund x = myX
sigmund' :: Int -> Int
-- Doesn't work because Num is still too general.
-- sigmund' :: Num a => a -> a
sigmund' x = myX
-- jung :: Ord a => [a] -> a
-- This works because Int is more specific than Ord
jung :: [Int] -> Int
jung xs = head (sort xs)
-- young :: [Char] -> Char
-- This works because you don't really need to be
-- as specific as Char. This will work for anything orderable.
young :: Ord a => [a] -> a
young xs = head (sort xs)
mySort :: [Char] -> [Char]
mySort = sort
signifier :: [Char] -> Char
-- This will not work because mySort is already too specific when
-- it specifies Char
-- signifier :: Ord a => [a] -> a
signifier xs = head (mySort xs)
chk :: Eq b => (a -> b) -> a -> b -> Bool
chk fn a b = fn a == b
arith :: Num b => (a -> b) -> Integer -> a -> b
arith fn int a = fn a + fromInteger int
| mikegehard/haskellBookExercises | chapter6/doesItTypeCheck.hs | mit | 2,955 | 0 | 8 | 827 | 645 | 362 | 283 | 54 | 2 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.HTMLTextAreaElement
(checkValidity, checkValidity_, reportValidity, reportValidity_,
setCustomValidity, select, setRangeText, setRangeText4,
setSelectionRange, setAutofocus, getAutofocus, setDirName,
getDirName, setDisabled, getDisabled, getForm, setMinLength,
getMinLength, setMaxLength, getMaxLength, setName, getName,
setPlaceholder, getPlaceholder, setReadOnly, getReadOnly,
setRequired, getRequired, setRows, getRows, setCols, getCols,
setWrap, getWrap, getType, setDefaultValue, getDefaultValue,
setValue, getValue, getTextLength, getWillValidate, getValidity,
getValidationMessage, getLabels, setSelectionStart,
getSelectionStart, setSelectionEnd, getSelectionEnd,
setSelectionDirection, getSelectionDirection, setAutocomplete,
getAutocomplete, HTMLTextAreaElement(..), gTypeHTMLTextAreaElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.checkValidity Mozilla HTMLTextAreaElement.checkValidity documentation>
checkValidity :: (MonadDOM m) => HTMLTextAreaElement -> m Bool
checkValidity self
= liftDOM ((self ^. jsf "checkValidity" ()) >>= valToBool)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.checkValidity Mozilla HTMLTextAreaElement.checkValidity documentation>
checkValidity_ :: (MonadDOM m) => HTMLTextAreaElement -> m ()
checkValidity_ self
= liftDOM (void (self ^. jsf "checkValidity" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.reportValidity Mozilla HTMLTextAreaElement.reportValidity documentation>
reportValidity :: (MonadDOM m) => HTMLTextAreaElement -> m Bool
reportValidity self
= liftDOM ((self ^. jsf "reportValidity" ()) >>= valToBool)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.reportValidity Mozilla HTMLTextAreaElement.reportValidity documentation>
reportValidity_ :: (MonadDOM m) => HTMLTextAreaElement -> m ()
reportValidity_ self
= liftDOM (void (self ^. jsf "reportValidity" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.setCustomValidity Mozilla HTMLTextAreaElement.setCustomValidity documentation>
setCustomValidity ::
(MonadDOM m, ToJSString error) =>
HTMLTextAreaElement -> error -> m ()
setCustomValidity self error
= liftDOM (void (self ^. jsf "setCustomValidity" [toJSVal error]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.select Mozilla HTMLTextAreaElement.select documentation>
select :: (MonadDOM m) => HTMLTextAreaElement -> m ()
select self = liftDOM (void (self ^. jsf "select" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.setRangeText Mozilla HTMLTextAreaElement.setRangeText documentation>
setRangeText ::
(MonadDOM m, ToJSString replacement) =>
HTMLTextAreaElement -> replacement -> m ()
setRangeText self replacement
= liftDOM (void (self ^. jsf "setRangeText" [toJSVal replacement]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.setRangeText Mozilla HTMLTextAreaElement.setRangeText documentation>
setRangeText4 ::
(MonadDOM m, ToJSString replacement, ToJSString selectionMode) =>
HTMLTextAreaElement ->
replacement -> Word -> Word -> Maybe selectionMode -> m ()
setRangeText4 self replacement start end selectionMode
= liftDOM
(void
(self ^. jsf "setRangeText"
[toJSVal replacement, toJSVal start, toJSVal end,
toJSVal selectionMode]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.setSelectionRange Mozilla HTMLTextAreaElement.setSelectionRange documentation>
setSelectionRange ::
(MonadDOM m, ToJSString direction) =>
HTMLTextAreaElement ->
Maybe Int -> Maybe Int -> Maybe direction -> m ()
setSelectionRange self start end direction
= liftDOM
(void
(self ^. jsf "setSelectionRange"
[toJSVal start, toJSVal end, toJSVal direction]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.autofocus Mozilla HTMLTextAreaElement.autofocus documentation>
setAutofocus :: (MonadDOM m) => HTMLTextAreaElement -> Bool -> m ()
setAutofocus self val
= liftDOM (self ^. jss "autofocus" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.autofocus Mozilla HTMLTextAreaElement.autofocus documentation>
getAutofocus :: (MonadDOM m) => HTMLTextAreaElement -> m Bool
getAutofocus self
= liftDOM ((self ^. js "autofocus") >>= valToBool)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.dirName Mozilla HTMLTextAreaElement.dirName documentation>
setDirName ::
(MonadDOM m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setDirName self val = liftDOM (self ^. jss "dirName" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.dirName Mozilla HTMLTextAreaElement.dirName documentation>
getDirName ::
(MonadDOM m, FromJSString result) =>
HTMLTextAreaElement -> m result
getDirName self
= liftDOM ((self ^. js "dirName") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.disabled Mozilla HTMLTextAreaElement.disabled documentation>
setDisabled :: (MonadDOM m) => HTMLTextAreaElement -> Bool -> m ()
setDisabled self val
= liftDOM (self ^. jss "disabled" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.disabled Mozilla HTMLTextAreaElement.disabled documentation>
getDisabled :: (MonadDOM m) => HTMLTextAreaElement -> m Bool
getDisabled self = liftDOM ((self ^. js "disabled") >>= valToBool)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.form Mozilla HTMLTextAreaElement.form documentation>
getForm :: (MonadDOM m) => HTMLTextAreaElement -> m HTMLFormElement
getForm self = liftDOM ((self ^. js "form") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.minLength Mozilla HTMLTextAreaElement.minLength documentation>
setMinLength :: (MonadDOM m) => HTMLTextAreaElement -> Int -> m ()
setMinLength self val
= liftDOM (self ^. jss "minLength" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.minLength Mozilla HTMLTextAreaElement.minLength documentation>
getMinLength :: (MonadDOM m) => HTMLTextAreaElement -> m Int
getMinLength self
= liftDOM (round <$> ((self ^. js "minLength") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.maxLength Mozilla HTMLTextAreaElement.maxLength documentation>
setMaxLength :: (MonadDOM m) => HTMLTextAreaElement -> Int -> m ()
setMaxLength self val
= liftDOM (self ^. jss "maxLength" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.maxLength Mozilla HTMLTextAreaElement.maxLength documentation>
getMaxLength :: (MonadDOM m) => HTMLTextAreaElement -> m Int
getMaxLength self
= liftDOM (round <$> ((self ^. js "maxLength") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.name Mozilla HTMLTextAreaElement.name documentation>
setName ::
(MonadDOM m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setName self val = liftDOM (self ^. jss "name" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.name Mozilla HTMLTextAreaElement.name documentation>
getName ::
(MonadDOM m, FromJSString result) =>
HTMLTextAreaElement -> m result
getName self = liftDOM ((self ^. js "name") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.placeholder Mozilla HTMLTextAreaElement.placeholder documentation>
setPlaceholder ::
(MonadDOM m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setPlaceholder self val
= liftDOM (self ^. jss "placeholder" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.placeholder Mozilla HTMLTextAreaElement.placeholder documentation>
getPlaceholder ::
(MonadDOM m, FromJSString result) =>
HTMLTextAreaElement -> m result
getPlaceholder self
= liftDOM ((self ^. js "placeholder") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.readOnly Mozilla HTMLTextAreaElement.readOnly documentation>
setReadOnly :: (MonadDOM m) => HTMLTextAreaElement -> Bool -> m ()
setReadOnly self val
= liftDOM (self ^. jss "readOnly" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.readOnly Mozilla HTMLTextAreaElement.readOnly documentation>
getReadOnly :: (MonadDOM m) => HTMLTextAreaElement -> m Bool
getReadOnly self = liftDOM ((self ^. js "readOnly") >>= valToBool)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.required Mozilla HTMLTextAreaElement.required documentation>
setRequired :: (MonadDOM m) => HTMLTextAreaElement -> Bool -> m ()
setRequired self val
= liftDOM (self ^. jss "required" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.required Mozilla HTMLTextAreaElement.required documentation>
getRequired :: (MonadDOM m) => HTMLTextAreaElement -> m Bool
getRequired self = liftDOM ((self ^. js "required") >>= valToBool)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.rows Mozilla HTMLTextAreaElement.rows documentation>
setRows :: (MonadDOM m) => HTMLTextAreaElement -> Word -> m ()
setRows self val = liftDOM (self ^. jss "rows" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.rows Mozilla HTMLTextAreaElement.rows documentation>
getRows :: (MonadDOM m) => HTMLTextAreaElement -> m Word
getRows self
= liftDOM (round <$> ((self ^. js "rows") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.cols Mozilla HTMLTextAreaElement.cols documentation>
setCols :: (MonadDOM m) => HTMLTextAreaElement -> Word -> m ()
setCols self val = liftDOM (self ^. jss "cols" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.cols Mozilla HTMLTextAreaElement.cols documentation>
getCols :: (MonadDOM m) => HTMLTextAreaElement -> m Word
getCols self
= liftDOM (round <$> ((self ^. js "cols") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.wrap Mozilla HTMLTextAreaElement.wrap documentation>
setWrap ::
(MonadDOM m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setWrap self val = liftDOM (self ^. jss "wrap" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.wrap Mozilla HTMLTextAreaElement.wrap documentation>
getWrap ::
(MonadDOM m, FromJSString result) =>
HTMLTextAreaElement -> m result
getWrap self = liftDOM ((self ^. js "wrap") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.type Mozilla HTMLTextAreaElement.type documentation>
getType ::
(MonadDOM m, FromJSString result) =>
HTMLTextAreaElement -> m result
getType self = liftDOM ((self ^. js "type") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.defaultValue Mozilla HTMLTextAreaElement.defaultValue documentation>
setDefaultValue ::
(MonadDOM m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setDefaultValue self val
= liftDOM (self ^. jss "defaultValue" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.defaultValue Mozilla HTMLTextAreaElement.defaultValue documentation>
getDefaultValue ::
(MonadDOM m, FromJSString result) =>
HTMLTextAreaElement -> m result
getDefaultValue self
= liftDOM ((self ^. js "defaultValue") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.value Mozilla HTMLTextAreaElement.value documentation>
setValue ::
(MonadDOM m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setValue self val = liftDOM (self ^. jss "value" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.value Mozilla HTMLTextAreaElement.value documentation>
getValue ::
(MonadDOM m, FromJSString result) =>
HTMLTextAreaElement -> m result
getValue self
= liftDOM ((self ^. js "value") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.textLength Mozilla HTMLTextAreaElement.textLength documentation>
getTextLength :: (MonadDOM m) => HTMLTextAreaElement -> m Word
getTextLength self
= liftDOM (round <$> ((self ^. js "textLength") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.willValidate Mozilla HTMLTextAreaElement.willValidate documentation>
getWillValidate :: (MonadDOM m) => HTMLTextAreaElement -> m Bool
getWillValidate self
= liftDOM ((self ^. js "willValidate") >>= valToBool)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.validity Mozilla HTMLTextAreaElement.validity documentation>
getValidity ::
(MonadDOM m) => HTMLTextAreaElement -> m ValidityState
getValidity self
= liftDOM ((self ^. js "validity") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.validationMessage Mozilla HTMLTextAreaElement.validationMessage documentation>
getValidationMessage ::
(MonadDOM m, FromJSString result) =>
HTMLTextAreaElement -> m result
getValidationMessage self
= liftDOM ((self ^. js "validationMessage") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.labels Mozilla HTMLTextAreaElement.labels documentation>
getLabels :: (MonadDOM m) => HTMLTextAreaElement -> m NodeList
getLabels self
= liftDOM ((self ^. js "labels") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionStart Mozilla HTMLTextAreaElement.selectionStart documentation>
setSelectionStart ::
(MonadDOM m) => HTMLTextAreaElement -> Int -> m ()
setSelectionStart self val
= liftDOM (self ^. jss "selectionStart" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionStart Mozilla HTMLTextAreaElement.selectionStart documentation>
getSelectionStart :: (MonadDOM m) => HTMLTextAreaElement -> m Int
getSelectionStart self
= liftDOM
(round <$> ((self ^. js "selectionStart") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionEnd Mozilla HTMLTextAreaElement.selectionEnd documentation>
setSelectionEnd ::
(MonadDOM m) => HTMLTextAreaElement -> Int -> m ()
setSelectionEnd self val
= liftDOM (self ^. jss "selectionEnd" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionEnd Mozilla HTMLTextAreaElement.selectionEnd documentation>
getSelectionEnd :: (MonadDOM m) => HTMLTextAreaElement -> m Int
getSelectionEnd self
= liftDOM (round <$> ((self ^. js "selectionEnd") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionDirection Mozilla HTMLTextAreaElement.selectionDirection documentation>
setSelectionDirection ::
(MonadDOM m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setSelectionDirection self val
= liftDOM (self ^. jss "selectionDirection" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionDirection Mozilla HTMLTextAreaElement.selectionDirection documentation>
getSelectionDirection ::
(MonadDOM m, FromJSString result) =>
HTMLTextAreaElement -> m result
getSelectionDirection self
= liftDOM
((self ^. js "selectionDirection") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.autocomplete Mozilla HTMLTextAreaElement.autocomplete documentation>
setAutocomplete ::
(MonadDOM m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setAutocomplete self val
= liftDOM (self ^. jss "autocomplete" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.autocomplete Mozilla HTMLTextAreaElement.autocomplete documentation>
getAutocomplete ::
(MonadDOM m, FromJSString result) =>
HTMLTextAreaElement -> m result
getAutocomplete self
= liftDOM ((self ^. js "autocomplete") >>= fromJSValUnchecked)
| ghcjs/jsaddle-dom | src/JSDOM/Generated/HTMLTextAreaElement.hs | mit | 17,963 | 0 | 12 | 2,596 | 3,552 | 1,893 | 1,659 | 222 | 1 |
module Chapter4Exercises where
import Chapter4 hiding (maxThree)
import Test.HUnit
import Test.QuickCheck hiding (Result)
-------------------------------------------------------------------------------
-- Exercise 4.1
-------------------------------------------------------------------------------
-- Copied from Chapter4.hs to allow using Integer
maxThree :: Integer -> Integer -> Integer -> Integer
maxThree x y z = (x `max` y) `max` z
-- Using maxThree, but not max
maxFourA :: Integer -> Integer -> Integer -> Integer -> Integer
maxFourA a b c d
| (maxThree a b c) >= d = maxThree a b c
| otherwise = d
-- Using max only
maxFourB :: Integer -> Integer -> Integer -> Integer -> Integer
maxFourB a b c d = max (max a b) (max c d)
-- Using maxThree and max
maxFourC :: Integer -> Integer -> Integer -> Integer -> Integer
maxFourC a b c d = (maxThree a b c) `max` d
maxFour = maxFourC
-- All three should yield the same result
prop_maxFour :: Integer -> Integer -> Integer -> Integer -> Bool
prop_maxFour a b c d =
maxFourA a b c d == maxFourB a b c d &&
maxFourB a b c d == maxFourC a b c d
-------------------------------------------------------------------------------
-- Exercise 4.2
-------------------------------------------------------------------------------
-- Implementation of between had to be done in Chapter4.hs
-------------------------------------------------------------------------------
-- Exercise 4.3
-------------------------------------------------------------------------------
howManyEqual :: Integer -> Integer -> Integer -> Integer
howManyEqual a b c
| a == b && b == c = 3
| a == b = 2
| a == c = 2
| b == c = 2
| otherwise = 0
-------------------------------------------------------------------------------
-- Exercise 4.4
-------------------------------------------------------------------------------
howManyOfFourEqual :: Integer -> Integer -> Integer -> Integer -> Integer
howManyOfFourEqual a b c d
| a == b && b == c && c == d = 4
| otherwise = maxFour (howManyEqual a b c)
(howManyEqual a b d)
(howManyEqual a c d)
(howManyEqual b c d)
-------------------------------------------------------------------------------
-- Exercise 4.8
-------------------------------------------------------------------------------
triArea'' :: Float -> Float -> Float -> Float
triArea'' a b c
| possible = sqrt(s*(s-a)*(s-b)*(s-c))
| otherwise = 0
where
s = (a+b+c)/2
possible = allPositive &&
allSatisfyTriangleInequality
allPositive = a > 0 && b > 0 && c > 0
allSatisfyTriangleInequality = satisfyTriangleInequality a b c &&
satisfyTriangleInequality b a c &&
satisfyTriangleInequality c a b
satisfyTriangleInequality a b c = a < (b + c)
-------------------------------------------------------------------------------
-- Exercise 4.9
-------------------------------------------------------------------------------
maxThreeOccurs :: Integer -> Integer -> Integer -> (Integer, Integer)
maxThreeOccurs a b c = (maxValue, occurences)
where
maxValue = maxThree a b c
occurences = occurrencesOf maxValue
occurrencesOf n
| a == n && b == n && c == n = 3
| a == n && b == n = 2
| a == n && c == n = 2
| b == n && c == n = 2
| otherwise = 1
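-- Illustrative example (not part of the original exercise text): the maximum of
-- 3, 3 and 1 is 3 and it occurs twice, so maxThreeOccurs 3 3 1 == (3, 2).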
-------------------------------------------------------------------------------
-- Exercise 4.11, 4.12, 4.13
-------------------------------------------------------------------------------
data Result = Win | Lose | Draw deriving (Show, Eq)
outcome :: Move -> Move -> Result
outcome a b
| a == beat b = Win
| a == lose b = Lose
| otherwise = Draw
testRPS = TestList [
TestCase (assertEqual "rock beats scissors" Win (outcome Rock Scissors)),
TestCase (assertEqual "paper beats rock" Win (outcome Paper Rock)),
TestCase (assertEqual "scissors beats paper" Win (outcome Scissors Paper)),
TestCase (assertEqual "scissors loses to rock" Lose (outcome Scissors Rock)),
TestCase (assertEqual "rock loses to paper" Lose (outcome Rock Paper)),
TestCase (assertEqual "paper loses to scissors" Lose (outcome Paper Scissors)),
TestCase (assertEqual "draw Scissors" Draw (outcome Scissors Scissors)),
TestCase (assertEqual "draw Paper" Draw (outcome Paper Paper)),
TestCase (assertEqual "draw Rock" Draw (outcome Rock Rock))
]
propCannotBeatAndLoseAgainstTheSame a = beat a /= lose a
-------------------------------------------------------------------------------
-- Exercise 4.15, 4.16
-------------------------------------------------------------------------------
data Temp = Cold | Hot deriving (Eq, Show, Ord)
data Season = Spring | Summer | Autumn | Winter deriving (Eq, Show, Ord)
temperatureIn :: Season -> Temp
temperatureIn Spring = Cold
temperatureIn Summer = Hot
temperatureIn Autumn = Cold
temperatureIn Winter = Cold
data Month = January | February | March | April | May | June |
             July | August | September | October | November | December
deriving (Show, Eq, Ord)
seasonIn :: Month -> Season
seasonIn month
| month <= March = Spring
| month <= August = Summer
| month <= September = Autumn
| otherwise = Winter
-------------------------------------------------------------------------------
-- Exercise 4.17
-------------------------------------------------------------------------------
rangeProduct :: Integer -> Integer -> Integer
rangeProduct m n
| n < m = 0
| m == n = n
| otherwise = (rangeProduct m (n-1)) * n
testRangeProduct = TestList
[ TestCase (assertEqual "for m > n" 0 (rangeProduct 2 1))
, TestCase (assertEqual "for m=n=1" 1 (rangeProduct 1 1))
, TestCase (assertEqual "for m=1,n=2" 2 (rangeProduct 1 2))
, TestCase (assertEqual "for m=1,n=3" 6 (rangeProduct 1 3))
, TestCase (assertEqual "for m=1,n=4" 24 (rangeProduct 1 4))
, TestCase (assertEqual "for m=4,n=4" 4 (rangeProduct 4 4))
, TestCase (assertEqual "for m=4,n=5" 20 (rangeProduct 4 5))
]
-------------------------------------------------------------------------------
-- Exercise 4.18
-------------------------------------------------------------------------------
fac' :: Integer -> Integer
fac' n
| n < 0 = error "fac only defined on natural numbers"
| n == 0 = 1
| otherwise = rangeProduct 1 n
propFac'ShouldBeSameAsFac n
| n >= 0 = fac n == fac' n
| otherwise = True
-------------------------------------------------------------------------------
-- Exercise 4.19
-------------------------------------------------------------------------------
multiplyUsingAdd a b
| a == 0 = 0
| otherwise = multiplyUsingAdd (a-1) b + b
testMultiplyUsingAddition = TestList
[ TestCase (assertEqual "0 * 2" 0 (multiplyUsingAdd 0 2))
, TestCase (assertEqual "2 * 0" 0 (multiplyUsingAdd 2 0))
, TestCase (assertEqual "1 * 2" 2 (multiplyUsingAdd 1 2))
, TestCase (assertEqual "2 * 1" 2 (multiplyUsingAdd 2 1))
, TestCase (assertEqual "3 * 1" 3 (multiplyUsingAdd 3 1))
, TestCase (assertEqual "1 * 3" 3 (multiplyUsingAdd 1 3))
, TestCase (assertEqual "2 * 2" 4 (multiplyUsingAdd 2 2))
, TestCase (assertEqual "7 * 9" 63 (multiplyUsingAdd 7 9))
]
propMultiplyUsingAddShouldEqualMul a b
| a >= 0 && b >= 0 = multiplyUsingAdd a b == a * b
| otherwise = True
-------------------------------------------------------------------------------
-- Exercise 4.20
-------------------------------------------------------------------------------
integerSquareRoot :: Integer -> Integer
integerSquareRoot n = isrInternal n n
where isrInternal n m
| n*n <= m = n
| otherwise = isrInternal (n-1) m
testIntegerSquareRoot = TestList
[ TestCase (assertEqual "4" 2 (integerSquareRoot 4))
, TestCase (assertEqual "15" 3 (integerSquareRoot 15))
, TestCase (assertEqual "16" 4 (integerSquareRoot 16))
]
-------------------------------------------------------------------------------
-- Exercise 4.21
-------------------------------------------------------------------------------
f :: Integer -> Integer
f 0 = 0
f 1 = 44
f 2 = 17
f _ = 0
maxOfFn :: (Integer->Integer) -> Integer -> Integer
maxOfFn f limit
| limit < 0 = error "not defined for limit < 0"
| limit == 0 = f 0
| otherwise = max (f limit) (maxOfFn f (limit-1))
testMaxOfFn = TestList
[ TestCase (assertEqual "f 0 is always the max" 0 (maxOfFn f 0))
, TestCase (assertEqual "f 1 is > f 0" 44 (maxOfFn f 1))
, TestCase (assertEqual "f 2 is < f 1" 44 (maxOfFn f 2))
, TestCase (assertEqual "f 1 is max" 44 (maxOfFn f 99))
]
prop_maxOfFn_mod limit
| limit < 0 = True
| otherwise = (maxOfFn f limit) < divisor
where
divisor = 5
f n = mod n divisor
-------------------------------------------------------------------------------
-- Exercise 4.22
-------------------------------------------------------------------------------
any0TestFn :: Integer -> Integer
any0TestFn 0 = 1
any0TestFn 1 = 99
any0TestFn 2 = 42
any0TestFn 3 = 0
-- any of f 0 to f limit is zero
any0 :: (Integer->Integer) -> Integer -> Bool
any0 f limit
| limit < 0 = error "not defined for limit < 0"
| limit == 0 = f 0 == 0
| otherwise = f limit == 0 || (any0 f (limit-1))
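-- Illustrative examples (not part of the original exercise text), using the
-- test function above: any0 any0TestFn 3 == True (f 3 is 0), while
-- any0 any0TestFn 2 == False (none of f 0, f 1, f 2 is 0).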
-------------------------------------------------------------------------------
-- Exercise 4.23
-------------------------------------------------------------------------------
regions' :: Integer -> Integer
regions' x = (sumFun id x) + 1
prop_regionsImplementations a
| a >=0 = regions a == regions' a
| otherwise = True
-------------------------------------------------------------------------------
-- Exercise 4.33 / 4.34
-------------------------------------------------------------------------------
test_allEqual = TestList
[ TestCase (assertEqual "all equal" True (allEqual 1 1 1))
, TestCase (assertEqual "1st and 2nd eq" False (allEqual 1 1 2))
, TestCase (assertEqual "1st and 3rd eq" False (allEqual 1 2 1))
, TestCase (assertEqual "2nd and 3rd eq" False (allEqual 2 1 1))
, TestCase (assertEqual "all different" False (allEqual 1 2 3))
, TestCase (assertEqual "with 0 Equal" False (allEqual 0 0 1))
, TestCase (assertEqual "with 0 different" True (allEqual 0 0 0))
, TestCase (assertEqual "all neg equal" True (allEqual (-1) (-1) (-1)))
, TestCase (assertEqual "all neg 2 diff" False (allEqual (-1) (-2) (-2)))
, TestCase (assertEqual "all neg different" False (allEqual (-1) (-2) (-3)))
, TestCase (assertEqual "all neg different" False (allEqual (-3) (-2) (-1)))
, TestCase (assertEqual "all neg different" False (allEqual (-3) (-1) (-2)))
, TestCase (assertEqual "m negative" False (allEqual (-1) ( 1) ( 1)))
, TestCase (assertEqual "n negative" False (allEqual ( 1) (-1) (-1)))
, TestCase (assertEqual "p negative" False (allEqual ( 1) ( 1) (-1)))
, TestCase (assertEqual "neg, 0, pos" False (allEqual (-1) ( 0) ( 1)))
, TestCase (assertEqual "neg, pos, 0" False (allEqual (-1) ( 1) ( 0)))
  , TestCase (assertEqual "0, pos, neg" False (allEqual ( 0) ( 1) (-1)))
, TestCase (assertEqual "0, neg, pos" False (allEqual ( 0) (-1) ( 1)))
]
-- solution from book
allEqual m n p = ((m + n + p) == 3*p)
-- different solution for a quickCheck
allEqual' m n p = m == n && n == p
prop_allEqual m n p = allEqual m n p == allEqual' m n p
-- discussion: quickCheck ftw! lots of combinations here, hard to find all
-- failing test cases...
-------------------------------------------------------------------------------
-- Exercise 4.35 / 4.36
-------------------------------------------------------------------------------
test_allDifferent = TestList
-- all different
[ TestCase ( assertEqual "pos, pos, pos" True (allDifferent 1 2 3))
, TestCase ( assertEqual "pos, pos, 0" True (allDifferent 1 2 0))
, TestCase ( assertEqual "pos, pos, neg" True (allDifferent 1 2 (-1)))
, TestCase ( assertEqual "pos, 0, pos" True (allDifferent 1 0 1))
, TestCase ( assertEqual "pos, 0, neg" True (allDifferent 1 0 (-1)))
, TestCase ( assertEqual "pos, neg, pos" True (allDifferent 1 (-1) 2))
, TestCase ( assertEqual "pos, neg, 0" True (allDifferent 1 (-1) 0))
, TestCase ( assertEqual "pos, neg, neg" True (allDifferent 1 (-1) (-2)))
, TestCase ( assertEqual "0, pos, pos" True (allDifferent 0 1 2))
, TestCase ( assertEqual "0, pos, neg" True (allDifferent 0 1 (-1)))
, TestCase ( assertEqual "0, neg, pos" True (allDifferent 0 (-1) 1))
, TestCase ( assertEqual "0, neg, neg" True (allDifferent 0 (-1) (-2)))
, TestCase ( assertEqual "neg, pos, pos" True (allDifferent (-1) 1 2))
, TestCase ( assertEqual "neg, pos, 0" True (allDifferent (-1) 1 0))
, TestCase ( assertEqual "neg, pos, neg" True (allDifferent (-1) 1 (-1)))
, TestCase ( assertEqual "neg, 0, pos" True (allDifferent (-1) 0 (1)))
, TestCase ( assertEqual "neg, 0, neg" True (allDifferent (-1) (0) (-2)))
, TestCase ( assertEqual "neg, neg, pos" True (allDifferent (-1) (-2) 1))
, TestCase ( assertEqual "neg, neg, 0" True (allDifferent (-1) (-2) 0))
, TestCase ( assertEqual "neg, neg, neg" True (allDifferent (-1) (-2) (-3)))
-- two or more zeros -> can't be all different
, TestCase ( assertEqual "0, 0, pos" False (allDifferent 0 0 1))
, TestCase ( assertEqual "0, 0, neg" False (allDifferent 0 0 (-1)))
, TestCase ( assertEqual "pos, 0, 0" False (allDifferent 1 0 0))
, TestCase ( assertEqual "neg, 0, 0" False (allDifferent (-1) 0 0))
, TestCase ( assertEqual "0, pos, 0" False (allDifferent 0 1 0))
, TestCase ( assertEqual "0, neg, 0" False (allDifferent 0 (-1) 0))
, TestCase ( assertEqual "0, 0, 0" False (allDifferent 0 0 0))
-- other cases of two equal
, TestCase ( assertEqual "all pos, a b b" False (allDifferent 1 2 2))
, TestCase ( assertEqual "all pos, a b a" False (allDifferent 1 2 1))
, TestCase ( assertEqual "all pos, b b a" False (allDifferent 2 2 1))
, TestCase ( assertEqual "all neg, a b b" False (allDifferent (-1) (-2) (-2)))
, TestCase ( assertEqual "all neg, a b a" False (allDifferent (-1) (-2) (-1)))
, TestCase ( assertEqual "all neg, b b a" False (allDifferent (-2) (-2) (-1)))
-- there are even more but I'm a bit bored now ;)
]
-- attempt from book
allDifferent :: Integer -> Integer -> Integer -> Bool
allDifferent m n p = (m/=n) && (n/=p)
| c089/haskell-craft3e | Chapter4Exercises.hs | mit | 15,412 | 0 | 14 | 3,774 | 4,654 | 2,393 | 2,261 | 227 | 1 |
import Cook.Facts
import Cook.Recipe
import Cook.Recipe.Util
main :: IO ()
main = do
testSsh
{-testError-}
testError :: IO ()
testError = runRecipe $ do
runProc0 "uptime"
runProc0 "true"
--getHTTP "foo"
withRecipeName "foo.bar.xxx.in" $
withRecipeName "foo.bar.here" $
withRecipeName "foo.bar.yyy.there" $
failWith "WTF"
testSsh :: IO ()
testSsh = runRecipe $
withSsh "alarm" "192.168.1.155" $ do
runProc0 "uname -a"
runProc0 "date"
runProc0 "uptime"
withCd "/" $ do
runProc "echo" ["begin"]
execCwd $ proc0 "ls"
runProc0 "ls"
runSh "ls /foo"
runProc "echo" ["end"]
| jimenezrick/cook.hs | test/Playground.hs | mit | 726 | 0 | 12 | 240 | 195 | 88 | 107 | 26 | 1 |
module Javelin.Interpreter.Termination where
import Control.Monad.IO.Class (liftIO)
import Javelin.Capability.Classes
import Javelin.Interpreter.JVMApp
import System.Exit (die)
instance Termination JVM where
terminate = liftIO . die . show
| antonlogvinenko/javelin | src/Javelin/Interpreter/Termination.hs | mit | 244 | 0 | 7 | 28 | 62 | 38 | 24 | 7 | 0 |
{-# LANGUAGE OverloadedStrings #-}
import Test.HUnit
import Data.Binary
import qualified Data.Text as T
import TestFileUtils
import CladeModel
import NucModel
import Alignment
-- Numeric testing of NucModels:
small_prob = 0.0001 :: Double -- need to be specific, else logBase complains
small_score = round (scale_factor * (logBase 10 small_prob))
scale_factor = 1000 :: Double
test_aln1 = [
AlnRow "my-OTU" "ATGC-" 1,
AlnRow "my-OTU" "AACG-" 1,
AlnRow "my-OTU" "AACTN" 1,
AlnRow "my-OTU" "ATG--" 1,
AlnRow "my-OTU" "ATAAT" 1
]
aln1Mod = alnToNucModel small_prob scale_factor "my-OTU" test_aln1
-- There are 5 out of 5 'A's in the first column, hence 5/5 ... 1
test_1 = "A@1(aln1)" ~: round (scale_factor * (logBase 10 (5/5))) ~?=
nucScoreOf aln1Mod 'A' 1
-- There are 0 out of 5 'C's in the first column: use small_prob
test_2 = "C@1(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod 'C' 1)
-- and so on...
test_3 = "G@1(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod 'G' 1)
test_4 = "T@1(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod 'T' 1)
-- 'D' is for 'dash'
test_5 = "D@1(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod '-' 1)
test_6 = "A@2(aln1)" ~: (round (scale_factor * (logBase 10 (2/5)))) ~?= (nucScoreOf aln1Mod 'A' 2)
test_7 = "C@2(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod 'C' 2)
test_8 = "G@2(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod 'G' 2)
test_9 = "T@2(aln1)" ~: (round (scale_factor * (logBase 10 (3/5)))) ~?= (nucScoreOf aln1Mod 'T' 2)
test_10 = "D@2(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod '-' 2)
test_11 = "A@3(aln1)" ~: (round (scale_factor * (logBase 10 (1/5)))) ~?= (nucScoreOf aln1Mod 'A' 3)
test_12 = "C@3(aln1)" ~: (round (scale_factor * (logBase 10 (2/5)))) ~?= (nucScoreOf aln1Mod 'C' 3)
test_13 = "G@3(aln1)" ~: (round (scale_factor * (logBase 10 (2/5)))) ~?= (nucScoreOf aln1Mod 'G' 3)
test_14 = "T@3(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod 'T' 3)
test_15 = "D@3(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod '-' 3)
test_16 = "A@4(aln1)" ~: (round (scale_factor * (logBase 10 (1/5)))) ~?= (nucScoreOf aln1Mod 'A' 4)
test_17 = "C@4(aln1)" ~: (round (scale_factor * (logBase 10 (1/5)))) ~?= (nucScoreOf aln1Mod 'C' 4)
test_18 = "G@4(aln1)" ~: (round (scale_factor * (logBase 10 (1/5)))) ~?= (nucScoreOf aln1Mod 'G' 4)
test_19 = "T@4(aln1)" ~: (round (scale_factor * (logBase 10 (1/5)))) ~?= (nucScoreOf aln1Mod 'T' 4)
test_20 = "D@4(aln1)" ~: (round (scale_factor * (logBase 10 (1/5)))) ~?= (nucScoreOf aln1Mod '-' 4)
test_21 = "A@5(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod 'A' 5)
test_22 = "C@5(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod 'C' 5)
test_23 = "G@5(aln1)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod 'G' 5)
-- we _always_ divide by the number of sequences in the aln, even when non-{ATGC-} residues occur (like N)
test_24 = "T@5(aln1)" ~: (round (scale_factor * (logBase 10 (1/5)))) ~?= (nucScoreOf aln1Mod 'T' 5)
test_25 = "D@5(aln1)" ~: (round (scale_factor * (logBase 10 (3/5)))) ~?= (nucScoreOf aln1Mod '-' 5)
-- Test Binary functions, i.e. storage as binary file to disk, and reading from
-- it, then check every single value in the model, just as above. These cases
-- are all under the same test, because I don't want to write and read the file
-- every time.
test_26 = TestCase (do
removeIfExists "aln1Mod.bcls"
encodeFile "aln1Mod.bcls" aln1Mod
aln2Mod <- decodeFile "aln1Mod.bcls"
assertEqual "store-read" aln1Mod aln2Mod
(round (scale_factor * (logBase 10 (5/5)))) @=? (nucScoreOf aln2Mod 'A' 1)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod 'C' 1)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod 'G' 1)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod 'T' 1)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod '-' 1)
(round (scale_factor * (logBase 10 (2/5)))) @=? (nucScoreOf aln2Mod 'A' 2)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod 'C' 2)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod 'G' 2)
(round (scale_factor * (logBase 10 (3/5)))) @=? (nucScoreOf aln2Mod 'T' 2)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod '-' 2)
(round (scale_factor * (logBase 10 (1/5)))) @=? (nucScoreOf aln2Mod 'A' 3)
(round (scale_factor * (logBase 10 (2/5)))) @=? (nucScoreOf aln2Mod 'C' 3)
(round (scale_factor * (logBase 10 (2/5)))) @=? (nucScoreOf aln2Mod 'G' 3)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod 'T' 3)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod '-' 3)
(round (scale_factor * (logBase 10 (1/5)))) @=? (nucScoreOf aln2Mod 'A' 4)
(round (scale_factor * (logBase 10 (1/5)))) @=? (nucScoreOf aln2Mod 'C' 4)
(round (scale_factor * (logBase 10 (1/5)))) @=? (nucScoreOf aln2Mod 'G' 4)
(round (scale_factor * (logBase 10 (1/5)))) @=? (nucScoreOf aln2Mod 'T' 4)
(round (scale_factor * (logBase 10 (1/5)))) @=? (nucScoreOf aln2Mod '-' 4)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod 'A' 5)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod 'C' 5)
(round (scale_factor * (logBase 10 small_prob))) @=? (nucScoreOf aln2Mod 'G' 5)
(round (scale_factor * (logBase 10 (1/5)))) @=? (nucScoreOf aln2Mod 'T' 5)
(round (scale_factor * (logBase 10 (3/5)))) @=? (nucScoreOf aln2Mod '-' 5)
)
-- Computes the expected score from the list of positional frequencies. Short
-- name to save some space.
--
es ps = sum $ map (round . (scale_factor *) . (logBase 10)) ps
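-- A worked example of the helper (illustrative only; assumes the scale_factor
-- of 1000 defined above):
--   es [1/2, 1/4] = round (1000 * logBase 10 0.5) + round (1000 * logBase 10 0.25)
--                 = (-301) + (-602) = -903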
test_27 = "AAAAA" ~: (nucScoreSeq aln1Mod "AAAAA") ~?= (es [5/5, 2/5, 1/5, 1/5, small_prob])
test_28 = "-----" ~: (nucScoreSeq aln1Mod "-----") ~?= (es [small_prob, small_prob, small_prob, 1/5, 3/5])
test_29 = "ATCG-" ~: (nucScoreSeq aln1Mod "ATCG-") ~?= (es [5/5, 3/5, 2/5, 1/5, 3/5])
test_30 = "ATCGN" ~: (nucScoreSeq aln1Mod "ATCGN") ~?= (es [5/5, 3/5, 2/5, 1/5, small_prob])
-- A dot stands for a masked position, and has a score of 0
test_33 = "mask" ~: (nucScoreSeq aln1Mod ".....") ~?= 0
test_34 = "pmask" ~: (nucScoreSeq aln1Mod ".TCG.") ~?= (es [3/5, 2/5, 1/5])
-- Test model length
test_31 = "modLength" ~: (nucModLength aln1Mod) ~?= 5
-- Tests for empty alignments
--
aln2Mod = alnToNucModel small_prob scale_factor "anonymous" []
test_32 = "emptyAln score" ~: (nucScoreSeq aln2Mod "AATGC") ~?= 5 * small_score
-- Tests models with weighted sequences
--
-- This is exactly the same as aln1, so we can happily re-use those tests.
-- TODO: once the tests pass, remove this aln - aln1 and aln3 are now of the same
-- type, apart from having the same value.
test_aln3 = [
AlnRow "my_OTU" "ATGC-" 1,
AlnRow "my_OTU" "AACG-" 1,
AlnRow "my_OTU" "AACTN" 1,
AlnRow "my_OTU" "ATG--" 1,
AlnRow "my_OTU" "ATAAT" 1
]
aln3Mod = alnToNucModel small_prob scale_factor "my_OTU" test_aln3
test_40 = "A@1(aln3)" ~: (round (scale_factor * (logBase 10 (5/5)))) ~?= (nucScoreOf aln3Mod 'A' 1)
test_41 = "C@1(aln3)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln3Mod 'C' 1)
test_42 = "G@1(aln3)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln3Mod 'G' 1)
test_43 = "T@1(aln3)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln3Mod 'T' 1)
test_44 = "D@1(aln3)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln1Mod '-' 1)
-- Now, we use non-unit weights.
test_aln4 = [
AlnRow "my_OTU_wght" "ATGC-" 2,
AlnRow "my_OTU_wght" "AACG-" 1,
AlnRow "my_OTU_wght" "AACTN" 3,
AlnRow "my_OTU_wght" "ATG--" 2,
AlnRow "my_OTU_wght" "ATAAT" 1
]
aln4Mod = alnToNucModel small_prob scale_factor "my_OTU_wght" test_aln4
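-- The weights above sum to 9 (2+1+3+2+1), which is why the expected
-- frequencies in the tests below use a denominator of 9 instead of 5.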
test_51 = "A@1(aln4)" ~: (round (scale_factor * (logBase 10 (9/9)))) ~?= (nucScoreOf aln4Mod 'A' 1)
test_52 = "C@1(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod 'C' 1)
test_53 = "G@1(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod 'G' 1)
test_54 = "T@1(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod 'T' 1)
test_55 = "D@1(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod '-' 1)
test_56 = "A@2(aln4)" ~: (round (scale_factor * (logBase 10 (4/9)))) ~?= (nucScoreOf aln4Mod 'A' 2)
test_57 = "C@2(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod 'C' 2)
test_58 = "G@2(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod 'G' 2)
test_59 = "T@2(aln4)" ~: (round (scale_factor * (logBase 10 (5/9)))) ~?= (nucScoreOf aln4Mod 'T' 2)
test_60 = "D@2(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod '-' 2)
test_61 = "A@3(aln4)" ~: (round (scale_factor * (logBase 10 (1/9)))) ~?= (nucScoreOf aln4Mod 'A' 3)
test_62 = "C@3(aln4)" ~: (round (scale_factor * (logBase 10 (4/9)))) ~?= (nucScoreOf aln4Mod 'C' 3)
test_63 = "G@3(aln4)" ~: (round (scale_factor * (logBase 10 (4/9)))) ~?= (nucScoreOf aln4Mod 'G' 3)
test_64 = "T@3(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod 'T' 3)
test_65 = "D@3(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod '-' 3)
test_66 = "A@4(aln4)" ~: (round (scale_factor * (logBase 10 (1/9)))) ~?= (nucScoreOf aln4Mod 'A' 4)
test_67 = "C@4(aln4)" ~: (round (scale_factor * (logBase 10 (2/9)))) ~?= (nucScoreOf aln4Mod 'C' 4)
test_68 = "G@4(aln4)" ~: (round (scale_factor * (logBase 10 (1/9)))) ~?= (nucScoreOf aln4Mod 'G' 4)
test_69 = "T@4(aln4)" ~: (round (scale_factor * (logBase 10 (3/9)))) ~?= (nucScoreOf aln4Mod 'T' 4)
test_70 = "D@4(aln4)" ~: (round (scale_factor * (logBase 10 (2/9)))) ~?= (nucScoreOf aln4Mod '-' 4)
test_71 = "A@5(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod 'A' 5)
test_72 = "C@5(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod 'C' 5)
test_73 = "G@5(aln4)" ~: (round (scale_factor * (logBase 10 small_prob))) ~?= (nucScoreOf aln4Mod 'G' 5)
test_74 = "T@5(aln4)" ~: (round (scale_factor * (logBase 10 (1/9)))) ~?= (nucScoreOf aln4Mod 'T' 5)
test_75 = "D@5(aln4)" ~: (round (scale_factor * (logBase 10 (5/9)))) ~?= (nucScoreOf aln4Mod '-' 5)
-- Test the scoring function on a model made from a weighted alignment.
--
-- Computes the expected score from the list of positional frequencies. Short
-- name to save some space.
test_80 = "AAAAA" ~: (nucScoreSeq aln4Mod "AAAAA") ~?= (es [9/9, 4/9, 1/9, 1/9, small_prob])
test_81 = "-----" ~: (nucScoreSeq aln4Mod "-----") ~?= (es [small_prob, small_prob, small_prob, 2/9, 5/9])
test_82 = "ATCG-" ~: (nucScoreSeq aln4Mod "ATCG-") ~?= (es [9/9, 5/9, 4/9, 1/9, 5/9])
test_83 = "ATCGN" ~: (nucScoreSeq aln4Mod "ATCGN") ~?= (es [9/9, 5/9, 4/9, 1/9, small_prob])
-- Collect all the tests for the test runner
tests = TestList [
TestLabel "nucScoreOf" test_1
, TestLabel "nucScoreOf" test_2
, TestLabel "nucScoreOf" test_3
, TestLabel "nucScoreOf" test_4
, TestLabel "nucScoreOf" test_5
, TestLabel "nucScoreOf" test_6
, TestLabel "nucScoreOf" test_7
, TestLabel "nucScoreOf" test_8
, TestLabel "nucScoreOf" test_9
, TestLabel "nucScoreOf" test_10
, TestLabel "nucScoreOf" test_11
, TestLabel "nucScoreOf" test_12
, TestLabel "nucScoreOf" test_13
, TestLabel "nucScoreOf" test_14
, TestLabel "nucScoreOf" test_15
, TestLabel "nucScoreOf" test_16
, TestLabel "nucScoreOf" test_17
, TestLabel "nucScoreOf" test_18
, TestLabel "nucScoreOf" test_19
, TestLabel "nucScoreOf" test_20
, TestLabel "nucScoreOf" test_21
, TestLabel "nucScoreOf" test_22
, TestLabel "nucScoreOf" test_23
, TestLabel "nucScoreOf" test_24
, TestLabel "nucScoreOf" test_25
, TestLabel "nucScoreOf" test_26
, TestLabel "nucScoreSeq" test_27
, TestLabel "nucScoreSeq" test_28
, TestLabel "nucScoreSeq" test_29
, TestLabel "nucScoreSeq" test_30
, TestLabel "nucScoreSeq" test_31
, TestLabel "nucScoreSeq" test_32
, TestLabel "nucScoreSeq" test_33
, TestLabel "nucScoreSeq" test_34
, TestLabel "wgt nucScoreOf" test_40
, TestLabel "wgt nucScoreOf" test_41
, TestLabel "wgt nucScoreOf" test_42
, TestLabel "wgt nucScoreOf" test_43
, TestLabel "wgt nucScoreOf" test_44
, TestLabel "wgt nucScoreOf" test_51
, TestLabel "wgt nucScoreOf" test_52
, TestLabel "wgt nucScoreOf" test_53
, TestLabel "wgt nucScoreOf" test_54
, TestLabel "wgt nucScoreOf" test_55
, TestLabel "wgt nucScoreOf" test_56
, TestLabel "wgt nucScoreOf" test_57
, TestLabel "wgt nucScoreOf" test_58
, TestLabel "wgt nucScoreOf" test_59
, TestLabel "wgt nucScoreOf" test_60
, TestLabel "wgt nucScoreOf" test_61
, TestLabel "wgt nucScoreOf" test_62
, TestLabel "wgt nucScoreOf" test_63
, TestLabel "wgt nucScoreOf" test_64
, TestLabel "wgt nucScoreOf" test_65
, TestLabel "wgt nucScoreOf" test_66
, TestLabel "wgt nucScoreOf" test_67
, TestLabel "wgt nucScoreOf" test_68
, TestLabel "wgt nucScoreOf" test_69
, TestLabel "wgt nucScoreOf" test_70
, TestLabel "wgt nucScoreOf" test_71
, TestLabel "wgt nucScoreOf" test_72
, TestLabel "wgt nucScoreOf" test_73
, TestLabel "wgt nucScoreOf" test_74
, TestLabel "wgt nucScoreOf" test_75
, TestLabel "nucScoreSeq" test_80
, TestLabel "nucScoreSeq" test_81
, TestLabel "nucScoreSeq" test_82
, TestLabel "nucScoreSeq" test_83
]
main = do
runTestTT tests
| tjunier/mlgsc | test/TestNucModel.hs | mit | 15,421 | 0 | 17 | 3,831 | 5,182 | 2,713 | 2,469 | 203 | 1 |
module Machine (execute) where
import Types
import qualified Data.Map as Map
lookup :: String -> Env -> Value
lookup s e = Map.findWithDefault (error $ "Unbound name: " ++ s ++ " in " ++ show e) s e
bind :: String -> Value -> Env -> Env
bind = Map.insert
makeClosure :: Env -> Code -> Value
makeClosure e cs = VClosure (e, cs)
execute :: Monad m => (Stack -> m Stack) -> (Env, Code) -> Stack -> m Stack
execute render (e, []) s = return s
execute render (e, c:cs) s = do (e2, s2) <- step (e, s, c)
execute render (e2, cs) s2
where
step (e, s, (PushConstant v)) = return (e, v:s)
step (e, s, (PushClosure c)) = return (e, (makeClosure e c):s)
step (e, s, (PushArray c)) = do s2 <- execute render (e, c) []
return (e, (VArray s2):s)
step (e, s, (Lookup name)) = return (e, (Machine.lookup name e):s)
step (e, v:s, (Bind name)) = return ((bind name v e), s)
step (e, s, (Invoke name f)) = return (e, f s)
step (e, (VClosure c):s, Apply) = do s2 <- execute render c s
return (e, s2)
step (e, (VClosure c2):(VClosure c1):(VBool b):s, If) =
do ss <- execute render (if b then c1 else c2) s
return (e, ss)
step (e, s, Render) = do s2 <- render s
return (e, s2)
step (e, s, i) = error $ "Error in step:" ++ show s ++ show i | jchl/jtrace | src/Machine.hs | mit | 1,448 | 0 | 13 | 482 | 748 | 398 | 350 | 28 | 11 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Tests.Writers.PseudoPod (tests) where
import Test.Framework
import Text.Pandoc.Builder
import Text.Pandoc
import Tests.Helpers
import Tests.Arbitrary()
pseudopod :: (ToString a, ToPandoc a) => a -> String
pseudopod = writePseudoPod defaultWriterOptions . toPandoc
{-
"my test" =: X =?> Y
is shorthand for
test pseudopod "my test" $ X =?> Y
which is in turn shorthand for
test pseudopod "my test" (X,Y)
-}
infix 4 =:
(=:) :: (ToString a, ToPandoc a)
=> String -> (a, String) -> Test
(=:) = test pseudopod
tests :: [Test]
tests = [ "escaped > in string"
=: (para "string with > in it" )
=?> "string with E<gt> in it"
]
| castaway/pandoc | src/Tests/Writers/PseudoPod.hs | gpl-2.0 | 726 | 0 | 9 | 164 | 160 | 94 | 66 | 17 | 1 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module Modular.Seed where
import Network.XmlRpc.Internals
import Autolib.ToDoc
data Seed =
Seed { contents :: Int
}
instance XmlRpcType ( Seed ) where
toValue d = toValue [("contents",toValue (contents d))
]
fromValue v = do
t <- fromValue v
c <- getField "contents" t
return $ Seed { contents = c }
getType _ = TStruct
$(derives [makeToDoc] [''Seed])
-- local variables:
-- mode: haskell
-- end
| Erdwolf/autotool-bonn | src/Modular/Seed.hs | gpl-2.0 | 498 | 5 | 11 | 120 | 155 | 82 | 73 | 14 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
{-# LANGUAGE FlexibleContexts #-}
module Web.Google.API.Rest ( GoogleApiRequest(..)
, GoogleApiConfig(..)
, GoogleApiAuthConfig(..)
, GoogleApiResult(..)
, def
, methodGet
, runApiRequest) where
import Control.Applicative
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Data.Aeson as Json
import qualified Data.Attoparsec as P
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as C8
import Data.Conduit
import Data.Conduit.Attoparsec
import Data.Map (Map)
import Network.HTTP.Conduit hiding (def)
import qualified Network.HTTP.Conduit as HTTP
import Network.HTTP.Types
import Data.Default
data GoogleApiAuthConfig = GoogleApiUnauthenticated
| GoogleApiKey BS.ByteString
| GoogleApiOAuth
deriving (Show, Eq)
data GoogleApiResult a = Success a
| Failure BS.ByteString
deriving (Show)
instance Functor GoogleApiResult where
fmap f (Success a) = Success (f a)
fmap _ (Failure msg) = Failure msg
data GoogleApiConfig = GApiConf
{ getApiAuth :: GoogleApiAuthConfig
, getApiEndpoint :: BS.ByteString
} deriving (Show, Eq)
instance Default GoogleApiConfig where
def = GApiConf GoogleApiUnauthenticated "www.googleapis.com"
runApiRequest :: (GoogleApiRequest a b) => GoogleApiConfig -> a -> IO (GoogleApiResult b)
runApiRequest config req =
withManager $ \manager -> do
let request = buildHttpRequest config req
bodySource <- responseBody <$> http request manager
parsedBody <- bodySource $$+- sinkParser (Json.fromJSON <$> Json.json)
case parsedBody of
Json.Success resp -> return $ Success resp
_ -> return $ Failure ""
class (Json.FromJSON b) => GoogleApiRequest a b | a -> b where
getPath :: a -> BS.ByteString
getQuery :: a -> Query
getMethod :: a -> Method
{-
class GoogleApi m where
request :: (MonadResource m, MonadBaseControl IO m, MonadThrow m, MonadUnsafeIO m, GoogleApiRequest a b) => a -> m (Maybe b)
getConfig :: m GoogleApiConfig
getFromConfig :: (GoogleApi m, Functor m) => (GoogleApiConfig -> a) -> m a
getFromConfig = (<$> getConfig)
newtype GoogleApiT m a = GoogleApiT { runGoogleApiT :: GoogleApiConfig -> m a }
instance Monad m => GoogleApi (GoogleApiT m) where
getConfig = GoogleApiT (\conf -> return conf)
request req = withManager $ \manager -> do
conf <- getConfig
let request = buildHttpRequest conf req
bodySource <- responseBody <$> http request manager
parsedBody <- bodySource $$+- sinkParser (Json.fromJSON <$> Json.json)
case parsedBody of
Json.Success resp -> return $ Just resp
_ -> return Nothing
instance Monad m => Monad (GoogleApiT m) where
return a = GoogleApiT $ \conf -> return a
x >>= f = GoogleApiT $ \conf -> do
x_val <- runGoogleApiT x conf
runGoogleApiT (f x_val) conf
-}
buildHttpRequest :: (GoogleApiRequest a b) => GoogleApiConfig -> a -> Request m
buildHttpRequest api req
= HTTP.def { method = getMethod req
, host = getApiEndpoint api
, port = 443
, secure = True
, path = getPath req
, queryString = renderQuery False query
}
where
query = case getApiAuth api of
GoogleApiUnauthenticated -> getQuery req
GoogleApiKey key -> ("key", Just key) : getQuery req
GoogleApiOAuth -> undefined
simpleRequest :: (Json.FromJSON b, GoogleApiRequest a b) => GoogleApiConfig -> a -> IO (Maybe b)
simpleRequest api req = do
let request = buildHttpRequest api req
res <- withManager $ httpLbs request
return . Json.decode . responseBody $ res
httpRequest :: (MonadResource m, MonadBaseControl IO m, GoogleApiRequest a b) => GoogleApiConfig -> a -> Manager -> m (Maybe b)
httpRequest api req manager = do
let request = buildHttpRequest api req
bodySource <- responseBody <$> http request manager
parsedBody <- bodySource $$+- sinkParser (Json.fromJSON <$> Json.json)
case parsedBody of
Json.Success resp -> return $ Just resp
_ -> return Nothing
| periodic/youtube-haskell | Web/Google/API/Rest.hs | gpl-3.0 | 4,594 | 0 | 14 | 1,326 | 905 | 486 | 419 | 77 | 3 |
-- This file is part of KSQuant2.
-- Copyright (c) 2010 - 2011, Kilian Sprotte. All rights reserved.
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
import Distribution.Simple
main = defaultMain
| kisp/ksquant2 | old/Setup.hs | gpl-3.0 | 792 | 1 | 4 | 139 | 26 | 18 | 8 | 2 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.LevelPuzzleMode.Do.Grid.Modify
(
controlPathPlay,
--controlPathSpecialComplete,
) where
import MyPrelude
import Game
import Game.Grid.GridWorld
import Game.Grid.Modify
import Game.LevelPuzzleMode.LevelPuzzleWorld
controlPathPlay :: s -> GridWorld -> LevelPuzzleWorld ->
MEnv' (s, GridWorld, LevelPuzzleWorld)
controlPathPlay = \s grid lvl -> do
-- camera input
grid' <- inputCamera grid
-- path input
grid'' <- case levelpuzzleIsPuzzle lvl of
False -> inputPathContinue grid'
True -> inputPathWait grid'
return (s, grid'', lvl)
| karamellpelle/grid | designer/source/Game/LevelPuzzleMode/Do/Grid/Modify.hs | gpl-3.0 | 1,384 | 0 | 12 | 295 | 160 | 98 | 62 | 16 | 2 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module Main where
------------------------------------------------------------------------------
import Control.Lens
import qualified Data.Map as Map
import qualified Data.Vector as V
------------------------------------------------------------------------------
import Graphics.Gloss
import Graphics.Gloss.Interface.IO.Game
------------------------------------------------------------------------------
import Data.Ephys.GlossPictures
import Data.Ephys.OldMWL.ParsePFile
import Data.Ephys.Position
import Data.Ephys.TrackPosition
------------------------------------------------------------------------------
-- (Current time, current pos, track segs, occupancy)
type World = (Float, Position, Track, Field)
------------------------------------------------------------------------------
-- The default track for caillou's Nov 28 sample data.
myTrack :: Track
myTrack = circularTrack (0,0) 0.75 0 0.2 0.25
------------------------------------------------------------------------------
gScale :: Float
gScale = 200
------------------------------------------------------------------------------
main :: IO ()
main = playIO (InWindow "My Window" (400,400) (10,10))
white
60
(0,p0,t0,f0)
(drawWorld :: World -> IO Picture)
(eventUpdateWorld :: Event -> World -> IO World)
(timeUpdateWorld)
where p0 = Position 0 (Location 0 0 0)
(Angle 0 0 0) 0 0 ConfSure someZeros someZeros
(-1/0) (Location 0 0 0):: Position
t0 = myTrack
f0 = V.replicate (V.length $ allTrackPos t0) 0 :: Field
someZeros = take 20 . repeat $ 0
eventUpdateWorld :: Event -> World -> IO World
eventUpdateWorld (EventMotion (x',y')) (now, p,t,occ) =
let --p' = Position 0 (Location ((r2 x')/ r2 gScale) ((r2 y') / r2 gScale) (p^.location.z))
-- (Angle 0 0 0) 0 0 ConfSure
p' = stepPos p (realToFrac now)
(Location ((realToFrac x')/realToFrac gScale)
((realToFrac y') / realToFrac gScale)
(p^.location.z))
(Angle 0 0 0) ConfSure
occ' = updateField (+) occ (posToField t p (PosGaussian 0.4))
in return (now, p',t,occ')
eventUpdateWorld (EventKey _ _ _ _) w = return w
eventUpdateWorld (EventResize _) w = return w
timeUpdateWorld :: Float -> World -> IO World
timeUpdateWorld t (now,p,track,occ) = return (now+t,p,track,occ)
drawWorld :: World -> IO Picture
drawWorld (now,p,t,occ) =
do print p
return . Scale gScale gScale $
pictures [drawTrack t, drawNormalizedField (labelField myTrack occ), drawPos p] | imalsogreg/tetrode-ephys | samples/watchpos.hs | gpl-3.0 | 2,670 | 0 | 15 | 548 | 713 | 395 | 318 | -1 | -1 |
module CacheFile where
import System.FilePath
indexFile :: String
indexFile = "00-index.tar.gz"
hackportDir :: String
hackportDir = ".hackport"
cacheFile :: FilePath -> FilePath
cacheFile tree = tree </> hackportDir </> indexFile
| Heather/hackport | CacheFile.hs | gpl-3.0 | 234 | 0 | 6 | 34 | 54 | 31 | 23 | 8 | 1 |
module Mechanism.Profile.Sequence where
import qualified Data.Foldable as F
import Data.Sequence (Seq)
import qualified Data.Sequence as S
type Coll = Seq
fromList :: [a] -> Seq a
fromList = S.fromList
replicateM :: (Monad m) => Int -> m a -> m (Seq a)
replicateM = S.replicateM
zip :: Seq a -> Seq b -> Seq (a, b)
zip = S.zip
unzip :: Seq (a, b) -> (Seq a, Seq b)
unzip = F.foldr (\(a,b) ~(as,bs) -> (a S.<| as, b S.<| bs)) (S.empty,S.empty)
(!) :: Seq a -> Int -> a
(!) = S.index
ifilter :: (Int -> a -> Bool) -> Seq a -> Seq a
ifilter f = fmap snd . S.filter (uncurry f) . S.mapWithIndex (,)
imap :: (Int -> a -> b) -> Seq a -> Seq b
imap = S.mapWithIndex
zipWith :: (a -> b -> c) -> Seq a -> Seq b -> Seq c
zipWith = S.zipWith
replicate :: Int -> a -> Seq a
replicate = S.replicate
length :: Seq a -> Int
length = S.length
(<|) :: a -> Seq a -> Seq a
(<|) = (S.<|)
filterM :: (Monad m) => (a -> m Bool) -> Seq a -> m (Seq a)
filterM p as =
case S.viewl as of
S.EmptyL -> return S.empty
x S.:< xs -> do
flg <- p x
ys <- filterM p xs
return
(if flg
then x S.<| ys
else ys)
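-- Illustrative example (not from the original module):
--   filterM (\x -> Just (x > 1)) (fromList [1, 2, 3]) == Just (fromList [2, 3])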
partition :: (a -> Bool) -> Seq a -> (Seq a, Seq a)
partition = S.partition
| pseudonom/haskell-mechanism | src/Mechanism/Profile/Sequence.hs | agpl-3.0 | 1,234 | 0 | 13 | 335 | 655 | 347 | 308 | 40 | 3 |
module Hypergraph where
import FiniteMap
import Set
-- A hypergraph is a family of sets which are subsets of a vertex set X
-- This implementation uses an edge list representation that
-- directly corresponds to the mathematical definition
-- n index type, w weight type
-- type Hypergraph n w = Array n [(n,w)]
-- show functions for set and finitemap
instance (Show a) => Show (Set a) where
show set = show (setToList set)
instance (Show a, Show b) => Show (FiniteMap a b) where
show vs = show (fmToList vs)
type HEdges l = FiniteMap l (Set l)
data Hypergraph l = Hypergraph (HEdges l) (HEdges l) deriving Show
-- hgraph constructor
-- takes a list of edges
hgraph list = let edges = hEdges list in
Hypergraph edges (dualEdges edges)
-- hgraph edge_list
--hEdges :: [ [a] ] -> HEdges a
hEdges el = listToFM (zip [1..length el] (map mkSet el))
-- = Hypergraph (listToFM . mkSet (edge_list)) (listToFM . mkSet (edge_list))
-- compute dual of a given hgraph
dualEdge (edgeIx, hEdge) = map (\x->(x, mkSet [edgeIx])) (setToList hEdge)
dualEdges hEdges = ( (addListToFM_C union emptyFM) . concat .
(map dualEdge) . fmToList )
hEdges
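-- Illustrative example (hypothetical input, using Char vertices):
--   hgraph ["ab", "bc"] builds the edge map {1 -> {'a','b'}, 2 -> {'b','c'}}
--   whose dual (vertex -> incident edge indices) is
--   {'a' -> {1}, 'b' -> {1,2}, 'c' -> {2}}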
--hVertices el = (zip (map mkSet el) [1..length el])
-- hyper vertices are the dual of the hypergraph
--hVertices hEdges = (foldFM uni emptyFM). (mapFM dual)) hEdges
-- where uni = plusFM_C
--dual hEdges = ( listToFM . map (\(x,y)->(y,x)) . fmToList) hEdges
| examachine/bitdrill | src/haskell/Hypergraph.hs | agpl-3.0 | 1,440 | 6 | 11 | 296 | 319 | 173 | 146 | 16 | 1 |
{-|
Module : $Header$
Copyright : (c) 2015 Edward O'Callaghan
License : LGPL-2.1
Maintainer : [email protected]
Stability : provisional
Portability : portable
This module encapsulates the commonly used foundational libftdi
functions in a monadic style. This style avoids explicitly passing
indirect references to the device handle around between calls.
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# OPTIONS_HADDOCK prune #-}
module LibFtdi.LibFtdi ( withFtdi
, ftdiInit
, ftdiDeInit
, DeviceHandle(..)
, VendorID(..)
, ProductID(..)
, FtdiError(..)
, FtdiReturnType(..)
, ftdiErrorValue
, ftdiErrorTy
, ftdiSetInterface
, ftdiUSBOpen
, ftdiUSBOpenString
, ftdiUSBClose
, ftdiUSBReset
) where
import Foreign
import Foreign.C.Types
import Foreign.C.String
import Foreign.Ptr
import Control.Exception
import Data.Typeable (Typeable, cast)
import Data.Maybe
import Data.Tuple
import LibFtdi.Types
import Bindings.LibFtdi
-- | Vendor ID
type VendorID = Int
-- | Product ID
type ProductID = Int
-- | Error codes returned by internal libftdi functions.
data FtdiError = FTDI_ERR_ALLOC_READ_BUF -- ^ Could not allocate read buffer
               | FTDI_ERR_ALLOC_STRUCT_BUF -- ^ Could not allocate struct buffer
| FTDI_ERR_INIT_FAILED -- ^ libusb_init() failed
deriving (Eq, Typeable)
instance Show FtdiError where
show FTDI_ERR_ALLOC_READ_BUF = "Could not allocate read buffer"
  show FTDI_ERR_ALLOC_STRUCT_BUF = "Could not allocate struct buffer"
show FTDI_ERR_INIT_FAILED = "libusb_init() failed"
instance Exception FtdiError
-- | Returned C Error codes
--
-- ftdi library routines return negative values to indicate errors.
-- Values >= 0 are used to indicate success.
instance Enum FtdiError where
fromEnum = fromJust . flip lookup errors
toEnum = fromJust . flip lookup (map swap errors)
errors = [ (FTDI_ERR_ALLOC_READ_BUF, -1)
, (FTDI_ERR_ALLOC_STRUCT_BUF, -2)
, (FTDI_ERR_INIT_FAILED, -3)
]
-- | (For internal use) Obtain a 'FtdiError' type of a C value from the Error codes list.
ftdiErrorValue :: CInt -> FtdiReturnType Int
ftdiErrorValue c | c >= 0 = (Right . fromIntegral) c -- Success (on ret == 0)
| c < 0 = (Left . toEnum . fromIntegral) c -- C ret code to typed error
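-- For illustration, with the Enum mapping above:
--
-- > ftdiErrorValue 3 == Right 3
-- > ftdiErrorValue (-1) == Left FTDI_ERR_ALLOC_READ_BUF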
-- | (For internal use) Obtain a 'FtdiError' type of a C value from the Error codes list.
ftdiErrorTy :: CInt -> FtdiReturnType ()
ftdiErrorTy c | c >= 0 = return ()                       -- Success (ret >= 0)
| c < 0 = (Left . toEnum . fromIntegral) c -- C ret code to typed error
-- | Short-hand type for brevity and clarity.
type FtdiReturnType a = Either FtdiError a
-- | DeviceHandle wrapper around C device descriptor pointer
newtype DeviceHandle = DeviceHandle { unDeviceHandle :: Ptr C'ftdi_context }
-- | Essential wrapper
withFtdi :: (DeviceHandle -> IO c) -> IO c
withFtdi = bracket openFtdi closeFtdi
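-- A minimal usage sketch (assumes the hard-coded vendor/product IDs in
-- 'openFtdi' match the attached device):
--
-- > main :: IO ()
-- > main = withFtdi $ \dev -> ftdiUSBReset dev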
-- | Handy helper to wrap around Either results
openFtdi :: IO DeviceHandle
openFtdi = do
r <- ftdiInit
case r of
Left e -> throwIO e
Right dev -> do ftdiSetInterface dev INTERFACE_ANY
ftdiUSBOpen dev (0x0403, 0x6010) -- XXX
return dev
-- | ..
ftdiInit :: IO (FtdiReturnType DeviceHandle)
ftdiInit = do
ptr <- c'ftdi_new
ret <- c'ftdi_init ptr
if ret /= 0 then return $ (Left . toEnum . fromIntegral) ret
else return (Right (DeviceHandle ptr))
-- | ..
ftdiDeInit :: DeviceHandle -> IO ()
ftdiDeInit d = do
_ <- c'ftdi_deinit (unDeviceHandle d)
return () -- XXX ignores errors
-- | ..
ftdiSetInterface :: DeviceHandle -> FtdiInterface -> IO ()
ftdiSetInterface d i = do
_ <- c'ftdi_set_interface (unDeviceHandle d) ((fromIntegral . fromEnum) i)
return () -- XXX ignores errors
-- | ..
ftdiUSBOpen :: DeviceHandle -> (VendorID, ProductID)-> IO ()
ftdiUSBOpen d (vid, pid) = do
_ <- c'ftdi_usb_open (unDeviceHandle d) (fromIntegral vid) (fromIntegral pid)
return () -- XXX ignores errors
-- | Open specified device using a device identifier string, e.g. "i:0x0403:0x6010"
ftdiUSBOpenString :: DeviceHandle -> String -> IO ()
ftdiUSBOpenString d s = withCString s $ \str -> do
_ <- c'ftdi_usb_open_string (unDeviceHandle d) str
return () -- XXX ignores errors
-- | ..
ftdiUSBClose :: DeviceHandle -> IO ()
ftdiUSBClose d = do
_ <- c'ftdi_usb_close (unDeviceHandle d)
return () -- XXX ignores errors
-- | Close device. Deallocates the memory allocated by openFtdi when called.
closeFtdi :: DeviceHandle -> IO ()
closeFtdi d = do
ftdiUSBClose d
ftdiDeInit d
-- | Resets the ftdi device.
-- XXX fix error handling
ftdiUSBReset :: DeviceHandle -> IO ()
ftdiUSBReset d = do
r <- c'ftdi_usb_reset $ unDeviceHandle d
case r of
(0) -> return ()
(-1) -> putStrLn "FTDI reset failed"
(-2) -> putStrLn "USB device unavailable"
| victoredwardocallaghan/bindings-libftdi | src/LibFtdi/LibFtdi.hs | lgpl-2.1 | 5,192 | 0 | 13 | 1,346 | 1,101 | 582 | 519 | 99 | 3 |
module Main where
import BitTorrent
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Concurrent.STM
import Control.Exception.Extra
import Control.ITC
import Control.Monad
import Data.Bencoding
import qualified Data.ByteString.Lazy.Char8 as L
import Data.Extra
import Data.Maybe
import Data.Time.Clock
import LTorrent
import System.Environment ( getArgs )
import System.IO
import Text.Printf ( printf )
-- FIXME: See if you can use Jeff Heard's Control.Concurrent.Future in
-- place of the forkIO...resp <- takeMVar o... sequences.
debugMode = False
main :: IO ()
main = do
fs <- getArgs
inCh <- startLTorrent
putStrLn "lTorrent ready"
mis <- attemptStartTorrents inCh fs
putStrLn "lTorrent downloading:"
mapM_ (printf "\t%s\n" . getName) mis
iTime <- getCurrentTime
let mainLoop i t = do
t' <- getCurrentTime
os <- mapM (sendGetPieceArray inCh . getInfoHash) mis
resps <- mapM takeMVar os
let ss = extractResponses $ zip resps mis
mapM_ (printBar i (iTime, t)) ss
threadDelay (400000) >> mainLoop (i+1) t'
mainLoop 0 iTime
forever $ do
sendNOP inCh
threadDelay (10^6)
forever $ threadDelay 6000000
attemptStartTorrents :: TChan BridgeMessage -> [FilePath] -> IO [Metainfo]
attemptStartTorrents inCh fs = do
mms <- mapM (attemptStartTorrent inCh) fs
return $ catMaybes mms
attemptStartTorrent :: TChan BridgeMessage -> FilePath -> IO (Maybe Metainfo)
attemptStartTorrent inCh fp = do
doStart `catchAll` (\e -> print e >> return Nothing)
where
doStart = do
mi <- readMetainfo fp
bs <- L.readFile fp
o <- sendDownloadTorrent inCh bs
resp <- takeMVar o
let (Just err) = fromBencoding =<< bencDictLookup "error" resp
if not . L.null $ err
then fail $ fromByteString err
else return $ Just mi
-- FIXME: extractResponses looks like it could use a few Arrows
extractResponses :: [(Bencoding, Metainfo)] -> [(String, Metainfo)]
extractResponses = catMaybes . map extractResponse
where
extractResponse (be, mi) = do
err <- fromBencoding =<< bencDictLookup "error" be
if not . L.null $ err
then fail $ printf "ERROR: Command resulted in error: %s\n" (L.unpack err)
else case fromBencoding =<< bencDictLookup "response" be of
Nothing -> fail "ERROR: could not extract response"
Just resp -> return (resp, mi)
-- FIXME: printBar under-reports the actual progress
printBar :: Int -> (UTCTime, UTCTime) -> (String, Metainfo) -> IO ()
printBar i (iTime, t) (s, mi) = do
let pieceSize = getPieceLength mi
total = length s
done = length $ filter (=='c') s
inPrg = length $ filter (=='g') s
percent = done * 100 `div` total
percentPrg = inPrg * 100 `div` total
if debugMode
then do
printf "%3d%%[%s][%c] <%7s> SPEED %3s\n"
percent
(take 100 $ concat [replicate percent '=', replicate percentPrg '>', replicate 100 ' '])
("-\\|/" !! (i `mod` 4))
(showPrettyFilesize $ pieceSize * fromIntegral done)
(showPrettyTime $ ceiling $ diffUTCTime t iTime)
else do
printf blankLine
hFlush stdout
printf "%3d%%[%s][%c] <%7s> SPEED %3s"
percent
(take 100 $ concat [replicate percent '=', replicate percentPrg '>', replicate 100 ' '])
("-\\|/" !! (i `mod` 4))
(showPrettyFilesize $ pieceSize * fromIntegral done)
(showPrettyTime $ ceiling $ diffUTCTime t iTime)
hFlush stdout
blankLine :: String
blankLine = replicate 160 '\b'
| scvalex/ltorrent | apps/dltorrent.hs | lgpl-3.0 | 3,765 | 0 | 16 | 1,033 | 1,121 | 566 | 555 | 92 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{- Please read http://gwan.com/en_apachebench_httperf.html -}
module Experiment.Bench.Web (
benchWeb
, plotWeb
) where
import Data.Csv (ToNamedRecord (..), namedRecord, (.=))
import Data.Monoid ((<>))
import Control.Applicative ((<$>), (<*>))
import Data.Text (Text)
import Data.Text.Read (decimal)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Laborantin.DSL
import Laborantin.Types
import Laborantin.Implementation (EnvIO)
import Control.Monad (void, liftM)
import Control.Monad.IO.Class (liftIO)
import Data.Maybe (catMaybes)
import qualified Data.Map as M
import System.Directory (copyFile)
import System.FilePath.Posix ((</>))
import Control.Concurrent (threadDelay)
import Experiment.Bench.CSV
import Experiment.Bench.Process
type URL = Text
type IterationCount = Int
type ConcurrencyLevel = Int
type ProcessCount = Int
type RAM = Int
type GenerationsNumber = Int
{- Server-side -}
data GC = GC RAM GenerationsNumber
deriving (Show)
data Server = Mighty ConcurrencyLevel GC
deriving (Show)
httpServerShellCommand :: Server -> (Text, [Text])
httpServerShellCommand (Mighty conc (GC ram gens)) = ("mighty", ["+RTS"
, "-A" <> T.pack (show ram) <> "M"
, "-G" <> T.pack (show gens)
, "-N" <> T.pack (show conc)
, "-RTS"])
httpServerParams = do
ghcRTSParams
parameter "server-name" $ do
describe "Name of the server to use."
values [str "mighty"]
parameter "server-concurrency" $ do
describe "Number of concurrent server processes."
values [num 1]
ghcRTSParams = do
parameter "gc-area-size" $ do
describe "Initial RAM passed to the RTS (in MB)"
values [num 4]
parameter "gc-generations" $ do
describe "Number of garbage-collector generations"
values [num 2]
httpServer = do
(StringParam name) <- param "server-name"
(NumberParam ram) <- param "gc-area-size"
(NumberParam gens) <- param "gc-generations"
(NumberParam conc) <- param "server-concurrency"
  case name of
    "mighty" -> return $ Mighty (round conc) (GC (round ram) (round gens))
    _ -> error "unknown server name"
{- Client-side -}
data Client = Weighttp URL IterationCount ConcurrencyLevel ProcessCount
deriving (Show)
httpClientShellCommand :: Client -> (Text, [Text])
httpClientShellCommand (Weighttp url cnt conc procs) = ("weighttp", args ++ [url])
where args = map T.pack ["-n", show cnt, "-c", show conc, "-t", show procs]
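-- For example (illustrative values):
--
-- > httpClientShellCommand (Weighttp "http://localhost:8080/index.html" 1000 10 2)
-- >   == ("weighttp", ["-n","1000","-c","10","-t","2","http://localhost:8080/index.html"])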
httpClientParams = do
parameter "client-name" $ do
describe "Name of the client tool to use."
values [str "weighttp"]
  parameter "client-concurrency" $ do
    describe "Number of concurrent requests to run."
values [num 1]
parameter "client-processes" $ do
describe "Number of concurrent processes to run."
values [num 1]
parameter "requests-count" $ do
describe "Number of requests to execute."
values [num 1]
parameter "probed-url" $ do
describe "URL to probe"
values [str "http://localhost:8080/index.html"]
httpClient = do
(StringParam name) <- param "client-name"
(StringParam url) <- param "probed-url"
(NumberParam conc) <- param "client-concurrency"
(NumberParam procs) <- param "client-processes"
(NumberParam reqs) <- param "requests-count"
  case name of
    "weighttp" -> return $ Weighttp url (round reqs) (round conc) (round procs)
    _ -> error "unknown client name"
{- analysis -}
data HttpPerformance = HttpPerformance {
requestsPerSeconds :: Int
, nSuccessfulRequests :: Int
} deriving (Show)
instance ToNamedRecord (Maybe HttpPerformance) where
toNamedRecord (Just (HttpPerformance rps n)) = namedRecord [
"rps" .= rps, "n.successes" .= n]
toNamedRecord Nothing = namedRecord []
parseClientResults :: Client -> Step EnvIO (Maybe HttpPerformance)
parseClientResults (Weighttp _ _ _ _) = do
content <- pRead =<< result "client-process.out"
return $ parseWeighttpOutput content
parseWeighttpOutput :: Text -> Maybe HttpPerformance
parseWeighttpOutput content = HttpPerformance <$> rps <*> statuses
where lines = T.lines content
findLine fstWord = filter ((fstWord ==) . (T.take (T.length fstWord))) lines
findLine' fstWord = if length (findLine fstWord) == 1
then Just (findLine fstWord !! 0)
else Nothing
safeAtIndex n xs = if length xs >= (n+1)
then Just (xs !! n)
else Nothing
safeParseInt txt = either (const Nothing) (Just . fst) (decimal txt)
rps = findLine' "finished" >>= safeAtIndex 9 . T.words >>= safeParseInt
statuses = findLine' "status" >>= safeAtIndex 2 . T.words >>= safeParseInt
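-- The parser keys off two lines of weighttp's summary output, roughly of
-- the form (exact wording may differ between weighttp versions):
--
-- > finished in 2 sec, 13 millisec and 24 microsec, 4926 req/s, 1318 kbyte/s
-- > status codes: 10000 2xx, 0 3xx, 0 4xx, 0 5xx
--
-- Word 9 of the "finished" line is read as requests per second and word 2
-- of the "status" line as the number of successful responses.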
{- actual experiment -}
benchWeb :: ScenarioDescription EnvIO
benchWeb = scenario "bench-web" $ do
describe "Benchmark web-servers static pages."
httpServerParams
httpClientParams
run $ do
dbg "starting server"
(srvCmd, srvCmdArgs) <- httpServerShellCommand <$> httpServer
endServer <- runProcess "server-process" srvCmd srvCmdArgs False
-- cheat to make sure that the server is ready
dbg "waiting half a second"
liftIO $ threadDelay 500000
dbg "starting client"
(cliCmd, cliCmdArgs) <- httpClientShellCommand <$> httpClient
endClient <- runProcess "client-process" cliCmd cliCmdArgs False
endClient Wait >> endServer Kill
analyze $ do
liftIO . print =<< parseClientResults =<< httpClient
{- analysis -}
plotWeb :: ScenarioDescription EnvIO
plotWeb = scenario "plot-web" $ do
describe "Plots the results for the web server benchmarks"
require benchWeb "@sc.param 'client-name' == 'weighttp'"
-- todo: autogenerate this kind of scenario
run $ do
aggregateCSV "performance.csv" "client-process.out" parseWeighttpOutput ["rps" , "n.successes"]
runPlots
runPlots = do
destPath <- liftM (\x -> ePath x </> "plot.R") self
let srcPath = "./scripts/r/plot-web/plot.R"
liftIO $ copyFile srcPath destPath
runProcess "rplots" "R" ["-f", "plot.R"] True >>= ($ Wait)
| lucasdicioccio/laborantin-bench-web | Experiment/Bench/Web.hs | apache-2.0 | 6,519 | 0 | 15 | 1,616 | 1,770 | 894 | 876 | 148 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Main where
import Control.Monad
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Data.Tree
import Filesystem
import Filesystem.Path.CurrentOS as FS
import Options.Applicative
import qualified Options.Applicative as O
import Prelude hiding (FilePath)
import Shelly
import qualified Shelly as S
-- Settings
castleDir :: IO FilePath
castleDir = (FS.</> ".castle") <$> getHomeDirectory
-- Shell utilities
cabal_ :: T.Text -> [T.Text] -> Sh ()
cabal_ = command1_ "cabal" []
sandbox_ :: T.Text -> [T.Text] -> Sh ()
sandbox_ cmd = cabal_ "sandbox" . (cmd:)
-- Workflow and utility functions
installCastle :: Sh ()
installCastle = do
castle <- liftIO castleDir
chdir (parent castle) $ do
mkdirTree $ (filename castle) # leaves ["castles"]
where (#) = Node
leaves = map (# [])
getSandboxDir :: T.Text -> Sh FilePath
getSandboxDir name = liftIO $ fmap subdir castleDir
where subdir base = base FS.</> "castles" FS.</> S.fromText name
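-- For example, assuming a home directory of /home/alice:
--
-- > getSandboxDir "myproject"  -- ~> /home/alice/.castle/castles/myproject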
withSandbox :: T.Text -> (FilePath -> Sh ()) -> (FilePath -> Sh ()) -> Sh ()
withSandbox name onExists onFail = do
sandboxDir <- getSandboxDir name
exists <- test_d sandboxDir
if exists
then onExists sandboxDir
else onFail sandboxDir
withSandbox' :: T.Text -> (FilePath -> Sh ()) -> Sh ()
withSandbox' name onExists = withSandbox name onExists noop
where noop = const $ return ()
getConfigFile :: Sh FilePath
getConfigFile = (FS.</> "cabal.sandbox.config") <$> pwd
listCastles :: Sh [T.Text]
listCastles = liftIO (fmap (FS.</> "castles") castleDir)
>>= ls
>>= fmap (map (toTextIgnore . basename)) . filterM test_d
-- Command function
castle :: CastleCmd -> Sh ()
castle ListCmd = mapM_ echo =<< listCastles
castle NewCmd{..} = withSandbox castleName
(const $ errorExit $ "Sandbox " <> castleName <> " already exists.")
(\d -> mkdir_p d >> chdir d
(sandbox_ "init" ["--sandbox=" <> toTextIgnore d]))
castle UseCmd{..} = withSandbox castleName
(\d -> pwd >>= cp (d FS.</> "cabal.sandbox.config"))
(const $ errorExit $ "Sandbox " <> castleName <> " does not exist.\
                        \ Create it with 'castle new'.")
castle CurrentCmd = do
configFile <- getConfigFile
whenM (not <$> test_f configFile) $
errorExit "No sandbox in this directory."
config <- T.lines <$> readfile configFile
maybe (errorExit "No 'prefix:' line in configuration file.")
(echo . toTextIgnore . FS.basename . FS.fromText . T.drop 10)
. listToMaybe
$ filter (T.isPrefixOf " prefix: ") config
castle RemoveCmd = do
configFile <- getConfigFile
whenM (not <$> test_f configFile) $
errorExit "No sandbox in this directory."
rm configFile
castle DeleteCmd{..} = withSandbox castleName
rm_rf
(const $ errorExit $ "Sandbox " <> castleName <> " does not exist.")
castle ClearCmd{..} =
withSandbox' castleName rm_rf >> castle (NewCmd castleName)
castle SearchCmd{..} =
mapM_ echo =<< filter (T.isInfixOf searchQuery) <$> listCastles
-- Main
main :: IO ()
main = do
CastleOpts{..} <- execParser opts
shelly $ verbosely $ do
installCastle
castle mode
where
opts' = CastleOpts
<$> subparser ( O.command "list" listCmd
<> O.command "new" newCmd
<> O.command "use" useCmd
<> O.command "current" currCmd
<> O.command "remove" rmCmd
<> O.command "delete" delCmd
<> O.command "clear" clrCmd
<> O.command "search" srchCmd
)
listCmd = pinfo (pure ListCmd) "List sand castles." mempty
newCmd = pinfo (NewCmd <$> castleNameArg "The name of the castle to create.")
"Create a new castle." mempty
useCmd = pinfo (UseCmd <$> castleNameArg "The name of the castle to use.")
"Use an existing castle." mempty
currCmd = pinfo (pure CurrentCmd) "Display the current castle name."
mempty
rmCmd = pinfo (pure RemoveCmd) "Removes the sandbox from the current directory."
mempty
delCmd = pinfo (DeleteCmd <$> castleNameArg "The name of the castle to delete.")
"Deletes the castle." mempty
clrCmd = pinfo (ClearCmd <$> castleNameArg "The name of the castle to clear.")
"Clears a castle by deleting and re-creating it." mempty
srchCmd = pinfo (SearchCmd <$> textArg "QUERY" "Search the castles\
\ for one matching the name.")
"Searches for a castle with a name containing the QUERY." mempty
opts = pinfo opts' "Manage shared cabal sandboxes."
(header "castle - manage shared cabal sandboxes.")
-- Command-line parsing
-- | This is a builder utility for ParserInfo instances.
pinfo :: Parser a -> String -> InfoMod a -> ParserInfo a
pinfo p desc imod = info (helper <*> p) (fullDesc <> progDesc desc <> imod)
textOption :: Mod OptionFields T.Text -> Parser T.Text
textOption fields = option (T.pack <$> str) fields
fileOption :: Mod OptionFields FilePath -> Parser FilePath
fileOption fields = option (decodeString <$> str) fields
textArg :: String -> String -> Parser T.Text
textArg meta helpText = argument (T.pack <$> str) (metavar meta <> help helpText)
castleNameArg :: String -> Parser T.Text
castleNameArg = textArg "CASTLE_NAME"
data CastleOpts
= CastleOpts
{ mode :: CastleCmd
} deriving (Show)
data CastleCmd
= ListCmd
| NewCmd { castleName :: T.Text }
| UseCmd { castleName :: T.Text }
| CurrentCmd
| RemoveCmd
| DeleteCmd { castleName :: T.Text }
| ClearCmd { castleName :: T.Text }
| SearchCmd { searchQuery :: T.Text }
deriving (Show)
| erochest/castle | Castle.hs | apache-2.0 | 6,359 | 123 | 13 | 1,973 | 1,549 | 831 | 718 | 133 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{- | CouchDB database methods.
> runCouch def {couchDB="my_db"} $ couchPutDB
> runCouch def {couchDB="my_new_db"} $ couchPutDB
-}
module Database.CouchDB.Conduit.DB (
-- * Methods
couchPutDB,
couchPutDB_,
couchDeleteDB,
-- * Security
couchSecureDB,
-- * Replication
couchReplicateDB
) where
import Control.Monad (void)
import qualified Data.ByteString as B
import qualified Data.Aeson as A
import qualified Network.HTTP.Conduit as H
import qualified Network.HTTP.Types as HT
import Database.CouchDB.Conduit.Internal.Connection
(MonadCouch(..), Path, mkPath)
import Database.CouchDB.Conduit.LowLevel (couch, couch', protect, protect')
-- | Create CouchDB database.
couchPutDB :: MonadCouch m =>
Path -- ^ Database
-> m ()
couchPutDB db = void $ couch HT.methodPut
(mkPath [db]) [] []
(H.RequestBodyBS B.empty)
protect'
-- | \"Don't care\" version of couchPutDB. Create CouchDB database only in its
-- absence. For this it handles @412@ responses.
couchPutDB_ :: MonadCouch m =>
Path -- ^ Database
-> m ()
couchPutDB_ db = void $ couch HT.methodPut
(mkPath [db]) [] []
(H.RequestBodyBS B.empty)
(protect [HT.status200, HT.status201, HT.status202, HT.status304, HT.status412] return)
-- | Delete a database.
couchDeleteDB :: MonadCouch m =>
Path -- ^ Database
-> m ()
couchDeleteDB db = void $ couch HT.methodDelete
(mkPath [db]) [] []
(H.RequestBodyBS B.empty) protect'
-- | Maintain DB security.
couchSecureDB :: MonadCouch m =>
Path -- ^ Database
-> [B.ByteString] -- ^ Admin roles
-> [B.ByteString] -- ^ Admin names
-> [B.ByteString] -- ^ Readers roles
-> [B.ByteString] -- ^ Readers names
-> m ()
couchSecureDB db adminRoles adminNames readersRoles readersNames =
void $ couch HT.methodPut
(mkPath [db, "_security"]) [] []
reqBody protect'
where
reqBody = H.RequestBodyLBS $ A.encode $ A.object [
"admins" A..= A.object [ "roles" A..= adminRoles,
"names" A..= adminNames ],
"readers" A..= A.object [ "roles" A..= readersRoles,
"names" A..= readersNames ] ]
-- | Database replication.
--
-- See <http://guide.couchdb.org/editions/1/en/api.html#replication> for
-- details.
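--
-- A minimal, illustrative call (database names are hypothetical):
--
-- > runCouch def $ couchReplicateDB "my_db" "my_db_backup" True False False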
couchReplicateDB :: MonadCouch m =>
B.ByteString -- ^ Source database. Path or URL
-> B.ByteString -- ^ Target database. Path or URL
-> Bool -- ^ Target creation flag
-> Bool -- ^ Continuous flag
-> Bool -- ^ Cancel flag
-> m ()
couchReplicateDB source target createTarget continuous cancel =
void $ couch' HT.methodPost (const "/_replicate") [] []
reqBody protect'
where
reqBody = H.RequestBodyLBS $ A.encode $ A.object [
"source" A..= source,
"target" A..= target,
"create_target" A..= createTarget,
"continuous" A..= continuous,
"cancel" A..= cancel ]
| akaspin/couchdb-conduit | src/Database/CouchDB/Conduit/DB.hs | bsd-2-clause | 3,543 | 0 | 13 | 1,225 | 738 | 412 | 326 | 68 | 1 |
module Buster.Monitor (monitorForUpdates,
WatchDescriptor,
INotify,
withMonitoring,
installMonitor,
uninstallMonitor,
stopMonitoring) where
import System.INotify
import Buster.Config (reloadConfig)
import Buster.Logger
import Buster.Types
withMonitoring :: (INotify -> IO a) -> IO a
withMonitoring = withINotify
monitorForUpdates :: INotify -> FilePath -> IO () -> IO WatchDescriptor
monitorForUpdates inotify path callback =
addWatch inotify eventVarieties path callback'
where eventVarieties = [Modify]
callback' _ = callback
stopMonitoring :: WatchDescriptor -> IO ()
stopMonitoring = removeWatch
installMonitor :: INotify -> FilePath -> ConfigWatch -> IO WatchDescriptor
installMonitor inotify path configWatch = do
debugM $ "Installing monitor for " ++ path
desc <- monitorForUpdates inotify path callback
debugM $ "Monitor installed for " ++ path
return desc
where callback = logUpdate >> reloadConfig path configWatch
logUpdate = debugM $ "File " ++ path ++ " updated"
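-- A usage sketch ("buster.cfg", 'configWatch' and 'runUntilDone' are
-- placeholders for a real config path, watch value and main action):
--
-- > withMonitoring $ \inotify -> do
-- >   wd <- installMonitor inotify "buster.cfg" configWatch
-- >   runUntilDone
-- >   uninstallMonitor "buster.cfg" wd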
uninstallMonitor :: FilePath -> WatchDescriptor -> IO ()
uninstallMonitor path monitor = do debugM $ "Uninstalling monitor for " ++ path
stopMonitoring monitor
| MichaelXavier/Buster | src/Buster/Monitor.hs | bsd-2-clause | 1,351 | 0 | 9 | 366 | 316 | 162 | 154 | 31 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module Model.Accessor where
import Model.Parse
import Foundation
import Model
import Yesod
import Control.Applicative ((<$>), (<*>))
import Control.Monad
import Control.Monad.IO.Class (liftIO)
import Data.Char
import Data.List (intersperse)
import Data.Text (Text, pack, unpack)
import Data.Time (getCurrentTime, UTCTime)
import Database.Persist
import Database.Persist.Sqlite
import Database.Persist.TH
import Text.Pandoc
import Text.Printf (printf)
import qualified Data.Text as T
import qualified Data.Text.IO as T
------------------------------------------------------------
-- Models
------------------------------------------------------------
getPage name = do
getBy $ UniqueName name
createOrUpdatePageBody name body = do
now <- liftIO getCurrentTime
result <- getBy $ UniqueName name
case result of
Just (key,_) -> update key [YikiPageBody =. body, YikiPageUpdated =. now]
Nothing -> do
insert $ YikiPage name body now now
return ()
getPages 0 = do
map snd <$> selectList [] [Desc YikiPageUpdated]
getPages n = do
map snd <$> selectList [] [LimitTo n]
getAllPages = getPages 0
numOfPages = do
Yesod.count ([] :: [Filter YikiPage])
validateYikiPageName :: Text -> Bool
validateYikiPageName = T.all isAlphaNum
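-- For example:
--
-- > validateYikiPageName (pack "HomePage")   == True
-- > validateYikiPageName (pack "home page!") == False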
insertDefaultDataIfNecessary = do
numOfPages <- numOfPages
when (numOfPages == 0) $ do
body <- liftIO $ T.readFile "Samples/sample.md"
now <- liftIO getCurrentTime
insert $ YikiPage (pack "home") body now now
return ()
markdownToHtml :: (YikiRoute -> Text) -> String -> Either String String
markdownToHtml urlRender s =
render <$> parseMarkdown s
where
render :: [MDLine] -> String
render = (writeHtmlString defaultWriterOptions {writerReferenceLinks = True}) .
readMarkdown defaultParserState .
mdRender
mdRender :: [MDLine] -> String
mdRender lines = concat $ intersperse "\n" $ map lineRender lines
lineRender :: MDLine -> String
lineRender (Line x) = concat $ map elemRender x
lineRender (QuotedLine x) = unpack x
elemRender :: MDElement -> String
elemRender (Elem x) = unpack x
elemRender (Model.Parse.Link x) = printf "<a href='%s'>%s</a>" url name
where url = unpack $ urlRender $ PageR x
name = unpack x
| masaedw/Yiki | Model/Accessor.hs | bsd-2-clause | 2,365 | 0 | 13 | 486 | 722 | 376 | 346 | 63 | 3 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QSyntaxHighlighter_h.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:27
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QSyntaxHighlighter_h (
QhighlightBlock_h(..)
) where
import Foreign.C.Types
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs_h
import Qtc.Classes.Core_h
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui_h
import Qtc.ClassTypes.Gui
import Foreign.Marshal.Array
instance QunSetUserMethod (QSyntaxHighlighter ()) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QSyntaxHighlighter_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
foreign import ccall "qtc_QSyntaxHighlighter_unSetUserMethod" qtc_QSyntaxHighlighter_unSetUserMethod :: Ptr (TQSyntaxHighlighter a) -> CInt -> CInt -> IO (CBool)
instance QunSetUserMethod (QSyntaxHighlighterSc a) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QSyntaxHighlighter_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
instance QunSetUserMethodVariant (QSyntaxHighlighter ()) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QSyntaxHighlighter_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariant (QSyntaxHighlighterSc a) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QSyntaxHighlighter_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariantList (QSyntaxHighlighter ()) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QSyntaxHighlighter_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QunSetUserMethodVariantList (QSyntaxHighlighterSc a) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QSyntaxHighlighter_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QsetUserMethod (QSyntaxHighlighter ()) (QSyntaxHighlighter x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QSyntaxHighlighter setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QSyntaxHighlighter_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QSyntaxHighlighter_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQSyntaxHighlighter x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QSyntaxHighlighter_setUserMethod" qtc_QSyntaxHighlighter_setUserMethod :: Ptr (TQSyntaxHighlighter a) -> CInt -> Ptr (Ptr (TQSyntaxHighlighter x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethod_QSyntaxHighlighter :: (Ptr (TQSyntaxHighlighter x0) -> IO ()) -> IO (FunPtr (Ptr (TQSyntaxHighlighter x0) -> IO ()))
foreign import ccall "wrapper" wrapSetUserMethod_QSyntaxHighlighter_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QSyntaxHighlighterSc a) (QSyntaxHighlighter x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QSyntaxHighlighter setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QSyntaxHighlighter_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QSyntaxHighlighter_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQSyntaxHighlighter x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QsetUserMethod (QSyntaxHighlighter ()) (QSyntaxHighlighter x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QSyntaxHighlighter setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QSyntaxHighlighter_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QSyntaxHighlighter_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQSyntaxHighlighter x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QSyntaxHighlighter_setUserMethodVariant" qtc_QSyntaxHighlighter_setUserMethodVariant :: Ptr (TQSyntaxHighlighter a) -> CInt -> Ptr (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethodVariant_QSyntaxHighlighter :: (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> IO (FunPtr (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))))
foreign import ccall "wrapper" wrapSetUserMethodVariant_QSyntaxHighlighter_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QSyntaxHighlighterSc a) (QSyntaxHighlighter x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QSyntaxHighlighter setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QSyntaxHighlighter_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QSyntaxHighlighter_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQSyntaxHighlighter x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QunSetHandler (QSyntaxHighlighter ()) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QSyntaxHighlighter_unSetHandler cobj_qobj cstr_evid
foreign import ccall "qtc_QSyntaxHighlighter_unSetHandler" qtc_QSyntaxHighlighter_unSetHandler :: Ptr (TQSyntaxHighlighter a) -> CWString -> IO (CBool)
instance QunSetHandler (QSyntaxHighlighterSc a) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QSyntaxHighlighter_unSetHandler cobj_qobj cstr_evid
instance QsetHandler (QSyntaxHighlighter ()) (QSyntaxHighlighter x0 -> String -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QSyntaxHighlighter1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QSyntaxHighlighter1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QSyntaxHighlighter_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQSyntaxHighlighter x0) -> Ptr (TQString ()) -> IO ()
setHandlerWrapper x0 x1
= do x0obj <- qSyntaxHighlighterFromPtr x0
x1str <- stringFromPtr x1
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1str
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QSyntaxHighlighter_setHandler1" qtc_QSyntaxHighlighter_setHandler1 :: Ptr (TQSyntaxHighlighter a) -> CWString -> Ptr (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQString ()) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QSyntaxHighlighter1 :: (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQString ()) -> IO ()) -> IO (FunPtr (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQString ()) -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QSyntaxHighlighter1_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QSyntaxHighlighterSc a) (QSyntaxHighlighter x0 -> String -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QSyntaxHighlighter1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QSyntaxHighlighter1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QSyntaxHighlighter_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQSyntaxHighlighter x0) -> Ptr (TQString ()) -> IO ()
setHandlerWrapper x0 x1
= do x0obj <- qSyntaxHighlighterFromPtr x0
x1str <- stringFromPtr x1
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1str
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
class QhighlightBlock_h x0 x1 where
highlightBlock_h :: x0 -> x1 -> IO ()
instance QhighlightBlock_h (QSyntaxHighlighter ()) ((String)) where
highlightBlock_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QSyntaxHighlighter_highlightBlock cobj_x0 cstr_x1
foreign import ccall "qtc_QSyntaxHighlighter_highlightBlock" qtc_QSyntaxHighlighter_highlightBlock :: Ptr (TQSyntaxHighlighter a) -> CWString -> IO ()
instance QhighlightBlock_h (QSyntaxHighlighterSc a) ((String)) where
highlightBlock_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QSyntaxHighlighter_highlightBlock cobj_x0 cstr_x1
instance QsetHandler (QSyntaxHighlighter ()) (QSyntaxHighlighter x0 -> QEvent t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QSyntaxHighlighter2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QSyntaxHighlighter2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QSyntaxHighlighter_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQSyntaxHighlighter x0) -> Ptr (TQEvent t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qSyntaxHighlighterFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QSyntaxHighlighter_setHandler2" qtc_QSyntaxHighlighter_setHandler2 :: Ptr (TQSyntaxHighlighter a) -> CWString -> Ptr (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QSyntaxHighlighter2 :: (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> IO (FunPtr (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQEvent t1) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QSyntaxHighlighter2_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QSyntaxHighlighterSc a) (QSyntaxHighlighter x0 -> QEvent t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QSyntaxHighlighter2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QSyntaxHighlighter2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QSyntaxHighlighter_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQSyntaxHighlighter x0) -> Ptr (TQEvent t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qSyntaxHighlighterFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance Qevent_h (QSyntaxHighlighter ()) ((QEvent t1)) where
event_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSyntaxHighlighter_event cobj_x0 cobj_x1
foreign import ccall "qtc_QSyntaxHighlighter_event" qtc_QSyntaxHighlighter_event :: Ptr (TQSyntaxHighlighter a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent_h (QSyntaxHighlighterSc a) ((QEvent t1)) where
event_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSyntaxHighlighter_event cobj_x0 cobj_x1
instance QsetHandler (QSyntaxHighlighter ()) (QSyntaxHighlighter x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QSyntaxHighlighter3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QSyntaxHighlighter3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QSyntaxHighlighter_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQSyntaxHighlighter x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
setHandlerWrapper x0 x1 x2
= do x0obj <- qSyntaxHighlighterFromPtr x0
x1obj <- qObjectFromPtr x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj x2obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QSyntaxHighlighter_setHandler3" qtc_QSyntaxHighlighter_setHandler3 :: Ptr (TQSyntaxHighlighter a) -> CWString -> Ptr (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QSyntaxHighlighter3 :: (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> IO (FunPtr (Ptr (TQSyntaxHighlighter x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QSyntaxHighlighter3_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QSyntaxHighlighterSc a) (QSyntaxHighlighter x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QSyntaxHighlighter3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QSyntaxHighlighter3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QSyntaxHighlighter_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQSyntaxHighlighter x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
setHandlerWrapper x0 x1 x2
= do x0obj <- qSyntaxHighlighterFromPtr x0
x1obj <- qObjectFromPtr x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj x2obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QeventFilter_h (QSyntaxHighlighter ()) ((QObject t1, QEvent t2)) where
eventFilter_h x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSyntaxHighlighter_eventFilter cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QSyntaxHighlighter_eventFilter" qtc_QSyntaxHighlighter_eventFilter :: Ptr (TQSyntaxHighlighter a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter_h (QSyntaxHighlighterSc a) ((QObject t1, QEvent t2)) where
eventFilter_h x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSyntaxHighlighter_eventFilter cobj_x0 cobj_x1 cobj_x2
| keera-studios/hsQt | Qtc/Gui/QSyntaxHighlighter_h.hs | bsd-2-clause | 21,539 | 0 | 18 | 4,636 | 6,752 | 3,216 | 3,536 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
module Propellor.Types.ResultCheck (
UncheckedProperty,
unchecked,
checkResult,
check,
Checkable,
assume,
) where
import Propellor.Types
import Propellor.Exception
import Utility.Monad
import Data.Monoid
-- | This is a `Property` but its `Result` is not accurate; in particular
-- it may return `NoChange` despite having made a change.
--
-- However, when it returns `MadeChange`, it really did make a change,
-- and `FailedChange` is still an error.
data UncheckedProperty i = UncheckedProperty (Property i)
instance TightenTargets UncheckedProperty where
tightenTargets (UncheckedProperty p) = UncheckedProperty (tightenTargets p)
-- | Use to indicate that a Property is unchecked.
unchecked :: Property i -> UncheckedProperty i
unchecked = UncheckedProperty
-- | Checks the result of a property. Mostly used to convert a
-- `UncheckedProperty` to a `Property`, but can also be used to further
-- check a `Property`.
checkResult
:: (Checkable p i, LiftPropellor m)
=> m a
-- ^ Run before ensuring the property.
-> (a -> m Result)
-- ^ Run after ensuring the property. Return `MadeChange` if a
-- change was detected, or `NoChange` if no change was detected.
-> p i
-> Property i
checkResult precheck postcheck p = adjustPropertySatisfy (checkedProp p) $ \satisfy -> do
a <- liftPropellor precheck
r <- catchPropellor satisfy
-- Always run postcheck, even if the result is already MadeChange,
-- as it may need to clean up after precheck.
r' <- liftPropellor $ postcheck a
return (r <> r')
-- | Makes a `Property` or an `UncheckedProperty` only run
-- when a test succeeds.
check :: (Checkable p i, LiftPropellor m) => m Bool -> p i -> Property i
check test p = adjustPropertySatisfy (preCheckedProp p) $ \satisfy ->
ifM (liftPropellor test)
( satisfy
, return NoChange
)
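-- A sketch of typical use (@someProperty@ and the IO test are placeholders):
--
-- > check (not <$> doesFileExist "/etc/foo.conf") someProperty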
class Checkable p i where
checkedProp :: p i -> Property i
preCheckedProp :: p i -> Property i
instance Checkable Property i where
checkedProp = id
preCheckedProp = id
instance Checkable UncheckedProperty i where
checkedProp (UncheckedProperty p) = p
-- Since it was pre-checked that the property needed to be run,
-- if the property succeeded, we can assume it made a change.
preCheckedProp (UncheckedProperty p) = p `assume` MadeChange
-- | Sometimes it's not practical to test if a property made a change.
-- In such a case, it's often fine to say:
--
-- > someprop `assume` MadeChange
--
-- However, beware assuming `NoChange`, as that will make combinators
-- like `onChange` not work.
assume :: Checkable p i => p i -> Result -> Property i
assume p result = adjustPropertySatisfy (checkedProp p) $ \satisfy -> do
r <- satisfy
return (r <> result)
| ArchiveTeam/glowing-computing-machine | src/Propellor/Types/ResultCheck.hs | bsd-2-clause | 2,785 | 39 | 11 | 535 | 555 | 299 | 256 | 46 | 1 |
module NLP.Walenty.Types
( Verb (..)
, CertLevel (..)
, Frame
, Argument (..)
, Function (..)
, Phrase (..)
, StdPhrase (..)
, SpecPhrase (..)
, Case (..)
, Aspect (..)
, Gender (..)
, Number (..)
, Negation (..)
, Degree (..)
, Control (..)
, Attribute (..)
, Agree (..)
) where
import Data.Text (Text)
-------------------------------------------------------------
-- Types
-------------------------------------------------------------
-- | A verbal lexical entry from /Walenty/.
data Verb = Verb
{ base :: Text
-- ^ Base form of the verb
, reflexiveV :: Bool
-- ^ "się"?
, certitude :: CertLevel
-- ^ Level of certitude of the entry
, negativity :: Maybe Negation
-- ^ Negativity
, predicativity :: Bool
-- ^ Predicativity
, aspect :: Maybe Aspect
-- ^ Aspect of the verb
, frame :: Frame
-- ^ Valency frame of the verb
} deriving (Show, Eq, Ord)
-- | A certitude level attached to a lexical entry.
data CertLevel
= Sure
-- ^ corresponding to /pewny/ in /Walenty/
| Dubious
-- ^ /wątpliwy/
| Bad
-- ^ /zły/
| Archaic
-- ^ /archaiczny/
| Colloquial
-- ^ /potoczny/
| Vulgar
-- ^ /wulgarny/
deriving (Show, Eq, Ord)
-- | An aspect of the verb.
data Aspect
= Perfective
| Imperfective
-- -- | UnknownAspect
deriving (Show, Eq, Ord)
-- | A valency frame of the verb.
type Frame = [Argument]
-- | An item of the frame.
data Argument = Argument
{ function :: Maybe Function
, control :: [Control]
  -- ^ An argument can have many different control values assigned
, phraseAlt :: [Phrase]
-- ^ A list of phrase types which can
-- occur on the corresponding frame position
}
deriving (Show, Eq, Ord)
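-- An illustrative value: a subject position realised by a bare nominative
-- noun phrase, with no number or lexical restrictions and unconstrained
-- dependents:
--
-- > Argument (Just Subject) [] [Standard (NP Nominative Nothing [] (Atr []))]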
-- | Function of the corresponding argument.
data Function
= Subject
| Object
deriving (Show, Eq, Ord)
-- | From /Walenty/ documentation: /control relations are involved e.g.
-- in establishing the source of agreement for adjectival arguments.
data Control
= Controller
| Controllee
| Controller2
| Controllee2
deriving (Show, Eq, Ord)
data Phrase
= Standard StdPhrase
| Special SpecPhrase
deriving (Show, Eq, Ord)
-- | A standard syntactic constituency phrase.
data StdPhrase
= NP
{ caseG :: Case
-- ^ Grammatical case
, agrNumber :: Maybe (Agree Number)
-- ^ Number (if specified)
, lexicalHead :: [Text]
-- ^ Lexical (semantic) head (if specified)
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ Noun phrase
| PrepNP
{ preposition :: Text
-- ^ Preposition
, caseG :: Case
-- ^ Grammatical case
, agrNumber :: Maybe (Agree Number)
-- ^ Number (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ Prepositional phrase
| CP
{ complementizer :: Text
-- ^ complementizer type (e.g., 'że', 'żeby', 'int')
-- TODO: to be represented with a type?
, negation :: Maybe Negation
-- ^ Number (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, reflexive :: Bool
-- ^ "się"?
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ Bare complementiser clause
| NCP
{ complementizer :: Text
-- ^ complementizer type (e.g., 'że', 'żeby', 'int')
, caseG :: Case
-- ^ Grammatical case
, negation :: Maybe Negation
-- ^ Number (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, reflexive :: Bool
-- ^ "się"?
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ Complementiser clauses with a correlative pronoun
| PrepNCP
{ preposition :: Text
-- ^ Preposition
, complementizer :: Text
-- ^ complementizer type (e.g., 'że', 'żeby', 'int')
, caseG :: Case
-- ^ Grammatical case
, negation :: Maybe Negation
-- ^ Number (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, reflexive :: Bool
-- ^ "się"?
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ Prepositional phrase involving a complementiser clause
-- with a correlative pronoun
| PrepGerP
{ preposition :: Text
-- ^ Preposition
, caseG :: Case
-- ^ Grammatical case
, number :: Maybe Number
-- ^ Number (if specified)
, negation :: Maybe Negation
-- ^ Number (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, reflexive :: Bool
-- ^ "się"?
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ TODO
| AdjP
{ caseG :: Case
-- ^ Grammatical case
, agrNumber :: Maybe (Agree Number)
-- ^ Number (if specified)
, gender :: Maybe (Agree Gender)
-- ^ Gender (if specified)
, degree :: Maybe Degree
-- ^ Degree (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ Adjectival phrase
| PactP
{ caseG :: Case
-- ^ Grammatical case
, agrNumber :: Maybe (Agree Number)
-- ^ Number (if specified)
, gender :: Maybe (Agree Gender)
-- ^ Gender (if specified)
, negation :: Maybe Negation
-- ^ Negation (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, reflexive :: Bool
-- ^ "się"?
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ TODO phrase
| NumP
{ caseG :: Case
-- ^ Grammatical case
, lexicalNumber :: [Text]
-- ^ Lexical number (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ Numeral(?) phrase
| PrepNumP
{ preposition :: Text
-- ^ Preposition
, caseG :: Case
-- ^ Grammatical case
, lexicalNumber :: [Text]
-- ^ Lexical number (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ Prepositional numeral(?) phrase
| PrepAdjP
{ preposition :: Text
-- ^ Preposition
, caseG :: Case
-- ^ Grammatical case
, agrNumber :: Maybe (Agree Number)
-- ^ Number (if specified)
, gender :: Maybe (Agree Gender)
-- ^ Gender (if specified)
, degree :: Maybe Degree
-- ^ Degree (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ TODO
| InfP
{ infAspect :: Maybe Aspect
-- ^ Aspect
, negation :: Maybe Negation
-- ^ Negation (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, reflexive :: Bool
-- ^ "się"?
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ Infinitival phrase
| PasP
{ caseG :: Case
-- ^ Grammatical case
, agrNumber :: Maybe (Agree Number)
-- ^ Number (if specified)
, gender :: Maybe (Agree Gender)
-- ^ Gender (if specified)
, negation :: Maybe Negation
-- ^ Negation (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ TODO
| PrepPasP
{ preposition :: Text
-- ^ Preposition
, caseG :: Case
-- ^ Grammatical case
, agrNumber :: Maybe (Agree Number)
-- ^ Number (if specified)
, gender :: Maybe (Agree Gender)
-- ^ Gender (if specified)
, negation :: Maybe Negation
-- ^ Negation (if specified)
, lexicalHead :: [Text]
-- ^ Lexical head (if specified)
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ TODO
| AdvP
{ advpCat :: Text
-- ^ Locative, ablative, adlative, etc. (see the Walenty webpage)
, degree :: Maybe Degree
-- ^ Number (if specified)
, lexicalHead :: [Text]
-- ^ Lexical (semantic) head (if specified)
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ Adverbial phrase; such phrases have typically assigned expansions,
-- even though they constitute standard phrases.
| QubP
{ lexicalHead :: [Text]
-- ^ Lexical (semantic) head (if specified)
, dependents :: Attribute
-- ^ Dependents (if specified)
}
-- ^ Qublical phrase (:-))
| ComparP
{ lexicalHead :: [Text]
-- ^ Comparative conjunction
, comparFrame :: [Phrase]
-- ^ A list of arguments, with no functional or control specifications.
-- Alternatives cannot be represented. TODO: does it mean that all
-- have to be present? TODO: Could we encode them as dependents?
-- Then all standard phrases would provide `dependents`.
}
-- ^ Comparative phrase
deriving (Show, Eq, Ord)
-- | An phrase type occuring on a specific position of the frame.
data SpecPhrase
= Or
-- ^ Oratio recta, i.e. direct speech
| Refl
-- ^ Reflexive use marked through the word /się/
| E
-- ^ Implicit subject, transmitted subject when marked as controller
| Nonch
-- ^ TODO
| DistrP
-- ^ TODO
| PossP
-- ^ TODO
| ComPrepNP
{ complexPrep :: Text
-- ^ E.g. "przy okazji", "u podstaw"
}
-- ^ Complex (i.e. multi-word) preposition. This is not a standard phrase,
-- but only an expandable phrase, of which expansion should be defined
-- in a separate expansion file.
| XP
{ xpCat :: Text
-- ^ Locative, ablative, adlative, etc. (see the Walenty webpage)
, xpVal :: Maybe Phrase
-- ^ The value of the XP category can be specified
-- at the site of its use
}
-- ^ "Adverbial" phrases involving semantic requirements
-- (expressible through adverbs, prepositional phrases,
-- or sentential phrases). Syntactic sugar, also should
-- be handled by expansions' file.
| Fixed
{ fixedTyp :: Phrase
-- ^ Type of the fixed phrase
, fixedLex :: Text
-- ^ Lexical form of the fixed phrase
}
-- ^ Fixed phrase
deriving (Show, Eq, Ord)
-- | Allows to construct an attribute which, instead of one of its
-- regular values, can take the agreement value.
data Agree a
= Agree
| Value a
deriving (Show, Eq, Ord)
-- | Grammatical case. Note that /vocative/ case does not
-- actually occur in /Walenty/, thus it is not represented
-- in the data structure below.
data Case
= Nominative
| Genitive
| Dative
| Accusative
| Instrumental
| Locative
-- -- | Vocative
| Structural
-- ^ Structural case, i.e, the case which depends on
-- the grammatical function?
| Partitive
| Agreement
| PostPrep
| Predicative
deriving (Show, Eq, Ord)
-- | Grammatical number.
data Number
= Singular
| Plural
deriving (Show, Eq, Ord)
-- | Grammatical gender.
data Gender
= M1
| M2
| M3
| F
| N
deriving (Show, Eq, Ord)
-- | Grammatical degree.
data Degree
= Pos
| Com
| Sup
deriving (Show, Eq, Ord)
-- | Negation.
data Negation
= Neg
| Aff
deriving (Show, Eq, Ord)
-- | Attribute is used to specify dependents of the given phrase.
--
-- TODO: I've checked that dependents can be specified for their
-- function; can they be specified for the control as well?
-- (I didn't find any example).
data Attribute
= NAtr
-- ^ No dependents allowed
| Atr Frame
-- ^ Optional dependent arguments; if none is specified,
-- any argument is allowed.
| Atr1 Frame
-- ^ Like `Atr`, but at most one dependent
| RAtr Frame
-- ^ Required attribute
| RAtr1 Frame
-- ^ Like `RAtr`, but at most one dependent
-- TODO: therefore, this is not really a frame,
-- because in a frame all arguments have to be
-- realized.
deriving (Show, Eq, Ord)
| kawu/walenty | src/NLP/Walenty/Types.hs | bsd-2-clause | 12,347 | 0 | 11 | 3,712 | 1,765 | 1,151 | 614 | 228 | 0 |
module Learn where
mult2 = z / x + y
where x = 7
y = negate x
z = y * 10 | pdmurray/haskell-book-ex | src/ch2/Learn.hs | bsd-3-clause | 99 | 0 | 7 | 49 | 41 | 23 | 18 | 5 | 1 |
module Main where
import Control.Monad
import System.Exit (exitFailure)
import System.Environment
import L2.ParL
import L2.ErrM
import L2ToL1.Compile
import L1.PrintL
main :: IO ()
main = do
args <- getArgs
when (length args /= 1) $ do
putStrLn "usage: filename"
exitFailure
ts <- liftM myLexer $ readFile (head args)
case pProgram ts of
Bad s -> do
putStrLn "\nParse Failed...\n"
putStrLn "Tokens:"
print ts
putStrLn s
Ok prog -> do
case translate prog of
(Left err) -> putStrLn err
(Right cp) -> putStrLn . printTree $ cp
| mhuesch/scheme_compiler | src/L2ToL1/Main.hs | bsd-3-clause | 642 | 0 | 16 | 203 | 215 | 101 | 114 | 25 | 3 |
{-# LANGUAGE BangPatterns #-}
module Backbone where
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception
import Control.Monad
import Data.Hash.MD5
import Data.Map
import Data.Maybe
import Helpers
import HETypes
import Logo
import Network.Socket hiding (recv, recvFrom, send, sendTo)
import Network.Socket.ByteString (recv, send)
import System.IO
import qualified DHT as DHT
addBB :: TVar Env -> Node -> STM ()
addBB e n = do
e' <- readTVar e
let m = insert (_nodeId n) n (_bbm e')
writeTVar e $ e' { _bbm = m }
delBB :: TVar Env -> Node -> STM ()
delBB e n = do
e' <- readTVar e
let m = delete (_nodeId n) (_bbm e')
writeTVar e $ e' { _bbm = m }
numBB :: TVar Env -> STM Int
numBB e = do
e' <- readTVar e
return $ size $ _bbm e'
constructBBNode :: TVar Env -> (Socket , SockAddr) -> IO (Maybe Node)
constructBBNode e (s,sa@(SockAddrInet port host)) = do
let nodeid = md5i $ Str $ show sa --host
hdl <- socketToHandle s ReadWriteMode
hSetBuffering hdl NoBuffering
(bbc,bbq,bbm,mynodeid,mydhtport) <- atomically $ do
e' <- readTVar e
return $ (_bbChan e',_bbQueue e',_bbm e',_selfid e',_dhtport e')
case Data.Map.lookup nodeid bbm of
Just n -> do
hPutStrLn hdl "Already connected ... bye"
hClose hdl
return Nothing
_ -> do
tochan <- atomically $ dupTChan bbc
hPutStrLn hdl $ (show $ fromJust mynodeid) ++ " " ++ (show nodeid) ++ " " ++ (show $ fromJust mydhtport)
return $ Just $ BackboneNode nodeid tochan bbq hdl
handleBBConnections :: TVar Env -> Socket -> IO ()
handleBBConnections e sock = forever $ do
conn <- accept sock
bbNode <- constructBBNode e conn
case bbNode of
Just cn -> do
atomically $ addBB e cn
forkIO $ bbHandler e cn
return ()
_ -> return ()
bbHandler :: TVar Env -> Node -> IO ()
bbHandler e n = do
nbb <- atomically $ numBB e
log2stdout $ "New Backbone server connected as " ++ (show $ _nodeId n)
log2stdout $ "Currently there are " ++ (show nbb ) ++ " backbone nodes connected"
node2bb <- forkIO $ forever $ do
outpub <- atomically $ readTChan ( _toChan n)
hPutStrLn (_handle n) $ show outpub
handle (\(SomeException _) -> return ()) $ forever $ do
inp <- hGetLine $ _handle n
log2stdout $ "bbHandler: got '" ++ inp ++ "'"
atomically $ writeTQueue (_fromQueue n) (read inp)
killThread node2bb
atomically $ delBB e n
nbb' <- atomically $ numBB e
log2stdout $ "Backbone Node " ++ (show $ _nodeId n) ++ " disconnected"
log2stdout $ "Currently there are " ++ (show nbb' ) ++ " backbone nodes connected"
hClose $ _handle n
bbUpstreamNodeHandler :: TVar Env -> String -> String -> IO ()
bbUpstreamNodeHandler e strhost strport = do
addrinfos <- getAddrInfo Nothing (Just strhost ) (Just strport)
s <- socket (addrFamily $ head addrinfos) Stream 0
setSocketOption s KeepAlive 1
connect s $ addrAddress $ head addrinfos
sn@(SockAddrInet p ha) <- getPeerName s
sn'@(SockAddrInet p' ha') <- getSocketName s
hn <- inet_ntoa ha -- upnode
hn' <- inet_ntoa ha' -- localnode
log2stdout $ "bbUpstreamNodeHandler: connected to " ++ (show sn)
hdl <- socketToHandle s ReadWriteMode
hSetBuffering hdl NoBuffering
(usuq,usdq,mydhtport) <- atomically $ do
e' <- readTVar e
return $ (_usUpQueue e', _usDownQueue e',_dhtport e')
f@[nodeid,mynodeid,dhtport] <- (words ) <$> (hGetLine hdl) -- read upstr/own nodeId from upstr
log2stdout $ "Upstream Read: " ++ (show f)
let usn = UpstreamNode (read nodeid) usuq usdq hdl
dhtinst <- DHT.new (DHT.Peer (DHT.ID (read mynodeid)) ha' (fromJust mydhtport))
DHT.join dhtinst (DHT.Peer (DHT.ID (read nodeid)) ha (read dhtport))
threadDelay $ 2*1000*1000
DHT.put dhtinst (DHT.ID (md5i $ Str $ "notRoot")) (Value "NotRoot")
atomically $ modifyTVar e (\env -> env { _usn = Just ((read nodeid),usn) , _selfid = Just (read mynodeid),_dhtinst = Just $ dhtinst} )
e'' <- atomically $ readTVar e
log2stdout $ show e''
usnUp <- forkIO $ forever $ do
msg <- atomically $ readTQueue usuq
hPutStrLn hdl $ show msg
handle (\(SomeException _) -> do
killThread usnUp
hClose hdl
threadDelay $ 5 * 1000 * 1000
fail "Upstream disconnected" ) $ forever $ do
l <- hGetLine hdl
--log2stdout $ "DXCluster: << : " ++ l
atomically $ writeTQueue usdq (read l)
killThread usnUp
`finally` do -- finally
e' <- atomically $ readTVar e
let (_,n) = fromJust $ _usn e'
let h = _handle n
hClose h
atomically $ modifyTVar e (\env -> env { _usn = Nothing } )
-- Respawn
-- threadDelay $ 10 * 1000 * 1000
-- bbUpstreamNodeHandler e strhost strport
| polyrod/hamexpress | src/Backbone.hs | bsd-3-clause | 4,882 | 0 | 19 | 1,261 | 1,859 | 890 | 969 | 118 | 2 |
{- |
Module : Data.Matrix.AsXYZ
Copyright : (c) Jun Narumi 2017-2020
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : ?
Read and display Jones-Faithful notation for space groups (e.g. 'x,y,z') and plane groups (e.g. 'x,y').
-}
module Data.Matrix.AsXYZ (
fromXYZ,
fromXYZ',
fromABC,
prettyXYZ,
prettyABC,
fromXY,
fromXY',
fromAB,
prettyXY,
prettyAB,
) where
import Control.Monad (join)
import Data.Char (isAlpha)
import Data.List (intercalate)
import Data.Ratio (Ratio,(%))
import Data.Matrix (Matrix,fromList,fromLists,toLists,identity,zero,(<->),submatrix)
import Text.ParserCombinators.Parsec (parse,ParseError)
import Data.Ratio.Slash (getRatio,Slash(..))
import qualified Data.Matrix.AsXYZ.ParseXYZ as XYZ(equivalentPositions,transformPpABC,ratio)
import qualified Data.Matrix.AsXYZ.ParseXY as XY (equivalentPositions,transformPpAB)
import qualified Data.Matrix.AsXYZ.ParseXYZ as XY(ratio)
import qualified Data.Matrix.AsXYZ.Plain as Plain (showAs,showAs',xyzLabel,abcLabel)
-- | Create a matrix from an xyz coordinate string of a general equivalent position
--
-- > ( 1 % 1 0 % 1 0 % 1 0 % 1 )
-- > ( 0 % 1 1 % 1 0 % 1 0 % 1 )
-- > ( 0 % 1 0 % 1 1 % 1 0 % 1 )
-- > fromXYZ "x,y,z" :: Matrix Rational = ( 0 % 1 0 % 1 0 % 1 1 % 1 )
-- >
-- > ( 1 % 1 0 % 1 0 % 1 1 % 2 )
-- > ( 0 % 1 1 % 1 0 % 1 1 % 3 )
-- > ( 0 % 1 0 % 1 1 % 1 1 % 4 )
-- > fromXYZ "x+1/2,y+1/3,z+1/4" :: Matrix Rational = ( 0 % 1 0 % 1 0 % 1 1 % 1 )
-- >
-- > ( 1 2 3 4 )
-- > ( 5 6 7 8 )
-- > ( 9 10 11 12 )
-- > fromXYZ "x+2y+3z+4,5x+6y+7z+8,9x+10y+11z+12" :: Matrix Int = ( 0 0 0 1 )
fromXYZ :: Integral a => String -> Matrix (Ratio a)
fromXYZ input = unsafeGet $ makeMatrixS <$> parse (XYZ.equivalentPositions XYZ.ratio) input input
-- | Maybe version
fromXYZ' :: Integral a => String -> Maybe (Matrix (Ratio a))
fromXYZ' input = get $ makeMatrixS <$> parse (XYZ.equivalentPositions XYZ.ratio) input input
-- | It uses abc instead of xyz
--
-- > ( 1 % 1 0 % 1 0 % 1 0 % 1 )
-- > ( 0 % 1 1 % 1 0 % 1 0 % 1 )
-- > ( 0 % 1 0 % 1 1 % 1 0 % 1 )
-- > fromXYZ "a,b,c" :: Matrix Rational = ( 0 % 1 0 % 1 0 % 1 1 % 1 )
fromABC :: Integral a => String -> Matrix (Ratio a)
fromABC input = unsafeGet $ makeMatrixS <$> parse (XYZ.transformPpABC XYZ.ratio) input input
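-- Pads the parsed coefficient rows to a full 4x4 augmented matrix:
-- keeps the 3x4 block and appends the homogeneous row [0,0,0,1].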
makeMatrixS :: Num a => [[a]] -> Matrix a
makeMatrixS m = (submatrix 1 3 1 4 . fromLists) m <-> fromLists [[0,0,0,1]]
unsafeGet :: Either ParseError a -> a
unsafeGet e = case e of
Left s -> error $ show s
Right m -> m
get :: Either ParseError a -> Maybe a
get e = case e of
Left s -> Nothing
Right m -> Just m
----------------------------------
-- | Get the xyz string of a matrix
--
-- >>> prettyXYZ (identity 4 :: Matrix Rational)
-- "x,y,z"
--
-- > ( 0 % 1 0 % 1 0 % 1 1 % 2 )
-- > ( 0 % 1 0 % 1 0 % 1 2 % 3 )
-- > ( 0 % 1 0 % 1 0 % 1 4 % 5 )
-- > prettyXYZ ( 0 % 1 0 % 1 0 % 1 1 % 1 ) = "1/2,2/3,4/5"
prettyXYZ :: (Integral a) =>
Matrix (Ratio a) -- ^ 3x3, 3x4 or 4x4 matrix
-> String
prettyXYZ = Plain.showAs Plain.xyzLabel
-- | It uses abc instead of xyz as the text format
--
-- >>> prettyABC (identity 4 :: Matrix Rational)
-- "a,b,c"
prettyABC :: (Integral a) =>
Matrix (Ratio a) -- ^ 3x3, 3x4 or 4x4 matrix
-> String
prettyABC = Plain.showAs Plain.abcLabel
-- | Create a matrix from an xy coordinate string of a general equivalent position
--
-- >>> toLists . fromXY $ "x,y"
-- [[1 % 1,0 % 1,0 % 1],[0 % 1,1 % 1,0 % 1],[0 % 1,0 % 1,1 % 1]]
fromXY :: Integral a =>
String
-> Matrix (Ratio a)
fromXY input = unsafeGet $ makeMatrixP <$> parse (XY.equivalentPositions XY.ratio) input input
-- | Maybe version
--
-- >>> toLists <$> fromXY' "x,y"
-- Just [[1 % 1,0 % 1,0 % 1],[0 % 1,1 % 1,0 % 1],[0 % 1,0 % 1,1 % 1]]
fromXY' :: Integral a =>
String
-> Maybe (Matrix (Ratio a))
fromXY' input = get $ makeMatrixP <$> parse (XY.equivalentPositions XY.ratio) input input
-- | It uses ab instead of xy
--
-- >>> toLists . fromAB $ "a,b"
-- [[1 % 1,0 % 1,0 % 1],[0 % 1,1 % 1,0 % 1],[0 % 1,0 % 1,1 % 1]]
fromAB :: Integral a =>
String
-> Matrix (Ratio a)
fromAB input = unsafeGet $ makeMatrixP <$> parse (XY.transformPpAB XY.ratio) input input
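-- Pads the parsed coefficient rows to a full 3x3 augmented matrix for the
-- plane-group case: keeps the 2x3 block and appends the homogeneous row [0,0,1].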
makeMatrixP :: Num a => [[a]] -> Matrix a
makeMatrixP m = (submatrix 1 2 1 3 . fromLists) m <-> fromLists [[0,0,1]]
-- | Get the xy string of a matrix
--
-- >>> prettyXY (identity 4 :: Matrix Rational)
-- "x,y"
prettyXY :: (Integral a) =>
Matrix (Ratio a) -- ^ 2x2, 2x3 or 3x3 matrix
-> String
prettyXY = Plain.showAs' Plain.xyzLabel
-- | It uses ab instead of xy as the text format
--
-- >>> prettyAB (identity 4 :: Matrix Rational)
-- "a,b"
prettyAB :: (Integral a) =>
Matrix (Ratio a) -- ^ 2x2, 2x3 or 3x3 matrix
-> String
prettyAB = Plain.showAs' Plain.abcLabel | narumij/matrix-as-xyz | src/Data/Matrix/AsXYZ.hs | bsd-3-clause | 5,467 | 0 | 11 | 1,752 | 1,052 | 593 | 459 | 68 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Client.BeamT where
import Control.Lens (makeLenses)
import Linear (V3(..))
import Types
makeLenses ''BeamT
newBeamT :: BeamT
newBeamT = BeamT
{ _bEntity = 0
, _bDestEntity = 0
, _bModel = Nothing
, _bEndTime = 0
, _bOffset = V3 0 0 0
, _bStart = V3 0 0 0
, _bEnd = V3 0 0 0
}
| ksaveljev/hake-2 | src/Client/BeamT.hs | bsd-3-clause | 416 | 0 | 7 | 161 | 115 | 68 | 47 | 15 | 1 |
---------------------------------------------------------
-- The main program for the hpc-markup tool, part of HPC.
-- Andy Gill and Colin Runciman, June 2006
---------------------------------------------------------
module HpcMarkup (markup_plugin) where
import Trace.Hpc.Mix
import Trace.Hpc.Tix
import Trace.Hpc.Util
import HpcFlags
import HpcUtils
import System.Directory
import System.IO (localeEncoding)
import Data.List
import Data.Maybe(fromJust)
import Data.Array
import Data.Monoid
import Control.Monad
import qualified Data.Set as Set
------------------------------------------------------------------------------
markup_options :: FlagOptSeq
markup_options
= excludeOpt
. includeOpt
. srcDirOpt
. hpcDirOpt
. funTotalsOpt
. altHighlightOpt
. destDirOpt
markup_plugin :: Plugin
markup_plugin = Plugin { name = "markup"
, usage = "[OPTION] .. <TIX_FILE> [<MODULE> [<MODULE> ..]]"
, options = markup_options
, summary = "Markup Haskell source with program coverage"
, implementation = markup_main
, init_flags = default_flags
, final_flags = default_final_flags
}
------------------------------------------------------------------------------
markup_main :: Flags -> [String] -> IO ()
markup_main flags (prog:modNames) = do
let hpcflags1 = flags
{ includeMods = Set.fromList modNames
`Set.union`
includeMods flags }
let Flags
{ funTotals = theFunTotals
, altHighlight = invertOutput
, destDir = dest_dir
} = hpcflags1
mtix <- readTix (getTixFileName prog)
Tix tixs <- case mtix of
Nothing -> hpcError markup_plugin $ "unable to find tix file for: " ++ prog
Just a -> return a
mods <-
sequence [ genHtmlFromMod dest_dir hpcflags1 tix theFunTotals invertOutput
| tix <- tixs
, allowModule hpcflags1 (tixModuleName tix)
]
let index_name = "hpc_index"
index_fun = "hpc_index_fun"
index_alt = "hpc_index_alt"
index_exp = "hpc_index_exp"
let writeSummary filename cmp = do
let mods' = sortBy cmp mods
putStrLn $ "Writing: " ++ (filename ++ ".html")
writeFileUsing (dest_dir ++ "/" ++ filename ++ ".html") $
"<html>" ++
"<head>" ++
charEncodingTag ++
"<style type=\"text/css\">" ++
"table.bar { background-color: #f25913; }\n" ++
"td.bar { background-color: #60de51; }\n" ++
"td.invbar { background-color: #f25913; }\n" ++
"table.dashboard { border-collapse: collapse ; border: solid 1px black }\n" ++
".dashboard td { border: solid 1px black }\n" ++
".dashboard th { border: solid 1px black }\n" ++
"</style>\n" ++
"</head>" ++
"<body>" ++
"<table class=\"dashboard\" width=\"100%\" border=1>\n" ++
"<tr>" ++
"<th rowspan=2><a href=\"" ++ index_name ++ ".html\">module</a></th>" ++
"<th colspan=3><a href=\"" ++ index_fun ++ ".html\">Top Level Definitions</a></th>" ++
"<th colspan=3><a href=\"" ++ index_alt ++ ".html\">Alternatives</a></th>" ++
"<th colspan=3><a href=\"" ++ index_exp ++ ".html\">Expressions</a></th>" ++
"</tr>" ++
"<tr>" ++
"<th>%</th>" ++
"<th colspan=2>covered / total</th>" ++
"<th>%</th>" ++
"<th colspan=2>covered / total</th>" ++
"<th>%</th>" ++
"<th colspan=2>covered / total</th>" ++
"</tr>" ++
concat [ showModuleSummary (modName,fileName,modSummary)
| (modName,fileName,modSummary) <- mods'
] ++
"<tr></tr>" ++
showTotalSummary (mconcat
[ modSummary
| (_,_,modSummary) <- mods'
])
++ "</table></body></html>\n"
writeSummary index_name $ \ (n1,_,_) (n2,_,_) -> compare n1 n2
writeSummary index_fun $ \ (_,_,s1) (_,_,s2) ->
compare (percent (topFunTicked s2) (topFunTotal s2))
(percent (topFunTicked s1) (topFunTotal s1))
writeSummary index_alt $ \ (_,_,s1) (_,_,s2) ->
compare (percent (altTicked s2) (altTotal s2))
(percent (altTicked s1) (altTotal s1))
writeSummary index_exp $ \ (_,_,s1) (_,_,s2) ->
compare (percent (expTicked s2) (expTotal s2))
(percent (expTicked s1) (expTotal s1))
markup_main _ []
= hpcError markup_plugin $ "no .tix file or executable name specified"
charEncodingTag :: String
charEncodingTag =
"<meta http-equiv=\"Content-Type\" " ++
"content=\"text/html; " ++ "charset=" ++ show localeEncoding ++ "\">"
genHtmlFromMod
:: String
-> Flags
-> TixModule
-> Bool
-> Bool
-> IO (String, [Char], ModuleSummary)
genHtmlFromMod dest_dir flags tix theFunTotals invertOutput = do
let theHsPath = srcDirs flags
let modName0 = tixModuleName tix
(Mix origFile _ _ tabStop mix') <- readMixWithFlags flags (Right tix)
let arr_tix :: Array Int Integer
arr_tix = listArray (0,length (tixModuleTixs tix) - 1)
$ tixModuleTixs tix
let tickedWith :: Int -> Integer
tickedWith n = arr_tix ! n
isTicked n = tickedWith n /= 0
let info = [ (pos,theMarkup)
| (gid,(pos,boxLabel)) <- zip [0 ..] mix'
, let binBox = case (isTicked gid,isTicked (gid+1)) of
(False,False) -> []
(True,False) -> [TickedOnlyTrue]
(False,True) -> [TickedOnlyFalse]
(True,True) -> []
, let tickBox = if isTicked gid
then [IsTicked]
else [NotTicked]
, theMarkup <- case boxLabel of
ExpBox {} -> tickBox
TopLevelBox {}
-> TopLevelDecl theFunTotals (tickedWith gid) : tickBox
LocalBox {} -> tickBox
BinBox _ True -> binBox
_ -> []
]
let modSummary = foldr (.) id
[ \ st ->
case boxLabel of
ExpBox False
-> st { expTicked = ticked (expTicked st)
, expTotal = succ (expTotal st)
}
ExpBox True
-> st { expTicked = ticked (expTicked st)
, expTotal = succ (expTotal st)
, altTicked = ticked (altTicked st)
, altTotal = succ (altTotal st)
}
TopLevelBox _ ->
st { topFunTicked = ticked (topFunTicked st)
, topFunTotal = succ (topFunTotal st)
}
_ -> st
| (gid,(_pos,boxLabel)) <- zip [0 ..] mix'
, let ticked = if isTicked gid
then succ
else id
] $ mempty
-- add prefix to modName argument
content <- readFileFromPath (hpcError markup_plugin) origFile theHsPath
let content' = markup tabStop info content
let show' = reverse . take 5 . (++ " ") . reverse . show
let addLine n xs = "<span class=\"lineno\">" ++ show' n ++ " </span>" ++ xs
let addLines = unlines . map (uncurry addLine) . zip [1 :: Int ..] . lines
let fileName = modName0 ++ ".hs.html"
putStrLn $ "Writing: " ++ fileName
writeFileUsing (dest_dir ++ "/" ++ fileName) $
unlines ["<html>",
"<head>",
charEncodingTag,
"<style type=\"text/css\">",
"span.lineno { color: white; background: #aaaaaa; border-right: solid white 12px }",
if invertOutput
then "span.nottickedoff { color: #404040; background: white; font-style: oblique }"
else "span.nottickedoff { background: " ++ yellow ++ "}",
if invertOutput
then "span.istickedoff { color: black; background: #d0c0ff; font-style: normal; }"
else "span.istickedoff { background: white }",
"span.tickonlyfalse { margin: -1px; border: 1px solid " ++ red ++ "; background: " ++ red ++ " }",
"span.tickonlytrue { margin: -1px; border: 1px solid " ++ green ++ "; background: " ++ green ++ " }",
"span.funcount { font-size: small; color: orange; z-index: 2; position: absolute; right: 20 }",
if invertOutput
then "span.decl { font-weight: bold; background: #d0c0ff }"
else "span.decl { font-weight: bold }",
"span.spaces { background: white }",
"</style>",
"</head>",
"<body>",
"<pre>"] ++ addLines content' ++ "\n</pre>\n</body>\n</html>\n";
modSummary `seq` return (modName0,fileName,modSummary)
data Loc = Loc !Int !Int
deriving (Eq,Ord,Show)
data Markup
= NotTicked
| TickedOnlyTrue
| TickedOnlyFalse
| IsTicked
| TopLevelDecl
Bool -- display entry totals
Integer
deriving (Eq,Show)
markup :: Int -- ^tabStop
-> [(HpcPos,Markup)] -- random list of tick location pairs
-> String -- text to mark up
-> String
markup tabStop mix str = addMarkup tabStop str (Loc 1 1) [] sortedTickLocs
where
tickLocs = [ (Loc ln1 c1,Loc ln2 c2,mark)
| (pos,mark) <- mix
, let (ln1,c1,ln2,c2) = fromHpcPos pos
]
sortedTickLocs = sortBy (\(locA1,locZ1,_) (locA2,locZ2,_) ->
(locA1,locZ2) `compare` (locA2,locZ1)) tickLocs
addMarkup :: Int -- tabStop
-> String -- text to mark up
-> Loc -- current location
-> [(Loc,Markup)] -- stack of open ticks, with closing location
-> [(Loc,Loc,Markup)] -- sorted list of tick location pairs
-> String
-- check the pre-condition.
--addMarkup tabStop cs loc os ticks
-- | not (isSorted (map fst os)) = error $ "addMarkup: bad closing ordering: " ++ show os
--addMarkup tabStop cs loc os@(_:_) ticks
-- | trace (show (loc,os,take 10 ticks)) False = undefined
-- close all open ticks, if we have reached the end
addMarkup _ [] _loc os [] =
concatMap (const closeTick) os
addMarkup tabStop cs loc ((o,_):os) ticks | loc > o =
closeTick ++ addMarkup tabStop cs loc os ticks
--addMarkup tabStop cs loc os ((t1,t2,tik@(TopLevelDecl {})):ticks) | loc == t1 =
-- openTick tik ++ closeTick ++ addMarkup tabStop cs loc os ticks
addMarkup tabStop cs loc os ((t1,t2,tik0):ticks) | loc == t1 =
case os of
((_,tik'):_)
| not (allowNesting tik0 tik')
-> addMarkup tabStop cs loc os ticks -- already marked or bool within marked bool
_ -> openTick tik0 ++ addMarkup tabStop cs loc (addTo (t2,tik0) os) ticks
where
addTo (t,tik) [] = [(t,tik)]
addTo (t,tik) ((t',tik'):xs) | t <= t' = (t,tik):(t',tik'):xs
| otherwise = (t',tik):(t',tik'):xs
addMarkup tabStop0 cs loc os ((t1,_t2,_tik):ticks) | loc > t1 =
-- throw away this tick, because it is from a previous place ??
addMarkup tabStop0 cs loc os ticks
addMarkup tabStop0 ('\n':cs) loc@(Loc ln col) os@((Loc ln2 col2,_):_) ticks
| ln == ln2 && col < col2
= addMarkup tabStop0 (' ':'\n':cs) loc os ticks
addMarkup tabStop0 (c0:cs) loc@(Loc _ p) os ticks =
if c0=='\n' && os/=[] then
concatMap (const closeTick) (downToTopLevel os) ++
c0 : "<span class=\"spaces\">" ++ expand 1 w ++ "</span>" ++
concatMap (openTick.snd) (reverse (downToTopLevel os)) ++
addMarkup tabStop0 cs' loc' os ticks
else if c0=='\t' then
expand p "\t" ++ addMarkup tabStop0 cs (incBy c0 loc) os ticks
else
escape c0 ++ addMarkup tabStop0 cs (incBy c0 loc) os ticks
where
(w,cs') = span (`elem` " \t") cs
loc' = foldl (flip incBy) loc (c0:w)
escape '>' = ">"
escape '<' = "<"
escape '"' = """
escape '&' = "&"
escape c = [c]
expand :: Int -> String -> String
expand _ "" = ""
expand c ('\t':s) = replicate (c' - c) ' ' ++ expand c' s
where
c' = tabStopAfter 8 c
expand c (' ':s) = ' ' : expand (c+1) s
expand _ _ = error "bad character in string for expansion"
incBy :: Char -> Loc -> Loc
incBy '\n' (Loc ln _c) = Loc (succ ln) 1
incBy '\t' (Loc ln c) = Loc ln (tabStopAfter tabStop0 c)
incBy _ (Loc ln c) = Loc ln (succ c)
tabStopAfter :: Int -> Int -> Int
tabStopAfter tabStop c = fromJust (find (>c) [1,(tabStop + 1)..])
addMarkup tabStop cs loc os ticks = "ERROR: " ++ show (take 10 cs,tabStop,loc,take 10 os,take 10 ticks)
openTick :: Markup -> String
openTick NotTicked = "<span class=\"nottickedoff\">"
openTick IsTicked = "<span class=\"istickedoff\">"
openTick TickedOnlyTrue = "<span class=\"tickonlytrue\">"
openTick TickedOnlyFalse = "<span class=\"tickonlyfalse\">"
openTick (TopLevelDecl False _) = openTopDecl
openTick (TopLevelDecl True 0)
= "<span class=\"funcount\">-- never entered</span>" ++
openTopDecl
openTick (TopLevelDecl True 1)
= "<span class=\"funcount\">-- entered once</span>" ++
openTopDecl
openTick (TopLevelDecl True n0)
= "<span class=\"funcount\">-- entered " ++ showBigNum n0 ++ " times</span>" ++ openTopDecl
where showBigNum n | n <= 9999 = show n
| otherwise = showBigNum' (n `div` 1000) ++ "," ++ showWith (n `mod` 1000)
showBigNum' n | n <= 999 = show n
| otherwise = showBigNum' (n `div` 1000) ++ "," ++ showWith (n `mod` 1000)
showWith n = take 3 $ reverse $ ("000" ++) $ reverse $ show n
closeTick :: String
closeTick = "</span>"
openTopDecl :: String
openTopDecl = "<span class=\"decl\">"
downToTopLevel :: [(Loc,Markup)] -> [(Loc,Markup)]
downToTopLevel ((_,TopLevelDecl {}):_) = []
downToTopLevel (o : os) = o : downToTopLevel os
downToTopLevel [] = []
-- build in logic for nesting bin boxes
allowNesting :: Markup -- innermost
-> Markup -- outermost
-> Bool
allowNesting n m | n == m = False -- no need to double nest
allowNesting IsTicked TickedOnlyFalse = False
allowNesting IsTicked TickedOnlyTrue = False
allowNesting _ _ = True
------------------------------------------------------------------------------
data ModuleSummary = ModuleSummary
{ expTicked :: !Int
, expTotal :: !Int
, topFunTicked :: !Int
, topFunTotal :: !Int
, altTicked :: !Int
, altTotal :: !Int
}
deriving (Show)
showModuleSummary :: (String, String, ModuleSummary) -> String
showModuleSummary (modName,fileName,modSummary) =
"<tr>\n" ++
"<td> <tt>module <a href=\"" ++ fileName ++ "\">"
++ modName ++ "</a></tt></td>\n" ++
showSummary (topFunTicked modSummary) (topFunTotal modSummary) ++
showSummary (altTicked modSummary) (altTotal modSummary) ++
showSummary (expTicked modSummary) (expTotal modSummary) ++
"</tr>\n"
showTotalSummary :: ModuleSummary -> String
showTotalSummary modSummary =
"<tr style=\"background: #e0e0e0\">\n" ++
"<th align=left> Program Coverage Total</tt></th>\n" ++
showSummary (topFunTicked modSummary) (topFunTotal modSummary) ++
showSummary (altTicked modSummary) (altTotal modSummary) ++
showSummary (expTicked modSummary) (expTotal modSummary) ++
"</tr>\n"
showSummary :: (Integral t, Show t) => t -> t -> String
showSummary ticked total =
"<td align=\"right\">" ++ showP (percent ticked total) ++ "</td>" ++
"<td>" ++ show ticked ++ "/" ++ show total ++ "</td>" ++
"<td width=100>" ++
(case percent ticked total of
Nothing -> " "
Just w -> bar w "bar"
) ++ "</td>"
where
showP Nothing = "- "
showP (Just x) = show x ++ "%"
bar 0 _ = bar 100 "invbar"
bar w inner = "<table cellpadding=0 cellspacing=0 width=\"100\" class=\"bar\">" ++
"<tr><td><table cellpadding=0 cellspacing=0 width=\"" ++ show w ++ "%\">" ++
"<tr><td height=12 class=" ++ show inner ++ "></td></tr>" ++
"</table></td></tr></table>"
percent :: (Integral a) => a -> a -> Maybe a
percent ticked total = if total == 0 then Nothing else Just (ticked * 100 `div` total)
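-- For example (illustrative): percent 3 4 == Just 75, while percent 0 0 == Nothing.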
instance Monoid ModuleSummary where
mempty = ModuleSummary
{ expTicked = 0
, expTotal = 0
, topFunTicked = 0
, topFunTotal = 0
, altTicked = 0
, altTotal = 0
}
mappend (ModuleSummary eTik1 eTot1 tTik1 tTot1 aTik1 aTot1)
(ModuleSummary eTik2 eTot2 tTik2 tTot2 aTik2 aTot2)
= ModuleSummary (eTik1 + eTik2) (eTot1 + eTot2) (tTik1 + tTik2) (tTot1 + tTot2) (aTik1 + aTik2) (aTot1 + aTot2)
------------------------------------------------------------------------------
writeFileUsing :: String -> String -> IO ()
writeFileUsing filename text = do
let dest_dir = reverse . dropWhile (\ x -> x /= '/') . reverse $ filename
-- We need to check for the dest_dir each time, because we use sub-dirs for
-- packages, and a single .tix file might contain information about
-- many packages.
-- create the dest_dir if needed
when (not (null dest_dir)) $
createDirectoryIfMissing True dest_dir
writeFile filename text
------------------------------------------------------------------------------
-- global color palette
red,green,yellow :: String
red = "#f20913"
green = "#60de51"
yellow = "yellow"
| nomeata/ghc | utils/hpc/HpcMarkup.hs | bsd-3-clause | 18,690 | 0 | 56 | 6,154 | 4,747 | 2,497 | 2,250 | 383 | 16 |
{-# LANGUAGE CPP, MagicHash, RecordWildCards, BangPatterns #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# OPTIONS_GHC -fprof-auto-top #-}
{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-}
--
-- (c) The University of Glasgow 2002-2006
--
-- | ByteCodeGen: Generate bytecode from Core
module ByteCodeGen ( UnlinkedBCO, byteCodeGen, coreExprToBCOs ) where
#include "HsVersions.h"
import GhcPrelude
import ByteCodeInstr
import ByteCodeAsm
import ByteCodeTypes
import GHCi
import GHCi.FFI
import GHCi.RemoteTypes
import BasicTypes
import DynFlags
import Outputable
import GHC.Platform
import Name
import MkId
import Id
import Var ( updateVarType )
import ForeignCall
import HscTypes
import CoreUtils
import CoreSyn
import PprCore
import Literal
import PrimOp
import CoreFVs
import Type
import GHC.Types.RepType
import DataCon
import TyCon
import Util
import VarSet
import TysPrim
import TyCoPpr ( pprType )
import ErrUtils
import Unique
import FastString
import Panic
import GHC.StgToCmm.Closure ( NonVoid(..), fromNonVoid, nonVoidIds )
import GHC.StgToCmm.Layout
import GHC.Runtime.Layout hiding (WordOff, ByteOff, wordsToBytes)
import GHC.Data.Bitmap
import OrdList
import Maybes
import VarEnv
import Data.List
import Foreign
import Control.Monad
import Data.Char
import UniqSupply
import Module
import Control.Exception
import Data.Array
import Data.ByteString (ByteString)
import Data.Map (Map)
import Data.IntMap (IntMap)
import qualified Data.Map as Map
import qualified Data.IntMap as IntMap
import qualified FiniteMap as Map
import Data.Ord
import GHC.Stack.CCS
import Data.Either ( partitionEithers )
-- -----------------------------------------------------------------------------
-- Generating byte code for a complete module
byteCodeGen :: HscEnv
-> Module
-> CoreProgram
-> [TyCon]
-> Maybe ModBreaks
-> IO CompiledByteCode
byteCodeGen hsc_env this_mod binds tycs mb_modBreaks
= withTiming dflags
(text "ByteCodeGen"<+>brackets (ppr this_mod))
(const ()) $ do
-- Split top-level binds into strings and others.
-- See Note [generating code for top-level string literal bindings].
let (strings, flatBinds) = partitionEithers $ do -- list monad
(bndr, rhs) <- flattenBinds binds
return $ case exprIsTickedString_maybe rhs of
Just str -> Left (bndr, str)
_ -> Right (bndr, simpleFreeVars rhs)
stringPtrs <- allocateTopStrings hsc_env strings
us <- mkSplitUniqSupply 'y'
(BcM_State{..}, proto_bcos) <-
runBc hsc_env us this_mod mb_modBreaks (mkVarEnv stringPtrs) $
mapM schemeTopBind flatBinds
when (notNull ffis)
(panic "ByteCodeGen.byteCodeGen: missing final emitBc?")
dumpIfSet_dyn dflags Opt_D_dump_BCOs
"Proto-BCOs" FormatByteCode
(vcat (intersperse (char ' ') (map ppr proto_bcos)))
cbc <- assembleBCOs hsc_env proto_bcos tycs (map snd stringPtrs)
(case modBreaks of
Nothing -> Nothing
Just mb -> Just mb{ modBreaks_breakInfo = breakInfo })
-- Squash space leaks in the CompiledByteCode. This is really
-- important, because when loading a set of modules into GHCi
-- we don't touch the CompiledByteCode until the end when we
-- do linking. Forcing out the thunks here reduces space
-- usage by more than 50% when loading a large number of
-- modules.
evaluate (seqCompiledByteCode cbc)
return cbc
where dflags = hsc_dflags hsc_env
allocateTopStrings
:: HscEnv
-> [(Id, ByteString)]
-> IO [(Var, RemotePtr ())]
allocateTopStrings hsc_env topStrings = do
let !(bndrs, strings) = unzip topStrings
ptrs <- iservCmd hsc_env $ MallocStrings strings
return $ zip bndrs ptrs
{-
Note [generating code for top-level string literal bindings]
Here is a summary on how the byte code generator deals with top-level string
literals:
1. Top-level string literal bindings are separated from the rest of the module.
2. The strings are allocated via iservCmd, in allocateTopStrings
3. The mapping from binders to allocated strings (topStrings) is maintained in
BcM and used when generating code for variable references.
-}
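-- For example (illustrative), a top-level Core binding such as
--
--   lvl_rABC :: Addr#
--   lvl_rABC = "hello"#
--
-- is recognised by exprIsTickedString_maybe, allocated once in the
-- interpreter via MallocStrings (see allocateTopStrings), and later
-- occurrences of lvl_rABC are compiled against the resulting RemotePtr.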
-- -----------------------------------------------------------------------------
-- Generating byte code for an expression
-- Returns: the root BCO for this expression
coreExprToBCOs :: HscEnv
-> Module
-> CoreExpr
-> IO UnlinkedBCO
coreExprToBCOs hsc_env this_mod expr
= withTiming dflags
(text "ByteCodeGen"<+>brackets (ppr this_mod))
(const ()) $ do
-- create a totally bogus name for the top-level BCO; this
-- should be harmless, since it's never used for anything
let invented_name = mkSystemVarName (mkPseudoUniqueE 0) (fsLit "ExprTopLevel")
-- the uniques are needed to generate fresh variables when we introduce new
-- let bindings for ticked expressions
us <- mkSplitUniqSupply 'y'
(BcM_State _dflags _us _this_mod _final_ctr mallocd _ _ _, proto_bco)
<- runBc hsc_env us this_mod Nothing emptyVarEnv $
schemeR [] (invented_name, simpleFreeVars expr)
when (notNull mallocd)
(panic "ByteCodeGen.coreExprToBCOs: missing final emitBc?")
dumpIfSet_dyn dflags Opt_D_dump_BCOs "Proto-BCOs" FormatByteCode
(ppr proto_bco)
assembleOneBCO hsc_env proto_bco
where dflags = hsc_dflags hsc_env
-- The regular freeVars function gives more information than is useful to
-- us here. We need only the free variables, not everything in an FVAnn.
-- Historical note: At one point FVAnn was more sophisticated than just
-- a set. Now it isn't. So this function is much simpler. Keeping it around
-- so that if someone changes FVAnn, they will get a nice type error right
-- here.
simpleFreeVars :: CoreExpr -> AnnExpr Id DVarSet
simpleFreeVars = freeVars
-- -----------------------------------------------------------------------------
-- Compilation schema for the bytecode generator
type BCInstrList = OrdList BCInstr
newtype ByteOff = ByteOff Int
deriving (Enum, Eq, Integral, Num, Ord, Real)
newtype WordOff = WordOff Int
deriving (Enum, Eq, Integral, Num, Ord, Real)
wordsToBytes :: DynFlags -> WordOff -> ByteOff
wordsToBytes dflags = fromIntegral . (* wORD_SIZE dflags) . fromIntegral
-- Used when we know we have a whole number of words
bytesToWords :: DynFlags -> ByteOff -> WordOff
bytesToWords dflags (ByteOff bytes) =
let (q, r) = bytes `quotRem` (wORD_SIZE dflags)
in if r == 0
then fromIntegral q
else panic $ "ByteCodeGen.bytesToWords: bytes=" ++ show bytes
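-- For example (illustrative), with an 8-byte word size:
-- wordsToBytes dflags (WordOff 3) == ByteOff 24 and
-- bytesToWords dflags (ByteOff 24) == WordOff 3; a byte offset that is not a
-- whole number of words makes bytesToWords panic.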
wordSize :: DynFlags -> ByteOff
wordSize dflags = ByteOff (wORD_SIZE dflags)
type Sequel = ByteOff -- back off to this depth before ENTER
type StackDepth = ByteOff
-- | Maps Ids to their stack depth. This allows us to avoid having to mess with
-- it after each push/pop.
type BCEnv = Map Id StackDepth -- To find vars on the stack
{-
ppBCEnv :: BCEnv -> SDoc
ppBCEnv p
= text "begin-env"
$$ nest 4 (vcat (map pp_one (sortBy cmp_snd (Map.toList p))))
$$ text "end-env"
where
pp_one (var, offset) = int offset <> colon <+> ppr var <+> ppr (bcIdArgRep var)
cmp_snd x y = compare (snd x) (snd y)
-}
-- Create a BCO and do a spot of peephole optimisation on the insns
-- at the same time.
mkProtoBCO
:: DynFlags
-> name
-> BCInstrList
-> Either [AnnAlt Id DVarSet] (AnnExpr Id DVarSet)
-- ^ original expression; for debugging only
-> Int
-> Word16
-> [StgWord]
-> Bool -- True <=> is a return point, rather than a function
-> [FFIInfo]
-> ProtoBCO name
mkProtoBCO dflags nm instrs_ordlist origin arity bitmap_size bitmap is_ret ffis
= ProtoBCO {
protoBCOName = nm,
protoBCOInstrs = maybe_with_stack_check,
protoBCOBitmap = bitmap,
protoBCOBitmapSize = bitmap_size,
protoBCOArity = arity,
protoBCOExpr = origin,
protoBCOFFIs = ffis
}
where
-- Overestimate the stack usage (in words) of this BCO,
-- and if >= iNTERP_STACK_CHECK_THRESH, add an explicit
-- stack check. (The interpreter always does a stack check
-- for iNTERP_STACK_CHECK_THRESH words at the start of each
-- BCO anyway, so we only need to add an explicit one in the
-- (hopefully rare) cases when the (overestimated) stack use
-- exceeds iNTERP_STACK_CHECK_THRESH.
maybe_with_stack_check
| is_ret && stack_usage < fromIntegral (aP_STACK_SPLIM dflags) = peep_d
-- don't do stack checks at return points,
-- everything is aggregated up to the top BCO
-- (which must be a function).
-- That is, unless the stack usage is >= AP_STACK_SPLIM,
-- see bug #1466.
| stack_usage >= fromIntegral iNTERP_STACK_CHECK_THRESH
= STKCHECK stack_usage : peep_d
| otherwise
= peep_d -- the supposedly common case
-- We assume that this sum doesn't wrap
stack_usage = sum (map bciStackUse peep_d)
-- Merge local pushes
peep_d = peep (fromOL instrs_ordlist)
peep (PUSH_L off1 : PUSH_L off2 : PUSH_L off3 : rest)
= PUSH_LLL off1 (off2-1) (off3-2) : peep rest
peep (PUSH_L off1 : PUSH_L off2 : rest)
= PUSH_LL off1 (off2-1) : peep rest
peep (i:rest)
= i : peep rest
peep []
= []
argBits :: DynFlags -> [ArgRep] -> [Bool]
argBits _ [] = []
argBits dflags (rep : args)
| isFollowableArg rep = False : argBits dflags args
| otherwise = take (argRepSizeW dflags rep) (repeat True) ++ argBits dflags args
-- -----------------------------------------------------------------------------
-- schemeTopBind
-- Compile code for the right-hand side of a top-level binding
schemeTopBind :: (Id, AnnExpr Id DVarSet) -> BcM (ProtoBCO Name)
schemeTopBind (id, rhs)
| Just data_con <- isDataConWorkId_maybe id,
isNullaryRepDataCon data_con = do
dflags <- getDynFlags
-- Special case for the worker of a nullary data con.
-- It'll look like this: Nil = /\a -> Nil a
-- If we feed it into schemeR, we'll get
-- Nil = Nil
-- because mkConAppCode treats nullary constructor applications
-- by just re-using the single top-level definition. So
-- for the worker itself, we must allocate it directly.
-- ioToBc (putStrLn $ "top level BCO")
emitBc (mkProtoBCO dflags (getName id) (toOL [PACK data_con 0, ENTER])
(Right rhs) 0 0 [{-no bitmap-}] False{-not alts-})
| otherwise
= schemeR [{- No free variables -}] (getName id, rhs)
-- -----------------------------------------------------------------------------
-- schemeR
-- Compile code for a right-hand side, to give a BCO that,
-- when executed with the free variables and arguments on top of the stack,
-- will return with a pointer to the result on top of the stack, after
-- removing the free variables and arguments.
--
-- Park the resulting BCO in the monad. Also requires the
-- name of the variable to which this value was bound,
-- so as to give the resulting BCO a name.
schemeR :: [Id] -- Free vars of the RHS, ordered as they
-- will appear in the thunk. Empty for
-- top-level things, which have no free vars.
-> (Name, AnnExpr Id DVarSet)
-> BcM (ProtoBCO Name)
schemeR fvs (nm, rhs)
{-
| trace (showSDoc (
(char ' '
$$ (ppr.filter (not.isTyVar).dVarSetElems.fst) rhs
$$ pprCoreExpr (deAnnotate rhs)
$$ char ' '
))) False
= undefined
| otherwise
-}
= schemeR_wrk fvs nm rhs (collect rhs)
-- If an expression is a lambda (after apply bcView), return the
-- list of arguments to the lambda (in R-to-L order) and the
-- underlying expression
collect :: AnnExpr Id DVarSet -> ([Var], AnnExpr' Id DVarSet)
collect (_, e) = go [] e
where
go xs e | Just e' <- bcView e = go xs e'
go xs (AnnLam x (_,e))
| typePrimRep (idType x) `lengthExceeds` 1
= multiValException
| otherwise
= go (x:xs) e
go xs not_lambda = (reverse xs, not_lambda)
schemeR_wrk
:: [Id]
-> Name
-> AnnExpr Id DVarSet -- expression e, for debugging only
-> ([Var], AnnExpr' Var DVarSet) -- result of collect on e
-> BcM (ProtoBCO Name)
schemeR_wrk fvs nm original_body (args, body)
= do
dflags <- getDynFlags
let
all_args = reverse args ++ fvs
arity = length all_args
-- all_args are the args in reverse order. We're compiling a function
-- \fv1..fvn x1..xn -> e
-- i.e. the fvs come first
-- Stack arguments always take a whole number of words, we never pack
-- them unlike constructor fields.
szsb_args = map (wordsToBytes dflags . idSizeW dflags) all_args
sum_szsb_args = sum szsb_args
p_init = Map.fromList (zip all_args (mkStackOffsets 0 szsb_args))
-- make the arg bitmap
bits = argBits dflags (reverse (map bcIdArgRep all_args))
bitmap_size = genericLength bits
bitmap = mkBitmap dflags bits
body_code <- schemeER_wrk sum_szsb_args p_init body
emitBc (mkProtoBCO dflags nm body_code (Right original_body)
arity bitmap_size bitmap False{-not alts-})
-- introduce break instructions for ticked expressions
schemeER_wrk :: StackDepth -> BCEnv -> AnnExpr' Id DVarSet -> BcM BCInstrList
schemeER_wrk d p rhs
| AnnTick (Breakpoint tick_no fvs) (_annot, newRhs) <- rhs
= do code <- schemeE d 0 p newRhs
cc_arr <- getCCArray
this_mod <- moduleName <$> getCurrentModule
dflags <- getDynFlags
let idOffSets = getVarOffSets dflags d p fvs
let breakInfo = CgBreakInfo
{ cgb_vars = idOffSets
, cgb_resty = exprType (deAnnotate' newRhs)
}
newBreakInfo tick_no breakInfo
dflags <- getDynFlags
let cc | interpreterProfiled dflags = cc_arr ! tick_no
| otherwise = toRemotePtr nullPtr
let breakInstr = BRK_FUN (fromIntegral tick_no) (getUnique this_mod) cc
return $ breakInstr `consOL` code
| otherwise = schemeE d 0 p rhs
getVarOffSets :: DynFlags -> StackDepth -> BCEnv -> [Id] -> [Maybe (Id, Word16)]
getVarOffSets dflags depth env = map getOffSet
where
getOffSet id = case lookupBCEnv_maybe id env of
Nothing -> Nothing
Just offset ->
-- michalt: I'm not entirely sure why we need the stack
-- adjustment by 2 here. I initially thought that there's
-- something off with getIdValFromApStack (the only user of this
-- value), but it looks ok to me. My current hypothesis is that
-- this "adjustment" is needed due to stack manipulation for
-- BRK_FUN in Interpreter.c In any case, this is used only when
-- we trigger a breakpoint.
let !var_depth_ws =
trunc16W $ bytesToWords dflags (depth - offset) + 2
in Just (id, var_depth_ws)
truncIntegral16 :: Integral a => a -> Word16
truncIntegral16 w
| w > fromIntegral (maxBound :: Word16)
= panic "stack depth overflow"
| otherwise
= fromIntegral w
trunc16B :: ByteOff -> Word16
trunc16B = truncIntegral16
trunc16W :: WordOff -> Word16
trunc16W = truncIntegral16
fvsToEnv :: BCEnv -> DVarSet -> [Id]
-- Takes the free variables of a right-hand side, and
-- delivers an ordered list of the local variables that will
-- be captured in the thunk for the RHS
-- The BCEnv argument tells which variables are in the local
-- environment: these are the ones that should be captured
--
-- The code that constructs the thunk, and the code that executes
-- it, have to agree about this layout
fvsToEnv p fvs = [v | v <- dVarSetElems fvs,
isId v, -- Could be a type variable
v `Map.member` p]
-- -----------------------------------------------------------------------------
-- schemeE
returnUnboxedAtom
:: StackDepth
-> Sequel
-> BCEnv
-> AnnExpr' Id DVarSet
-> ArgRep
-> BcM BCInstrList
-- Returning an unlifted value.
-- Heave it on the stack, SLIDE, and RETURN.
returnUnboxedAtom d s p e e_rep = do
dflags <- getDynFlags
(push, szb) <- pushAtom d p e
return (push -- value onto stack
`appOL` mkSlideB dflags szb (d - s) -- clear to sequel
`snocOL` RETURN_UBX e_rep) -- go
-- Compile code to apply the given expression to the remaining args
-- on the stack, returning a HNF.
schemeE
:: StackDepth -> Sequel -> BCEnv -> AnnExpr' Id DVarSet -> BcM BCInstrList
schemeE d s p e
| Just e' <- bcView e
= schemeE d s p e'
-- Delegate tail-calls to schemeT.
schemeE d s p e@(AnnApp _ _) = schemeT d s p e
schemeE d s p e@(AnnLit lit) = returnUnboxedAtom d s p e (typeArgRep (literalType lit))
schemeE d s p e@(AnnCoercion {}) = returnUnboxedAtom d s p e V
schemeE d s p e@(AnnVar v)
-- See Note [Not-necessarily-lifted join points], step 3.
| isNNLJoinPoint v = doTailCall d s p (protectNNLJoinPointId v) [AnnVar voidPrimId]
| isUnliftedType (idType v) = returnUnboxedAtom d s p e (bcIdArgRep v)
| otherwise = schemeT d s p e
schemeE d s p (AnnLet (AnnNonRec x (_,rhs)) (_,body))
| (AnnVar v, args_r_to_l) <- splitApp rhs,
Just data_con <- isDataConWorkId_maybe v,
dataConRepArity data_con == length args_r_to_l
= do -- Special case for a non-recursive let whose RHS is a
-- saturated constructor application.
-- Just allocate the constructor and carry on
alloc_code <- mkConAppCode d s p data_con args_r_to_l
dflags <- getDynFlags
let !d2 = d + wordSize dflags
body_code <- schemeE d2 s (Map.insert x d2 p) body
return (alloc_code `appOL` body_code)
-- General case for let. Generates correct, if inefficient, code in
-- all situations.
schemeE d s p (AnnLet binds (_,body)) = do
dflags <- getDynFlags
let (xs,rhss) = case binds of AnnNonRec x rhs -> ([x],[rhs])
AnnRec xs_n_rhss -> unzip xs_n_rhss
n_binds = genericLength xs
fvss = map (fvsToEnv p' . fst) rhss
-- See Note [Not-necessarily-lifted join points], step 2.
(xs',rhss') = zipWithAndUnzip protectNNLJoinPointBind xs rhss
-- Sizes of free vars
size_w = trunc16W . idSizeW dflags
sizes = map (\rhs_fvs -> sum (map size_w rhs_fvs)) fvss
-- the arity of each rhs
arities = map (genericLength . fst . collect) rhss'
-- This p', d' defn is safe because all the items being pushed
-- are ptrs, so all have size 1 word. d' and p' reflect the stack
-- after the closures have been allocated in the heap (but not
-- filled in), and pointers to them parked on the stack.
offsets = mkStackOffsets d (genericReplicate n_binds (wordSize dflags))
p' = Map.insertList (zipE xs' offsets) p
d' = d + wordsToBytes dflags n_binds
zipE = zipEqual "schemeE"
-- ToDo: don't build thunks for things with no free variables
build_thunk
:: StackDepth
-> [Id]
-> Word16
-> ProtoBCO Name
-> Word16
-> Word16
-> BcM BCInstrList
build_thunk _ [] size bco off arity
= return (PUSH_BCO bco `consOL` unitOL (mkap (off+size) size))
where
mkap | arity == 0 = MKAP
| otherwise = MKPAP
build_thunk dd (fv:fvs) size bco off arity = do
(push_code, pushed_szb) <- pushAtom dd p' (AnnVar fv)
more_push_code <-
build_thunk (dd + pushed_szb) fvs size bco off arity
return (push_code `appOL` more_push_code)
alloc_code = toOL (zipWith mkAlloc sizes arities)
where mkAlloc sz 0
| is_tick = ALLOC_AP_NOUPD sz
| otherwise = ALLOC_AP sz
mkAlloc sz arity = ALLOC_PAP arity sz
is_tick = case binds of
AnnNonRec id _ -> occNameFS (getOccName id) == tickFS
_other -> False
compile_bind d' fvs x rhs size arity off = do
bco <- schemeR fvs (getName x,rhs)
build_thunk d' fvs size bco off arity
compile_binds =
[ compile_bind d' fvs x rhs size arity (trunc16W n)
| (fvs, x, rhs, size, arity, n) <-
zip6 fvss xs' rhss' sizes arities [n_binds, n_binds-1 .. 1]
]
body_code <- schemeE d' s p' body
thunk_codes <- sequence compile_binds
return (alloc_code `appOL` concatOL thunk_codes `appOL` body_code)
-- Introduce a let binding for a ticked case expression. This rule
-- *should* only fire when the expression was not already let-bound
-- (the code gen for let bindings should take care of that). Todo: we
-- call exprFreeVars on a deAnnotated expression, this may not be the
-- best way to calculate the free vars but it seemed like the least
-- intrusive thing to do
schemeE d s p exp@(AnnTick (Breakpoint _id _fvs) _rhs)
| isLiftedTypeKind (typeKind ty)
= do id <- newId ty
-- Todo: is emptyVarSet correct on the next line?
let letExp = AnnLet (AnnNonRec id (fvs, exp)) (emptyDVarSet, AnnVar id)
schemeE d s p letExp
| otherwise
= do -- If the result type is not definitely lifted, then we must generate
-- let f = \s . tick<n> e
-- in f realWorld#
-- When we stop at the breakpoint, _result will have an unlifted
-- type and hence won't be bound in the environment, but the
-- breakpoint will otherwise work fine.
--
-- NB (#12007) this /also/ applies for if (ty :: TYPE r), where
-- r :: RuntimeRep is a variable. This can happen in the
-- continuations for a pattern-synonym matcher
-- match = /\(r::RuntimeRep) /\(a::TYPE r).
-- \(k :: Int -> a) \(v::T).
-- case v of MkV n -> k n
-- Here (k n) :: a :: Type r, so we don't know if it's lifted
-- or not; but that should be fine provided we add that void arg.
id <- newId (mkVisFunTy realWorldStatePrimTy ty)
st <- newId realWorldStatePrimTy
let letExp = AnnLet (AnnNonRec id (fvs, AnnLam st (emptyDVarSet, exp)))
(emptyDVarSet, (AnnApp (emptyDVarSet, AnnVar id)
(emptyDVarSet, AnnVar realWorldPrimId)))
schemeE d s p letExp
where
exp' = deAnnotate' exp
fvs = exprFreeVarsDSet exp'
ty = exprType exp'
-- ignore other kinds of tick
schemeE d s p (AnnTick _ (_, rhs)) = schemeE d s p rhs
schemeE d s p (AnnCase (_,scrut) _ _ []) = schemeE d s p scrut
-- no alts: scrut is guaranteed to diverge
schemeE d s p (AnnCase scrut bndr _ [(DataAlt dc, [bind1, bind2], rhs)])
| isUnboxedTupleCon dc -- handles pairs with one void argument (e.g. state token)
-- Convert
-- case .... of x { (# V'd-thing, a #) -> ... }
-- to
-- case .... of a { DEFAULT -> ... }
-- because the return convention for both are identical.
--
-- Note that it does not matter losing the void-rep thing from the
-- envt (it won't be bound now) because we never look such things up.
, Just res <- case (typePrimRep (idType bind1), typePrimRep (idType bind2)) of
([], [_])
-> Just $ doCase d s p scrut bind2 [(DEFAULT, [], rhs)] (Just bndr)
([_], [])
-> Just $ doCase d s p scrut bind1 [(DEFAULT, [], rhs)] (Just bndr)
_ -> Nothing
= res
schemeE d s p (AnnCase scrut bndr _ [(DataAlt dc, [bind1], rhs)])
| isUnboxedTupleCon dc
, typePrimRep (idType bndr) `lengthAtMost` 1 -- handles unit tuples
= doCase d s p scrut bind1 [(DEFAULT, [], rhs)] (Just bndr)
schemeE d s p (AnnCase scrut bndr _ alt@[(DEFAULT, [], _)])
| isUnboxedTupleType (idType bndr)
, Just ty <- case typePrimRep (idType bndr) of
[_] -> Just (unwrapType (idType bndr))
[] -> Just voidPrimTy
_ -> Nothing
-- handles any pattern with a single non-void binder; in particular I/O
-- monad returns (# RealWorld#, a #)
= doCase d s p scrut (bndr `setIdType` ty) alt (Just bndr)
schemeE d s p (AnnCase scrut bndr _ alts)
= doCase d s p scrut bndr alts Nothing{-not an unboxed tuple-}
schemeE _ _ _ expr
= pprPanic "ByteCodeGen.schemeE: unhandled case"
(pprCoreExpr (deAnnotate' expr))
-- Is this Id a not-necessarily-lifted join point?
-- See Note [Not-necessarily-lifted join points], step 1
isNNLJoinPoint :: Id -> Bool
isNNLJoinPoint x = isJoinId x &&
Just True /= isLiftedType_maybe (idType x)
-- If necessary, modify this Id and body to protect not-necessarily-lifted join points.
-- See Note [Not-necessarily-lifted join points], step 2.
protectNNLJoinPointBind :: Id -> AnnExpr Id DVarSet -> (Id, AnnExpr Id DVarSet)
protectNNLJoinPointBind x rhs@(fvs, _)
| isNNLJoinPoint x
= (protectNNLJoinPointId x, (fvs, AnnLam voidArgId rhs))
| otherwise
= (x, rhs)
-- Update an Id's type to take a Void# argument.
-- Precondition: the Id is a not-necessarily-lifted join point.
-- See Note [Not-necessarily-lifted join points]
protectNNLJoinPointId :: Id -> Id
protectNNLJoinPointId x
= ASSERT( isNNLJoinPoint x )
updateVarType (voidPrimTy `mkVisFunTy`) x
{-
Ticked Expressions
------------------
The idea is that the "breakpoint<n,fvs> E" is really just an annotation on
the code. When we find such a thing, we pull out the useful information,
and then compile the code as if it was just the expression E.
Note [Not-necessarily-lifted join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A join point variable is essentially a goto-label: it is, for example,
never used as an argument to another function, and it is called only
in tail position. See Note [Join points] and Note [Invariants on join points],
both in CoreSyn. Because join points do not compile to true, red-blooded
variables (with, e.g., registers allocated to them), they are allowed
to be levity-polymorphic. (See invariant #6 in Note [Invariants on join points]
in CoreSyn.)
However, in this byte-code generator, join points *are* treated just as
ordinary variables. There is no check whether a binding is for a join point
or not; they are all treated uniformly. (Perhaps there is a missed optimization
opportunity here, but that is beyond the scope of my (Richard E's) Thursday.)
We thus must have *some* strategy for dealing with levity-polymorphic and
unlifted join points. Levity-polymorphic variables are generally not allowed
(though levity-polymorphic join points *are*; see Note [Invariants on join points]
in CoreSyn, point 6), and we don't wish to evaluate unlifted join points eagerly.
The questionable join points are *not-necessarily-lifted join points*
(NNLJPs). (Not having such a strategy led to #16509, which panicked in the
isUnliftedType check in the AnnVar case of schemeE.) Here is the strategy:
1. Detect NNLJPs. This is done in isNNLJoinPoint.
2. When binding an NNLJP, add a `\ (_ :: Void#) ->` to its RHS, and modify the
type to tack on a `Void# ->`. (Void# is written voidPrimTy within GHC.)
Note that functions are never levity-polymorphic, so this transformation
changes an NNLJP to a non-levity-polymorphic join point. This is done
in protectNNLJoinPointBind, called from the AnnLet case of schemeE.
3. At an occurrence of an NNLJP, add an application to void# (called voidPrimId),
being careful to note the new type of the NNLJP. This is done in the AnnVar
case of schemeE, with help from protectNNLJoinPointId.
Here is an example. Suppose we have
f = \(r :: RuntimeRep) (a :: TYPE r) (x :: T).
join j :: a
j = error @r @a "bloop"
in case x of
A -> j
B -> j
C -> error @r @a "blurp"
Our plan is to behave as if the code was
f = \(r :: RuntimeRep) (a :: TYPE r) (x :: T).
let j :: (Void# -> a)
j = \ _ -> error @r @a "bloop"
in case x of
A -> j void#
B -> j void#
C -> error @r @a "blurp"
It's a bit hacky, but it works well in practice and is local. I suspect the
Right Fix is to take advantage of join points as goto-labels.
-}
-- Compile code to do a tail call. Specifically, push the fn,
-- slide the on-stack app back down to the sequel depth,
-- and enter. Four cases:
--
-- 0. (Nasty hack).
-- An application "GHC.Prim.tagToEnum# <type> unboxed-int".
-- The int will be on the stack. Generate a code sequence
-- to convert it to the relevant constructor, SLIDE and ENTER.
--
-- 1. The fn denotes a ccall. Defer to generateCCall.
--
-- 2. (Another nasty hack). Spot (# a::V, b #) and treat
-- it simply as b -- since the representations are identical
-- (the V takes up zero stack space). Also, spot
-- (# b #) and treat it as b.
--
-- 3. Application of a constructor, by defn saturated.
-- Split the args into ptrs and non-ptrs, and push the nonptrs,
-- then the ptrs, and then do PACK and RETURN.
--
-- 4. Otherwise, it must be a function call. Push the args
-- right to left, SLIDE and ENTER.
schemeT :: StackDepth -- Stack depth
-> Sequel -- Sequel depth
-> BCEnv -- stack env
-> AnnExpr' Id DVarSet
-> BcM BCInstrList
schemeT d s p app
-- Case 0
| Just (arg, constr_names) <- maybe_is_tagToEnum_call app
= implement_tagToId d s p arg constr_names
-- Case 1
| Just (CCall ccall_spec) <- isFCallId_maybe fn
= if isSupportedCConv ccall_spec
then generateCCall d s p ccall_spec fn args_r_to_l
else unsupportedCConvException
-- Case 2: Constructor application
| Just con <- maybe_saturated_dcon
, isUnboxedTupleCon con
= case args_r_to_l of
[arg1,arg2] | isVAtom arg1 ->
unboxedTupleReturn d s p arg2
[arg1,arg2] | isVAtom arg2 ->
unboxedTupleReturn d s p arg1
_other -> multiValException
-- Case 3: Ordinary data constructor
| Just con <- maybe_saturated_dcon
= do alloc_con <- mkConAppCode d s p con args_r_to_l
dflags <- getDynFlags
return (alloc_con `appOL`
mkSlideW 1 (bytesToWords dflags $ d - s) `snocOL`
ENTER)
-- Case 4: Tail call of function
| otherwise
= doTailCall d s p fn args_r_to_l
where
-- Extract the args (R->L) and fn
-- The function will necessarily be a variable,
-- because we are compiling a tail call
(AnnVar fn, args_r_to_l) = splitApp app
-- Only consider this to be a constructor application iff it is
-- saturated. Otherwise, we'll call the constructor wrapper.
n_args = length args_r_to_l
maybe_saturated_dcon
= case isDataConWorkId_maybe fn of
Just con | dataConRepArity con == n_args -> Just con
_ -> Nothing
-- -----------------------------------------------------------------------------
-- Generate code to build a constructor application,
-- leaving it on top of the stack
mkConAppCode
:: StackDepth
-> Sequel
-> BCEnv
-> DataCon -- The data constructor
-> [AnnExpr' Id DVarSet] -- Args, in *reverse* order
-> BcM BCInstrList
mkConAppCode _ _ _ con [] -- Nullary constructor
= ASSERT( isNullaryRepDataCon con )
return (unitOL (PUSH_G (getName (dataConWorkId con))))
-- Instead of doing a PACK, which would allocate a fresh
-- copy of this constructor, use the single shared version.
mkConAppCode orig_d _ p con args_r_to_l =
ASSERT( args_r_to_l `lengthIs` dataConRepArity con ) app_code
where
app_code = do
dflags <- getDynFlags
-- The args are initially in reverse order, but mkVirtHeapOffsets
-- expects them to be left-to-right.
let non_voids =
[ NonVoid (prim_rep, arg)
| arg <- reverse args_r_to_l
, let prim_rep = atomPrimRep arg
, not (isVoidRep prim_rep)
]
(_, _, args_offsets) =
mkVirtHeapOffsetsWithPadding dflags StdHeader non_voids
do_pushery !d (arg : args) = do
(push, arg_bytes) <- case arg of
(Padding l _) -> return $! pushPadding l
(FieldOff a _) -> pushConstrAtom d p (fromNonVoid a)
more_push_code <- do_pushery (d + arg_bytes) args
return (push `appOL` more_push_code)
do_pushery !d [] = do
let !n_arg_words = trunc16W $ bytesToWords dflags (d - orig_d)
return (unitOL (PACK con n_arg_words))
-- Push on the stack in the reverse order.
do_pushery orig_d (reverse args_offsets)
-- -----------------------------------------------------------------------------
-- Returning an unboxed tuple with one non-void component (the only
-- case we can handle).
--
-- Remember, we don't want to *evaluate* the component that is being
-- returned, even if it is a pointed type. We always just return.
unboxedTupleReturn
:: StackDepth -> Sequel -> BCEnv -> AnnExpr' Id DVarSet -> BcM BCInstrList
unboxedTupleReturn d s p arg = returnUnboxedAtom d s p arg (atomRep arg)
-- -----------------------------------------------------------------------------
-- Generate code for a tail-call
doTailCall
:: StackDepth
-> Sequel
-> BCEnv
-> Id
-> [AnnExpr' Id DVarSet]
-> BcM BCInstrList
doTailCall init_d s p fn args = do_pushes init_d args (map atomRep args)
where
do_pushes !d [] reps = do
ASSERT( null reps ) return ()
(push_fn, sz) <- pushAtom d p (AnnVar fn)
dflags <- getDynFlags
ASSERT( sz == wordSize dflags ) return ()
let slide = mkSlideB dflags (d - init_d + wordSize dflags) (init_d - s)
return (push_fn `appOL` (slide `appOL` unitOL ENTER))
do_pushes !d args reps = do
let (push_apply, n, rest_of_reps) = findPushSeq reps
(these_args, rest_of_args) = splitAt n args
(next_d, push_code) <- push_seq d these_args
dflags <- getDynFlags
instrs <- do_pushes (next_d + wordSize dflags) rest_of_args rest_of_reps
-- ^^^ for the PUSH_APPLY_ instruction
return (push_code `appOL` (push_apply `consOL` instrs))
push_seq d [] = return (d, nilOL)
push_seq d (arg:args) = do
(push_code, sz) <- pushAtom d p arg
(final_d, more_push_code) <- push_seq (d + sz) args
return (final_d, push_code `appOL` more_push_code)
-- v. similar to CgStackery.findMatch, ToDo: merge
findPushSeq :: [ArgRep] -> (BCInstr, Int, [ArgRep])
findPushSeq (P: P: P: P: P: P: rest)
= (PUSH_APPLY_PPPPPP, 6, rest)
findPushSeq (P: P: P: P: P: rest)
= (PUSH_APPLY_PPPPP, 5, rest)
findPushSeq (P: P: P: P: rest)
= (PUSH_APPLY_PPPP, 4, rest)
findPushSeq (P: P: P: rest)
= (PUSH_APPLY_PPP, 3, rest)
findPushSeq (P: P: rest)
= (PUSH_APPLY_PP, 2, rest)
findPushSeq (P: rest)
= (PUSH_APPLY_P, 1, rest)
findPushSeq (V: rest)
= (PUSH_APPLY_V, 1, rest)
findPushSeq (N: rest)
= (PUSH_APPLY_N, 1, rest)
findPushSeq (F: rest)
= (PUSH_APPLY_F, 1, rest)
findPushSeq (D: rest)
= (PUSH_APPLY_D, 1, rest)
findPushSeq (L: rest)
= (PUSH_APPLY_L, 1, rest)
findPushSeq _
= panic "ByteCodeGen.findPushSeq"
-- -----------------------------------------------------------------------------
-- Case expressions
doCase
:: StackDepth
-> Sequel
-> BCEnv
-> AnnExpr Id DVarSet
-> Id
-> [AnnAlt Id DVarSet]
-> Maybe Id -- Just x <=> is an unboxed tuple case with scrut binder,
-- don't enter the result
-> BcM BCInstrList
doCase d s p (_,scrut) bndr alts is_unboxed_tuple
| typePrimRep (idType bndr) `lengthExceeds` 1
= multiValException
| otherwise
= do
dflags <- getDynFlags
let
profiling
| gopt Opt_ExternalInterpreter dflags = gopt Opt_SccProfilingOn dflags
| otherwise = rtsIsProfiled
-- Top of stack is the return itbl, as usual.
-- underneath it is the pointer to the alt_code BCO.
-- When an alt is entered, it assumes the returned value is
-- on top of the itbl.
ret_frame_size_b :: StackDepth
ret_frame_size_b = 2 * wordSize dflags
-- The extra frame we push to save/restore the CCCS when profiling
save_ccs_size_b | profiling = 2 * wordSize dflags
| otherwise = 0
-- An unlifted value gets an extra info table pushed on top
-- when it is returned.
unlifted_itbl_size_b :: StackDepth
unlifted_itbl_size_b | isAlgCase = 0
| otherwise = wordSize dflags
-- depth of stack after the return value has been pushed
d_bndr =
d + ret_frame_size_b + wordsToBytes dflags (idSizeW dflags bndr)
-- depth of stack after the extra info table for an unboxed return
-- has been pushed, if any. This is the stack depth at the
-- continuation.
d_alts = d_bndr + unlifted_itbl_size_b
-- Env in which to compile the alts, not including
-- any vars bound by the alts themselves
p_alts0 = Map.insert bndr d_bndr p
p_alts = case is_unboxed_tuple of
Just ubx_bndr -> Map.insert ubx_bndr d_bndr p_alts0
Nothing -> p_alts0
bndr_ty = idType bndr
isAlgCase = not (isUnliftedType bndr_ty) && isNothing is_unboxed_tuple
-- given an alt, return a discr and code for it.
codeAlt (DEFAULT, _, (_,rhs))
= do rhs_code <- schemeE d_alts s p_alts rhs
return (NoDiscr, rhs_code)
codeAlt alt@(_, bndrs, (_,rhs))
-- primitive or nullary constructor alt: no need to UNPACK
| null real_bndrs = do
rhs_code <- schemeE d_alts s p_alts rhs
return (my_discr alt, rhs_code)
-- If an alt attempts to match on an unboxed tuple or sum, we must
-- bail out, as the bytecode compiler can't handle them.
-- (See #14608.)
| any (\bndr -> typePrimRep (idType bndr) `lengthExceeds` 1) bndrs
= multiValException
-- algebraic alt with some binders
| otherwise =
let (tot_wds, _ptrs_wds, args_offsets) =
mkVirtHeapOffsets dflags NoHeader
[ NonVoid (bcIdPrimRep id, id)
| NonVoid id <- nonVoidIds real_bndrs
]
size = WordOff tot_wds
stack_bot = d_alts + wordsToBytes dflags size
-- convert offsets from Sp into offsets into the virtual stack
p' = Map.insertList
[ (arg, stack_bot - ByteOff offset)
| (NonVoid arg, offset) <- args_offsets ]
p_alts
in do
MASSERT(isAlgCase)
rhs_code <- schemeE stack_bot s p' rhs
return (my_discr alt,
unitOL (UNPACK (trunc16W size)) `appOL` rhs_code)
where
real_bndrs = filterOut isTyVar bndrs
my_discr (DEFAULT, _, _) = NoDiscr {-shouldn't really happen-}
my_discr (DataAlt dc, _, _)
| isUnboxedTupleCon dc || isUnboxedSumCon dc
= multiValException
| otherwise
= DiscrP (fromIntegral (dataConTag dc - fIRST_TAG))
my_discr (LitAlt l, _, _)
= case l of LitNumber LitNumInt i _ -> DiscrI (fromInteger i)
LitNumber LitNumWord w _ -> DiscrW (fromInteger w)
LitFloat r -> DiscrF (fromRational r)
LitDouble r -> DiscrD (fromRational r)
LitChar i -> DiscrI (ord i)
_ -> pprPanic "schemeE(AnnCase).my_discr" (ppr l)
maybe_ncons
| not isAlgCase = Nothing
| otherwise
= case [dc | (DataAlt dc, _, _) <- alts] of
[] -> Nothing
(dc:_) -> Just (tyConFamilySize (dataConTyCon dc))
-- the bitmap is relative to stack depth d, i.e. before the
-- BCO, info table and return value are pushed on.
-- This bit of code is v. similar to buildLivenessMask in CgBindery,
-- except that here we build the bitmap from the known bindings of
-- things that are pointers, whereas in CgBindery the code builds the
-- bitmap from the free slots and unboxed bindings.
-- (ToDo: merge?)
--
-- NOTE [7/12/2006] bug #1013, testcase ghci/should_run/ghci002.
-- The bitmap must cover the portion of the stack up to the sequel only.
-- Previously we were building a bitmap for the whole depth (d), but we
-- really want a bitmap up to depth (d-s). This affects compilation of
-- case-of-case expressions, which is the only time we can be compiling a
-- case expression with s /= 0.
bitmap_size = trunc16W $ bytesToWords dflags (d - s)
bitmap_size' :: Int
bitmap_size' = fromIntegral bitmap_size
bitmap = intsToReverseBitmap dflags bitmap_size'{-size-}
(sort (filter (< bitmap_size') rel_slots))
where
binds = Map.toList p
-- NB: unboxed tuple cases bind the scrut binder to the same offset
-- as one of the alt binders, so we have to remove any duplicates here:
rel_slots = nub $ map fromIntegral $ concat (map spread binds)
spread (id, offset) | isFollowableArg (bcIdArgRep id) = [ rel_offset ]
| otherwise = []
where rel_offset = trunc16W $ bytesToWords dflags (d - offset)
alt_stuff <- mapM codeAlt alts
alt_final <- mkMultiBranch maybe_ncons alt_stuff
let
alt_bco_name = getName bndr
alt_bco = mkProtoBCO dflags alt_bco_name alt_final (Left alts)
0{-no arity-} bitmap_size bitmap True{-is alts-}
-- trace ("case: bndr = " ++ showSDocDebug (ppr bndr) ++ "\ndepth = " ++ show d ++ "\nenv = \n" ++ showSDocDebug (ppBCEnv p) ++
-- "\n bitmap = " ++ show bitmap) $ do
scrut_code <- schemeE (d + ret_frame_size_b + save_ccs_size_b)
(d + ret_frame_size_b + save_ccs_size_b)
p scrut
alt_bco' <- emitBc alt_bco
let push_alts
| isAlgCase = PUSH_ALTS alt_bco'
| otherwise = PUSH_ALTS_UNLIFTED alt_bco' (typeArgRep bndr_ty)
return (push_alts `consOL` scrut_code)
-- -----------------------------------------------------------------------------
-- Deal with a CCall.
-- Taggedly push the args onto the stack R->L,
-- deferencing ForeignObj#s and adjusting addrs to point to
-- payloads in Ptr/Byte arrays. Then, generate the marshalling
-- (machine) code for the ccall, and create bytecodes to call that and
-- then return in the right way.
generateCCall
:: StackDepth
-> Sequel
-> BCEnv
-> CCallSpec -- where to call
-> Id -- of target, for type info
-> [AnnExpr' Id DVarSet] -- args (atoms)
-> BcM BCInstrList
generateCCall d0 s p (CCallSpec target cconv safety) fn args_r_to_l
= do
dflags <- getDynFlags
let
-- useful constants
addr_size_b :: ByteOff
addr_size_b = wordSize dflags
-- Get the args on the stack, with tags and suitably
-- dereferenced for the CCall. For each arg, return the
-- depth to the first word of the bits for that arg, and the
-- ArgRep of what was actually pushed.
pargs
:: ByteOff -> [AnnExpr' Id DVarSet] -> BcM [(BCInstrList, PrimRep)]
pargs _ [] = return []
pargs d (a:az)
= let arg_ty = unwrapType (exprType (deAnnotate' a))
in case tyConAppTyCon_maybe arg_ty of
-- Don't push the FO; instead push the Addr# it
-- contains.
Just t
| t == arrayPrimTyCon || t == mutableArrayPrimTyCon
-> do rest <- pargs (d + addr_size_b) az
code <- parg_ArrayishRep (fromIntegral (arrPtrsHdrSize dflags)) d p a
return ((code,AddrRep):rest)
| t == smallArrayPrimTyCon || t == smallMutableArrayPrimTyCon
-> do rest <- pargs (d + addr_size_b) az
code <- parg_ArrayishRep (fromIntegral (smallArrPtrsHdrSize dflags)) d p a
return ((code,AddrRep):rest)
| t == byteArrayPrimTyCon || t == mutableByteArrayPrimTyCon
-> do rest <- pargs (d + addr_size_b) az
code <- parg_ArrayishRep (fromIntegral (arrWordsHdrSize dflags)) d p a
return ((code,AddrRep):rest)
-- Default case: push taggedly, but otherwise intact.
_
-> do (code_a, sz_a) <- pushAtom d p a
rest <- pargs (d + sz_a) az
return ((code_a, atomPrimRep a) : rest)
-- Do magic for Ptr/Byte arrays. Push a ptr to the array on
-- the stack but then advance it over the headers, so as to
-- point to the payload.
parg_ArrayishRep
:: Word16
-> StackDepth
-> BCEnv
-> AnnExpr' Id DVarSet
-> BcM BCInstrList
parg_ArrayishRep hdrSize d p a
= do (push_fo, _) <- pushAtom d p a
-- The ptr points at the header. Advance it over the
-- header and then pretend this is an Addr#.
return (push_fo `snocOL` SWIZZLE 0 hdrSize)
code_n_reps <- pargs d0 args_r_to_l
let
(pushs_arg, a_reps_pushed_r_to_l) = unzip code_n_reps
a_reps_sizeW = sum (map (repSizeWords dflags) a_reps_pushed_r_to_l)
push_args = concatOL pushs_arg
!d_after_args = d0 + wordsToBytes dflags a_reps_sizeW
a_reps_pushed_RAW
| null a_reps_pushed_r_to_l || not (isVoidRep (head a_reps_pushed_r_to_l))
= panic "ByteCodeGen.generateCCall: missing or invalid World token?"
| otherwise
= reverse (tail a_reps_pushed_r_to_l)
-- Now: a_reps_pushed_RAW are the reps which are actually on the stack.
-- push_args is the code to do that.
-- d_after_args is the stack depth once the args are on.
-- Get the result rep.
(returns_void, r_rep)
= case maybe_getCCallReturnRep (idType fn) of
Nothing -> (True, VoidRep)
Just rr -> (False, rr)
{-
Because the Haskell stack grows down, the a_reps refer to
lowest to highest addresses in that order. The args for the call
are on the stack. Now push an unboxed Addr# indicating
the C function to call. Then push a dummy placeholder for the
result. Finally, emit a CCALL insn with an offset pointing to the
Addr# just pushed, and a literal field holding the mallocville
address of the piece of marshalling code we generate.
So, just prior to the CCALL insn, the stack looks like this
(growing down, as usual):
<arg_n>
...
<arg_1>
Addr# address_of_C_fn
<placeholder-for-result#> (must be an unboxed type)
The interpreter then calls the marshall code mentioned
in the CCALL insn, passing it (& <placeholder-for-result#>),
that is, the addr of the topmost word in the stack.
When this returns, the placeholder will have been
filled in. The placeholder is slid down to the sequel
depth, and we RETURN.
This arrangement makes it simple to do f-i-dynamic since the Addr#
value is the first arg anyway.
The marshalling code is generated specifically for this
call site, and so knows exactly the (Haskell) stack
offsets of the args, fn address and placeholder. It
copies the args to the C stack, calls the stacked addr,
and parks the result back in the placeholder. The interpreter
calls it as a normal C call, assuming it has a signature
void marshall_code ( StgWord* ptr_to_top_of_stack )
-}
-- resolve static address
maybe_static_target :: Maybe Literal
maybe_static_target =
case target of
DynamicTarget -> Nothing
StaticTarget _ _ _ False ->
panic "generateCCall: unexpected FFI value import"
StaticTarget _ target _ True ->
Just (LitLabel target mb_size IsFunction)
where
mb_size
| OSMinGW32 <- platformOS (targetPlatform dflags)
, StdCallConv <- cconv
= Just (fromIntegral a_reps_sizeW * wORD_SIZE dflags)
| otherwise
= Nothing
let
is_static = isJust maybe_static_target
-- Get the arg reps, zapping the leading Addr# in the dynamic case
a_reps -- | trace (showSDoc (ppr a_reps_pushed_RAW)) False = error "???"
| is_static = a_reps_pushed_RAW
| otherwise = if null a_reps_pushed_RAW
then panic "ByteCodeGen.generateCCall: dyn with no args"
else tail a_reps_pushed_RAW
-- push the Addr#
(push_Addr, d_after_Addr)
| Just machlabel <- maybe_static_target
= (toOL [PUSH_UBX machlabel 1], d_after_args + addr_size_b)
| otherwise -- is already on the stack
= (nilOL, d_after_args)
-- Push the return placeholder. For a call returning nothing,
-- this is a V (tag).
r_sizeW = repSizeWords dflags r_rep
d_after_r = d_after_Addr + wordsToBytes dflags r_sizeW
push_r =
if returns_void
then nilOL
else unitOL (PUSH_UBX (mkDummyLiteral dflags r_rep) (trunc16W r_sizeW))
-- generate the marshalling code we're going to call
-- Offset of the next stack frame down the stack. The CCALL
-- instruction needs to describe the chunk of stack containing
-- the ccall args to the GC, so it needs to know how large it
-- is. See comment in Interpreter.c with the CCALL instruction.
stk_offset = trunc16W $ bytesToWords dflags (d_after_r - s)
conv = case cconv of
CCallConv -> FFICCall
StdCallConv -> FFIStdCall
_ -> panic "ByteCodeGen: unexpected calling convention"
-- the only difference in libffi mode is that we prepare a cif
-- describing the call type by calling libffi, and we attach the
-- address of this to the CCALL instruction.
let ffires = primRepToFFIType dflags r_rep
ffiargs = map (primRepToFFIType dflags) a_reps
hsc_env <- getHscEnv
token <- ioToBc $ iservCmd hsc_env (PrepFFI conv ffiargs ffires)
recordFFIBc token
let
-- do the call
do_call = unitOL (CCALL stk_offset token flags)
where flags = case safety of
PlaySafe -> 0x0
PlayInterruptible -> 0x1
PlayRisky -> 0x2
-- slide and return
d_after_r_min_s = bytesToWords dflags (d_after_r - s)
wrapup = mkSlideW (trunc16W r_sizeW) (d_after_r_min_s - r_sizeW)
`snocOL` RETURN_UBX (toArgRep r_rep)
--trace (show (arg1_offW, args_offW , (map argRepSizeW a_reps) )) $
return (
push_args `appOL`
push_Addr `appOL` push_r `appOL` do_call `appOL` wrapup
)
primRepToFFIType :: DynFlags -> PrimRep -> FFIType
primRepToFFIType dflags r
= case r of
VoidRep -> FFIVoid
IntRep -> signed_word
WordRep -> unsigned_word
Int64Rep -> FFISInt64
Word64Rep -> FFIUInt64
AddrRep -> FFIPointer
FloatRep -> FFIFloat
DoubleRep -> FFIDouble
_ -> panic "primRepToFFIType"
where
(signed_word, unsigned_word)
| wORD_SIZE dflags == 4 = (FFISInt32, FFIUInt32)
| wORD_SIZE dflags == 8 = (FFISInt64, FFIUInt64)
| otherwise = panic "primTyDescChar"
-- Make a dummy literal, to be used as a placeholder for FFI return
-- values on the stack.
mkDummyLiteral :: DynFlags -> PrimRep -> Literal
mkDummyLiteral dflags pr
= case pr of
IntRep -> mkLitInt dflags 0
WordRep -> mkLitWord dflags 0
Int64Rep -> mkLitInt64 0
Word64Rep -> mkLitWord64 0
AddrRep -> LitNullAddr
DoubleRep -> LitDouble 0
FloatRep -> LitFloat 0
_ -> pprPanic "mkDummyLiteral" (ppr pr)
-- Convert (eg)
-- GHC.Prim.Char# -> GHC.Prim.State# GHC.Prim.RealWorld
-- -> (# GHC.Prim.State# GHC.Prim.RealWorld, GHC.Prim.Int# #)
--
-- to Just IntRep
-- and check that an unboxed pair is returned wherein the first arg is V'd.
--
-- Alternatively, for call-targets returning nothing, convert
--
-- GHC.Prim.Char# -> GHC.Prim.State# GHC.Prim.RealWorld
-- -> (# GHC.Prim.State# GHC.Prim.RealWorld #)
--
-- to Nothing
maybe_getCCallReturnRep :: Type -> Maybe PrimRep
maybe_getCCallReturnRep fn_ty
= let
(_a_tys, r_ty) = splitFunTys (dropForAlls fn_ty)
r_reps = typePrimRepArgs r_ty
blargh :: a -- Used at more than one type
blargh = pprPanic "maybe_getCCallReturn: can't handle:"
(pprType fn_ty)
in
case r_reps of
[] -> panic "empty typePrimRepArgs"
[VoidRep] -> Nothing
[rep]
| isGcPtrRep rep -> blargh
| otherwise -> Just rep
-- if it was, it would be impossible to create a
-- valid return value placeholder on the stack
_ -> blargh
maybe_is_tagToEnum_call :: AnnExpr' Id DVarSet -> Maybe (AnnExpr' Id DVarSet, [Name])
-- Detect and extract relevant info for the tagToEnum kludge.
maybe_is_tagToEnum_call app
| AnnApp (_, AnnApp (_, AnnVar v) (_, AnnType t)) arg <- app
, Just TagToEnumOp <- isPrimOpId_maybe v
= Just (snd arg, extract_constr_Names t)
| otherwise
= Nothing
where
extract_constr_Names ty
| rep_ty <- unwrapType ty
, Just tyc <- tyConAppTyCon_maybe rep_ty
, isDataTyCon tyc
= map (getName . dataConWorkId) (tyConDataCons tyc)
-- NOTE: use the worker name, not the source name of
-- the DataCon. See DataCon.hs for details.
| otherwise
= pprPanic "maybe_is_tagToEnum_call.extract_constr_Ids" (ppr ty)
{- -----------------------------------------------------------------------------
Note [Implementing tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(implement_tagToId arg names) compiles code which takes an argument
'arg', (call it i), and enters the i'th closure in the supplied list
as a consequence. The [Name] is a list of the constructors of this
(enumeration) type.
The code we generate is this:
push arg
push bogus-word
TESTEQ_I 0 L1
PUSH_G <lbl for first data con>
JMP L_Exit
L1: TESTEQ_I 1 L2
PUSH_G <lbl for second data con>
JMP L_Exit
...etc...
Ln: TESTEQ_I n L_fail
PUSH_G <lbl for last data con>
JMP L_Exit
L_fail: CASEFAIL
L_exit: SLIDE 1 n
ENTER
The 'bogus-word' push is because TESTEQ_I expects the top of the stack
to have an info-table, and the next word to have the value to be
tested. This is very weird, but it's the way it is right now. See
Interpreter.c. We don't actually need an info-table here; we just
need to have the argument to be one-from-top on the stack, hence pushing
a 1-word null. See #8383.
-}
implement_tagToId
:: StackDepth
-> Sequel
-> BCEnv
-> AnnExpr' Id DVarSet
-> [Name]
-> BcM BCInstrList
-- See Note [Implementing tagToEnum#]
implement_tagToId d s p arg names
= ASSERT( notNull names )
do (push_arg, arg_bytes) <- pushAtom d p arg
labels <- getLabelsBc (genericLength names)
label_fail <- getLabelBc
label_exit <- getLabelBc
dflags <- getDynFlags
let infos = zip4 labels (tail labels ++ [label_fail])
[0 ..] names
steps = map (mkStep label_exit) infos
slide_ws = bytesToWords dflags (d - s + arg_bytes)
return (push_arg
`appOL` unitOL (PUSH_UBX LitNullAddr 1)
-- Push bogus word (see Note [Implementing tagToEnum#])
`appOL` concatOL steps
`appOL` toOL [ LABEL label_fail, CASEFAIL,
LABEL label_exit ]
`appOL` mkSlideW 1 (slide_ws + 1)
-- "+1" to account for bogus word
-- (see Note [Implementing tagToEnum#])
`appOL` unitOL ENTER)
where
mkStep l_exit (my_label, next_label, n, name_for_n)
= toOL [LABEL my_label,
TESTEQ_I n next_label,
PUSH_G name_for_n,
JMP l_exit]
-- -----------------------------------------------------------------------------
-- pushAtom
-- Push an atom onto the stack, returning suitable code & number of
-- stack words used.
--
-- The env p must map each variable to the highest-numbered stack
-- slot for it. For example, if the stack has depth 4 and we
-- tagged-ly push (v :: Int#) on it, the value will be in stack[4],
-- the tag in stack[5], the stack will have depth 6, and p must map v
-- to 5 and not to 4. Stack locations are numbered from zero, so a
-- depth 6 stack has valid words 0 .. 5.
pushAtom
:: StackDepth -> BCEnv -> AnnExpr' Id DVarSet -> BcM (BCInstrList, ByteOff)
pushAtom d p e
| Just e' <- bcView e
= pushAtom d p e'
pushAtom _ _ (AnnCoercion {}) -- Coercions are zero-width things,
= return (nilOL, 0) -- treated just like a variable V
-- See Note [Empty case alternatives] in coreSyn/CoreSyn.hs
-- and Note [Bottoming expressions] in coreSyn/CoreUtils.hs:
-- The scrutinee of an empty case evaluates to bottom
pushAtom d p (AnnCase (_, a) _ _ []) -- trac #12128
= pushAtom d p a
pushAtom d p (AnnVar var)
| [] <- typePrimRep (idType var)
= return (nilOL, 0)
| isFCallId var
= pprPanic "pushAtom: shouldn't get an FCallId here" (ppr var)
| Just primop <- isPrimOpId_maybe var
= do
       dflags <- getDynFlags
       return (unitOL (PUSH_PRIMOP primop), wordSize dflags)
| Just d_v <- lookupBCEnv_maybe var p -- var is a local variable
= do dflags <- getDynFlags
let !szb = idSizeCon dflags var
with_instr instr = do
let !off_b = trunc16B $ d - d_v
return (unitOL (instr off_b), wordSize dflags)
case szb of
1 -> with_instr PUSH8_W
2 -> with_instr PUSH16_W
4 -> with_instr PUSH32_W
_ -> do
let !szw = bytesToWords dflags szb
!off_w = trunc16W $ bytesToWords dflags (d - d_v) + szw - 1
return (toOL (genericReplicate szw (PUSH_L off_w)), szb)
-- d - d_v offset from TOS to the first slot of the object
--
-- d - d_v + sz - 1 offset from the TOS of the last slot of the object
--
-- Having found the last slot, we proceed to copy the right number of
-- slots on to the top of the stack.
| otherwise -- var must be a global variable
= do topStrings <- getTopStrings
dflags <- getDynFlags
case lookupVarEnv topStrings var of
Just ptr -> pushAtom d p $ AnnLit $ mkLitWord dflags $
fromIntegral $ ptrToWordPtr $ fromRemotePtr ptr
Nothing -> do
let sz = idSizeCon dflags var
MASSERT( sz == wordSize dflags )
return (unitOL (PUSH_G (getName var)), sz)
pushAtom _ _ (AnnLit lit) = do
dflags <- getDynFlags
let code rep
= let size_words = WordOff (argRepSizeW dflags rep)
in return (unitOL (PUSH_UBX lit (trunc16W size_words)),
wordsToBytes dflags size_words)
case lit of
LitLabel _ _ _ -> code N
LitFloat _ -> code F
LitDouble _ -> code D
LitChar _ -> code N
LitNullAddr -> code N
LitString _ -> code N
LitRubbish -> code N
LitNumber nt _ _ -> case nt of
LitNumInt -> code N
LitNumWord -> code N
LitNumInt64 -> code L
LitNumWord64 -> code L
-- No LitInteger's or LitNatural's should be left by the time this is
-- called. CorePrep should have converted them all to a real core
-- representation.
LitNumInteger -> panic "pushAtom: LitInteger"
LitNumNatural -> panic "pushAtom: LitNatural"
pushAtom _ _ expr
= pprPanic "ByteCodeGen.pushAtom"
(pprCoreExpr (deAnnotate' expr))
-- | Push an atom for constructor (i.e., PACK instruction) onto the stack.
-- This is slightly different to @pushAtom@ due to the fact that we allow
-- packing constructor fields. See also @mkConAppCode@ and @pushPadding@.
pushConstrAtom
:: StackDepth -> BCEnv -> AnnExpr' Id DVarSet -> BcM (BCInstrList, ByteOff)
pushConstrAtom _ _ (AnnLit lit@(LitFloat _)) =
return (unitOL (PUSH_UBX32 lit), 4)
pushConstrAtom d p (AnnVar v)
| Just d_v <- lookupBCEnv_maybe v p = do -- v is a local variable
dflags <- getDynFlags
let !szb = idSizeCon dflags v
done instr = do
let !off = trunc16B $ d - d_v
return (unitOL (instr off), szb)
case szb of
1 -> done PUSH8
2 -> done PUSH16
4 -> done PUSH32
_ -> pushAtom d p (AnnVar v)
pushConstrAtom d p expr = pushAtom d p expr
pushPadding :: Int -> (BCInstrList, ByteOff)
pushPadding !n = go n (nilOL, 0)
where
go n acc@(!instrs, !off) = case n of
0 -> acc
1 -> (instrs `mappend` unitOL PUSH_PAD8, off + 1)
2 -> (instrs `mappend` unitOL PUSH_PAD16, off + 2)
3 -> go 1 (go 2 acc)
4 -> (instrs `mappend` unitOL PUSH_PAD32, off + 4)
_ -> go (n - 4) (go 4 acc)
-- -----------------------------------------------------------------------------
-- Given a bunch of alts code and their discrs, do the donkey work
-- of making a multiway branch using a switch tree.
-- What a load of hassle!
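--
-- For instance (an informal sketch, assuming a DEFAULT alternative is
-- present): for non-default discrs 1, 3 and 5 the emitted instructions
-- form a small binary search over the middle value,
--
-- >         TESTLT_I 3 Lgeq
-- >         TESTEQ_I 1 Ldef
-- >         <code for alt 1>
-- > Lgeq:   TESTLT_I 5 Lgeq'
-- >         TESTEQ_I 3 Ldef
-- >         <code for alt 3>
-- > Lgeq':  TESTEQ_I 5 Ldef
-- >         <code for alt 5>
-- > Ldef:   <default code>
--
-- where each TESTLT/TESTEQ jumps to its label argument when the test fails.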
mkMultiBranch :: Maybe Int -- # datacons in tycon, if alg alt
-- a hint; generates better code
-- Nothing is always safe
-> [(Discr, BCInstrList)]
-> BcM BCInstrList
mkMultiBranch maybe_ncons raw_ways = do
lbl_default <- getLabelBc
let
mkTree :: [(Discr, BCInstrList)] -> Discr -> Discr -> BcM BCInstrList
mkTree [] _range_lo _range_hi = return (unitOL (JMP lbl_default))
-- shouldn't happen?
mkTree [val] range_lo range_hi
| range_lo == range_hi
= return (snd val)
| null defaults -- Note [CASEFAIL]
= do lbl <- getLabelBc
return (testEQ (fst val) lbl
`consOL` (snd val
`appOL` (LABEL lbl `consOL` unitOL CASEFAIL)))
| otherwise
= return (testEQ (fst val) lbl_default `consOL` snd val)
-- Note [CASEFAIL] It may be that this case has no default
-- branch, but the alternatives are not exhaustive - this
-- happens for GADT cases for example, where the types
-- prove that certain branches are impossible. We could
-- just assume that the other cases won't occur, but if
-- this assumption was wrong (because of a bug in GHC)
-- then the result would be a segfault. So instead we
-- emit an explicit test and a CASEFAIL instruction that
-- causes the interpreter to barf() if it is ever
-- executed.
mkTree vals range_lo range_hi
= let n = length vals `div` 2
vals_lo = take n vals
vals_hi = drop n vals
v_mid = fst (head vals_hi)
in do
label_geq <- getLabelBc
code_lo <- mkTree vals_lo range_lo (dec v_mid)
code_hi <- mkTree vals_hi v_mid range_hi
return (testLT v_mid label_geq
`consOL` (code_lo
`appOL` unitOL (LABEL label_geq)
`appOL` code_hi))
the_default
= case defaults of
[] -> nilOL
[(_, def)] -> LABEL lbl_default `consOL` def
_ -> panic "mkMultiBranch/the_default"
instrs <- mkTree notd_ways init_lo init_hi
return (instrs `appOL` the_default)
where
(defaults, not_defaults) = partition (isNoDiscr.fst) raw_ways
notd_ways = sortBy (comparing fst) not_defaults
testLT (DiscrI i) fail_label = TESTLT_I i fail_label
testLT (DiscrW i) fail_label = TESTLT_W i fail_label
testLT (DiscrF i) fail_label = TESTLT_F i fail_label
testLT (DiscrD i) fail_label = TESTLT_D i fail_label
testLT (DiscrP i) fail_label = TESTLT_P i fail_label
testLT NoDiscr _ = panic "mkMultiBranch NoDiscr"
testEQ (DiscrI i) fail_label = TESTEQ_I i fail_label
testEQ (DiscrW i) fail_label = TESTEQ_W i fail_label
testEQ (DiscrF i) fail_label = TESTEQ_F i fail_label
testEQ (DiscrD i) fail_label = TESTEQ_D i fail_label
testEQ (DiscrP i) fail_label = TESTEQ_P i fail_label
testEQ NoDiscr _ = panic "mkMultiBranch NoDiscr"
-- None of these will be needed if there are no non-default alts
(init_lo, init_hi)
| null notd_ways
= panic "mkMultiBranch: awesome foursome"
| otherwise
= case fst (head notd_ways) of
DiscrI _ -> ( DiscrI minBound, DiscrI maxBound )
DiscrW _ -> ( DiscrW minBound, DiscrW maxBound )
DiscrF _ -> ( DiscrF minF, DiscrF maxF )
DiscrD _ -> ( DiscrD minD, DiscrD maxD )
DiscrP _ -> ( DiscrP algMinBound, DiscrP algMaxBound )
NoDiscr -> panic "mkMultiBranch NoDiscr"
(algMinBound, algMaxBound)
= case maybe_ncons of
-- XXX What happens when n == 0?
Just n -> (0, fromIntegral n - 1)
Nothing -> (minBound, maxBound)
isNoDiscr NoDiscr = True
isNoDiscr _ = False
dec (DiscrI i) = DiscrI (i-1)
dec (DiscrW w) = DiscrW (w-1)
dec (DiscrP i) = DiscrP (i-1)
dec other = other -- not really right, but if you
-- do cases on floating values, you'll get what you deserve
-- same snotty comment applies to the following
minF, maxF :: Float
minD, maxD :: Double
minF = -1.0e37
maxF = 1.0e37
minD = -1.0e308
maxD = 1.0e308
-- -----------------------------------------------------------------------------
-- Supporting junk for the compilation schemes
-- Describes case alts
data Discr
= DiscrI Int
| DiscrW Word
| DiscrF Float
| DiscrD Double
| DiscrP Word16
| NoDiscr
deriving (Eq, Ord)
instance Outputable Discr where
ppr (DiscrI i) = int i
ppr (DiscrW w) = text (show w)
ppr (DiscrF f) = text (show f)
ppr (DiscrD d) = text (show d)
ppr (DiscrP i) = ppr i
ppr NoDiscr = text "DEF"
lookupBCEnv_maybe :: Id -> BCEnv -> Maybe ByteOff
lookupBCEnv_maybe = Map.lookup
idSizeW :: DynFlags -> Id -> WordOff
idSizeW dflags = WordOff . argRepSizeW dflags . bcIdArgRep
idSizeCon :: DynFlags -> Id -> ByteOff
idSizeCon dflags = ByteOff . primRepSizeB dflags . bcIdPrimRep
bcIdArgRep :: Id -> ArgRep
bcIdArgRep = toArgRep . bcIdPrimRep
bcIdPrimRep :: Id -> PrimRep
bcIdPrimRep id
| [rep] <- typePrimRepArgs (idType id)
= rep
| otherwise
= pprPanic "bcIdPrimRep" (ppr id <+> dcolon <+> ppr (idType id))
repSizeWords :: DynFlags -> PrimRep -> WordOff
repSizeWords dflags rep = WordOff $ argRepSizeW dflags (toArgRep rep)
isFollowableArg :: ArgRep -> Bool
isFollowableArg P = True
isFollowableArg _ = False
isVoidArg :: ArgRep -> Bool
isVoidArg V = True
isVoidArg _ = False
-- See bug #1257
multiValException :: a
multiValException = throwGhcException (ProgramError
("Error: bytecode compiler can't handle unboxed tuples and sums.\n"++
" Possibly due to foreign import/export decls in source.\n"++
" Workaround: use -fobject-code, or compile this module to .o separately."))
-- | Indicate if the calling convention is supported
isSupportedCConv :: CCallSpec -> Bool
isSupportedCConv (CCallSpec _ cconv _) = case cconv of
CCallConv -> True -- we explicitly pattern match on every
StdCallConv -> True -- convention to ensure that a warning
PrimCallConv -> False -- is triggered when a new one is added
JavaScriptCallConv -> False
CApiConv -> False
-- See bug #10462
unsupportedCConvException :: a
unsupportedCConvException = throwGhcException (ProgramError
("Error: bytecode compiler can't handle some foreign calling conventions\n"++
" Workaround: use -fobject-code, or compile this module to .o separately."))
mkSlideB :: DynFlags -> ByteOff -> ByteOff -> OrdList BCInstr
mkSlideB dflags !nb !db = mkSlideW n d
where
!n = trunc16W $ bytesToWords dflags nb
!d = bytesToWords dflags db
mkSlideW :: Word16 -> WordOff -> OrdList BCInstr
mkSlideW !n !ws
| ws > fromIntegral limit
-- If the amount to slide doesn't fit in a Word16, generate multiple slide
-- instructions
= SLIDE n limit `consOL` mkSlideW n (ws - fromIntegral limit)
| ws == 0
= nilOL
| otherwise
= unitOL (SLIDE n $ fromIntegral ws)
where
limit :: Word16
limit = maxBound
splitApp :: AnnExpr' Var ann -> (AnnExpr' Var ann, [AnnExpr' Var ann])
-- The arguments are returned in *right-to-left* order
splitApp e | Just e' <- bcView e = splitApp e'
splitApp (AnnApp (_,f) (_,a)) = case splitApp f of
(f', as) -> (f', a:as)
splitApp e = (e, [])
bcView :: AnnExpr' Var ann -> Maybe (AnnExpr' Var ann)
-- The "bytecode view" of a term discards
-- a) type abstractions
-- b) type applications
-- c) casts
-- d) ticks (but not breakpoints)
-- Type lambdas *can* occur in random expressions,
-- whereas value lambdas cannot; that is why they are nuked here
bcView (AnnCast (_,e) _) = Just e
bcView (AnnLam v (_,e)) | isTyVar v = Just e
bcView (AnnApp (_,e) (_, AnnType _)) = Just e
bcView (AnnTick Breakpoint{} _) = Nothing
bcView (AnnTick _other_tick (_,e)) = Just e
bcView _ = Nothing
isVAtom :: AnnExpr' Var ann -> Bool
isVAtom e | Just e' <- bcView e = isVAtom e'
isVAtom (AnnVar v) = isVoidArg (bcIdArgRep v)
isVAtom (AnnCoercion {}) = True
isVAtom _ = False
atomPrimRep :: AnnExpr' Id ann -> PrimRep
atomPrimRep e | Just e' <- bcView e = atomPrimRep e'
atomPrimRep (AnnVar v) = bcIdPrimRep v
atomPrimRep (AnnLit l) = typePrimRep1 (literalType l)
-- #12128:
-- A case expression can be an atom because empty cases evaluate to bottom.
-- See Note [Empty case alternatives] in coreSyn/CoreSyn.hs
atomPrimRep (AnnCase _ _ ty _) =
ASSERT(case typePrimRep ty of [LiftedRep] -> True; _ -> False) LiftedRep
atomPrimRep (AnnCoercion {}) = VoidRep
atomPrimRep other = pprPanic "atomPrimRep" (ppr (deAnnotate' other))
atomRep :: AnnExpr' Id ann -> ArgRep
atomRep e = toArgRep (atomPrimRep e)
-- | Let szsb be the sizes in bytes of some items pushed onto the stack, which
-- has initial depth @original_depth@. Return the values which the stack
-- environment should map these items to.
mkStackOffsets :: ByteOff -> [ByteOff] -> [ByteOff]
mkStackOffsets original_depth szsb = tail (scanl' (+) original_depth szsb)
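-- A small worked example (illustrative only): pushing items of sizes
-- 1, 2 and 1 bytes onto a stack of depth 4 maps them to offsets 5, 7
-- and 8 respectively:
--
-- > mkStackOffsets 4 [1, 2, 1] == [5, 7, 8]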
typeArgRep :: Type -> ArgRep
typeArgRep = toArgRep . typePrimRep1
-- -----------------------------------------------------------------------------
-- The bytecode generator's monad
data BcM_State
= BcM_State
{ bcm_hsc_env :: HscEnv
, uniqSupply :: UniqSupply -- for generating fresh variable names
, thisModule :: Module -- current module (for breakpoints)
, nextlabel :: Word16 -- for generating local labels
, ffis :: [FFIInfo] -- ffi info blocks, to free later
-- Should be free()d when it is GCd
, modBreaks :: Maybe ModBreaks -- info about breakpoints
, breakInfo :: IntMap CgBreakInfo
, topStrings :: IdEnv (RemotePtr ()) -- top-level string literals
-- See Note [generating code for top-level string literal bindings].
}
newtype BcM r = BcM (BcM_State -> IO (BcM_State, r)) deriving (Functor)
ioToBc :: IO a -> BcM a
ioToBc io = BcM $ \st -> do
x <- io
return (st, x)
runBc :: HscEnv -> UniqSupply -> Module -> Maybe ModBreaks
-> IdEnv (RemotePtr ())
-> BcM r
-> IO (BcM_State, r)
runBc hsc_env us this_mod modBreaks topStrings (BcM m)
= m (BcM_State hsc_env us this_mod 0 [] modBreaks IntMap.empty topStrings)
thenBc :: BcM a -> (a -> BcM b) -> BcM b
thenBc (BcM expr) cont = BcM $ \st0 -> do
(st1, q) <- expr st0
let BcM k = cont q
(st2, r) <- k st1
return (st2, r)
thenBc_ :: BcM a -> BcM b -> BcM b
thenBc_ (BcM expr) (BcM cont) = BcM $ \st0 -> do
(st1, _) <- expr st0
(st2, r) <- cont st1
return (st2, r)
returnBc :: a -> BcM a
returnBc result = BcM $ \st -> (return (st, result))
instance Applicative BcM where
pure = returnBc
(<*>) = ap
(*>) = thenBc_
instance Monad BcM where
(>>=) = thenBc
(>>) = (*>)
instance HasDynFlags BcM where
getDynFlags = BcM $ \st -> return (st, hsc_dflags (bcm_hsc_env st))
getHscEnv :: BcM HscEnv
getHscEnv = BcM $ \st -> return (st, bcm_hsc_env st)
emitBc :: ([FFIInfo] -> ProtoBCO Name) -> BcM (ProtoBCO Name)
emitBc bco
= BcM $ \st -> return (st{ffis=[]}, bco (ffis st))
recordFFIBc :: RemotePtr C_ffi_cif -> BcM ()
recordFFIBc a
= BcM $ \st -> return (st{ffis = FFIInfo a : ffis st}, ())
getLabelBc :: BcM Word16
getLabelBc
= BcM $ \st -> do let nl = nextlabel st
when (nl == maxBound) $
panic "getLabelBc: Ran out of labels"
return (st{nextlabel = nl + 1}, nl)
getLabelsBc :: Word16 -> BcM [Word16]
getLabelsBc n
= BcM $ \st -> let ctr = nextlabel st
in return (st{nextlabel = ctr+n}, [ctr .. ctr+n-1])
getCCArray :: BcM (Array BreakIndex (RemotePtr CostCentre))
getCCArray = BcM $ \st ->
let breaks = expectJust "ByteCodeGen.getCCArray" $ modBreaks st in
return (st, modBreaks_ccs breaks)
newBreakInfo :: BreakIndex -> CgBreakInfo -> BcM ()
newBreakInfo ix info = BcM $ \st ->
return (st{breakInfo = IntMap.insert ix info (breakInfo st)}, ())
newUnique :: BcM Unique
newUnique = BcM $
\st -> case takeUniqFromSupply (uniqSupply st) of
(uniq, us) -> let newState = st { uniqSupply = us }
in return (newState, uniq)
getCurrentModule :: BcM Module
getCurrentModule = BcM $ \st -> return (st, thisModule st)
getTopStrings :: BcM (IdEnv (RemotePtr ()))
getTopStrings = BcM $ \st -> return (st, topStrings st)
newId :: Type -> BcM Id
newId ty = do
uniq <- newUnique
return $ mkSysLocal tickFS uniq ty
tickFS :: FastString
tickFS = fsLit "ticked"
| sdiehl/ghc | compiler/ghci/ByteCodeGen.hs | bsd-3-clause | 77,893 | 0 | 23 | 23,738 | 16,172 | 8,288 | 7,884 | -1 | -1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.OcclusionQuery2
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/ARB/occlusion_query2.txt ARB_occlusion_query2> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.OcclusionQuery2 (
-- * Enums
gl_ANY_SAMPLES_PASSED
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/OcclusionQuery2.hs | bsd-3-clause | 662 | 0 | 4 | 78 | 37 | 31 | 6 | 3 | 0 |
module Network.CrawlChain.CrawlDirective where
import Network.CrawlChain.CrawlAction
import Network.CrawlChain.CrawlResult
{-|
A crawl directive takes the content of a web page and produces crawl actions for links/forms to follow.
The general idea is to specify a list of operations that produces a dynamically collected
tree of requests whose leaves are either dead ends or end results.
Additionally, logical branching/combination of directives is possible with:
* Alternatives - try the first directive and fall back to the second if it yields no results.
* Restart - evaluate a completely new initial action & chain if the previous combination does not produce end results.
-}
data CrawlDirective =
SimpleDirective (String -> [CrawlAction])
-- ^ access content to find absolute follow-up urls
| RelativeDirective (String -> [CrawlAction])
-- ^ as simple, but found relative urls are completed
| FollowUpDirective (CrawlResult -> [CrawlAction])
-- ^ as simple, but with access to complete result
| DelayDirective Int CrawlDirective
-- ^ wait additional seconds before executing
| RetryDirective Int CrawlDirective
-- ^ if given directive yields no results use add. retries
| AlternativeDirective CrawlDirective CrawlDirective
-- ^ fallback to second argument if first yields no results
| RestartChainDirective (CrawlAction, CrawlDirective)
-- ^ the possibility to start a new chain (when using alternative)
| GuardDirective (CrawlAction -> Bool)
-- ^ not crawling anything, just a blacklisting option
| DirectiveSequence [CrawlDirective]
-- ^ chaining of directives
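-- An illustrative chain (a sketch only -- the extraction functions here
-- return no actions, whereas a real directive would parse the page content
-- and build 'CrawlAction's from the links it finds):
--
-- > exampleChain :: CrawlDirective
-- > exampleChain = DirectiveSequence
-- >   [ GuardDirective (const True)                   -- accept every action (no blacklisting)
-- >   , RetryDirective 2 (SimpleDirective (const []))
-- >   , AlternativeDirective
-- >       (DelayDirective 5 (SimpleDirective (const [])))
-- >       (RelativeDirective (const []))              -- fallback extraction
-- >   ]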
| michaxm/crawlchain | src/Network/CrawlChain/CrawlDirective.hs | bsd-3-clause | 1,597 | 0 | 9 | 289 | 130 | 83 | 47 | 13 | 0 |
module API.CacheMaintenance (
spinupCache
) where
import qualified Data.ByteString.Lazy as BSL
import Control.Concurrent.MVar
import Control.Concurrent.Chan
import Control.Concurrent (forkIO, threadDelay)
import Control.Exception (tryJust)
import Control.Monad (forever, guard)
import System.IO (withFile, IOMode (ReadMode, WriteMode))
import System.IO.Error (isDoesNotExistError)
import Glug (Cache, newCache, mergeCache, serializeCache, deserializeCache)
cacheSize :: Int
cacheSize = 256
-- | Creates a cache and accessors.
-- Performs cache maintenance duties in a separate thread.
-- Consumes Cache values from the Chan and merges them into the Cache MVar.
-- Regularly writes the cache out to a file.
spinupCache :: IO (MVar Cache, Chan Cache)
spinupCache = do
m <- inflateCache >>= newMVar
c <- newChan
_ <- forkIO $ reduceCache c m
_ <- forkIO $ persistCache m
return (m, c)
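-- A minimal usage sketch (illustrative only; a real caller would publish
-- caches built from actual lookups rather than an empty one):
--
-- > example :: IO ()
-- > example = do
-- >   (cacheVar, cacheChan) <- spinupCache
-- >   writeChan cacheChan (newCache 256)   -- publish a cache update
-- >   snapshot <- readMVar cacheVar        -- take a consistent snapshot
-- >   snapshot `seq` return ()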
inflateCache :: IO (Cache)
inflateCache = do
r <- tryJust (guard . isDoesNotExistError) $ _inflateCache
case r of
Left _ -> return $ newCache cacheSize
Right x -> return x
_inflateCache :: IO (Cache)
_inflateCache = do
withFile "cache.dat" ReadMode $ \f -> do
bsl <- BSL.hGetContents f
case deserializeCache cacheSize bsl of
Right oldCache -> do
putStrLn "Inflated from serialized cache"
return oldCache
Left _ -> do
putStrLn "Faild to inflate from cache"
return $ newCache cacheSize
persistCache :: MVar Cache -> IO ()
persistCache mvar = forever $ do
threadDelay $ 60 * 60 * 1000 * 1000
got <- readMVar mvar
withFile "cache.dat" WriteMode $ \f ->
BSL.hPut f . serializeCache $ got
putStrLn "Persisted Cache"
reduceCache :: Chan Cache -> MVar Cache -> IO ()
reduceCache chan mvar = forever $ do
got <- readChan chan
old <- takeMVar mvar
putMVar mvar $ mergeCache old got
| robmcl4/Glug | app/API/CacheMaintenance.hs | bsd-3-clause | 1,932 | 0 | 17 | 459 | 559 | 281 | 278 | 49 | 2 |
module B1.Program.Prices.Options
( Options(..)
, DataSource(..)
, readOptions
) where
import System.Console.GetOpt
data Options =
Options
{ dataSource :: DataSource
, symbol :: String
}
data DataSource = Google | Mock deriving Show
defaultOptions = Options
{ dataSource = Google
, symbol = "GOOG"
}
readOptions :: [String] -> IO (Options, [String])
readOptions args =
case getOpt RequireOrder options args of
(options, nonOptions, []) ->
return (foldl (flip id) defaultOptions options, nonOptions)
(_, _, errors) ->
ioError (userError (concat errors ++ usageInfo header options))
where
header = "Usage: b1 [OPTION...]"
options :: [OptDescr (Options -> Options)]
options =
[ Option "d" ["dataSource"] (ReqArg getDataSource "DATASOURCE")
"Data source to get stock price information."
, Option "s" ["symbol"] (ReqArg getSymbol "SYMBOL")
"Stock symbol like SPY."
]
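-- A quick usage sketch (illustrative only):
--
-- > readOptions ["-d", "mock", "-s", "SPY"]
--
-- yields an Options value with dataSource = Mock and symbol = "SPY",
-- together with an empty list of non-option arguments.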
getDataSource :: String -> Options -> Options
getDataSource arg options = options { dataSource = readDataSource }
where
readDataSource = case arg of
"google" -> Google
"mock" -> Mock
_ -> Google
getSymbol :: String -> Options -> Options
getSymbol arg options = options { symbol = arg }
| madjestic/b1 | src/B1/Program/Prices/Options.hs | bsd-3-clause | 1,255 | 0 | 13 | 281 | 374 | 210 | 164 | 35 | 3 |
{-# OPTIONS_GHC -Wall #-}
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2014 Edward Kmett and Gabríel Arthúr Pétursson
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : portable
--
--
-- Loaded by cabal to configure the project.
--
-- Local hooks are provided to generate the API on build, producing haddocks and to enable @cabal sdist@
----------------------------------------------------------------------------
import Data.Functor
import Distribution.Simple
import Distribution.Simple.LocalBuildInfo
import Distribution.PackageDescription
import System.FilePath
import Generator (generateSource)
import Parser (parseFile)
import Registry (deshenaniganize)
generateAPI :: LocalBuildInfo -> IO ()
generateAPI l = do
registry <- parseFile "gl.xml"
man <- lines <$> readFile "man.txt"
extensions <- lines <$> readFile "extensions.txt"
putStr "Generating API..."
generateSource (buildDir l </> "autogen") (deshenaniganize registry) man [ (x,y) | [x,y] <- map words extensions ]
putStrLn "done"
main :: IO ()
main = defaultMainWithHooks simpleUserHooks
{ buildHook = \p l h f -> generateAPI l >> buildHook simpleUserHooks p l h f
, haddockHook = \p l h f -> generateAPI l >> haddockHook simpleUserHooks p l h f
, sDistHook = \p ml h f -> case ml of
Nothing -> fail "No local buildinfo available. configure first"
Just l -> do
let editlib lib = lib { libBuildInfo = editBuildInfo (libBuildInfo lib) }
editBuildInfo bi = bi { hsSourceDirs = (buildDir l </> "autogen") : hsSourceDirs bi }
p' = p { library = fmap editlib (library p) }
generateAPI l >> sDistHook simpleUserHooks p' ml h f
}
| phaazon/gl | Setup.hs | bsd-3-clause | 1,814 | 0 | 22 | 333 | 429 | 224 | 205 | 28 | 2 |
module Examples where
import qualified Prelude
import Feldspar hiding (name)
import Feldspar.Vector
import Feldspar.Algorithm.CRC
import Feldspar.Compiler.Signature
-- * Examples
expr1 :: Data [Index] -> Data Index
expr1 x = 1 + getLength x
expr3 :: Data Index -> Data Word8 -> Data Index
expr3 a b = a * i2n b
expr5 :: Data [Index] -> Data Index
expr5 v = let l = getLength v
in forLoop l l $ \i s -> s + v ! i
sig6_1, sig6_2 :: Signature Data (Word16 -> Word16 -> [Word8] -> Word16)
sig6_1 = lam $ \p -> lam $ \i -> lam $ \v -> ptr "crc" $ crcNaive p i $ sugar v
sig6_2 = name "poly" $ \p ->
name "ini" $ \i ->
name "arr" $ \v ->
ret "crc" $ crcNaive p i $ sugar v
sig6_3 :: Signature Data (Word16 -> Word16 -> Length -> [Word8] -> Word16)
sig6_3 = name "poly" $ \p ->
name "ini" $ \i ->
exposeLength $ \v ->
ret "crc" $ crcNaive p i $ sugar v
crc :: (Bits a, Integral a)
=> Data a -> Data a -> Data [Word8] -> Data a
-- crc p i v = share (makeCrcTable p)
-- $ \t -> crcNormal t i $ sugar v
crc p i v = crcNaive p i $ sugar v
sig7 :: Signature Data (Word16 -> [Word8] -> Word16)
sig7 = name "ini" $ \i ->
name "vec" $ \v ->
ret "crc" $ crc 0x8005 i v
sig8_1 :: Signature Data ([Word8] -> [Word8] -> Word8)
sig8_1 = lam $ \as ->
lam $ \bs ->
ret "scalarProd" $ scalarProd (sugar as -:: tPull1 id)
(sugar bs -:: tPull1 id)
sig8_2 :: Signature Data (Length -> [Word8] -> [Word8] -> Word8)
sig8_2 = name "len" $ \len ->
native len $ \as ->
native len $ \bs ->
ret "scalarProd" $ scalarProd (sugar as -:: tPull1 id)
(sugar bs -:: tPull1 id)
| emwap/signature-paper | examples/Examples.hs | bsd-3-clause | 1,782 | 0 | 15 | 573 | 739 | 376 | 363 | 42 | 1 |
{-# LANGUAGE CPP, GADTs, FlexibleContexts, FlexibleInstances #-}
{-# LANGUAGE TypeOperators, TypeFamilies #-}
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Module : Data.Array.Accelerate.Array.Representation
-- Copyright : [2008..2011] Manuel M T Chakravarty, Gabriele Keller, Sean Lee
-- [2009..2012] Manuel M T Chakravarty, Gabriele Keller, Trevor L. McDonell
-- License : BSD3
--
-- Maintainer : Manuel M T Chakravarty <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
module Data.Array.Accelerate.Array.Representation (
-- * Array shapes, indices, and slices
Shape(..), Slice(..), SliceIndex(..),
) where
-- friends
import Data.Array.Accelerate.Type
-- standard library
import GHC.Base ( quotInt, remInt )
#include "accelerate.h"
-- |Index representation
--
-- |Class of index representations (which are nested pairs)
--
class (Eq sh, Slice sh) => Shape sh where
-- user-facing methods
dim :: sh -> Int -- ^number of dimensions (>= 0); rank of the array
size :: sh -> Int -- ^total number of elements in an array of this /shape/
-- internal methods
intersect :: sh -> sh -> sh -- yield the intersection of two shapes
ignore :: sh -- identifies ignored elements in 'permute'
toIndex :: sh -> sh -> Int -- yield the index position in a linear, row-major representation of
-- the array (first argument is the shape)
fromIndex :: sh -> Int -> sh -- inverse of `toIndex`
bound :: sh -> sh -> Boundary e -> Either e sh
-- apply a boundary condition to an index
iter :: sh -> (sh -> a) -> (a -> a -> a) -> a -> a
-- iterate through the entire shape, applying the function in the
-- second argument; third argument combines results and fourth is an
-- initial value that is combined with the results; the index space
-- is traversed in row-major order
iter1 :: sh -> (sh -> a) -> (a -> a -> a) -> a
-- variant of 'iter' without an initial value
-- operations to facilitate conversion with IArray
rangeToShape :: (sh, sh) -> sh -- convert a minpoint-maxpoint index
-- into a shape
shapeToRange :: sh -> (sh, sh) -- ...the converse
-- other conversions
shapeToList :: sh -> [Int] -- convert a shape into its list of dimensions
listToShape :: [Int] -> sh -- convert a list of dimensions into a shape
instance Shape () where
dim () = 0
size () = 1
() `intersect` () = ()
ignore = ()
toIndex () () = 0
fromIndex () _ = ()
bound () () _ = Right ()
iter () f _ _ = f ()
iter1 () f _ = f ()
rangeToShape ((), ()) = ()
shapeToRange () = ((), ())
shapeToList () = []
listToShape [] = ()
listToShape _ = INTERNAL_ERROR(error) "listToShape" "non-empty list when converting to unit"
instance Shape sh => Shape (sh, Int) where
dim (sh, _) = dim sh + 1
size (sh, sz) = size sh * sz
(sh1, sz1) `intersect` (sh2, sz2) = (sh1 `intersect` sh2, sz1 `min` sz2)
ignore = (ignore, -1)
toIndex (sh, sz) (ix, i) = BOUNDS_CHECK(checkIndex) "toIndex" i sz
$ toIndex sh ix * sz + i
fromIndex (sh, sz) i = (fromIndex sh (i `quotInt` sz), r)
-- If we assume that the index is in range, there is no point in computing
-- the remainder for the highest dimension since i < sz must hold.
--
where
r | dim sh == 0 = BOUNDS_CHECK(checkIndex) "fromIndex" i sz i
| otherwise = i `remInt` sz
bound (sh, sz) (ix, i) bndy
| i < 0 = case bndy of
Clamp -> bound sh ix bndy `addDim` 0
Mirror -> bound sh ix bndy `addDim` (-i)
Wrap -> bound sh ix bndy `addDim` (sz+i)
Constant e -> Left e
| i >= sz = case bndy of
Clamp -> bound sh ix bndy `addDim` (sz-1)
Mirror -> bound sh ix bndy `addDim` (sz-(i-sz+2))
Wrap -> bound sh ix bndy `addDim` (i-sz)
Constant e -> Left e
| otherwise = bound sh ix bndy `addDim` i
where
Right ds `addDim` d = Right (ds, d)
Left e `addDim` _ = Left e
iter (sh, sz) f c r = iter sh (\ix -> iter' (ix,0)) c r
where
iter' (ix,i) | i >= sz = r
| otherwise = f (ix,i) `c` iter' (ix,i+1)
iter1 (_, 0) _ _ = BOUNDS_ERROR(error) "iter1" "empty iteration space"
iter1 (sh, sz) f c = iter1 sh (\ix -> iter1' (ix,0)) c
where
iter1' (ix,i) | i == sz-1 = f (ix,i)
| otherwise = f (ix,i) `c` iter1' (ix,i+1)
rangeToShape ((sh1, sz1), (sh2, sz2))
= (rangeToShape (sh1, sh2), sz2 - sz1 + 1)
shapeToRange (sh, sz)
= let (low, high) = shapeToRange sh
in
((low, 0), (high, sz - 1))
shapeToList (sh,sz) = sz : shapeToList sh
listToShape [] = INTERNAL_ERROR(error) "listToShape" "empty list when converting to Ix"
listToShape (x:xs) = (listToShape xs,x)
-- |Slice representation
--
-- |Class of slice representations (which are nested pairs)
--
class Slice sl where
type SliceShape sl -- the projected slice
type CoSliceShape sl -- the complement of the slice
type FullShape sl -- the combined dimension
-- argument *value* not used; it's just a phantom value to fix the type
sliceIndex :: {-dummy-} sl -> SliceIndex sl (SliceShape sl) (CoSliceShape sl) (FullShape sl)
instance Slice () where
type SliceShape () = ()
type CoSliceShape () = ()
type FullShape () = ()
sliceIndex _ = SliceNil
instance Slice sl => Slice (sl, ()) where
type SliceShape (sl, ()) = (SliceShape sl, Int)
type CoSliceShape (sl, ()) = CoSliceShape sl
type FullShape (sl, ()) = (FullShape sl, Int)
sliceIndex _ = SliceAll (sliceIndex (undefined::sl))
instance Slice sl => Slice (sl, Int) where
type SliceShape (sl, Int) = SliceShape sl
type CoSliceShape (sl, Int) = (CoSliceShape sl, Int)
type FullShape (sl, Int) = (FullShape sl, Int)
sliceIndex _ = SliceFixed (sliceIndex (undefined::sl))
-- |Generalised array index, which may index only in a subset of the dimensions
-- of a shape.
--
data SliceIndex ix slice coSlice sliceDim where
SliceNil :: SliceIndex () () () ()
SliceAll ::
SliceIndex ix slice co dim -> SliceIndex (ix, ()) (slice, Int) co (dim, Int)
SliceFixed ::
SliceIndex ix slice co dim -> SliceIndex (ix, Int) slice (co, Int) (dim, Int)
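-- For example (an illustrative sketch): the slice specifier of
-- representation type @(((), Int), ())@ (fix an index in the outer
-- dimension, keep all of the innermost one) is described by
--
-- > sliceIndex (undefined :: (((), Int), ()))
--
-- which evaluates to @SliceAll (SliceFixed SliceNil)@.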
instance Show (SliceIndex ix slice coSlice sliceDim) where
show SliceNil = "SliceNil"
show (SliceAll rest) = "SliceAll (" ++ show rest ++ ")"
show (SliceFixed rest) = "SliceFixed (" ++ show rest ++ ")"
| robeverest/accelerate | Data/Array/Accelerate/Array/Representation.hs | bsd-3-clause | 7,330 | 0 | 15 | 2,479 | 2,105 | 1,162 | 943 | 105 | 0 |
{-# LANGUAGE TypeOperators, Rank2Types, EmptyDataDecls,
MultiParamTypeClasses, FunctionalDependencies,
FlexibleContexts, FlexibleInstances, UndecidableInstances,
IncoherentInstances, OverlappingInstances #-}
module Data.Rope.Branded
( Branded(..)
, Unsafe
, UBR
, null -- :: (s `Branded` Rope) a -> Bool
-- * Unpacking Ropes
, head -- :: Unpackable t => (s `Branded` Rope) a -> t
, last -- :: Unpackable t => (s `Branded` Rope) a -> t
, unpack -- :: Unpackable t => (s `Branded` Rope) a -> [t]
-- * MonadWriter
, runBranded
, execBranded -- MonadWriter terminology for 'context'
) where
import Prelude hiding (null, head, last, take, drop, span, break, splitAt, takeWhile, dropWhile)
import Control.Applicative hiding (empty)
import Control.Monad.Writer.Class
import Data.Rope.Branded.Comonad
import Data.Monoid
import Data.FingerTree (Measured(..))
import Data.Foldable (Foldable)
import qualified Data.Foldable
import Data.Traversable (Traversable(traverse))
import qualified Data.Rope.Internal as Rope
import Data.Rope.Internal (Rope(..),Unpackable)
type UBR a = (Unsafe `Branded` Rope) a
data Unsafe
data Branded brand t a = Branded { context :: !t, extractBranded :: a }
null :: Branded s Rope a -> Bool
null = Rope.null . context
{-# INLINE null #-}
head :: Unpackable t => Branded s Rope a -> t
head = Rope.head . context
{-# INLINE head #-}
last :: Unpackable t => Branded s Rope a -> t
last = Rope.last . context
{-# INLINE last #-}
unpack :: Unpackable t => Branded s Rope a -> [t]
unpack (Branded s _) = Rope.unpack s
{-# INLINE unpack #-}
instance Measured v t => Measured v (Branded s t a) where
measure = measure . context
instance Functor (Branded s t) where
fmap f (Branded s a) = Branded s (f a)
instance Comonad (Branded s t) where
extract = extractBranded
extend f a@(Branded s _) = Branded s (f a)
duplicate a@(Branded s _) = Branded s a
instance Foldable (Branded s t) where
foldr f z (Branded _ a) = f a z
foldr1 _ (Branded _ a) = a
foldl f z (Branded _ a) = f z a
foldl1 _ (Branded _ a) = a
foldMap f (Branded _ a) = f a
instance Traversable (Branded s t) where
traverse f (Branded s a) = Branded s <$> f a
instance Monoid t => Applicative (Branded Unsafe t) where
pure = Branded mempty
Branded s f <*> Branded s' a = Branded (s `mappend` s') (f a)
instance Monoid t => Monad (Branded Unsafe t) where
return = Branded mempty
Branded s a >>= f = Branded (s `mappend` s') b
where Branded s' b = f a
instance (Monoid t, Monoid m) => Monoid (Branded Unsafe t m) where
mempty = Branded mempty mempty
Branded r t `mappend` Branded s u = Branded (r `mappend` s) (t `mappend` u)
-- > sample :: Branded Unsafe Rope ()
-- > sample = do pack "Hello"
-- > pack ' '
-- > pack "World"
-- >
instance Monoid t => MonadWriter t (Branded Unsafe t) where
tell t = Branded t ()
listen (Branded t a) = Branded t (a, t)
pass (Branded t (a,f)) = Branded (f t) a
runBranded :: Branded s t a -> (a, t)
runBranded (Branded t a) = (a, t)
{-# INLINE runBranded #-}
execBranded :: Branded s t a -> t
execBranded (Branded t _) = t
{-# INLINE execBranded #-}
| ekmett/rope | Data/Rope/Branded.hs | bsd-3-clause | 3,297 | 0 | 9 | 794 | 1,127 | 608 | 519 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
module Language.TNT.LambdaLifter (lambdaLift) where
import Control.Comonad
import Control.Monad.State
import Data.Generics
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set, (\\))
import qualified Data.Set as Set
import Language.TNT.Location
import Language.TNT.Name
import Language.TNT.Stmt
data R = R
{ callSet :: Map Name [Name]
, boundVars :: Map Name [Name]
}
lambdaLift :: Dec Located Name -> (Dec Located Name, [Dec Located Name])
lambdaLift dec = (dec, [])
-- stronglyConnFuns :: Dec Located Name -> [[Name]]
-- stronglyConnFuns = everywhere (++) (const [] `extQ` funD `extQ` funDefS)
-- where
-- funD :: Dec Located Name -> (Name, Name, [Name])
-- getCallSet :: Name -> LambdaLifter [Name]
-- getCallSet a = do
-- R {..} <- ask
-- return (callSet ! a)
-- callSet :: Stmt Located Name -> [Name]
-- callSet = q
-- where
-- q = everythingBut (++) $ const ([], False) `extQ` funAppE `extQ` funDefS
-- funAppE :: Exp Located Name -> ([Name], Bool)
-- funAppE (FunAppE x _) = ([extract x], False)
-- funAppE _ = ([], False)
-- funDefS :: Stmt Located Name -> ([Name], Bool)
-- funDefS (FunDefS _ _ _) = ([], True)
-- funDefS _ = ([], False)
-- getFreeVars :: Data a => Name -> [Name] -> a -> LambdaLifter [Name]
-- getFreeVars a b c = do
-- s@S {..} <- get
-- case Map.lookup a freeVars of
-- Just x ->
-- return x
-- Nothing -> do
-- let defVars = Set.fromList (defVarQ c)
-- let vars = Set.fromList (varQ c)
-- let v = Set.toList (vars \\ defVars)
-- put s { freeVars = Map.insert a v freeVars }
-- return v
-- where
-- defVarQ = everythingBut (++) $ (b, False) `mkQ` defS
-- defS :: Stmt Located Name -> ([Name], Bool)
-- defS (ImportS _ x) = ([extract x], False)
-- defS (DefS x _) = ([extract x], False)
-- defS (FunDefS _ _ _) = ([], True)
-- defS _ = ([], False)
-- varQ = everythingBut (++) $ const ([], False) `extQ` varE `extQ` funDefS
-- varE :: Exp Located Name -> ([Name], Bool)
-- varE (VarE x) = ([x], False)
-- varE (AssignE x _) = ([extract x], False)
-- varE _ = ([], False)
-- funDefS :: Stmt Located Name -> ([Name], Bool)
-- funDefS (FunDefS _ _ _) = ([], True)
-- funDefS _ = ([], False)
-- nestedFuns :: Data a => a -> [Name]
-- nestedFuns = everythingBut (++) (([], False) `mkQ` f)
-- where
-- f :: Stmt Located Name -> ([Name], Bool)
-- f (FunDefS a _ _) = ([extract a], True)
-- f _ = ([], True) | sonyandy/tnt | Language/TNT/LambdaLifter.hs | bsd-3-clause | 2,611 | 0 | 10 | 672 | 225 | 157 | 68 | 17 | 1 |
-- | This module re-exports the /mid/ interface
-- to /processing/.
module Graphics.Web.Processing (
-- * Mid interface
module Graphics.Web.Processing.Mid
) where
import Graphics.Web.Processing.Mid
| Daniel-Diaz/processing | Graphics/Web/Processing.hs | bsd-3-clause | 209 | 0 | 5 | 34 | 27 | 20 | 7 | 3 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Types
( Server, ServerState(..), initServerState
-- * Configuration types
, Environment(..), getEnvironment, getOptions, Source(..)
, getConfig, parseCmdLine
, logMsg
) where
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.State.Lazy (StateT, modify)
import Network.Wai.Handler.Warp (defaultSettings)
import System.Environment (lookupEnv, getArgs)
import Web.Scotty (Options(..))
import Data.Aeson
import Data.Aeson.Types (typeMismatch)
import qualified Data.ByteString.Lazy as BL
import System.Console.GetOpt
-- |State monad holding the configuration of a running server.
data ServerState = ServerState
{ environment :: Environment
, postSources :: [Source]
, optRunMigration :: Bool
}
initServerState :: ServerState
initServerState = ServerState
{ environment = Development
, postSources = []
, optRunMigration = False
}
-- |Hold the server state over the standard IO monad.
type Server = StateT ServerState IO
-- |The runtime environment, selected via the SCOTTY_ENV environment variable.
data Environment = Development | Production
deriving (Show, Eq, Read)
getEnvironment :: IO Environment
getEnvironment = maybe Development read <$> lookupEnv "SCOTTY_ENV"
-- | Derive Scotty options from the server configuration
getOptions :: Environment -> IO Options
getOptions e = do
let v = case e of
Development -> 1
Production -> 0
-- For now we are going with default Warp settings
return $ Options v defaultSettings
-- |All sources that can contain markdown posts.
data Source = Disk FilePath
instance Show Source where
show (Disk f) = "DIR: " ++ f
data ConfigFile = ConfigFile
{ sources :: [Source] }
instance FromJSON ConfigFile where
parseJSON (Object v) = do
ss <- v .: "sources"
diskSources <- ss .:? "disk" .!= ([] :: [FilePath])
return $ ConfigFile (Disk <$> diskSources)
parseJSON c = typeMismatch "ConfigFile" c
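-- A config file accepted by this parser might look like the following
-- (an illustrative sketch; only the "disk" source kind is recognised and
-- the paths are made up):
--
-- > { "sources": { "disk": ["/var/posts", "./drafts"] } }
--
-- which decodes to @ConfigFile [Disk "/var/posts", Disk "./drafts"]@.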
-----------------------------------------------------------------------------
-- Config Parsing --
-----------------------------------------------------------------------------
getConfig :: FilePath -> Server ()
getConfig f = do
maybeConf <- parseConfigFile f
case maybeConf of
-- update config
Just conf -> do
liftIO $ putStrLn $ "Adding sources: " ++ show (sources conf)
modify (\s -> s { postSources = sources conf ++ postSources s })
-- keep config
Nothing -> return ()
parseConfigFile :: FilePath -> Server (Maybe ConfigFile)
parseConfigFile f = do
confStr <- liftIO $ BL.readFile f
return $ decode confStr
-----------------------------------------------------------------------------
-- Command Line Options --
-----------------------------------------------------------------------------
optDescs :: [OptDescr (ServerState -> ServerState)]
optDescs =
[ Option "m" ["migrate"]
( NoArg (\s -> s { optRunMigration = True }) )
"Run post migrations from specified sources."
]
parseCmdLine :: Server ()
parseCmdLine = do
argv <- liftIO getArgs
case getOpt Permute optDescs argv of
(os, _, []) -> mapM_ modify os
(_, _, errs) ->
do liftIO $ mapM_ putStrLn errs
error "Error parsing command line."
logMsg :: String -> Server ()
logMsg = liftIO . putStrLn
| ashutoshrishi/blogserver | src/Types.hs | bsd-3-clause | 3,574 | 0 | 18 | 888 | 828 | 455 | 373 | 73 | 2 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[WwLib]{A library for the ``worker\/wrapper'' back-end to the strictness analyser}
-}
{-# LANGUAGE CPP #-}
module WwLib ( mkWwBodies, mkWWstr, mkWorkerArgs
, deepSplitProductType_maybe, findTypeShape
) where
#include "HsVersions.h"
import CoreSyn
import CoreUtils ( exprType, mkCast )
import Id
import IdInfo ( vanillaIdInfo )
import DataCon
import Demand
import MkCore ( mkRuntimeErrorApp, aBSENT_ERROR_ID, mkCoreUbxTup )
import MkId ( voidArgId, voidPrimId )
import TysPrim ( voidPrimTy )
import TysWiredIn ( tupleDataCon )
import VarEnv ( mkInScopeSet )
import Type
import Coercion
import FamInstEnv
import BasicTypes ( Boxity(..) )
import Literal ( absentLiteralOf )
import TyCon
import UniqSupply
import Unique
import Maybes
import Util
import Outputable
import DynFlags
import FastString
import ListSetOps
{-
************************************************************************
* *
\subsection[mkWrapperAndWorker]{@mkWrapperAndWorker@}
* *
************************************************************************
Here's an example. The original function is:
\begin{verbatim}
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
From this, we want to produce:
\begin{verbatim}
-- wrapper (an unfolding)
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
I# x# -> $wg a x# ys
-- call the worker; don't forget the type args!
-- worker
$wg :: forall a . Int# -> [a] -> a
$wg = \/\ a -> \ x# ys ->
let
x = I# x#
in
case x of -- note: body of g moved intact
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
Something we have to be careful about: Here's an example:
\begin{verbatim}
-- "f" strictness: U(P)U(P)
f (I# a) (I# b) = a +# b
g = f -- "g" strictness same as "f"
\end{verbatim}
\tr{f} will get a worker all nice and friendly-like; that's good.
{\em But we don't want a worker for \tr{g}}, even though it has the
same strictness as \tr{f}. Doing so could break laziness, at best.
Consequently, we insist that the number of strictness-info items is
exactly the same as the number of lambda-bound arguments. (This is
probably slightly paranoid, but OK in practice.) If it isn't the
same, we ``revise'' the strictness info, so that we won't propagate
the unusable strictness-info into the interfaces.
************************************************************************
* *
\subsection{The worker wrapper core}
* *
************************************************************************
@mkWwBodies@ is called when doing the worker\/wrapper split inside a module.
-}
mkWwBodies :: DynFlags
-> FamInstEnvs
-> Type -- Type of original function
-> [Demand] -- Strictness of original function
-> DmdResult -- Info about function result
-> UniqSM (Maybe ([Demand], -- Demands for worker (value) args
Id -> CoreExpr, -- Wrapper body, lacking only the worker Id
CoreExpr -> CoreExpr)) -- Worker body, lacking the original function rhs
-- wrap_fn_args E = \x y -> E
-- work_fn_args E = E x y
-- wrap_fn_str E = case x of { (a,b) ->
-- case a of { (a1,a2) ->
-- E a1 a2 b y }}
-- work_fn_str E = \a2 a2 b y ->
-- let a = (a1,a2) in
-- let x = (a,b) in
-- E
mkWwBodies dflags fam_envs fun_ty demands res_info
= do { let empty_subst = mkEmptyTCvSubst (mkInScopeSet (tyCoVarsOfType fun_ty))
; (wrap_args, wrap_fn_args, work_fn_args, res_ty) <- mkWWargs empty_subst fun_ty demands
; (useful1, work_args, wrap_fn_str, work_fn_str) <- mkWWstr dflags fam_envs wrap_args
-- Do CPR w/w. See Note [Always do CPR w/w]
; (useful2, wrap_fn_cpr, work_fn_cpr, cpr_res_ty)
<- mkWWcpr (gopt Opt_CprAnal dflags) fam_envs res_ty res_info
; let (work_lam_args, work_call_args) = mkWorkerArgs dflags work_args cpr_res_ty
worker_args_dmds = [idDemandInfo v | v <- work_call_args, isId v]
wrapper_body = wrap_fn_args . wrap_fn_cpr . wrap_fn_str . applyToVars work_call_args . Var
worker_body = mkLams work_lam_args. work_fn_str . work_fn_cpr . work_fn_args
; if useful1 && not only_one_void_argument || useful2
then return (Just (worker_args_dmds, wrapper_body, worker_body))
else return Nothing
}
-- We use an INLINE unconditionally, even if the wrapper turns out to be
-- something trivial like
-- fw = ...
-- f = __inline__ (coerce T fw)
-- The point is to propagate the coerce to f's call sites, so even though
-- f's RHS is now trivial (size 1) we still want the __inline__ to prevent
-- fw from being inlined into f's RHS
where
-- Note [Do not split void functions]
only_one_void_argument
| [d] <- demands
, Just (arg_ty1, _) <- splitFunTy_maybe fun_ty
, isAbsDmd d && isVoidTy arg_ty1
= True
| otherwise
= False
{-
Note [Always do CPR w/w]
~~~~~~~~~~~~~~~~~~~~~~~~
At one time we refrained from doing CPR w/w for thunks, on the grounds that
we might duplicate work. But that is already handled by the demand analyser,
which doesn't give the CPR property if w/w might waste work: see
Note [CPR for thunks] in DmdAnal.
And if something *has* been given the CPR property and we don't w/w, it's
a disaster, because then the enclosing function might say it has the CPR
property, but now doesn't, and there is a cascade of disaster. A good example
is Trac #5920.
************************************************************************
* *
\subsection{Making wrapper args}
* *
************************************************************************
During worker-wrapper stuff we may end up with an unlifted thing
which we want to let-bind without losing laziness. So we
add a void argument. E.g.
f = /\a -> \x y z -> E::Int# -- E does not mention x,y,z
==>
fw = /\ a -> \void -> E
f = /\ a -> \x y z -> fw realworld
We use the state-token type which generates no code.
-}
mkWorkerArgs :: DynFlags -> [Var]
-> Type -- Type of body
-> ([Var], -- Lambda bound args
[Var]) -- Args at call site
mkWorkerArgs dflags args res_ty
| any isId args || not needsAValueLambda
= (args, args)
| otherwise
= (args ++ [voidArgId], args ++ [voidPrimId])
where
needsAValueLambda =
isUnliftedType res_ty
|| not (gopt Opt_FunToThunk dflags)
-- see Note [Protecting the last value argument]
{-
Note [Protecting the last value argument]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the user writes (\_ -> E), they might be intentionally disallowing
the sharing of E. Since absence analysis and worker-wrapper are keen
to remove such unused arguments, we add in a void argument to prevent
the function from becoming a thunk.
The user can avoid adding the void argument with the -ffun-to-thunk
flag. However, this can create sharing, which may be bad in two ways. 1) It can
create a space leak. 2) It can prevent inlining *under a lambda*. If w/w
removes the last argument from a function f, then f now looks like a thunk, and
so f can't be inlined *under a lambda*.
************************************************************************
* *
\subsection{Coercion stuff}
* *
************************************************************************
We really want to "look through" coerces.
Reason: I've seen this situation:
let f = coerce T (\s -> E)
in \x -> case x of
p -> coerce T' f
q -> \s -> E2
r -> coerce T' f
If only we w/w'd f, we'd get
let f = coerce T (\s -> fw s)
fw = \s -> E
in ...
Now we'll inline f to get
let fw = \s -> E
in \x -> case x of
p -> fw
q -> \s -> E2
r -> fw
Now we'll see that fw has arity 1, and will arity expand
the \x to get what we want.
-}
-- mkWWargs just does eta expansion
-- is driven off the function type and arity.
-- It chomps bites off foralls, arrows, newtypes
-- and keeps repeating that until it's satisfied the supplied arity
mkWWargs :: TCvSubst -- Freshening substitution to apply to the type
-- See Note [Freshen type variables]
-> Type -- The type of the function
-> [Demand] -- Demands and one-shot info for value arguments
-> UniqSM ([Var], -- Wrapper args
CoreExpr -> CoreExpr, -- Wrapper fn
CoreExpr -> CoreExpr, -- Worker fn
Type) -- Type of wrapper body
mkWWargs subst fun_ty demands
| null demands
= return ([], id, id, substTy subst fun_ty)
| (dmd:demands') <- demands
, Just (arg_ty, fun_ty') <- splitFunTy_maybe fun_ty
= do { uniq <- getUniqueM
; let arg_ty' = substTy subst arg_ty
id = mk_wrap_arg uniq arg_ty' dmd
; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst fun_ty' demands'
; return (id : wrap_args,
Lam id . wrap_fn_args,
work_fn_args . (`App` varToCoreExpr id),
res_ty) }
| Just (tv, fun_ty') <- splitForAllTy_maybe fun_ty
= do { let (subst', tv') = substTyVarBndr subst tv
              -- This substTyVarBndr clones the type variable when necessary
-- See Note [Freshen type variables]
; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst' fun_ty' demands
; return (tv' : wrap_args,
Lam tv' . wrap_fn_args,
work_fn_args . (`mkTyApps` [mkTyVarTy tv']),
res_ty) }
| Just (co, rep_ty) <- topNormaliseNewType_maybe fun_ty
-- The newtype case is for when the function has
-- a newtype after the arrow (rare)
--
-- It's also important when we have a function returning (say) a pair
-- wrapped in a newtype, at least if CPR analysis can look
-- through such newtypes, which it probably can since they are
-- simply coerces.
= do { (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst rep_ty demands
; return (wrap_args,
\e -> Cast (wrap_fn_args e) (mkSymCo co),
\e -> work_fn_args (Cast e co),
res_ty) }
| otherwise
= WARN( True, ppr fun_ty ) -- Should not happen: if there is a demand
return ([], id, id, substTy subst fun_ty) -- then there should be a function arrow
applyToVars :: [Var] -> CoreExpr -> CoreExpr
applyToVars vars fn = mkVarApps fn vars
mk_wrap_arg :: Unique -> Type -> Demand -> Id
mk_wrap_arg uniq ty dmd
= mkSysLocalOrCoVar (fsLit "w") uniq ty
`setIdDemandInfo` dmd
{-
Note [Freshen type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we do a worker/wrapper split, we must not use shadowed names,
else we'll get
f = /\ a /\a. fw a a
which is obviously wrong. Type variables can in principle shadow,
within a type (e.g. forall a. a -> forall a. a->a). But type
variables *are* mentioned in <blah>, so we must substitute.
That's why we carry the TCvSubst through mkWWargs
************************************************************************
* *
\subsection{Strictness stuff}
* *
************************************************************************
-}
mkWWstr :: DynFlags
-> FamInstEnvs
-> [Var] -- Wrapper args; have their demand info on them
-- *Includes type variables*
-> UniqSM (Bool, -- Is this useful
[Var], -- Worker args
CoreExpr -> CoreExpr, -- Wrapper body, lacking the worker call
-- and without its lambdas
-- This fn adds the unboxing
CoreExpr -> CoreExpr) -- Worker body, lacking the original body of the function,
-- and lacking its lambdas.
-- This fn does the reboxing
mkWWstr _ _ []
= return (False, [], nop_fn, nop_fn)
mkWWstr dflags fam_envs (arg : args) = do
(useful1, args1, wrap_fn1, work_fn1) <- mkWWstr_one dflags fam_envs arg
(useful2, args2, wrap_fn2, work_fn2) <- mkWWstr dflags fam_envs args
return (useful1 || useful2, args1 ++ args2, wrap_fn1 . wrap_fn2, work_fn1 . work_fn2)
{-
Note [Unpacking arguments with product and polymorphic demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The argument is unpacked in a case if it has a product type and has a
strict *and* used demand put on it. I.e., arguments, with demands such
as the following ones:
<S,U(U, L)>
<S(L,S),U>
will be unpacked, but
<S,U> or <B,U>
will not, because the pieces aren't used. This is quite important otherwise
we end up unpacking massive tuples passed to the bottoming function. Example:
f :: ((Int,Int) -> String) -> (Int,Int) -> a
f g pr = error (g pr)
main = print (f fst (1, error "no"))
Does 'main' print "error 1" or "error no"? We don't really want 'f'
to unbox its second argument. This actually happened in GHC's own
source code, in Packages.applyPackageFlag, which ended up un-boxing
the enormous DynFlags tuple, and being strict in the
as-yet-un-filled-in pkgState files.
-}
----------------------
-- mkWWstr_one wrap_arg = (useful, work_args, wrap_fn, work_fn)
-- * wrap_fn assumes wrap_arg is in scope,
-- brings into scope work_args (via cases)
-- * work_fn assumes work_args are in scope, and
-- brings into scope wrap_arg (via lets)
mkWWstr_one :: DynFlags -> FamInstEnvs -> Var
-> UniqSM (Bool, [Var], CoreExpr -> CoreExpr, CoreExpr -> CoreExpr)
mkWWstr_one dflags fam_envs arg
| isTyVar arg
= return (False, [arg], nop_fn, nop_fn)
-- See Note [Worker-wrapper for bottoming functions]
| isAbsDmd dmd
, Just work_fn <- mk_absent_let dflags arg
-- Absent case. We can't always handle absence for arbitrary
-- unlifted types, so we need to choose just the cases we can
--- (that's what mk_absent_let does)
= return (True, [], nop_fn, work_fn)
-- See Note [Worthy functions for Worker-Wrapper split]
| isSeqDmd dmd -- `seq` demand; evaluate in wrapper in the hope
-- of dropping seqs in the worker
= let arg_w_unf = arg `setIdUnfolding` evaldUnfolding
-- Tell the worker arg that it's sure to be evaluated
-- so that internal seqs can be dropped
in return (True, [arg_w_unf], mk_seq_case arg, nop_fn)
-- Pass the arg, anyway, even if it is in theory discarded
-- Consider
-- f x y = x `seq` y
-- x gets a (Eval (Poly Abs)) demand, but if we fail to pass it to the worker
-- we ABSOLUTELY MUST record that x is evaluated in the wrapper.
-- Something like:
-- f x y = x `seq` fw y
-- fw y = let x{Evald} = error "oops" in (x `seq` y)
-- If we don't pin on the "Evald" flag, the seq doesn't disappear, and
-- we end up evaluating the absent thunk.
-- But the Evald flag is pretty weird, and I worry that it might disappear
-- during simplification, so for now I've just nuked this whole case
| isStrictDmd dmd
, Just cs <- splitProdDmd_maybe dmd
-- See Note [Unpacking arguments with product and polymorphic demands]
, Just (data_con, inst_tys, inst_con_arg_tys, co)
<- deepSplitProductType_maybe fam_envs (idType arg)
, cs `equalLength` inst_con_arg_tys
-- See Note [mkWWstr and unsafeCoerce]
= do { (uniq1:uniqs) <- getUniquesM
; let unpk_args = zipWith mk_ww_local uniqs inst_con_arg_tys
unpk_args_w_ds = zipWithEqual "mkWWstr" setIdDemandInfo unpk_args cs
unbox_fn = mkUnpackCase (Var arg) co uniq1
data_con unpk_args
rebox_fn = Let (NonRec arg con_app)
con_app = mkConApp2 data_con inst_tys unpk_args `mkCast` mkSymCo co
; (_, worker_args, wrap_fn, work_fn) <- mkWWstr dflags fam_envs unpk_args_w_ds
; return (True, worker_args, unbox_fn . wrap_fn, work_fn . rebox_fn) }
-- Don't pass the arg, rebox instead
| otherwise -- Other cases
= return (False, [arg], nop_fn, nop_fn)
where
dmd = idDemandInfo arg
----------------------
nop_fn :: CoreExpr -> CoreExpr
nop_fn body = body
{-
Note [mkWWstr and unsafeCoerce]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
By using unsafeCoerce, it is possible to make the number of demands fail to
match the number of constructor arguments; this happened in Trac #8037.
If so, the worker/wrapper split doesn't work right and we get a Core Lint
bug. The fix here is simply to decline to do w/w if that happens.
************************************************************************
* *
         Type scrutiny that is specific to demand analysis
* *
************************************************************************
Note [Do not unpack class dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have
f :: Ord a => [a] -> Int -> a
{-# INLINABLE f #-}
and we worker/wrapper f, we'll get a worker with an INLINABLE pragma
(see Note [Worker-wrapper for INLINABLE functions] in WorkWrap), which
can still be specialised by the type-class specialiser, something like
fw :: Ord a => [a] -> Int# -> a
BUT if f is strict in the Ord dictionary, we might unpack it, to get
fw :: (a->a->Bool) -> [a] -> Int# -> a
and the type-class specialiser can't specialise that. An example is
Trac #6056.
Moreover, dictionaries can have a lot of fields, so unpacking them can
increase closure sizes.
Conclusion: don't unpack dictionaries.
-}
deepSplitProductType_maybe :: FamInstEnvs -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If deepSplitProductType_maybe ty = Just (dc, tys, arg_tys, co)
-- then dc @ tys (args::arg_tys) :: rep_ty
-- co :: ty ~ rep_ty
deepSplitProductType_maybe fam_envs ty
| let (co, ty1) = topNormaliseType_maybe fam_envs ty
`orElse` (mkRepReflCo ty, ty)
, Just (tc, tc_args) <- splitTyConApp_maybe ty1
, Just con <- isDataProductTyCon_maybe tc
, not (isClassTyCon tc) -- See Note [Do not unpack class dictionaries]
= Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitProductType_maybe _ _ = Nothing
deepSplitCprType_maybe :: FamInstEnvs -> ConTag -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If deepSplitCprType_maybe n ty = Just (dc, tys, arg_tys, co)
-- then dc @ tys (args::arg_tys) :: rep_ty
-- co :: ty ~ rep_ty
deepSplitCprType_maybe fam_envs con_tag ty
| let (co, ty1) = topNormaliseType_maybe fam_envs ty
`orElse` (mkRepReflCo ty, ty)
, Just (tc, tc_args) <- splitTyConApp_maybe ty1
, isDataTyCon tc
, let cons = tyConDataCons tc
, cons `lengthAtLeast` con_tag -- This might not be true if we import the
                                 -- type constructor via a .hs-boot file (#8743)
, let con = cons `getNth` (con_tag - fIRST_TAG)
= Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitCprType_maybe _ _ _ = Nothing
findTypeShape :: FamInstEnvs -> Type -> TypeShape
-- Uncover the arrow and product shape of a type
-- The data type TypeShape is defined in Demand
-- See Note [Trimming a demand to a type] in Demand
findTypeShape fam_envs ty
| Just (tc, tc_args) <- splitTyConApp_maybe ty
, Just con <- isDataProductTyCon_maybe tc
= TsProd (map (findTypeShape fam_envs) $ dataConInstArgTys con tc_args)
| Just (_, res) <- splitFunTy_maybe ty
= TsFun (findTypeShape fam_envs res)
| Just (_, ty') <- splitForAllTy_maybe ty
= findTypeShape fam_envs ty'
| Just (_, ty') <- topNormaliseType_maybe fam_envs ty
= findTypeShape fam_envs ty'
| otherwise
= TsUnk
{-
************************************************************************
* *
\subsection{CPR stuff}
* *
************************************************************************
@mkWWcpr@ takes the worker/wrapper pair produced from the strictness
info and adds in the CPR transformation. The worker returns an
unboxed tuple containing non-CPR components. The wrapper takes this
tuple and re-produces the correct structured output.
The non-CPR results appear ordered in the unboxed tuple as if by a
left-to-right traversal of the result structure.
-}
mkWWcpr :: Bool
-> FamInstEnvs
-> Type -- function body type
-> DmdResult -- CPR analysis results
-> UniqSM (Bool, -- Is w/w'ing useful?
CoreExpr -> CoreExpr, -- New wrapper
CoreExpr -> CoreExpr, -- New worker
Type) -- Type of worker's body
mkWWcpr opt_CprAnal fam_envs body_ty res
-- CPR explicitly turned off (or in -O0)
| not opt_CprAnal = return (False, id, id, body_ty)
-- CPR is turned on by default for -O and O2
| otherwise
= case returnsCPR_maybe res of
Nothing -> return (False, id, id, body_ty) -- No CPR info
Just con_tag | Just stuff <- deepSplitCprType_maybe fam_envs con_tag body_ty
-> mkWWcpr_help stuff
| otherwise
-- See Note [non-algebraic or open body type warning]
-> WARN( True, text "mkWWcpr: non-algebraic or open body type" <+> ppr body_ty )
return (False, id, id, body_ty)
mkWWcpr_help :: (DataCon, [Type], [Type], Coercion)
-> UniqSM (Bool, CoreExpr -> CoreExpr, CoreExpr -> CoreExpr, Type)
mkWWcpr_help (data_con, inst_tys, arg_tys, co)
| [arg_ty1] <- arg_tys
, isUnliftedType arg_ty1
-- Special case when there is a single result of unlifted type
--
-- Wrapper: case (..call worker..) of x -> C x
-- Worker: case ( ..body.. ) of C x -> x
= do { (work_uniq : arg_uniq : _) <- getUniquesM
; let arg = mk_ww_local arg_uniq arg_ty1
con_app = mkConApp2 data_con inst_tys [arg] `mkCast` mkSymCo co
; return ( True
, \ wkr_call -> Case wkr_call arg (exprType con_app) [(DEFAULT, [], con_app)]
, \ body -> mkUnpackCase body co work_uniq data_con [arg] (varToCoreExpr arg)
-- varToCoreExpr important here: arg can be a coercion
-- Lacking this caused Trac #10658
, arg_ty1 ) }
| otherwise -- The general case
-- Wrapper: case (..call worker..) of (# a, b #) -> C a b
-- Worker: case ( ...body... ) of C a b -> (# a, b #)
= do { (work_uniq : uniqs) <- getUniquesM
; let (wrap_wild : args) = zipWith mk_ww_local uniqs (ubx_tup_ty : arg_tys)
ubx_tup_ty = exprType ubx_tup_app
ubx_tup_app = mkCoreUbxTup arg_tys (map varToCoreExpr args)
con_app = mkConApp2 data_con inst_tys args `mkCast` mkSymCo co
; return (True
, \ wkr_call -> Case wkr_call wrap_wild (exprType con_app) [(DataAlt (tupleDataCon Unboxed (length arg_tys)), args, con_app)]
, \ body -> mkUnpackCase body co work_uniq data_con args ubx_tup_app
, ubx_tup_ty ) }
mkUnpackCase :: CoreExpr -> Coercion -> Unique -> DataCon -> [Id] -> CoreExpr -> CoreExpr
-- (mkUnpackCase e co uniq Con args body)
-- returns
-- case e |> co of bndr { Con args -> body }
mkUnpackCase (Tick tickish e) co uniq con args body -- See Note [Profiling and unpacking]
= Tick tickish (mkUnpackCase e co uniq con args body)
mkUnpackCase scrut co uniq boxing_con unpk_args body
= Case casted_scrut bndr (exprType body)
[(DataAlt boxing_con, unpk_args, body)]
where
casted_scrut = scrut `mkCast` co
bndr = mk_ww_local uniq (exprType casted_scrut)
{-
Note [non-algebraic or open body type warning]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are a few cases where the W/W transformation is told that something
returns a constructor, but the type at hand doesn't really match this. One
real-world example involves unsafeCoerce:
foo :: IO a
foo = unsafeCoerce c_exit
foreign import ccall "c_exit" c_exit :: IO ()
Here CPR will tell you that `foo` returns a () constructor for sure, but trying
to create a worker/wrapper for type `a` obviously fails.
(This was a real example until ee8e792 in libraries/base.)
It does not seem feasible to avoid all such cases already in the analyser (and
after all, the analysis is not really wrong), so we simply do nothing here in
mkWWcpr. But we still want to emit a warning with -DDEBUG, to hopefully catch
other cases where something went avoidably wrong.
Note [Profiling and unpacking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the original function looked like
f = \ x -> {-# SCC "foo" #-} E
then we want the CPR'd worker to look like
\ x -> {-# SCC "foo" #-} (case E of I# x -> x)
and definitely not
\ x -> case ({-# SCC "foo" #-} E) of I# x -> x)
This transform doesn't move work or allocation
from one cost centre to another.
Later [SDM]: presumably this is because we want the simplifier to
eliminate the case, and the scc would get in the way? I'm ok with
including the case itself in the cost centre, since it is morally
part of the function (post transformation) anyway.
************************************************************************
* *
\subsection{Utilities}
* *
************************************************************************
Note [Absent errors]
~~~~~~~~~~~~~~~~~~~~
We make a new binding for Ids that are marked absent, thus
let x = absentError "x :: Int"
The idea is that this binding will never be used; but if it
buggily is used we'll get a runtime error message.
Coping with absence for *unlifted* types is important; see, for
example, Trac #4306. For these we find a suitable literal,
using Literal.absentLiteralOf. We don't have literals for
every primitive type, so the function is partial.
[I did try the experiment of using an error thunk for unlifted
things too, relying on the simplifier to drop it as dead code,
by making absentError
(a) *not* be a bottoming Id,
(b) be "ok for speculation"
But that relies on the simplifier finding that it really
is dead code, which is fragile, and indeed failed when
profiling is on, which disables various optimisations. So
using a literal will do.]
-}
mk_absent_let :: DynFlags -> Id -> Maybe (CoreExpr -> CoreExpr)
mk_absent_let dflags arg
| not (isUnliftedType arg_ty)
= Just (Let (NonRec arg abs_rhs))
| Just tc <- tyConAppTyCon_maybe arg_ty
, Just lit <- absentLiteralOf tc
= Just (Let (NonRec arg (Lit lit)))
| arg_ty `eqType` voidPrimTy
= Just (Let (NonRec arg (Var voidPrimId)))
| otherwise
= WARN( True, text "No absent value for" <+> ppr arg_ty )
Nothing
where
arg_ty = idType arg
abs_rhs = mkRuntimeErrorApp aBSENT_ERROR_ID arg_ty msg
msg = showSDoc dflags (ppr arg <+> ppr (idType arg))
mk_seq_case :: Id -> CoreExpr -> CoreExpr
mk_seq_case arg body = Case (Var arg) (sanitiseCaseBndr arg) (exprType body) [(DEFAULT, [], body)]
sanitiseCaseBndr :: Id -> Id
-- The argument we are scrutinising has the right type to be
-- a case binder, so it's convenient to re-use it for that purpose.
-- But we *must* throw away all its IdInfo. In particular, the argument
-- will have demand info on it, and that demand info may be incorrect for
-- the case binder. e.g. case ww_arg of ww_arg { I# x -> ... }
-- Quite likely ww_arg isn't used in '...'. The case may get discarded
-- if the case binder says "I'm demanded". This happened in a situation
-- like (x+y) `seq` ....
sanitiseCaseBndr id = id `setIdInfo` vanillaIdInfo
mk_ww_local :: Unique -> Type -> Id
mk_ww_local uniq ty = mkSysLocalOrCoVar (fsLit "ww") uniq ty
| tjakway/ghcjvm | compiler/stranal/WwLib.hs | bsd-3-clause | 30,135 | 0 | 18 | 9,108 | 3,726 | 2,023 | 1,703 | 258 | 2 |
module Haskmon.Types.Pokemon(
module Haskmon.Types.Pokemon,
I.Pokemon, I.Pokedex, I.MetaPokemon
) where
import Haskmon.Types.Internals(MetaData,
MetaPokemon,
MetaAbility,
MetaMove,
MetaType,
MetaSprite,
MetaEggGroup,
MetaDescription,
Pokedex,
Pokemon,
Evolution)
import qualified Haskmon.Types.Internals as I
-- Pokedex
pokedexName :: Pokedex -> String
pokedexName = I.pokedexName
pokedexPokemons :: Pokedex -> [MetaPokemon]
pokedexPokemons = I.pokedexPokemons
-- MetaPokemon
mPokemonName :: MetaPokemon -> String
mPokemonName = I.mPokemonName
getPokemon :: MetaPokemon -> IO Pokemon
getPokemon = I.getPokemon
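-- A small usage sketch (editor's example, not part of the original module;
-- 'printFirstName' is a hypothetical helper):
--
-- > printFirstName :: Pokedex -> IO ()
-- > printFirstName dex = case pokedexPokemons dex of
-- >   (mp:_) -> getPokemon mp >>= putStrLn . pokemonName
-- >   []     -> putStrLn "empty pokedex"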
-- Pokemon
pokemonName :: Pokemon -> String
pokemonName = I.pokemonName
pokemonNationalId :: Pokemon -> Word
pokemonNationalId = I.pokemonNationalId
pokemonAbilities :: Pokemon -> [MetaAbility]
pokemonAbilities = I.pokemonAbilities
pokemonMoves :: Pokemon -> [MetaMove]
pokemonMoves = I.pokemonMoves
pokemonTypes :: Pokemon -> [MetaType]
pokemonTypes = I.pokemonTypes
pokemonEggCycle :: Pokemon -> Word
pokemonEggCycle = I.pokemonEggCycle
pokemonEggGroups :: Pokemon -> [MetaEggGroup]
pokemonEggGroups = I.pokemonEggGroups
pokemonCatchRate :: Pokemon -> Word
pokemonCatchRate = I.pokemonCatchRate
pokemonHp :: Pokemon -> Word
pokemonHp = I.pokemonHp
pokemonAttack :: Pokemon -> Word
pokemonAttack = I.pokemonAttack
pokemonDefense :: Pokemon -> Word
pokemonDefense = I.pokemonDefense
pokemonSpAtk :: Pokemon -> Word
pokemonSpAtk = I.pokemonSpAtk
pokemonSpDef :: Pokemon -> Word
pokemonSpDef = I.pokemonSpDef
pokemonSpeed :: Pokemon -> Word
pokemonSpeed = I.pokemonSpeed
pokemonSprites :: Pokemon -> [MetaSprite]
pokemonSprites = I.pokemonSprites
pokemonDescriptions :: Pokemon -> [MetaDescription]
pokemonDescriptions = I.pokemonDescriptions
pokemonMetadata :: Pokemon -> MetaData
pokemonMetadata = I.pokemonMetadata
pokemonEvolutions :: Pokemon -> [Evolution]
pokemonEvolutions = I.pokemonEvolutions
| bitemyapp/Haskmon | src/Haskmon/Types/Pokemon.hs | mit | 2,256 | 0 | 6 | 555 | 463 | 272 | 191 | 59 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
Module : BasicFSM
Description : A simple example.
Copyright : (c) Max Amanshauser, 2016
License : MIT
Maintainer : [email protected]
-}
module BasicFSM (runBasicTests) where
import Test.Tasty
import Test.Tasty.HUnit
import Control.Concurrent
import Data.Aeson
import Data.Hashable
import Data.Text as Text
import Data.Typeable
import GHC.Generics
import Mealstrom
import Mealstrom.FSMStore
import Mealstrom.WALStore
import Mealstrom.PostgresJSONStore as PGJSON
import Mealstrom.MemoryStore as MemStore
-- ####################
-- # Connection Example
-- ####################
-- This is a contrived example of how to use a custom Key type, instead of the recommended Text and UUID.
newtype ConnectionKey = ConnectionKey (Int,Int) deriving (Show,Eq,Generic,Hashable)
instance FSMKey ConnectionKey where
toText (ConnectionKey (a,b)) = Text.pack $ "(" ++ show a ++ "," ++ show b ++ ")"
fromText t = case fmap (\s -> read (unpack s) :: Int) (splitOn "," $ Text.dropEnd 1 (Text.drop 1 t)) of
a:[b] -> ConnectionKey (a,b)
                  _ -> error "ConnectionKey.fromText: unparseable key"
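-- Editor's note (not part of the original test): 'toText' and 'fromText' are
-- meant to round-trip, e.g.
--
-- >>> fromText (toText (ConnectionKey (1231231,21))) == ConnectionKey (1231231,21)
-- True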
data ConnectionState = New | Open | Closed
deriving (Eq,Show,Typeable,Generic,ToJSON,FromJSON)
data ConnectionEvent = Create | Close | Reset
deriving (Eq,Show,Typeable,Generic,ToJSON,FromJSON)
data ConnectionAction = PrintStatusOpened | PrintStatusClosed
deriving (Eq,Typeable,Generic,ToJSON,FromJSON)
instance MealyInstance ConnectionKey ConnectionState ConnectionEvent ConnectionAction
connEffects :: MVar () -> Msg ConnectionAction -> IO Bool
connEffects mvar (Msg _i c)
| c == PrintStatusOpened = putStrLn "OUTPUT: Connection opened" >> putMVar mvar () >> return True
| c == PrintStatusClosed = putStrLn "OUTPUT: Connection closed" >> putMVar mvar () >> return True
connTransition :: (ConnectionState, ConnectionEvent) -> (ConnectionState, [ConnectionAction])
connTransition (s,e) =
case (s,e) of
(New, Create) -> (Open, [PrintStatusOpened])
(Open, Close) -> (Closed,[PrintStatusClosed])
(Open, Reset) -> (Open, [PrintStatusClosed, PrintStatusOpened])
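-- Illustrative trace of the transition table above (editor's note, not part
-- of the original test):
--
--   (New,  Create) ==> (Open,   [PrintStatusOpened])
--   (Open, Reset)  ==> (Open,   [PrintStatusClosed, PrintStatusOpened])
--   (Open, Close)  ==> (Closed, [PrintStatusClosed])
--
-- Any other (state, event) pair is deliberately left undefined and would hit
-- the incomplete pattern match in 'connTransition'.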
runBasicTests :: String -> TestTree
runBasicTests c = testGroup "BasicFSM" [
testCase "BasicPG" (runTest (PGJSON.mkStore c)),
testCase "BasicMem0" (runTest (MemStore.mkStore :: Text -> IO (MemoryStore ConnectionKey ConnectionState ConnectionEvent ConnectionAction)))
]
runTest :: (FSMStore st ConnectionKey ConnectionState ConnectionEvent ConnectionAction,
WALStore st ConnectionKey) => (Text -> IO st) -> IO ()
runTest c = do
st <- c "BasicFSMTest"
sync <- newEmptyMVar
let t = FSMTable connTransition (connEffects sync)
let myFSM = FSMHandle st st t 90 3
let firstId = ConnectionKey (1231231,21) -- This represents a socket or something
post myFSM firstId New
Just fsmState1 <- get myFSM firstId
fsmState1 @?= New
msg1 <- mkMsgs [Create]
_ <- patch myFSM firstId msg1
takeMVar sync
Just fsmState2 <- get myFSM firstId
fsmState2 @?= Open
msg2 <- mkMsgs [Close]
_ <- patch myFSM firstId msg2
takeMVar sync
Just fsmState3 <- get myFSM firstId
fsmState3 @?= Closed
| linearray/mealstrom | test/BasicFSM.hs | mit | 3,382 | 0 | 14 | 672 | 980 | 508 | 472 | 67 | 3 |
module Language.Bracer (module X) where
import Language.Bracer.Syntax as X
import Language.Bracer.Parsing as X
import Language.Bracer.Transformations as X
| cbarrett/bracer | Language/Bracer.hs | mit | 156 | 0 | 4 | 18 | 36 | 26 | 10 | 4 | 0 |
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.OpsWorks
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- AWS OpsWorks
--
-- Welcome to the /AWS OpsWorks API Reference/. This guide provides
-- descriptions, syntax, and usage examples about AWS OpsWorks actions and
-- data types, including common parameters and error codes.
--
-- AWS OpsWorks is an application management service that provides an
-- integrated experience for overseeing the complete application lifecycle.
-- For information about this product, go to the
-- <http://aws.amazon.com/opsworks/ AWS OpsWorks> details page.
--
-- __SDKs and CLI__
--
-- The most common way to use the AWS OpsWorks API is by using the AWS
-- Command Line Interface (CLI) or by using one of the AWS SDKs to
-- implement applications in your preferred language. For more information,
-- see:
--
-- - <http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-welcome.html AWS CLI>
-- - <http://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/opsworks/AWSOpsWorksClient.html AWS SDK for Java>
-- - <http://docs.aws.amazon.com/sdkfornet/latest/apidocs/html/N_Amazon_OpsWorks.htm AWS SDK for .NET>
-- - <http://docs.aws.amazon.com/aws-sdk-php-2/latest/class-Aws.OpsWorks.OpsWorksClient.html AWS SDK for PHP 2>
-- - <http://docs.aws.amazon.com/AWSRubySDK/latest/AWS/OpsWorks/Client.html AWS SDK for Ruby>
-- - <http://aws.amazon.com/documentation/sdkforjavascript/ AWS SDK for Node.js>
-- - <http://docs.pythonboto.org/en/latest/ref/opsworks.html AWS SDK for Python(Boto)>
--
-- __Endpoints__
--
-- AWS OpsWorks supports only one endpoint,
-- opsworks.us-east-1.amazonaws.com (HTTPS), so you must connect to that
-- endpoint. You can then use the API to direct AWS OpsWorks to create
-- stacks in any AWS Region.
--
-- __Chef Versions__
--
-- When you call CreateStack, CloneStack, or UpdateStack we recommend you
-- use the 'ConfigurationManager' parameter to specify the Chef version.
-- The recommended value for Linux stacks, which is also the default value,
-- is currently 11.10. Windows stacks use Chef 12.2. For more information,
-- see
-- <http://docs.aws.amazon.com/opsworks/latest/userguide/workingcookbook-chef11.html Chef Versions>.
--
-- You can also specify Chef 11.4 or Chef 0.9 for your Linux stack.
-- However, Chef 0.9 has been deprecated. We do not recommend using Chef
-- 0.9 for new stacks, and we recommend migrating your existing Chef 0.9
-- stacks to Chef 11.10 as soon as possible.
--
-- /See:/ <http://docs.aws.amazon.com/opsworks/latest/APIReference/Welcome.html AWS API Reference>
module Network.AWS.OpsWorks
(
-- * Service Configuration
opsWorks
-- * Errors
-- $errors
-- ** ValidationException
, _ValidationException
-- ** ResourceNotFoundException
, _ResourceNotFoundException
-- * Waiters
-- $waiters
-- * Operations
-- $operations
-- ** DescribeRDSDBInstances
, module Network.AWS.OpsWorks.DescribeRDSDBInstances
-- ** DeleteStack
, module Network.AWS.OpsWorks.DeleteStack
-- ** UpdateStack
, module Network.AWS.OpsWorks.UpdateStack
-- ** CreateLayer
, module Network.AWS.OpsWorks.CreateLayer
-- ** SetLoadBasedAutoScaling
, module Network.AWS.OpsWorks.SetLoadBasedAutoScaling
-- ** DeregisterRDSDBInstance
, module Network.AWS.OpsWorks.DeregisterRDSDBInstance
-- ** UnassignVolume
, module Network.AWS.OpsWorks.UnassignVolume
-- ** CreateInstance
, module Network.AWS.OpsWorks.CreateInstance
-- ** DescribeLayers
, module Network.AWS.OpsWorks.DescribeLayers
-- ** RegisterElasticIP
, module Network.AWS.OpsWorks.RegisterElasticIP
-- ** DescribeAgentVersions
, module Network.AWS.OpsWorks.DescribeAgentVersions
-- ** CreateDeployment
, module Network.AWS.OpsWorks.CreateDeployment
-- ** AssignInstance
, module Network.AWS.OpsWorks.AssignInstance
-- ** DescribeStacks
, module Network.AWS.OpsWorks.DescribeStacks
-- ** DeleteInstance
, module Network.AWS.OpsWorks.DeleteInstance
-- ** UpdateInstance
, module Network.AWS.OpsWorks.UpdateInstance
-- ** DeregisterVolume
, module Network.AWS.OpsWorks.DeregisterVolume
-- ** RebootInstance
, module Network.AWS.OpsWorks.RebootInstance
-- ** DeleteApp
, module Network.AWS.OpsWorks.DeleteApp
-- ** UpdateApp
, module Network.AWS.OpsWorks.UpdateApp
-- ** UpdateRDSDBInstance
, module Network.AWS.OpsWorks.UpdateRDSDBInstance
-- ** DescribeTimeBasedAutoScaling
, module Network.AWS.OpsWorks.DescribeTimeBasedAutoScaling
-- ** StopStack
, module Network.AWS.OpsWorks.StopStack
-- ** DescribeVolumes
, module Network.AWS.OpsWorks.DescribeVolumes
-- ** DisassociateElasticIP
, module Network.AWS.OpsWorks.DisassociateElasticIP
-- ** RegisterEcsCluster
, module Network.AWS.OpsWorks.RegisterEcsCluster
-- ** StopInstance
, module Network.AWS.OpsWorks.StopInstance
-- ** RegisterVolume
, module Network.AWS.OpsWorks.RegisterVolume
-- ** SetTimeBasedAutoScaling
, module Network.AWS.OpsWorks.SetTimeBasedAutoScaling
-- ** DescribeUserProfiles
, module Network.AWS.OpsWorks.DescribeUserProfiles
-- ** AttachElasticLoadBalancer
, module Network.AWS.OpsWorks.AttachElasticLoadBalancer
-- ** DeregisterElasticIP
, module Network.AWS.OpsWorks.DeregisterElasticIP
-- ** DeregisterEcsCluster
, module Network.AWS.OpsWorks.DeregisterEcsCluster
-- ** DescribeApps
, module Network.AWS.OpsWorks.DescribeApps
-- ** UpdateMyUserProfile
, module Network.AWS.OpsWorks.UpdateMyUserProfile
-- ** DescribeStackSummary
, module Network.AWS.OpsWorks.DescribeStackSummary
-- ** DescribeInstances
, module Network.AWS.OpsWorks.DescribeInstances
-- ** DescribeDeployments
, module Network.AWS.OpsWorks.DescribeDeployments
-- ** DescribeElasticIPs
, module Network.AWS.OpsWorks.DescribeElasticIPs
-- ** GrantAccess
, module Network.AWS.OpsWorks.GrantAccess
-- ** DeleteLayer
, module Network.AWS.OpsWorks.DeleteLayer
-- ** UpdateLayer
, module Network.AWS.OpsWorks.UpdateLayer
-- ** CreateStack
, module Network.AWS.OpsWorks.CreateStack
-- ** UpdateElasticIP
, module Network.AWS.OpsWorks.UpdateElasticIP
-- ** CreateApp
, module Network.AWS.OpsWorks.CreateApp
-- ** GetHostnameSuggestion
, module Network.AWS.OpsWorks.GetHostnameSuggestion
-- ** CloneStack
, module Network.AWS.OpsWorks.CloneStack
-- ** DescribePermissions
, module Network.AWS.OpsWorks.DescribePermissions
-- ** DetachElasticLoadBalancer
, module Network.AWS.OpsWorks.DetachElasticLoadBalancer
-- ** RegisterInstance
, module Network.AWS.OpsWorks.RegisterInstance
-- ** AssociateElasticIP
, module Network.AWS.OpsWorks.AssociateElasticIP
-- ** DescribeLoadBasedAutoScaling
, module Network.AWS.OpsWorks.DescribeLoadBasedAutoScaling
-- ** DescribeStackProvisioningParameters
, module Network.AWS.OpsWorks.DescribeStackProvisioningParameters
-- ** UnassignInstance
, module Network.AWS.OpsWorks.UnassignInstance
-- ** DescribeMyUserProfile
, module Network.AWS.OpsWorks.DescribeMyUserProfile
-- ** DeleteUserProfile
, module Network.AWS.OpsWorks.DeleteUserProfile
-- ** UpdateUserProfile
, module Network.AWS.OpsWorks.UpdateUserProfile
-- ** DescribeServiceErrors
, module Network.AWS.OpsWorks.DescribeServiceErrors
-- ** RegisterRDSDBInstance
, module Network.AWS.OpsWorks.RegisterRDSDBInstance
-- ** StartStack
, module Network.AWS.OpsWorks.StartStack
-- ** CreateUserProfile
, module Network.AWS.OpsWorks.CreateUserProfile
-- ** DescribeCommands
, module Network.AWS.OpsWorks.DescribeCommands
-- ** AssignVolume
, module Network.AWS.OpsWorks.AssignVolume
-- ** DescribeElasticLoadBalancers
, module Network.AWS.OpsWorks.DescribeElasticLoadBalancers
-- ** SetPermission
, module Network.AWS.OpsWorks.SetPermission
-- ** DeregisterInstance
, module Network.AWS.OpsWorks.DeregisterInstance
-- ** DescribeEcsClusters
, module Network.AWS.OpsWorks.DescribeEcsClusters
-- ** DescribeRAIdArrays
, module Network.AWS.OpsWorks.DescribeRAIdArrays
-- ** UpdateVolume
, module Network.AWS.OpsWorks.UpdateVolume
-- ** StartInstance
, module Network.AWS.OpsWorks.StartInstance
-- * Types
-- ** AppAttributesKeys
, AppAttributesKeys (..)
-- ** AppType
, AppType (..)
-- ** Architecture
, Architecture (..)
-- ** AutoScalingType
, AutoScalingType (..)
-- ** DeploymentCommandName
, DeploymentCommandName (..)
-- ** LayerAttributesKeys
, LayerAttributesKeys (..)
-- ** LayerType
, LayerType (..)
-- ** RootDeviceType
, RootDeviceType (..)
-- ** SourceType
, SourceType (..)
-- ** StackAttributesKeys
, StackAttributesKeys (..)
-- ** VirtualizationType
, VirtualizationType (..)
-- ** VolumeType
, VolumeType (..)
-- ** AgentVersion
, AgentVersion
, agentVersion
, avVersion
, avConfigurationManager
-- ** App
, App
, app
, appSSLConfiguration
, appEnvironment
, appEnableSSL
, appCreatedAt
, appShortname
, appDataSources
, appAppSource
, appAppId
, appAttributes
, appName
, appType
, appStackId
, appDomains
, appDescription
-- ** AutoScalingThresholds
, AutoScalingThresholds
, autoScalingThresholds
, astInstanceCount
, astIgnoreMetricsTime
, astLoadThreshold
, astThresholdsWaitTime
, astAlarms
, astMemoryThreshold
, astCPUThreshold
-- ** BlockDeviceMapping
, BlockDeviceMapping
, blockDeviceMapping
, bdmVirtualName
, bdmNoDevice
, bdmEBS
, bdmDeviceName
-- ** ChefConfiguration
, ChefConfiguration
, chefConfiguration
, ccBerkshelfVersion
, ccManageBerkshelf
-- ** Command
, Command
, command
, cDeploymentId
, cInstanceId
, cStatus
, cLogURL
, cCreatedAt
, cCommandId
, cExitCode
, cType
, cCompletedAt
, cAcknowledgedAt
-- ** DataSource
, DataSource
, dataSource
, dsARN
, dsDatabaseName
, dsType
-- ** Deployment
, Deployment
, deployment
, dDeploymentId
, dStatus
, dCommand
, dCreatedAt
, dCustomJSON
, dIAMUserARN
, dAppId
, dInstanceIds
, dCompletedAt
, dStackId
, dComment
, dDuration
-- ** DeploymentCommand
, DeploymentCommand
, deploymentCommand
, dcArgs
, dcName
-- ** EBSBlockDevice
, EBSBlockDevice
, ebsBlockDevice
, ebdDeleteOnTermination
, ebdVolumeSize
, ebdIOPS
, ebdVolumeType
, ebdSnapshotId
-- ** EcsCluster
, EcsCluster
, ecsCluster
, ecEcsClusterARN
, ecEcsClusterName
, ecRegisteredAt
, ecStackId
-- ** ElasticIP
, ElasticIP
, elasticIP
, eiInstanceId
, eiDomain
, eiIP
, eiName
, eiRegion
-- ** ElasticLoadBalancer
, ElasticLoadBalancer
, elasticLoadBalancer
, elbSubnetIds
, elbVPCId
, elbAvailabilityZones
, elbRegion
, elbElasticLoadBalancerName
, elbStackId
, elbEC2InstanceIds
, elbLayerId
, elbDNSName
-- ** EnvironmentVariable
, EnvironmentVariable
, environmentVariable
, evSecure
, evKey
, evValue
-- ** Instance
, Instance
, instance'
, iPrivateDNS
, iReportedAgentVersion
, iInstanceId
, iStatus
, iPrivateIP
, iInstallUpdatesOnBoot
, iVirtualizationType
, iInstanceProfileARN
, iPlatform
, iHostname
, iSSHHostRsaKeyFingerprint
, iSecurityGroupIds
, iEcsClusterARN
, iCreatedAt
, iEC2InstanceId
, iSSHKeyName
, iAgentVersion
, iRootDeviceVolumeId
, iSubnetId
, iInfrastructureClass
, iSSHHostDsaKeyFingerprint
, iInstanceType
, iEBSOptimized
, iElasticIP
, iOS
, iAvailabilityZone
, iLastServiceErrorId
, iAutoScalingType
, iLayerIds
, iArchitecture
, iPublicDNS
, iAMIId
, iPublicIP
, iReportedOS
, iRegisteredBy
, iStackId
, iRootDeviceType
, iEcsContainerInstanceARN
, iBlockDeviceMappings
-- ** InstanceIdentity
, InstanceIdentity
, instanceIdentity
, iiSignature
, iiDocument
-- ** InstancesCount
, InstancesCount
, instancesCount
, icTerminating
, icPending
, icOnline
, icUnassigning
, icDeregistering
, icRunningSetup
, icRequested
, icBooting
, icStopped
, icRebooting
, icAssigning
, icShuttingDown
, icSetupFailed
, icConnectionLost
, icTerminated
, icStopping
, icRegistered
, icStartFailed
, icRegistering
-- ** Layer
, Layer
, layer
, lCustomInstanceProfileARN
, lCustomSecurityGroupIds
, lInstallUpdatesOnBoot
, lLifecycleEventConfiguration
, lCreatedAt
, lShortname
, lDefaultRecipes
, lCustomRecipes
, lCustomJSON
, lVolumeConfigurations
, lEnableAutoHealing
, lPackages
, lAttributes
, lName
, lAutoAssignPublicIPs
, lType
, lUseEBSOptimizedInstances
, lStackId
, lLayerId
, lDefaultSecurityGroupNames
, lAutoAssignElasticIPs
-- ** LifecycleEventConfiguration
, LifecycleEventConfiguration
, lifecycleEventConfiguration
, lecShutdown
-- ** LoadBasedAutoScalingConfiguration
, LoadBasedAutoScalingConfiguration
, loadBasedAutoScalingConfiguration
, lbascUpScaling
, lbascEnable
, lbascDownScaling
, lbascLayerId
-- ** Permission
, Permission
, permission
, pIAMUserARN
, pAllowSudo
, pStackId
, pLevel
, pAllowSSH
-- ** RAIdArray
, RAIdArray
, rAIdArray
, raiaInstanceId
, raiaSize
, raiaIOPS
, raiaCreatedAt
, raiaRAIdLevel
, raiaDevice
, raiaNumberOfDisks
, raiaAvailabilityZone
, raiaName
, raiaRAIdArrayId
, raiaVolumeType
, raiaStackId
, raiaMountPoint
-- ** RDSDBInstance
, RDSDBInstance
, rdsDBInstance
, rdiRDSDBInstanceARN
, rdiDBUser
, rdiMissingOnRDS
, rdiEngine
, rdiAddress
, rdiDBInstanceIdentifier
, rdiRegion
, rdiStackId
, rdiDBPassword
-- ** Recipes
, Recipes
, recipes
, rSetup
, rShutdown
, rUndeploy
, rConfigure
, rDeploy
-- ** ReportedOS
, ReportedOS
, reportedOS
, roFamily
, roName
, roVersion
-- ** SSLConfiguration
, SSLConfiguration
, sslConfiguration
, scChain
, scCertificate
, scPrivateKey
-- ** SelfUserProfile
, SelfUserProfile
, selfUserProfile
, supSSHPublicKey
, supSSHUsername
, supIAMUserARN
, supName
-- ** ServiceError'
, ServiceError'
, serviceError'
, seInstanceId
, seCreatedAt
, seServiceErrorId
, seType
, seStackId
, seMessage
-- ** ShutdownEventConfiguration
, ShutdownEventConfiguration
, shutdownEventConfiguration
, secExecutionTimeout
, secDelayUntilElbConnectionsDrained
-- ** Source
, Source
, source
, sURL
, sUsername
, sSSHKey
, sPassword
, sType
, sRevision
-- ** Stack
, Stack
, stack
, sDefaultInstanceProfileARN
, sServiceRoleARN
, sDefaultRootDeviceType
, sARN
, sCreatedAt
, sVPCId
, sChefConfiguration
, sAgentVersion
, sDefaultSSHKeyName
, sCustomJSON
, sCustomCookbooksSource
, sDefaultAvailabilityZone
, sAttributes
, sName
, sDefaultOS
, sUseOpsworksSecurityGroups
, sUseCustomCookbooks
, sDefaultSubnetId
, sRegion
, sConfigurationManager
, sStackId
, sHostnameTheme
-- ** StackConfigurationManager
, StackConfigurationManager
, stackConfigurationManager
, scmName
, scmVersion
-- ** StackSummary
, StackSummary
, stackSummary
, ssARN
, ssAppsCount
, ssName
, ssStackId
, ssLayersCount
, ssInstancesCount
-- ** TemporaryCredential
, TemporaryCredential
, temporaryCredential
, tcInstanceId
, tcUsername
, tcPassword
, tcValidForInMinutes
-- ** TimeBasedAutoScalingConfiguration
, TimeBasedAutoScalingConfiguration
, timeBasedAutoScalingConfiguration
, tbascInstanceId
, tbascAutoScalingSchedule
-- ** UserProfile
, UserProfile
, userProfile
, upAllowSelfManagement
, upSSHPublicKey
, upSSHUsername
, upIAMUserARN
, upName
-- ** Volume
, Volume
, volume
, vInstanceId
, vStatus
, vSize
, vIOPS
, vDevice
, vAvailabilityZone
, vName
, vRAIdArrayId
, vVolumeId
, vRegion
, vVolumeType
, vEC2VolumeId
, vMountPoint
-- ** VolumeConfiguration
, VolumeConfiguration
, volumeConfiguration
, vcIOPS
, vcRAIdLevel
, vcVolumeType
, vcMountPoint
, vcNumberOfDisks
, vcSize
-- ** WeeklyAutoScalingSchedule
, WeeklyAutoScalingSchedule
, weeklyAutoScalingSchedule
, wassThursday
, wassWednesday
, wassSaturday
, wassMonday
, wassFriday
, wassSunday
, wassTuesday
) where
import Network.AWS.OpsWorks.AssignInstance
import Network.AWS.OpsWorks.AssignVolume
import Network.AWS.OpsWorks.AssociateElasticIP
import Network.AWS.OpsWorks.AttachElasticLoadBalancer
import Network.AWS.OpsWorks.CloneStack
import Network.AWS.OpsWorks.CreateApp
import Network.AWS.OpsWorks.CreateDeployment
import Network.AWS.OpsWorks.CreateInstance
import Network.AWS.OpsWorks.CreateLayer
import Network.AWS.OpsWorks.CreateStack
import Network.AWS.OpsWorks.CreateUserProfile
import Network.AWS.OpsWorks.DeleteApp
import Network.AWS.OpsWorks.DeleteInstance
import Network.AWS.OpsWorks.DeleteLayer
import Network.AWS.OpsWorks.DeleteStack
import Network.AWS.OpsWorks.DeleteUserProfile
import Network.AWS.OpsWorks.DeregisterEcsCluster
import Network.AWS.OpsWorks.DeregisterElasticIP
import Network.AWS.OpsWorks.DeregisterInstance
import Network.AWS.OpsWorks.DeregisterRDSDBInstance
import Network.AWS.OpsWorks.DeregisterVolume
import Network.AWS.OpsWorks.DescribeAgentVersions
import Network.AWS.OpsWorks.DescribeApps
import Network.AWS.OpsWorks.DescribeCommands
import Network.AWS.OpsWorks.DescribeDeployments
import Network.AWS.OpsWorks.DescribeEcsClusters
import Network.AWS.OpsWorks.DescribeElasticIPs
import Network.AWS.OpsWorks.DescribeElasticLoadBalancers
import Network.AWS.OpsWorks.DescribeInstances
import Network.AWS.OpsWorks.DescribeLayers
import Network.AWS.OpsWorks.DescribeLoadBasedAutoScaling
import Network.AWS.OpsWorks.DescribeMyUserProfile
import Network.AWS.OpsWorks.DescribePermissions
import Network.AWS.OpsWorks.DescribeRAIdArrays
import Network.AWS.OpsWorks.DescribeRDSDBInstances
import Network.AWS.OpsWorks.DescribeServiceErrors
import Network.AWS.OpsWorks.DescribeStackProvisioningParameters
import Network.AWS.OpsWorks.DescribeStacks
import Network.AWS.OpsWorks.DescribeStackSummary
import Network.AWS.OpsWorks.DescribeTimeBasedAutoScaling
import Network.AWS.OpsWorks.DescribeUserProfiles
import Network.AWS.OpsWorks.DescribeVolumes
import Network.AWS.OpsWorks.DetachElasticLoadBalancer
import Network.AWS.OpsWorks.DisassociateElasticIP
import Network.AWS.OpsWorks.GetHostnameSuggestion
import Network.AWS.OpsWorks.GrantAccess
import Network.AWS.OpsWorks.RebootInstance
import Network.AWS.OpsWorks.RegisterEcsCluster
import Network.AWS.OpsWorks.RegisterElasticIP
import Network.AWS.OpsWorks.RegisterInstance
import Network.AWS.OpsWorks.RegisterRDSDBInstance
import Network.AWS.OpsWorks.RegisterVolume
import Network.AWS.OpsWorks.SetLoadBasedAutoScaling
import Network.AWS.OpsWorks.SetPermission
import Network.AWS.OpsWorks.SetTimeBasedAutoScaling
import Network.AWS.OpsWorks.StartInstance
import Network.AWS.OpsWorks.StartStack
import Network.AWS.OpsWorks.StopInstance
import Network.AWS.OpsWorks.StopStack
import Network.AWS.OpsWorks.Types
import Network.AWS.OpsWorks.UnassignInstance
import Network.AWS.OpsWorks.UnassignVolume
import Network.AWS.OpsWorks.UpdateApp
import Network.AWS.OpsWorks.UpdateElasticIP
import Network.AWS.OpsWorks.UpdateInstance
import Network.AWS.OpsWorks.UpdateLayer
import Network.AWS.OpsWorks.UpdateMyUserProfile
import Network.AWS.OpsWorks.UpdateRDSDBInstance
import Network.AWS.OpsWorks.UpdateStack
import Network.AWS.OpsWorks.UpdateUserProfile
import Network.AWS.OpsWorks.UpdateVolume
import Network.AWS.OpsWorks.Waiters
{- $errors
Error matchers are designed for use with the functions provided by
<http://hackage.haskell.org/package/lens/docs/Control-Exception-Lens.html Control.Exception.Lens>.
This allows catching (and rethrowing) service specific errors returned
by 'OpsWorks'.
-}
{- $operations
Some AWS operations return results that are incomplete and require subsequent
requests in order to obtain the entire result set. The process of sending
subsequent requests to continue where a previous request left off is called
pagination. For example, the 'ListObjects' operation of Amazon S3 returns up to
1000 objects at a time, and you must send subsequent requests with the
appropriate Marker in order to retrieve the next page of results.
Operations that have an 'AWSPager' instance can transparently perform subsequent
requests, correctly setting Markers and other request facets to iterate through
the entire result set of a truncated API operation. Operations which support
this have an additional note in the documentation.
Many operations have the ability to filter results on the server side. See the
individual operation parameters for details.
-}
{- $waiters
Waiters poll by repeatedly sending a request until some remote success condition
configured by the 'Wait' specification is fulfilled. The 'Wait' specification
determines how many attempts should be made, in addition to delay and retry strategies.
-}
| fmapfmapfmap/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks.hs | mpl-2.0 | 23,282 | 0 | 5 | 5,344 | 2,466 | 1,783 | 683 | 530 | 0 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Control.Concurrent.QSemN (module M) where
import "base" Control.Concurrent.QSemN as M
| d191562687/codeworld | codeworld-base/src/Control/Concurrent/QSemN.hs | apache-2.0 | 763 | 0 | 4 | 136 | 25 | 19 | 6 | 4 | 0 |
{-# language TypeFamilies #-}
module Planetary.Library (repo, resolve) where
import Control.Lens
import Data.Foldable (fold)
import Planetary.Core
import Planetary.Support.NameResolution
import Planetary.Support.Parser
import qualified Planetary.Library.FrankExamples as Frank
import qualified Planetary.Library.HaskellForeign as HaskellForeign
import qualified Planetary.Library.Management as Management
import qualified Planetary.Library.Meta as Meta
import qualified Planetary.Library.StrNat as StrNat
import qualified Planetary.Library.Syntax as Syntax
repo :: ResolvedDecls
repo = fold
[ Frank.resolvedDecls
, HaskellForeign.resolvedDecls
, Management.resolvedDecls
, Meta.resolvedDecls
, StrNat.resolvedDecls
, Syntax.resolvedDecls
]
-- TODO: converting between uidmap and hashmap is weird
resolve :: Tm' -> Either ResolutionErr TmI
resolve = resolveTm (repo ^. globalCids . to toList . to fromList)
| joelburget/interplanetary-computation | src/Planetary/Library.hs | bsd-3-clause | 956 | 0 | 9 | 147 | 188 | 119 | 69 | -1 | -1 |
module Types.Basic (
Name, Task, Seed, Signature
) where
import Util.Hash
-- basic types
type Name = String
type Task = String
type Seed = String
type Signature = Digest
| Erdwolf/autotool-bonn | server/src/Types/Basic.hs | gpl-2.0 | 176 | 0 | 4 | 36 | 50 | 33 | 17 | 7 | 0 |
module PackageTests.PreProcess.Check (suite) where
import PackageTests.PackageTester
(PackageSpec(..), assertBuildSucceeded, cabal_build)
import System.FilePath
import Test.HUnit
suite :: FilePath -> Test
suite ghcPath = TestCase $ do
let spec = PackageSpec
{ directory = "PackageTests" </> "PreProcess"
, distPref = Nothing
, configOpts = ["--enable-tests", "--enable-benchmarks"]
}
result <- cabal_build spec ghcPath
assertBuildSucceeded result
| DavidAlphaFox/ghc | libraries/Cabal/Cabal/tests/PackageTests/PreProcess/Check.hs | bsd-3-clause | 513 | 0 | 13 | 117 | 120 | 68 | 52 | 13 | 1 |