code | repo_name | path | language | license | size
---|---|---|---|---|---
stringlengths 5–1M | stringlengths 5–109 | stringlengths 6–208 | stringclasses 1 value | stringclasses 15 values | int64 5–1M
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @author Vamsi Thummala {[email protected]}, Copyright (C) 2013-2015
*
*/
package safe.safesets
package client
import akka.actor.{Actor, ActorRef, Props}
import scala.concurrent.duration.FiniteDuration
object Master {
def props(workTimeout: FiniteDuration): Props =
Props(classOf[Master], workTimeout, 65536, 0.99f, 16) // 2^16 * 1 kB ~ 64MB
def props(
workTimeout: FiniteDuration
, setCacheInitialCapacity: Int
, setCacheLoadFactor: Float
, setCacheConcurrency: Int
): Props =
Props(classOf[Master], workTimeout, setCacheInitialCapacity, setCacheLoadFactor, setCacheConcurrency)
def name: String = "SafeSetsMaster"
}
class Master(
val workTimeout: FiniteDuration
, val setCacheInitialCapacity: Int
, val setCacheLoadFactor: Float
, val setCacheConcurrency: Int
) extends MasterLike
with MasterLikeConfig {
}
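// Illustrative wiring (not part of the original file; the system name and
// timeout value are assumptions):
//   val system = ActorSystem("safe")
//   system.actorOf(Master.props(10.seconds), Master.name)
// This picks the single-argument props overload above, i.e. the ~64 MB
// set-cache defaults.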
| wowmsi/safe | safe-lang/src/main/scala/safe/safesets/client/Master.scala | Scala | apache-2.0 | 1,670 |
object Solution {
import scala.collection.mutable.{HashSet, HashMap, PriorityQueue}
// Dijkstra's Algorithm
def minPathSum4(matrix: Array[Array[Int]]): Long = {
val n = matrix.size
val target = (n - 1, n - 1)
val nodes = for {
i <- 0 until n
j <- 0 until n
} yield (i, j)
// val reverseOrdering = Ordering[(Long, (Int, Int))]
val nodesMap = Array.fill(n * n)(Long.MaxValue)
val nodesSet = HashSet(nodes: _*)
val nodePQueue = PriorityQueue(nodes.map(x => (-Long.MaxValue, x)): _*)
    // every node starts at distance Long.MaxValue (stored negated, since the
    // queue is a max-heap); the real start distance for (0, 0) is enqueued below
val minPath = 0L + matrix(0)(0)
nodesMap(0) = minPath
nodePQueue.enqueue((-minPath, (0, 0)))
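    // Lazy deletion: scala's PriorityQueue has no decrease-key, so improved
    // distances are pushed as fresh entries and stale ones are skipped when
    // dequeued (see extractMin below).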
def extractMin(): (Long, (Int, Int)) = {
var a = nodePQueue.dequeue
while (! nodesSet(a._2)) {
a = nodePQueue.dequeue
}
(-a._1, a._2)
}
while (nodesSet.size > 0) {
val (length, (x, y)) = extractMin()
// get the neighbors
for {
(i, j) <- List((-1, 0), (1, 0), (0, -1), (0, 1))
if x + i >= 0 && x + i < n &&
y + j >= 0 && y + j < n && nodesSet((x + i, y + j))
dis = length + matrix(x + i)(y + j)
if dis < nodesMap((x + i) * n + y + j)
} {
nodesMap((x + i) * n + y + j) = dis
nodePQueue.enqueue((-dis, (x + i, y + j)))
}
nodesSet.remove((x, y))
nodesMap(x * n + y) = length
if (x == n - 1 && y == n - 1)
return length
}
nodesMap(n * n - 1)
}
def main(args: Array[String]) {
val n = readInt
val matrix = (1 to n).map(x => readLine.split(" ").map(_.toInt)).toArray
println(minPathSum4(matrix))
}
}
| advancedxy/hackerrank | project-euler/problem-83/PathSum4Ways.scala | Scala | mit | 1,730 |
package sds.classfile.attribute
import sds.classfile.{ClassfileStream => Stream}
import sds.classfile.constant_pool.{ConstantInfo => CInfo}
import sds.util.DescriptorParser.parse
class LocalVariable(data: Stream, pool: Array[CInfo], private val name: String) extends AttributeInfo {
private var table: Array[Array[Int]] = null
private var nameTable: Array[Array[String]] = null
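    // table(i)     = Array(startPc, endPc, slotIndex) for entry i;
    // nameTable(i) = Array(variableName, parsedDescriptor) for the same entry.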
init()
def init(): Unit = {
val size: Int = data.short
this.table = new Array(size)
this.nameTable = new Array(size)
(0 until size).foreach((i: Int) => {
val start: Int = data.short
val end: Int = data.short + start
val nameIndex: Int = data.short
val descIndex: Int = data.short
val index: Int = data.short
val name: String = extract(nameIndex, pool)
val desc: String = if(descIndex - 1 > 0) parse(extract(descIndex, pool)) else ""
table(i) = Array(start, end, index)
nameTable(i) = Array(name, desc)
})
}
def getTable(): Array[Array[Int]] = table
def getNameTable(): Array[Array[String]] = nameTable
}
| g1144146/sds_for_scala | src/main/scala/sds/classfile/attribute/LocalVariable.scala | Scala | apache-2.0 | 1,163 |
package com.lot.user.model
import com.lot.order.model.Order
import com.lot.trade.model.Trade
import com.lot.position.model.Position
import com.lot.marketEvent.model.MarketEvent
import com.lot.marketEvent.model.TriggeredEvent
import com.lot.security.model.Security
import com.lot.blockAmount.model.BlockAmount
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import com.typesafe.scalalogging.LazyLogging
object Authorize extends LazyLogging {
val READ = "details"
val LIST = "list"
val CREATE = "create"
val UPDATE = "update"
val DELETE = "delete"
/**
* The central authorize method
*/
def checkAccess(access: String, current_user: User, entity: Option[Any]): Boolean = {
logger.debug(s"$access requested by ${current_user.id} : ${current_user.role} for $entity")
current_user.role match {
case UserRoles.ADMIN => adminAccess(access, current_user, entity)
case UserRoles.OPS => adminAccess(access, current_user, entity)
case UserRoles.SIM => traderAccess(access, current_user, entity)
case UserRoles.TRADER => traderAccess(access, current_user, entity)
case UserRoles.GUEST => guestAccess(access, current_user, entity)
case _ => false
}
}
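  // Example call (illustrative values):
  //   Authorize.checkAccess(Authorize.UPDATE, currentUser, Some(order))
  // which returns true only if the role-specific rules below allow it.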
  /**
   * Future-based overload of the central authorize method
   */
def checkAccess(access: String, current_user: User, entityF: Future[Option[Any]]): Future[Boolean] = {
entityF.map { entity =>
checkAccess(access, current_user, entity)
}
}
/**
* All access to admin role is defined here
*/
private def adminAccess(access: String, current_user: User, entity: Option[Any]) = {
entity match {
/*
* Access to Order
*/
case Some(e: Order) => {
access match {
case CREATE => false
case _ => true
}
}
/*
* Access to Trade, Position
*/
case Some(e: Trade) => {
access match {
case READ => true
case _ => false
}
}
case Some(e: Position) => {
access match {
case READ => true
case _ => false
}
}
case Some(e: BlockAmount) => {
access match {
case READ => true
case _ => false
}
}
/*
* Access everything we have forgotten to explicitly authorize
*/
case _ => {
access match {
case _ => true
}
}
}
}
/**
* All access to the trader role is defined here
*/
private def traderAccess(access: String, current_user: User, entity: Option[Any]) = {
entity match {
/*
* Access to Order
*/
case Some(e: Order) => {
access match {
case DELETE => false
case CREATE => true
case _ => e.user_id == current_user.id.get
}
}
/*
* Access to Trade, Position
*/
case Some(e: Trade) => {
access match {
case READ => e.user_id == current_user.id.get
case _ => false
}
}
case Some(e: Position) => {
access match {
case READ => e.user_id == current_user.id.get
case _ => false
}
}
case Some(e: BlockAmount) => {
access match {
case LIST => true
case READ => e.user_id == current_user.id.get
case _ => false
}
}
/*
* Access to others
*/
case Some(e: User) => {
access match {
case READ => true
case _ => false
}
}
case Some(e: Security) => {
access match {
case READ => true
case _ => false
}
}
case Some(e: TriggeredEvent) => {
access match {
case READ => true
case _ => false
}
}
case Some(Order) | Some(Position) | Some(Trade) | Some(BlockAmount) | Some(Security) | Some(TriggeredEvent) => {
access match {
case LIST => true
case _ => false
}
}
case _ => {
access match {
case _ => false
}
}
}
}
/**
* All access to Guest role
*/
  private def guestAccess(access: String, current_user: User, entity: Option[Any]) = {
entity match {
case Some(Trade) | Some(TriggeredEvent) | Some(Security) => {
access match {
case READ | LIST => true
case _ => false
}
}
/*
* Access to something we have forgotten to explicitly authorize
*/
case _ => {
access match {
case _ => false
}
}
}
}
}
| thimmaiah/life_of_a_trade_scala | src/main/scala/com/lot/user/model/Authorize.scala | Scala | apache-2.0 | 4,899 |
package hephaestus
package skybox
import hephaestus.platform._
import hephaestus.io.Buffer
import java.nio.{ByteBuffer, ByteOrder}
import java.io.File
import java.io.InputStream
import java.nio.file.Files
import javax.imageio._
import java.awt.image._
import scodec.codecs._
import scodec.stream._
import cats.implicits._
import scodec.interop.cats._
object Step03 {
val FENCE_TIMEOUT = 100000000
val width = 500
val height = 500
val textureWidth = 1024 //900
val textureHeight = 512 //1201
val cubeTextureWidth = 1024
val cubeTextureHeight = 1024
val scale = 0.25f
val glfw = new GLFW()
val vk = new Vulkan()
val name = "skybox01"
val skyboxFile = "skybox.png"
val terrainFile = "terrain.model"
val terrainTextureFile = "terrain-texture.png"
case class Component(size: Int, num: Int)
case class Header(components: List[Component])
  // successfully decoded the vertex data;
  // now need to integrate it with the rest of the system
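  // Assumed layout of terrain.model (little-endian), inferred from the codec
  // below: a uint32 component count, then two uint32 fields per component
  // (element count and per-element byte size; the exact field order is an
  // assumption), followed by each component's raw bytes (num * size each).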
def decodeFile(): (List[Component], List[Buffer[Byte]]) = {
val perComponent = (uint32L ~ uint32L)
.map {
case (n, s) =>
Component(n.toInt, s.toInt)
}
.contramap((c: Component) => (c.num, c.size))
.fuse
val vertexData = for {
nComponents <- uint32L
      components <- listOfN(provide(nComponents.toInt), perComponent) // one (num, size) pair per component
bufs <- components.traverse(c =>
bytes(c.size.toInt * c.num.toInt).asDecoder.map(b =>
Buffer.direct[Byte](b.toArray: _*)))
} yield (components, bufs)
val decoder: StreamDecoder[(List[Component], List[Buffer[Byte]])] =
decode.once(vertexData)
val fileStream: InputStream =
getClass.getResourceAsStream(s"/$terrainFile")
decoder.decodeInputStream(fileStream).runLog.unsafeRun().head
}
//need to compare image types to supported types
//need to stage images, and set sample swizzles from input type
def loadTexture(name: String): ByteBuffer = {
val file = new File(getClass.getResource(s"/$name").toURI())
val img = ImageIO.read(file)
val width = img.getWidth
val height = img.getHeight
println(s"width $width height $height type ${img.getType}")
val data = img.getRaster.getDataBuffer.asInstanceOf[DataBufferByte].getData
println(s"data size ${data.size}, should be ${width * height * 4}")
val buffer = Buffer.direct(data: _*).value
buffer
}
def initGraphicsPresentQueueFamilyIndex(
instance: Vulkan.Instance,
physicalDevice: Vulkan.PhysicalDevice): Int = {
val qfps = vk.getPhysicalDeviceQueueFamilyProperties(physicalDevice)
qfps.zipWithIndex
.find {
case (q, i) =>
val ss = glfw.getPhysicalDevicePresentationSupport(instance,
physicalDevice,
i)
val gb = (q.queueFlags & Vulkan.QUEUE_GRAPHICS_BIT) > 0
ss && gb
}
.map(_._2)
.get
}
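  // Picks the first memory type whose bit is set in reqs.memoryTypeBits and
  // whose propertyFlags contain every bit of mask; memoryTypeBits is shifted
  // right once per rejected type so that the low bit always corresponds to
  // the type currently under inspection.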
def memoryTypeIndex(ps: Vulkan.PhysicalDeviceMemoryProperties,
reqs: Vulkan.MemoryRequirements,
mask: Int): Int = {
ps.memoryTypes.zipWithIndex
.foldLeft((Option.empty[Int], reqs.memoryTypeBits)) { (t0, t1) =>
(t0, t1) match {
case ((None, bits), (tpe, i)) =>
if ((bits & 1) == 1 && (tpe.propertyFlags & mask) == mask)
(Some(i), bits)
else (None, bits >> 1)
case (idx, _) => idx
}
}
._1
.get
}
def spvFile(name: String): ByteBuffer = {
val file = new File(getClass.getResource(s"/$name").toURI())
val bytes = Files.readAllBytes(file.toPath())
val buf =
ByteBuffer.allocateDirect(bytes.size).order(ByteOrder.nativeOrder())
buf.put(bytes, 0, bytes.size)
buf
}
def initShaderModule(name: String,
device: Vulkan.Device): Vulkan.ShaderModule = {
val spv = spvFile(name)
val info = new Vulkan.ShaderModuleCreateInfo(
flags = 0,
codeSize = spv.capacity,
code = spv
)
vk.createShaderModule(device, info)
}
def main(args: Array[String]): Unit = {
val (comps, datas) = decodeFile()
//position, normal, uv
val terrainVertexBytes = datas.head.value
val terrainPolygonBytes = datas(1).value
glfw.init()
val instance = vk.createInstance(
new Vulkan.InstanceCreateInfo(
applicationInfo = new Vulkan.ApplicationInfo(applicationName = name,
applicationVersion = 1,
engineName = name,
engineVersion = 1,
apiVersion =
Vulkan.API_VERSION_1_0),
enabledExtensionNames = (Vulkan.EXT_DEBUG_REPORT_EXTENSION_NAME :: glfw
.getRequiredInstanceExtensions()
.toList).toArray,
enabledLayerNames = Array(Vulkan.LAYER_LUNARG_STANDARD_VALIDATION_NAME,
Vulkan.LAYER_LUNARG_API_DUMP_NAME)
))
vk.debugReport(instance)
glfw.windowHint(GLFW.CLIENT_API, GLFW.NO_API)
val window = glfw.createWindow(width, height, name)
val surface = glfw.createWindowSurface(instance, window)
val physicalDevice = vk.enumeratePhysicalDevices(instance)(0)
val qi = initGraphicsPresentQueueFamilyIndex(instance, physicalDevice)
val device = vk.createDevice(
physicalDevice,
new Vulkan.DeviceCreateInfo(
queueCreateInfos = Array(
new Vulkan.DeviceQueueCreateInfo(flags = 0,
queueFamilyIndex = qi,
queuePriorities = Array(0f))),
enabledExtensionNames = Array(Vulkan.SWAPCHAIN_EXTENSION_NAME)
)
)
val commandPool = vk.createCommandPool(
device,
new Vulkan.CommandPoolCreateInfo(flags = Vulkan.COMMAND_POOL_BLANK_FLAG,
queueFamilyIndex = qi))
val primaryCommandBuffer = vk.allocateCommandBuffers(
device,
new Vulkan.CommandBufferAllocateInfo(
commandPool = commandPool,
level = Vulkan.COMMAND_BUFFER_LEVEL_PRIMARY,
commandBufferCount = 1))
val secondaryCommandBuffer = vk.allocateCommandBuffers(
device,
new Vulkan.CommandBufferAllocateInfo(
commandPool = commandPool,
level = Vulkan.COMMAND_BUFFER_LEVEL_SECONDARY,
commandBufferCount = 1))
val formats = vk.getPhysicalDeviceSurfaceFormats(physicalDevice, surface)
val swapchainFormat =
if (formats(0).format == Vulkan.FORMAT_UNDEFINED)
Vulkan.FORMAT_B8G8R8A8_UNORM
else formats(0).format
val surfaceCapabilities =
vk.getPhysicalDeviceSurfaceCapabilities(physicalDevice, surface)
val swapchainExtent =
if (surfaceCapabilities.currentExtent.width == 0xFFFFFFFF) {
val ewidth =
if (width < surfaceCapabilities.minImageExtent.width)
surfaceCapabilities.minImageExtent.width
else if (width > surfaceCapabilities.maxImageExtent.width)
surfaceCapabilities.maxImageExtent.width
else width
val eheight =
if (height < surfaceCapabilities.minImageExtent.height)
surfaceCapabilities.minImageExtent.height
else if (height > surfaceCapabilities.maxImageExtent.height)
surfaceCapabilities.maxImageExtent.height
else height
new Vulkan.Extent2D(ewidth, eheight)
} else surfaceCapabilities.currentExtent
val preTransform =
if ((surfaceCapabilities.supportedTransforms & Vulkan.SURFACE_TRANSFORM_IDENTITY_BIT) > 0)
Vulkan.SURFACE_TRANSFORM_IDENTITY_BIT
else surfaceCapabilities.currentTransform
val swapchain = vk.createSwapchain(
device,
new Vulkan.SwapchainCreateInfo(
flags = 0,
surface = surface,
minImageCount = surfaceCapabilities.minImageCount,
imageFormat = swapchainFormat,
imageExtent = swapchainExtent,
preTransform = preTransform,
compositeAlpha = Vulkan.COMPOSITE_ALPHA_OPAQUE_BIT,
imageArrayLayers = 1,
presentMode = Vulkan.PRESENT_MODE_FIFO,
clipped = true,
imageColorSpace = Vulkan.COLORSPACE_SRGB_NONLINEAR,
imageUsage = Vulkan.IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
imageSharingMode = Vulkan.SHARING_MODE_EXCLUSIVE,
queueFamilyIndices = Array.empty[Int]
)
)
val swapchainImages = vk.getSwapchainImages(device, swapchain)
val imageViews = swapchainImages.map { i =>
vk.createImageView(
device,
new Vulkan.ImageViewCreateInfo(
flags = 0,
image = i,
viewType = Vulkan.IMAGE_VIEW_TYPE_2D,
format = swapchainFormat,
components = new Vulkan.ComponentMapping(
r = Vulkan.COMPONENT_SWIZZLE_R,
g = Vulkan.COMPONENT_SWIZZLE_G,
b = Vulkan.COMPONENT_SWIZZLE_B,
a = Vulkan.COMPONENT_SWIZZLE_A
),
subresourceRange =
new Vulkan.ImageSubresourceRange(aspectMask =
Vulkan.IMAGE_ASPECT_COLOR_BIT,
baseMipLevel = 0,
levelCount = 1,
baseArrayLayer = 0,
layerCount = 1)
)
)
}
val formatProperties = vk.getPhysicalDeviceFormatProperties(
physicalDevice,
Vulkan.FORMAT_D16_UNORM)
val imageTiling =
if (Vulkan.FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT & formatProperties.linearTilingFeatures)
Vulkan.IMAGE_TILING_LINEAR
else if (Vulkan.FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT & formatProperties.optimalTilingFeatures)
Vulkan.IMAGE_TILING_OPTIMAL
else throw new Error("depth not supported")
val depthImage = vk.createImage(
device,
new Vulkan.ImageCreateInfo(
flags = 0,
imageType = Vulkan.IMAGE_TYPE_2D,
format = Vulkan.FORMAT_D16_UNORM,
extent = new Vulkan.Extent3D(width = swapchainExtent.width,
height = swapchainExtent.height,
depth = 1),
mipLevels = 1,
arrayLayers = 1,
samples = Vulkan.SAMPLE_COUNT_1_BIT,
tiling = imageTiling,
initialLayout = Vulkan.IMAGE_LAYOUT_UNDEFINED,
usage = Vulkan.IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
queueFamilyIndices = Array.empty,
sharingMode = Vulkan.SHARING_MODE_EXCLUSIVE
)
)
val memoryProperties = vk.getPhysicalDeviceMemoryProperties(physicalDevice)
val depthImageMemoryRequirements =
vk.getImageMemoryRequirements(device, depthImage)
val depthImageMemoryTypeIndex = memoryProperties.memoryTypes.zipWithIndex
.foldLeft(
(Option.empty[Int], depthImageMemoryRequirements.memoryTypeBits)) {
(t0, t1) =>
(t0, t1) match {
case ((None, bits), (tpe, i)) =>
if ((bits & 1) == 1) (Some(i), bits) else (None, bits >> 1)
case (prev, _) => prev
}
}
._1
.get
val depthImageMemory = vk.allocateMemory(
device,
new Vulkan.MemoryAllocateInfo(
allocationSize = depthImageMemoryRequirements.size,
memoryTypeIndex = depthImageMemoryTypeIndex))
vk.bindImageMemory(device, depthImage, depthImageMemory, 0)
val depthImageView = vk.createImageView(
device,
new Vulkan.ImageViewCreateInfo(
flags = 0,
image = depthImage,
viewType = Vulkan.IMAGE_VIEW_TYPE_2D,
format = Vulkan.FORMAT_D16_UNORM,
components = new Vulkan.ComponentMapping(
r = Vulkan.COMPONENT_SWIZZLE_R,
g = Vulkan.COMPONENT_SWIZZLE_G,
b = Vulkan.COMPONENT_SWIZZLE_B,
a = Vulkan.COMPONENT_SWIZZLE_A
),
subresourceRange =
new Vulkan.ImageSubresourceRange(aspectMask =
Vulkan.IMAGE_ASPECT_DEPTH_BIT,
baseMipLevel = 0,
levelCount = 1,
baseArrayLayer = 0,
layerCount = 1)
)
)
val uniformData = Buffer.direct(scale, 0f).value
val cubeUniformData =
hephaestus.lunarg.tutorial.Cube.uniformData(width, height, 0)
val uniformBuffer = vk.createBuffer(
device,
new Vulkan.BufferCreateInfo(
usage = Vulkan.BUFFER_USAGE_UNIFORM_BUFFER_BIT,
size = uniformData.capacity,
queueFamilyIndices = Array.empty[Int],
sharingMode = Vulkan.SHARING_MODE_EXCLUSIVE,
flags = 0
)
)
val uniformBufferMemoryRequirements =
vk.getBufferMemoryRequirements(device, uniformBuffer)
val uniformBufferMemoryTypeIndex = memoryTypeIndex(
memoryProperties,
uniformBufferMemoryRequirements,
Vulkan.MEMORY_PROPERTY_HOST_VISIBLE_BIT | Vulkan.MEMORY_PROPERTY_HOST_COHERENT_BIT)
val uniformBufferMemory = vk.allocateMemory(
device,
new Vulkan.MemoryAllocateInfo(allocationSize =
uniformBufferMemoryRequirements.size,
memoryTypeIndex =
uniformBufferMemoryTypeIndex))
val uniformDataPtr = vk.mapMemory(device,
uniformBufferMemory,
0,
uniformBufferMemoryRequirements.size,
0)
vk.loadMemory(uniformDataPtr, uniformData)
vk.unmapMemory(device, uniformBufferMemory)
vk.bindBufferMemory(device, uniformBuffer, uniformBufferMemory, 0)
val cubeUniformBuffer = vk.createBuffer(
device,
new Vulkan.BufferCreateInfo(
usage = Vulkan.BUFFER_USAGE_UNIFORM_BUFFER_BIT,
size = cubeUniformData.capacity,
queueFamilyIndices = Array.empty[Int],
sharingMode = Vulkan.SHARING_MODE_EXCLUSIVE,
flags = 0
)
)
val cubeUniformBufferMemoryRequirements =
vk.getBufferMemoryRequirements(device, cubeUniformBuffer)
val cubeUniformBufferMemoryTypeIndex = memoryTypeIndex(
memoryProperties,
cubeUniformBufferMemoryRequirements,
Vulkan.MEMORY_PROPERTY_HOST_VISIBLE_BIT | Vulkan.MEMORY_PROPERTY_HOST_COHERENT_BIT)
val cubeUniformBufferMemory = vk.allocateMemory(
device,
new Vulkan.MemoryAllocateInfo(allocationSize =
cubeUniformBufferMemoryRequirements.size,
memoryTypeIndex =
cubeUniformBufferMemoryTypeIndex))
val cubeUniformDataPtr = vk.mapMemory(
device,
cubeUniformBufferMemory,
0,
cubeUniformBufferMemoryRequirements.size,
0)
vk.loadMemory(cubeUniformDataPtr, cubeUniformData)
vk.unmapMemory(device, cubeUniformBufferMemory)
vk.bindBufferMemory(device, cubeUniformBuffer, cubeUniformBufferMemory, 0)
val textureFormatProperties = vk.getPhysicalDeviceFormatProperties(
physicalDevice,
Vulkan.FORMAT_R8G8B8A8_UNORM)
if (!(Vulkan.FORMAT_FEATURE_SAMPLED_IMAGE_BIT & textureFormatProperties.linearTilingFeatures))
throw new Error("image needs staging!")
val textureImage = vk.createImage(
device,
new Vulkan.ImageCreateInfo(
flags = 0,
imageType = Vulkan.IMAGE_TYPE_2D,
format = Vulkan.FORMAT_R8G8B8A8_UNORM,
extent = new Vulkan.Extent3D(textureWidth, textureHeight, 1),
mipLevels = 1,
arrayLayers = 1,
samples = 1,
tiling = Vulkan.IMAGE_TILING_LINEAR,
usage = Vulkan.IMAGE_USAGE_SAMPLED_BIT,
sharingMode = Vulkan.SHARING_MODE_EXCLUSIVE,
queueFamilyIndices = Array.empty,
initialLayout = Vulkan.IMAGE_LAYOUT_PREINITIALIZED
)
)
val textureImageMemoryRequirements =
vk.getImageMemoryRequirements(device, textureImage)
val textureMemory = vk.allocateMemory(
device,
new Vulkan.MemoryAllocateInfo(
allocationSize = textureImageMemoryRequirements.size,
memoryTypeIndex = memoryTypeIndex(
memoryProperties,
textureImageMemoryRequirements,
Vulkan.MEMORY_PROPERTY_HOST_VISIBLE_BIT | Vulkan.MEMORY_PROPERTY_HOST_COHERENT_BIT)
)
)
vk.bindImageMemory(device, textureImage, textureMemory, 0)
val textureData = loadTexture(skyboxFile)
val textureDataPtr = vk.mapMemory(device,
textureMemory,
0,
textureImageMemoryRequirements.size,
0)
vk.loadMemory(textureDataPtr, textureData)
vk.unmapMemory(device, textureMemory)
val textureImageView = vk.createImageView(
device,
new Vulkan.ImageViewCreateInfo(
flags = 0,
image = textureImage,
viewType = Vulkan.IMAGE_VIEW_TYPE_2D,
format = Vulkan.FORMAT_R8G8B8A8_UNORM,
components = new Vulkan.ComponentMapping(
Vulkan.COMPONENT_SWIZZLE_A,
Vulkan.COMPONENT_SWIZZLE_B,
Vulkan.COMPONENT_SWIZZLE_G,
Vulkan.COMPONENT_SWIZZLE_R
),
subresourceRange =
new Vulkan.ImageSubresourceRange(aspectMask =
Vulkan.IMAGE_ASPECT_COLOR_BIT,
baseMipLevel = 0,
levelCount = 1,
baseArrayLayer = 0,
layerCount = 1)))
val cubeTextureFormatProperties = vk.getPhysicalDeviceFormatProperties(
physicalDevice,
Vulkan.FORMAT_R8G8B8A8_UNORM)
if (!(Vulkan.FORMAT_FEATURE_SAMPLED_IMAGE_BIT & cubeTextureFormatProperties.linearTilingFeatures))
throw new Error("image needs staging!")
val cubeTextureImage = vk.createImage(
device,
new Vulkan.ImageCreateInfo(
flags = 0,
imageType = Vulkan.IMAGE_TYPE_2D,
format = Vulkan.FORMAT_R8G8B8A8_UNORM,
extent = new Vulkan.Extent3D(cubeTextureWidth, cubeTextureHeight, 1),
mipLevels = 1,
arrayLayers = 1,
samples = 1,
tiling = Vulkan.IMAGE_TILING_LINEAR,
usage = Vulkan.IMAGE_USAGE_SAMPLED_BIT,
sharingMode = Vulkan.SHARING_MODE_EXCLUSIVE,
queueFamilyIndices = Array.empty,
initialLayout = Vulkan.IMAGE_LAYOUT_PREINITIALIZED
)
)
val cubeTextureImageMemoryRequirements =
vk.getImageMemoryRequirements(device, cubeTextureImage)
val cubeTextureMemory = vk.allocateMemory(
device,
new Vulkan.MemoryAllocateInfo(
allocationSize = cubeTextureImageMemoryRequirements.size,
memoryTypeIndex = memoryTypeIndex(
memoryProperties,
cubeTextureImageMemoryRequirements,
Vulkan.MEMORY_PROPERTY_HOST_VISIBLE_BIT | Vulkan.MEMORY_PROPERTY_HOST_COHERENT_BIT)
)
)
vk.bindImageMemory(device, cubeTextureImage, cubeTextureMemory, 0)
val cubeTextureData = loadTexture(terrainTextureFile) // lunarg.tutorial.Cube.textureData(cubeTextureWidth, cubeTextureHeight, 0)
val cubeTextureDataPtr = vk.mapMemory(
device,
cubeTextureMemory,
0,
cubeTextureImageMemoryRequirements.size,
0)
vk.loadMemory(cubeTextureDataPtr, cubeTextureData)
vk.unmapMemory(device, cubeTextureMemory)
val cubeTextureImageView = vk.createImageView(
device,
new Vulkan.ImageViewCreateInfo(
flags = 0,
image = cubeTextureImage,
viewType = Vulkan.IMAGE_VIEW_TYPE_2D,
format = Vulkan.FORMAT_R8G8B8A8_UNORM,
components = new Vulkan.ComponentMapping(
Vulkan.COMPONENT_SWIZZLE_A,
Vulkan.COMPONENT_SWIZZLE_B,
Vulkan.COMPONENT_SWIZZLE_G,
Vulkan.COMPONENT_SWIZZLE_R
),
subresourceRange =
new Vulkan.ImageSubresourceRange(aspectMask =
Vulkan.IMAGE_ASPECT_COLOR_BIT,
baseMipLevel = 0,
levelCount = 1,
baseArrayLayer = 0,
layerCount = 1)
)
)
val textureSampler = vk.createSampler(
device,
new Vulkan.SamplerCreateInfo(
flags = 0,
magFilter = Vulkan.FILTER_NEAREST,
minFilter = Vulkan.FILTER_NEAREST,
mipmapMode = Vulkan.SAMPLER_MIPMAP_MODE_NEAREST,
addressModeU = Vulkan.SAMPLER_ADDRESS_MODE_REPEAT,
addressModeV = Vulkan.SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
addressModeW = Vulkan.SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
mipLodBias = 0f,
anisotropyEnable = false,
maxAnisotropy = 0f,
compareOp = Vulkan.COMPARE_OP_NEVER,
minLod = 0f,
maxLod = 0f,
compareEnable = false,
borderColor = Vulkan.BORDER_COLOR_FLOAT_OPAQUE_WHITE,
unnormalizedCoordinates = false
)
)
val descriptorSetLayout = vk.createDescriptorSetLayout(
device,
new Vulkan.DescriptorSetLayoutCreateInfo(
flags = 0,
bindings = Array(
new Vulkan.DescriptorSetLayoutBinding(
binding = 0,
descriptorType = Vulkan.DESCRIPTOR_TYPE_UNIFORM_BUFFER,
descriptorCount = 1,
stageFlags = Vulkan.SHADER_STAGE_VERTEX_BIT,
immutableSamplers = Array.empty[Vulkan.Sampler]
),
new Vulkan.DescriptorSetLayoutBinding(
binding = 1,
descriptorType = Vulkan.DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
descriptorCount = 1,
stageFlags = Vulkan.SHADER_STAGE_FRAGMENT_BIT,
immutableSamplers = Array.empty
)
)
)
)
val pipelineLayout = vk.createPipelineLayout(
device,
new Vulkan.PipelineLayoutCreateInfo(flags = 0,
setLayouts =
Array(descriptorSetLayout),
pushConstantRanges = Array.empty))
val descriptorPool = vk.createDescriptorPool(
device,
new Vulkan.DescriptorPoolCreateInfo(
flags = 0,
maxSets = 2,
poolSizes = Array(
new Vulkan.DescriptorPoolSize(
tpe = Vulkan.DESCRIPTOR_TYPE_UNIFORM_BUFFER,
descriptorCount = 2),
new Vulkan.DescriptorPoolSize(
tpe = Vulkan.DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
descriptorCount = 2)
)
)
)
val descriptorSets = vk.allocateDescriptorSets(
device,
new Vulkan.DescriptorSetAllocateInfo(
descriptorPool = descriptorPool,
setLayouts = Array(descriptorSetLayout, descriptorSetLayout)))
  //this could be dynamic
  //(I've forgotten why we couldn't use the same uniform buffer in the first place)
val writeDescriptorSets = Array(
new Vulkan.WriteDescriptorSet(
dstSet = descriptorSets(0),
dstBinding = 0,
dstArrayElement = 0,
descriptorCount = 1,
descriptorType = Vulkan.DESCRIPTOR_TYPE_UNIFORM_BUFFER,
imageInfo = Array.empty[Vulkan.DescriptorImageInfo],
bufferInfo = Array(
new Vulkan.DescriptorBufferInfo(buffer = uniformBuffer,
offset = 0,
range = uniformData.capacity)),
texelBufferView = Array.empty[Vulkan.BufferView]
),
new Vulkan.WriteDescriptorSet(
dstSet = descriptorSets(0),
dstBinding = 1,
dstArrayElement = 0,
descriptorCount = 1,
descriptorType = Vulkan.DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
imageInfo = Array(
new Vulkan.DescriptorImageInfo(sampler = textureSampler,
imageView = textureImageView,
imageLayout =
Vulkan.IMAGE_LAYOUT_GENERAL)),
bufferInfo = Array.empty,
texelBufferView = Array.empty
),
new Vulkan.WriteDescriptorSet(
dstSet = descriptorSets(1),
dstBinding = 0,
dstArrayElement = 0,
descriptorCount = 1,
descriptorType = Vulkan.DESCRIPTOR_TYPE_UNIFORM_BUFFER,
imageInfo = Array.empty[Vulkan.DescriptorImageInfo],
bufferInfo = Array(
new Vulkan.DescriptorBufferInfo(buffer = cubeUniformBuffer,
offset = 0,
range = cubeUniformData.capacity)),
texelBufferView = Array.empty[Vulkan.BufferView]
),
new Vulkan.WriteDescriptorSet(
dstSet = descriptorSets(1),
dstBinding = 1,
dstArrayElement = 0,
descriptorCount = 1,
descriptorType = Vulkan.DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
imageInfo = Array(
new Vulkan.DescriptorImageInfo(sampler = textureSampler,
imageView = cubeTextureImageView,
imageLayout =
Vulkan.IMAGE_LAYOUT_GENERAL)),
bufferInfo = Array.empty,
texelBufferView = Array.empty
)
)
vk.updateDescriptorSets(device,
4,
writeDescriptorSets,
0,
Array.empty[Vulkan.CopyDescriptorSet])
val vertexData =
Buffer.direct(-1f, -1f, -1f, 1f, 1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f).value
val cubeVertexData = terrainVertexBytes
val vertexBuffer = vk.createBuffer(
device,
new Vulkan.BufferCreateInfo(
usage = Vulkan.BUFFER_USAGE_VERTEX_BUFFER_BIT,
size = vertexData.capacity + cubeVertexData.capacity,
queueFamilyIndices = Array.empty[Int],
sharingMode = Vulkan.SHARING_MODE_EXCLUSIVE,
flags = 0
)
)
val vertexBufferMemoryRequirements =
vk.getBufferMemoryRequirements(device, vertexBuffer)
val vertexBufferMemoryTypeIndex = memoryTypeIndex(
memoryProperties,
vertexBufferMemoryRequirements,
Vulkan.MEMORY_PROPERTY_HOST_VISIBLE_BIT | Vulkan.MEMORY_PROPERTY_HOST_COHERENT_BIT)
val vertexBufferMemory = vk.allocateMemory(
device,
new Vulkan.MemoryAllocateInfo(allocationSize =
vertexBufferMemoryRequirements.size,
memoryTypeIndex =
vertexBufferMemoryTypeIndex))
val vertexDataPtr = vk.mapMemory(device,
vertexBufferMemory,
0,
vertexBufferMemoryRequirements.size,
0)
vk.loadMemory(vertexDataPtr, vertexData)
vk.loadMemory(vertexDataPtr + vertexData.capacity, cubeVertexData)
vk.unmapMemory(device, vertexBufferMemory)
vk.bindBufferMemory(device, vertexBuffer, vertexBufferMemory, 0)
val elementData = terrainPolygonBytes
val elementBuffer = vk.createBuffer(
device,
new Vulkan.BufferCreateInfo(
usage = Vulkan.BUFFER_USAGE_INDEX_BUFFER_BIT,
size = elementData.capacity,
queueFamilyIndices = Array.empty[Int],
sharingMode = Vulkan.SHARING_MODE_EXCLUSIVE,
flags = 0
)
)
val elementBufferMemoryRequirements =
vk.getBufferMemoryRequirements(device, elementBuffer)
val elementBufferMemoryTypeIndex = memoryTypeIndex(
memoryProperties,
elementBufferMemoryRequirements,
Vulkan.MEMORY_PROPERTY_HOST_VISIBLE_BIT | Vulkan.MEMORY_PROPERTY_HOST_COHERENT_BIT)
val elementBufferMemory = vk.allocateMemory(
device,
new Vulkan.MemoryAllocateInfo(allocationSize =
elementBufferMemoryRequirements.size,
memoryTypeIndex =
elementBufferMemoryTypeIndex))
val elementDataPtr = vk.mapMemory(device,
elementBufferMemory,
0,
elementBufferMemoryRequirements.size,
0)
vk.loadMemory(elementDataPtr, elementData)
vk.unmapMemory(device, elementBufferMemory)
vk.bindBufferMemory(device, elementBuffer, elementBufferMemory, 0)
val renderPass = vk.createRenderPass(
device,
new Vulkan.RenderPassCreateInfo(
flags = 0,
attachments = Array(
new Vulkan.AttachmentDescription(
format = swapchainFormat,
samples = Vulkan.SAMPLE_COUNT_1_BIT,
loadOp = Vulkan.ATTACHMENT_LOAD_OP_CLEAR,
storeOp = Vulkan.ATTACHMENT_STORE_OP_STORE,
stencilLoadOp = Vulkan.ATTACHMENT_LOAD_OP_DONT_CARE,
stencilStoreOp = Vulkan.ATTACHMENT_STORE_OP_DONT_CARE,
initialLayout = Vulkan.IMAGE_LAYOUT_UNDEFINED,
finalLayout = Vulkan.IMAGE_LAYOUT_PRESENT_SRC_KHR,
flags = 0
),
new Vulkan.AttachmentDescription(
format = Vulkan.FORMAT_D16_UNORM,
samples = Vulkan.SAMPLE_COUNT_1_BIT,
loadOp = Vulkan.ATTACHMENT_LOAD_OP_CLEAR,
storeOp = Vulkan.ATTACHMENT_STORE_OP_DONT_CARE,
stencilLoadOp = Vulkan.ATTACHMENT_LOAD_OP_DONT_CARE,
stencilStoreOp = Vulkan.ATTACHMENT_STORE_OP_DONT_CARE,
initialLayout = Vulkan.IMAGE_LAYOUT_UNDEFINED,
finalLayout = Vulkan.IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
flags = 0
)
),
subpasses = Array(
new Vulkan.SubpassDescription(
pipelineBindPoint = Vulkan.PIPELINE_BIND_POINT_GRAPHICS,
flags = 0,
inputAttachments = Array.empty[Vulkan.AttachmentReference],
colorAttachments = Array(
new Vulkan.AttachmentReference(
attachment = 0,
layout = Vulkan.IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)),
resolveAttachments = Array.empty[Vulkan.AttachmentReference],
depthStencilAttachment = Array(new Vulkan.AttachmentReference(
attachment = 1,
layout = Vulkan.IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)),
preserveAttachments = Array.empty[Int]
)),
dependencies = Array.empty
)
)
val vertexModule = initShaderModule("skybox.vert.spv", device)
val fragmentModule = initShaderModule("skybox.frag.spv", device)
val cubeVertexModule = initShaderModule("terrain.vert.spv", device)
val cubeFragmentModule = initShaderModule("texture.frag.spv", device)
val framebuffers = imageViews.map { v =>
vk.createFramebuffer(device,
new Vulkan.FramebufferCreateInfo(
flags = 0,
renderPass = renderPass,
attachments = Array(v, depthImageView),
width = width,
height = height,
layers = 1))
}
vk.beginCommandBuffer(
secondaryCommandBuffer,
new Vulkan.CommandBufferBeginInfo(
Vulkan.COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT | Vulkan.COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
new Vulkan.CommandBufferInheritanceInfo(renderPass = renderPass)
)
)
    //can bind multiple vertex buffers to a pipeline (i.e. give them different strides and attributes)
    //but what is the point, since they need to go into the same shader pipeline?
    //presumably this is for when the attributes are stored in separate buffers
val vertexInputStateCreateInfo =
new Vulkan.PipelineVertexInputStateCreateInfo(
flags = 0,
vertexBindingDescriptions = Array(
new Vulkan.VertexInputBindingDescription(
binding = 0,
inputRate = Vulkan.VERTEX_INPUT_RATE_VERTEX,
stride = 8)),
vertexAttributeDescriptions = Array(
new Vulkan.VertexInputAttributeDescription(
binding = 0,
location = 0,
format = Vulkan.FORMAT_R32G32_SFLOAT,
offset = 0
))
)
val dynamicState = new Vulkan.PipelineDynamicStateCreateInfo(
flags = 0,
dynamicStates =
Array(Vulkan.DYNAMIC_STATE_VIEWPORT, Vulkan.DYNAMIC_STATE_SCISSOR))
val inputAssemblyStateCreateInfo =
new Vulkan.PipelineInputAssemblyStateCreateInfo(
flags = 0,
topology = Vulkan.PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
primitiveRestartEnable = false)
val rasterizationStateCreateInfo =
new Vulkan.PipelineRasterizationStateCreateInfo(
flags = 0,
polygonMode = Vulkan.POLYGON_MODE_FILL,
cullMode = Vulkan.CULL_MODE_BACK_BIT,
frontFace = Vulkan.FRONT_FACE_COUNTER_CLOCKWISE,
depthClampEnable = true,
rasterizerDiscardEnable = false,
depthBiasEnable = false,
depthBiasConstantFactor = 0,
depthBiasClamp = 0,
depthBiasSlopeFactor = 0,
lineWidth = 1f
)
val colorBlendAttachmentState =
new Vulkan.PipelineColorBlendAttachmentState(
colorWriteMask = 0xf,
blendEnable = false,
alphaBlendOp = Vulkan.BLEND_OP_ADD,
colorBlendOp = Vulkan.BLEND_OP_ADD,
srcColorBlendFactor = Vulkan.BLEND_FACTOR_ZERO,
dstColorBlendFactor = Vulkan.BLEND_FACTOR_ZERO,
srcAlphaBlendFactor = Vulkan.BLEND_FACTOR_ZERO,
dstAlphaBlendFactor = Vulkan.BLEND_FACTOR_ZERO
)
val colorBlendStateCreateInfo =
new Vulkan.PipelineColorBlendStateCreateInfo(
flags = 0,
attachments = Array(colorBlendAttachmentState),
logicOpEnable = false,
logicOp = Vulkan.LOGIC_OP_NO_OP,
blendConstants = Array(1f, 1f, 1f, 1f)
)
val viewportStateCreateInfo = new Vulkan.PipelineViewportStateCreateInfo(
flags = 0,
viewportCount = 1,
viewports = Array.empty,
scissorCount = 1,
scissors = Array.empty)
val depthStencilOpState = new Vulkan.StencilOpState(
failOp = Vulkan.STENCIL_OP_KEEP,
passOp = Vulkan.STENCIL_OP_KEEP,
compareOp = Vulkan.COMPARE_OP_ALWAYS,
compareMask = 0,
reference = 0,
depthFailOp = Vulkan.STENCIL_OP_KEEP,
writeMask = 0
)
val depthStencilStateCreateInfo =
new Vulkan.PipelineDepthStencilStateCreateInfo(
flags = 0,
depthTestEnable = true,
depthWriteEnable = true,
depthCompareOp = Vulkan.COMPARE_OP_LESS_OR_EQUAL,
depthBoundsTestEnable = false,
minDepthBounds = 0,
maxDepthBounds = 0,
stencilTestEnable = false,
back = depthStencilOpState,
front = depthStencilOpState
)
val multisampleStateCreateInfo =
new Vulkan.PipelineMultisampleStateCreateInfo(
flags = 0,
sampleMask = 0,
rasterizationSamples = Vulkan.SAMPLE_COUNT_1_BIT,
sampleShadingEnable = false,
alphaToCoverageEnable = false,
alphaToOneEnable = false,
minSampleShading = 0f
)
val pipelineInfo = new Vulkan.GraphicsPipelineCreateInfo(
layout = pipelineLayout,
basePipelineHandle = new Vulkan.Pipeline(0),
basePipelineIndex = 0,
flags = 0,
vertexInputState = vertexInputStateCreateInfo,
inputAssemblyState = inputAssemblyStateCreateInfo,
rasterizationState = rasterizationStateCreateInfo,
colorBlendState = colorBlendStateCreateInfo,
multisampleState = multisampleStateCreateInfo,
dynamicState = dynamicState,
viewportState = viewportStateCreateInfo,
depthStencilState = depthStencilStateCreateInfo,
stages = Array(
new Vulkan.PipelineShaderStageCreateInfo(
flags = 0,
stage = Vulkan.SHADER_STAGE_VERTEX_BIT,
module = vertexModule,
name = "main"
),
new Vulkan.PipelineShaderStageCreateInfo(
flags = 0,
stage = Vulkan.SHADER_STAGE_FRAGMENT_BIT,
module = fragmentModule,
name = "main")
),
renderPass = renderPass,
subpass = 0
)
val cubeVertexInputStateCreateInfo =
new Vulkan.PipelineVertexInputStateCreateInfo(
flags = 0,
vertexBindingDescriptions = Array(
new Vulkan.VertexInputBindingDescription(
binding = 0,
inputRate = Vulkan.VERTEX_INPUT_RATE_VERTEX,
stride = 32)),
vertexAttributeDescriptions = Array(
new Vulkan.VertexInputAttributeDescription(
binding = 0,
location = 0,
format = Vulkan.FORMAT_R32G32B32_SFLOAT,
offset = 0
),
new Vulkan.VertexInputAttributeDescription(
binding = 0,
location = 1,
format = Vulkan.FORMAT_R32G32_SFLOAT,
offset = 24
)
)
)
val cubeRasterizationStateCreateInfo =
new Vulkan.PipelineRasterizationStateCreateInfo(
flags = 0,
polygonMode = Vulkan.POLYGON_MODE_FILL,
cullMode = Vulkan.CULL_MODE_BACK_BIT,
frontFace = Vulkan.FRONT_FACE_CLOCKWISE,
depthClampEnable = true,
rasterizerDiscardEnable = false,
depthBiasEnable = false,
depthBiasConstantFactor = 0,
depthBiasClamp = 0,
depthBiasSlopeFactor = 0,
lineWidth = 1f
)
val cubePipelineInfo = new Vulkan.GraphicsPipelineCreateInfo(
layout = pipelineLayout,
basePipelineHandle = new Vulkan.Pipeline(0),
basePipelineIndex = 0,
flags = 0,
vertexInputState = cubeVertexInputStateCreateInfo,
inputAssemblyState = inputAssemblyStateCreateInfo,
rasterizationState = cubeRasterizationStateCreateInfo,
colorBlendState = colorBlendStateCreateInfo,
multisampleState = multisampleStateCreateInfo,
dynamicState = dynamicState,
viewportState = viewportStateCreateInfo,
depthStencilState = depthStencilStateCreateInfo,
stages = Array(
new Vulkan.PipelineShaderStageCreateInfo(
flags = 0,
stage = Vulkan.SHADER_STAGE_VERTEX_BIT,
module = cubeVertexModule,
name = "main"
),
new Vulkan.PipelineShaderStageCreateInfo(
flags = 0,
stage = Vulkan.SHADER_STAGE_FRAGMENT_BIT,
module = cubeFragmentModule,
name = "main")
),
renderPass = renderPass,
subpass = 0
)
val pipelines = vk.createGraphicsPipelines(
device,
2,
Array(pipelineInfo, cubePipelineInfo))
vk.cmdBindPipeline(secondaryCommandBuffer,
Vulkan.PIPELINE_BIND_POINT_GRAPHICS,
pipelines(0))
//set number should be specified in the shader
vk.cmdBindDescriptorSets(secondaryCommandBuffer,
Vulkan.PIPELINE_BIND_POINT_GRAPHICS,
pipelineLayout,
0,
1,
Array(descriptorSets(0)),
0,
Array.empty)
//we need to bind this twice, because the format of the data is different. If not, we would just bind it once and set the offset in the draw
vk.cmdBindVertexBuffers(secondaryCommandBuffer,
0,
1,
Array(vertexBuffer),
Array(0))
vk.cmdBindIndexBuffer(secondaryCommandBuffer,
elementBuffer,
0,
Vulkan.INDEX_TYPE_UINT32)
vk.cmdSetViewport(secondaryCommandBuffer,
0,
1,
Array(
new Vulkan.Viewport(height = height,
width = width,
minDepth = 0f,
maxDepth = 1f,
x = 0,
y = 0)))
vk.cmdSetScissor(
secondaryCommandBuffer,
0,
1,
Array(
new Vulkan.Rect2D(
extent = new Vulkan.Extent2D(width = width, height = height),
offset = new Vulkan.Offset2D(x = 0, y = 0)
)))
vk.cmdDraw(secondaryCommandBuffer, 6, 1, 0, 0)
vk.cmdBindVertexBuffers(secondaryCommandBuffer,
0,
1,
Array(vertexBuffer),
Array(vertexData.capacity))
vk.cmdBindPipeline(secondaryCommandBuffer,
Vulkan.PIPELINE_BIND_POINT_GRAPHICS,
pipelines(1))
vk.cmdBindDescriptorSets(secondaryCommandBuffer,
Vulkan.PIPELINE_BIND_POINT_GRAPHICS,
pipelineLayout,
0,
1,
Array(descriptorSets(1)),
0,
Array.empty)
vk.cmdDrawIndexed(secondaryCommandBuffer, comps(1).num * 3, 1, 0, 0, 0)
vk.endCommandBuffer(secondaryCommandBuffer)
val acquireSemaphore =
vk.createSemaphore(device, new Vulkan.SemaphoreCreateInfo(flags = 0))
val renderSemaphore =
vk.createSemaphore(device, new Vulkan.SemaphoreCreateInfo(flags = 0))
val fence = vk.createFence(device, new Vulkan.FenceCreateInfo(flags = 0))
val graphicsQueue = vk.getDeviceQueue(device, qi, 0)
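    // Render loop: rewrite the two host-coherent uniform buffers in place (no
    // explicit flush needed), acquire a swapchain image, replay the
    // prerecorded secondary command buffer inside a fresh primary one, submit,
    // present, and pace the CPU by waiting on a fence signalled by an empty
    // follow-up submit.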
(0 until 5000).foreach { i =>
val theta = (i % 5000).toDouble / 5000.0
val uniformDataPerFrame = Buffer.direct(scale, theta.toFloat).value
//since memory is coherent, we just need to do a memcopy
vk.loadMemory(uniformDataPtr, uniformDataPerFrame)
val cubeUniformDataPerFrame =
hephaestus.lunarg.tutorial.Cube.uniformData(width, height, i)
vk.loadMemory(cubeUniformDataPtr, cubeUniformDataPerFrame)
// val textureDataPerFrame = hephaestus.lunarg.tutorial.Cube.textureData(cubeTextureWidth, cubeTextureHeight, i)
// vk.loadMemory(cubeTextureDataPtr, textureDataPerFrame)
val currentBuffer = vk.acquireNextImageKHR(device,
swapchain,
java.lang.Long.MAX_VALUE,
acquireSemaphore,
new Vulkan.Fence(0))
vk.beginCommandBuffer(
primaryCommandBuffer,
new Vulkan.CommandBufferBeginInfo(
flags = Vulkan.COMMAND_BUFFER_USAGE_BLANK_FLAG,
inheritanceInfo = Vulkan.COMMAND_BUFFER_INHERITANCE_INFO_NULL_HANDLE)
)
vk.cmdBeginRenderPass(
primaryCommandBuffer,
new Vulkan.RenderPassBeginInfo(
renderPass = renderPass,
framebuffer = framebuffers(currentBuffer),
renderArea = new Vulkan.Rect2D(
offset = new Vulkan.Offset2D(x = 0, y = 0),
extent = new Vulkan.Extent2D(width = width, height = height)),
clearValues = Array(
new Vulkan.ClearValueColor(
color = new Vulkan.ClearColorValueFloat(
float32 = Array(0.2f, 0.2f, 0.2f, 0.2f))),
new Vulkan.ClearValueDepthStencil(
depthStencil =
new Vulkan.ClearDepthStencilValue(depth = 1.0f, stencil = 0))
)
),
Vulkan.SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
)
vk.cmdExecuteCommands(primaryCommandBuffer,
1,
Array(secondaryCommandBuffer))
vk.cmdEndRenderPass(primaryCommandBuffer)
vk.endCommandBuffer(primaryCommandBuffer)
vk.queueSubmit(
graphicsQueue,
1,
Array(
new Vulkan.SubmitInfo(
waitSemaphores = Array(acquireSemaphore),
waitDstStageMask =
Array(Vulkan.PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT),
commandBuffers = Array(primaryCommandBuffer),
signalSemaphores = Array(renderSemaphore)
)),
new Vulkan.Fence(0)
)
vk.queueSubmit(graphicsQueue, 0, Array.empty, fence)
vk.queuePresentKHR(
graphicsQueue,
new Vulkan.PresentInfoKHR(swapchains = Array(swapchain),
imageIndices = currentBuffer,
waitSemaphores = Array(renderSemaphore)))
var shouldWait = true
println("about to wait")
while (shouldWait) {
val res =
vk.waitForFences(device, 1, Array(fence), false, FENCE_TIMEOUT)
if (res.value != Vulkan.TIMEOUT.value) {
println("finished waiting")
shouldWait = false
}
}
vk.resetFences(device, 1, Array(fence))
}
Thread.sleep(1000)
vk.destroySemaphore(device, renderSemaphore)
vk.destroySemaphore(device, acquireSemaphore)
vk.destroyFence(device, fence)
vk.destroyPipeline(device, pipelines(1))
vk.destroyPipeline(device, pipelines(0))
framebuffers.foreach { f =>
vk.destroyFramebuffer(device, f)
}
vk.destroyShaderModule(device, vertexModule)
vk.destroyShaderModule(device, fragmentModule)
vk.destroyRenderPass(device, renderPass)
vk.destroyBuffer(device, vertexBuffer)
vk.freeMemory(device, vertexBufferMemory)
vk.freeDescriptorSets(device, descriptorPool, 1, descriptorSets)
vk.destroyDescriptorPool(device, descriptorPool)
vk.destroyPipelineLayout(device, pipelineLayout)
vk.destroyDescriptorSetLayout(device, descriptorSetLayout)
vk.destroySampler(device, textureSampler)
vk.destroyImageView(device, textureImageView)
vk.freeMemory(device, textureMemory)
vk.destroyImage(device, textureImage)
vk.destroyImageView(device, cubeTextureImageView)
vk.freeMemory(device, cubeTextureMemory)
vk.destroyImage(device, cubeTextureImage)
vk.destroyBuffer(device, cubeUniformBuffer)
vk.freeMemory(device, cubeUniformBufferMemory)
vk.destroyBuffer(device, uniformBuffer)
vk.freeMemory(device, uniformBufferMemory)
vk.destroyImageView(device, depthImageView)
vk.freeMemory(device, depthImageMemory)
vk.destroyImage(device, depthImage)
imageViews.foreach { i =>
vk.destroyImageView(device, i)
}
vk.destroySwapchain(device, swapchain)
vk.freeCommandBuffers(device, commandPool, 1, secondaryCommandBuffer)
vk.freeCommandBuffers(device, commandPool, 1, primaryCommandBuffer)
vk.destroyCommandPool(device, commandPool)
vk.destroyDevice(device)
vk.destroySurfaceKHR(instance, surface)
glfw.destroyWindow(window)
vk.destroyInstance(instance)
glfw.terminate()
}
}
/**
I would like:
- resource management, so I don't have to clean everything up all the time
- a clear idea of what the API is like in terms of structures derived from other structures
 - a clearer idea of the memory management (we now have two objects in the same vertex buffer)
- cleaner file loading. We've got a codec, which is good
- a better vector and camera system
- I should be able to calculate the part of the skybox I need from the camera system
-
Can create a pipeline without knowing anything about the data, except for format
So don't need to know data size.
val uniformBuffer = createBuffer
val uniformBufferMemoryRequirements = vk.getBufferMemoryRequirements(device, uniformBuffer)
//find the index of the memory type which can be used to store the buffer, given the properties and buffer requirements
val uniformBufferMemoryTypeIndex = memoryTypeIndex(memoryProperties, uniformBufferMemoryRequirements, props)
//allocate memory for a memory type
val uniformBufferMemory = vk.allocateMemory ...
//map and copy an amount of memory
val uniformDataPtr = vk.mapMemory(device, uniformBufferMemory, new Vulkan.DeviceSize(0), uniformBufferMemoryRequirements.size, 0)
vk.loadMemory(uniformDataPtr, uniformData)
vk.unmapMemory(device, uniformBufferMemory)
//bind the memory to a buffer
vk.bindBufferMemory(device, uniformBuffer, uniformBufferMemory, new Vulkan.DeviceSize(0))
Ideally, we'd allocate a chunk of memory for a memory type
We'd then find that the memory type was valid, and take a range for a buffer
We'd load stuff in and bind that range to a buffer
When we use the buffer, we have to specify an offset. This offset must be valid. So the data bound to the buffer memory must be ok.
A single heap can support multiple types
So how do we pick a heap? It seems to be chosen for us. The type has the heap index in it!
e.g.
propertyFlags 0 heap 1
propertyFlags 0 heap 1
propertyFlags 0 heap 1
propertyFlags 0 heap 1
propertyFlags 0 heap 1
propertyFlags 0 heap 1
propertyFlags 0 heap 1
propertyFlags 1 heap 0 //device local
propertyFlags 1 heap 0 //device local
propertyFlags 6 heap 1 //host visible and coherent
propertyFlags 14 heap 1 // host visible and coherent and cached
heap size 2147483648 flags 1 //device local
heap size 25197714432 flags 0 //device local
 => This machine has a dedicated graphics card. We need to do a copy to get data into the device-local memory
message: vkUpdateDescriptorsSets() failed write update validation for Descriptor Set 0x1a with error: Write update to descriptor in set 0x1a binding #0 failed with error message: Attempted write update to buffer descriptor failed due to: VkDescriptorBufferInfo range is 64 which is greater than buffer size (64) minus requested offset of 8. For more information refer to Vulkan Spec Section '13.2.4. Descriptor Set Updates' which states 'If range is not equal to VK_WHOLE_SIZE, range must be less than or equal to the size of buffer minus offset' (https://www.khronos.org/registry/vulkan/specs/1.0-extensions/xhtml/vkspec.html#VkDescriptorBufferInfo)
message: vkUpdateDescriptorSets(): pDescriptorWrites[2].pBufferInfo[0].offset (0x8) must be a multiple of device limit minUniformBufferOffsetAlignment 0x100
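 A fix sketch for the alignment error above (assuming the device limits were
 queried via vkGetPhysicalDeviceProperties; names are illustrative):
   val align = limits.minUniformBufferOffsetAlignment
   val alignedOffset = ((offset + align - 1) / align) * align  // round up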
 Vulkan® 1.0.39 - A Specification (p. 575 of 683):

   Valid Usage
   • If any member of this structure is VK_FALSE, as returned by
     vkGetPhysicalDeviceFeatures, then it must be VK_FALSE when passed as part
     of the VkDeviceCreateInfo struct when creating a device.

   30.1.1 Feature Requirements
   All Vulkan graphics implementations must support the following features:
   • robustBufferAccess.
   All other features are not required by the Specification.

   30.2 Limits
   There are a variety of implementation-dependent limits.
   The VkPhysicalDeviceLimits are properties of the physical device. These are
   available in the limits member of the VkPhysicalDeviceProperties structure
   which is returned from vkGetPhysicalDeviceProperties.
Design plan:
- assume that there can be separate queues for loading and drawing in the same queue family.
=> we sync the queues using a semaphore when we want to use updated data
- memory can be categorized as:
no-updates, long term (should be device local)
updates, long term (should be host visible, not device local)
 per-frame updates (using vkCmdUpdateBuffer, host visible)
- give a size hint for a given memory type on application start
- for a given buffer, get memoryTypeBits, find appropriate allocation, bind allocation
- for a given set of vertex attributes, find a vertex buffer
- for a given set of uniforms, find a uniform buffer
- buffer quantities:
- there should be a single index buffer
- there should be a vertex buffer per attribute set (assuming interleaving here)
- there should be a vertex buffer per instanced attribute set
- there should be a single uniform buffer (no point in having multiple uniform buffers)
What about buffer views?
What about coherent / cached vs non-coherent?
What about images?
- should always read in optimal. If non-optimal, need to stage first.
-
lin 5121 op: 7555 buf: 88 // RGBA, blit_src_blit, ds att,
lin 0 op: 0 buf: 64 //RGB
VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001,
VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002,
VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004,
VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008,
VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010,
VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020,
VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040,
VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080,
VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100,
VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200,
VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400,
VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800,
VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000,
VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000,
*/
| to-ithaca/hephaestus | samples/src/main/scala/hephaestus/skybox/Step03.scala | Scala | apache-2.0 | 53,706 |
package com.daxin
import scala.io.Source
import java.io.File
/**
 * 3.1. Concept
 * Implicit conversions and implicit parameters are two very powerful Scala features.
 * With them you can build elegant libraries that hide tedious, boilerplate
 * details from their users.
 * 3.2. Purpose
 * Implicitly enrich a class with extra methods, extending the functionality of
 * existing libraries.
 * 3.3. Implicit conversion functions
 * Functions declared with the implicit keyword that take a single parameter.
 */
object MainApp {
  def main(args: Array[String]): Unit = {
    // import the implicit conversion
    import RichFile._
    val file = new File("G://data.txt")
    /**
     * How the implicit conversion fires:
     * when file.read is called, the compiler finds that File has no read method,
     * but searching the enclosing scope (where import RichFile._ is in effect)
     * it finds implicit def file2RichFile(file: File) = new RichFile(file),
     * which converts the File into a RichFile. RichFile does have a read method,
     * so that is what executes.
     */
    val content = file.read
    println(content)
  }
}
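// A minimal sketch of the RichFile enrichment imported above; the real
// definition lives elsewhere in this project, so the names and the use of
// scala.io.Source here are assumptions for illustration only:
//
//   class RichFile(val file: File) {
//     def read: String = Source.fromFile(file).mkString
//   }
//   object RichFile {
//     implicit def file2RichFile(file: File): RichFile = new RichFile(file)
//   }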
| Dax1n/Scala | implicitDemo/src/com/daxin/Main.scala | Scala | apache-2.0 | 1,046 |
package com.avidmouse.spark.streaming
/**
* @author avidmouse
* @version 0.1, 14-6-25
*/
package object mongodb {
val dstream = MongoSparkDStream.Implicits
}
| avidmouse/mongo-spark | streaming/src/main/scala/com/avidmouse/spark/streaming/mongodb/package.scala | Scala | apache-2.0 | 164 |
package db.tables
import shared.models.Product
import slick.lifted.Tag
import db.currentJdbcProfile.api._
private[db] class ProductsTable(tag: Tag) extends Table[Product](tag, "products") {
def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
def name = column[String]("name")
def quantity = column[Int]("quantity")
def * = (id, name, quantity) <> (Product.tupled, Product.unapply)
}
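// Illustrative query (assumes a configured slick Database instance `db`; not
// part of the original file):
//   val products = TableQuery[ProductsTable]
//   db.run(products.filter(_.quantity > 0).result)  // Future[Seq[Product]]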
| kkrzys/eShop | eShop-database/src/main/scala/db/tables/ProductsTable.scala | Scala | apache-2.0 | 397 |
/*
*
* * Copyright 2020 Lenses.io.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package com.datamountaineer.streamreactor.connect.jms.sink.converters
import com.datamountaineer.streamreactor.common.converters.sink.SinkRecordToJson
import com.datamountaineer.streamreactor.common.schemas.ConverterUtil
import com.datamountaineer.streamreactor.connect.jms.config.JMSSetting
import org.apache.kafka.connect.sink.SinkRecord
import javax.jms.{Message, Session}
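// Renders the whole SinkRecord as a JSON string, honouring the per-topic field
// projection and ignore lists carried by JMSSetting, and wraps the result in a
// JMS TextMessage; the converter returns it paired with setting.source.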
class JsonMessageConverter extends JMSMessageConverter with ConverterUtil {
override def convert(sinkRecord: SinkRecord, session: Session, setting: JMSSetting): (String, Message) = {
val json = SinkRecordToJson(sinkRecord, Map(sinkRecord.topic() -> setting.fields), Map(sinkRecord.topic() -> setting.ignoreField))
(setting.source, session.createTextMessage(json))
}
}
| datamountaineer/stream-reactor | kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/JsonMessageConverter.scala | Scala | apache-2.0 | 1,396 |
import java.sql.Date
case class Transaction(
id:Option[Int], // unique identifier
candidate:String, // candidate receiving the donation
contributor:String, // name of the contributor
contributorState:String, // contributor state
contributorOccupation:Option[String], // contributor job
amount:Long, // amount in cents
date:Date // date of the donation
)
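// Example row (illustrative values; amount is $500.00 stored as cents):
//   Transaction(None, "Smith, John", "Doe, Jane", "CA", Some("ENGINEER"),
//     50000, Date.valueOf("2012-06-01"))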
| pbugnion/s4ds | chap06/Transaction.scala | Scala | apache-2.0 | 383 |
package org.improving.scalify
import org.eclipse.jdt.core.dom
import scala.collection.mutable.{ HashMap, ListBuffer }
import scala.collection.immutable
import ScalifySafe._
// A JDTForest is each CU mapped to its JDTMap
// A JDTMap is each ASTNode mapped to its Node
object Forest
{
private var forest: JDTForest = null
def keys: List[dom.CompilationUnit] = forest.trees.keys.toList
def initialize(xs: List[dom.CompilationUnit]) = {
def doASTTraversal(cu: dom.CompilationUnit): JDTMap = {
val v = new JDTVisitor(cu)
cu.accept(v)
v.tree
}
val trees: Map[dom.CompilationUnit, JDTMap] = PCompute.runAll(xs, doASTTraversal _)
forest = new JDTForest(trees)
}
def get(node: ASTNode): Node = forest.lookup(node)
def getJDTMap(cu: dom.CompilationUnit): JDTMap = forest.trees(cu)
// go through all nodes in all trees, optionally altering
def transformByNode(f: (ASTNode) => Option[Node]) = {
val newTrees: Map[dom.CompilationUnit, JDTMap] = PCompute.runAll(keys, x => getJDTMap(x).map(f))
forest = new JDTForest(newTrees)
}
// go through on a map-by-map basis
def transformByMap(f: (JDTMap) => JDTMap) = {
val newTrees: Map[dom.CompilationUnit, JDTMap] = PCompute.runAll(keys, x => f(getJDTMap(x)))
forest = new JDTForest(newTrees)
}
// search all trees in parallel for nodes meeting a condition
def search[T](f: (JDTMap) => List[T]): List[T] = {
val results = PCompute.runAll(keys, (x: dom.CompilationUnit) => f(getJDTMap(x)))
List.flatten(results.values.toList)
}
// def search(f: (JDTMap) => List[ASTNode]): List[ASTNode] = {
// val results = PCompute.runAll(keys, (x: dom.CompilationUnit) => f(getJDTMap(x)))
//
// List.flatten(results.values.toList)
// }
  import scala.actors.Actor.{ actor, receive, reply }
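  // A single actor serialises all rename queries and updates, so the parallel
  // per-CU workers never race on the mutable name state of shared nodes.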
val renamer = actor {
while(true) {
receive {
case SetNodeName(node) => get(node) match {
case x: NamedDecl => x.incrementName
case _ => abort(node.toString)
}
case GetNodeName(node) => get(node) match {
case x: Named => reply(x.currentName)
case _ => abort(node.toString)
}
case x => abort(x.toString)
}
}
}
}
| mbana/scalify | src/main/core/Forest.scala | Scala | isc | 2,149 |
package leo.modules.procedures
import leo.datastructures.Term._
import leo.datastructures._
import leo.modules.HOLSignature._
import leo.modules.input.Input.{readFormula => read}
import leo.{Checked, LeoTestSuite}
class SimplificationTest extends LeoTestSuite {
implicit private val s: Signature = getFreshSignature
mkAtom(s.addUninterpreted("p", o))
mkAtom(s.addUninterpreted("q", o ->: o))
mkAtom(s.addUninterpreted("r", i))
  private val tests: Map[Term, Term] = Map(
(read("~$true"), read("$false")),
(read("~$false"), read("$true")),
(read("p & p"), read("p")),
(read("p & ~p"), read("$false")),
(read("p <=> p"), read("$true")),
(read("p & $true"), read("p")),
(read("p & $false"), read("$false")),
(read("p => $true"), read("$true")),
(read("p => $false"), read("~p")),
(read("p <=> $true"), read("p")),
(read("p <=> $false"), read("~p")),
(read("p | p"), read("p")),
(read("p | ~p"), read("$true")),
(read("p => p"), read("$true")),
(read("p | $true"), read("$true")),
(read("p | $false"), read("p")),
(read("$true => p"), read("p")),
(read("$false => p"), read("$true")),
(read("![X:$o]: p"), read("p")),
(read("?[X:$o]: p"), read("p")),
(read("![X:$o]: ((q @ X) <=> (q @ X))"), read("$true")),
(read("! [X:$i]: (r = r)"), read("$true")),
(read("! [X:$i]: (X = X)"), read("$true")),
(read("! [X:$tType]: (r = r)"), read("$true")),
(read("2"), read("2")),
(read("1/2"), read("1/2")),
(read("2/4"), read("1/2")),
(read("2/2"), read("1/1"))
)
for ((input,expected) <- tests){
test(s"Simplification Test: ${input.pretty(s)}", Checked) {
      val simp = Simplification.apply(input)
if (simp != expected) fail(s"The simplified Term '${input.pretty(s)}' should be '${expected.pretty(s)}', but was '${simp.pretty(s)}'.")
}
}
}
| lex-lex/Leo-III | src/test/scala/leo/modules/procedures/SimplificationTest.scala | Scala | bsd-3-clause | 1,906 |
package controllers.admin
object Assets extends controllers.AssetsBuilder
| SollmoStudio/beyond | modules/admin/app/controllers/admin/Assets.scala | Scala | apache-2.0 | 74 |
package gapt.proofs.nd
import gapt.expr._
import gapt.expr.formula.All
import gapt.expr.formula.And
import gapt.expr.formula.Bottom
import gapt.expr.formula.Eq
import gapt.expr.formula.Ex
import gapt.expr.formula.Formula
import gapt.expr.formula.Imp
import gapt.expr.formula.Neg
import gapt.expr.formula.Or
import gapt.expr.formula.Top
import gapt.expr.subst.Substitution
import gapt.expr.ty.FunctionType
import gapt.expr.util.freeVariables
import gapt.expr.util.replacementContext
import gapt.proofs.IndexOrFormula.{ IsFormula, IsIndex }
import gapt.proofs._
import scala.collection.mutable
abstract class NDProof extends SequentProof[Formula, NDProof] {
protected def NDRuleCreationException( message: String ): NDRuleCreationException =
new NDRuleCreationException( longName, message )
/**
* The end-sequent of the rule.
*/
final def endSequent = conclusion
/**
* Checks whether indices are in the right place and premise is defined at all of them.
*
* @param premise The sequent to be checked.
* @param antecedentIndices Indices that should be in the antecedent.
*/
protected def validateIndices( premise: HOLSequent, antecedentIndices: Seq[SequentIndex] ): Unit = {
val antSet = mutable.HashSet[SequentIndex]()
for ( i <- antecedentIndices ) i match {
case Ant( _ ) =>
if ( !premise.isDefinedAt( i ) )
throw NDRuleCreationException( s"Sequent $premise is not defined at index $i." )
if ( antSet contains i )
throw NDRuleCreationException( s"Duplicate index $i for sequent $premise." )
antSet += i
case Suc( _ ) => throw NDRuleCreationException( s"Index $i should be in the antecedent." )
}
}
}
/**
* An NDProof deriving a sequent from another sequent:
* <pre>
* (π)
* Γ :- A
* ----------
* Γ' :- A'
* </pre>
*/
abstract class UnaryNDProof extends NDProof {
/**
* The immediate subproof of the rule.
*
* @return
*/
def subProof: NDProof
/**
   * The object connecting the lower and upper sequents.
*
* @return
*/
def getSequentConnector: SequentConnector = occConnectors.head
/**
* The upper sequent of the rule.
*
* @return
*/
def premise = subProof.endSequent
override def immediateSubProofs = Seq( subProof )
}
object UnaryNDProof {
def unapply( p: UnaryNDProof ) = Some( p.endSequent, p.subProof )
}
/**
* An NDProof deriving a sequent from two other sequents:
* <pre>
* (π1) (π2)
* Γ :- A Γ' :- A'
* ------------------
* Π :- B
* </pre>
*/
abstract class BinaryNDProof extends NDProof {
/**
* The immediate left subproof of the rule.
*
* @return
*/
def leftSubProof: NDProof
/**
* The immediate right subproof of the rule.
*
* @return
*/
def rightSubProof: NDProof
/**
* The object connecting the lower and left upper sequents.
*
* @return
*/
def getLeftSequentConnector: SequentConnector = occConnectors.head
/**
* The object connecting the lower and right upper sequents.
*
* @return
*/
def getRightSequentConnector: SequentConnector = occConnectors.tail.head
/**
* The left upper sequent of the rule.
*
* @return
*/
def leftPremise = leftSubProof.endSequent
/**
* The right upper sequent of the rule.
*
* @return
*/
def rightPremise = rightSubProof.endSequent
override def immediateSubProofs = Seq( leftSubProof, rightSubProof )
}
object BinaryNDProof {
def unapply( p: BinaryNDProof ) = Some( p.endSequent, p.leftSubProof, p.rightSubProof )
}
/**
* An NDProof deriving a sequent from three other sequents:
* <pre>
* (π1) (π2) (π3)
* Γ1 :- A1 Γ2 :- A2 Γ3 :- A3
* --------------------------------
* Π :- B
* </pre>
*/
abstract class TernaryNDProof extends NDProof {
/**
* The immediate left subproof of the rule.
*
* @return
*/
def leftSubProof: NDProof
/**
* The immediate middle subproof of the rule.
*
* @return
*/
def middleSubProof: NDProof
/**
* The immediate right subproof of the rule.
*
* @return
*/
def rightSubProof: NDProof
/**
* The object connecting the lower and left upper sequents.
*
* @return
*/
def getLeftSequentConnector: SequentConnector = occConnectors( 0 )
/**
* The object connecting the lower and middle upper sequents.
*
* @return
*/
def getMiddleSequentConnector: SequentConnector = occConnectors( 1 )
/**
* The object connecting the lower and right upper sequents.
*
* @return
*/
def getRightSequentConnector: SequentConnector = occConnectors( 2 )
/**
* The left upper sequent of the rule.
*
* @return
*/
def leftPremise = leftSubProof.endSequent
/**
* The middle upper sequent of the rule.
*
* @return
*/
def middlePremise = middleSubProof.endSequent
/**
* The right upper sequent of the rule.
*
* @return
*/
def rightPremise = rightSubProof.endSequent
override def immediateSubProofs = Seq( leftSubProof, middleSubProof, rightSubProof )
}
object TernaryNDProof {
def unapply( p: TernaryNDProof ) = Some( p.endSequent, p.leftSubProof, p.middleSubProof, p.rightSubProof )
}
trait CommonRule extends NDProof with ContextRule[Formula, NDProof]
/**
* Use this trait for rules that use eigenvariables.
*
*/
trait Eigenvariable {
def eigenVariable: Var
}
/**
* An NDProof consisting of a single sequent:
* <pre>
* --------ax
* Γ :- A
* </pre>
*/
abstract class InitialSequent extends NDProof {
override def mainIndices = endSequent.indices
override def auxIndices = Seq()
override def immediateSubProofs = Seq()
override def occConnectors = Seq()
}
object InitialSequent {
def unapply( proof: InitialSequent ) = Some( proof.endSequent )
}
/**
* An NDProof ending with weakening:
* <pre>
* (π)
* Γ :- B
* ---------wkn
* A, Γ :- B
* </pre>
*
* @param subProof The subproof π.
* @param formula The formula A.
*/
case class WeakeningRule( subProof: NDProof, formula: Formula )
extends UnaryNDProof with CommonRule {
override def auxIndices = Seq( Seq() )
override def name = "wkn"
def mainFormula = formula
override def mainFormulaSequent = mainFormula +: Sequent()
}
object WeakeningRule extends ConvenienceConstructor( "WeakeningRule" ) {
/**
   * Convenience constructor for wkn, taking a sequence of formulas.
   * Applies the weakening rule 0 or more times.
* <pre>
* (π)
* Γ :- B
* ---------------------wkn*
* A1, ..., An, Γ :- B
* </pre>
*
* @param subProof The subproof π.
* @param formulas The formulas A1, ..., An
* @return
*/
def apply( subProof: NDProof, formulas: Seq[Formula] ): NDProof = {
formulas.foldLeft[NDProof]( subProof ) { ( ant, c ) =>
WeakeningRule( ant, c )
}
}
}
/**
* An NDProof ending with a contraction:
* <pre>
* (π)
* A, A, Γ :- B
* --------------ctr
* A, Γ :- B
* </pre>
*
* @param subProof The subproof π.
* @param aux1 The index of one occurrence of A.
* @param aux2 The index of the other occurrence of A.
*/
case class ContractionRule( subProof: NDProof, aux1: SequentIndex, aux2: SequentIndex )
extends UnaryNDProof with CommonRule {
validateIndices( premise, Seq( aux1, aux2 ) )
if ( premise( aux1 ) != premise( aux2 ) )
throw NDRuleCreationException( s"Auxiliary formulas ${premise( aux1 )} and ${premise( aux2 )} are not equal." )
val mainFormula = premise( aux1 )
override def auxIndices = Seq( Seq( aux1, aux2 ) )
override def name = "ctr"
override def mainFormulaSequent = mainFormula +: Sequent()
}
object ContractionRule extends ConvenienceConstructor( "ContractionRule" ) {
/**
* Convenience constructor for ctr that, given a formula to contract, will automatically pick the
* first two occurrences of that formula.
*
* @param subProof The subproof π.
* @param f The formula to contract.
*/
def apply( subProof: NDProof, f: Formula ): ContractionRule = {
val premise = subProof.endSequent
val ( indices, _ ) = findAndValidate( premise )( Seq( f, f ), Suc( 0 ) )
val p = ContractionRule( subProof, Ant( indices( 0 ) ), Ant( indices( 1 ) ) )
assert( p.mainFormula == f )
p
}
}
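/* Illustrative sketch: weakening followed by contraction gives back the
 * original sequent. The object and method names are arbitrary; `a` may be
 * any formula.
 */
object ContractionExample {
  def weakenThenContract( a: Formula ): NDProof =
    ContractionRule( WeakeningRule( LogicalAxiom( a ), a ), a ) // a, a :- a, then a :- a
}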
/**
* An NDProof consisting of a logical axiom:
* <pre>
* --------ax
* A :- A
* </pre>
*
* @param A The formula A.
*/
case class LogicalAxiom( A: Formula ) extends InitialSequent {
override def name = "ax"
override def conclusion = NDSequent( Seq( A ), A )
def mainFormula = A
}
object LogicalAxiom extends ConvenienceConstructor( "LogicalAxiom" ) {
/**
* Convenience constructor for ax, taking a context.
* Applies the axiom rule followed by 0 or more weakenings.
* <pre>
* --------ax
* A :- A
* -----------wkn*
* Γ, A :- A
* </pre>
*
   * @param A The formula A.
* @param context The context Γ.
* @return
*/
def apply( A: Formula, context: Seq[Formula] ): NDProof = {
context.foldLeft[NDProof]( LogicalAxiom( A ) ) { ( ant, c ) =>
WeakeningRule( ant, c )
}
}
}
/**
* An NDProof ending with elimination of the right conjunct:
* <pre>
* (π)
* Γ :- A ∧ B
* --------------∧:e1
* Γ :- A
* </pre>
*
* @param subProof The subproof π.
*/
case class AndElim1Rule( subProof: NDProof )
extends UnaryNDProof with CommonRule {
val conjunction = premise( Suc( 0 ) )
val mainFormula = conjunction match {
case And( leftConjunct, _ ) => leftConjunct
case _ =>
throw NDRuleCreationException( s"Proposed main formula $conjunction is not a conjunction." )
}
override def auxIndices = Seq( Seq( Suc( 0 ) ) )
override def name = "∧:e1"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
/**
* An NDProof ending with elimination of the left conjunct:
* <pre>
* (π)
* Γ :- A ∧ B
* --------------∧:e2
* Γ :- B
* </pre>
*
* @param subProof The subproof π.
*/
case class AndElim2Rule( subProof: NDProof )
extends UnaryNDProof with CommonRule {
val conjunction = premise( Suc( 0 ) )
val mainFormula = conjunction match {
case And( _, rightConjunct ) => rightConjunct
case _ =>
throw NDRuleCreationException( s"Proposed main formula $conjunction is not a conjunction." )
}
override def auxIndices = Seq( Seq( Suc( 0 ) ) )
override def name = "∧:e2"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
/**
* An NDProof ending with a conjunction on the right:
* <pre>
* (π1) (π2)
* Γ :- A Π :- B
* --------------------∧:i
* Γ, Π :- A∧B
* </pre>
*
* @param leftSubProof The proof π,,1,,.
* @param rightSubProof The proof π,,2,,.
*/
case class AndIntroRule( leftSubProof: NDProof, rightSubProof: NDProof )
extends BinaryNDProof with CommonRule {
val leftConjunct = leftPremise( Suc( 0 ) )
val rightConjunct = rightPremise( Suc( 0 ) )
val mainFormula = And( leftConjunct, rightConjunct )
def auxIndices = Seq( Seq( Suc( 0 ) ), Seq( Suc( 0 ) ) )
override def name = "∧:i"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
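/* Illustrative sketch: composing the conjunction rules above. The names are
 * arbitrary; `a` and `b` may be any formulas.
 */
object AndRulesExample {
  def pair( a: Formula, b: Formula ): NDProof =
    AndIntroRule( LogicalAxiom( a ), LogicalAxiom( b ) ) // a, b :- a ∧ b
  def firstOfPair( a: Formula, b: Formula ): NDProof =
    AndElim1Rule( pair( a, b ) ) // a, b :- a
}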
/**
* An NDProof ending with elimination of a disjunction:
* <pre>
* (π1) (π2) (π3)
* Γ :- A∨B Π, A :- C Δ, B :- C
* ------------------------------------∨:e
* Γ, Π, Δ :- C
* </pre>
*
* @param leftSubProof The proof π,,1,,.
* @param middleSubProof The proof π,,2,,.
* @param aux1 The index of A.
* @param rightSubProof The proof π,,3,,.
* @param aux2 The index of B.
*/
case class OrElimRule(
leftSubProof: NDProof,
middleSubProof: NDProof, aux1: SequentIndex,
rightSubProof: NDProof, aux2: SequentIndex )
extends TernaryNDProof with CommonRule {
validateIndices( middlePremise, Seq( aux1 ) )
validateIndices( rightPremise, Seq( aux2 ) )
val leftDisjunct = middlePremise( aux1 )
val rightDisjunct = rightPremise( aux2 )
val disjunction = leftPremise( Suc( 0 ) )
  if ( disjunction != Or( leftDisjunct, rightDisjunct ) )
    throw NDRuleCreationException(
      s"Formula $disjunction is not a disjunction of $leftDisjunct and $rightDisjunct." )
val middleC = middlePremise( Suc( 0 ) )
val rightC = rightPremise( Suc( 0 ) )
val mainFormula = if ( middleC == rightC ) middleC else
    throw NDRuleCreationException( s"Formulas $middleC and $rightC are not the same." )
def auxIndices = Seq( Seq( Suc( 0 ) ), Seq( aux1, Suc( 0 ) ), Seq( aux2, Suc( 0 ) ) )
override def name = "∨:e"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
object OrElimRule extends ConvenienceConstructor( "OrElimRule" ) {
/**
* Convenience constructor for ∨:e.
* Given only the subproofs, it will attempt to create an inference with this.
*
* @param leftSubProof The left subproof.
* @param middleSubProof The middle subproof.
* @param rightSubProof The right subproof.
* @return
*/
def apply( leftSubProof: NDProof, middleSubProof: NDProof, rightSubProof: NDProof ): OrElimRule = {
val disjunction = leftSubProof.endSequent( Suc( 0 ) )
val ( leftDisjunct, rightDisjunct ) = disjunction match {
case Or( f, g ) => ( f, g )
case _ => throw NDRuleCreationException( s"Formula $disjunction is not a disjunction." )
}
val ( middlePremise, rightPremise ) = ( middleSubProof.endSequent, rightSubProof.endSequent )
val ( middleIndices, _ ) = findAndValidate( middlePremise )( Seq( leftDisjunct ), Suc( 0 ) )
val ( rightIndices, _ ) = findAndValidate( rightPremise )( Seq( rightDisjunct ), Suc( 0 ) )
new OrElimRule( leftSubProof, middleSubProof, Ant( middleIndices( 0 ) ), rightSubProof, Ant( rightIndices( 0 ) ) )
}
}
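/* Illustrative sketch: eliminating a disjunction whose disjuncts coincide;
 * the convenience constructor locates the auxiliary occurrences itself.
 * The names are arbitrary; `a` may be any formula.
 */
object OrElimExample {
  def selfDisjunction( a: Formula ): NDProof =
    OrElimRule( LogicalAxiom( Or( a, a ) ), LogicalAxiom( a ), LogicalAxiom( a ) ) // a ∨ a :- a
}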
/**
* An NDProof ending with introduction of a disjunction, with a new formula as the right disjunct:
* <pre>
* (π)
* Γ :- A
* ------------∨:i1
* Γ :- A ∨ B
* </pre>
*
* @param subProof The subproof π.
* @param rightDisjunct The formula B.
*/
case class OrIntro1Rule( subProof: NDProof, rightDisjunct: Formula )
extends UnaryNDProof with CommonRule {
val leftDisjunct = premise( Suc( 0 ) )
val mainFormula = Or( leftDisjunct, rightDisjunct )
override def auxIndices = Seq( Seq( Suc( 0 ) ) )
override def name = "∨:i1"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
/**
* An NDProof ending with introduction of a disjunction, with a new formula as the left disjunct:
* <pre>
* (π)
* Γ :- A
* ------------∨:i2
* Γ :- B ∨ A
* </pre>
*
* @param subProof The subproof π.
* @param leftDisjunct The formula B.
*/
case class OrIntro2Rule( subProof: NDProof, leftDisjunct: Formula )
extends UnaryNDProof with CommonRule {
val rightDisjunct = premise( Suc( 0 ) )
val mainFormula = Or( leftDisjunct, rightDisjunct )
override def auxIndices = Seq( Seq( Suc( 0 ) ) )
override def name = "∨:i2"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
/**
* An NDProof ending with elimination of an implication:
* <pre>
* (π1) (π2)
* Γ :- A→B Π :- A
* --------------------→:e
* Γ, Π :- B
* </pre>
*
* @param leftSubProof The proof π,,1,,.
* @param rightSubProof The proof π,,2,,.
*/
case class ImpElimRule( leftSubProof: NDProof, rightSubProof: NDProof )
extends BinaryNDProof with CommonRule {
val implication = leftPremise( Suc( 0 ) )
val antecedent = rightPremise( Suc( 0 ) )
val mainFormula = implication match {
case Imp( `antecedent`, consequent ) => consequent
case Imp( _, _ ) =>
      throw NDRuleCreationException( s"Formula $antecedent is not the antecedent of $implication." )
case _ =>
throw NDRuleCreationException( s"Proposed main formula $implication is not an implication." )
}
def auxIndices = Seq( Seq( Suc( 0 ) ), Seq( Suc( 0 ) ) )
override def name = "→:e"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
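/* Illustrative sketch: modus ponens from two logical axioms. The names are
 * arbitrary; `a` and `b` may be any formulas.
 */
object ImpElimExample {
  def modusPonens( a: Formula, b: Formula ): NDProof =
    ImpElimRule( LogicalAxiom( Imp( a, b ) ), LogicalAxiom( a ) ) // a → b, a :- b
}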
/**
* An NDProof ending with introduction of an implication:
* <pre>
* (π)
* A, Γ :- B
* ------------→:i
* Γ :- A → B
* </pre>
*
* @param subProof The subproof π.
* @param aux The index of A.
*/
case class ImpIntroRule( subProof: NDProof, aux: SequentIndex )
extends UnaryNDProof with CommonRule {
validateIndices( premise, Seq( aux ) )
val impPremise = premise( aux )
val impConclusion = premise( Suc( 0 ) )
val mainFormula = Imp( impPremise, impConclusion )
override def auxIndices = Seq( Seq( aux, Suc( 0 ) ) )
override def name = "→:i"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
object ImpIntroRule extends ConvenienceConstructor( "ImpIntroRule" ) {
/**
* Convenience constructor for →:i.
* The aux formula can be given as an index or a formula. If it is given as a formula, the constructor
* will attempt to find an appropriate index on its own.
*
* @param subProof The subproof.
* @param impPremise Index of the premise of the implication or the premise itself.
* @return
*/
def apply( subProof: NDProof, impPremise: IndexOrFormula ): ImpIntroRule = {
val premise = subProof.endSequent
val ( antIndices, sucIndices ) = findAndValidate( premise )( Seq( impPremise ), Suc( 0 ) )
new ImpIntroRule( subProof, Ant( antIndices( 0 ) ) )
}
/**
* Convenience constructor for →:i
* If the subproof has precisely one element in the antecedent of its premise, this element will be the aux index.
*
* @param subProof The subproof.
* @return
*/
def apply( subProof: NDProof ): ImpIntroRule = {
val premise = subProof.endSequent
if ( premise.antecedent.size == 1 ) apply( subProof, Ant( 0 ) )
else if ( premise.antecedent.size == 0 )
throw NDRuleCreationException( s"Antecedent of $premise doesn't contain any elements." )
else throw NDRuleCreationException( s"Antecedent of $premise has more than one element, " +
s"the formula serving as antecedent of the implication should be specified." )
}
}
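/* Illustrative sketch: the identity a → a via the single-antecedent
 * convenience constructor. The names are arbitrary; `a` may be any formula.
 */
object ImpIntroExample {
  def identity( a: Formula ): NDProof =
    ImpIntroRule( LogicalAxiom( a ) ) // a :- a, then :- a → a
}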
/**
* An NDProof ending with elimination of a negation:
* <pre>
* (π1) (π2)
* Γ :- ¬A Π :- A
* -------------------¬:e
* Γ, Π :- ⊥
* </pre>
*
* @param leftSubProof The proof π,,1,,.
* @param rightSubProof The proof π,,2,,.
*/
case class NegElimRule( leftSubProof: NDProof, rightSubProof: NDProof )
extends BinaryNDProof with CommonRule {
val negatedFormula = leftPremise( Suc( 0 ) )
val formula = rightPremise( Suc( 0 ) )
val mainFormula =
    if ( negatedFormula == Neg( formula ) )
Bottom()
else
      throw NDRuleCreationException(
        s"Formula $negatedFormula is not the negation of $formula." )
def auxIndices = Seq( Seq( Suc( 0 ) ), Seq( Suc( 0 ) ) )
override def name = "¬:e"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
/**
* An NDProof ending with introduction of a negation:
* <pre>
* (π)
* A, Γ :- ⊥
* -----------¬:i
* Γ :- ¬A
* </pre>
*
* @param subProof The subproof π.
* @param aux The index of A.
*/
case class NegIntroRule( subProof: NDProof, aux: SequentIndex )
extends UnaryNDProof with CommonRule {
validateIndices( premise, Seq( aux ) )
val bottom = premise( Suc( 0 ) )
require( bottom == Bottom(), s"Formula $bottom is not ⊥." )
val formula = premise( aux )
val mainFormula = Neg( formula )
override def auxIndices = Seq( Seq( aux, Suc( 0 ) ) )
override def name = "¬:i"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
object NegIntroRule extends ConvenienceConstructor( "NegIntroRule" ) {
/**
* Convenience constructor for ¬:i.
* The aux formula can be given as an index or a formula. If it is given as a formula, the constructor
* will attempt to find an appropriate index on its own.
*
* @param subProof The subproof.
* @param negation Index of the negation or the negation itself.
* @return
*/
def apply( subProof: NDProof, negation: IndexOrFormula ): NegIntroRule = {
val premise = subProof.endSequent
val ( antIndices, sucIndices ) = findAndValidate( premise )( Seq( negation ), Suc( 0 ) )
new NegIntroRule( subProof, Ant( antIndices( 0 ) ) )
}
/**
* Convenience constructor for ¬:i.
* If the subproof has precisely one element in the antecedent of its premise, this element will be the aux index.
*
* @param subProof The subproof.
* @return
*/
def apply( subProof: NDProof ): NegIntroRule = {
val premise = subProof.endSequent
if ( premise.antecedent.size == 1 ) apply( subProof, Ant( 0 ) )
else if ( premise.antecedent.size == 0 )
throw NDRuleCreationException( s"Antecedent of $premise doesn't contain any elements." )
else throw NDRuleCreationException(
s"Antecedent of $premise has more than one element, the formula to be negated should be specified." )
}
}
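/* Illustrative sketch: no formula holds together with its negation. The
 * derivation combines the elimination rules above with contraction; the
 * names are arbitrary and `a` may be any formula.
 */
object NegIntroExample {
  def nonContradiction( a: Formula ): NDProof = {
    val ax = LogicalAxiom( And( a, Neg( a ) ) )
    val bottom = NegElimRule( AndElim2Rule( ax ), AndElim1Rule( ax ) ) // a ∧ ¬a, a ∧ ¬a :- ⊥
    NegIntroRule( ContractionRule( bottom, And( a, Neg( a ) ) ) ) // :- ¬(a ∧ ¬a)
  }
}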
/**
* An NDProof that is the introduction of ⊤:
* <pre>
* ------⊤:i
* :- ⊤
* </pre>
*/
case object TopIntroRule extends InitialSequent {
def mainFormula = Top()
def conclusion = NDSequent( Seq(), mainFormula )
override def name = "⊤:i"
}
/**
* An NDProof eliminating ⊥:
* <pre>
* (π)
* Γ :- ⊥
* --------⊥:e
* Γ :- A
* </pre>
*
* @param subProof The subproof π.
* @param mainFormula The formula A.
*/
case class BottomElimRule( subProof: NDProof, mainFormula: Formula )
extends UnaryNDProof with CommonRule {
val bottom = premise( Suc( 0 ) )
require( bottom == Bottom(), s"Formula $bottom is not ⊥." )
override def auxIndices = Seq( Seq( Suc( 0 ) ) )
override def name = "⊥:e"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
/**
* An NDProof ending with a universal quantifier introduction:
* <pre>
* (π)
* Γ :- A[x\\α]
* -------------∀:i
* Γ :- ∀x.A
* </pre>
* This rule is only applicable if the eigenvariable condition is satisfied: α must not occur freely in Γ.
*
* @param subProof The proof π.
* @param eigenVariable The variable α.
* @param quantifiedVariable The variable x.
*/
case class ForallIntroRule( subProof: NDProof, eigenVariable: Var, quantifiedVariable: Var )
extends UnaryNDProof with CommonRule with Eigenvariable {
val ( auxFormula, context ) = premise focus Suc( 0 )
//eigenvariable condition
if ( freeVariables( context ) contains eigenVariable )
throw NDRuleCreationException( s"Eigenvariable condition is violated: $context contains $eigenVariable" )
def subFormula = BetaReduction.betaNormalize( Substitution( eigenVariable, quantifiedVariable )( auxFormula ) )
if ( BetaReduction.betaNormalize( Substitution( quantifiedVariable, eigenVariable )( subFormula ) ) != auxFormula )
throw NDRuleCreationException( s"Aux formula should be $subFormula[$quantifiedVariable\\\\$eigenVariable] = " +
BetaReduction.betaNormalize( Substitution( quantifiedVariable, eigenVariable )( subFormula ) )
+ s", but is $auxFormula." )
def mainFormula = BetaReduction.betaNormalize( All( quantifiedVariable, subFormula ) )
override def name = "∀:i"
def auxIndices = Seq( Seq( Suc( 0 ) ) )
override def mainFormulaSequent = Sequent() :+ mainFormula
}
object ForallIntroRule extends ConvenienceConstructor( "ForallIntroRule" ) {
/**
* Convenience constructor for ∀:i that, given a main formula and an eigenvariable, will try to
* construct an inference with that instantiation.
*
* @param subProof The subproof.
* @param mainFormula The formula to be inferred. Must be of the form ∀x.A.
* @param eigenVariable A variable α such that A[α] occurs in the premise.
* @return
*/
def apply( subProof: NDProof, mainFormula: Formula, eigenVariable: Var ): ForallIntroRule = {
if ( freeVariables( mainFormula ) contains eigenVariable ) {
throw NDRuleCreationException( s"Illegal main formula: Eigenvariable $eigenVariable is free in $mainFormula." )
} else mainFormula match {
case All( v, subFormula ) =>
val auxFormula = Substitution( v, eigenVariable )( subFormula )
val premise = subProof.endSequent
val ( _, indices ) = findAndValidate( premise )( Seq(), auxFormula )
val p = ForallIntroRule( subProof, eigenVariable, v )
assert( p.mainFormula == mainFormula )
p
case _ => throw NDRuleCreationException( s"Proposed main formula $mainFormula is not universally quantified." )
}
}
}
/**
* An NDProof ending with a universal quantifier elimination:
* <pre>
* (π)
* Γ :- ∀x.A
* -------------∀:e
* Γ :- A[x\\t]
* </pre>
*
* @param subProof The proof π.
* @param term The term t.
*/
case class ForallElimRule( subProof: NDProof, term: Expr )
extends UnaryNDProof with CommonRule {
val universal = premise( Suc( 0 ) )
val mainFormula = universal match {
case All( v, subFormula ) => Substitution( v, term )( subFormula )
case _ =>
throw NDRuleCreationException( s"Proposed main formula $universal is not universally quantified." )
}
override def name = "∀:e"
def auxIndices = Seq( Seq( Suc( 0 ) ) )
override def mainFormulaSequent = Sequent() :+ mainFormula
}
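/* Illustrative sketch: renaming a bound variable by eliminating with a fresh
 * eigenvariable and re-introducing it. Assumptions: `f` is universally
 * quantified, and `alpha` and `y` are fresh variables of the quantified type.
 */
object ForallRenameExample {
  def rename( f: Formula, alpha: Var, y: Var ): NDProof =
    ForallIntroRule( ForallElimRule( LogicalAxiom( f ), alpha ), alpha, y ) // ∀x.F :- ∀y.F[x\\y]
}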
object ForallElimBlock {
/**
* Applies the ForallElim-rule n times.
*
* The rule:
* <pre>
* (π)
* Γ :- ∀x1,..,xN.A
* ---------------------------------- (∀_e x n)
* Γ :- A[x1\\t1,...,xN\\tN]
*
* where t1,...,tN are terms.
* </pre>
*
* @param subProof The proof π with (Γ :- ∀x1,..,xN.A) as the bottommost sequent.
* @param terms The list of terms with which to instantiate main. The caller of this
* method has to ensure the correctness of these terms, and, specifically, that
* ∀x1,..,xN.A indeed occurs at the bottom of the proof π.
*/
def apply( subProof: NDProof, terms: Seq[Expr] ): NDProof =
terms.foldLeft( subProof )( ( acc, t ) => nd.ForallElimRule( acc, t ) )
}
/**
* An NDProof ending with an existential quantifier introduction:
* <pre>
* (π)
* Γ :- A[x\\t]
* ------------∃:i
* Γ :- ∃x.A
* </pre>
*
* @param subProof The proof π.
* @param A The formula A.
* @param term The term t.
* @param v The variable x.
*/
case class ExistsIntroRule( subProof: NDProof, A: Formula, term: Expr, v: Var )
extends UnaryNDProof with CommonRule {
if ( premise( Suc( 0 ) ) != BetaReduction.betaNormalize( Substitution( v, term )( A ) ) )
throw NDRuleCreationException( s"Substituting $term for $v in $A does not result in ${premise( Suc( 0 ) )}." )
val mainFormula = BetaReduction.betaNormalize( Ex( v, A ) )
override def name = "∃:i"
def auxIndices = Seq( Seq( Suc( 0 ) ) )
override def mainFormulaSequent = Sequent() :+ mainFormula
}
object ExistsIntroRule extends ConvenienceConstructor( "ExistsIntroRule" ) {
/**
* Convenience constructor for ∃:i that, given a main formula and a term, will try to
* construct an inference with that instantiation.
*
* @param subProof The subproof.
* @param mainFormula The formula to be inferred. Must be of the form ∃x.A.
* @param term A term t such that A[t] occurs in the premise.
* @return
*/
def apply( subProof: NDProof, mainFormula: Formula, term: Expr ): ExistsIntroRule = {
val premise = subProof.endSequent
mainFormula match {
case Ex( v, subFormula ) =>
val auxFormula = BetaReduction.betaNormalize( Substitution( v, term )( subFormula ) )
if ( premise( Suc( 0 ) ) == auxFormula ) {
val p = ExistsIntroRule( subProof, subFormula, term, v )
assert( p.mainFormula == mainFormula )
p
} else throw NDRuleCreationException( s"Formula $auxFormula is not the succedent of $premise." )
case _ => throw NDRuleCreationException( s"Proposed main formula $mainFormula is not existentially quantified." )
}
}
/**
* Convenience constructor for ∃:i that, given a main formula, will try to construct an inference with that formula.
*
* @param subProof The subproof.
* @param mainFormula The formula to be inferred. Must be of the form ∃x.A[t\\x]. The premise must contain A[t].
* @return
*/
def apply( subProof: NDProof, mainFormula: Formula ): ExistsIntroRule = mainFormula match {
case Ex( v, subFormula ) =>
val pos = subFormula.find( v ).head
val t = if ( subProof.endSequent( Suc( 0 ) ).isDefinedAt( pos ) )
subProof.endSequent( Suc( 0 ) ).get( pos ).get
else
throw NDRuleCreationException( s"Premise is not defined at $pos." )
val p = apply( subProof, mainFormula, t )
assert( p.mainFormula == mainFormula )
p
case _ => throw NDRuleCreationException( s"Proposed main formula $mainFormula is not existentially quantified." )
}
}
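/* Illustrative sketch: introducing an existential from a substitution
 * instance. Assumptions: `a` is β-normal and `x` is a variable of the same
 * type as `t`; the names are arbitrary.
 */
object ExistsIntroExample {
  def witness( a: Formula, x: Var, t: Expr ): NDProof =
    ExistsIntroRule( LogicalAxiom( Substitution( x, t )( a ) ), a, t, x ) // a[x\\t] :- ∃x.a
}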
/**
* An NDProof ending with an existential quantifier elimination:
* <pre>
* (π1) (π2)
* Γ :- ∃x.A Π, A[x\\α] :- B
* ----------------------------∃:e
* Γ, Π :- B
* </pre>
 * This rule is only applicable if the eigenvariable condition is satisfied: α must not occur freely in Π or B.
*
* @param leftSubProof The proof π1.
* @param rightSubProof The proof π2.
* @param aux The index of A[x\\α].
* @param eigenVariable The variable α.
*/
case class ExistsElimRule( leftSubProof: NDProof, rightSubProof: NDProof, aux: SequentIndex, eigenVariable: Var )
extends BinaryNDProof with CommonRule with Eigenvariable {
validateIndices( rightPremise, Seq( aux ) )
val ( existentialFormula, leftContext ) = leftPremise focus Suc( 0 )
val ( auxFormula, rightContext ) = rightPremise focus aux
//eigenvariable condition
if ( freeVariables( rightContext ) contains eigenVariable )
throw NDRuleCreationException( s"Eigenvariable condition is violated: $rightContext contains $eigenVariable" )
val ( quantifiedVariable, subFormula ) = existentialFormula match {
case Ex( variable, sub ) => ( variable, sub )
case _ =>
throw NDRuleCreationException( s"Formula $existentialFormula is not existentially quantified." )
}
val auxShouldBe = BetaReduction.betaNormalize( Substitution( quantifiedVariable, eigenVariable )( subFormula ) )
if ( auxShouldBe != auxFormula ) throw NDRuleCreationException( s"Formula $auxFormula should be $auxShouldBe." )
val mainFormula = rightPremise( Suc( 0 ) )
override def name = "∃:e"
def auxIndices = Seq( Seq( Suc( 0 ) ), Seq( aux, Suc( 0 ) ) )
override def mainFormulaSequent = Sequent() :+ mainFormula
}
object ExistsElimRule extends ConvenienceConstructor( "ExistsElimRule" ) {
/**
* Convenience constructor for ∃:e that, given an eigenvariable, will try to
* construct an inference with that instantiation.
*
* @param leftSubProof The proof π1.
* @param rightSubProof The proof π2.
* @param eigenVariable A variable α such that A[α] occurs in the premise.
* @return
*/
def apply( leftSubProof: NDProof, rightSubProof: NDProof, eigenVariable: Var ): ExistsElimRule = {
val existentialFormula = leftSubProof.conclusion( Suc( 0 ) )
existentialFormula match {
case Ex( v, subFormula ) =>
val auxFormula = Substitution( v, eigenVariable )( subFormula )
val premise = rightSubProof.endSequent
val ( indices, _ ) = findAndValidate( premise )( Seq( auxFormula ), Suc( 0 ) )
ExistsElimRule( leftSubProof, rightSubProof, Ant( indices( 0 ) ), eigenVariable )
case _ => throw NDRuleCreationException( s"Formula $existentialFormula is not existentially quantified." )
}
}
/**
* Convenience constructor for ∃:e that, given only its subproofs, will try to
* construct an inference with that formula.
*
* @param leftSubProof The proof π1.
* @param rightSubProof The proof π2.
* @return
*/
def apply( leftSubProof: NDProof, rightSubProof: NDProof ): ExistsElimRule = {
val existentialFormula = leftSubProof.conclusion( Suc( 0 ) )
existentialFormula match {
case Ex( v, subFormula ) => apply( leftSubProof, rightSubProof, v )
case _ =>
throw NDRuleCreationException( s"Formula $existentialFormula is not existentially quantified." )
}
}
}
/**
* An NDProof consisting of an axiom from a theory:
* <pre>
* --------th
* :- A
* </pre>
*
* @param mainFormula The axiom A.
*/
case class TheoryAxiom( mainFormula: Formula ) extends InitialSequent {
def conclusion = NDSequent( Seq(), mainFormula )
override def name = "th"
}
/**
* An NDProof ending with elimination of equality:
* <pre>
* (π1) (π2)
* Γ :- s = t Π :- A[x\\s]
* ------------------------------eq:e
* Γ,Π :- A[x\\t]
*
* </pre>
*
* @param leftSubProof The subproof π1.
* @param rightSubProof The subproof π2.
* @param formulaA The formula A.
* @param variablex The variable x.
*/
case class EqualityElimRule( leftSubProof: NDProof, rightSubProof: NDProof, formulaA: Formula, variablex: Var )
extends BinaryNDProof with CommonRule {
val eqFormula = leftPremise( Suc( 0 ) )
val ( s, t ) = eqFormula match {
case Eq( s, t ) => ( s, t )
case _ => throw NDRuleCreationException( s"Formula $eqFormula is not an equation." )
}
val substitution1 = Substitution( variablex, s )
val substitution2 = Substitution( variablex, t )
val auxFormula = rightPremise( Suc( 0 ) )
val mainFormula = if ( auxFormula == BetaReduction.betaNormalize( substitution1( formulaA ) ) )
BetaReduction.betaNormalize( substitution2( formulaA ) )
else if ( auxFormula == BetaReduction.betaNormalize( substitution2( formulaA ) ) )
BetaReduction.betaNormalize( substitution1( formulaA ) )
else
throw NDRuleCreationException(
s"Formula $auxFormula is not equal to $formulaA with either " +
s"substitution $substitution1 or $substitution2 applied to it." )
def auxIndices = Seq( Seq( Suc( 0 ) ), Seq( Suc( 0 ) ) )
override def name = "eq:e"
override def mainFormulaSequent = Sequent() :+ mainFormula
}
object EqualityElimRule extends ConvenienceConstructor( "EqualityElimRule" ) {
/**
* Convenience constructor for eq:e.
* Given only the subproofs, it will attempt to create an inference with this.
*
* @param leftSubProof The left subproof.
* @param rightSubProof The right subproof.
* @return
*/
def apply( leftSubProof: NDProof, rightSubProof: NDProof ): EqualityElimRule = {
val eqFormula = leftSubProof.conclusion( Suc( 0 ) )
val auxFormula = rightSubProof.conclusion( Suc( 0 ) )
val ( s, _ ) = eqFormula match {
case Eq( s, t ) => ( s, t )
case _ => throw NDRuleCreationException( s"Formula $eqFormula is not an equation." )
}
val repContext = replacementContext.abstractTerm( auxFormula )( s )
val formulaA = repContext.term.asInstanceOf[Formula]
val variablex = repContext.variable.asInstanceOf[Var]
new EqualityElimRule( leftSubProof, rightSubProof, formulaA, variablex )
}
}
/**
 * An NDProof that consists of the introduction of an equality.
* <pre>
* ----------eq:i
* :- t = t
*
* </pre>
*
* @param t The term t.
*/
case class EqualityIntroRule( t: Expr ) extends InitialSequent {
override def name = "eq:i"
override def conclusion = NDSequent( Seq(), Eq( t, t ) )
def mainFormula = Eq( t, t )
}
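/* Illustrative sketch: symmetry of equality via eq:i and eq:e. Assumptions:
 * `x` is a variable of the same type as `s` and `t` that occurs in neither;
 * the names are arbitrary.
 */
object EqualitySymmetryExample {
  def symmetry( s: Expr, t: Expr, x: Var ): NDProof =
    EqualityElimRule( LogicalAxiom( Eq( s, t ) ), EqualityIntroRule( s ), Eq( x, s ), x ) // s = t :- t = s
}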
/**
* Proof that a given data type constructor c preserves a formula F:
*
* <pre>
* (π)
* F(x,,1,,), F(x,,2,,), ..., F(x,,n,,), Γ :- F(c(x,,1,,,...,x,,n,,,y,,1,,,...,y,,n,,))
* </pre>
*
* The variables x,,i,, and y,,i,, are eigenvariables; x,,i,, are the eigenvariables of the
* same type as the inductive data type, y,,i,, are the other arguments of the constructor c.
* They can come in any order in the constructor.
*
* @param proof The NDProof ending in the sequent of this case.
* @param constructor The constructor c of the inductive data type that we're considering.
* @param hypotheses Indices of F(x,,1,,), ..., F(x,,n,,)
* @param eigenVars The eigenvariables of this case: x,,1,,, ..., x,,n,,, y,,1,,, ..., y,,n,,
* (these need to correspond to the order in c)
*/
case class InductionCase( proof: NDProof, constructor: Const,
hypotheses: Seq[SequentIndex], eigenVars: Seq[Var] ) {
val FunctionType( indTy, fieldTypes ) = constructor.ty
require( fieldTypes == eigenVars.map( _.ty ) )
val hypVars = eigenVars filter { _.ty == indTy }
require( hypotheses.size == hypVars.size )
hypotheses foreach { hyp =>
require( hyp.isAnt && proof.endSequent.isDefinedAt( hyp ) )
}
val term = constructor( eigenVars: _* )
require( proof.endSequent.isDefinedAt( Suc( 0 ) ) )
}
/**
* An NDProof ending with an induction rule:
* <pre>
* (π,,1,,) (π,,2,,) (π,,n,,)
* case 1 case 2 ... case n
* -------------------------------------(ind)
* Γ :- F(t: indTy)
* </pre>
*
* This induction rule can handle inductive data types.
* The cases are proofs that the various type constructors preserve the formula we want to prove.
* They are provided via the [[InductionCase]] class.
*
* @param cases A sequence of proofs showing that each type constructor preserves the validity of the main formula.
 * @param formula The formula we want to prove via induction.
 * @param term The term of inductive type at which the formula is proved.
*/
case class InductionRule( cases: Seq[InductionCase], formula: Abs, term: Expr ) extends CommonRule {
val Abs( quant @ Var( _, indTy ), qfFormula ) = formula
require( term.ty == indTy )
cases foreach { c =>
require( c.indTy == indTy )
c.hypotheses.lazyZip( c.hypVars ) foreach { ( hyp, eigen ) =>
require( c.proof.endSequent( hyp ) == Substitution( quant -> eigen )( qfFormula ) )
}
require( c.proof.endSequent( Suc( 0 ) ) == Substitution( quant -> c.term )( qfFormula ) )
}
require( freeVariables( contexts.flatMap( _.elements ) :+ formula ) intersect
cases.flatMap( _.eigenVars ).toSet isEmpty )
val mainFormula = BetaReduction.betaNormalize( formula( term ).asInstanceOf[Formula] )
override protected def mainFormulaSequent = Sequent() :+ mainFormula
override def auxIndices: Seq[Seq[SequentIndex]] = cases map { c => c.hypotheses :+ Suc( 0 ) }
override def immediateSubProofs: Seq[NDProof] = cases map { _.proof }
private lazy val product = cases.flatMap { _.productIterator } :+ formula :+ term
override def productArity = product.size
override def productElement( n: Int ) = product( n )
override def name = "ind"
def eigenVariables = cases.flatMap( _.eigenVars ).toSet
}
/**
* An NDProof ending with excluded middle:
* <pre>
* (π1) (π2)
* Γ, A :- B Π, ¬A :- B
* -------------------------EM
* Γ, Π :- B
* </pre>
*
* @param leftSubProof The proof π1.
* @param aux1 The index of A.
* @param rightSubProof The proof π2.
* @param aux2 The index of ¬A.
*/
case class ExcludedMiddleRule( leftSubProof: NDProof, aux1: SequentIndex, rightSubProof: NDProof, aux2: SequentIndex )
extends BinaryNDProof with CommonRule {
validateIndices( leftPremise, Seq( aux1 ) )
validateIndices( rightPremise, Seq( aux2 ) )
val formulaA = leftPremise( aux1 )
val formulaNegA = rightPremise( aux2 )
require( Neg( formulaA ) == formulaNegA, s"Formula $formulaNegA is not the negation of $formulaA." )
val leftB = leftPremise( Suc( 0 ) )
val rightB = rightPremise( Suc( 0 ) )
val mainFormula = if ( leftB == rightB ) leftB else
throw NDRuleCreationException( s"Formula $leftB is not equal to $rightB." )
override def name = "EM"
def auxIndices = Seq( Seq( aux1, Suc( 0 ) ), Seq( aux2, Suc( 0 ) ) )
override def mainFormulaSequent = Sequent() :+ mainFormula
}
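/* Illustrative sketch: the law of excluded middle :- a ∨ ¬a, obtained by EM
 * from the two disjunction introductions. The names are arbitrary; `a` may
 * be any formula.
 */
object ExcludedMiddleExample {
  def tertiumNonDatur( a: Formula ): NDProof =
    ExcludedMiddleRule(
      OrIntro1Rule( LogicalAxiom( a ), Neg( a ) ), Ant( 0 ),
      OrIntro2Rule( LogicalAxiom( Neg( a ) ), a ), Ant( 0 ) ) // :- a ∨ ¬a
}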
/**
* An NDProof ending with a definition
*
* <pre>
* (π)
* Γ :- A[φ]
* -----------d
* Γ :- A[c]
* </pre>
*
* @param subProof The proof π.
* @param mainFormula The formula A[c].
*/
case class DefinitionRule( subProof: NDProof, mainFormula: Formula ) extends UnaryNDProof with CommonRule {
override def name = "d"
override def auxIndices = Seq( Seq( Suc( 0 ) ) )
override def mainFormulaSequent = Sequent() :+ mainFormula
}
/**
* Class for reducing boilerplate code in ND companion objects.
*
* @param longName The long name of the rule.
*/
class ConvenienceConstructor( val longName: String ) {
/**
* Create an NDRuleCreationException with a message starting
* with "Cannot create longName: ..."
*
* @param text The rest of the message.
* @return
*/
protected def NDRuleCreationException( text: String ): NDRuleCreationException =
new NDRuleCreationException( longName, text )
def findIndicesOrFormulasInPremise( premise: HOLSequent )(
antIndicesFormulas: Seq[IndexOrFormula], sucIndexFormula: IndexOrFormula ): ( Seq[Formula], Seq[Int], Formula, Int ) = {
    val antReservedIndices = antIndicesFormulas.foldLeft( scala.collection.mutable.HashSet.empty[Int] ) { ( acc, e ) =>
e match {
case IsIndex( Ant( i ) ) => acc ++ Set( i )
case IsIndex( i: Suc ) => throw NDRuleCreationException( s"Index $i should be in the antecedent." )
case IsFormula( _ ) => acc
}
}
val ant = for ( e <- antIndicesFormulas ) yield {
e match {
case IsIndex( idx @ Ant( i ) ) =>
antReservedIndices += i
val f = premise( idx )
( f, i )
case IsFormula( f ) =>
var i = premise.antecedent.indexOf( f )
while ( antReservedIndices contains i )
i = premise.antecedent.indexOf( f, i + 1 )
if ( i != -1 )
antReservedIndices += i
( f, i )
case IsIndex( i: Suc ) => throw NDRuleCreationException( s"Index $i should be in the antecedent." )
}
}
val suc = sucIndexFormula match {
case IsIndex( Suc( i: Int ) ) =>
( premise( Suc( i ) ), i )
case IsFormula( f ) =>
val i = premise.succedent.indexOf( f )
( f, i )
case IsIndex( i: Ant ) => throw NDRuleCreationException( s"Index $i should be in the succedent." )
}
val ( antFormulas, antIndices ) = ant.unzip
val ( sucFormula, sucIndex ) = suc
( antFormulas, antIndices, sucFormula, sucIndex )
}
/**
* Throws an exception if the output of findFormulasInPremise contains any -1 entries.
*
* @param premise The sequent in question.
* @param antFormulas The list of formulas in the antecedent.
* @param antIndices The list of indices corresponding to antFormulas.
* @return
*/
protected def validateIndices( premise: HOLSequent )( antFormulas: Seq[Formula], antIndices: Seq[Int] ) = {
val antMap = scala.collection.mutable.HashMap.empty[Formula, Int]
for ( ( f, i ) <- antFormulas zip antIndices ) {
val count = antMap.getOrElse( f, 0 )
if ( i == -1 )
throw NDRuleCreationException( s"Formula $f only found $count times in antecedent of $premise." )
antMap += f -> ( count + 1 )
}
}
/**
   * Combines findIndicesOrFormulasInPremise and validateIndices. That is, it returns the list of antecedent
   * indices together with the succedent index, and throws an exception if any antecedent index is -1.
*
* @param premise The sequent in question.
* @param antIndicesFormulas The list of indices or formulas in the antecedent.
* @param sucIndexFormula The index or formula in the succedent.
* @return
*/
protected def findAndValidate( premise: HOLSequent )(
antIndicesFormulas: Seq[IndexOrFormula], sucIndexFormula: IndexOrFormula ): ( Seq[Int], Int ) = {
val ( antFormulas, antIndices, sucFormula, sucIndex ) =
findIndicesOrFormulasInPremise( premise )( antIndicesFormulas, sucIndexFormula )
validateIndices( premise )( antFormulas, antIndices )
( antIndices, sucIndex )
}
}
class NDRuleCreationException( name: String, message: String ) extends Exception( s"Cannot create $name: " + message )
| gapt/gapt | core/src/main/scala/gapt/proofs/nd/nd.scala | Scala | gpl-3.0 | 44,342 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct
import play.api.libs.json.{Format, Json}
import uk.gov.hmrc.ct.box.formats._
package object formats {
implicit val cato01Format: Format[CATO01] = new IntegerFormat[CATO01](CATO01.apply)
implicit val cato02Format: Format[CATO02] = new IntegerFormat[CATO02](CATO02.apply)
implicit val cato03Format: Format[CATO03] = new IntegerFormat[CATO03](CATO03.apply)
implicit val CATO04Format: Format[CATO04] = new BigDecimalFormat[CATO04](CATO04.apply)
implicit val cato10Format: Format[CATO10] = new BooleanFormat[CATO10](CATO10.apply)
implicit val cato11Format: Format[CATO11] = new OptionalStringFormat[CATO11](CATO11.apply)
implicit val cato12Format: Format[CATO12] = new OptionalStringFormat[CATO12](CATO12.apply)
implicit val cato13Format: Format[CATO13] = new IntegerFormat[CATO13](CATO13.apply)
implicit val cato14Format: Format[CATO14] = new IntegerFormat[CATO14](CATO14.apply)
implicit val cato15Format: Format[CATO15] = new IntegerFormat[CATO15](CATO15.apply)
implicit val cato16Format: Format[CATO16] = new IntegerFormat[CATO16](CATO16.apply)
implicit val companyTypeFormat: Format[FilingCompanyType] = Json.format[FilingCompanyType]
implicit val abbreviatedAccountsFilingFormat: Format[AbbreviatedAccountsFiling] = new BooleanFormat[AbbreviatedAccountsFiling](AbbreviatedAccountsFiling.apply)
implicit val abridgedFilingFormat: Format[AbridgedFiling] = new BooleanFormat[AbridgedFiling](AbridgedFiling.apply)
implicit val companiesHouseFilingFormat: Format[CompaniesHouseFiling] = new BooleanFormat[CompaniesHouseFiling](CompaniesHouseFiling.apply)
implicit val hmrcFilingFormat: Format[HMRCFiling] = new BooleanFormat[HMRCFiling](HMRCFiling.apply)
implicit val companiesHouseSubmittedFormat: Format[CompaniesHouseSubmitted] = new BooleanFormat[CompaniesHouseSubmitted](CompaniesHouseSubmitted.apply)
implicit val hmrcSubmittedFormat: Format[HMRCSubmitted] = new BooleanFormat[HMRCSubmitted](HMRCSubmitted.apply)
implicit val hmrcAmendmentFormat: Format[HMRCAmendment] = new BooleanFormat[HMRCAmendment](HMRCAmendment.apply)
implicit val microEntityFilingFormat: Format[MicroEntityFiling] = new BooleanFormat[MicroEntityFiling](MicroEntityFiling.apply)
implicit val statutoryAccountsFilingFormat: Format[StatutoryAccountsFiling] = new BooleanFormat[StatutoryAccountsFiling](StatutoryAccountsFiling.apply)
implicit val utrFormat: Format[UTR] = new StringFormat[UTR](UTR.apply)
implicit val cato21Format: Format[CATO21] = new BigDecimalFormat[CATO21](CATO21.apply)
implicit val cato22Format: Format[CATO22] = new BigDecimalFormat[CATO22](CATO22.apply)
implicit val cato24Format: Format[CATO24] = new OptionalBooleanFormat[CATO24](CATO24.apply)
implicit val CountryOfRegistrationFormat: Format[CountryOfRegistration] = new OptionalStringFormat[CountryOfRegistration](CountryOfRegistration.apply)
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/formats/package.scala | Scala | apache-2.0 | 3,504 |
/*
* Copyright (c) 2012 Orderly Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package co.orderly.ga2csv
// Java
import java.io.File
// Scala
import xml.XML
import collection.mutable.ListBuffer
// Google Analytics GData library
import com.google.gdata.data.analytics.DataFeed
import com.google.gdata.client.analytics.{DataQuery, AnalyticsService}
/**
 * Ga2CsvMain is the command-line tool which wraps our GoogleAnalyticsExporter.
*/
object Ga2CsvMain {
  def main(args: Array[String]) {
// -------------------------------------------------------------------------------------------------------------------
// Handle command line arguments
// -------------------------------------------------------------------------------------------------------------------
    // Check that two arguments have been supplied i.e. a config file and an output file
checkArgs(args)
val configFile = args(0)
val outputFile = args(1)
// Throw exception if output file already exists
checkExists(outputFile)
// -------------------------------------------------------------------------------------------------------------------
// Execute Google Analytics export
// -------------------------------------------------------------------------------------------------------------------
// Initialize and configure the exporter
val gae = new GoogleAnalyticsExporter(configFile)
// Setup and run query, dumping to outputFile
gae.exportData(outputFile)
}
// -------------------------------------------------------------------------------------------------------------------
// Helper methods for command-line handling
// -------------------------------------------------------------------------------------------------------------------
/**
* Check we're not overwriting an existing file
*/
private def checkExists(filename: String) {
val outputFile = new File(filename)
if (outputFile.exists) throw new IllegalArgumentException("Error: Output file %s already exists".format(filename))
}
/**
* Check that the number of arguments supplied is correct
* (i.e. 2, one for input file, one for output file)
*/
def checkArgs(args: Array[String]) {
    if (args.length != 2) throw new IllegalArgumentException("Two arguments required, but %s supplied. Please specify input and output files".format(args.length))
}
} | keplar/google-analytics-export-to-csv | src/main/scala/co/orderly/ga2csv/MainApplication.scala | Scala | apache-2.0 | 3,001 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5.client.execution
import com.ibm.spark.kernel.protocol.v5.content._
import com.ibm.spark.utils.LogLike
case class DeferredExecution() extends LogLike {
private var executeResultCallbacks: List[(ExecuteResult) => Unit] = Nil
private var streamCallbacks: List[(StreamContent) => Unit] = Nil
private var errorCallbacks: List[(ExecuteReplyError) => Unit] = Nil
private var successCallbacks: List[(ExecuteReplyError) => Unit] = Nil
private var executeResultOption: Option[ExecuteResult] = None
private var executeReplyOption: Option[ExecuteReply] = None
/**
* Registers a callback for handling ExecuteResult messages.
* This {@param callback} will run once on successful code execution and
* then be unregistered. If {@param callback} is registered after the result
* has been returned it will be invoked immediately.
* In the event of a failure {@param callback} will never be called.
* @param callback A callback function, which will be invoked at most once,
* with an ExecuteResult IPython message
* @return The DeferredExecution with the given callback registered.
*/
def onResult(callback: (ExecuteResult) => Unit): DeferredExecution = {
this.executeResultCallbacks = callback :: this.executeResultCallbacks
processCallbacks()
this
}
/**
* Registers a callback for handling StreamContent messages.
   * This {@param callback} can be called 0 or more times. If the
* {@param callback} is registered after StreamContent messages have been
* emitted, the {@param callback} will only receive messages emitted after the
* point of registration.
* @param callback A callback function, which can be invoked 0 or more times,
* with Stream Ipython messages
* @return The DeferredExecution with the given callback registered.
*/
def onStream(callback: (StreamContent) => Unit): DeferredExecution = {
this.streamCallbacks = callback :: this.streamCallbacks
this
}
/**
* Registers a callback for handling ExecuteReply messages when there is an
* error during code execution. This {@param callback} will run once on failed
* code execution and then be unregistered. If {@param callback} is registered
* after the error reply has been returned it will be invoked immediately.
* In the event of successful code execution {@param callback} will never be
* called.
* @param callback A callback function, which will be invoked at most once,
* with an ExecuteReply IPython message
* @return The DeferredExecution with the given callback registered.
*/
def onError(callback: (ExecuteReplyError) => Unit): DeferredExecution = {
this.errorCallbacks = callback :: this.errorCallbacks
processCallbacks()
this
}
/**
   * Registers a callback to be notified when code execution has completed
   * successfully. {@param callback} will not be called if an error has been
   * encountered; use {@method onError} for that case.
* @param callback The callback to register.
* @return This deferred execution
*/
def onSuccess(callback: (ExecuteReplyError) => Unit): DeferredExecution = {
this.successCallbacks = callback :: this.successCallbacks
processCallbacks()
this
}
// In the next three methods we need to clear each list.
// This prevents methods from getting called again when
// a callback is registered after processing has happened
private def callErrorCallbacks(executeReplyError: ExecuteReplyError) = {
this.errorCallbacks.foreach(_(executeReplyError))
this.errorCallbacks = Nil
}
private def callSuccessCallbacks(executeReplyOk: ExecuteReplyOk) = {
this.successCallbacks.foreach(_(executeReplyOk))
this.successCallbacks = Nil
}
private def callResultCallbacks(executeResult: ExecuteResult) = {
this.executeResultCallbacks.foreach(_(executeResult))
this.executeResultCallbacks = Nil
}
private def processCallbacks(): Unit = {
(executeReplyOption, executeResultOption) match {
case (Some(executeReply), Some(executeResult)) if executeReply.status.equals("error") =>
callErrorCallbacks(executeReply)
case (Some(executeReply), Some(executeResult)) if executeReply.status.equals("ok") =>
callResultCallbacks(executeResult)
callSuccessCallbacks(executeReply)
case (Some(executeReply), None) if executeReply.status.equals("ok") =>
callSuccessCallbacks(executeReply)
case value =>
logger.debug(
s"""|Did not invoke client callbacks.
|ExecuteReply was: ${executeReplyOption}
|ExecuteResult was: ${executeResultOption}
""".stripMargin.trim)
}
}
def resolveResult(executeResultMessage: ExecuteResult): Unit = {
this.executeResultOption = Some(executeResultMessage)
processCallbacks()
}
def resolveReply(executeReplyMessage: ExecuteReply): Unit = {
this.executeReplyOption = Some(executeReplyMessage)
processCallbacks()
}
def emitStreamContent(streamContent: StreamContent): Unit = {
this.streamCallbacks.foreach(streamCallback => {
streamCallback(streamContent)
})
}
}
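/* Illustrative usage (sketch; the callback bodies are arbitrary):
 *
 *   DeferredExecution()
 *     .onStream(stream => println(stream))   // fires zero or more times
 *     .onResult(result => println(result))   // fires at most once, on success
 *     .onError(reply => println(reply))      // fires at most once, on failure
 *
 * The client resolves the execution later via resolveReply/resolveResult;
 * result, success and error callbacks registered after resolution fire
 * immediately, while stream callbacks only see subsequent stream messages.
 */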
| yeghishe/spark-kernel | client/src/main/scala/com/ibm/spark/kernel/protocol/v5/client/execution/DeferredExecution.scala | Scala | apache-2.0 | 5,804 |
/**
* @author Claire Jaja
* @version 10/30/14
*
* Project Euler
* Problem 2
* Even Fibonacci numbers
*
* Each new term in the Fibonacci sequence is generated by adding
* the previous two terms.
 * By starting with 1 and 2, the first 10 terms will be:
* 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
* By considering the terms in the Fibonacci sequence
* whose values do not exceed four million,
* find the sum of the even-valued terms.
*/
object EvenFibonacciNumbers {
// do not exceed 4 million
val max = 4000000
def main(args: Array[String]) {
// set up first three terms
var previousPreviousTerm = 1
var previousTerm = 1
var currentTerm = 2
var sum = 0
while (currentTerm < max) {
if (currentTerm % 2 == 0) sum += currentTerm
previousPreviousTerm = previousTerm
previousTerm = currentTerm
currentTerm = previousTerm + previousPreviousTerm
}
println(sum)
}
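  // Illustrative alternative (sketch): the same sum via the Stream API of
  // this Scala era. `sumEvenFibs` is an arbitrary name and is not called
  // from main above.
  def sumEvenFibs: Int = {
    lazy val fibs: Stream[Int] = 1 #:: 2 #:: fibs.zip(fibs.tail).map { case (a, b) => a + b }
    fibs.takeWhile(_ < max).filter(_ % 2 == 0).sum
  }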
}
| clairejaja/project-euler | src/main/scala/problem2/EvenFibonacciNumbers.scala | Scala | mit | 922 |
/* __ *\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ **
** /____/\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\* */
package org.scalajs.testsuite.jsinterop
import scala.language.implicitConversions
import scala.scalajs.js
import scala.scalajs.runtime.RuntimeLong
import org.scalajs.jasmine.JasmineExpectation
import org.scalajs.jasminetest.{JasmineTest, TestSuiteContext}
import org.scalajs.testsuite.utils.ExpectExceptions
import scala.util.Try
/**
 * Tests the runtime Long implementation directly;
 * does not depend on the compiler's magic Long rewriting
*/
object RuntimeLongTest extends JasmineTest with ExpectExceptions {
import RuntimeLong.fromDouble
// Ask Jasmine to compare Longs by their pair of (lo, hi)
implicit def long2any(x: RuntimeLong): js.Any = js.Array(x.lo, x.hi)
// Short builders
def lg(lo: Int, hi: Int): RuntimeLong = new RuntimeLong(lo, hi)
def lg(i: Int): RuntimeLong = new RuntimeLong(i)
// Common values
val MaxVal = lg(0xffffffff, 0x7fffffff)
val MinVal = lg(0, 0x80000000)
val IntMaxVal = lg(Int.MaxValue)
val IntMinVal = lg(Int.MinValue)
val IntMaxValPlus1 = lg(0x80000000, 0)
val IntMinValMinus1 = lg(2147483647, -1)
val MaxSafeDouble = lg(-1, 2097151)
val TwoPow53 = lg(0, 2097152)
val MinSafeDouble = lg(1, -2097152)
val NegTwoPow53 = lg(0, -2097152)
describe("scala.scalajs.runtime.RuntimeLong") {
it("sanity of equality tests") {
expect(lg(123, 456)).toEqual(js.Array(123, 456))
expect(lg(123, 456)).toEqual(lg(123, 456))
expect(lg(123, 456)).not.toEqual(js.Array(123, 4))
expect(lg(123, 456)).not.toEqual(lg(123, 4))
expect(lg(123, 456)).not.toEqual(js.Array(1, 456))
expect(lg(123, 456)).not.toEqual(lg(1, 456))
expect(lg(123, 456)).not.toEqual(123)
}
it("equals(Any)") {
expect(lg(0, 0).equals(0: Any)).toBeFalsy
expect(lg(0, 0).equals(null: Any)).toBeFalsy
expect(lg(0, 0).equals(lg(0, 0): Any)).toBeTruthy
expect(lg(123, 456).equals(lg(123, 456): Any)).toBeTruthy
expect(lg(-123, 456).equals(lg(-123, 456): Any)).toBeTruthy
expect(lg(-123, -456).equals(lg(-123, -456): Any)).toBeTruthy
expect(lg(123, 456).equals(lg(-123, 456): Any)).toBeFalsy
expect(lg(123, 456).equals(lg(123, -456): Any)).toBeFalsy
expect(lg(-123, -456).equals(lg(123, -456): Any)).toBeFalsy
expect(lg(-123, -456).equals(lg(-123, 456): Any)).toBeFalsy
}
it("hashCode(), as specified in j.l.Long") {
expect(lg(0).hashCode()).toEqual(0)
expect(lg(-1).hashCode()).toEqual(0)
expect(lg(55).hashCode()).toEqual(55)
expect(lg(-12).hashCode()).toEqual(11)
expect(lg(10006548).hashCode()).toEqual(10006548)
expect(lg(-1098748).hashCode()).toEqual(1098747)
expect(lg(579906195, 461662560).hashCode()).toEqual(957662195)
expect(lg(-1403218312, 327367870).hashCode()).toEqual(-1075860794)
expect(lg(-1152051636, -274640221).hashCode()).toEqual(1425294575)
expect(lg(1026519507, -1379463549).hashCode()).toEqual(-1863811248)
expect(lg(363765329, -557842270).hashCode()).toEqual(-881942797)
expect(lg(21652572, 569942698).hashCode()).toEqual(548587254)
expect(lg(55820229, -1281708615).hashCode()).toEqual(-1328999812)
expect(lg(-1843678104, 89453422).hashCode()).toEqual(-1756412154)
expect(lg(-1928579430, 1836700344).hashCode()).toEqual(-529144798)
expect(lg(-181377900, 1335444084).hashCode()).toEqual(-1163319584)
expect(lg(1189983760, 1032146717).hashCode()).toEqual(2070477069)
expect(lg(-1982789145, 274636318).hashCode()).toEqual(-1718642695)
expect(lg(-2087901827, -1945935740).hashCode()).toEqual(260982265)
expect(lg(-1911332808, 1729620001).hashCode()).toEqual(-385578983)
expect(lg(-1920965295, 592125278).hashCode()).toEqual(-1362397169)
expect(lg(2017870028, 751907156).hashCode()).toEqual(1419211160)
expect(lg(1506336851, -933796127).hashCode()).toEqual(-1851816270)
expect(lg(-1747722429, -1855422773).hashCode()).toEqual(112959880)
expect(lg(-2139132623, -431847873).hashCode()).toEqual(1715333902)
expect(lg(739274932, -924496860).hashCode()).toEqual(-453690224)
expect(lg(-1482800071, 29485338).hashCode()).toEqual(-1503679197)
expect(lg(237609240, 2048220960).hashCode()).toEqual(1950154296)
expect(lg(-431092385, -1623412426).hashCode()).toEqual(2037562473)
expect(lg(2144172772, 1927987317).hashCode()).toEqual(220707473)
expect(lg(971459211, 1217334127).hashCode()).toEqual(1902658020)
expect(lg(-530209544, -763367967).hashCode()).toEqual(840583449)
expect(lg(-1322671605, -902331922).hashCode()).toEqual(2065572837)
expect(lg(1361976000, 1231329666).hashCode()).toEqual(407536450)
expect(lg(-96547475, 1640676759).hashCode()).toEqual(-1678479110)
expect(lg(1799144078, -936998300).hashCode()).toEqual(-1558558486)
expect(lg(221720683, -195204411).hashCode()).toEqual(-110470482)
expect(lg(2080474705, 1194291803).hashCode()).toEqual(992932874)
expect(lg(-1962255291, -228903623).hashCode()).toEqual(2035378556)
expect(lg(-1961045404, -1421226733).hashCode()).toEqual(542449527)
expect(lg(1762001719, -96661681).hashCode()).toEqual(-1824846728)
expect(lg(568630982, -458482587).hashCode()).toEqual(-985103709)
expect(lg(-1237704639, -1275053966).hashCode()).toEqual(37361715)
expect(lg(936273516, -1802824213).hashCode()).toEqual(-1555729529)
expect(lg(-870754516, -1755138351).hashCode()).toEqual(1534845437)
expect(lg(964079858, -332884522).hashCode()).toEqual(-715250396)
expect(lg(1769001167, 503396434).hashCode()).toEqual(2003953821)
expect(lg(811930233, 1365142270).hashCode()).toEqual(1631287431)
expect(lg(-280291442, 1136496326).hashCode()).toEqual(-1393125048)
expect(lg(439731659, 755060794).hashCode()).toEqual(926193137)
expect(lg(-561661919, -1701561506).hashCode()).toEqual(1141998463)
expect(lg(1556104387, 1080665841).hashCode()).toEqual(480895538)
expect(lg(1931061917, -1099252386).hashCode()).toEqual(-849143869)
expect(lg(2086961898, -298531087).hashCode()).toEqual(-1840233445)
expect(lg(-1148008529, -1186490352).hashCode()).toEqual(47538111)
expect(lg(807317094, 271251327).hashCode()).toEqual(540301593)
expect(lg(1077071399, 826295290).hashCode()).toEqual(1903332829)
expect(lg(781949710, -1637653074).hashCode()).toEqual(-1325859168)
expect(lg(1778433204, -839352494).hashCode()).toEqual(-1476869146)
expect(lg(-2038023199, -2088719372).hashCode()).toEqual(84316181)
expect(lg(-1764916235, -1980649039).hashCode()).toEqual(524038724)
expect(lg(-1796682086, 1148567289).hashCode()).toEqual(-794988445)
expect(lg(-1606200144, 320886535).hashCode()).toEqual(-1285356617)
expect(lg(755146140, 2028753842).hashCode()).toEqual(1441713710)
expect(lg(-1851453861, -2073516593).hashCode()).toEqual(365800340)
expect(lg(-543327214, -1587342674).hashCode()).toEqual(2130603708)
expect(lg(506958308, -1249713021).hashCode()).toEqual(-1414171289)
expect(lg(-2097389477, 1923820719).hashCode()).toEqual(-262714124)
expect(lg(-374932306, -523558320).hashCode()).toEqual(158195454)
expect(lg(-902905695, -925752196).hashCode()).toEqual(50128093)
expect(lg(-397013030, 646399757).hashCode()).toEqual(-825145129)
expect(lg(1764398539, -956440075).hashCode()).toEqual(-1344834498)
expect(lg(-1750710329, 1852419689).hashCode()).toEqual(-103814738)
expect(lg(-1664538473, 864969320).hashCode()).toEqual(-1354282241)
expect(lg(-500471847, -1312439708).hashCode()).toEqual(1408148925)
expect(lg(14748928, 1899600418).hashCode()).toEqual(1910019874)
expect(lg(-1985642880, -431011584).hashCode()).toEqual(1877620608)
expect(lg(494530531, -200582329).hashCode()).toEqual(-378358620)
expect(lg(-2067225228, -1718331081).hashCode()).toEqual(492633155)
expect(lg(-1799546135, 897340901).hashCode()).toEqual(-1581166836)
expect(lg(25821759, 200092463).hashCode()).toEqual(174532880)
expect(lg(403690141, -1032813241).hashCode()).toEqual(-629188646)
expect(lg(-1843541251, -308529236).hashCode()).toEqual(2139225425)
expect(lg(1643311840, 1780391559).hashCode()).toEqual(200043623)
expect(lg(1531597671, 764172997).hashCode()).toEqual(1992690082)
expect(lg(638938496, 182932582).hashCode()).toEqual(754072038)
expect(lg(309356043, -440275494).hashCode()).toEqual(-139359279)
expect(lg(-541225182, 1128039519).hashCode()).toEqual(-1669264515)
expect(lg(-387355169, -378598204).hashCode()).toEqual(25583899)
expect(lg(1787244135, 103129337).hashCode()).toEqual(1822592670)
expect(lg(-1654639624, -890602930).hashCode()).toEqual(1468680630)
expect(lg(-1867306675, -303043235).hashCode()).toEqual(2103231504)
expect(lg(1255224728, 265017316).hashCode()).toEqual(1159389820)
expect(lg(119985367, 695098919).hashCode()).toEqual(776506096)
expect(lg(-332671386, 1583817866).hashCode()).toEqual(-1303579924)
expect(lg(1610629865, 571880320).hashCode()).toEqual(1108767081)
expect(lg(727577343, -1794328817).hashCode()).toEqual(-1101969936)
expect(lg(730759795, -394092436).hashCode()).toEqual(-1022615009)
expect(lg(-148400203, 1074931585).hashCode()).toEqual(-1221218252)
expect(lg(181091802, 314250080).hashCode()).toEqual(410005178)
expect(lg(-1934827635, -889463837).hashCode()).toEqual(1180107886)
expect(lg(-1067099255, -650316777).hashCode()).toEqual(425308062)
expect(lg(1821917070, 174468125).hashCode()).toEqual(1727927187)
expect(lg(474121453, -830281051).hashCode()).toEqual(-759140792)
expect(lg(-402668999, -2100801229).hashCode()).toEqual(1698140938)
expect(lg(-615008378, -976157749).hashCode()).toEqual(512144461)
}
it("toString()") {
expect(lg(0).toString()).toEqual("0")
expect(lg(1).toString()).toEqual("1")
expect(lg(-1).toString()).toEqual("-1")
expect(IntMaxVal.toString()).toEqual(Int.MaxValue.toString())
expect(IntMaxValPlus1.toString()).toEqual("2147483648")
expect(IntMinVal.toString()).toEqual(Int.MinValue.toString())
expect(IntMinValMinus1.toString()).toEqual("-2147483649")
expect(lg(999999999).toString()).toEqual("999999999")
expect(lg(1000000000).toString()).toEqual("1000000000")
expect(MaxSafeDouble.toString()).toEqual("9007199254740991")
expect(TwoPow53.toString()).toEqual("9007199254740992")
expect(MinSafeDouble.toString()).toEqual("-9007199254740991")
expect(NegTwoPow53.toString()).toEqual("-9007199254740992")
expect(lg(-86922, -1).toString()).toEqual("-86922")
expect(lg(0, 0).toString()).toEqual("0")
expect(lg(-21874015, -1).toString()).toEqual("-21874015")
expect(lg(1317110830, -489).toString()).toEqual("-2098921896914")
expect(lg(-698060208, 18855).toString()).toEqual("80985205273168")
expect(lg(858389071, -2899145).toString()).toEqual("-12451732102972849")
expect(lg(3350, 0).toString()).toEqual("3350")
expect(lg(2005360390, -21540).toString()).toEqual("-92511590195450")
expect(lg(-2, -1).toString()).toEqual("-2")
expect(lg(1492984294, 103900277).toString()).toEqual("446248293253325286")
expect(lg(116015740, 116321286).toString()).toEqual("499596119314678396")
expect(lg(-3205893, -1).toString()).toEqual("-3205893")
expect(lg(1988813462, -20667).toString()).toEqual("-88762100292970")
expect(lg(-1278004, -1).toString()).toEqual("-1278004")
expect(lg(-1, -1).toString()).toEqual("-1")
expect(lg(-305393, -1).toString()).toEqual("-305393")
expect(lg(-2, -1).toString()).toEqual("-2")
expect(lg(-1678336113, 18695185).toString()).toEqual("80295210784300943")
expect(lg(5, 0).toString()).toEqual("5")
expect(lg(21, 0).toString()).toEqual("21")
expect(lg(64, 0).toString()).toEqual("64")
expect(lg(39146094, 0).toString()).toEqual("39146094")
expect(lg(-1725731, -1).toString()).toEqual("-1725731")
expect(lg(-874655652, -178824949).toString()).toEqual("-768047304243556260")
expect(lg(380990122, -635).toString()).toEqual("-2726923242838")
expect(lg(1318520807, -415).toString()).toEqual("-1781092907033")
expect(lg(-213275, -1).toString()).toEqual("-213275")
expect(lg(184176746, 1784).toString()).toEqual("7662405832810")
expect(lg(460945549, -36).toString()).toEqual("-154157877107")
expect(lg(1586508389, -216524094).toString()).toEqual("-929963900939521435")
expect(lg(-6872, -1).toString()).toEqual("-6872")
expect(lg(-333987816, 7413).toString()).toEqual("31842553544728")
expect(lg(-1817926382, 132147).toString()).toEqual("567569520305426")
expect(lg(19649016, 0).toString()).toEqual("19649016")
expect(lg(-1349346, -1).toString()).toEqual("-1349346")
expect(lg(-1372338764, 2207193).toString()).toEqual("9479824673588660")
expect(lg(3521781, 0).toString()).toEqual("3521781")
expect(lg(1740, 0).toString()).toEqual("1740")
expect(lg(0, 0).toString()).toEqual("0")
expect(lg(-1654582044, 21).toString()).toEqual("92834698468")
expect(lg(100400158, -18659001).toString()).toEqual("-80139798970631138")
expect(lg(30058, 0).toString()).toEqual("30058")
expect(lg(1332815438, -142265).toString()).toEqual("-611022189550002")
expect(lg(472694602, 119894).toString()).toEqual("514941281681226")
expect(lg(-1962042949, 571).toString()).toEqual("2454759250363")
expect(lg(1595551038, 3459895).toString()).toEqual("14860137468144958")
expect(lg(-79255, -1).toString()).toEqual("-79255")
expect(lg(-1501556660, 533210).toString()).toEqual("2290122305310796")
expect(lg(-463451414, -175936602).toString()).toEqual("-755641947927852310")
expect(lg(-771329970, -610447526).toString()).toEqual("-2621852156570472370")
expect(lg(698569929, -9).toString()).toEqual("-37956135735")
expect(lg(853219, 0).toString()).toEqual("853219")
expect(lg(901, 0).toString()).toEqual("901")
expect(lg(434694682, 1021).toString()).toEqual("4385596303898")
expect(lg(-972597865, -1).toString()).toEqual("-972597865")
expect(lg(-8057379, -1).toString()).toEqual("-8057379")
expect(lg(-14968, -1).toString()).toEqual("-14968")
expect(lg(-98204964, -1).toString()).toEqual("-98204964")
expect(lg(335479, 0).toString()).toEqual("335479")
expect(lg(54810714, -100).toString()).toEqual("-429441918886")
expect(lg(9798741, 0).toString()).toEqual("9798741")
expect(lg(-896875642, 31643665).toString()).toEqual("135908509698671494")
expect(lg(233027789, -32851335).toString()).toEqual("-141095409221912371")
expect(lg(-359183840, -2104985).toString()).toEqual("-9040837797787104")
expect(lg(-889, -1).toString()).toEqual("-889")
expect(lg(-1072884302, 0).toString()).toEqual("3222082994")
expect(lg(-1454853, -1).toString()).toEqual("-1454853")
expect(lg(-2113969463, 127).toString()).toEqual("547641844425")
expect(lg(-1766834443, 0).toString()).toEqual("2528132853")
expect(lg(242, 0).toString()).toEqual("242")
expect(lg(-1655763891, -1).toString()).toEqual("-1655763891")
expect(lg(82, 0).toString()).toEqual("82")
expect(lg(-120254181, -1).toString()).toEqual("-120254181")
expect(lg(-210088, -1).toString()).toEqual("-210088")
expect(lg(-2, -1).toString()).toEqual("-2")
expect(lg(598888267, 58267).toString()).toEqual("250255458324299")
expect(lg(-100656997, -1).toString()).toEqual("-100656997")
expect(lg(1672622015, -6).toString()).toEqual("-24097181761")
expect(lg(206088, 0).toString()).toEqual("206088")
expect(lg(-593, -1).toString()).toEqual("-593")
expect(lg(-99542049, -1).toString()).toEqual("-99542049")
expect(lg(421501, 0).toString()).toEqual("421501")
expect(lg(-2, -1).toString()).toEqual("-2")
expect(lg(-101, -1).toString()).toEqual("-101")
expect(lg(3, 0).toString()).toEqual("3")
expect(lg(2082590966, 3).toString()).toEqual("14967492854")
expect(lg(-86853659, -355870).toString()).toEqual("-1528445803513883")
expect(lg(-1353126070, 6230).toString()).toEqual("26760588095306")
expect(lg(1576139368, 2899).toString()).toEqual("12452686330472")
expect(lg(1022479965, -30415).toString()).toEqual("-130630407827875")
expect(lg(-1691843023, -3).toString()).toEqual("-10281777615")
expect(lg(2013284571, -21071).toString()).toEqual("-90497242609445")
expect(lg(1990158591, -3245).toString()).toEqual("-13935178716929")
expect(lg(-11308540, -1).toString()).toEqual("-11308540")
expect(lg(545166, 0).toString()).toEqual("545166")
expect(lg(1778574369, -243007).toString()).toEqual("-1043705339124703")
expect(lg(510, 0).toString()).toEqual("510")
expect(lg(1809514269, -1).toString()).toEqual("-2485453027")
expect(lg(-15103, -1).toString()).toEqual("-15103")
expect(lg(-779514418, -39296382).toString()).toEqual("-168776672025670194")
}
it("toByte") {
expect(lg(0).toByte).toEqual(0)
expect(lg(-1).toByte).toEqual(-1)
expect(lg(0xfedcba98, 0x76543210).toByte).toEqual(0x98.toByte)
expect(lg(-1755353242, -1245269156).toByte).toEqual(102)
expect(lg(-359135667, 1391746928).toByte).toEqual(77)
expect(lg(-957203503, 1516742479).toByte).toEqual(-47)
expect(lg(-1928741654, 1162703256).toByte).toEqual(-22)
expect(lg(-1698228849, 1497186951).toByte).toEqual(-113)
expect(lg(-68041812, -2115448390).toByte).toEqual(-84)
expect(lg(1534301729, 1468418695).toByte).toEqual(33)
expect(lg(1101829489, -514588123).toByte).toEqual(113)
expect(lg(-1437577204, 1896338488).toByte).toEqual(12)
expect(lg(-857671082, -1304076936).toByte).toEqual(86)
expect(lg(-292818212, -1485650549).toByte).toEqual(-36)
expect(lg(1044510040, 147719255).toByte).toEqual(88)
expect(lg(-1166136469, 78076997).toByte).toEqual(107)
expect(lg(500131901, 248541787).toByte).toEqual(61)
expect(lg(1863435363, -1465266670).toByte).toEqual(99)
expect(lg(136483252, 1662447178).toByte).toEqual(-76)
expect(lg(1787939584, 1303926235).toByte).toEqual(0)
expect(lg(2105657787, 845433223).toByte).toEqual(-69)
expect(lg(-1298285542, -1826340261).toByte).toEqual(26)
expect(lg(-766959552, -326327606).toByte).toEqual(64)
}
it("toShort") {
expect(lg(0).toShort).toEqual(0)
expect(lg(-1).toShort).toEqual(-1)
expect(lg(0xfedcba98, 0x76543210).toShort).toEqual(0xba98.toShort)
expect(lg(1925512546, -812328457).toShort).toEqual(-670)
expect(lg(2028716555, -1639243756).toShort).toEqual(-15861)
expect(lg(-1970657557, -1904990267).toShort).toEqual(9963)
expect(lg(-1012119590, -1704668195).toShort).toEqual(18394)
expect(lg(848486636, -810351120).toShort).toEqual(-7956)
expect(lg(2103989197, 955793808).toShort).toEqual(21453)
expect(lg(-237938237, -703399620).toShort).toEqual(22979)
expect(lg(666247428, -1109641927).toShort).toEqual(8452)
expect(lg(1824561213, -872828437).toShort).toEqual(-26563)
expect(lg(-10950266, -1779965318).toShort).toEqual(-5754)
expect(lg(1251814932, -491043391).toShort).toEqual(11796)
expect(lg(-117750172, -366379322).toShort).toEqual(18020)
expect(lg(-2095575368, 965048164).toShort).toEqual(3768)
expect(lg(-177410531, 1454361289).toShort).toEqual(-4579)
expect(lg(-359035310, -790126871).toShort).toEqual(-29102)
expect(lg(1486058820, 1675509542).toShort).toEqual(30020)
expect(lg(268881157, -342358099).toShort).toEqual(-13051)
expect(lg(-1089211040, 747294820).toShort).toEqual(-2720)
expect(lg(1163661942, 1708185440).toShort).toEqual(4726)
expect(lg(-1363821038, -1952481751).toShort).toEqual(-16878)
}
it("toInt") {
expect(lg(0).toInt).toEqual(0)
expect(lg(-1).toInt).toEqual(-1)
expect(lg(0xfedcba98, 0x76543210).toInt).toEqual(0xfedcba98)
expect(lg(-1869423218, -5516698).toInt).toEqual(-1869423218)
expect(lg(450655357, -521592408).toInt).toEqual(450655357)
expect(lg(-596464514, 629510497).toInt).toEqual(-596464514)
expect(lg(1668957409, 1231040344).toInt).toEqual(1668957409)
expect(lg(-313016061, 283507721).toInt).toEqual(-313016061)
expect(lg(-406779255, 1389322213).toInt).toEqual(-406779255)
expect(lg(-1125423893, -436921025).toInt).toEqual(-1125423893)
expect(lg(1491309031, 948401259).toInt).toEqual(1491309031)
expect(lg(360542935, -1033853853).toInt).toEqual(360542935)
expect(lg(178673916, -2045867551).toInt).toEqual(178673916)
expect(lg(-1167644863, 738699232).toInt).toEqual(-1167644863)
expect(lg(-1852739075, 950841298).toInt).toEqual(-1852739075)
expect(lg(-1965326912, 1694989583).toInt).toEqual(-1965326912)
expect(lg(-141857741, -1197558189).toInt).toEqual(-141857741)
expect(lg(-938893686, 1763555645).toInt).toEqual(-938893686)
expect(lg(-1178638558, 299067184).toInt).toEqual(-1178638558)
expect(lg(-1296424902, -1694453755).toInt).toEqual(-1296424902)
expect(lg(204387309, -240738711).toInt).toEqual(204387309)
expect(lg(-942136876, -527367452).toInt).toEqual(-942136876)
expect(lg(-1703892744, 240186844).toInt).toEqual(-1703892744)
}
it("toLong") {
expect(lg(0).toLong == 0L).toBeTruthy
expect(lg(-1).toLong == -1L).toBeTruthy
expect(lg(0xfedcba98, 0x76543210).toLong == 0x76543210fedcba98L).toBeTruthy
expect(lg(-85753595, 1608259083).toLong == 6907420169189163269L).toBeTruthy
expect(lg(539593679, -1527121853).toLong == -6558938415102325809L).toBeTruthy
expect(lg(-379998034, -1777303946).toLong == -7633462319206780754L).toBeTruthy
expect(lg(-655641274, -943321249).toLong == -4051533910437546682L).toBeTruthy
expect(lg(1727460259, -905790147).toLong == -3890339056676572253L).toBeTruthy
expect(lg(1824805856, -719806090).toLong == -3091543614186826784L).toBeTruthy
expect(lg(948567983, 653384746).toLong == 2806266116723834799L).toBeTruthy
expect(lg(-957910924, -405401095).toLong == -1741184441450532748L).toBeTruthy
expect(lg(-433042213, 790675337).toLong == 3395924718030703835L).toBeTruthy
expect(lg(889526541, -1795647094).toLong == -7712245542997911283L).toBeTruthy
expect(lg(1316066543, -640532153).toLong == -2751064647855401745L).toBeTruthy
expect(lg(1913378322, 1216751901).toLong == 5225909624054208018L).toBeTruthy
expect(lg(-434813127, 310602037).toLong == 1334025594846136121L).toBeTruthy
expect(lg(1689963942, -366687109).toLong == -1574909139329823322L).toBeTruthy
expect(lg(754250892, -2128587091).toLong == -9142211941778525044L).toBeTruthy
expect(lg(-1817691823, -1284620305).toLong == -5517402195275269807L).toBeTruthy
expect(lg(-222627957, 1772466007).toLong == 7612683537409046411L).toBeTruthy
expect(lg(-1282993697, -688214725).toLong == -2955859733488660001L).toBeTruthy
expect(lg(799857959, 107587404).toLong == 462084382441397543L).toBeTruthy
expect(lg(2076251528, 2049295309).toLong == 8801656334077465992L).toBeTruthy
}
when("strict-floats").
it("toFloat (strict)") {
expect(lg(0).toFloat).toEqual(0)
expect(lg(-1).toFloat).toEqual(-1)
if (!TestSuiteContext.hasTag("fullopt-stage")) {
expect(MaxVal.toFloat).toEqual(9.223372E18f)
expect(MinVal.toFloat).toEqual(-9.223372E18f)
} else {
// Closure seems to incorrectly rewrite the constant on the right :-(
expect(MaxVal.toFloat).toBeCloseTo(9.223372E18f, -4)
expect(MinVal.toFloat).toBeCloseTo(-9.223372E18f, -4)
}
expect(lg(-1026388143, 1116923232).toFloat).toEqual(4.7971489E18f)
expect(lg(-1288678667, -521651607).toFloat).toEqual(-2.24047663E18f)
expect(lg(1192262605, 1069184891).toFloat).toEqual(4.59211416E18f)
expect(lg(-180353617, 789161022).toFloat).toEqual(3.38942079E18f)
expect(lg(-1158443188, -1585038363).toFloat).toEqual(-6.8076878E18f)
expect(lg(906981906, 1726665521).toFloat).toEqual(7.4159717E18f)
expect(lg(2042933575, -431379283).toFloat).toEqual(-1.85275997E18f)
expect(lg(599900903, 1335148382).toFloat).toEqual(5.7344188E18f)
expect(lg(1458166084, 746013039).toFloat).toEqual(3.20410168E18f)
expect(lg(1956524672, -1683605603).toFloat).toEqual(-7.2310311E18f)
expect(lg(478583639, 1796320118).toFloat).toEqual(7.7151362E18f)
expect(lg(-1645816617, 329141676).toFloat).toEqual(1.41365268E18f)
expect(lg(184187116, -705937657).toFloat).toEqual(-3.03197918E18f)
expect(lg(659513335, -941305424).toFloat).toEqual(-4.04287594E18f)
expect(lg(770505156, -1820844549).toFloat).toEqual(-7.8204678E18f)
expect(lg(929928858, -1390767911).toFloat).toEqual(-5.9733025E18f)
expect(lg(-1475096259, 262207373).toFloat).toEqual(1.1261721E18f)
expect(lg(787691795, 933383012).toFloat).toEqual(4.00884963E18f)
expect(lg(1189057493, -334139018).toFloat).toEqual(-1.43511611E18f)
expect(lg(-618946450, 888051141).toFloat).toEqual(3.81415059E18f)
}
it("toDouble") {
expect(lg(0).toDouble).toEqual(0)
expect(lg(-1).toDouble).toEqual(-1)
if (!TestSuiteContext.hasTag("fullopt-stage")) {
expect(MaxVal.toDouble).toEqual(9.223372036854776E18)
expect(MinVal.toDouble).toEqual(-9.223372036854776E18)
} else {
// Closure seems to incorrectly rewrite the constant on the right :-(
expect(MaxVal.toDouble).toBeCloseTo(9.223372036854776E18, -4)
expect(MinVal.toDouble).toBeCloseTo(-9.223372036854776E18, -4)
}
expect(lg(-151011088, 797216310).toDouble).toEqual(3.4240179834317537E18)
expect(lg(-508205099, 19929381).toDouble).toEqual(8.5596043411285968E16)
expect(lg(1249322201, -736451403).toDouble).toEqual(-3.1630346897289943E18)
expect(lg(483575860, -1044191477).toDouble).toEqual(-4.4847682439933604E18)
expect(lg(-1526343930, -149046007).toDouble).toEqual(-6.4014772289576371E17)
expect(lg(531728928, -412036011).toDouble).toEqual(-1.76968119148756736E18)
expect(lg(-734111585, -1993185640).toDouble).toEqual(-8.5606671350959739E18)
expect(lg(-1407864332, -2104881296).toDouble).toEqual(-9.0403963253949932E18)
expect(lg(-1712351423, -1513137310).toDouble).toEqual(-6.4988752582247977E18)
expect(lg(1969244733, -181115448).toDouble).toEqual(-7.7788492399114394E17)
expect(lg(-907683842, 1777829016).toDouble).toEqual(7.6357174849871442E18)
expect(lg(-815927209, 291826806).toDouble).toEqual(1.25338659134517658E18)
expect(lg(463523496, -742968207).toDouble).toEqual(-3.1910241505692349E18)
expect(lg(1482622807, 1727987781).toDouble).toEqual(7.4216510087652332E18)
expect(lg(1170040143, -1906661060).toDouble).toEqual(-8.189046896086654E18)
expect(lg(-85609173, 1590612176).toDouble).toEqual(6.8316272807487539E18)
expect(lg(-1212811257, -1876873801).toDouble).toEqual(-8.0611115909320561E18)
expect(lg(-648802816, 398781194).toDouble).toEqual(1.7127521901359959E18)
expect(lg(-1484519186, -1500419423).toDouble).toEqual(-6.4442523492577423E18)
expect(lg(-2016996893, -398756124).toDouble).toEqual(-1.71264450938175027E18)
}
it("comparisons") {
def test(x: RuntimeLong, y: RuntimeLong, expected: Int): Unit = {
expect(x.compareTo(y).signum).toEqual(expected)
expect(x.compareTo(y.toLong: java.lang.Long).signum).toEqual(expected)
expect(x.equals(y)).toBe(expected == 0)
expect(x.notEquals(y)).toBe(expected != 0)
expect(x < y).toBe(expected < 0)
expect(x <= y).toBe(expected <= 0)
expect(x > y).toBe(expected > 0)
expect(x >= y).toBe(expected >= 0)
}
test(lg(0), lg(0), 0)
test(lg(0), lg(1), -1)
test(lg(0), lg(-1), 1)
test(MaxVal, MinVal, 1)
test(MinVal, MaxVal, -1)
// Positive and negative numbers requiring lo to be compared via unsigned
test(lg(0x87654321, 0x654789ab), lg(0x12345678, 0x654789ab), 1)
test(lg(0x87654321, 0x89abcdef), lg(0x12345678, 0x89abcdef), 1)
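      // In both cases above the hi words are equal, so the result is decided
      // by comparing lo as unsigned: 0x87654321 (2271560481 unsigned) is
      // greater than 0x12345678 (305419896) (added note, not a fixture).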
// Workaround for https://code.google.com/p/v8/issues/detail?id=3304
test(lg(-1, 0), lg(0, 0), 1)
test(lg(0, 0), lg(-1, 0), -1)
test(lg(173547161, -1884162399), lg(173547161, -1884162399), 0)
test(lg(-1131022787, -472928681), lg(-1131022787, -472928681), 0)
test(lg(-1426164191, 1230100202), lg(-1426164191, 1230100202), 0)
test(lg(-865774626, 1656835920), lg(-865774626, 1656835920), 0)
test(lg(323675568, -725625271), lg(323675568, -725625271), 0)
test(lg(-480943595, -1454872354), lg(-480943595, -1454872354), 0)
test(lg(-626788852, 1037229194), lg(-626788852, 1037229194), 0)
test(lg(-717389653, 232764759), lg(-717389653, 232764759), 0)
test(lg(-861190423, -1233377930), lg(-861190423, -1233377930), 0)
test(lg(-424759090, 2081288998), lg(-424759090, 2081288998), 0)
test(lg(-1092215366, 753517982), lg(349136582, -103427916), 1)
test(lg(363609757, -1151024787), lg(472951646, -1802702403), 1)
test(lg(604332601, 1869576376), lg(1642523661, 1083165388), 1)
test(lg(309732766, 1349689861), lg(1287300335, 1464464808), -1)
test(lg(-1309668929, -965374553), lg(-1952664258, 53355972), -1)
test(lg(1881957750, 388099413), lg(1843907319, -1819358211), 1)
test(lg(-969542710, 864289013), lg(-1025874755, 1102102911), -1)
test(lg(-1425636748, -220185411), lg(1184140796, 40447497), -1)
test(lg(242386079, 452246653), lg(435337552, -956883630), 1)
test(lg(-1007383056, 344856628), lg(-195994328, 635205577), -1)
test(lg(-1652098619, 2042392045), lg(819672742, -2139008380), 1)
test(lg(1423590080, 1919857862), lg(918443721, 1202178673), 1)
test(lg(-1726296442, 302493002), lg(314727886, 1583734481), -1)
test(lg(-2124336701, 769721099), lg(461146322, -591528218), 1)
test(lg(1544826993, -689540243), lg(-1107003972, -1622786326), 1)
test(lg(2050227802, 951848379), lg(-774454951, 1675192386), -1)
test(lg(251298779, -327163776), lg(767615943, 1531730165), -1)
test(lg(1890888425, 761833495), lg(1870917399, 2027251288), -1)
test(lg(594868313, 126374530), lg(-1567484882, -1199917303), 1)
test(lg(-914360997, -703435655), lg(2049249771, -1581791194), 1)
test(lg(-732484281, -738997306), lg(1445589646, 1910084021), -1)
test(lg(340771740, 1351224018), lg(459324247, 1301544548), 1)
test(lg(-940710332, 1344186742), lg(-1143672211, 1112189558), 1)
test(lg(-804347876, 364046111), lg(-4317439, -1733157379), 1)
test(lg(914214836, -1226397169), lg(-299522125, 1393423940), -1)
test(lg(1244546642, 1821771770), lg(44151604, -1398558064), 1)
test(lg(-2094640323, -1469168677), lg(-263524564, 88152070), -1)
test(lg(-124567753, -93039352), lg(-200449699, -30383890), -1)
test(lg(161119306, -1098626173), lg(-137189625, 1289988889), -1)
test(lg(-2052616761, 846341515), lg(-150583666, 1044666783), -1)
test(lg(-10359669, -1628837253), lg(165345114, 1529503183), -1)
test(lg(1717988228, 1622548180), lg(834798590, -1907713185), 1)
test(lg(-1416372109, -353311343), lg(-722195813, -2060788759), 1)
test(lg(980620531, -300588346), lg(-889348218, 1805452697), -1)
test(lg(-465681479, 556544868), lg(-684386776, 724207906), -1)
test(lg(1720493596, 1118244444), lg(2048914469, -789300492), 1)
test(lg(-1259678249, -1557339417), lg(-1908141376, -468055129), -1)
test(lg(1374750478, 1591281700), lg(1107931774, 1073828802), 1)
test(lg(1307860622, -1769647645), lg(-1521056504, 1476896409), -1)
test(lg(1870719065, -606069057), lg(1219817813, -1063559023), 1)
test(lg(-526519712, 1166848880), lg(-748095992, 59925642), 1)
test(lg(-1011429486, -2053277854), lg(537284118, 1714076830), -1)
test(lg(-669104363, -107157886), lg(1647426475, -1784147450), 1)
test(lg(-389860398, 693324889), lg(1047633230, -1757663140), 1)
test(lg(-200206281, 96771163), lg(613429570, -1206384633), 1)
test(lg(-1436571081, -2050819200), lg(-665572561, 644211697), -1)
test(lg(620796821, -567816428), lg(-109412350, -624638338), 1)
test(lg(858464866, -2104597302), lg(-987329519, 1189618105), -1)
test(lg(-1342634556, -1517778924), lg(-693373055, 142499537), -1)
test(lg(1839280888, -168388422), lg(-1645740821, -1967920957), 1)
}
it("bitwise not ~") {
expect(~lg(-1664374423, -327449893)).toEqual(lg(1664374422, 327449892))
expect(~lg(2033180389, 1179462630)).toEqual(lg(-2033180390, -1179462631))
expect(~lg(1134559213, -581653070)).toEqual(lg(-1134559214, 581653069))
expect(~lg(304074637, 795726116)).toEqual(lg(-304074638, -795726117))
expect(~lg(1711832786, -1153070600)).toEqual(lg(-1711832787, 1153070599))
expect(~lg(1526506636, -966114537)).toEqual(lg(-1526506637, 966114536))
expect(~lg(-4362924, -1155261398)).toEqual(lg(4362923, 1155261397))
expect(~lg(1976846288, 68873333)).toEqual(lg(-1976846289, -68873334))
expect(~lg(980717877, 1171857117)).toEqual(lg(-980717878, -1171857118))
expect(~lg(-1087568371, -543704247)).toEqual(lg(1087568370, 543704246))
expect(~lg(-466027719, -693030606)).toEqual(lg(466027718, 693030605))
expect(~lg(-457333959, -1344424075)).toEqual(lg(457333958, 1344424074))
expect(~lg(1195369387, 1211454824)).toEqual(lg(-1195369388, -1211454825))
expect(~lg(-1637646575, -618600149)).toEqual(lg(1637646574, 618600148))
expect(~lg(-1882417449, -81477817)).toEqual(lg(1882417448, 81477816))
expect(~lg(755550611, 520392565)).toEqual(lg(-755550612, -520392566))
expect(~lg(754282894, 1550447286)).toEqual(lg(-754282895, -1550447287))
expect(~lg(-949172350, 708028074)).toEqual(lg(949172349, -708028075))
expect(~lg(-1587810907, 1344614949)).toEqual(lg(1587810906, -1344614950))
expect(~lg(1761617638, 353615614)).toEqual(lg(-1761617639, -353615615))
expect(~lg(153730677, -249152221)).toEqual(lg(-153730678, 249152220))
expect(~lg(189227913, -2071190798)).toEqual(lg(-189227914, 2071190797))
expect(~lg(853867869, -445686069)).toEqual(lg(-853867870, 445686068))
expect(~lg(779434874, -417640993)).toEqual(lg(-779434875, 417640992))
expect(~lg(-1997707716, 1100729421)).toEqual(lg(1997707715, -1100729422))
expect(~lg(-1171311730, 1236578927)).toEqual(lg(1171311729, -1236578928))
expect(~lg(833922039, -1773972622)).toEqual(lg(-833922040, 1773972621))
expect(~lg(-1414648870, -1222586076)).toEqual(lg(1414648869, 1222586075))
expect(~lg(-1123832583, 1270176017)).toEqual(lg(1123832582, -1270176018))
expect(~lg(-1163066310, -237396272)).toEqual(lg(1163066309, 237396271))
expect(~lg(1826566062, -509270118)).toEqual(lg(-1826566063, 509270117))
expect(~lg(450318542, -1650640100)).toEqual(lg(-450318543, 1650640099))
expect(~lg(-1461907705, 27364748)).toEqual(lg(1461907704, -27364749))
expect(~lg(-1012261257, -1691289855)).toEqual(lg(1012261256, 1691289854))
expect(~lg(1929178873, -1804481537)).toEqual(lg(-1929178874, 1804481536))
expect(~lg(888719199, 1846455122)).toEqual(lg(-888719200, -1846455123))
expect(~lg(-984231683, 867292443)).toEqual(lg(984231682, -867292444))
expect(~lg(-2105026706, 16146222)).toEqual(lg(2105026705, -16146223))
expect(~lg(-1742028654, 1648876190)).toEqual(lg(1742028653, -1648876191))
expect(~lg(-1922039595, 60702354)).toEqual(lg(1922039594, -60702355))
expect(~lg(-264728649, -275960742)).toEqual(lg(264728648, 275960741))
expect(~lg(-1237639033, 1761272006)).toEqual(lg(1237639032, -1761272007))
expect(~lg(-1118919823, -901486923)).toEqual(lg(1118919822, 901486922))
expect(~lg(-18001221, 1121574636)).toEqual(lg(18001220, -1121574637))
expect(~lg(-2122002357, 1370943784)).toEqual(lg(2122002356, -1370943785))
expect(~lg(-2006182036, 1422441077)).toEqual(lg(2006182035, -1422441078))
expect(~lg(-1314896175, -460075840)).toEqual(lg(1314896174, 460075839))
expect(~lg(-1829402919, 1031934891)).toEqual(lg(1829402918, -1031934892))
expect(~lg(2138673172, 107590305)).toEqual(lg(-2138673173, -107590306))
expect(~lg(-1382443515, 56307752)).toEqual(lg(1382443514, -56307753))
}
it("bitwise or |") {
expect(lg(1198889513, -170491266) | lg(356560637, 1244673694)).toEqual(lg(1467334397, -608514))
expect(lg(-1930990792, 627822941) | lg(-1849669008, 185716690)).toEqual(lg(-1645778056, 796647391))
expect(lg(711185578, -154795743) | lg(1446469570, -104529814)).toEqual(lg(2121785322, -3735189))
expect(lg(356565628, 275405582) | lg(380967239, 1356925723)).toEqual(lg(401988479, 1357601567))
expect(lg(1968397619, -447093015) | lg(-1242708043, 1353146913)).toEqual(lg(-167780425, -167778583))
expect(lg(-2121965024, -76133937) | lg(2104409609, -1365814226)).toEqual(lg(-34603479, -565777))
expect(lg(1496398822, -548061626) | lg(-556169301, -245689186)).toEqual(lg(-537280529, -10535202))
expect(lg(856203065, -1102382704) | lg(1276763344, 377524977)).toEqual(lg(2132402169, -1093993487))
expect(lg(474066920, -215674305) | lg(366737695, 530830706)).toEqual(lg(500957183, -5777537))
expect(lg(-1543310820, 664058893) | lg(1002387606, 1826081595)).toEqual(lg(-1077937506, 1876426559))
expect(lg(1606847457, -857707283) | lg(-82108753, 628476252)).toEqual(lg(-2121745, -302649859))
expect(lg(703699686, -1218298019) | lg(1575693246, -565500071)).toEqual(lg(2113649662, -9748643))
expect(lg(1281663616, 1255777790) | lg(1708663964, 1604300502)).toEqual(lg(1845274268, 1608495102))
expect(lg(-1315547660, 1726760037) | lg(-442781559, 235328140)).toEqual(lg(-174066179, 1861146349))
expect(lg(2014986997, -1130692301) | lg(124088654, 1637408903)).toEqual(lg(2139059199, -40115785))
expect(lg(1653153899, 1412277603) | lg(-1615398494, -682581111)).toEqual(lg(-4195861, -679630869))
expect(lg(551077237, 1349033186) | lg(597575118, 1662855120)).toEqual(lg(601802239, 1937620978))
expect(lg(613289137, -1123701660) | lg(-1383294317, 369006329)).toEqual(lg(-1383162189, -1107312899))
expect(lg(-418175046, -593383309) | lg(1468132939, 360734532)).toEqual(lg(-141299717, -576585865))
expect(lg(1428236018, -1294026291) | lg(572735565, 1213340152)).toEqual(lg(1998808831, -86066691))
expect(lg(-1949058688, -1013245209) | lg(416580246, 300148007)).toEqual(lg(-1680360554, -738459673))
expect(lg(-1746245220, 1427323605) | lg(-1185613404, -469621610)).toEqual(lg(-1073808964, -183288105))
expect(lg(1445648649, 701317455) | lg(1407661733, 1287118327)).toEqual(lg(1475346349, 1845485055))
expect(lg(-1861500445, 764080137) | lg(-33812527, -411163560)).toEqual(lg(-33566733, -268503975))
expect(lg(-1408712806, 393166157) | lg(1323973395, 1580353248)).toEqual(lg(-286605413, 1602191341))
expect(lg(-2072304578, -2142600249) | lg(-625840402, -2018265417)).toEqual(lg(-553947394, -2013546505))
expect(lg(450125308, 1742298015) | lg(-999674466, -89794491)).toEqual(lg(-553746946, -140321))
expect(lg(1239068904, -68194107) | lg(-1092247939, -639552609)).toEqual(lg(-16643, -68193313))
expect(lg(-2075047684, -1706497393) | lg(-119858776, -1461536706)).toEqual(lg(-52733444, -1159005505))
expect(lg(-1065293728, 1045575815) | lg(943802850, 4130803)).toEqual(lg(-121509406, 1048526839))
expect(lg(1688647147, -1327540094) | lg(1767049400, -1609892586)).toEqual(lg(1844952571, -1327497834))
expect(lg(-207425559, 231270892) | lg(515004644, -1349918716)).toEqual(lg(-5046291, -1345721876))
expect(lg(781813534, 1274454635) | lg(-1814682890, -1182466103)).toEqual(lg(-1075861506, -67698709))
expect(lg(1792206347, -54265949) | lg(931436592, -625499620)).toEqual(lg(2144796219, -17303617))
expect(lg(-1957992337, 421859924) | lg(1138122674, -1896513908)).toEqual(lg(-874545153, -1611301156))
expect(lg(-1220262128, 1790926509) | lg(-2107837994, -245286664)).toEqual(lg(-1218644010, -67141891))
expect(lg(-485426246, 2122993116) | lg(-1077361187, 795578180)).toEqual(lg(-2555905, 2146160604))
expect(lg(713580935, 2059541733) | lg(957494730, 1688940106)).toEqual(lg(999978447, 2129346287))
expect(lg(-181332639, 608154803) | lg(787627150, 1378378253)).toEqual(lg(-836113, 1983903423))
expect(lg(-944448827, -1528432780) | lg(-374967708, 364320051)).toEqual(lg(-273220891, -1242040457))
expect(lg(1822361801, -1626992863) | lg(-1865553026, -1867721804)).toEqual(lg(-52433921, -1615929419))
expect(lg(-333036705, -39743141) | lg(-136127263, -404241201)).toEqual(lg(-1646593, -1583649))
expect(lg(1342309595, 143297662) | lg(-1448137844, -50933699)).toEqual(lg(-105959457, -50406273))
expect(lg(-514802766, 718197230) | lg(1113082335, -259890518)).toEqual(lg(-480707585, -87100434))
expect(lg(-476348284, -1025699402) | lg(1518405435, 1545110880)).toEqual(lg(-73693249, -555903498))
expect(lg(-2058311589, 1135057747) | lg(-1664731675, -1535754941)).toEqual(lg(-1646871041, -403194029))
expect(lg(333362997, -34482226) | lg(-205173969, 1754490115)).toEqual(lg(-203423937, -34342961))
expect(lg(2083354303, -2043490039) | lg(1344953817, -195725679)).toEqual(lg(2083487743, -159909991))
expect(lg(-942983837, -683124136) | lg(909452980, -1021249590)).toEqual(lg(-134268937, -680984614))
expect(lg(-402624124, -505696678) | lg(-688199800, 2110291577)).toEqual(lg(-17107060, -35914117))
}
it("bitwise and &") {
expect(lg(-1973652216, 353474049) & lg(-576365513, -1546420349)).toEqual(lg(-2012982272, 17896961))
expect(lg(2054268182, -735220496) & lg(-1706223071, -653894309)).toEqual(lg(440467456, -805024688))
expect(lg(-761230816, -1888512251) & lg(-988806710, -256349768)).toEqual(lg(-1073741824, -2144861952))
expect(lg(-834874333, -101893315) & lg(-1964333382, -1877225849)).toEqual(lg(-1977056222, -1878455803))
expect(lg(-767041747, 1403541430) & lg(-320482908, 442929698)).toEqual(lg(-1069166300, 304091682))
expect(lg(1538292767, 67928849) & lg(261587146, 2097883842)).toEqual(lg(193986570, 67633664))
expect(lg(448790964, 1852174074) & lg(-284620129, 35804464)).toEqual(lg(167772308, 35669040))
expect(lg(123267521, 1965916169) & lg(-401979731, 588194498)).toEqual(lg(540801, 554500096))
expect(lg(-1725202754, 324931273) & lg(-1240211271, 948007557)).toEqual(lg(-1878826824, 268436097))
expect(lg(1044995460, -1447811559) & lg(1381579300, 378161591)).toEqual(lg(306780164, 8388625))
expect(lg(1640550232, -1980050765) & lg(-1613988461, 381206391)).toEqual(lg(29904144, 12096051))
expect(lg(-810205145, 832395272) & lg(-153237294, -1368559681)).toEqual(lg(-963297278, 537741320))
expect(lg(-2087037677, -539042218) & lg(-1930915595, -1879201391)).toEqual(lg(-2138566639, -1881372656))
expect(lg(936077102, 1888906741) & lg(-590306112, 153013360)).toEqual(lg(348136448, 1461360))
expect(lg(-1520343420, -480326676) & lg(-1031638712, 463833361)).toEqual(lg(-2147459072, 50628864))
expect(lg(-603625122, -837874740) & lg(-266310439, -433325742)).toEqual(lg(-805279656, -972355264))
expect(lg(2101242821, 1363798717) & lg(-337523686, -1007893653)).toEqual(lg(1763723264, 1095287337))
expect(lg(-849542331, 2084521938) & lg(1866786159, -179258269)).toEqual(lg(1296302405, 1947206722))
expect(lg(1283984114, 1922846117) & lg(-42342754, 948944324)).toEqual(lg(1275593362, 814484868))
expect(lg(18451376, 39592223) & lg(-300891980, 43819665)).toEqual(lg(1081520, 35397649))
expect(lg(1772840110, -1611388521) & lg(876572201, -1080057992)).toEqual(lg(539714600, -1617688304))
expect(lg(-1792575438, 1131693597) & lg(2026108738, -691967420)).toEqual(lg(268660738, 1111507460))
expect(lg(-1977130853, 1105495064) & lg(-289941322, 37545108)).toEqual(lg(-1977139054, 2393104))
expect(lg(-1590955612, -1330697458) & lg(-924798828, -1177272879)).toEqual(lg(-2145341308, -1333516032))
expect(lg(-285931035, -293654078) & lg(-1486596765, -31342500)).toEqual(lg(-1503395487, -299827136))
expect(lg(1237743775, -1293389691) & lg(1803860874, 1175174664)).toEqual(lg(1233401994, 34091008))
expect(lg(-839976008, 900736195) & lg(-362132238, -668577850)).toEqual(lg(-932558672, 270533826))
expect(lg(1202887172, 484547048) & lg(793351913, -1622877017)).toEqual(lg(117477888, 473995424))
expect(lg(1393155525, -2025583778) & lg(-1164217783, -416769026)).toEqual(lg(302600257, -2030040226))
expect(lg(-658787467, -1534848013) & lg(770509273, 861439716)).toEqual(lg(145293649, 536871648))
expect(lg(1550840002, 1588870758) & lg(2084528882, 302148833)).toEqual(lg(1546608834, 302001248))
expect(lg(481609689, -152204489) & lg(1279544421, -561242137)).toEqual(lg(201606209, -695465177))
expect(lg(-1529763097, -1978531900) & lg(641783708, -2039026814)).toEqual(lg(608207492, -2112820352))
expect(lg(887514076, -129985897) & lg(423346174, -1364800691)).toEqual(lg(270672860, -1476361723))
expect(lg(1736270961, -223672071) & lg(748709016, -498985816)).toEqual(lg(606102544, -503185240))
expect(lg(413438572, 628333003) & lg(-1964689415, -2039117914)).toEqual(lg(144970344, 74547586))
expect(lg(-1441786846, -952014445) & lg(1364118108, 582220621)).toEqual(lg(0, 33646849))
expect(lg(-167845571, -610782244) & lg(920048140, -1832380167)).toEqual(lg(886489100, -1836576552))
expect(lg(1070668735, 1223734716) & lg(1255200260, 310500128)).toEqual(lg(181408260, 8425760))
expect(lg(565998918, 2102701486) & lg(1230790357, -651115716)).toEqual(lg(18633796, 1494253868))
expect(lg(1259021457, -988117846) & lg(-95497780, 2025257730)).toEqual(lg(1242169472, 1074954242))
expect(lg(236334914, 210367602) & lg(-1388488109, 672191707)).toEqual(lg(202639938, 134272082))
expect(lg(2029259749, 2012495659) & lg(-1125022313, -17866867)).toEqual(lg(955253125, 1994661641))
expect(lg(2078335024, -748696528) & lg(-1944488853, 1455161657)).toEqual(lg(134242336, 1377566768))
expect(lg(1962270604, 747650594) & lg(1051641707, -1606005365)).toEqual(lg(883214088, 536873986))
expect(lg(504991188, 623990339) & lg(-1919047324, 331123498)).toEqual(lg(203000132, 19923458))
expect(lg(409659995, 1887189252) & lg(384277491, 1973591160)).toEqual(lg(274893395, 1881151488))
expect(lg(872793907, 353626075) & lg(34859627, 1988247415)).toEqual(lg(115235, 335685459))
expect(lg(-1407266644, 441386073) & lg(1635378940, -548742904)).toEqual(lg(538493100, 441057288))
expect(lg(844761371, 1022505085) & lg(1930384912, 688275291)).toEqual(lg(839516176, 671232089))
}
it("bitwise xor ^") {
expect(lg(690404572, -1279287229) ^ lg(2032643064, 1592473506)).toEqual(lg(1342248740, -313223199))
expect(lg(1880634009, 1433776255) ^ lg(-348716857, 1160616710)).toEqual(lg(-1691405730, 274213753))
expect(lg(-958227509, 287282926) ^ lg(-227156354, -260614433)).toEqual(lg(882329013, -513228751))
expect(lg(-266860160, 1815641996) ^ lg(-1536078487, -252396512)).toEqual(lg(1416185065, -1664302164))
expect(lg(1278830943, 22194981) ^ lg(-127614265, 1402065425)).toEqual(lg(-1268929640, 1388542260))
expect(lg(-865349911, -84319450) ^ lg(-1309551184, 1538105408)).toEqual(lg(2107251545, -1588280474))
expect(lg(-1973252863, -1969367363) ^ lg(916708915, -2107399239)).toEqual(lg(-1128180942, 150893828))
expect(lg(2098389933, -3394272) ^ lg(-1444158786, -35986574)).toEqual(lg(-721878765, 35051090))
expect(lg(533612062, -1712875225) ^ lg(-1893500990, -2045945845)).toEqual(lg(-1863503396, 535478572))
expect(lg(799833325, 2072025633) ^ lg(1223390615, -462316872)).toEqual(lg(1732708730, -1611595623))
expect(lg(570370215, 1665373667) ^ lg(-215635812, -199487627)).toEqual(lg(-757432261, -1755342186))
expect(lg(-1440978805, 1756956707) ^ lg(-2028544094, 1603010180)).toEqual(lg(755676969, 926086823))
expect(lg(-1788434031, -203193594) ^ lg(-634323830, -1548988140)).toEqual(lg(1331057947, 1347408402))
expect(lg(-1101798994, 1399594232) ^ lg(-1646597332, -1546197695)).toEqual(lg(596183682, -256181831))
expect(lg(-1432962218, -1631098948) ^ lg(-75062662, -1809535684)).toEqual(lg(1360009516, 182700672))
expect(lg(699430210, 902448324) ^ lg(180589540, -851178037)).toEqual(lg(594798246, -124892913))
expect(lg(-1873118605, -20501824) ^ lg(553528574, -1833816077)).toEqual(lg(-1331407219, 1819608371))
expect(lg(-693963147, 616673404) ^ lg(-1300017312, 952842738)).toEqual(lg(1679931669, 470452622))
expect(lg(1250421224, 1104113895) ^ lg(610853582, -420437133)).toEqual(lg(1861159718, -1488989292))
expect(lg(-611286212, -1550148499) ^ lg(-445979241, 1514412284)).toEqual(lg(1056597675, -102857583))
expect(lg(1704771515, 1382796179) ^ lg(1792974657, 845718187)).toEqual(lg(255992058, 1610836280))
expect(lg(1042258124, 728098489) ^ lg(752081254, 178455073)).toEqual(lg(315376042, 566682776))
expect(lg(-1887944331, 1039677246) ^ lg(2073445080, -1177715779)).toEqual(lg(-185728083, -2076881789))
expect(lg(1986213921, -1875380784) ^ lg(2000642315, -903708915)).toEqual(lg(22829354, 1511361245))
expect(lg(365179043, -418125319) ^ lg(-1574194252, -2111511936)).toEqual(lg(-1209040105, 1698106233))
expect(lg(-376038790, 1936322298) ^ lg(1865150125, -1725716895)).toEqual(lg(-2034371369, -364230501))
expect(lg(182372182, -1389399582) ^ lg(-428511717, 121795327)).toEqual(lg(-324294323, -1435696355))
expect(lg(408417754, -547668779) ^ lg(-2031925038, -640727503)).toEqual(lg(-1632322296, 110394084))
expect(lg(1515701412, 860890032) ^ lg(105620727, -733936646)).toEqual(lg(1545363539, -418308022))
expect(lg(144626057, 2121098703) ^ lg(-1983696154, 599907975)).toEqual(lg(-2124553361, 1571601224))
expect(lg(1716685092, -647833300) ^ lg(-2015169962, -236730016)).toEqual(lg(-508527758, 679546956))
expect(lg(-2016515438, -1674300757) ^ lg(1371710907, 306998239)).toEqual(lg(-703803607, -1904715404))
expect(lg(-547404938, -860356684) ^ lg(1838979051, -234273060)).toEqual(lg(-1295788899, 1052686696))
expect(lg(1034397763, 1158948099) ^ lg(-1774872572, -585891415)).toEqual(lg(-1416482745, -1744821078))
expect(lg(1755131065, -847055172) ^ lg(-1905373301, 1520046660)).toEqual(lg(-420256974, -1759976200))
expect(lg(755114159, 1707687361) ^ lg(-1492035880, -98945846)).toEqual(lg(-1978435977, -1613559541))
expect(lg(1110955283, -394871226) ^ lg(407088050, 1436378667)).toEqual(lg(1517584033, -1108617107))
expect(lg(729918767, -1047522396) ^ lg(1311993397, 527980061)).toEqual(lg(1706214170, -555203143))
expect(lg(-1533968339, 1826223468) ^ lg(1274742780, -681737135)).toEqual(lg(-278231087, -1148948163))
expect(lg(230297309, -219465279) ^ lg(-26402437, -1168671510)).toEqual(lg(-204001370, 1220298027))
expect(lg(-1364422220, 1487677662) ^ lg(350226860, -557455315)).toEqual(lg(-1169385448, -2039889677))
expect(lg(-1049451753, 1883174397) ^ lg(-296389651, 1475305844)).toEqual(lg(791138554, 668046473))
expect(lg(-895088167, 1303802204) ^ lg(-1211781080, 258296169)).toEqual(lg(2103687665, 1121138741))
expect(lg(1409034242, -1162000487) ^ lg(-1155284684, -1936324751)).toEqual(lg(-387978954, 908804328))
expect(lg(861082066, -475962819) ^ lg(2015491450, -1480757658)).toEqual(lg(1265820840, 1142688859))
expect(lg(-288714491, 1935424926) ^ lg(-1240144421, -1674954616)).toEqual(lg(1490973918, -277478122))
expect(lg(-699164585, -731232280) ^ lg(-1144193327, -1043673420)).toEqual(lg(1839163014, 362842460))
expect(lg(683993930, 248552821) ^ lg(220002260, -2040344874)).toEqual(lg(634920094, -2001579101))
expect(lg(640417317, 298956382) ^ lg(-398074626, -554826341)).toEqual(lg(-831642917, -817908795))
expect(lg(-1493347776, 1187436882) ^ lg(-1779986703, 550293355)).toEqual(lg(857398449, 1711937081))
}
it("shift left <<") {
expect(lg(-1875389825, 1268606893) << -73329513).toEqual(lg(1065353216, -691528727))
expect(lg(869553861, -291578632) << -339545061).toEqual(lg(671088640, -1046568266))
expect(lg(543726956, -1753066291) << -809014658).toEqual(lg(0, 0))
expect(lg(-895322669, 847749031) << 1030973528).toEqual(lg(-754974720, -1479892363))
expect(lg(1598039634, 819660072) << 82069876).toEqual(lg(0, 1696595968))
expect(lg(-151740279, -595601314) << 503039850).toEqual(lg(0, -763223040))
expect(lg(-1702267427, 1115684531) << 1171866675).toEqual(lg(0, -1360527360))
expect(lg(-807341493, 286689824) << -1938771891).toEqual(lg(508125184, -784066052))
expect(lg(-382832750, -2134078182) << 1537970769).toEqual(lg(-551288832, 439734876))
expect(lg(-580904341, 939559401) << 1856717061).toEqual(lg(-1409069728, 1129787))
expect(lg(-198125160, 663832884) << 1561097110).toEqual(lg(1711276032, 1295846454))
expect(lg(-1199332365, -1728151952) << 858801923).toEqual(lg(-1004724328, -940313723))
expect(lg(773140802, -181814355) << 1110910853).toEqual(lg(-1029298112, -1523092059))
expect(lg(1601160689, 869229832) << -338843811).toEqual(lg(536870912, 200145086))
expect(lg(-1919381932, -201750119) << -813015128).toEqual(lg(0, -1735502848))
expect(lg(-52019067, -102802849) << -2122946486).toEqual(lg(-1727917056, 2104066035))
expect(lg(-456947922, 1170727731) << 2126487160).toEqual(lg(0, 771751936))
expect(lg(1756719200, -1702547414) << -32425558).toEqual(lg(0, -710836224))
expect(lg(97072750, 409070577) << 1222452733).toEqual(lg(0, -1073741824))
expect(lg(1177105779, 212324545) << -834196361).toEqual(lg(0, -1182793728))
expect(lg(1395605166, -1743726419) << -1762017159).toEqual(lg(0, 1543503872))
expect(lg(703808254, 1939941481) << 1042647417).toEqual(lg(0, -67108864))
expect(lg(-702184622, -618243162) << -753853766).toEqual(lg(0, 1207959552))
expect(lg(-1368457662, 1747275710) << 1382741393).toEqual(lg(-58458112, -1619174179))
expect(lg(-74885703, 1342895995) << 1929734882).toEqual(lg(0, -299542812))
expect(lg(-61401466, -496528012) << -129147274).toEqual(lg(0, -1585446912))
expect(lg(-660169692, 1479330149) << 289081298).toEqual(lg(1888485376, 630678170))
expect(lg(-421237721, 1011668330) << 370873533).toEqual(lg(0, -536870912))
expect(lg(-821818323, -2029348763) << -916638609).toEqual(lg(0, 102137856))
expect(lg(-1246065172, -1572087360) << 1493241980).toEqual(lg(0, -1073741824))
expect(lg(578258094, -906212820) << 2074806145).toEqual(lg(1156516188, -1812425640))
expect(lg(61151968, -1770168701) << -2062208020).toEqual(lg(0, 1370357760))
expect(lg(1013576541, 460756940) << -902835237).toEqual(lg(-402653184, 1642287002))
expect(lg(-1731171245, 771836652) << 868975579).toEqual(lg(-1744830464, 1690731362))
expect(lg(1123258511, 1049676716) << 575477257).toEqual(lg(-417260032, 563566725))
expect(lg(-779579692, 1222433667) << 1238257604).toEqual(lg(411626816, -1915897795))
expect(lg(-1102469156, -543766743) << 553354173).toEqual(lg(0, -2147483648))
expect(lg(843520587, -517185932) << 1899246569).toEqual(lg(0, -1909156352))
expect(lg(-510775647, -896837143) << 1487779500).toEqual(lg(0, -487976960))
expect(lg(-1594115986, -186853391) << -119255604).toEqual(lg(-1148788736, -847308273))
expect(lg(-588635767, 1047291343) << 2089738146).toEqual(lg(0, 1940424228))
expect(lg(-1002017201, -986188138) << 800913356).toEqual(lg(1726279680, 2137615428))
expect(lg(1813551275, -400674286) << -1609938966).toEqual(lg(0, 1650633728))
expect(lg(-1333929801, 254558182) << -1518372133).toEqual(lg(-1207959552, 897838789))
expect(lg(834127324, 878312672) << -923142549).toEqual(lg(0, -1104224256))
expect(lg(126340223, -2008491127) << -252023418).toEqual(lg(-504160320, 305586753))
expect(lg(510931784, -1313923431) << 1174528765).toEqual(lg(0, 0))
expect(lg(711394099, -400560166) << -967606846).toEqual(lg(-1449390900, -1602240664))
expect(lg(1319282800, -1994311032) << 1237159401).toEqual(lg(0, 1162928128))
expect(lg(-874710629, -1242845989) << 484063041).toEqual(lg(-1749421258, 1809275319))
}
it("shift logical right >>>") {
expect(lg(88517143, 1273092247) >>> 2099569298).toEqual(lg(1982185809, 4856))
expect(lg(-1987462914, 1361836721) >>> -2053535175).toEqual(lg(40, 0))
expect(lg(1513792977, 1085974656) >>> -303705162).toEqual(lg(258, 0))
expect(lg(-2071249600, 1411897130) >>> 1015183069).toEqual(lg(-1589724844, 2))
expect(lg(-1560865755, 214919778) >>> 1191603401).toEqual(lg(827423626, 419765))
expect(lg(944265510, -995896821) >>> 485744647).toEqual(lg(376475826, 25773988))
expect(lg(1131824263, -2080089658) >>> -386336938).toEqual(lg(291969293, 528))
expect(lg(-827478170, -1185129975) >>> 2048537528).toEqual(lg(185, 0))
expect(lg(-916869993, -1344352401) >>> -791372688).toEqual(lg(45022, 0))
expect(lg(588931659, -1830830904) >>> -1259543946).toEqual(lg(587, 0))
expect(lg(473794659, 947514265) >>> -1409717873).toEqual(lg(-684574597, 28915))
expect(lg(471518489, -940479957) >>> -847604034).toEqual(lg(3, 0))
expect(lg(-818287716, 1547586919) >>> -216455813).toEqual(lg(11, 0))
expect(lg(-2088976011, -2057680935) >>> 787633143).toEqual(lg(266, 0))
expect(lg(306848777, -497453644) >>> 1584315654).toEqual(lg(-800511856, 59336150))
expect(lg(-1689341833, -927188015) >>> 1300572337).toEqual(lg(25694, 0))
expect(lg(396954515, 413418119) >>> 1180537031).toEqual(lg(237982231, 3229829))
expect(lg(1478732342, 1335401807) >>> -1668840943).toEqual(lg(1319611409, 10188))
expect(lg(-1326271298, -1643756084) >>> -2118687716).toEqual(lg(-530293557, 9))
expect(lg(1205635051, 875594107) >>> 350453433).toEqual(lg(26, 0))
expect(lg(-2049358216, -553556680) >>> -1203541232).toEqual(lg(1698203097, 57089))
expect(lg(1278981121, -1661145698) >>> 254766480).toEqual(lg(-308392901, 40188))
expect(lg(1313272948, 929268302) >>> 1175504903).toEqual(lg(-1667461656, 7259908))
expect(lg(1982277801, -1050318135) >>> 629735727).toEqual(lg(99018, 0))
expect(lg(-610510955, 1064153335) >>> 577897264).toEqual(lg(16237, 0))
expect(lg(1859860682, 1413109554) >>> 243415787).toEqual(lg(689994, 0))
expect(lg(1757351444, -7991214) >>> -1844808396).toEqual(lg(4088, 0))
expect(lg(-1277568919, -1194709070) >>> -2102413146).toEqual(lg(48441534, 0))
expect(lg(-1768551066, 1342559) >>> 365466523).toEqual(lg(42961906, 0))
expect(lg(1051996382, -213518283) >>> -717261067).toEqual(lg(1946, 0))
expect(lg(451444747, -1380034334) >>> -675522340).toEqual(lg(-605712863, 10))
expect(lg(605006440, -1956088854) >>> 192236860).toEqual(lg(8, 0))
expect(lg(-384174131, -2122615661) >>> -1278414057).toEqual(lg(-152492078, 258))
expect(lg(-1579022332, -1953425763) >>> 2134440904).toEqual(lg(-1650335224, 9146646))
expect(lg(-433112808, -1479030417) >>> -1873327132).toEqual(lg(175996054, 0))
expect(lg(-1786180708, 385945228) >>> 1526047775).toEqual(lg(771890457, 0))
expect(lg(868056695, -1200391723) >>> 93595840).toEqual(lg(868056695, -1200391723))
expect(lg(1335240662, -1403745666) >>> 1625850351).toEqual(lg(88233, 0))
expect(lg(-681452715, -1446696044) >>> -742234373).toEqual(lg(21, 0))
expect(lg(301750839, 1600782865) >>> 1678034787).toEqual(lg(200097858, 0))
expect(lg(-2077889650, 445749598) >>> 363036476).toEqual(lg(1, 0))
expect(lg(-1633078438, 1644025478) >>> -1297864237).toEqual(lg(-1160719403, 3135))
expect(lg(1159483779, 906375175) >>> -1204888593).toEqual(lg(27660, 0))
expect(lg(179807326, 1050325098) >>> -1598422013).toEqual(lg(1096217739, 131290637))
expect(lg(952383136, -193355640) >>> 415626042).toEqual(lg(61, 0))
expect(lg(972435428, -1130194211) >>> -1259042456).toEqual(lg(12362394, 0))
expect(lg(605823642, 555993310) >>> 1780437072).toEqual(lg(-924965860, 8483))
expect(lg(665774635, 184915839) >>> 1729784373).toEqual(lg(88, 0))
expect(lg(-263808048, -741669613) >>> -204793551).toEqual(lg(27109, 0))
expect(lg(-954198224, 369053217) >>> 768150041).toEqual(lg(-5828381, 10))
}
it("shift arithmetic right >>") {
expect(lg(-1780076655, 720252680) >> -1316031160).toEqual(lg(144041519, 2813487))
expect(lg(234061537, 796729805) >> 1452874739).toEqual(lg(1519, 0))
expect(lg(1523206972, 1046748891) >> 1356453463).toEqual(lg(-935479627, 124))
expect(lg(1866043067, -2009962307) >> 393061105).toEqual(lg(-15335, -1))
expect(lg(89507691, 183545611) >> -1980770119).toEqual(lg(5, 0))
expect(lg(-1062312593, 1831556953) >> 1545082311).toEqual(lg(-1283367734, 14309038))
expect(lg(-1568293714, 523169438) >> -2119005984).toEqual(lg(523169438, 0))
expect(lg(-2013675422, -748851607) >> 511130378).toEqual(lg(-1704853904, -731301))
expect(lg(-521585277, -770402055) >> -1176556648).toEqual(lg(345569760, -46))
expect(lg(-145701849, 257587932) >> -1512809002).toEqual(lg(1777038301, 61))
expect(lg(-973180026, -1694110170) >> 2083093369).toEqual(lg(-51, -1))
expect(lg(1761120319, -539393529) >> -207994821).toEqual(lg(-5, -1))
expect(lg(-30904807, -1662128199) >> -638486135).toEqual(lg(-587262921, -3246345))
expect(lg(1812122560, -701571284) >> 611632432).toEqual(lg(-10706, -1))
expect(lg(119750375, 1605805472) >> 244039684).toEqual(lg(7484398, 100362842))
expect(lg(269986751, 1459449758) >> -439796226).toEqual(lg(1, 0))
expect(lg(-1969890020, 2011804532) >> -652735044).toEqual(lg(7, 0))
expect(lg(-1582649974, 826310885) >> 613066583).toEqual(lg(-2130588861, 98))
expect(lg(756433442, -1459944907) >> -775565931).toEqual(lg(-669931160, -697))
expect(lg(1678061064, -1680910162) >> -531660641).toEqual(lg(933146972, -1))
expect(lg(1870355258, 332325727) >> -434372344).toEqual(lg(1601141595, 1298147))
expect(lg(1886551280, -2122502046) >> -763866098).toEqual(lg(-1047936567, -129548))
expect(lg(-1169141408, -592336405) >> -1841005139).toEqual(lg(-72307, -1))
expect(lg(686282122, 295988927) >> 69079212).toEqual(lg(72262, 0))
expect(lg(1825529126, -1527213400) >> 1371712838).toEqual(lg(-1582088844, -23862710))
expect(lg(633149491, 1126324183) >> 1948323684).toEqual(lg(70395261, 0))
expect(lg(-363762029, -1377253181) >> -1243200330).toEqual(lg(-329, -1))
expect(lg(-1694234908, -689608667) >> 728732313).toEqual(lg(1924403917, -21))
expect(lg(1319661865, -2053067582) >> -777879057).toEqual(lg(-62655, -1))
expect(lg(-1472236443, 19900875) >> 373478400).toEqual(lg(-1472236443, 19900875))
expect(lg(-1719111010, -1766452468) >> 942391743).toEqual(lg(-1, -1))
expect(lg(-624682758, 1345231635) >> -813574478).toEqual(lg(5131, 0))
expect(lg(1316519660, 314590421) >> -641829383).toEqual(lg(9, 0))
expect(lg(-1380652891, -474856510) >> -920501329).toEqual(lg(-14492, -1))
expect(lg(-2084688189, 1352268039) >> -177471111).toEqual(lg(40, 0))
expect(lg(507881044, 1779362534) >> -508943033).toEqual(lg(-868447412, 13901269))
expect(lg(1742323077, -1229747072) >> 401183471).toEqual(lg(-37529, -1))
expect(lg(346182810, 770838817) >> 797274667).toEqual(lg(376386, 0))
expect(lg(828281422, -477411393) >> 1298272370).toEqual(lg(-1822, -1))
expect(lg(-341778503, -671026265) >> 532386578).toEqual(lg(1021967080, -2560))
expect(lg(-1907127360, 1144311248) >> -2131012273).toEqual(lg(-1683940185, 34921))
expect(lg(756366897, -1994294687) >> -1642432978).toEqual(lg(-121723, -1))
expect(lg(-1363894143, 1241756453) >> 1681307793).toEqual(lg(-644688038, 9473))
expect(lg(1708006412, -1138876437) >> 2010442220).toEqual(lg(-278047, -1))
expect(lg(-664430929, 446891142) >> -1707024855).toEqual(lg(872834, 0))
expect(lg(-1904131429, -938887) >> -829231944).toEqual(lg(-1, -1))
expect(lg(-1043053889, 1572668786) >> 309495249).toEqual(lg(-2101780246, 11998))
expect(lg(563683687, -1497656119) >> -176819791).toEqual(lg(-11427, -1))
expect(lg(-627312011, 421917318) >> 2056663541).toEqual(lg(201, 0))
expect(lg(-904956287, -543423347) >> -617227620).toEqual(lg(-104838948, -3))
}
it("negate -") {
expect(-lg(0)).toEqual(lg(0))
expect(-lg(-1)).toEqual(lg(1))
expect(-lg(1)).toEqual(lg(-1))
expect(-MaxVal).toEqual(lg(1, -2147483648))
expect(-MinVal).toEqual(MinVal)
expect(-lg(0, 1)).toEqual(lg(0, -1))
expect(-lg(-792771844, 1518464954)).toEqual(lg(792771844, -1518464955))
expect(-lg(-1313283210, 1172119605)).toEqual(lg(1313283210, -1172119606))
expect(-lg(1034897743, 341494685)).toEqual(lg(-1034897743, -341494686))
expect(-lg(924881290, -1614058539)).toEqual(lg(-924881290, 1614058538))
expect(-lg(1636891236, 1405401039)).toEqual(lg(-1636891236, -1405401040))
expect(-lg(-2044349674, 477271432)).toEqual(lg(2044349674, -477271433))
expect(-lg(-1426086684, 1493816435)).toEqual(lg(1426086684, -1493816436))
expect(-lg(2125201680, -1667846200)).toEqual(lg(-2125201680, 1667846199))
expect(-lg(-161054645, 1272528724)).toEqual(lg(161054645, -1272528725))
expect(-lg(1013390126, 1323844682)).toEqual(lg(-1013390126, -1323844683))
expect(-lg(1028806094, 691441880)).toEqual(lg(-1028806094, -691441881))
expect(-lg(-1060422114, 11477648)).toEqual(lg(1060422114, -11477649))
expect(-lg(-1366334123, 2046238760)).toEqual(lg(1366334123, -2046238761))
expect(-lg(-1307711795, -940346050)).toEqual(lg(1307711795, 940346049))
expect(-lg(-421687960, 250174761)).toEqual(lg(421687960, -250174762))
expect(-lg(-379452754, 843386802)).toEqual(lg(379452754, -843386803))
expect(-lg(1251296999, -1144268298)).toEqual(lg(-1251296999, 1144268297))
expect(-lg(690359429, 1676679601)).toEqual(lg(-690359429, -1676679602))
expect(-lg(-1952563749, 882544419)).toEqual(lg(1952563749, -882544420))
expect(-lg(1420900897, 1865273590)).toEqual(lg(-1420900897, -1865273591))
expect(-lg(-115947827, 832851216)).toEqual(lg(115947827, -832851217))
expect(-lg(1834973959, 1423776004)).toEqual(lg(-1834973959, -1423776005))
expect(-lg(-1376766876, -1519617585)).toEqual(lg(1376766876, 1519617584))
expect(-lg(1845217535, -724725866)).toEqual(lg(-1845217535, 724725865))
expect(-lg(1133294381, -699400554)).toEqual(lg(-1133294381, 699400553))
expect(-lg(-113507585, -615978890)).toEqual(lg(113507585, 615978889))
expect(-lg(1839784424, -1163726653)).toEqual(lg(-1839784424, 1163726652))
expect(-lg(-1065777168, -1301742164)).toEqual(lg(1065777168, 1301742163))
expect(-lg(-334075220, 1058529733)).toEqual(lg(334075220, -1058529734))
expect(-lg(-1443112398, -1148167881)).toEqual(lg(1443112398, 1148167880))
expect(-lg(-1647739462, -12310883)).toEqual(lg(1647739462, 12310882))
expect(-lg(-1461318149, -518941732)).toEqual(lg(1461318149, 518941731))
expect(-lg(-56833825, 162898591)).toEqual(lg(56833825, -162898592))
expect(-lg(680096727, 1760413868)).toEqual(lg(-680096727, -1760413869))
expect(-lg(-461541717, 1103626949)).toEqual(lg(461541717, -1103626950))
expect(-lg(-1287248387, -1483137215)).toEqual(lg(1287248387, 1483137214))
expect(-lg(1681467124, 1197977022)).toEqual(lg(-1681467124, -1197977023))
expect(-lg(310946355, -885055748)).toEqual(lg(-310946355, 885055747))
expect(-lg(717629012, 1299204707)).toEqual(lg(-717629012, -1299204708))
expect(-lg(-800584851, -350245994)).toEqual(lg(800584851, 350245993))
expect(-lg(-1911014238, 441020785)).toEqual(lg(1911014238, -441020786))
expect(-lg(1647080824, 1197295588)).toEqual(lg(-1647080824, -1197295589))
expect(-lg(925751968, 479541399)).toEqual(lg(-925751968, -479541400))
expect(-lg(656919119, -1574890073)).toEqual(lg(-656919119, 1574890072))
expect(-lg(1833364814, -432106463)).toEqual(lg(-1833364814, 432106462))
expect(-lg(315730911, 1990201784)).toEqual(lg(-315730911, -1990201785))
expect(-lg(-1218524771, 572482047)).toEqual(lg(1218524771, -572482048))
expect(-lg(-276668811, -2002398730)).toEqual(lg(276668811, 2002398729))
expect(-lg(-1489416833, -834462754)).toEqual(lg(1489416833, 834462753))
expect(-lg(-2066446588, -688546121)).toEqual(lg(2066446588, 688546120))
}
it("plus +") {
expect(lg(-202981355, -566689628) + lg(1005131087, 0)).toEqual(lg(802149732, -566689627))
expect(lg(1153016325, 1674149440) + lg(-250247224, -1)).toEqual(lg(902769101, 1674149440))
expect(lg(1701699755, -1965159800) + lg(-573053270, -1)).toEqual(lg(1128646485, -1965159800))
expect(lg(-1183294843, -973893590) + lg(1250231259, 0)).toEqual(lg(66936416, -973893589))
expect(lg(-2145882999, 449544496) + lg(1990064998, 0)).toEqual(lg(-155818001, 449544496))
expect(lg(-528276750, -917980205) + lg(-716322894, -1)).toEqual(lg(-1244599644, -917980205))
expect(lg(-1061043923, 1794016498) + lg(1641637933, 0)).toEqual(lg(580594010, 1794016499))
expect(lg(-315483661, 1883156001) + lg(-1559068210, -1)).toEqual(lg(-1874551871, 1883156001))
expect(lg(-1899047326, 95409025) + lg(1287459517, 0)).toEqual(lg(-611587809, 95409025))
expect(lg(-705065818, 1167571449) + lg(-688682067, -1)).toEqual(lg(-1393747885, 1167571449))
expect(lg(-192210545, -607437554) + lg(1327945299, 0)).toEqual(lg(1135734754, -607437553))
expect(lg(11453726, -2007097641) + lg(534018444, 0)).toEqual(lg(545472170, -2007097641))
expect(lg(1809973610, -1191350400) + lg(500964333, 0)).toEqual(lg(-1984029353, -1191350400))
expect(lg(972641234, 108684756) + lg(58650386, 0)).toEqual(lg(1031291620, 108684756))
expect(lg(-1511325903, 127758048) + lg(135565137, 0)).toEqual(lg(-1375760766, 127758048))
expect(lg(-942832491, 429508921) + lg(1583511963, 0)).toEqual(lg(640679472, 429508922))
expect(lg(1500842230, -594798242) + lg(1973621483, 0)).toEqual(lg(-820503583, -594798242))
expect(lg(-1088230684, 910473912) + lg(-1331434717, -1)).toEqual(lg(1875301895, 910473912))
expect(lg(798219431, 378724963) + lg(1740882894, 0)).toEqual(lg(-1755864971, 378724963))
expect(lg(-1763683665, -683558197) + lg(-2063230727, -1)).toEqual(lg(468052904, -683558197))
expect(lg(627629519, -1636478024) + lg(-2116479866, -1)).toEqual(lg(-1488850347, -1636478025))
expect(lg(-526665240, -338305026) + lg(1442547647, 0)).toEqual(lg(915882407, -338305025))
expect(lg(-1265295286, -466473801) + lg(314413183, 0)).toEqual(lg(-950882103, -466473801))
expect(lg(-1412852606, -1417005301) + lg(739574383, 0)).toEqual(lg(-673278223, -1417005301))
expect(lg(708993121, -2035157269) + lg(2020674339, 0)).toEqual(lg(-1565299836, -2035157269))
expect(lg(847269791, 1182702858) + lg(-208540595, -1)).toEqual(lg(638729196, 1182702858))
expect(lg(97084677, -1902383954) + lg(-1550736122, -1)).toEqual(lg(-1453651445, -1902383955))
expect(lg(-267181534, -606967005) + lg(1383751193, 0)).toEqual(lg(1116569659, -606967004))
expect(lg(-904322265, 1063184819) + lg(1433370295, 0)).toEqual(lg(529048030, 1063184820))
expect(lg(1841727454, 101142421) + lg(1953979618, 0)).toEqual(lg(-499260224, 101142421))
expect(lg(-1716387490, 1045175929) + lg(-1125714932, -1)).toEqual(lg(1452864874, 1045175929))
expect(lg(-1020814821, 1506316756) + lg(2003551542, 0)).toEqual(lg(982736721, 1506316757))
expect(lg(467820886, 1107506956) + lg(-1945885691, -1)).toEqual(lg(-1478064805, 1107506955))
expect(lg(-103701719, -57552833) + lg(1540648885, 0)).toEqual(lg(1436947166, -57552832))
expect(lg(1280780483, -414981457) + lg(-1276893027, -1)).toEqual(lg(3887456, -414981457))
expect(lg(-1505747919, 606376864) + lg(-1850135506, -1)).toEqual(lg(939083871, 606376864))
expect(lg(-1797917239, -606274238) + lg(636421914, 0)).toEqual(lg(-1161495325, -606274238))
expect(lg(-551974000, 52949338) + lg(-1596979514, -1)).toEqual(lg(2146013782, 52949338))
expect(lg(484182807, -623553408) + lg(-643244860, -1)).toEqual(lg(-159062053, -623553409))
expect(lg(1170065239, 371486519) + lg(510095074, 0)).toEqual(lg(1680160313, 371486519))
expect(lg(553737773, -251530660) + lg(1669491974, 0)).toEqual(lg(-2071737549, -251530660))
expect(lg(1363264202, -324566030) + lg(-569386551, -1)).toEqual(lg(793877651, -324566030))
expect(lg(1461362302, 1255689015) + lg(436194663, 0)).toEqual(lg(1897556965, 1255689015))
expect(lg(-1463314706, 718534179) + lg(922446648, 0)).toEqual(lg(-540868058, 718534179))
expect(lg(-1684072850, -716998233) + lg(1686620381, 0)).toEqual(lg(2547531, -716998232))
expect(lg(-183257712, -2086072551) + lg(-1526555559, -1)).toEqual(lg(-1709813271, -2086072551))
expect(lg(-485818523, -1223154956) + lg(-1648523419, -1)).toEqual(lg(-2134341942, -1223154956))
expect(lg(392330048, -1934382665) + lg(1242289638, 0)).toEqual(lg(1634619686, -1934382665))
expect(lg(1907808353, -75135322) + lg(977231853, 0)).toEqual(lg(-1409927090, -75135322))
expect(lg(88305723, 1362535803) + lg(-1481307045, -1)).toEqual(lg(-1393001322, 1362535802))
}
it("minus -") {
// Workaround for https://code.google.com/p/v8/issues/detail?id=3304
expect(lg(0) - lg(1)).toEqual(lg(-1))
expect(lg(406229717, 462416044) - lg(-911848978, -1)).toEqual(lg(1318078695, 462416044))
expect(lg(873646396, 466142261) - lg(414233982, 0)).toEqual(lg(459412414, 466142261))
expect(lg(-2077914189, -573388520) - lg(467630401, 0)).toEqual(lg(1749422706, -573388520))
expect(lg(-789253983, -1980988132) - lg(-1645120336, -1)).toEqual(lg(855866353, -1980988131))
expect(lg(-482388232, 1825277273) - lg(1954093602, 0)).toEqual(lg(1858485462, 1825277273))
expect(lg(-1616159373, -1077757379) - lg(1467199419, 0)).toEqual(lg(1211608504, -1077757379))
expect(lg(-105778670, -1825579414) - lg(1285633111, 0)).toEqual(lg(-1391411781, -1825579414))
expect(lg(1306759468, -2018677385) - lg(-267161569, -1)).toEqual(lg(1573921037, -2018677385))
expect(lg(618139116, -289291128) - lg(-1457699858, -1)).toEqual(lg(2075838974, -289291128))
expect(lg(1736445522, -1980710784) - lg(1136432395, 0)).toEqual(lg(600013127, -1980710784))
expect(lg(-1970971750, 21136449) - lg(-1412537571, -1)).toEqual(lg(-558434179, 21136449))
expect(lg(-1491842755, 229693364) - lg(-1148192639, -1)).toEqual(lg(-343650116, 229693364))
expect(lg(2125082313, -2064363005) - lg(439010339, 0)).toEqual(lg(1686071974, -2064363005))
expect(lg(922634658, -1887690341) - lg(-1785080227, -1)).toEqual(lg(-1587252411, -1887690341))
expect(lg(478015362, 1754335329) - lg(1470432050, 0)).toEqual(lg(-992416688, 1754335328))
expect(lg(-1788952896, -845578935) - lg(787746350, 0)).toEqual(lg(1718268050, -845578935))
expect(lg(-1177368338, -1479013672) - lg(1801279447, 0)).toEqual(lg(1316319511, -1479013672))
expect(lg(1761081661, -2147323821) - lg(192205100, 0)).toEqual(lg(1568876561, -2147323821))
expect(lg(261772552, 1604940225) - lg(1384264283, 0)).toEqual(lg(-1122491731, 1604940224))
expect(lg(-1441241840, 1018615990) - lg(1296729001, 0)).toEqual(lg(1556996455, 1018615990))
expect(lg(907527568, -155632233) - lg(959786241, 0)).toEqual(lg(-52258673, -155632234))
expect(lg(1509034771, 1534910973) - lg(-402776628, -1)).toEqual(lg(1911811399, 1534910973))
expect(lg(-344668006, -718856465) - lg(-1579173309, -1)).toEqual(lg(1234505303, -718856464))
expect(lg(-2096618226, 1792314521) - lg(934525319, 0)).toEqual(lg(1263823751, 1792314521))
expect(lg(1861956484, -977488448) - lg(-531140528, -1)).toEqual(lg(-1901870284, -977488448))
expect(lg(-691455907, -1532994270) - lg(-861516811, -1)).toEqual(lg(170060904, -1532994269))
expect(lg(-693769914, -946809431) - lg(-276525192, -1)).toEqual(lg(-417244722, -946809431))
expect(lg(-1698674051, -834216711) - lg(1203787429, 0)).toEqual(lg(1392505816, -834216711))
expect(lg(1453492556, -930632047) - lg(1114387533, 0)).toEqual(lg(339105023, -930632047))
expect(lg(-516102112, -422836103) - lg(-2104772210, -1)).toEqual(lg(1588670098, -422836102))
expect(lg(1194707556, 1839759286) - lg(-1306927198, -1)).toEqual(lg(-1793332542, 1839759286))
expect(lg(1188016800, -1652840750) - lg(-1173206901, -1)).toEqual(lg(-1933743595, -1652840750))
expect(lg(-1268512415, 1790839027) - lg(1853779377, 0)).toEqual(lg(1172675504, 1790839027))
expect(lg(-777434907, 275932678) - lg(1260810171, 0)).toEqual(lg(-2038245078, 275932678))
expect(lg(607917442, 658575619) - lg(1248037638, 0)).toEqual(lg(-640120196, 658575618))
expect(lg(-1490388970, -2089057829) - lg(-551184357, -1)).toEqual(lg(-939204613, -2089057829))
expect(lg(-1342917439, 992436418) - lg(746979592, 0)).toEqual(lg(-2089897031, 992436418))
expect(lg(-1045496394, -1192540532) - lg(-278449623, -1)).toEqual(lg(-767046771, -1192540532))
expect(lg(1555450000, -683257085) - lg(820258106, 0)).toEqual(lg(735191894, -683257085))
expect(lg(1022728181, 481753248) - lg(-1003692417, -1)).toEqual(lg(2026420598, 481753248))
expect(lg(2028304312, 1411964223) - lg(-134013562, -1)).toEqual(lg(-2132649422, 1411964223))
expect(lg(704117341, -217374406) - lg(-642306919, -1)).toEqual(lg(1346424260, -217374406))
expect(lg(313351245, 278237511) - lg(1006229802, 0)).toEqual(lg(-692878557, 278237510))
expect(lg(2076724262, 2054685372) - lg(-672962991, -1)).toEqual(lg(-1545280043, 2054685372))
expect(lg(1990098163, 261806288) - lg(833446186, 0)).toEqual(lg(1156651977, 261806288))
expect(lg(1425435353, 1626774418) - lg(1669982892, 0)).toEqual(lg(-244547539, 1626774417))
expect(lg(2084724465, -1714068645) - lg(-2084385716, -1)).toEqual(lg(-125857115, -1714068645))
expect(lg(-1799809279, -543675020) - lg(324617484, 0)).toEqual(lg(-2124426763, -543675020))
expect(lg(1972622018, -602489858) - lg(-177176047, -1)).toEqual(lg(-2145169231, -602489858))
expect(lg(883147297, 967789979) - lg(474187246, 0)).toEqual(lg(408960051, 967789979))
}
it("times *") {
expect(lg(-1436299491, 1172705251) * lg(1721031968, 0)).toEqual(lg(-1056314208, 1039912134))
expect(lg(-1754547158, 1592794750) * lg(-850659149, -1)).toEqual(lg(15417694, -1235494072))
expect(lg(-582562130, 1508550574) * lg(-2054981347, -1)).toEqual(lg(-1312839754, -486483117))
expect(lg(-517256163, 1107889737) * lg(324089381, 0)).toEqual(lg(-377676239, 1969822597))
expect(lg(1862517504, -2146745095) * lg(2043533548, 0)).toEqual(lg(-1426078720, -1379092277))
expect(lg(-1341087062, 93674761) * lg(1272468928, 0)).toEqual(lg(-1611894400, 514550890))
expect(lg(-1911825604, 1026411170) * lg(244738503, 0)).toEqual(lg(88803236, -172420721))
expect(lg(2102189793, 425022510) * lg(750432219, 0)).toEqual(lg(1486387579, 668666773))
expect(lg(-1628887094, 2043879870) * lg(-1367964491, -1)).toEqual(lg(913918418, 2124658288))
expect(lg(454909009, -1096315634) * lg(-461844145, -1)).toEqual(lg(-1067082241, 864193319))
expect(lg(-1346593793, -331776468) * lg(1495188289, 0)).toEqual(lg(949541055, 403324299))
expect(lg(39946028, -363039140) * lg(-1134101206, -1)).toEqual(lg(-232871624, -1943313306))
expect(lg(769959254, -432157368) * lg(-488368768, -1)).toEqual(lg(-528828160, -1884969955))
expect(lg(1975078475, 1181124823) * lg(-1852476533, -1)).toEqual(lg(913322937, -2105457977))
expect(lg(-2118478876, -1521449422) * lg(-235907376, -1)).toEqual(lg(1594278208, 943829214))
expect(lg(-192590815, -1552754278) * lg(990887112, 0)).toEqual(lg(-50678328, 2146883835))
expect(lg(-74714605, 386143916) * lg(1634792395, 0)).toEqual(lg(1779498513, -1732099612))
expect(lg(1839773441, -590412588) * lg(799604314, 0)).toEqual(lg(982209626, 857499597))
expect(lg(1395571130, -1228992407) * lg(1440046952, 0)).toEqual(lg(1806268816, -990479821))
expect(lg(-1094818235, 1759139279) * lg(-156634285, -1)).toEqual(lg(1683728223, -957382628))
expect(lg(853844787, 525523561) * lg(600761926, 0)).toEqual(lg(-1590791694, 595489480))
expect(lg(-903115469, 793487771) * lg(1986597957, 0)).toEqual(lg(1353714367, 146465211))
expect(lg(-830036223, 830164681) * lg(-1711884663, -1)).toEqual(lg(1421874569, -1462441210))
expect(lg(1514898873, 1802395563) * lg(1763957470, 0)).toEqual(lg(-962035602, -2086325336))
expect(lg(-1931885288, 136587512) * lg(-241565738, -1)).toEqual(lg(213232144, -1084932179))
expect(lg(571274323, 1264898114) * lg(1823828906, 0)).toEqual(lg(-915935202, 1495104097))
expect(lg(-15708939, -2105030313) * lg(48280153, 0)).toEqual(lg(1116543789, -1473151538))
expect(lg(1792017337, -1626094957) * lg(301685947, 0)).toEqual(lg(-1230228445, -570579388))
expect(lg(-1942632452, -691115342) * lg(-889918259, -1)).toEqual(lg(1335719116, 1447187791))
expect(lg(-683458011, -1409200935) * lg(-996910555, -1)).toEqual(lg(1398640985, -1330552693))
expect(lg(562125786, -1303526635) * lg(-1761056509, -1)).toEqual(lg(-402621042, 1775759707))
expect(lg(2115902292, -1194658096) * lg(-1549721205, -1)).toEqual(lg(129149596, -78429064))
expect(lg(1852083423, 330104035) * lg(1414822755, 0)).toEqual(lg(1706925885, 1413499189))
expect(lg(-1623207532, 1442771787) * lg(-948878276, -1)).toEqual(lg(-722178384, 1850552711))
expect(lg(-898643831, 773279296) * lg(1294488911, 0)).toEqual(lg(545021767, -1389368834))
expect(lg(421823854, 802578424) * lg(1394107269, 0)).toEqual(lg(1541594150, 820379725))
expect(lg(1589092022, 237831212) * lg(-763790472, -1)).toEqual(lg(-279324848, 1175391379))
expect(lg(-1247207721, -370556328) * lg(1449901386, 0)).toEqual(lg(2089067814, 975727054))
expect(lg(109386811, 368962517) * lg(1406834819, 0)).toEqual(lg(-1977714127, -377823390))
expect(lg(2135299059, -798752868) * lg(-1861488893, -1)).toEqual(lg(1759713497, -312922364))
expect(lg(-695671854, 1917612060) * lg(2083344781, 0)).toEqual(lg(1030024362, -795941843))
expect(lg(250669253, -442179349) * lg(-552836178, -1)).toEqual(lg(-704748314, 388197332))
expect(lg(1237744278, 206295616) * lg(-1547545223, -1)).toEqual(lg(758103782, -158300478))
expect(lg(492775518, 1691641907) * lg(1172634963, 0)).toEqual(lg(-629736326, 810097466))
expect(lg(-1549380722, 49835026) * lg(-1645815552, -1)).toEqual(lg(610754048, 1997636055))
expect(lg(1850430325, -1942955614) * lg(-295254732, -1)).toEqual(lg(1696857284, 1549588995))
expect(lg(-1276671498, -1984743584) * lg(-1583554303, -1)).toEqual(lg(-66011146, -376837532))
expect(lg(-2127158934, -2058421178) * lg(1620104636, 0)).toEqual(lg(2033040344, -167450557))
expect(lg(69958717, -772556465) * lg(21655944, 0)).toEqual(lg(-1886196376, -31345953))
expect(lg(406538265, -107036516) * lg(2077087683, 0)).toEqual(lg(-38147573, -1269583268))
}
it("divide /") {
expectThrows[ArithmeticException](lg(0) / lg(0))
expectThrows[ArithmeticException](lg(5, 0) / lg(0))
expectThrows[ArithmeticException](lg(0, 5) / lg(0))
expectThrows[ArithmeticException](lg(-1) / lg(0))
expectThrows[ArithmeticException](lg(-1, 0) / lg(0))
expect(IntMinVal / lg(-1)).toEqual(IntMaxValPlus1)
expect(IntMinVal / IntMaxValPlus1).toEqual(lg(-1))
expect(IntMaxValPlus1 / lg(-1)).toEqual(IntMinVal)
expect(IntMaxValPlus1 / IntMinVal).toEqual(lg(-1))
expect(MaxVal / lg(-1)).toEqual(lg(1, -2147483648))
expect(MinVal / lg(1)).toEqual(MinVal)
expect(MinVal / lg(-1)).toEqual(MinVal)
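      // The cases below are grouped by operand magnitude: "int32" fits in
      // a signed 32-bit Int, "int53" fits exactly in a Double's 53-bit
      // mantissa, and "big" is everything else — presumably exercising the
      // specialized fast paths (Int division, floating-point division, and
      // the general case) of the Long implementation under test.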
// int32 / int32
expect(lg(-10426835, -1) / lg(-6243356, -1)).toEqual(lg(1, 0))
expect(lg(49659080, 0) / lg(-170373, -1)).toEqual(lg(-291, -1))
expect(lg(97420, 0) / lg(27521, 0)).toEqual(lg(3, 0))
expect(lg(-9881291, -1) / lg(-366, -1)).toEqual(lg(26998, 0))
expect(lg(-40, -1) / lg(81, 0)).toEqual(lg(0, 0))
expect(lg(-6007, -1) / lg(-326806, -1)).toEqual(lg(0, 0))
expect(lg(202, 0) / lg(-112, -1)).toEqual(lg(-1, -1))
expect(lg(0, 0) / lg(47, 0)).toEqual(lg(0, 0))
expect(lg(22667160, 0) / lg(70, 0)).toEqual(lg(323816, 0))
expect(lg(254, 0) / lg(-307349204, -1)).toEqual(lg(0, 0))
expect(lg(-17, -1) / lg(-44648, -1)).toEqual(lg(0, 0))
expect(lg(39646, 0) / lg(-976, -1)).toEqual(lg(-40, -1))
expect(lg(9, 0) / lg(315779722, 0)).toEqual(lg(0, 0))
expect(lg(-2674, -1) / lg(-3051991, -1)).toEqual(lg(0, 0))
expect(lg(-37697, -1) / lg(2015928, 0)).toEqual(lg(0, 0))
expect(lg(-13, -1) / lg(-31, -1)).toEqual(lg(0, 0))
expect(lg(6, 0) / lg(-334, -1)).toEqual(lg(0, 0))
expect(lg(-15989, -1) / lg(-1918, -1)).toEqual(lg(8, 0))
expect(lg(-113261535, -1) / lg(-12950, -1)).toEqual(lg(8746, 0))
expect(lg(-6362112, -1) / lg(-115, -1)).toEqual(lg(55322, 0))
expect(lg(455, 0) / lg(13919, 0)).toEqual(lg(0, 0))
expect(lg(293468259, 0) / lg(8109, 0)).toEqual(lg(36190, 0))
expect(lg(-48287007, -1) / lg(-27531186, -1)).toEqual(lg(1, 0))
expect(lg(1048904, 0) / lg(3, 0)).toEqual(lg(349634, 0))
expect(lg(-34, -1) / lg(3949717, 0)).toEqual(lg(0, 0))
expect(lg(1449, 0) / lg(-983, -1)).toEqual(lg(-1, -1))
expect(lg(18537151, 0) / lg(-1, -1)).toEqual(lg(-18537151, -1))
expect(lg(14037, 0) / lg(23645, 0)).toEqual(lg(0, 0))
expect(lg(1785, 0) / lg(-398, -1)).toEqual(lg(-4, -1))
expect(lg(346, 0) / lg(2198158, 0)).toEqual(lg(0, 0))
expect(lg(-3517419, -1) / lg(4381, 0)).toEqual(lg(-802, -1))
expect(lg(6, 0) / lg(-1, -1)).toEqual(lg(-6, -1))
expect(lg(-822, -1) / lg(-21, -1)).toEqual(lg(39, 0))
expect(lg(3629, 0) / lg(282734, 0)).toEqual(lg(0, 0))
expect(lg(-278856469, -1) / lg(3019, 0)).toEqual(lg(-92367, -1))
expect(lg(-13, -1) / lg(37, 0)).toEqual(lg(0, 0))
expect(lg(-4, -1) / lg(47150459, 0)).toEqual(lg(0, 0))
expect(lg(-26, -1) / lg(-210691, -1)).toEqual(lg(0, 0))
expect(lg(-21294, -1) / lg(156839456, 0)).toEqual(lg(0, 0))
expect(lg(-5, -1) / lg(-25644, -1)).toEqual(lg(0, 0))
expect(lg(-1009, -1) / lg(28100, 0)).toEqual(lg(0, 0))
expect(lg(16282815, 0) / lg(-18989, -1)).toEqual(lg(-857, -1))
expect(lg(-2201086, -1) / lg(276963, 0)).toEqual(lg(-7, -1))
expect(lg(11412578, 0) / lg(-37989, -1)).toEqual(lg(-300, -1))
expect(lg(8406900, 0) / lg(239727371, 0)).toEqual(lg(0, 0))
expect(lg(-1, -1) / lg(-479069, -1)).toEqual(lg(0, 0))
expect(lg(4, 0) / lg(-21776, -1)).toEqual(lg(0, 0))
expect(lg(-16812960, -1) / lg(1, 0)).toEqual(lg(-16812960, -1))
expect(lg(10873, 0) / lg(57145, 0)).toEqual(lg(0, 0))
expect(lg(-1, -1) / lg(-7, -1)).toEqual(lg(0, 0))
// int32 / int53
expect(lg(-6975858, -1) / lg(42227636, 14)).toEqual(lg(0, 0))
expect(lg(-1, -1) / lg(370644892, 82735)).toEqual(lg(0, 0))
expect(lg(43, 0) / lg(-1602218381, 49)).toEqual(lg(0, 0))
expect(lg(4063968, 0) / lg(973173538, 23810)).toEqual(lg(0, 0))
expect(lg(-388987094, -1) / lg(-241988155, 1723)).toEqual(lg(0, 0))
expect(lg(5939808, 0) / lg(-1882484681, 12)).toEqual(lg(0, 0))
expect(lg(7, 0) / lg(-385609304, 1342)).toEqual(lg(0, 0))
expect(lg(-1175803932, -1) / lg(297649103, 2408)).toEqual(lg(0, 0))
expect(lg(464610492, 0) / lg(829919518, 2777)).toEqual(lg(0, 0))
expect(lg(214483, 0) / lg(1502817270, 8078)).toEqual(lg(0, 0))
// int32 / big
expect(lg(211494165, 0) / lg(1365318534, 14804989)).toEqual(lg(0, 0))
expect(lg(5353, 0) / lg(-1032992082, -394605386)).toEqual(lg(0, 0))
expect(lg(2926, 0) / lg(26982087, -226814570)).toEqual(lg(0, 0))
expect(lg(-6, -1) / lg(-1339229562, -580578613)).toEqual(lg(0, 0))
expect(lg(-8, -1) / lg(-108570365, 4920615)).toEqual(lg(0, 0))
expect(lg(-585878041, -1) / lg(551925027, -1296114209)).toEqual(lg(0, 0))
expect(lg(-4, -1) / lg(474545806, 64068407)).toEqual(lg(0, 0))
expect(lg(34, 0) / lg(-137127086, -18652281)).toEqual(lg(0, 0))
expect(lg(785315, 0) / lg(-881374655, 29722835)).toEqual(lg(0, 0))
expect(lg(713146, 0) / lg(1442548271, 2727525)).toEqual(lg(0, 0))
// int53 / int32
expect(lg(397755625, 53271) / lg(-395701427, -1)).toEqual(lg(-578207, -1))
expect(lg(-1680186460, 2) / lg(3, 0)).toEqual(lg(-560062154, 0))
expect(lg(1514942014, 56) / lg(3, 0)).toEqual(lg(-926675094, 18))
expect(lg(713597492, 1154) / lg(-30524, -1)).toEqual(lg(-162400270, -1))
expect(lg(2028377478, 1) / lg(-691707459, -1)).toEqual(lg(-9, -1))
expect(lg(1387175556, 73) / lg(2332622, 0)).toEqual(lg(135006, 0))
expect(lg(1756997282, 1397) / lg(-116, -1)).toEqual(lg(-200274428, -13))
expect(lg(-1655346723, 0) / lg(2346, 0)).toEqual(lg(1125157, 0))
expect(lg(198249458, 5686) / lg(24492497, 0)).toEqual(lg(997096, 0))
expect(lg(873090497, 11162) / lg(-37, -1)).toEqual(lg(1369365326, -302))
expect(lg(360057887, 3519) / lg(-6976354, -1)).toEqual(lg(-2166511, -1))
expect(lg(1115898639, 48) / lg(-30, -1)).toEqual(lg(1680790298, -2))
expect(lg(154624251, 955) / lg(935, 0)).toEqual(lg(92036331, 1))
expect(lg(806830498, 1063) / lg(196698, 0)).toEqual(lg(23215066, 0))
expect(lg(-220365267, 21359) / lg(-6938757, -1)).toEqual(lg(-13221428, -1))
expect(lg(759822848, 648657) / lg(-323, -1)).toEqual(lg(-973041595, -2009))
expect(lg(-1180673754, 486098) / lg(293, 0)).toEqual(lg(171873494, 1659))
expect(lg(1387172319, 769661) / lg(980, 0)).toEqual(lg(1583541189, 785))
expect(lg(-305851327, 2) / lg(-13709, -1)).toEqual(lg(-917576, -1))
expect(lg(577374631, 17) / lg(161353, 0)).toEqual(lg(456092, 0))
expect(lg(809983260, 752) / lg(2, 0)).toEqual(lg(404991630, 376))
expect(lg(495082175, 39) / lg(1, 0)).toEqual(lg(495082175, 39))
expect(lg(1455620681, 30929) / lg(1461502, 0)).toEqual(lg(90893135, 0))
expect(lg(1388707384, 34362) / lg(184688, 0)).toEqual(lg(799104733, 0))
expect(lg(2105854641, 140021) / lg(-2, -1)).toEqual(lg(1094556328, -70011))
expect(lg(1310105355, 427420) / lg(271150, 0)).toEqual(lg(-1819673734, 1))
expect(lg(-236557650, 35455) / lg(-7052, -1)).toEqual(lg(-119338773, -6))
expect(lg(-1127581476, 0) / lg(96492, 0)).toEqual(lg(32825, 0))
expect(lg(2004387480, 7243) / lg(-545624, -1)).toEqual(lg(-57018115, -1))
expect(lg(381447319, 2213) / lg(-1597249, -1)).toEqual(lg(-5950946, -1))
expect(lg(-1860702702, 12744) / lg(-3, -1)).toEqual(lg(-811421531, -4249))
expect(lg(-548164065, 6487) / lg(5877480, 0)).toEqual(lg(4741011, 0))
expect(lg(-476290317, 131491) / lg(2874, 0)).toEqual(lg(-1064193809, 45))
expect(lg(499912484, 1) / lg(21, 0)).toEqual(lg(228327608, 0))
expect(lg(-1509435894, 8467) / lg(366943, 0)).toEqual(lg(99111506, 0))
expect(lg(-1580093356, 5) / lg(-20, -1)).toEqual(lg(-1209485521, -1))
expect(lg(1299112295, 55074) / lg(-739295, -1)).toEqual(lg(-319956618, -1))
expect(lg(-1405948570, 43) / lg(-3015755, -1)).toEqual(lg(-62197, -1))
expect(lg(1405130313, 57) / lg(27093454, 0)).toEqual(lg(9087, 0))
expect(lg(-1804200888, 1989226) / lg(24722497, 0)).toEqual(lg(345582531, 0))
expect(lg(-1642507127, 886) / lg(-2672324, -1)).toEqual(lg(-1424974, -1))
expect(lg(-1276796892, 35) / lg(77004, 0)).toEqual(lg(1991351, 0))
expect(lg(-1200759296, 816) / lg(2939970, 0)).toEqual(lg(1193137, 0))
expect(lg(399171813, 123795) / lg(926969, 0)).toEqual(lg(573585390, 0))
expect(lg(1649267984, 229752) / lg(-244, -1)).toEqual(lg(1683063904, -942))
expect(lg(-387146187, 7364) / lg(-5255245, -1)).toEqual(lg(-6019138, -1))
expect(lg(149703916, 19121) / lg(660, 0)).toEqual(lg(-123416174, 28))
expect(lg(-1582312743, 7920) / lg(-835168, -1)).toEqual(lg(-40732946, -1))
expect(lg(1431643220, 596) / lg(2, 0)).toEqual(lg(715821610, 298))
expect(lg(-1717918737, 8458) / lg(-63727, -1)).toEqual(lg(-570078780, -1))
// int53 / int53
expect(lg(-1232398900, 28871) / lg(13989713, 22345)).toEqual(lg(1, 0))
expect(lg(-916994839, 12266) / lg(1713571419, 15301)).toEqual(lg(0, 0))
expect(lg(1133414946, 229) / lg(256531666, 7)).toEqual(lg(32, 0))
expect(lg(134792921, 3907) / lg(-1656790262, 10)).toEqual(lg(368, 0))
expect(lg(1532393452, 52260) / lg(-701373106, 31864)).toEqual(lg(1, 0))
expect(lg(193990135, 1460) / lg(867607428, 6918)).toEqual(lg(0, 0))
expect(lg(867672590, 1) / lg(-1315044816, 987593)).toEqual(lg(0, 0))
expect(lg(-978844610, 2) / lg(720710523, 209)).toEqual(lg(0, 0))
expect(lg(-297570329, 1) / lg(-2127979750, 195738)).toEqual(lg(0, 0))
expect(lg(-1035330427, 5) / lg(-2091513925, 70)).toEqual(lg(0, 0))
expect(lg(1037142987, 15) / lg(-485498951, 30819)).toEqual(lg(0, 0))
expect(lg(744551901, 15) / lg(-604684037, 1587)).toEqual(lg(0, 0))
expect(lg(1341710951, 232724) / lg(1864827988, 3)).toEqual(lg(67766, 0))
expect(lg(-409318148, 157818) / lg(517165426, 227)).toEqual(lg(694, 0))
expect(lg(1908192460, 110512) / lg(-61974596, 95795)).toEqual(lg(1, 0))
expect(lg(946490654, 498) / lg(-1889366637, 1163)).toEqual(lg(0, 0))
expect(lg(1765257877, 34422) / lg(728455544, 2851)).toEqual(lg(12, 0))
expect(lg(-1725136864, 84) / lg(1122821677, 14720)).toEqual(lg(0, 0))
expect(lg(1854803780, 2) / lg(-302860117, 1)).toEqual(lg(1, 0))
expect(lg(380756581, 107) / lg(-806772264, 0)).toEqual(lg(131, 0))
expect(lg(1868292481, 1134) / lg(691774521, 33775)).toEqual(lg(0, 0))
expect(lg(-1515810361, 98) / lg(2038289788, 198)).toEqual(lg(0, 0))
expect(lg(-1943767475, 31777) / lg(-1513506636, 100)).toEqual(lg(315, 0))
expect(lg(1508904915, 18) / lg(1834666309, 976)).toEqual(lg(0, 0))
expect(lg(1430753947, 3772) / lg(-1853122145, 3615)).toEqual(lg(1, 0))
expect(lg(-1654852151, 1195820) / lg(-2100231332, 0)).toEqual(lg(2340149, 0))
expect(lg(1011710080, 18) / lg(-616681449, 57)).toEqual(lg(0, 0))
expect(lg(-495370429, 356832) / lg(-34555439, 25233)).toEqual(lg(14, 0))
expect(lg(744211838, 511) / lg(-475809581, 3)).toEqual(lg(131, 0))
expect(lg(1135128265, 67) / lg(163864249, 972)).toEqual(lg(0, 0))
expect(lg(954856869, 5120) / lg(1474096435, 3606)).toEqual(lg(1, 0))
expect(lg(1544045220, 1) / lg(85376495, 2353)).toEqual(lg(0, 0))
expect(lg(1367437144, 53) / lg(2010850631, 6)).toEqual(lg(8, 0))
expect(lg(-1398730804, 13) / lg(-2055007528, 52)).toEqual(lg(0, 0))
expect(lg(1598156017, 13) / lg(-1006929331, 160)).toEqual(lg(0, 0))
expect(lg(738323529, 41) / lg(-1508093984, 10361)).toEqual(lg(0, 0))
expect(lg(-1788797806, 31) / lg(588557582, 575930)).toEqual(lg(0, 0))
expect(lg(-913009845, 1002) / lg(204577043, 13)).toEqual(lg(76, 0))
expect(lg(1908599465, 6) / lg(1058868127, 3383)).toEqual(lg(0, 0))
expect(lg(-634312634, 75) / lg(-850292534, 332928)).toEqual(lg(0, 0))
expect(lg(-1679695022, 148) / lg(-1395453213, 912)).toEqual(lg(0, 0))
expect(lg(456310936, 71) / lg(487720864, 1590813)).toEqual(lg(0, 0))
expect(lg(-1724925398, 0) / lg(-273170277, 38)).toEqual(lg(0, 0))
expect(lg(-6742076, 15) / lg(192793866, 175)).toEqual(lg(0, 0))
expect(lg(337939061, 2094205) / lg(880147944, 41142)).toEqual(lg(50, 0))
expect(lg(-998413092, 0) / lg(-1758700885, 29)).toEqual(lg(0, 0))
expect(lg(1986052307, 3) / lg(-2092246422, 47)).toEqual(lg(0, 0))
expect(lg(-109615093, 1) / lg(-2066395387, 20016)).toEqual(lg(0, 0))
expect(lg(-1147373454, 901) / lg(313439710, 7)).toEqual(lg(127, 0))
expect(lg(-792716629, 66379) / lg(2017337246, 250513)).toEqual(lg(0, 0))
// int53 / big
expect(lg(291278707, 13808) / lg(941639833, -14430466)).toEqual(lg(0, 0))
expect(lg(-857819626, 204588) / lg(-1909684886, -709519130)).toEqual(lg(0, 0))
expect(lg(-978105991, 7435) / lg(-306472275, 158306339)).toEqual(lg(0, 0))
expect(lg(75049741, 248171) / lg(-1574105194, 64879257)).toEqual(lg(0, 0))
expect(lg(136051120, 621) / lg(-1671784392, 102642869)).toEqual(lg(0, 0))
expect(lg(-448460356, 2858) / lg(71740423, -16715717)).toEqual(lg(0, 0))
expect(lg(-1266403435, 2) / lg(-1022999838, 25812014)).toEqual(lg(0, 0))
expect(lg(552733494, 22) / lg(241731505, -33191170)).toEqual(lg(0, 0))
expect(lg(1366167794, 115591) / lg(191854687, -2136953)).toEqual(lg(0, 0))
expect(lg(1329114439, 80951) / lg(-51187101, 1471052997)).toEqual(lg(0, 0))
// big / int32
expect(lg(-1495113094, 168518701) / lg(27633219, 0)).toEqual(lg(422668131, 6))
expect(lg(-1211847018, -609137255) / lg(-2976, -1)).toEqual(lg(932715295, 204683))
expect(lg(-457166837, -15040808) / lg(-340331202, -1)).toEqual(lg(189814434, 0))
expect(lg(-104570473, -117704108) / lg(104, 0)).toEqual(lg(-1116045071, -1131771))
expect(lg(453828098, -10187034) / lg(-707, -1)).toEqual(lg(-784306379, 14408))
expect(lg(1911518920, 168201762) / lg(84, 0)).toEqual(lg(-284027201, 2002401))
expect(lg(610589058, 36481453) / lg(-30381877, -1)).toEqual(lg(-862273257, -2))
expect(lg(410700182, 503953004) / lg(-7181145, -1)).toEqual(lg(-761280647, -71))
expect(lg(194917334, -8806907) / lg(3471, 0)).toEqual(lg(-1212582262, -2538))
expect(lg(852311155, 9671380) / lg(2048884, 0)).toEqual(lg(-1201233065, 4))
expect(lg(-1028681544, 4163983) / lg(13506586, 0)).toEqual(lg(1324107666, 0))
expect(lg(-708734088, 12722223) / lg(2, 0)).toEqual(lg(-354367044, 6361111))
expect(lg(1693696214, 18402294) / lg(-241, -1)).toEqual(lg(-292170842, -76359))
expect(lg(-1932788158, 206747917) / lg(-5, -1)).toEqual(lg(2104544550, -41349584))
expect(lg(1427262980, -60732866) / lg(3409, 0)).toEqual(lg(-1928473941, -17816))
expect(lg(-677896940, 2512898) / lg(-3693, -1)).toEqual(lg(-1929237164, -681))
expect(lg(-926729663, -9677195) / lg(279372, 0)).toEqual(lg(1550060300, -35))
expect(lg(-405257725, -2271799) / lg(-3770075, -1)).toEqual(lg(-1706875941, 0))
expect(lg(-1893733008, -6491069) / lg(-595, -1)).toEqual(lg(1540708852, 10909))
expect(lg(-1343018634, -2584815) / lg(7233, 0)).toEqual(lg(-1563665409, -358))
expect(lg(-1224507547, 122799570) / lg(-328, -1)).toEqual(lg(278715917, -374389))
expect(lg(-2082712791, -15998594) / lg(-48337828, -1)).toEqual(lg(1421525100, 0))
expect(lg(-2147318181, -32907160) / lg(15, 0)).toEqual(lg(1574832373, -2193811))
expect(lg(1074158039, 118905936) / lg(-1930, -1)).toEqual(lg(-1260116915, -61610))
expect(lg(1270835097, -2201288) / lg(141, 0)).toEqual(lg(130856059, -15612))
expect(lg(320077861, -446108079) / lg(-189997, -1)).toEqual(lg(-110248455, 2347))
expect(lg(1075676628, 54005547) / lg(440453, 0)).toEqual(lg(-1659387265, 122))
expect(lg(-1800001035, 54578889) / lg(2877683, 0)).toEqual(lg(-144903831, 18))
expect(lg(-654120591, 33364168) / lg(-1393, -1)).toEqual(lg(-1312994937, -23952))
expect(lg(302695822, -2432394) / lg(58667176, 0)).toEqual(lg(-178073210, -1))
expect(lg(523451067, -54366538) / lg(-382038, -1)).toEqual(lg(1316938460, 142))
expect(lg(-78968601, 52670560) / lg(3, 0)).toEqual(lg(-1457978633, 17556853))
expect(lg(-773046192, -1010259224) / lg(-2, -1)).toEqual(lg(-1760960552, 505129611))
expect(lg(1515488136, -21874592) / lg(-9452, -1)).toEqual(lg(1210355204, 2314))
expect(lg(-1043595428, -1725615548) / lg(-2, -1)).toEqual(lg(-1625685934, 862807773))
expect(lg(-1217231978, 1516494005) / lg(375097846, 0)).toEqual(lg(184379181, 4))
expect(lg(-1873413508, -236381131) / lg(-816152673, -1)).toEqual(lg(1243945230, 0))
expect(lg(265593875, 26513736) / lg(-30289, -1)).toEqual(lg(-1540093941, -876))
expect(lg(106204837, -839801203) / lg(-1544, -1)).toEqual(lg(-1304692919, 543912))
expect(lg(815576040, -55524975) / lg(-2331779, -1)).toEqual(lg(-806250591, 23))
expect(lg(-2053929476, -1795047022) / lg(720742474, 0)).toEqual(lg(-2106907248, -3))
expect(lg(1552099699, 65024502) / lg(-525272, -1)).toEqual(lg(893100234, -124))
expect(lg(-194253417, -12405472) / lg(-9879, -1)).toEqual(lg(-1109915706, 1255))
expect(lg(412309016, 112344162) / lg(154800321, 0)).toEqual(lg(-1177955013, 0))
expect(lg(343591192, -102047607) / lg(2, 0)).toEqual(lg(-1975688052, -51023804))
expect(lg(1756765281, 8058834) / lg(-26, -1)).toEqual(lg(-728332094, -309956))
expect(lg(1762668787, -960735493) / lg(-782994, -1)).toEqual(lg(10173004, 1227))
expect(lg(1523935530, -109345767) / lg(-18963, -1)).toEqual(lg(1157067129, 5766))
expect(lg(-1256703941, 1438436241) / lg(34, 0)).toEqual(lg(1226263794, 42306948))
expect(lg(-444491016, -6150392) / lg(14, 0)).toEqual(lg(1502167534, -439314))
// big / int53
expect(lg(-1883357942, 360257606) / lg(1478768728, 4075)).toEqual(lg(88399, 0))
expect(lg(-1991900757, -48856999) / lg(-1087694619, 1074)).toEqual(lg(-45459, -1))
expect(lg(518426119, 218946975) / lg(-808940852, 49)).toEqual(lg(4395497, 0))
expect(lg(-946567777, 600381050) / lg(-1165957306, 187)).toEqual(lg(3198134, 0))
expect(lg(257885254, 845979705) / lg(792779187, 1798424)).toEqual(lg(470, 0))
expect(lg(1278680372, 6485140) / lg(1376461023, 70263)).toEqual(lg(92, 0))
expect(lg(1445602310, 420550818) / lg(1397186900, 2507)).toEqual(lg(167728, 0))
expect(lg(1822058703, 522114268) / lg(1355449555, 20)).toEqual(lg(25700177, 0))
expect(lg(532749659, -130990067) / lg(-1474774415, 3)).toEqual(lg(-35822646, -1))
expect(lg(1329707986, -2121642) / lg(-63366094, 6086)).toEqual(lg(-348, -1))
expect(lg(1028585430, -118524228) / lg(1655878874, 54392)).toEqual(lg(-2179, -1))
expect(lg(203502475, 42252914) / lg(36519512, 35581)).toEqual(lg(1187, 0))
expect(lg(341088508, 35053507) / lg(917391400, 10874)).toEqual(lg(3223, 0))
expect(lg(1454135412, 69933847) / lg(-162213744, 2)).toEqual(lg(23608500, 0))
expect(lg(1674604578, 10565585) / lg(1932570831, 1)).toEqual(lg(7286803, 0))
expect(lg(-1910257093, -16610962) / lg(-640594227, 120)).toEqual(lg(-137450, -1))
expect(lg(1080864951, 17606069) / lg(-1542196664, 153)).toEqual(lg(114592, 0))
expect(lg(-1419644278, 13937517) / lg(-919779905, 227700)).toEqual(lg(61, 0))
expect(lg(-1958380469, -855713410) / lg(1631833189, 3459)).toEqual(lg(-247360, -1))
expect(lg(1951473618, -4122677) / lg(-899615165, 66)).toEqual(lg(-61725, -1))
expect(lg(1521276132, 182952467) / lg(346742782, 82171)).toEqual(lg(2226, 0))
expect(lg(-1003647481, -7808320) / lg(-228453385, 7826)).toEqual(lg(-997, -1))
expect(lg(-875689390, 4467236) / lg(-590010750, 120938)).toEqual(lg(36, 0))
expect(lg(1189085620, 611543209) / lg(1619962756, 10919)).toEqual(lg(56005, 0))
expect(lg(-1072173311, -18503031) / lg(1971480267, 205)).toEqual(lg(-90057, -1))
expect(lg(767303802, -3407362) / lg(-339044225, 352939)).toEqual(lg(-9, -1))
expect(lg(427996893, 482974074) / lg(-736462105, 7759)).toEqual(lg(62240, 0))
expect(lg(842450255, -4396651) / lg(859272322, 2477)).toEqual(lg(-1774, -1))
expect(lg(1640433988, -2618618) / lg(302672196, 17)).toEqual(lg(-153400, -1))
expect(lg(-361322518, 63967358) / lg(-1922353888, 29810)).toEqual(lg(2145, 0))
expect(lg(-1774479550, 43276853) / lg(472456506, 408)).toEqual(lg(106042, 0))
expect(lg(-1756338345, -38928780) / lg(283612141, 102)).toEqual(lg(-381407, -1))
expect(lg(-495049835, 37161263) / lg(-2052025512, 30)).toEqual(lg(1217514, 0))
expect(lg(1606509747, -10876159) / lg(1068727249, 635715)).toEqual(lg(-17, -1))
expect(lg(-1857686692, 1918485655) / lg(454913535, 393)).toEqual(lg(4880327, 0))
expect(lg(-502107392, -511268482) / lg(-1118977400, 499)).toEqual(lg(-1023070, -1))
expect(lg(-909192131, 45216813) / lg(1442986382, 102923)).toEqual(lg(439, 0))
expect(lg(259184089, 14858724) / lg(-671961291, 6)).toEqual(lg(2171202, 0))
expect(lg(1737846340, -614952982) / lg(1379175047, 115)).toEqual(lg(-5332527, -1))
expect(lg(-406629212, -528407898) / lg(973577032, 1214)).toEqual(lg(-435180, -1))
expect(lg(-597461306, 538945619) / lg(-1867966522, 19360)).toEqual(lg(27837, 0))
expect(lg(-1906945200, -371170760) / lg(151858506, 936902)).toEqual(lg(-396, -1))
expect(lg(-1366510, -207691415) / lg(-872314548, 1)).toEqual(lg(-115583279, -1))
expect(lg(-1280665444, -104856505) / lg(1964875665, 15)).toEqual(lg(-6783543, -1))
expect(lg(897601097, -1352132581) / lg(-328204224, 0)).toEqual(lg(-1464006069, -1))
expect(lg(-496529216, 32992512) / lg(-668292521, 2)).toEqual(lg(11599107, 0))
expect(lg(1819966537, 311969505) / lg(-879441284, 370147)).toEqual(lg(842, 0))
expect(lg(433235702, 408255734) / lg(573404298, 9382)).toEqual(lg(43514, 0))
expect(lg(1693350453, -4127304) / lg(-1671879801, 17931)).toEqual(lg(-230, -1))
expect(lg(-492682302, 64433722) / lg(-1408841594, 258)).toEqual(lg(249094, 0))
// big / big
expect(lg(1450795502, -706709103) / lg(742056886, 64843937)).toEqual(lg(-10, -1))
expect(lg(-392893244, 72026637) / lg(1419676270, 875736789)).toEqual(lg(0, 0))
expect(lg(-1861146463, 8382761) / lg(-724412724, -3000735)).toEqual(lg(-2, -1))
expect(lg(1373482238, 23344691) / lg(1835527248, -294342355)).toEqual(lg(0, 0))
expect(lg(1956796392, 107480459) / lg(-560958184, -2839471)).toEqual(lg(-37, -1))
expect(lg(422228275, 30436377) / lg(-2023395425, 8226201)).toEqual(lg(3, 0))
expect(lg(1747624836, -215352612) / lg(-1349940168, 58723974)).toEqual(lg(-3, -1))
expect(lg(-583006891, 16111063) / lg(1853686630, 5479773)).toEqual(lg(2, 0))
expect(lg(1498104050, 7322401) / lg(-407388940, 2141575618)).toEqual(lg(0, 0))
expect(lg(1943726712, 869895175) / lg(-627430826, 169278540)).toEqual(lg(5, 0))
expect(lg(1872895982, 98966340) / lg(1347573135, 529034148)).toEqual(lg(0, 0))
expect(lg(16010610, 187913494) / lg(-848952152, -81951424)).toEqual(lg(-2, -1))
expect(lg(830929771, -4393252) / lg(1829525088, 52659897)).toEqual(lg(0, 0))
expect(lg(-2093526384, 133319293) / lg(-464927151, 6049576)).toEqual(lg(22, 0))
expect(lg(1056318793, 13467735) / lg(1970348162, -672507521)).toEqual(lg(0, 0))
expect(lg(-28853693, -169722715) / lg(-83877421, 770900857)).toEqual(lg(0, 0))
expect(lg(1743854071, -302158995) / lg(80117835, 11113120)).toEqual(lg(-27, -1))
expect(lg(635796581, -146765250) / lg(441664676, 23716738)).toEqual(lg(-6, -1))
expect(lg(-1048312948, -37662905) / lg(1319664078, 208772026)).toEqual(lg(0, 0))
expect(lg(-784292680, -14102823) / lg(2037268040, 744987722)).toEqual(lg(0, 0))
expect(lg(-1116104092, -2073525743) / lg(1766685765, -11731135)).toEqual(lg(176, 0))
expect(lg(-1991687284, 19448294) / lg(-1731357606, -202272807)).toEqual(lg(0, 0))
expect(lg(-2042068328, -52956481) / lg(370482897, -7759903)).toEqual(lg(6, 0))
expect(lg(334395247, 1906338595) / lg(342095090, 1248830168)).toEqual(lg(1, 0))
expect(lg(-309616588, 44123460) / lg(2040055580, -476494291)).toEqual(lg(0, 0))
expect(lg(137178123, 36336421) / lg(-360221107, -515689970)).toEqual(lg(0, 0))
expect(lg(-422856762, -16760844) / lg(-334268074, -43984484)).toEqual(lg(0, 0))
expect(lg(-24820293, 25823996) / lg(390711705, 288223876)).toEqual(lg(0, 0))
expect(lg(1170265006, 2998984) / lg(-134995170, -2123267074)).toEqual(lg(0, 0))
expect(lg(-1501380980, -6088910) / lg(-1175861016, -56027408)).toEqual(lg(0, 0))
expect(lg(307880183, 196786483) / lg(-1107761890, -3480429)).toEqual(lg(-56, -1))
expect(lg(-588606997, -37732967) / lg(-1124435958, -77404915)).toEqual(lg(0, 0))
expect(lg(90560661, 990295925) / lg(731139348, 9165999)).toEqual(lg(108, 0))
expect(lg(46312609, -28251908) / lg(1279863155, -519028300)).toEqual(lg(0, 0))
expect(lg(1123427761, 55212863) / lg(-1081219733, 233090714)).toEqual(lg(0, 0))
expect(lg(1447869812, -3646400) / lg(-1237950546, -27122943)).toEqual(lg(0, 0))
expect(lg(-1399920635, 110072031) / lg(-398678056, -8069387)).toEqual(lg(-13, -1))
expect(lg(513704441, 14319377) / lg(-796719013, 260081997)).toEqual(lg(0, 0))
expect(lg(166886349, -190148673) / lg(68245235, -21656365)).toEqual(lg(8, 0))
expect(lg(-1594024534, -144937584) / lg(177399758, 200473672)).toEqual(lg(0, 0))
expect(lg(447753993, -23591908) / lg(1399162166, 12505918)).toEqual(lg(-1, -1))
expect(lg(1500283330, 5361180) / lg(348398676, 156400271)).toEqual(lg(0, 0))
expect(lg(-216115001, 670826068) / lg(1759253954, -470062110)).toEqual(lg(-1, -1))
expect(lg(-1251659767, 18831569) / lg(-669341445, -34474821)).toEqual(lg(0, 0))
expect(lg(817032953, 218701872) / lg(-176557210, 6899121)).toEqual(lg(31, 0))
expect(lg(1365998269, 613319842) / lg(319204438, -30758748)).toEqual(lg(-19, -1))
expect(lg(-428500325, 6610536) / lg(-46648893, -105360271)).toEqual(lg(0, 0))
expect(lg(784528299, -6958267) / lg(1370662827, -774132635)).toEqual(lg(0, 0))
expect(lg(-769114167, 137614183) / lg(-929091402, -67103082)).toEqual(lg(-2, -1))
expect(lg(1810734914, 124115952) / lg(1149563530, 15197570)).toEqual(lg(8, 0))
}
it("modulo %") {
expectThrows[ArithmeticException](lg(0) % lg(0))
expectThrows[ArithmeticException](lg(5, 0) % lg(0))
expectThrows[ArithmeticException](lg(0, 5) % lg(0))
expectThrows[ArithmeticException](lg(-1) % lg(0))
expectThrows[ArithmeticException](lg(-1, 0) % lg(0))
expect(IntMinVal % lg(-1)).toEqual(lg(0))
expect(IntMinVal % IntMaxValPlus1).toEqual(lg(0))
expect(IntMaxValPlus1 % lg(-1)).toEqual(lg(0))
expect(IntMaxValPlus1 % IntMinVal).toEqual(lg(0))
expect(MaxVal % lg(-1)).toEqual(lg(0))
expect(MinVal % lg(1)).toEqual(lg(0))
expect(MinVal % lg(-1)).toEqual(lg(0))
expect(MaxVal % MinVal).toEqual(lg(-1, 2147483647))
expect(MaxVal % MaxVal).toEqual(lg(0))
expect(MinVal % MinVal).toEqual(lg(0))
expect(MinVal % MaxVal).toEqual(lg(-1))
// int32 % int32
expect(lg(880, 0) % lg(-219594, -1)).toEqual(lg(880, 0))
expect(lg(-49125, -1) % lg(98, 0)).toEqual(lg(-27, -1))
expect(lg(-1922504, -1) % lg(4195, 0)).toEqual(lg(-1194, -1))
expect(lg(3, 0) % lg(7963, 0)).toEqual(lg(3, 0))
expect(lg(-626, -1) % lg(-484628621, -1)).toEqual(lg(-626, -1))
expect(lg(11315, 0) % lg(-3914076, -1)).toEqual(lg(11315, 0))
expect(lg(15712341, 0) % lg(-1045740, -1)).toEqual(lg(26241, 0))
expect(lg(-855439, -1) % lg(5213, 0)).toEqual(lg(-507, -1))
expect(lg(-101026259, -1) % lg(-500, -1)).toEqual(lg(-259, -1))
expect(lg(27720977, 0) % lg(-42317657, -1)).toEqual(lg(27720977, 0))
expect(lg(25954, 0) % lg(-3, -1)).toEqual(lg(1, 0))
expect(lg(338447650, 0) % lg(-8505730, -1)).toEqual(lg(6724180, 0))
expect(lg(23967, 0) % lg(-13479, -1)).toEqual(lg(10488, 0))
expect(lg(885202, 0) % lg(-3, -1)).toEqual(lg(1, 0))
expect(lg(692795590, 0) % lg(-10, -1)).toEqual(lg(0, 0))
expect(lg(-1, -1) % lg(156, 0)).toEqual(lg(-1, -1))
expect(lg(388, 0) % lg(189523294, 0)).toEqual(lg(388, 0))
expect(lg(352, 0) % lg(-3257, -1)).toEqual(lg(352, 0))
expect(lg(-9, -1) % lg(14653, 0)).toEqual(lg(-9, -1))
expect(lg(-258745, -1) % lg(8, 0)).toEqual(lg(-1, -1))
expect(lg(-206976653, -1) % lg(34321, 0)).toEqual(lg(-21023, -1))
expect(lg(-1, -1) % lg(-971, -1)).toEqual(lg(-1, -1))
expect(lg(59, 0) % lg(388, 0)).toEqual(lg(59, 0))
expect(lg(-7, -1) % lg(1, 0)).toEqual(lg(0, 0))
expect(lg(77, 0) % lg(13, 0)).toEqual(lg(12, 0))
expect(lg(224246, 0) % lg(719055, 0)).toEqual(lg(224246, 0))
expect(lg(-61296, -1) % lg(-135723660, -1)).toEqual(lg(-61296, -1))
expect(lg(6897809, 0) % lg(793543, 0)).toEqual(lg(549465, 0))
expect(lg(45, 0) % lg(984210147, 0)).toEqual(lg(45, 0))
expect(lg(-64, -1) % lg(1, 0)).toEqual(lg(0, 0))
expect(lg(379611734, 0) % lg(4, 0)).toEqual(lg(2, 0))
expect(lg(0, 0) % lg(-263, -1)).toEqual(lg(0, 0))
expect(lg(29, 0) % lg(-117, -1)).toEqual(lg(29, 0))
expect(lg(245094, 0) % lg(-70, -1)).toEqual(lg(24, 0))
expect(lg(0, 0) % lg(5, 0)).toEqual(lg(0, 0))
expect(lg(2, 0) % lg(47787927, 0)).toEqual(lg(2, 0))
expect(lg(-124, -1) % lg(-22714040, -1)).toEqual(lg(-124, -1))
expect(lg(412, 0) % lg(-17176, -1)).toEqual(lg(412, 0))
expect(lg(-11860, -1) % lg(9506787, 0)).toEqual(lg(-11860, -1))
expect(lg(-31, -1) % lg(-1544676, -1)).toEqual(lg(-31, -1))
expect(lg(-1990315281, -1) % lg(-7, -1)).toEqual(lg(-3, -1))
expect(lg(99, 0) % lg(-277, -1)).toEqual(lg(99, 0))
expect(lg(-29227, -1) % lg(-161, -1)).toEqual(lg(-86, -1))
expect(lg(106, 0) % lg(-47032956, -1)).toEqual(lg(106, 0))
expect(lg(18, 0) % lg(510836179, 0)).toEqual(lg(18, 0))
expect(lg(3543112, 0) % lg(10, 0)).toEqual(lg(2, 0))
expect(lg(3547603, 0) % lg(-1506666, -1)).toEqual(lg(534271, 0))
expect(lg(-16361, -1) % lg(10637613, 0)).toEqual(lg(-16361, -1))
expect(lg(606879016, 0) % lg(-16, -1)).toEqual(lg(8, 0))
expect(lg(-1, -1) % lg(46424570, 0)).toEqual(lg(-1, -1))
// int32 % int53
expect(lg(-3, -1) % lg(206801065, 1)).toEqual(lg(-3, -1))
expect(lg(-57756, -1) % lg(-1211050362, 13)).toEqual(lg(-57756, -1))
expect(lg(0, 0) % lg(-475702596, 10040)).toEqual(lg(0, 0))
expect(lg(423524, 0) % lg(-2084961556, 16)).toEqual(lg(423524, 0))
expect(lg(38317, 0) % lg(-1699004544, 24)).toEqual(lg(38317, 0))
expect(lg(60291, 0) % lg(-458289291, 56)).toEqual(lg(60291, 0))
expect(lg(1, 0) % lg(-1247681936, 1229953)).toEqual(lg(1, 0))
expect(lg(296788, 0) % lg(183245860, 52)).toEqual(lg(296788, 0))
expect(lg(-2005515, -1) % lg(331735459, 17)).toEqual(lg(-2005515, -1))
expect(lg(-179812, -1) % lg(-853047550, 5154)).toEqual(lg(-179812, -1))
expect(lg(-3678, -1) % lg(1751271067, 243605)).toEqual(lg(-3678, -1))
expect(lg(-93867, -1) % lg(-1925367590, 42)).toEqual(lg(-93867, -1))
expect(lg(7600917, 0) % lg(-1807424604, 95574)).toEqual(lg(7600917, 0))
expect(lg(300012, 0) % lg(1951216728, 101)).toEqual(lg(300012, 0))
expect(lg(-6347, -1) % lg(-438713154, 23)).toEqual(lg(-6347, -1))
expect(lg(-41, -1) % lg(-1211982116, 459)).toEqual(lg(-41, -1))
expect(lg(3425, 0) % lg(-1580976156, 2)).toEqual(lg(3425, 0))
expect(lg(-25, -1) % lg(200240265, 25993)).toEqual(lg(-25, -1))
expect(lg(-8303, -1) % lg(1353761386, 1921)).toEqual(lg(-8303, -1))
expect(lg(274032571, 0) % lg(1455543028, 255)).toEqual(lg(274032571, 0))
expect(lg(-3, -1) % lg(1143775281, 729)).toEqual(lg(-3, -1))
expect(lg(-1124428, -1) % lg(-521284400, 339)).toEqual(lg(-1124428, -1))
expect(lg(-2, -1) % lg(-303859962, 2524)).toEqual(lg(-2, -1))
expect(lg(1, 0) % lg(-402000545, 1)).toEqual(lg(1, 0))
expect(lg(107013504, 0) % lg(157604607, 3)).toEqual(lg(107013504, 0))
expect(lg(4976822, 0) % lg(-2046021074, 2230)).toEqual(lg(4976822, 0))
expect(lg(-1, -1) % lg(-306200858, 41)).toEqual(lg(-1, -1))
expect(lg(80396, 0) % lg(-409002766, 13)).toEqual(lg(80396, 0))
expect(lg(937638, 0) % lg(-697219650, 26)).toEqual(lg(937638, 0))
expect(lg(756, 0) % lg(-948806692, 1700920)).toEqual(lg(756, 0))
expect(lg(5, 0) % lg(646021801, 21350)).toEqual(lg(5, 0))
expect(lg(262831839, 0) % lg(1086270794, 10633)).toEqual(lg(262831839, 0))
expect(lg(-2146273993, -1) % lg(-1539129401, 0)).toEqual(lg(-2146273993, -1))
expect(lg(59799, 0) % lg(1910837623, 102082)).toEqual(lg(59799, 0))
expect(lg(-5347, -1) % lg(1965292799, 18)).toEqual(lg(-5347, -1))
expect(lg(926, 0) % lg(1939309159, 104206)).toEqual(lg(926, 0))
expect(lg(1, 0) % lg(1651864405, 1233)).toEqual(lg(1, 0))
expect(lg(334, 0) % lg(581635234, 20)).toEqual(lg(334, 0))
expect(lg(-61747, -1) % lg(-842193425, 1497)).toEqual(lg(-61747, -1))
expect(lg(-1, -1) % lg(758739794, 79508)).toEqual(lg(-1, -1))
expect(lg(59605313, 0) % lg(-1162319751, 0)).toEqual(lg(59605313, 0))
expect(lg(12267518, 0) % lg(1340161110, 568352)).toEqual(lg(12267518, 0))
expect(lg(19230695, 0) % lg(1844291137, 21)).toEqual(lg(19230695, 0))
expect(lg(3950296, 0) % lg(-848670202, 243)).toEqual(lg(3950296, 0))
expect(lg(503276, 0) % lg(-1756374670, 1)).toEqual(lg(503276, 0))
expect(lg(30880536, 0) % lg(-1380766565, 51064)).toEqual(lg(30880536, 0))
expect(lg(5659804, 0) % lg(-725339057, 1)).toEqual(lg(5659804, 0))
expect(lg(11882277, 0) % lg(243727355, 7)).toEqual(lg(11882277, 0))
expect(lg(371783010, 0) % lg(630143580, 14001)).toEqual(lg(371783010, 0))
expect(lg(840, 0) % lg(-1719362098, 109)).toEqual(lg(840, 0))
// int32 % big
expect(lg(-267334310, -1) % lg(1537718115, -134598983)).toEqual(lg(-267334310, -1))
expect(lg(57, 0) % lg(-1668867109, -10100325)).toEqual(lg(57, 0))
expect(lg(30332, 0) % lg(-615310153, -90004876)).toEqual(lg(30332, 0))
expect(lg(187, 0) % lg(-590535223, 8244144)).toEqual(lg(187, 0))
expect(lg(-2, -1) % lg(2125719729, 390762530)).toEqual(lg(-2, -1))
expect(lg(-4252915, -1) % lg(2070489053, 23484863)).toEqual(lg(-4252915, -1))
expect(lg(-2, -1) % lg(37507428, 96913792)).toEqual(lg(-2, -1))
expect(lg(10, 0) % lg(-533680689, -79923599)).toEqual(lg(10, 0))
expect(lg(-14, -1) % lg(-930313329, 2972085)).toEqual(lg(-14, -1))
expect(lg(-20155233, -1) % lg(-49989774, -25498857)).toEqual(lg(-20155233, -1))
expect(lg(-406, -1) % lg(2109762544, 126098611)).toEqual(lg(-406, -1))
expect(lg(43, 0) % lg(598811771, 154269509)).toEqual(lg(43, 0))
expect(lg(-4830, -1) % lg(-1043650540, -2874494)).toEqual(lg(-4830, -1))
expect(lg(-4271, -1) % lg(-950378080, -106126516)).toEqual(lg(-4271, -1))
expect(lg(126, 0) % lg(-877412093, -90804729)).toEqual(lg(126, 0))
expect(lg(40445345, 0) % lg(-1461218790, 6749169)).toEqual(lg(40445345, 0))
expect(lg(-1, -1) % lg(1776909778, 28425796)).toEqual(lg(-1, -1))
expect(lg(-2123811, -1) % lg(-51805125, 44153129)).toEqual(lg(-2123811, -1))
expect(lg(-25650126, -1) % lg(-1317209725, -16141386)).toEqual(lg(-25650126, -1))
expect(lg(30, 0) % lg(712479950, 158765535)).toEqual(lg(30, 0))
expect(lg(2494211, 0) % lg(-432472367, 21859989)).toEqual(lg(2494211, 0))
expect(lg(100937174, 0) % lg(212873269, -74778594)).toEqual(lg(100937174, 0))
expect(lg(901687, 0) % lg(-1225225931, -512562107)).toEqual(lg(901687, 0))
expect(lg(-422854, -1) % lg(-1361503923, -98826041)).toEqual(lg(-422854, -1))
expect(lg(2, 0) % lg(386622050, -9945722)).toEqual(lg(2, 0))
expect(lg(-465211, -1) % lg(-418132599, -160175963)).toEqual(lg(-465211, -1))
expect(lg(63, 0) % lg(-1330189832, 180061391)).toEqual(lg(63, 0))
expect(lg(47, 0) % lg(1439978282, -16520554)).toEqual(lg(47, 0))
expect(lg(233450563, 0) % lg(-328511972, 377539644)).toEqual(lg(233450563, 0))
expect(lg(-134912, -1) % lg(1349244684, -12612862)).toEqual(lg(-134912, -1))
expect(lg(-95441, -1) % lg(511120357, 16112596)).toEqual(lg(-95441, -1))
expect(lg(-1160726496, -1) % lg(-913371934, -9441145)).toEqual(lg(-1160726496, -1))
expect(lg(-502, -1) % lg(-1021329523, -377728463)).toEqual(lg(-502, -1))
expect(lg(3313324, 0) % lg(-67454848, 442297818)).toEqual(lg(3313324, 0))
expect(lg(-145, -1) % lg(-1010112762, 29724438)).toEqual(lg(-145, -1))
expect(lg(-19091, -1) % lg(-1944488998, -173788926)).toEqual(lg(-19091, -1))
expect(lg(-3331910, -1) % lg(2144172121, 73505274)).toEqual(lg(-3331910, -1))
expect(lg(56622, 0) % lg(-1451372835, 5219178)).toEqual(lg(56622, 0))
expect(lg(0, 0) % lg(556032035, 32471322)).toEqual(lg(0, 0))
expect(lg(800, 0) % lg(-1649243607, 2299368)).toEqual(lg(800, 0))
expect(lg(86949, 0) % lg(794150820, -1384562176)).toEqual(lg(86949, 0))
expect(lg(10, 0) % lg(-790693444, 1000869239)).toEqual(lg(10, 0))
expect(lg(-333236, -1) % lg(-1020207444, 125043716)).toEqual(lg(-333236, -1))
expect(lg(-598, -1) % lg(-93061561, -329975227)).toEqual(lg(-598, -1))
expect(lg(-19, -1) % lg(-1096862531, 163621631)).toEqual(lg(-19, -1))
expect(lg(465328283, 0) % lg(-21925149, -52057346)).toEqual(lg(465328283, 0))
expect(lg(-25837, -1) % lg(677002620, 8643698)).toEqual(lg(-25837, -1))
expect(lg(-383633650, -1) % lg(1609519787, 8262009)).toEqual(lg(-383633650, -1))
expect(lg(-66, -1) % lg(1917139359, 239618524)).toEqual(lg(-66, -1))
expect(lg(1676620, 0) % lg(910745834, 82765572)).toEqual(lg(1676620, 0))
      // int53 % int32
expect(lg(1244623439, 3) % lg(-231372097, -1)).toEqual(lg(15827410, 0))
expect(lg(-1392787378, 124) % lg(-20252, -1)).toEqual(lg(15118, 0))
expect(lg(578165055, 72) % lg(13, 0)).toEqual(lg(11, 0))
expect(lg(-1836745385, 3) % lg(-95630157, -1)).toEqual(lg(42298679, 0))
expect(lg(-1766124150, 29) % lg(-45315780, -1)).toEqual(lg(17447610, 0))
expect(lg(540281958, 253606) % lg(-11, -1)).toEqual(lg(0, 0))
expect(lg(-442404110, 7696) % lg(1489246, 0)).toEqual(lg(51980, 0))
expect(lg(-631827526, 1455) % lg(8, 0)).toEqual(lg(2, 0))
expect(lg(1266390909, 49) % lg(-34627848, -1)).toEqual(lg(5125741, 0))
expect(lg(-453014259, 21413) % lg(149449, 0)).toEqual(lg(77691, 0))
expect(lg(1573062436, 653) % lg(671211684, 0)).toEqual(lg(521867604, 0))
expect(lg(-21113520, 0) % lg(177469767, 0)).toEqual(lg(14579368, 0))
expect(lg(-262825676, 31) % lg(1, 0)).toEqual(lg(0, 0))
expect(lg(-163968426, 1) % lg(33341027, 0)).toEqual(lg(24027362, 0))
expect(lg(668741217, 14380) % lg(-11334498, -1)).toEqual(lg(6792805, 0))
expect(lg(808041281, 1818) % lg(-10, -1)).toEqual(lg(9, 0))
expect(lg(-1601247507, 25) % lg(-235, -1)).toEqual(lg(204, 0))
expect(lg(-1577206289, 0) % lg(1618642, 0)).toEqual(lg(61089, 0))
expect(lg(863396135, 503) % lg(-321808286, -1)).toEqual(lg(289305533, 0))
expect(lg(-900149281, 55) % lg(15166197, 0)).toEqual(lg(7272892, 0))
expect(lg(1802954050, 3593) % lg(7, 0)).toEqual(lg(3, 0))
expect(lg(800669146, 41901) % lg(-20591, -1)).toEqual(lg(12036, 0))
expect(lg(-1055636867, 39) % lg(48, 0)).toEqual(lg(29, 0))
expect(lg(-491067123, 14) % lg(1, 0)).toEqual(lg(0, 0))
expect(lg(1420289126, 67) % lg(1010219079, 0)).toEqual(lg(260441364, 0))
expect(lg(1338756461, 32) % lg(-4427443, -1)).toEqual(lg(3936541, 0))
expect(lg(-820843233, 778) % lg(-273780418, -1)).toEqual(lg(183313645, 0))
expect(lg(-1033566360, 561225) % lg(-156677, -1)).toEqual(lg(91783, 0))
expect(lg(-1567070603, 38) % lg(-8, -1)).toEqual(lg(5, 0))
expect(lg(-1649343541, 185302) % lg(-19368267, -1)).toEqual(lg(11214823, 0))
expect(lg(-591434325, 76351) % lg(94212, 0)).toEqual(lg(75719, 0))
expect(lg(235794528, 55) % lg(17599, 0)).toEqual(lg(10941, 0))
expect(lg(-763589741, 116) % lg(-14942, -1)).toEqual(lg(5331, 0))
expect(lg(-1283158225, 237055) % lg(-2, -1)).toEqual(lg(1, 0))
expect(lg(1537105400, 29108) % lg(-37848, -1)).toEqual(lg(24400, 0))
expect(lg(-56778611, 994650) % lg(-170, -1)).toEqual(lg(95, 0))
expect(lg(-2057746932, 7) % lg(-10100, -1)).toEqual(lg(9836, 0))
expect(lg(1365793356, 12) % lg(-38454651, -1)).toEqual(lg(30255783, 0))
expect(lg(-2128793438, 4) % lg(6825, 0)).toEqual(lg(417, 0))
expect(lg(1667515072, 8) % lg(2, 0)).toEqual(lg(0, 0))
expect(lg(420324337, 980) % lg(-845, -1)).toEqual(lg(257, 0))
expect(lg(-771084081, 8204) % lg(105392, 0)).toEqual(lg(82991, 0))
expect(lg(-332377894, 1) % lg(882238, 0)).toEqual(lg(691256, 0))
expect(lg(1749263284, 11) % lg(-20, -1)).toEqual(lg(0, 0))
expect(lg(347303218, 1234317) % lg(-13, -1)).toEqual(lg(4, 0))
expect(lg(1199079324, 17271) % lg(11033, 0)).toEqual(lg(150, 0))
expect(lg(1196217208, 13) % lg(-23, -1)).toEqual(lg(14, 0))
expect(lg(-1078128939, 0) % lg(740155481, 0)).toEqual(lg(256216433, 0))
expect(lg(-1354463473, 3691) % lg(-63588, -1)).toEqual(lg(45583, 0))
expect(lg(-1255896801, 1469630) % lg(-502, -1)).toEqual(lg(459, 0))
// int53 % int53
expect(lg(1805177178, 1) % lg(-1293833696, 410)).toEqual(lg(1805177178, 1))
expect(lg(647007072, 1811985) % lg(1091239449, 3)).toEqual(lg(-583440651, 2))
expect(lg(1346307032, 1) % lg(-672335266, 33)).toEqual(lg(1346307032, 1))
expect(lg(858355422, 81) % lg(1490435172, 162402)).toEqual(lg(858355422, 81))
expect(lg(-1299053281, 6330) % lg(1042770708, 1)).toEqual(lg(744276027, 1))
expect(lg(-88774269, 25) % lg(775537355, 1)).toEqual(lg(29273105, 0))
expect(lg(-962613261, 4309) % lg(-529185362, 5)).toEqual(lg(383200445, 2))
expect(lg(-171009725, 445) % lg(-1167557775, 307982)).toEqual(lg(-171009725, 445))
expect(lg(1848497503, 78519) % lg(1533824479, 15755)).toEqual(lg(8166883, 15498))
expect(lg(-1752533311, 17) % lg(1904799096, 73566)).toEqual(lg(-1752533311, 17))
expect(lg(-1641266817, 46) % lg(-31936789, 751199)).toEqual(lg(-1641266817, 46))
expect(lg(-637954451, 32352) % lg(-10259599, 1131)).toEqual(lg(-350685679, 656))
expect(lg(-1657673170, 122149) % lg(-534342412, 0)).toEqual(lg(-1671876486, 0))
expect(lg(-660565679, 235) % lg(-897090894, 14655)).toEqual(lg(-660565679, 235))
expect(lg(-1798560222, 612) % lg(-236039758, 2924)).toEqual(lg(-1798560222, 612))
expect(lg(1010899296, 62798) % lg(-1974205776, 9515)).toEqual(lg(-28767936, 5704))
expect(lg(1206965517, 91420) % lg(880030876, 7)).toEqual(lg(-2004786867, 4))
expect(lg(712148070, 3) % lg(472319826, 2838)).toEqual(lg(712148070, 3))
expect(lg(-1275175525, 44) % lg(162799342, 861329)).toEqual(lg(-1275175525, 44))
expect(lg(-516916094, 191396) % lg(-1920802608, 30)).toEqual(lg(1187224322, 14))
expect(lg(-1627551726, 4499) % lg(1200735793, 1)).toEqual(lg(-1461747946, 0))
expect(lg(453535447, 39039) % lg(520791957, 141909)).toEqual(lg(453535447, 39039))
expect(lg(216221627, 20) % lg(-781572865, 8131)).toEqual(lg(216221627, 20))
expect(lg(-1999221053, 528) % lg(1107934896, 25)).toEqual(lg(1611884803, 23))
expect(lg(-701225584, 44) % lg(-1403297482, 0)).toEqual(lg(1722095012, 0))
expect(lg(-232837834, 5049) % lg(1000581509, 15836)).toEqual(lg(-232837834, 5049))
expect(lg(-82376749, 239) % lg(-163409376, 7688)).toEqual(lg(-82376749, 239))
expect(lg(941363778, 110) % lg(336092572, 3)).toEqual(lg(2063025646, 2))
expect(lg(1004884706, 1133) % lg(283309861, 750)).toEqual(lg(721574845, 383))
expect(lg(1436404594, 1595) % lg(1522987410, 70)).toEqual(lg(-2004547354, 47))
expect(lg(1696970595, 8) % lg(-1168832286, 4163)).toEqual(lg(1696970595, 8))
expect(lg(-1244970780, 32) % lg(394179266, 13)).toEqual(lg(-2033329312, 6))
expect(lg(1864629418, 1) % lg(528888491, 970677)).toEqual(lg(1864629418, 1))
expect(lg(-1763600443, 962032) % lg(1535552275, 102108)).toEqual(lg(1596298266, 43057))
expect(lg(1181714932, 5) % lg(1296434411, 26359)).toEqual(lg(1181714932, 5))
expect(lg(1535735456, 276446) % lg(-1930593680, 7)).toEqual(lg(-2140209952, 7))
expect(lg(2079501385, 97596) % lg(-1803771626, 21)).toEqual(lg(-1703068243, 11))
expect(lg(286993796, 174379) % lg(656426284, 70488)).toEqual(lg(-1025858772, 33402))
expect(lg(221015334, 1635766) % lg(-2014306775, 270673)).toEqual(lg(-578045904, 11724))
expect(lg(-2103734262, 977) % lg(-22949494, 920)).toEqual(lg(-2080784768, 56))
expect(lg(-922083739, 29) % lg(2040148267, 19160)).toEqual(lg(-922083739, 29))
expect(lg(-559850131, 11989) % lg(1366001936, 2880)).toEqual(lg(-1728890579, 468))
expect(lg(-1071198220, 2182) % lg(1526886260, 17)).toEqual(lg(1341547600, 13))
expect(lg(-896451936, 45) % lg(2132477227, 164356)).toEqual(lg(-896451936, 45))
expect(lg(-561327714, 1420) % lg(-368698210, 151)).toEqual(lg(-1538011120, 53))
expect(lg(2112956103, 118429) % lg(-374507565, 859)).toEqual(lg(1880884956, 621))
expect(lg(380445410, 8) % lg(-1822479769, 1)).toEqual(lg(902909663, 0))
expect(lg(-1867274924, 105813) % lg(175641312, 79)).toEqual(lg(-652149100, 56))
expect(lg(-991170416, 37) % lg(1740161397, 88122)).toEqual(lg(-991170416, 37))
expect(lg(-31602776, 1) % lg(-503633567, 241909)).toEqual(lg(-31602776, 1))
// int53 % big
expect(lg(-930109303, 3) % lg(1606982787, 925386547)).toEqual(lg(-930109303, 3))
expect(lg(-717668907, 16251) % lg(2079100937, 7825426)).toEqual(lg(-717668907, 16251))
expect(lg(265990345, 3) % lg(-1140922127, -3108870)).toEqual(lg(265990345, 3))
expect(lg(-1181318422, 1) % lg(1489652251, 75207246)).toEqual(lg(-1181318422, 1))
expect(lg(380276439, 59) % lg(-1062351234, -3631372)).toEqual(lg(380276439, 59))
expect(lg(1080382784, 7211) % lg(572850722, -139092025)).toEqual(lg(1080382784, 7211))
expect(lg(2020323378, 316) % lg(1716930349, -16333391)).toEqual(lg(2020323378, 316))
expect(lg(1302118364, 5) % lg(-442067036, 1941456592)).toEqual(lg(1302118364, 5))
expect(lg(-641137972, 602) % lg(1134212295, -135713760)).toEqual(lg(-641137972, 602))
expect(lg(-761172703, 499) % lg(769981236, 12756336)).toEqual(lg(-761172703, 499))
expect(lg(1601268090, 610) % lg(448513898, -160887452)).toEqual(lg(1601268090, 610))
expect(lg(-16483553, 0) % lg(-1253549192, -1748027086)).toEqual(lg(-16483553, 0))
expect(lg(-1284021361, 241) % lg(13275221, -3818882)).toEqual(lg(-1284021361, 241))
expect(lg(1499414278, 26) % lg(570654893, -17498947)).toEqual(lg(1499414278, 26))
expect(lg(-368610421, 5074) % lg(685701351, 31070898)).toEqual(lg(-368610421, 5074))
expect(lg(1200134796, 70) % lg(1230376618, -2490370)).toEqual(lg(1200134796, 70))
expect(lg(1537764087, 64483) % lg(-1252591472, 66761881)).toEqual(lg(1537764087, 64483))
expect(lg(-1981129198, 15) % lg(1937978150, 8201544)).toEqual(lg(-1981129198, 15))
expect(lg(32422964, 200) % lg(2051327691, -20319622)).toEqual(lg(32422964, 200))
expect(lg(1404616230, 30) % lg(-748420073, -120320053)).toEqual(lg(1404616230, 30))
expect(lg(-1860381107, 38) % lg(392948122, 60098039)).toEqual(lg(-1860381107, 38))
expect(lg(1050519262, 106431) % lg(361773491, -6329760)).toEqual(lg(1050519262, 106431))
expect(lg(460136491, 1681770) % lg(1399049044, 759923035)).toEqual(lg(460136491, 1681770))
expect(lg(2065599344, 11089) % lg(-465681057, 3484544)).toEqual(lg(2065599344, 11089))
expect(lg(1849358428, 418531) % lg(1023666326, 3435570)).toEqual(lg(1849358428, 418531))
expect(lg(1292603836, 80) % lg(-1114872574, 250120091)).toEqual(lg(1292603836, 80))
expect(lg(1456627133, 194844) % lg(-1256385160, 59427917)).toEqual(lg(1456627133, 194844))
expect(lg(-568179858, 160) % lg(1142846538, 154324747)).toEqual(lg(-568179858, 160))
expect(lg(-2133580755, 203337) % lg(111334842, 12695612)).toEqual(lg(-2133580755, 203337))
expect(lg(1961218705, 6687) % lg(-245612957, 134017780)).toEqual(lg(1961218705, 6687))
expect(lg(335350966, 55096) % lg(-1815119598, -120983980)).toEqual(lg(335350966, 55096))
expect(lg(-767561503, 211) % lg(554589640, -7873602)).toEqual(lg(-767561503, 211))
expect(lg(1476687067, 3767) % lg(552659809, -753378142)).toEqual(lg(1476687067, 3767))
expect(lg(-1107393223, 30) % lg(-78383575, -52663801)).toEqual(lg(-1107393223, 30))
expect(lg(607313614, 2) % lg(-234099925, 59184919)).toEqual(lg(607313614, 2))
expect(lg(-1542671184, 616882) % lg(1370026838, -45628731)).toEqual(lg(-1542671184, 616882))
expect(lg(525616384, 1001) % lg(1995646126, -11226360)).toEqual(lg(525616384, 1001))
expect(lg(2109958916, 21549) % lg(-419960245, -115959896)).toEqual(lg(2109958916, 21549))
expect(lg(-450913111, 32140) % lg(-99267096, -3640047)).toEqual(lg(-450913111, 32140))
expect(lg(1515870052, 198) % lg(1415757861, -110282301)).toEqual(lg(1515870052, 198))
expect(lg(124639649, 865615) % lg(-1354782388, 2569606)).toEqual(lg(124639649, 865615))
expect(lg(557119825, 7205) % lg(683150209, -15864187)).toEqual(lg(557119825, 7205))
expect(lg(992846513, 1385110) % lg(1578961851, -8380578)).toEqual(lg(992846513, 1385110))
expect(lg(1081385155, 4176) % lg(1892231070, 31130825)).toEqual(lg(1081385155, 4176))
expect(lg(-738492748, 8) % lg(-431212066, 687916944)).toEqual(lg(-738492748, 8))
expect(lg(-1448153936, 8101) % lg(-584523654, -4814205)).toEqual(lg(-1448153936, 8101))
expect(lg(-713251055, 243) % lg(261411225, 31444708)).toEqual(lg(-713251055, 243))
expect(lg(881178812, 47057) % lg(823893049, -5940358)).toEqual(lg(881178812, 47057))
expect(lg(-506817388, 0) % lg(-465610822, 10559551)).toEqual(lg(-506817388, 0))
expect(lg(-420315839, 112832) % lg(-686319219, -666166549)).toEqual(lg(-420315839, 112832))
// big % int32
expect(lg(-412174169, -319069709) % lg(-6, -1)).toEqual(lg(-3, -1))
expect(lg(1634601702, 814446468) % lg(825883, 0)).toEqual(lg(464005, 0))
expect(lg(-1005992901, 2694218) % lg(108493743, 0)).toEqual(lg(34559370, 0))
expect(lg(1534700309, -630528658) % lg(-506616, -1)).toEqual(lg(-286379, -1))
expect(lg(-456613426, -23298167) % lg(-206, -1)).toEqual(lg(-62, -1))
expect(lg(857770611, 2618490) % lg(1225551197, 0)).toEqual(lg(386945695, 0))
expect(lg(2127943654, 2768088) % lg(-291653, -1)).toEqual(lg(270232, 0))
expect(lg(1085973072, 3470797) % lg(-29714535, -1)).toEqual(lg(277129, 0))
expect(lg(1536124828, 1268901218) % lg(-121, -1)).toEqual(lg(15, 0))
expect(lg(371220141, 34588968) % lg(2, 0)).toEqual(lg(1, 0))
expect(lg(-1712997009, 187259899) % lg(129274, 0)).toEqual(lg(46669, 0))
expect(lg(586579000, -243530833) % lg(-31235, -1)).toEqual(lg(-1508, -1))
expect(lg(1745775262, -400161972) % lg(-1, -1)).toEqual(lg(0, 0))
expect(lg(-1564631310, -56487209) % lg(2626, 0)).toEqual(lg(-1680, -1))
expect(lg(-1848745069, 11533547) % lg(59, 0)).toEqual(lg(53, 0))
expect(lg(-1415791920, -26215621) % lg(-2142359, -1)).toEqual(lg(-1699972, -1))
expect(lg(-481609933, -25891343) % lg(483607, 0)).toEqual(lg(-200041, -1))
expect(lg(-889674017, -4084771) % lg(428648085, 0)).toEqual(lg(-13123232, -1))
expect(lg(1587465684, -367383975) % lg(7, 0)).toEqual(lg(0, 0))
expect(lg(811562260, -335104547) % lg(5502, 0)).toEqual(lg(-4528, -1))
expect(lg(2107357891, -10075787) % lg(110, 0)).toEqual(lg(-71, -1))
expect(lg(-1356326655, 5174156) % lg(-1, -1)).toEqual(lg(0, 0))
expect(lg(-1794856776, 3059124) % lg(-29413816, -1)).toEqual(lg(7872112, 0))
expect(lg(-1118254374, -3629384) % lg(-85, -1)).toEqual(lg(-37, -1))
expect(lg(288539563, 70814306) % lg(-14561, -1)).toEqual(lg(14227, 0))
expect(lg(-719069745, -128562664) % lg(-256, -1)).toEqual(lg(-49, -1))
expect(lg(1530955727, 15829469) % lg(195494, 0)).toEqual(lg(6101, 0))
expect(lg(2144004402, -5408490) % lg(11, 0)).toEqual(lg(-6, -1))
expect(lg(-1766192560, -17443468) % lg(-168087095, -1)).toEqual(lg(-137624717, -1))
expect(lg(-524619138, -371121095) % lg(4765, 0)).toEqual(lg(-3592, -1))
expect(lg(-1960083221, 176122524) % lg(-5564, -1)).toEqual(lg(4335, 0))
expect(lg(1528631102, -597885631) % lg(-413908, -1)).toEqual(lg(-271754, -1))
expect(lg(-1513123614, -30582360) % lg(-496311, -1)).toEqual(lg(-361112, -1))
expect(lg(-1975522255, -46421733) % lg(29, 0)).toEqual(lg(-4, -1))
expect(lg(-1715879325, 3072313) % lg(438221, 0)).toEqual(lg(414436, 0))
expect(lg(-1321015849, -300384564) % lg(1, 0)).toEqual(lg(0, 0))
expect(lg(-1088390706, -277354665) % lg(-1237, -1)).toEqual(lg(-454, -1))
expect(lg(-1012773943, 223943652) % lg(707359548, 0)).toEqual(lg(586891857, 0))
expect(lg(1097288344, 26740237) % lg(-3, -1)).toEqual(lg(2, 0))
expect(lg(-1121404205, -87484234) % lg(80229261, 0)).toEqual(lg(-24053960, -1))
expect(lg(-1503637931, -163703901) % lg(-983334452, -1)).toEqual(lg(-79944815, -1))
expect(lg(2012820970, 445991475) % lg(1035472980, 0)).toEqual(lg(2600110, 0))
expect(lg(2015362538, 2985510) % lg(-148, -1)).toEqual(lg(74, 0))
expect(lg(1764134228, 50881407) % lg(-1, -1)).toEqual(lg(0, 0))
expect(lg(-523555853, 77167937) % lg(-563, -1)).toEqual(lg(106, 0))
expect(lg(1531888651, -2389306) % lg(1, 0)).toEqual(lg(0, 0))
expect(lg(-181277952, 32599207) % lg(-729, -1)).toEqual(lg(659, 0))
expect(lg(223126732, 88838488) % lg(13378, 0)).toEqual(lg(968, 0))
expect(lg(670834629, 46037187) % lg(922370, 0)).toEqual(lg(920991, 0))
expect(lg(1098978850, 6541822) % lg(-8405198, -1)).toEqual(lg(2462152, 0))
// big % int53
expect(lg(2008672965, 41566313) % lg(313991275, 18390)).toEqual(lg(1057995305, 4748))
expect(lg(1922552561, 28139870) % lg(-2083633557, 19)).toEqual(lg(-1074209653, 18))
expect(lg(843627074, -173776705) % lg(1451117493, 14364)).toEqual(lg(1480601143, -11310))
expect(lg(204865470, -6692402) % lg(-645190286, 413)).toEqual(lg(-691687452, -38))
expect(lg(952830559, -214594684) % lg(-1778162360, 378)).toEqual(lg(-1218791457, -31))
expect(lg(1673740333, -69274846) % lg(-1549261605, 2390)).toEqual(lg(-281609960, -1292))
expect(lg(-1276804811, 367022678) % lg(-678111623, 11)).toEqual(lg(-860426348, 1))
expect(lg(-1331527548, -33013551) % lg(-1975438267, 2961)).toEqual(lg(-1244563205, -1264))
expect(lg(1067523314, 72606174) % lg(-1716982106, 255179)).toEqual(lg(-935830326, 135167))
expect(lg(-937134490, -32649070) % lg(-804857990, 57507)).toEqual(lg(-2025081444, -42140))
expect(lg(108363299, 1224097478) % lg(1137551776, 281)).toEqual(lg(85696931, 194))
expect(lg(-1965834834, -11053948) % lg(-942300324, 6487)).toEqual(lg(-385517902, -5258))
expect(lg(-3151939, 171473802) % lg(-2071379940, 3914)).toEqual(lg(-755355475, 2268))
expect(lg(1465781759, -970108425) % lg(-1251607207, 3003)).toEqual(lg(-676865399, -663))
expect(lg(919308511, -1689158617) % lg(658566728, 36406)).toEqual(lg(2042443783, -22321))
expect(lg(-418485001, 1000432592) % lg(-1653953022, 31957)).toEqual(lg(-903837593, 31415))
expect(lg(-880302655, -14116770) % lg(913871933, 118223)).toEqual(lg(496274972, -48207))
expect(lg(-525597278, -3790314) % lg(2133284776, 127083)).toEqual(lg(1210119082, -104892))
expect(lg(-393124913, -28106221) % lg(958070140, 159)).toEqual(lg(473810731, -5))
expect(lg(6929245, 2749730) % lg(1462129294, 43237)).toEqual(lg(-1912903061, 25777))
expect(lg(708024745, -15568245) % lg(1288198049, 56)).toEqual(lg(1099532724, -19))
expect(lg(487601139, 13603229) % lg(723875593, 45021)).toEqual(lg(920504149, 6836))
expect(lg(-2070321133, 115478389) % lg(-1799479616, 75)).toEqual(lg(1778080723, 29))
expect(lg(-307180735, 3049800) % lg(1043781053, 3319)).toEqual(lg(-720480381, 2735))
expect(lg(-1073877839, -6538577) % lg(-1408649838, 0)).toEqual(lg(1473972065, -1))
expect(lg(-1892822171, -1698321438) % lg(96164237, 514)).toEqual(lg(-1389255096, -200))
expect(lg(-674980011, 2764943) % lg(-445529419, 65125)).toEqual(lg(857386403, 29656))
expect(lg(2003347800, -46928389) % lg(368897711, 128159)).toEqual(lg(-419043446, -22164))
expect(lg(-1929871429, -241628283) % lg(202358381, 7645)).toEqual(lg(-1599543668, -6569))
expect(lg(419719197, 661188517) % lg(2112360098, 1)).toEqual(lg(581185953, 1))
expect(lg(1092830824, 1600823129) % lg(-1827462760, 172800)).toEqual(lg(-1880704128, 171407))
expect(lg(-836779994, -27475595) % lg(-417527207, 16)).toEqual(lg(1210159480, -13))
expect(lg(-1759597755, 9157722) % lg(-987185779, 1)).toEqual(lg(807846066, 1))
expect(lg(-1097231525, 20092165) % lg(1106421078, 1)).toEqual(lg(949995673, 1))
expect(lg(390678483, 3835040) % lg(1221250555, 14)).toEqual(lg(-712450167, 7))
expect(lg(-284334384, -18425278) % lg(-1111448031, 6)).toEqual(lg(1129531033, -4))
expect(lg(-233961390, 53260849) % lg(-613558136, 3663)).toEqual(lg(2094997010, 3022))
expect(lg(-3383211, 8039036) % lg(-1668680584, 749874)).toEqual(lg(-496446555, 540290))
expect(lg(804358887, -189240235) % lg(179665302, 12347)).toEqual(lg(1280740603, -9472))
expect(lg(208769744, 280071599) % lg(-325433064, 14)).toEqual(lg(2127427912, 6))
expect(lg(-1527711901, -51564742) % lg(-1019145455, 0)).toEqual(lg(-722136158, -1))
expect(lg(-159182038, -2145592347) % lg(-483720705, 15)).toEqual(lg(-1603688570, -2))
expect(lg(1059926378, 477886379) % lg(924988992, 543468)).toEqual(lg(-256578646, 177817))
expect(lg(-1800046387, 119696078) % lg(436524799, 94037)).toEqual(lg(1286157765, 80885))
expect(lg(-822280387, 44882065) % lg(-940828508, 22947)).toEqual(lg(251450065, 19154))
expect(lg(1465101985, 269803551) % lg(-1953360551, 334)).toEqual(lg(1310986115, 209))
expect(lg(-567675197, -8838663) % lg(1903221047, 6)).toEqual(lg(1436855439, -5))
expect(lg(689376065, -22622471) % lg(1534988921, 63)).toEqual(lg(296887390, -17))
expect(lg(-2017356377, -57717216) % lg(-1390284125, 42)).toEqual(lg(1577958450, -39))
expect(lg(-128715878, 982583003) % lg(2004099318, 988167)).toEqual(lg(661387374, 344542))
// big % big
expect(lg(-320078007, 205603273) % lg(2020227799, -360928021)).toEqual(lg(-320078007, 205603273))
expect(lg(-800732960, -371808530) % lg(744251542, -11199592)).toEqual(lg(408769930, -2221999))
expect(lg(-56774921, -32434115) % lg(1413374280, -2726592)).toEqual(lg(1575977183, -2441606))
expect(lg(1667937500, 228622683) % lg(-243248020, 69909529)).toEqual(lg(-1897285736, 18894093))
expect(lg(-1333815518, 2097776) % lg(-1750106076, 18608702)).toEqual(lg(-1333815518, 2097776))
expect(lg(-162800691, -117885498) % lg(-709007774, 8711127)).toEqual(lg(-789967161, -4640836))
expect(lg(-1909427145, -2824029) % lg(2028036056, -660713154)).toEqual(lg(-1909427145, -2824029))
expect(lg(14077923, 63046905) % lg(-688765214, 375445962)).toEqual(lg(14077923, 63046905))
expect(lg(272760540, 19525127) % lg(-396955631, 848435537)).toEqual(lg(272760540, 19525127))
expect(lg(-600396362, 406643261) % lg(-1533973181, 491661310)).toEqual(lg(-600396362, 406643261))
expect(lg(1801834226, 200420454) % lg(-1889418050, -328758068)).toEqual(lg(1801834226, 200420454))
expect(lg(1170836790, 510289402) % lg(202445942, 113936327)).toEqual(lg(361053022, 54544094))
expect(lg(-378923036, -1036580478) % lg(905093048, 5526353)).toEqual(lg(1369752396, -3152427))
expect(lg(-2137034353, 1455139814) % lg(1665353214, 27574343)).toEqual(lg(1458911735, 21273958))
expect(lg(-1350216191, -3821167) % lg(-1333339390, -4746360)).toEqual(lg(-1350216191, -3821167))
expect(lg(-1289646201, -5193401) % lg(1838778646, -3822651)).toEqual(lg(1166542449, -1370750))
expect(lg(301867174, 5185218) % lg(157012848, -15464466)).toEqual(lg(301867174, 5185218))
expect(lg(467711834, 155069651) % lg(-44860799, 106733768)).toEqual(lg(512572633, 48335882))
expect(lg(1624269582, 11007763) % lg(-158694824, -491219717)).toEqual(lg(1624269582, 11007763))
expect(lg(-1015519521, -163989350) % lg(1652525166, 530116116)).toEqual(lg(-1015519521, -163989350))
expect(lg(2001612518, -452587333) % lg(1115217917, 90680733)).toEqual(lg(-2127450406, -89864400))
expect(lg(1039524645, -86121932) % lg(1131434363, 13339357)).toEqual(lg(-761803769, -6085789))
expect(lg(-1922291990, 6439098) % lg(-1083372307, -20634200)).toEqual(lg(-1922291990, 6439098))
expect(lg(1408756974, 235847122) % lg(-1813277898, -9066180)).toEqual(lg(1508171882, 126457))
expect(lg(1121009342, -1533788016) % lg(-1724900447, -5821788)).toEqual(lg(-496706473, -2657930))
expect(lg(-1626361260, -113469353) % lg(1216987736, -817139415)).toEqual(lg(-1626361260, -113469353))
expect(lg(-433139577, -182483493) % lg(1019490766, -595625160)).toEqual(lg(-433139577, -182483493))
expect(lg(793542905, 198273616) % lg(-82759497, -2621599)).toEqual(lg(-1118452074, 1653764))
expect(lg(425605214, 249789222) % lg(392156278, 6716943)).toEqual(lg(-1199275184, 1262327))
expect(lg(213473729, 11660532) % lg(-547058106, 894811834)).toEqual(lg(213473729, 11660532))
expect(lg(-1550227391, 2847368) % lg(-1996700003, 689370771)).toEqual(lg(-1550227391, 2847368))
expect(lg(-144234222, -54239417) % lg(-1102770075, -7213193)).toEqual(lg(-1014778289, -3747071))
expect(lg(524484467, 15124083) % lg(-1101379967, -39968226)).toEqual(lg(524484467, 15124083))
expect(lg(314758022, 5390195) % lg(-1234755328, -3305123)).toEqual(lg(-919997306, 2085072))
expect(lg(580679232, -10426812) % lg(-1964013803, -1738507605)).toEqual(lg(580679232, -10426812))
expect(lg(1670083752, -254253193) % lg(722212413, -125031969)).toEqual(lg(225658926, -4189255))
expect(lg(-1744001445, -5443198) % lg(1248252191, 3609991)).toEqual(lg(-495749254, -1833207))
expect(lg(-1786439869, 137339199) % lg(1821158508, 2909161)).toEqual(lg(-1481543825, 608612))
expect(lg(1273422584, -284542935) % lg(1626032463, -17392208)).toEqual(lg(1026706952, -6267613))
expect(lg(-513801887, -32580141) % lg(-342074286, 27651829)).toEqual(lg(-855876173, -4928311))
expect(lg(-1027906958, 55543678) % lg(-1936394792, 928937151)).toEqual(lg(-1027906958, 55543678))
expect(lg(251585986, -50474191) % lg(-2045396991, 32687162)).toEqual(lg(-1793811005, -17787029))
expect(lg(66679938, -917589429) % lg(2124767660, -3454168)).toEqual(lg(-356034186, -2235041))
expect(lg(-599564184, -209788131) % lg(-325046915, 133280284)).toEqual(lg(-924611099, -76507846))
expect(lg(838338995, -12983151) % lg(-842402530, 19411056)).toEqual(lg(838338995, -12983151))
expect(lg(1444498155, 520850879) % lg(851271837, 23920116)).toEqual(lg(747658762, 18528439))
expect(lg(2096765386, -117024114) % lg(-1726450785, -5694999)).toEqual(lg(-2028924578, -3124146))
expect(lg(-425905039, -180148939) % lg(-1397064581, -15926795)).toEqual(lg(2056903464, -4954201))
expect(lg(-920215872, 219325473) % lg(1357686103, 54682263)).toEqual(lg(-2055992988, 596420))
expect(lg(1279110660, -10784541) % lg(278869448, 758126792)).toEqual(lg(1279110660, -10784541))
}
}
}
object RuntimeLongOldTest extends JasmineTest {
import RuntimeLong.fromDouble
/** overload expect for long to add toString */
def expect(l: RuntimeLong): JasmineExpectation = expect(l.toHexString)
describe("scala.scalajs.runtime.RuntimeLong - old") {
// scalastyle:off disallow.space.before.token disallow.space.after.token
def fromInt(x: Int): RuntimeLong = new RuntimeLong(x)
val maxInt = fromInt(Int.MaxValue)
val minInt = fromInt(Int.MinValue)
val one = fromInt(1)
val billion = fromInt(1000000000)
val `4503599627370510L` = new RuntimeLong( 14, 0, 256)
val `613354684553L` = new RuntimeLong( 639113, 146235, 0)
val `9863155567412L` = new RuntimeLong(2247476, 2351559, 0)
val `3632147899696541255L` = new RuntimeLong(1568327, 2954580, 206463)
val `7632147899696541255L` = new RuntimeLong(2616903, 1593290, 433837)
val minValue = new RuntimeLong(0, 0, 524288)
val minus1 = new RuntimeLong(4194303, 4194303, 1048575)
val minus2 = new RuntimeLong(4194302, 4194303, 1048575)
val minus3 = new RuntimeLong(4194301, 4194303, 1048575)
val minus4 = new RuntimeLong(4194300, 4194303, 1048575)
val minus15 = new RuntimeLong(4194289, 4194303, 1048575)
val minus16 = new RuntimeLong(4194288, 4194303, 1048575)
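    // (The three-argument constructor above presumably packs little-endian limbs
    // of 22, 22 and 20 bits: value = l + m * 2^22 + h * 2^44. Sanity checks:
    // minus1 = (2^22-1) + (2^22-1)*2^22 + (2^20-1)*2^44 = 2^64 - 1, i.e. -1, and
    // minValue has h = 524288 = 2^19, giving 2^19 * 2^44 = 2^63 = Long.MinValue.)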
it("should correctly implement negation") {
expect(-fromInt(5)).toEqual("fffffffffffffffb")
expect(-fromInt(0)).toEqual("0")
expect(-minInt ).toEqual("80000000")
}
it("should correctly implement comparison") {
expect(fromInt(7) < fromInt(15)).toBe(true)
expect(fromInt(15) < fromInt(15)).toBe(false)
expect(fromInt(15) <= fromInt(15)).toBe(true)
expect(fromInt(14) <= fromInt(15)).toBe(true)
expect(fromInt(15) > fromInt(15)).toBe(false)
expect(fromInt(14) > fromInt(15)).toBe(false)
expect(fromInt(16) > fromInt(15)).toBe(true)
expect(fromInt(15) >= fromInt(15)).toBe(true)
expect(fromInt(14) >= fromInt(15)).toBe(false)
expect(fromInt(16) >= fromInt(15)).toBe(true)
}
it("should correctly implement addition") {
expect(fromInt(7) + fromInt(15)).toEqual("16")
expect( maxInt + maxInt ).toEqual("fffffffe")
expect( maxInt + one ).toEqual("80000000")
}
it("should correctly implement subtraction") {
expect(fromInt(7) - fromInt(15)).toEqual("fffffffffffffff8")
expect( maxInt - maxInt ).toEqual("0")
}
it("should correctly implement multiplication") {
expect(fromInt(7) * fromInt(15)).toEqual("69")
expect(fromInt(-7) * fromInt(15)).toEqual("ffffffffffffff97")
expect( maxInt * maxInt ).toEqual("3fffffff00000001")
expect(`4503599627370510L` * fromInt(-4)).toEqual("ffbfffffffffffc8")
}
it("should correctly implement division") {
expect( fromInt(7) / fromInt(15)).toEqual("0")
expect( fromInt(24) / fromInt(5) ).toEqual("4")
expect( fromInt(24) / fromInt(-5)).toEqual("fffffffffffffffc")
expect( maxInt / fromInt(-5)).toEqual("ffffffffe6666667")
expect( maxInt / billion ).toEqual("2")
expect((maxInt+one) / billion ).toEqual("2")
expect(minValue / minValue).toEqual("1")
expect(minValue / minus1).toEqual("8000000000000000")
expect(minValue / minus2).toEqual("4000000000000000")
expect(minValue / minus3).toEqual("2aaaaaaaaaaaaaaa")
expect(minValue / minus4).toEqual("2000000000000000")
expect(minValue / minus15).toEqual("888888888888888")
expect(minValue / minus16).toEqual("800000000000000")
expect(`7632147899696541255L` / minValue).toEqual("0")
expect(`7632147899696541255L` / minus1).toEqual("961529ec0d5811b9")
expect(`7632147899696541255L` / minus2).toEqual("cb0a94f606ac08dd")
expect(`7632147899696541255L` / minus3).toEqual("dcb1b8a40472b093")
expect(`7632147899696541255L` / minus4).toEqual("e5854a7b0356046f")
expect(`7632147899696541255L` / minus15).toEqual("f8f05820cdb089b7")
expect(`7632147899696541255L` / minus16).toEqual("f961529ec0d5811c")
}
it("should correctly implement modulus") {
expect( fromInt(7) % fromInt(15)).toEqual("7")
expect( fromInt(24) % fromInt(5) ).toEqual("4")
expect( fromInt(24) % fromInt(-5)).toEqual("4")
expect( maxInt % billion ).toEqual("8ca6bff")
expect((maxInt+one) % billion ).toEqual("8ca6c00")
expect( maxInt % fromInt(-5)).toEqual("2")
expect(minValue % minValue).toEqual("0")
expect(minValue % minus1).toEqual("0")
expect(minValue % minus2).toEqual("0")
expect(minValue % minus3).toEqual("fffffffffffffffe")
expect(minValue % minus4).toEqual("0")
expect(minValue % minus15).toEqual("fffffffffffffff8")
expect(minValue % minus16).toEqual("0")
expect(`7632147899696541255L` % minValue).toEqual("69ead613f2a7ee47")
expect(`7632147899696541255L` % minus1).toEqual("0")
expect(`7632147899696541255L` % minus2).toEqual("1")
expect(`7632147899696541255L` % minus3).toEqual("0")
expect(`7632147899696541255L` % minus4).toEqual("3")
expect(`7632147899696541255L` % minus15).toEqual("0")
expect(`7632147899696541255L` % minus16).toEqual("7")
}
it("should correctly implement toString") {
expect(maxInt.toString).toEqual("2147483647")
expect(fromInt(-50).toString).toEqual("-50")
expect(fromInt(-1000000000).toString).toEqual("-1000000000")
expect((maxInt+one).toString).toEqual("2147483648")
expect(minInt.toString).toEqual("-2147483648")
}
it("should correctly implement fromDouble") {
expect(fromDouble( 4.5)).toEqual("4")
expect(fromDouble(-4.5)).toEqual("fffffffffffffffc")
}
it("should correctly implement toDouble") {
expect(fromInt(5).toDouble).toEqual(5.0)
expect((maxInt+one).toDouble).toEqual(2147483648.0)
}
it("should correctly implement numberOfLeadingZeros") {
expect(fromInt( 0).numberOfLeadingZeros).toEqual(64)
expect(fromInt( 1).numberOfLeadingZeros).toEqual(63)
expect(fromInt(-1).numberOfLeadingZeros).toEqual(0)
expect(fromInt( 2).numberOfLeadingZeros).toEqual(62)
}
it("should implement hashCode() according to spec in j.l.Long") {
expect(fromInt(0 ).hashCode()).toEqual(0)
expect(fromInt(55 ).hashCode()).toEqual(55)
expect(fromInt(-12 ).hashCode()).toEqual(11)
expect(fromInt(10006548).hashCode()).toEqual(10006548)
expect(fromInt(-1098748).hashCode()).toEqual(1098747)
expect(`613354684553L` .hashCode()).toEqual(-825638905)
expect(`9863155567412L` .hashCode()).toEqual(1910653900)
expect(`3632147899696541255L`.hashCode()).toEqual(1735398658)
expect(`7632147899696541255L`.hashCode()).toEqual(-1689438124)
}
// scalastyle:on disallow.space.before.token disallow.space.after.token
}
}
| andreaTP/scala-js | test-suite/src/test/scala/org/scalajs/testsuite/jsinterop/RuntimeLongTest.scala | Scala | bsd-3-clause | 156,187 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.kafka
import java.util.concurrent.{Delayed, ScheduledFuture, TimeUnit}
import com.github.benmanes.caffeine.cache.Ticker
object ExpirationMocking {
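  // Captures the runnable and delay handed to a mocked scheduler so tests can
  // trigger, cancel or inspect the scheduled work deterministically.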
class WrappedRunnable(val delay: Long) {
var runnable: Runnable = _
var cancelled: Boolean = false
var done: Boolean = false
}
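  // ScheduledFuture facade over a WrappedRunnable: get() runs the captured task
  // synchronously on the caller's thread instead of on a real executor.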
class ScheduledExpiry[T](runnable: WrappedRunnable) extends ScheduledFuture[T] {
override def getDelay(unit: TimeUnit): Long = unit.convert(runnable.delay, TimeUnit.MILLISECONDS)
override def compareTo(o: Delayed): Int = java.lang.Long.compare(runnable.delay, o.getDelay(TimeUnit.MILLISECONDS))
override def cancel(mayInterruptIfRunning: Boolean): Boolean = { runnable.cancelled = true; true }
override def isCancelled: Boolean = runnable.cancelled
override def isDone: Boolean = runnable.done
override def get(): T = runnable.runnable.run().asInstanceOf[T]
override def get(timeout: Long, unit: TimeUnit): T = runnable.runnable.run().asInstanceOf[T]
}
class MockTicker extends Ticker {
var millis = System.currentTimeMillis()
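    // Caffeine tickers report nanoseconds; expose a mutable millisecond clock
    // and convert on read so tests can advance time by setting `millis`.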
override def read(): Long = millis * 1000000L
}
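  // Hypothetical usage sketch (not part of the original file): assuming the
  // component under test lets us inject a Ticker into its Caffeine cache, a
  // test could fast-forward expiry without sleeping, e.g.:
  //
  //   val ticker = new MockTicker
  //   val cache = Caffeine.newBuilder()
  //     .ticker(ticker)
  //     .expireAfterWrite(10, TimeUnit.MILLISECONDS)
  //     .build[String, String]()
  //   cache.put("k", "v")
  //   ticker.millis += 20  // jump past the expiry window
  //   cache.cleanUp()      // "k" is evicted based on the mock clock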
}
| locationtech/geomesa | geomesa-kafka/geomesa-kafka-datastore/src/test/scala/org/locationtech/geomesa/kafka/ExpirationMocking.scala | Scala | apache-2.0 | 1,615 |
package app.claim
import utils.WithJsBrowser
import utils.pageobjects.preview.PreviewTestableData
import utils.pageobjects.s_eligibility.GBenefitsPage
import utils.pageobjects._
import app.FunctionalTestCommon
/**
* End-to-End functional tests using input files created by Steve Moody.
* @author Jorge Migueis
* Date: 03/09/2013
*/
class FunctionalTestCase8Spec extends FunctionalTestCommon {
isolated
section("functional", "claim")
"The application" should {
"Successfully run absolute Test Case 8" in new WithJsBrowser with PageObjects {
val page = GBenefitsPage(context)
val claim = TestData.readTestDataFromFile("/functional_scenarios/ClaimScenario_TestCase8.csv")
test(page, claim, buildPreviewUseData)
}
}
section("functional", "claim")
private def buildPreviewUseData = {
PreviewTestableData() +
"AboutYouTitle" + "AboutYouFirstName" + "AboutYouMiddleName" + "AboutYouSurname" +
dateConversion("AboutYouDateOfBirth") +
dateConversion("ClaimDateWhenDoYouWantYourCarersAllowanceClaimtoStart") +
addressConversion("AboutYouAddress") + "AboutYouPostcode" +
"AboutYouNationalityAndResidencyNationality" +
"OtherMoneyOtherAreYouReceivingPensionFromAnotherEEA" +
"AboutYourPartnerTitle" + "AboutYourPartnerFirstName" + "AboutYourPartnerFirstName" + "AboutYourPartnerSurname" +
dateConversion("AboutYourPartnerDateofBirth") +
"OtherMoneyOtherAreYouPayingInsuranceToAnotherEEA" +
"AboutTheCareYouProvideWhatTheirRelationshipToYou" +
"AboutTheCareYouProvideTitlePersonCareFor" + "AboutTheCareYouProvideFirstNamePersonCareFor" + "AboutTheCareYouProvideMiddleNamePersonCareFor" + "AboutTheCareYouProvideSurnamePersonCareFor" +
dateConversion("AboutTheCareYouProvideDateofBirthPersonYouCareFor") +
addressConversion("AboutTheCareYouProvideAddressPersonCareFor") + "AboutTheCareYouProvidePostcodePersonCareFor" +
"AboutTheCareYouProvideDoYouSpend35HoursorMoreEachWeek" +
"AboutTheCareYouProvideOtherCarer" +
"AboutTheCareYouProvideHaveYouHadAnyMoreBreaksInCare_1" +
"EducationHaveYouBeenOnACourseOfEducation" +
"EducationCourseTitle" +
"EducationNameofSchool" +
"EducationNameOfMainTeacherOrTutor" +
"EducationPhoneNumber" +
dateConversion("EducationWhenDidYouStartTheCourse") +
dateConversion("EducationWhenDoYouExpectTheCourseToEnd") +
"EmploymentHaveYouBeenEmployedAtAnyTime_0" +
"EmploymentEmployerName_1" +
"EmploymentHaveYouBeenSelfEmployedAtAnyTime"
}
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/test/app/claim/FunctionalTestCase8Spec.scala | Scala | mit | 2,591 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.detailquery
import java.sql.Timestamp
import org.apache.spark.sql.Row
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.keygenerator.directdictionary.timestamp.TimeStampGranularityConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.spark.sql.test.util.QueryTest
/**
* Test Class for Range Filters.
*/
class RangeFilterMyTests extends QueryTest with BeforeAndAfterAll {
override def beforeAll {
//For the Hive table creation and data loading
sql("drop table if exists filtertestTable")
sql("drop table if exists NO_DICTIONARY_HIVE_1")
sql("drop table if exists NO_DICTIONARY_CARBON_1")
sql("drop table if exists NO_DICTIONARY_CARBON_2")
sql("drop table if exists NO_DICTIONARY_HIVE_6")
sql("drop table if exists dictionary_hive_6")
sql("drop table if exists NO_DICTIONARY_HIVE_7")
sql("drop table if exists NO_DICTIONARY_CARBON_6")
sql("drop table if exists NO_DICTIONARY_CARBON")
sql("drop table if exists NO_DICTIONARY_HIVE")
sql("drop table if exists complexcarbontable")
sql("drop table if exists DICTIONARY_CARBON_6")
sql("drop table if exists NO_DICTIONARY_CARBON_7")
sql("drop table if exists NO_DICTIONARY_CARBON_8")
sql("drop table if exists NO_DICTIONARY_HIVE_8")
//For Carbon cube creation.
sql("CREATE TABLE DICTIONARY_CARBON_6 (empno string, " +
"doj Timestamp, workgroupcategory Int, empname String,workgroupcategoryname String, " +
"deptno Int, deptname String, projectcode Int, projectjoindate Timestamp, " +
"projectenddate Timestamp, designation String,attendance Int,utilization " +
"Int,salary Int) STORED AS carbondata "
)
sql(
s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE DICTIONARY_CARBON_6 " +
"OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\\"')"
)
sql(
"create table DICTIONARY_HIVE_6(empno string,empname string,designation string,doj " +
"Timestamp,workgroupcategory int, " +
"workgroupcategoryname string,deptno int, deptname string, projectcode int, " +
"projectjoindate Timestamp,projectenddate Timestamp,attendance int, "
+ "utilization int,salary int) row format delimited fields terminated by ',' " +
"tblproperties(\\"skip.header.line.count\\"=\\"1\\") " +
""
)
sql(
s"load data local inpath '$resourcesPath/datawithoutheader.csv' into table " +
"DICTIONARY_HIVE_6"
);
sql("CREATE TABLE NO_DICTIONARY_CARBON_6 (empno string, " +
"doj Timestamp, workgroupcategory Int, empname String,workgroupcategoryname String, " +
"deptno Int, deptname String, projectcode Int, projectjoindate Timestamp, " +
"projectenddate Timestamp, designation String,attendance Int,utilization " +
"Int,salary Int) STORED AS carbondata "
)
sql(
s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE NO_DICTIONARY_CARBON_6 " +
"OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\\"')"
)
sql(
"create table NO_DICTIONARY_HIVE_6(empno string,empname string,designation string,doj " +
"Timestamp,workgroupcategory int, " +
"workgroupcategoryname string,deptno int, deptname string, projectcode int, " +
"projectjoindate Timestamp,projectenddate Timestamp,attendance int, "
+ "utilization int,salary int) row format delimited fields terminated by ',' " +
"tblproperties(\\"skip.header.line.count\\"=\\"1\\") " +
""
)
sql(
s"load data local inpath '$resourcesPath/datawithoutheader.csv' into table " +
"NO_DICTIONARY_HIVE_6"
);
sql("CREATE TABLE NO_DICTIONARY_CARBON (empno string, " +
"doj Timestamp, workgroupcategory Int, empname String,workgroupcategoryname String, " +
"deptno Int, deptname String, projectcode Int, projectjoindate Timestamp, " +
"projectenddate Timestamp, designation String,attendance Int,utilization " +
"Int,salary Int) STORED AS carbondata "
)
sql(
s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE NO_DICTIONARY_CARBON " +
"OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\\"')"
)
sql(
"create table NO_DICTIONARY_HIVE(empno string,empname string,designation string,doj " +
"Timestamp,workgroupcategory int, " +
"workgroupcategoryname string,deptno int, deptname string, projectcode int, " +
"projectjoindate Timestamp,projectenddate Timestamp,attendance int, "
+ "utilization int,salary int) row format delimited fields terminated by ',' " +
"tblproperties(\\"skip.header.line.count\\"=\\"1\\") " +
""
)
sql(
s"load data local inpath '$resourcesPath/datawithoutheader.csv' into table " +
"NO_DICTIONARY_HIVE"
);
sql("CREATE TABLE NO_DICTIONARY_CARBON_8 (empno string, " +
"doj Timestamp, workgroupcategory Int, empname String,workgroupcategoryname String, " +
"deptno Int, deptname String, projectcode Int, projectjoindate Timestamp, " +
"projectenddate Timestamp, designation String,attendance Int,utilization " +
"Int,salary Int) STORED AS carbondata "
)
sql(
s"LOAD DATA LOCAL INPATH '$resourcesPath/rangedata.csv' INTO TABLE NO_DICTIONARY_CARBON_8 " +
"OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\\"')"
)
sql(
"create table NO_DICTIONARY_HIVE_8(empno string,empname string,designation string,doj " +
"Timestamp,workgroupcategory int, " +
"workgroupcategoryname string,deptno int, deptname string, projectcode int, " +
"projectjoindate Timestamp,projectenddate Timestamp,attendance int, "
+ "utilization int,salary int) row format delimited fields terminated by ',' " +
"tblproperties(\\"skip.header.line.count\\"=\\"1\\") " +
""
)
sql(
s"load data local inpath '$resourcesPath/datawithoutheader.csv' into table " +
"NO_DICTIONARY_HIVE_8"
);
sql("create table complexcarbontable(deviceInformationId int, channelsId string," +
"ROMSize string, purchasedate string, mobile struct<imei:string, imsi:string>," +
"MAC array<string>, locationinfo array<struct<ActiveAreaId:int, ActiveCountry:string, " +
"ActiveProvince:string, Activecity:string, ActiveDistrict:string, ActiveStreet:string>>," +
"proddate struct<productionDate:string,activeDeactivedate:array<string>>, gamePointId " +
"double,contractNumber double) " +
"STORED AS carbondata "
)
//CarbonProperties.getInstance()
// .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
sql(
s"LOAD DATA local inpath '$resourcesPath/complexdata.csv' INTO table " +
"complexcarbontable " +
"OPTIONS('DELIMITER'=',', 'QUOTECHAR'='\\"', 'FILEHEADER'='deviceInformationId,channelsId," +
"ROMSize,purchasedate,mobile,MAC,locationinfo,proddate,gamePointId,contractNumber'," +
"'COMPLEX_DELIMITER_LEVEL_1'='$', 'COMPLEX_DELIMITER_LEVEL_2'=':')"
)
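    // COMPLEX_DELIMITER_LEVEL_1 ('$') splits the elements of a complex value and
    // COMPLEX_DELIMITER_LEVEL_2 (':') splits the nested fields inside each
    // element when the CSV is parsed.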
try {
CarbonProperties.getInstance()
.addProperty(TimeStampGranularityConstants.CARBON_CUTOFF_TIMESTAMP, "2000-12-13 02:10.00")
CarbonProperties.getInstance()
.addProperty(TimeStampGranularityConstants.CARBON_TIME_GRANULARITY,
TimeStampGranularityConstants.TIME_GRAN_SEC.toString
)
CarbonProperties.getInstance().addProperty("carbon.direct.dictionary", "true")
sql("drop table if exists directDictionaryTable")
sql("drop table if exists directDictionaryTable_hive")
sql(
"CREATE TABLE if not exists directDictionaryTable (empno int,doj Timestamp, salary int) " +
"STORED AS carbondata"
)
sql(
"CREATE TABLE if not exists directDictionaryTable_hive (empno int,doj Timestamp, salary int) " +
"row format delimited fields terminated by ','"
)
val csvFilePath = s"$resourcesPath/rangedatasample.csv"
sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE directDictionaryTable OPTIONS" +
"('DELIMITER'= ',', 'QUOTECHAR'= '\\"')")
sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE directDictionaryTable_hive")
} catch {
case x: Throwable =>
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
}
}
test("test for dictionary columns"){
checkAnswer(
sql("select empno,empname,workgroupcategory from DICTIONARY_CARBON_6 where workgroupcategory > 1 and workgroupcategory < 5"),
sql("select empno,empname,workgroupcategory from DICTIONARY_HIVE_6 where workgroupcategory > 1 and workgroupcategory < 5")
)
}
test("test for dictionary columns OR "){
checkAnswer(
sql("select empno,empname,workgroupcategory from DICTIONARY_CARBON_6 where workgroupcategory > 1 or workgroupcategory < 5"),
sql("select empno,empname,workgroupcategory from DICTIONARY_HIVE_6 where workgroupcategory > 1 or workgroupcategory < 5")
)
}
test("test for dictionary columns OR AND"){
checkAnswer(
sql("select empno,empname,workgroupcategory from DICTIONARY_CARBON_6 where workgroupcategory > 1 or workgroupcategory < 5 and workgroupcategory > 3"),
sql("select empno,empname,workgroupcategory from DICTIONARY_HIVE_6 where workgroupcategory > 1 or workgroupcategory < 5 and workgroupcategory > 3")
)
}
test("test for dictionary columns OR AND OR"){
checkAnswer(
sql("select empno,empname,workgroupcategory from DICTIONARY_CARBON_6 where workgroupcategory > 1 or workgroupcategory < 5 and workgroupcategory > 3 or workgroupcategory < 10"),
sql("select empno,empname,workgroupcategory from DICTIONARY_HIVE_6 where workgroupcategory > 1 or workgroupcategory < 5 and workgroupcategory > 3 or workgroupcategory < 10")
)
}
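  // With the direct-dictionary properties set in beforeAll, timestamp values are
  // presumably stored as integer surrogates (seconds from the configured cut-off),
  // so the range filters below compare encoded ints rather than parsed dates.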
test("test range filter for direct dictionary"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > '2016-03-14 15:00:17'"),
sql("select doj from directDictionaryTable_hive where doj > '2016-03-14 15:00:17'")
)
}
test("test range filter for direct dictionary and"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > '2016-03-14 15:00:16' and doj < '2016-03-14 15:00:18'"),
Seq(Row(Timestamp.valueOf("2016-03-14 15:00:17.0"))
)
)
}
test("test range filter for direct dictionary equality"){
checkAnswer(
sql("select doj from directDictionaryTable where doj = '2016-03-14 15:00:16'"),
Seq(Row(Timestamp.valueOf("2016-03-14 15:00:16.0"))
)
)
}
test("test range filter for less than filter"){
sql("drop table if exists timestampTable")
sql("create table timestampTable (timestampCol timestamp) STORED AS carbondata ")
sql(s"load data local inpath '$resourcesPath/timestamp.csv' into table timestampTable")
checkAnswer(sql("select * from timestampTable where timestampCol='1970-01-01 05:30:00'"),
sql("select * from timestampTable where timestampCol<='1970-01-01 05:30:00'"))
sql("drop table if exists timestampTable")
}
test("test range filter for direct dictionary not equality"){
checkAnswer(
sql("select doj from directDictionaryTable where doj != '2016-03-14 15:00:16'"),
Seq(Row(Timestamp.valueOf("2016-03-14 15:00:09.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:10.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:11.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:12.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:13.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:14.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:15.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:17.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:18.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:19.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:20.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:24.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:25.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:31.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:35.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:38.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:39.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:49.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:50.0")))
)
}
test("test range filter for direct dictionary and with explicit casts"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > cast ('2016-03-14 15:00:16' as timestamp) and doj < cast ('2016-03-14 15:00:18' as timestamp)"),
Seq(Row(Timestamp.valueOf("2016-03-14 15:00:17.0"))
)
)
}
/*
Commented this test case
test("test range filter for direct dictionary and with DirectVals as long") {
checkAnswer(
sql(
"select doj from directDictionaryTable where doj > cast (1457992816l as timestamp) and doj < cast (1457992818l as timestamp)"),
Seq(Row(Timestamp.valueOf("2016-03-14 15:00:17.0"))
)
)
}
*/
// Test of Cast Optimization
test("test range filter for different Timestamp formats"){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj = '2016-03-14 15:00:180000000'"),
Seq(Row(0)
)
)
}
test("test range filter for different Timestamp formats1"){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj = '03-03-14 15:00:18'"),
Seq(Row(0)
)
)
}
test("test range filter for different Timestamp formats2"){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj = '2016-03-14'"),
Seq(Row(0)
)
)
}
test("test range filter for different Timestamp formats3"){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj > '2016-03-14 15:00:18.000'"),
sql("select count(*) from directDictionaryTable_hive where doj > '2016-03-14 15:00:18.000'")
)
}
test("test range filter for different Timestamp In format "){
checkAnswer(
sql("select doj from directDictionaryTable where doj in ('2016-03-14 15:00:18', '2016-03-14 15:00:17')"),
Seq(Row(Timestamp.valueOf("2016-03-14 15:00:17.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:18.0")))
)
}
/*
test("test range filter for different Timestamp Not In format 5"){
sql("select doj from directDictionaryTable where doj not in (null, '2016-03-14 15:00:18', '2016-03-14 15:00:17','2016-03-14 15:00:11', '2016-03-14 15:00:12')").show(200, false)
checkAnswer(
sql("select doj from directDictionaryTable where doj Not in (null, '2016-03-14 15:00:18', '2016-03-14 15:00:17','2016-03-14 15:00:11', '2016-03-14 15:00:12')"),
Seq(Row(Timestamp.valueOf("2016-03-14 15:00:09.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:10.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:13.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:14.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:15.0")),
Row(Timestamp.valueOf("2016-03-14 15:00:16.0")))
)
}
*/
test("test range filter for direct dictionary and boundary"){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj > '2016-03-14 15:00:18.0' and doj < '2016-03-14 15:00:09.0'"),
Seq(Row(0)
)
)
}
test("test range filter for direct dictionary and boundary 2"){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj > '2016-03-14 15:00:23.0' and doj < '2016-03-14 15:00:60.0'"),
sql("select count(*) from directDictionaryTable_hive where doj > '2016-03-14 15:00:23.0' and doj < '2016-03-14 15:00:60.0'")
)
}
test("test range filter for direct dictionary and boundary 3"){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj >= '2016-03-14 15:00:23.0' and doj < '2016-03-14 15:00:60.0'"),
sql("select count(*) from directDictionaryTable_hive where doj >= '2016-03-14 15:00:23.0' and doj < '2016-03-14 15:00:60.0'")
)
}
test("test range filter for direct dictionary and boundary 4"){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj > '2016-03-14 15:00:23.0' and doj < '2016-03-14 15:00:40.0'"),
sql("select count(*) from directDictionaryTable_hive where doj > '2016-03-14 15:00:23.0' and doj < '2016-03-14 15:00:40.0'")
)
}
test("test range filter for direct dictionary and boundary 5"){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj > '2016-03-14 15:00:23.0' and doj <= '2016-03-14 15:00:40.0'"),
sql("select count(*) from directDictionaryTable_hive where doj > '2016-03-14 15:00:23.0' and doj <= '2016-03-14 15:00:40.0'")
)
}
test("test range filter for direct dictionary more values after filter"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > '2016-03-14 15:00:09'"),
sql("select doj from directDictionaryTable_hive where doj > '2016-03-14 15:00:09'")
)
}
test("test range filter for direct dictionary or condition"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > '2016-03-14 15:00:09' or doj > '2016-03-14 15:00:15'"),
sql("select doj from directDictionaryTable_hive where doj > '2016-03-14 15:00:09' or doj > '2016-03-14 15:00:15'")
)
}
test("test range filter for direct dictionary or and condition"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > '2016-03-14 15:00:09' or doj > '2016-03-14 15:00:15' and doj < '2016-03-14 15:00:13'"),
sql("select doj from directDictionaryTable_hive where doj > '2016-03-14 15:00:09' or doj > '2016-03-14 15:00:15' and doj < '2016-03-14 15:00:13'")
)
}
test("test range filter for direct dictionary with no data in csv"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > '2016-03-14 15:05:09' or doj > '2016-03-14 15:05:15' and doj < '2016-03-14 15:50:13'"),
sql("select doj from directDictionaryTable_hive where doj > '2016-03-14 15:05:09' or doj > '2016-03-14 15:05:15' and doj < '2016-03-14 15:50:13'")
)
}
// use cast for range
test("test range filter for direct dictionary cast"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > cast ('2016-03-14 15:00:17' as timestamp)"),
sql("select doj from directDictionaryTable_hive where doj > cast ('2016-03-14 15:00:17' as timestamp)")
)
}
test("test range filter for direct dictionary cast and"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > cast ('2016-03-14 15:00:16' as timestamp) and doj < cast ('2016-03-14 15:00:18' as timestamp)"),
Seq(Row(Timestamp.valueOf("2016-03-14 15:00:17.0"))
)
)
}
test("test range filter for direct dictionary and boundary cast "){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj > cast ('2016-03-14 15:00:18.0' as timestamp) and doj < cast ('2016-03-14 15:00:09.0' as timestamp)"),
Seq(Row(0)
)
)
}
test("test range filter for direct dictionary and boundary 2 cast "){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj > cast('2016-03-14 15:00:23.0' as timestamp) and doj < cast ('2016-03-14 15:00:60.0' as timestamp)"),
sql("select count(*) from directDictionaryTable_hive where doj > cast('2016-03-14 15:00:23.0' as timestamp) and doj < cast ('2016-03-14 15:00:60.0' as timestamp)")
)
}
test("test range filter for direct dictionary and boundary 3 cast"){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj >= cast('2016-03-14 15:00:23.0' as timestamp) and doj < cast('2016-03-14 15:00:60.0' as timestamp)"),
sql("select count(*) from directDictionaryTable_hive where doj >= cast('2016-03-14 15:00:23.0' as timestamp) and doj < cast('2016-03-14 15:00:60.0' as timestamp)")
)
}
test("test range filter for direct dictionary and boundary 4 cast"){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj > cast('2016-03-14 15:00:23.0' as timestamp) and doj < cast('2016-03-14 15:00:40.0' as timestamp)"),
sql("select count(*) from directDictionaryTable_hive where doj > cast('2016-03-14 15:00:23.0' as timestamp) and doj < cast('2016-03-14 15:00:40.0' as timestamp)")
)
}
test("test range filter for direct dictionary and boundary 5 cast "){
checkAnswer(
sql("select count(*) from directDictionaryTable where doj > cast('2016-03-14 15:00:23.0' as timestamp) and doj <= cast('2016-03-14 15:00:40.0' as timestamp)"),
sql("select count(*) from directDictionaryTable_hive where doj > cast('2016-03-14 15:00:23.0' as timestamp) and doj <= cast('2016-03-14 15:00:40.0' as timestamp)")
)
}
test("test range filter for direct dictionary more values after filter cast"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > cast('2016-03-14 15:00:09' as timestamp)"),
sql("select doj from directDictionaryTable_hive where doj > cast('2016-03-14 15:00:09' as timestamp)")
)
}
test("test range filter for direct dictionary or condition cast"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > cast('2016-03-14 15:00:09' as timestamp) or doj > cast('2016-03-14 15:00:15' as timestamp)"),
sql("select doj from directDictionaryTable_hive where doj > cast('2016-03-14 15:00:09' as timestamp) or doj > cast('2016-03-14 15:00:15' as timestamp)")
)
}
test("test range filter for direct dictionary or and condition cast"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > cast('2016-03-14 15:00:09' as timestamp) or doj > cast('2016-03-14 15:00:15' as timestamp) and doj < cast('2016-03-14 15:00:13' as timestamp)"),
sql("select doj from directDictionaryTable_hive where doj > cast('2016-03-14 15:00:09' as timestamp) or doj > cast('2016-03-14 15:00:15' as timestamp) and doj < cast('2016-03-14 15:00:13' as timestamp)")
)
}
test("test range filter for direct dictionary with no data in csv cast"){
checkAnswer(
sql("select doj from directDictionaryTable where doj > cast('2016-03-14 15:05:09' as timestamp) or doj > cast('2016-03-14 15:05:15' as timestamp) and doj < cast('2016-03-14 15:50:13' as timestamp)"),
sql("select doj from directDictionaryTable_hive where doj > cast('2016-03-14 15:05:09' as timestamp) or doj > cast('2016-03-14 15:05:15' as timestamp) and doj < cast('2016-03-14 15:50:13' as timestamp)")
)
}
test("test range filter for measure in dictionary include"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 where deptno > 10 "),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 where deptno > 10 ")
)
}
test("test range filter for measure in dictionary include and condition"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 where deptno > 10 and deptno < 10"),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 where deptno > 10 and deptno < 10")
)
}
test("test range filter for measure in dictionary include or condition"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 where deptno > 10 or deptno < 10"),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 where deptno > 10 or deptno < 10")
)
}
test("test range filter for measure in dictionary include or and condition"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 where deptno > 10 or deptno < 15 and deptno >12"),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 where deptno > 10 or deptno < 10 and deptno >12")
)
}
test("test range filter for measure in dictionary include or and condition 1"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 where deptno > 10 or deptno < 15 and deptno > 12"),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 where deptno > 10 or deptno < 15 and deptno > 12")
)
}
test("test range filter for measure in dictionary include boundary values"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 where deptno > 14 or deptno < 10 and deptno > 12"),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 where deptno > 14 or deptno < 10 and deptno > 12")
)
}
test("test range filter for measure in dictionary include same values and"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 where deptno > 14 and deptno < 14"),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 where deptno > 14 and deptno < 14")
)
}
test("test range filter for measure in dictionary include same values or"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 where deptno > 14 or deptno < 14"),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 where deptno > 14 or deptno < 14")
)
}
test("test for dictionary exclude columns"){
checkAnswer(
sql("select empno,empname,workgroupcategory from DICTIONARY_CARBON_6 where empno > '11' and empno < '15'"),
sql("select empno,empname,workgroupcategory from DICTIONARY_HIVE_6 where empno > '11' and empno < '15'")
)
}
test("test for dictionary exclude columns or condition"){
checkAnswer(
sql("select empno,empname,workgroupcategory from DICTIONARY_CARBON_6 where empno > '11' or empno > '15'"),
sql("select empno,empname,workgroupcategory from DICTIONARY_HIVE_6 where empno > '11' or empno > '15'")
)
}
test("test for dictionary exclude columns or and condition"){
checkAnswer(
sql("select empno,empname,workgroupcategory from DICTIONARY_CARBON_6 where empno > '11' or empno > '20' and empno < '18'"),
sql("select empno,empname,workgroupcategory from DICTIONARY_HIVE_6 where empno > '11' or empno > '20' and empno < '18'")
)
}
test("test for dictionary exclude columns boundary condition"){
checkAnswer(
sql("select empno,empname,workgroupcategory from DICTIONARY_CARBON_6 where empno < '11' or empno > '20'"),
sql("select empno,empname,workgroupcategory from DICTIONARY_HIVE_6 where empno < '11' or empno > '20'")
)
}
test("test range filter for multiple columns and condition"){
checkAnswer(
sql("select empno,empname,workgroupcategory from DICTIONARY_CARBON_6 where empno > '11' and workgroupcategory > 2"),
sql("select empno,empname,workgroupcategory from DICTIONARY_HIVE_6 where empno > '11' and workgroupcategory > 2")
)
}
test("test range filter for multiple columns or condition"){
checkAnswer(
sql("select empno,empname,workgroupcategory from DICTIONARY_CARBON_6 where empno > '11' or workgroupcategory > 2"),
sql("select empno,empname,workgroupcategory from DICTIONARY_HIVE_6 where empno > '11' or workgroupcategory > 2")
)
}
test("test range filter for multiplecolumns conditions"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_8 where empno > '13' and workgroupcategory < 3 "),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_8 where empno > '13' and workgroupcategory < 3 ")
)
}
test("test range filter No Dictionary Range"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_8 where empno > '13' and empno < '17'"),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_8 where empno > '13' and empno < '17'")
)
}
test("test range filter for more columns conditions"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_8 where empno > '13' and workgroupcategory < 3 and deptno > 12 and empno < '17'"),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_8 where empno > '13' and workgroupcategory < 3 and deptno >12 and empno < '17'")
)
}
test("test range filter for multiple columns and or combination"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_8 where empno > '13' or workgroupcategory < 3 and deptno > 12 and projectcode > 928478 and empno > '17' "),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_8 where empno > '13' or workgroupcategory < 3 and deptno >12 and projectcode > 928478 and empno > '17'")
)
}
test("test range filter for more columns boundary conditions"){
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_8 where empno > '13' and empno < '17' and workgroupcategory < 1 and deptno > 14 "),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_8 where empno > '13' and empno < '17' and workgroupcategory < 1 and deptno > 14 ")
)
}
override def afterAll {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
sql("drop table if exists filtertestTable")
sql("drop table if exists NO_DICTIONARY_HIVE_1")
sql("drop table if exists NO_DICTIONARY_CARBON_1")
sql("drop table if exists NO_DICTIONARY_CARBON_2")
sql("drop table if exists NO_DICTIONARY_HIVE_6")
sql("drop table if exists directdictionarytable")
sql("drop table if exists dictionary_hive_6")
sql("drop table if exists NO_DICTIONARY_HIVE_7")
sql("drop table if exists NO_DICTIONARY_CARBON_6")
sql("drop table if exists NO_DICTIONARY_CARBON")
sql("drop table if exists NO_DICTIONARY_HIVE")
sql("drop table if exists complexcarbontable")
sql("drop table if exists DICTIONARY_CARBON_6")
sql("drop table if exists NO_DICTIONARY_CARBON_7")
sql("drop table if exists NO_DICTIONARY_CARBON_8")
sql("drop table if exists NO_DICTIONARY_HIVE_8")
sql("drop table if exists directdictionarytable_hive")
//sql("drop cube NO_DICTIONARY_CARBON_1")
}
} | jackylk/incubator-carbondata | integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala | Scala | apache-2.0 | 31,289 |
//package regolic.smt.qfeuf
//
//import regolic.asts.fol.Trees._
//import regolic.asts.core.Trees._
//import regolic.asts.core.Manip._
//
//import org.scalatest.FunSuite
//
//class FastCongruenceClosureSuite extends FunSuite {
//
// import FastCongruenceClosure._
//
// private val sort = Sort("A", List())
// private val f1Sym = FunctionSymbol("f1", List(sort), sort)
// private val f2Sym = FunctionSymbol("f2", List(sort, sort), sort)
// private val f3Sym = FunctionSymbol("f3", List(sort, sort, sort), sort)
// private val g1Sym = FunctionSymbol("g1", List(sort), sort)
// private val g2Sym = FunctionSymbol("g2", List(sort, sort), sort)
// private val g3Sym = FunctionSymbol("g3", List(sort, sort, sort), sort)
// private def f1(t: Term): Term = FunctionApplication(f1Sym, List(t))
// private def f2(t1: Term, t2: Term): Term = FunctionApplication(f2Sym, List(t1, t2))
// private def f3(t1: Term, t2: Term, t3: Term): Term = FunctionApplication(f3Sym, List(t1, t2, t3))
// private def g1(t: Term): Term = FunctionApplication(g1Sym, List(t))
// private def g2(t1: Term, t2: Term): Term = FunctionApplication(g2Sym, List(t1, t2))
// private def g3(t1: Term, t2: Term, t3: Term): Term = FunctionApplication(g3Sym, List(t1, t2, t3))
//
// private val x = Variable("v", sort)
// private val y = Variable("v", sort)
// private val z = Variable("v", sort)
//
// private val aSym = FunctionSymbol("a", List(), sort)
// private val bSym = FunctionSymbol("b", List(), sort)
// private val cSym = FunctionSymbol("c", List(), sort)
// private val a = FunctionApplication(aSym, List())
// private val b = FunctionApplication(bSym, List())
// private val c = FunctionApplication(cSym, List())
//
// test("basic merge") {
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3)
// assert(!cc1.areCongruent(Constant(0), Constant(1)))
// assert(!cc1.areCongruent(Constant(1), Constant(2)))
// assert(!cc1.areCongruent(Constant(0), Constant(2)))
// cc1.merge(0, 1)
// assert(cc1.areCongruent(Constant(0), Constant(1)))
// cc1.merge(1, 2)
// assert(cc1.areCongruent(Constant(1), Constant(2)))
// assert(cc1.areCongruent(Constant(0), Constant(2)))
// assert(cc1.areCongruent(Constant(2), Constant(0)))
//
// val cc2 = new FastCongruenceClosure
// cc2.initialize(5)
// assert(!cc2.areCongruent(Constant(0), Constant(1)))
// assert(!cc2.areCongruent(Constant(1), Constant(2)))
// assert(!cc2.areCongruent(Constant(0), Constant(2)))
// assert(!cc2.areCongruent(Constant(2), Constant(4)))
// cc2.merge(0, 1)
// assert(cc2.areCongruent(Constant(0), Constant(1)))
// cc2.merge(3, 2)
// assert(!cc2.areCongruent(Constant(1), Constant(2)))
// assert(!cc2.areCongruent(Constant(0), Constant(2)))
// assert(!cc2.areCongruent(Constant(2), Constant(4)))
// assert(cc2.areCongruent(Constant(2), Constant(3)))
//
// cc2.merge(0, 4)
// assert(cc2.areCongruent(Constant(0), Constant(4)))
// assert(cc2.areCongruent(Constant(1), Constant(4)))
// assert(!cc2.areCongruent(Constant(0), Constant(2)))
// assert(!cc2.areCongruent(Constant(2), Constant(4)))
//
// cc2.merge(3, 4)
// assert(cc2.areCongruent(Constant(0), Constant(4)))
// assert(cc2.areCongruent(Constant(1), Constant(4)))
// assert(cc2.areCongruent(Constant(0), Constant(2)))
// assert(cc2.areCongruent(Constant(2), Constant(4)))
// assert(cc2.areCongruent(Constant(3), Constant(1)))
// assert(cc2.areCongruent(Constant(3), Constant(4)))
// }
//
// test("merge with apply") {
// val cc1 = new FastCongruenceClosure
// cc1.initialize(4)
// cc1.merge(0, 1, 2) //g(a) = b
// assert(!cc1.areCongruent(Constant(0), Constant(1)))
// assert(!cc1.areCongruent(Constant(0), Constant(2)))
// assert(cc1.areCongruent(Apply(Constant(0), Constant(1)), Constant(2))) //assert g(a) = b
// cc1.merge(2, 3) // b = c
// assert(cc1.areCongruent(Apply(Constant(0), Constant(1)), Constant(3))) //assert g(a) = c
// assert(!cc1.areCongruent(Constant(0), Constant(1)))
// assert(!cc1.areCongruent(Constant(0), Constant(2)))
// assert(!cc1.areCongruent(Constant(0), Constant(3)))
// assert(!cc1.areCongruent(Constant(1), Constant(2)))
// assert(!cc1.areCongruent(Constant(1), Constant(3)))
// cc1.merge(0, 1, 3) //g(a) = c
// assert(!cc1.areCongruent(Constant(0), Constant(1)))
// assert(!cc1.areCongruent(Constant(0), Constant(2)))
// assert(!cc1.areCongruent(Constant(0), Constant(3)))
// assert(!cc1.areCongruent(Constant(1), Constant(2)))
// assert(!cc1.areCongruent(Constant(1), Constant(3)))
//
// val cc2 = new FastCongruenceClosure
// cc2.initialize(4) //f, a, b, c
// cc2.merge(0, 1, 2) //f(a) = b
// assert(!cc2.areCongruent(Constant(2), Constant(3))) // b != c
// cc2.merge(0, 1, 3) //f(a) = c
// assert(cc2.areCongruent(Constant(2), Constant(3))) // b = c
//
// val cc3 = new FastCongruenceClosure
// cc3.initialize(5) //f, a, b, c, d
// cc3.merge(0, 1, 2) //f(a) = b
// cc3.merge(0, 2, 3) //f(f(a)) = c
// cc3.merge(0, 3, 1) //f(f(f(a))) = a
// assert(cc3.areCongruent(Apply(Constant(0), Apply(Constant(0), Apply(Constant(0), Constant(1)))), Constant(1)))
// assert(!cc3.areCongruent(Apply(Constant(0), Apply(Constant(0), Constant(1))), Constant(1)))
//
// val cc4 = new FastCongruenceClosure
// cc4.initialize(8)
// cc4.merge(6, 0, 2)
// cc4.merge(6, 1, 3)
// cc4.merge(7, 3, 5)
// cc4.merge(7, 4, 0)
// cc4.merge(0, 1)
// cc4.merge(4, 2)
// assert(cc4.areCongruent(Constant(0), Constant(5)))
//
// val cc5 = new FastCongruenceClosure
// cc5.initialize(8)
// cc5.merge(6, 0, 2)
// cc5.merge(6, 1, 3)
// cc5.merge(7, 3, 5)
// cc5.merge(7, 4, 0)
// cc5.merge(4, 2)
// cc5.merge(0, 1)
// assert(cc5.areCongruent(Constant(0), Constant(5)))
// }
//
// test("simple explain") {
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3)
// cc1.merge(0, 1)
// val ex1 = cc1.explain(0, 1)
// assert(ex1.size === 1)
// assert(ex1.head === Left((0, 1)))
// cc1.merge(1,2)
// val ex2 = cc1.explain(1, 2)
// assert(ex2.size === 1)
// assert(ex2.head === Left((1, 2)))
// val ex3 = cc1.explain(0, 2)
// assert(ex3.size === 2)
// assert(ex3.contains(Left((1, 2))))
// assert(ex3.contains(Left((0, 1))))
//
// val cc2 = new FastCongruenceClosure
// cc2.initialize(3)
// cc2.merge(1, 0)
// val ex4 = cc2.explain(0, 1)
// assert(ex4.size === 1)
// assert(ex4.head === Left((1, 0)))
// cc2.merge(1,2)
// val ex5 = cc2.explain(1, 2)
// assert(ex5.size === 1)
// assert(ex5.head === Left((1, 2)))
// val ex6 = cc2.explain(0, 2)
// assert(ex6.size === 2)
// assert(ex6.contains(Left((1, 2))))
// assert(ex6.contains(Left((1, 0))))
//
// val cc3 = new FastCongruenceClosure
// cc3.initialize(4)
// cc3.merge(1, 0)
// cc3.merge(2, 3)
// val ex7 = cc3.explain(3, 2)
// assert(ex7.size === 1)
// assert(ex7.head === Left((2, 3)))
// cc3.merge(1, 2)
// val ex8 = cc3.explain(0, 2)
// assert(ex8.size === 2)
// assert(ex8.contains(Left((1, 2))))
// assert(ex8.contains(Left((1, 0))))
// val ex9 = cc3.explain(1, 3)
// assert(ex9.size === 2)
// assert(ex9.contains(Left((1, 2))))
// assert(ex9.contains(Left((2, 3))))
// val ex10 = cc3.explain(0, 3)
// assert(ex10.size === 3)
// assert(ex10.contains(Left((1, 0))))
// assert(ex10.contains(Left((2, 3))))
// assert(ex10.contains(Left((1, 2))))
// }
//
// test("explain with apply") {
// val cc1 = new FastCongruenceClosure
// cc1.initialize(4)
// cc1.merge(0, 1, 2) //f(a) = b
// cc1.merge(0, 1, 3) //f(a) = c
// val ex1 = cc1.explain(2, 3)
// assert(ex1.size == 2)
// assert(ex1.contains(Right((0, 1, 2))))
// assert(ex1.contains(Right((0, 1, 3))))
//
// val cc2 = new FastCongruenceClosure
// cc2.initialize(5)
// cc2.merge(0, 1, 3) //f(a) = c
// cc2.merge(0, 2, 4) //f(b) = d
// cc2.merge(1, 2) //a = b
// val ex2 = cc2.explain(3, 4)
// assert(ex2.size == 3)
// assert(ex2.contains(Left((1, 2))))
// assert(ex2.contains(Right((0, 1, 3))))
// assert(ex2.contains(Right((0, 2, 4))))
//
// }
//
// test("positive setTrue") {
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(1, 2), 0, true, null)
// val lit3 = Literal(Left(0, 2), 0, true, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3, Set(lit1, lit2, lit3))
// assert(!cc1.isTrue(lit1))
// cc1.setTrue(lit1)
// assert(cc1.isTrue(lit1))
// assert(!cc1.isTrue(lit2))
// cc1.setTrue(lit2)
// assert(cc1.isTrue(lit1))
// assert(cc1.isTrue(lit2))
// assert(cc1.isTrue(lit3))
// }
//
// test("negative setTrue") {
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(1, 2), 0, true, null)
// val lit3 = Literal(Left(0, 2), 0, true, null)
// val lit4 = Literal(Left(0, 1), 0, false, null)
// val lit5 = Literal(Left(1, 2), 0, false, null)
// val lit6 = Literal(Left(0, 2), 0, false, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3, Set(lit1, lit2, lit3, lit4, lit5, lit6))
// cc1.setTrue(lit1)
// assert(cc1.isTrue(lit1))
// assert(!cc1.isTrue(lit2))
// assert(!cc1.isTrue(lit4))
// cc1.setTrue(lit2)
// assert(cc1.isTrue(lit1))
// assert(cc1.isTrue(lit2))
// assert(cc1.isTrue(lit3))
// assert(!cc1.isTrue(lit4))
// assert(!cc1.isTrue(lit5))
// assert(!cc1.isTrue(lit6))
//
// val cc2 = new FastCongruenceClosure
// cc2.initialize(3, Set(lit1, lit2, lit3, lit4, lit5, lit6))
// cc2.setTrue(lit4)
// assert(cc2.isTrue(lit4))
// assert(!cc2.isTrue(lit1))
// cc2.setTrue(lit2)
// assert(cc2.isTrue(lit2))
// assert(cc2.isTrue(lit4))
// assert(cc2.isTrue(lit6))
// assert(!cc2.isTrue(lit3))
// }
//
// test("setTrue InconsistencyException") {
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(1, 2), 1, true, null)
// val lit3 = Literal(Left(0, 2), 2, false, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3, Set(lit1, lit2, lit3))
// cc1.setTrue(lit3)
// cc1.setTrue(lit1)
// intercept[InconsistencyException]{cc1.setTrue(lit2)}
//
// val cc2 = new FastCongruenceClosure
// cc2.initialize(3, Set(lit1, lit2, lit3))
// cc2.setTrue(lit1)
// cc2.setTrue(lit3)
// intercept[InconsistencyException]{cc2.setTrue(lit2)}
//
// val cc3 = new FastCongruenceClosure
// cc3.initialize(3, Set(lit1, lit2, lit3))
// cc3.setTrue(lit1)
// cc3.setTrue(lit2)
// intercept[InconsistencyException]{cc3.setTrue(lit3)}
//
// val lit4 = Literal(Left(2, 3), 3, true, null)
// val lit5 = Literal(Left(0, 1), 0, false, null)
//
// val cc4 = new FastCongruenceClosure
// cc4.initialize(5, Set(lit1, lit2, lit3, lit4, lit5))
// cc4.merge(4, 2, 0) //f(c) = a
// cc4.merge(4, 3, 1) //f(d) = b
// cc4.setTrue(lit4)
// intercept[InconsistencyException]{cc4.setTrue(lit5)}
//
// val cc5 = new FastCongruenceClosure
// cc5.initialize(5, Set(lit1, lit2, lit3, lit4, lit5))
// cc5.merge(4, 2, 0) //f(c) = a
// cc5.merge(4, 3, 1) //f(d) = b
// cc5.setTrue(lit5)
// intercept[InconsistencyException]{cc5.setTrue(lit4)}
//
// val lit7 = Literal(Left(0, 0), 4, false, null)
// val cc6 = new FastCongruenceClosure
// cc6.initialize(1, Set(lit7))
// intercept[InconsistencyException]{cc6.setTrue(lit7)}
// }
//
// test("setTrue InconsistencyException with apply") {
// val lit1 = Literal(Left(3, 4), 0, true, null)
// val lit2 = Literal(Left(3, 4), 0, false, null)
// val lit3 = Literal(Left(1, 2), 0, true, null)
// val cc1 = new FastCongruenceClosure
// cc1.initialize(5, Set(lit1, lit2, lit3))
// cc1.merge(0, 1, 3)
// cc1.merge(0, 2, 4)
// cc1.setTrue(lit2)
// intercept[InconsistencyException]{cc1.setTrue(lit3)}
// }
//
// test("advanced setTrue") {
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(2, 3), 0, true, null)
// val lit3 = Literal(Left(0, 3), 0, false, null)
// val lit4 = Literal(Left(1, 2), 0, false, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(4, Set(lit1, lit2, lit3, lit4))
// cc1.setTrue(lit1)
// cc1.setTrue(lit2)
// assert(!cc1.isTrue(lit4))
// assert(!cc1.isTrue(lit3))
// cc1.setTrue(lit3)
// assert(cc1.isTrue(lit4))
//
// val cc2 = new FastCongruenceClosure
// cc2.initialize(4, Set(lit1, lit2, lit3, lit4))
// cc2.setTrue(lit3)
// assert(cc2.isTrue(lit3))
// assert(!cc2.isTrue(lit1))
// assert(!cc2.isTrue(lit2))
// cc2.setTrue(lit1)
// cc2.setTrue(lit2)
// assert(cc2.isTrue(lit3))
// assert(cc2.isTrue(lit4))
//
// val lit5 = Literal(Left(1, 3), 0, true, null)
// val lit6 = Literal(Left(0, 2), 0, true, null)
//
// val cc3 = new FastCongruenceClosure
// cc3.initialize(4, Set(lit1, lit2, lit3, lit4, lit5, lit6))
// cc3.setTrue(lit1)
// cc3.setTrue(lit3)
// cc3.setTrue(lit4)
// intercept[InconsistencyException]{ cc3.setTrue(lit5) }
// intercept[InconsistencyException]{ cc3.setTrue(lit6) }
// }
//
// test("setTrue basic theory propagation") {
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(1, 2), 1, true, null)
// val lit3 = Literal(Left(0, 2), 2, true, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3, Set(lit1, lit2, lit3))
// val csq1 = cc1.setTrue(lit1)
// assert(csq1.isEmpty)
// val csq2 = cc1.setTrue(lit2)
// assert(csq2.size === 1)
// assert(csq2.contains(lit3))
//
// val lit4 = Literal(Left(2, 3), 3, true, null)
// val lit5 = Literal(Left(0, 3), 4, true, null)
// val lit6 = Literal(Left(1, 3), 5, true, null)
// val cc2 = new FastCongruenceClosure
// cc2.initialize(4, Set(lit1, lit2, lit3, lit4, lit5, lit6))
// val csq3 = cc2.setTrue(lit1)
// assert(csq3.isEmpty)
// val csq4 = cc2.setTrue(lit4)
// assert(csq4.isEmpty)
// val csq5 = cc2.setTrue(lit2)
// assert(csq5.size === 3)
// assert(csq5.contains(lit5))
// assert(csq5.contains(lit3))
// assert(csq5.contains(lit6))
// }
//
// test("setTrue theory propagation of negative literals") {
// val lit1 = Literal(Left(0, 1), 0, false, null)
// val lit2 = Literal(Left(1, 2), 1, true, null)
// val lit3 = Literal(Left(0, 2), 2, false, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3, Set(lit1, lit2, lit3))
// val csq1 = cc1.setTrue(lit1)
// assert(csq1.isEmpty)
// val csq2 = cc1.setTrue(lit2)
// assert(csq2.size === 1)
// assert(csq2.contains(lit3))
//
// val lit4 = Literal(Left(2, 3), 3, true, null)
// val lit5 = Literal(Left(1, 3), 4, true, null)
// val lit6 = Literal(Left(0, 3), 5, false, null)
// val cc2 = new FastCongruenceClosure
// cc2.initialize(4, Set(lit1, lit2, lit3, lit4, lit5, lit6))
// val csq3 = cc2.setTrue(lit1)
// assert(csq3.isEmpty)
// val csq4 = cc2.setTrue(lit4)
// assert(csq4.isEmpty)
// val csq5 = cc2.setTrue(lit2)
// assert(csq5.size === 3)
// assert(csq5.contains(lit3))
// assert(csq5.contains(lit5))
// assert(csq5.contains(lit6))
//
// val lit7 = Literal(Left(0, 1), 0, true, null)
// val lit8 = Literal(Left(0, 4), 6, false, null)
// val lit9 = Literal(Left(1, 4), 7, false, null)
// val lit10 = Literal(Left(2, 4), 8, false, null)
// val lit11 = Literal(Left(3, 4), 9, false, null)
// val cc3 = new FastCongruenceClosure
// cc3.initialize(5, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7, lit8, lit9, lit10, lit11))
// cc3.setTrue(lit7)
// cc3.setTrue(lit4)
// cc3.setTrue(lit8)
// cc3.setTrue(lit9)
// val csq6 = cc3.setTrue(lit5)
// assert(csq6.size === 3)
// assert(csq6.contains(lit2))
// assert(csq6.contains(lit10))
// assert(csq6.contains(lit11))
//
// val cc4 = new FastCongruenceClosure
// cc4.initialize(5, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7, lit8, lit9, lit10, lit11))
// cc4.setTrue(lit7) //a = b
// cc4.setTrue(lit4) //c = d
// val csq7 = cc4.setTrue(lit8) //a != e
// assert(csq7.size === 1)
// assert(csq7.contains(lit9)) //b != e
// val csq8 = cc4.setTrue(lit5) //b = d
// assert(csq8.size === 3)
// assert(csq8.contains(lit2)) //b = c
// assert(csq8.contains(lit10)) //c != e
// assert(csq8.contains(lit11)) //d != e
//
// val cc5 = new FastCongruenceClosure
// cc5.initialize(5, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7, lit8, lit9, lit10, lit11))
// cc5.setTrue(lit7) //a = b
// cc5.setTrue(lit4) //c = d
// val csq9 = cc5.setTrue(lit10) //c != e
// assert(csq9.size === 1)
// assert(csq9.contains(lit11))
// val csq10 = cc5.setTrue(lit5)
// assert(csq10.size === 3)
// assert(csq10.contains(lit2))
// assert(csq10.contains(lit8))
// assert(csq10.contains(lit9))
// }
//
// test("negative setTrue theory propagation") {
// val lit1 = Literal(Left(1, 2), 0, true, null)
// val lit2 = Literal(Left(0, 1), 1, false, null)
// val lit3 = Literal(Left(0, 2), 2, false, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3, Set(lit1, lit2, lit3))
// val csq1 = cc1.setTrue(lit1)
// assert(csq1.isEmpty)
// val csq2 = cc1.setTrue(lit2)
// assert(csq2.size === 1)
// assert(csq2.contains(lit3))
// }
//
// test("setTrue propagation basics redundancy") {
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(1, 2), 1, true, null)
// val lit3 = Literal(Left(2, 3), 2, false, null)
// val lit4 = Literal(Left(2, 4), 3, true, null)
// val lit5 = Literal(Left(3, 4), 4, false, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(5, Set(lit1, lit2, lit3, lit4, lit5))
// cc1.setTrue(lit1)
// cc1.setTrue(lit2)
// cc1.setTrue(lit3)
// cc1.setTrue(lit5)
// assert(cc1.setTrue(lit4).isEmpty)
//
// val lit6 = Literal(Left(2, 3), 2, true, null)
// val lit7 = Literal(Left(1, 2), 1, false, null)
// val lit8 = Literal(Left(0, 3), 5, false, null)
// val cc2 = new FastCongruenceClosure
// cc2.initialize(5, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7, lit8))
// cc2.setTrue(lit1) //a = b
// cc2.setTrue(lit6) //c = d
// assert(cc2.setTrue(lit7).size === 1) //b != c
// assert(cc2.setTrue(lit8).isEmpty) //a != d
// }
//
// test("setTrue with apply") {
// val lit1 = Literal(Left(1, 2), 0, true, null)
// val lit2 = Literal(Left(3, 4), 1, true, null)
// val lit3 = Literal(Left(1, 3), 2, true, null)
// val lit4 = Literal(Left(2, 4), 3, true, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(5, Set(lit1, lit2, lit3, lit4))
// cc1.merge(0, 1, 3) //f(a) = b
// cc1.merge(0, 2, 4) //f(c) = d
// val csq1 = cc1.setTrue(lit1)
// assert(csq1.size === 1)
// assert(csq1.contains(lit2))
//
// val cc2 = new FastCongruenceClosure
// cc2.initialize(5, Set(lit1, lit2, lit3, lit4))
// cc2.merge(0, 1, 3) //f(a) = b
// cc2.merge(0, 2, 4) //f(c) = d
// val csq2 = cc2.setTrue(lit2)
// assert(csq2.size === 0)
// }
//
// test("negative setTrue with apply") {
// val lit1 = Literal(Left(1, 2), 0, true, null)
// val lit2 = Literal(Left(1, 2), 0, false, null)
// val lit3 = Literal(Left(3, 4), 0, true, null)
// val lit4 = Literal(Left(3, 4), 0, false, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(5, Set(lit1, lit2, lit3, lit4))
// cc1.merge(0, 1, 3) //f(a) = b
// cc1.merge(0, 2, 4) //f(c) = d
// val csq1 = cc1.setTrue(lit2)
// assert(csq1.size === 0)
//
// val cc2 = new FastCongruenceClosure
// cc2.initialize(5, Set(lit1, lit2, lit3, lit4))
// cc2.merge(0, 1, 3) //f(a) = b
// cc2.merge(0, 2, 4) //f(c) = d
// val csq2 = cc2.setTrue(lit4)
// println(csq2)
// }
//
// test("basic explanation") {
// val lit1 = Literal(Left(1, 2), 0, true, null)
// val lit2 = Literal(Left(0, 1), 0, true, null)
// val lit3 = Literal(Left(0, 2), 0, true, null)
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3, Set(lit1, lit2, lit3))
// cc1.setTrue(lit1)
// cc1.setTrue(lit2)
// val expl1 = cc1.explanation(lit3)
// assert(expl1.size === 2)
// assert(expl1.contains(lit1))
// assert(expl1.contains(lit2))
//
// val lit4 = Literal(Left(2, 3), 0, true, null)
// val lit5 = Literal(Left(0, 3), 0, true, null)
// val cc2 = new FastCongruenceClosure
// cc2.initialize(4, Set(lit1, lit2, lit3, lit4, lit5))
// cc2.setTrue(lit2)
// cc2.setTrue(lit4)
// cc2.setTrue(lit1)
// val expl2 = cc2.explanation(lit5)
// assert(expl2.size === 3)
// assert(expl2.contains(lit2))
// assert(expl2.contains(lit4))
// assert(expl2.contains(lit1))
//
// val lit6 = Literal(Left(0, 4), 0, true, null)
// val cc3 = new FastCongruenceClosure
// cc3.initialize(5, Set(lit1, lit2, lit3, lit4, lit5, lit6))
// cc3.setTrue(lit2)
// cc3.setTrue(lit6) //add irrelevant literal in explanation
// cc3.setTrue(lit4)
// cc3.setTrue(lit1)
// val expl3 = cc3.explanation(lit5)
// assert(expl3.size === 3)
// assert(expl3.contains(lit2))
// assert(expl3.contains(lit4))
// assert(expl3.contains(lit1))
//    assert(!expl3.contains(lit6)) //explanation should not contain lit6
// }
//
// test("basic explanation returns same literal") {
// val lit1 = Literal(Left(1, 2), 0, true, null)
// val lit2 = Literal(Left(0, 1), 1, true, null)
// val lit3 = Literal(Left(0, 2), 2, true, null)
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3, Set(lit1, lit2, lit3))
// cc1.setTrue(lit1)
// cc1.setTrue(lit2)
// val expl1 = cc1.explanation(lit3)
// assert(expl1.size === 2)
// assert(expl1.contains(lit1))
// assert(expl1.contains(lit2))
// }
//
//
// test("explanation with apply basic") {
//
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(2, 3), 0, true, null)
// val lit3 = Literal(Left(1, 2), 0, true, null)
// val lit4 = Literal(Left(0, 3), 0, true, null)
// val lit5 = Literal(Right(4, 0, 2), 0, true, null)
// val lit6 = Literal(Right(4, 1, 3), 0, true, null)
// val cc1 = new FastCongruenceClosure
// cc1.initialize(5, Set(lit1, lit2, lit3, lit4, lit5, lit6))
// cc1.merge(4, 0, 2) //TODO: should be passed via setTrue maybe ?
// cc1.merge(4, 1, 3)
// cc1.setTrue(lit1)
// val expl1 = cc1.explanation(lit2)
// /* //TODO: should it return the lit5 and 6 ?
// assert(expl1.size === 3)
// assert(expl1.contains(lit1))
// assert(expl1.contains(lit5))
// assert(expl1.contains(lit6))
// */
// assert(expl1.size === 1)
// assert(expl1.contains(lit1))
//
// val lit7 = Literal(Left(0,5), 0, true, null)
// val cc2 = new FastCongruenceClosure
// cc2.initialize(6, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7))
// cc2.merge(4, 0, 2)
// cc2.merge(4, 1, 3)
// cc2.setTrue(lit7)
// cc2.setTrue(lit1)
// val expl2 = cc2.explanation(lit2)
// /*
// assert(expl2.size === 3)
// assert(expl2.contains(lit1))
// assert(expl2.contains(lit5))
// assert(expl2.contains(lit6))
// */
// assert(expl2.size === 1)
// assert(expl2.contains(lit1))
//
// }
//
// test("explanation with apply advanced") {
// val lit1 = Literal(Left(2, 3), 0, true, null) //c = d
// val lit2 = Literal(Left(4, 2), 0, true, null) //e = c
// val lit3 = Literal(Left(4, 1), 0, true, null) //e = b
// val lit4 = Literal(Left(1, 5), 0, true, null) //b = f
// val lit5 = Literal(Left(0, 1), 0, true, null) //a = b
// val lit6 = Literal(Right(6, 5, 3), 0, true, null) //g(f) = d
//    val lit7 = Literal(Right(6, 3, 0), 0, true, null) //g(d) = a
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(7, Set(lit1, lit2, lit3, lit4, lit5))
// cc1.merge(6, 5, 3) //g(f) = d
// cc1.merge(6, 3, 0) //g(d) = a
// cc1.setTrue(lit1)
// cc1.setTrue(lit2)
// cc1.setTrue(lit3)
// cc1.setTrue(lit4)
// val expl1 = cc1.explanation(lit5)
// //assert(expl1.size == 6)
// assert(expl1.size == 4)
// assert(expl1.contains(lit1))
// assert(expl1.contains(lit2))
// assert(expl1.contains(lit3))
// assert(expl1.contains(lit4))
// //assert(expl1.contains(lit6))
// //assert(expl1.contains(lit7))
// }
//
// test("explanation of negative setTrue") {
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(1, 2), 0, false, null)
// val lit3 = Literal(Left(0, 2), 0, false, null)
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3, Set(lit1, lit2, lit3))
// cc1.setTrue(lit1)
// cc1.setTrue(lit2)
// val expl1 = cc1.explanation(lit3)
// assert(expl1.size === 2)
// assert(expl1.contains(lit1))
// assert(expl1.contains(lit2))
//
// val lit4 = Literal(Left(2, 3), 0, true, null)
// val lit5 = Literal(Left(0, 3), 0, false, null)
// val cc2 = new FastCongruenceClosure
// cc2.initialize(4, Set(lit1, lit2, lit3, lit4, lit5))
// cc2.setTrue(lit1)
// cc2.setTrue(lit4)
// cc2.setTrue(lit2)
// val expl2 = cc2.explanation(lit5)
// assert(expl2.size === 3)
// assert(expl2.contains(lit1))
// assert(expl2.contains(lit4))
// assert(expl2.contains(lit2))
//
// val lit6 = Literal(Left(0, 4), 0, true, null)
// val cc3 = new FastCongruenceClosure
// cc3.initialize(5, Set(lit1, lit2, lit3, lit4, lit5, lit6))
// cc3.setTrue(lit1)
// cc3.setTrue(lit6) //add irrelevant literal in explanation
// cc3.setTrue(lit4)
// cc3.setTrue(lit2)
// val expl3 = cc3.explanation(lit5)
// assert(expl3.size === 3)
// assert(expl3.contains(lit1))
// assert(expl3.contains(lit4))
// assert(expl3.contains(lit2))
//    assert(!expl3.contains(lit6)) //explanation should not contain lit6
//
// val lit7 = Literal(Left(0, 4), 0, false, null)
// val lit8 = Literal(Left(1, 4), 0, false, null)
// val lit9 = Literal(Left(2, 4), 0, false, null)
// val lit10 = Literal(Left(3, 4), 0, false, null)
// val lit11 = Literal(Left(0, 3), 0, true, null)
// val cc4 = new FastCongruenceClosure
// cc4.initialize(5, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7, lit8, lit9, lit10, lit11))
// cc4.setTrue(lit1) //a = b
// cc4.setTrue(lit4) //c = d
// cc4.setTrue(lit7) //a != e
// val csq1 = cc4.setTrue(lit11) //a = d
// assert(csq1.size === 2)
// assert(csq1.contains(lit9))
// assert(csq1.contains(lit10))
// val expl4 = cc4.explanation(lit9)
// assert(expl4.contains(lit7))
// val expl5 = cc4.explanation(lit10)
// assert(expl5.contains(lit7))
//
// val cc5 = new FastCongruenceClosure
// cc5.initialize(5, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7, lit8, lit9, lit10, lit11))
// cc5.setTrue(lit1) //a = b
// cc5.setTrue(lit4) //c = d
// cc5.setTrue(lit7) //a != e
// cc5.setTrue(lit8) //b != e
// val csq2 = cc5.setTrue(lit11) //a = d
// assert(csq2.size === 2)
// assert(csq2.contains(lit9))
// assert(csq2.contains(lit10))
// val expl6 = cc5.explanation(lit9)
//    assert(expl6.contains(lit7) || expl6.contains(lit8))
// val expl7 = cc5.explanation(lit10)
//    assert(expl7.contains(lit7) || expl7.contains(lit8))
// }
//
// test("backtrack basic") {
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(1, 2), 0, true, null)
// val lit3 = Literal(Left(0, 2), 0, true, null)
// val lit4 = Literal(Left(0, 1), 0, false, null)
// val lit5 = Literal(Left(0, 2), 0, false, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3, Set(lit1, lit2, lit3, lit4, lit5))
// cc1.setTrue(lit1)
// cc1.backtrack(1)
// cc1.setTrue(lit4)
// val csq1 = cc1.setTrue(lit2)
// assert(csq1.size === 1)
// assert(csq1.contains(lit5))
// assert(cc1.isTrue(lit5))
// assert(cc1.isTrue(lit4))
// assert(!cc1.isTrue(lit1))
//
// val cc2 = new FastCongruenceClosure
// cc2.initialize(3, Set(lit1, lit2, lit3, lit4, lit5))
// cc2.setTrue(lit1)
// cc2.setTrue(lit2)
// cc2.backtrack(2)
// cc2.setTrue(lit4)
// val csq2 = cc2.setTrue(lit2)
// assert(csq2.size === 1)
// assert(csq2.contains(lit5))
// assert(cc2.isTrue(lit5))
// assert(!cc2.isTrue(lit1))
// assert(cc2.isTrue(lit2))
//
// val lit6 = Literal(Left(2, 3), 0, true, null)
// val lit7 = Literal(Left(1, 3), 0, false, null)
// val cc3 = new FastCongruenceClosure
// cc3.initialize(4, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7))
// cc3.setTrue(lit1)
// cc3.setTrue(lit6)
// cc3.setTrue(lit2)
// cc3.backtrack(1)
// cc3.setTrue(lit5)
//    assert(cc3.isTrue(lit7))
//
// val lit8 = Literal(Left(3, 4), 0, true, null)
// val cc4 = new FastCongruenceClosure
// cc4.initialize(5, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7, lit8))
// cc4.setTrue(lit1)
// cc4.setTrue(lit5)
// intercept[InconsistencyException]{ cc4.setTrue(lit2) }
// cc4.backtrack(2)
// cc4.setTrue(lit8)
// cc4.setTrue(lit2)
// assert(cc4.isTrue(lit3))
//
// val cc5 = new FastCongruenceClosure
// cc5.initialize(2, Set(lit1))
// assert(!cc5.isTrue(lit1))
// cc5.setTrue(lit1)
// assert(cc5.isTrue(lit1))
// cc5.backtrack(1)
// assert(!cc5.isTrue(lit1))
//
//
// }
//
// test("backtrack with apply") {
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(1, 2), 0, true, null)
// val lit3 = Literal(Left(2, 3), 0, true, null)
// val lit4 = Literal(Left(0, 3), 0, true, null) //a = d
// val lit5 = Literal(Left(1, 4), 0, true, null)
// val lit6 = Literal(Left(0, 4), 0, true, null)
// val lit7 = Literal(Left(0, 2), 0, false, null)
// val lit8 = Literal(Left(0, 2), 0, true, null)
// val lit9 = Literal(Left(5, 2), 0, false, null) //f != c
// val lit10 = Literal(Left(4, 2), 0, true, null)
// val lit11 = Literal(Left(5, 0), 0, true, null)
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(7, Set(lit1, lit2, lit3, lit4, lit5, lit9))
// cc1.merge(6, 0, 2)
// cc1.merge(6, 1, 3)
// cc1.setTrue(lit1)
// assert(cc1.isTrue(lit3))
// cc1.backtrack(1)
// assert(!cc1.isTrue(lit3))
// cc1.setTrue(lit1)
// assert(cc1.isTrue(lit3))
//
// val cc2 = new FastCongruenceClosure
// cc2.initialize(7, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit9))
// cc2.merge(6, 0, 2)
// cc2.merge(6, 1, 3)
// cc2.setTrue(lit1)
// assert(cc2.isTrue(lit3))
// cc2.backtrack(1)
// assert(!cc2.isTrue(lit3))
// cc2.setTrue(lit5)
// assert(!cc2.isTrue(lit3))
// cc2.setTrue(lit6)
// assert(cc2.isTrue(lit1))
// assert(cc2.isTrue(lit3))
// cc2.backtrack(2)
// cc2.setTrue(lit6)
// cc2.setTrue(lit5)
// assert(cc2.isTrue(lit1))
// assert(cc2.isTrue(lit3))
//
// val cc3 = new FastCongruenceClosure
// cc3.initialize(3)
// cc3.setTrue(lit7)
// cc3.setTrue(lit1)
// intercept[InconsistencyException]{ cc3.setTrue(lit2) }
// cc3.backtrack(2)
// intercept[InconsistencyException]{ cc3.setTrue(lit8) }
//
// val cc4 = new FastCongruenceClosure
// cc4.initialize(8)
// cc4.merge(6, 0, 2)
// cc4.merge(6, 1, 3)
// cc4.merge(7, 3, 5)
// cc4.merge(7, 4, 0)
// cc4.setTrue(lit1)
// assert(cc4.isTrue(lit3))
// cc4.setTrue(lit10)
// assert(cc4.isTrue(lit11))
// cc4.backtrack(2)
// cc4.setTrue(lit10)
// assert(!cc4.isTrue(lit11))
// cc4.setTrue(lit1)
// assert(cc4.isTrue(lit11))
//
// val cc5 = new FastCongruenceClosure
// cc5.initialize(7)
// cc5.merge(6, 4, 1) //g(e) = b
// cc5.merge(6, 4, 5) //g(e) = f
// cc5.merge(6, 5, 1) //g(f) = b
// cc5.merge(6, 5, 2) //g(f) = c
// intercept[InconsistencyException]{ cc5.setTrue(lit9) }
// cc5.backtrack(1)
// cc5.setTrue(lit4)
// assert(cc5.isTrue(lit4))
// assert(!cc5.isTrue(lit9))
//
//    //this testcase attempts to force lookup to pick up an entry that is not removed by backtracking
// val lit12 = Literal(Left(4, 0), 0, true, null)
// val cc6 = new FastCongruenceClosure
// cc6.initialize(7)
// cc6.merge(6, 0, 2) //f(a) = c
// cc6.merge(6, 1, 3) //f(b) = d
// cc6.setTrue(lit6) //a = e
// assert(cc6.isTrue(lit6))
// assert(!cc6.isTrue(lit1))
// assert(!cc6.isTrue(lit3))
// cc6.backtrack(1)
// assert(!cc6.isTrue(lit6))
// cc6.setTrue(lit5) //b = e
// assert(cc6.isTrue(lit5))
// assert(!cc6.isTrue(lit1))
// assert(!cc6.isTrue(lit3))
// }
//
// test("backtracking with explanation") {
// val lit1 = Literal(Left(1, 2), 0, true, null)
// val lit2 = Literal(Left(0, 1), 0, true, null)
// val lit3 = Literal(Left(0, 2), 0, true, null)
// val cc1 = new FastCongruenceClosure
// cc1.initialize(3, Set(lit1, lit2, lit3))
// cc1.setTrue(lit1)
// cc1.setTrue(lit2)
// cc1.backtrack(1)
// cc1.setTrue(lit2)
// val expl1 = cc1.explanation(lit3)
// assert(expl1.size === 2)
// assert(expl1.contains(lit1))
// assert(expl1.contains(lit2))
//
// val lit4 = Literal(Left(2, 3), 0, true, null)
// val lit5 = Literal(Left(0, 3), 0, true, null)
// val cc2 = new FastCongruenceClosure
// cc2.initialize(4, Set(lit1, lit2, lit3, lit4, lit5))
// cc2.setTrue(lit2)
// cc2.setTrue(lit4)
// cc2.setTrue(lit1)
// cc2.backtrack(1)
// cc2.setTrue(lit1)
// val expl2 = cc2.explanation(lit5)
// assert(expl2.size === 3)
// assert(expl2.contains(lit2))
// assert(expl2.contains(lit4))
// assert(expl2.contains(lit1))
//
// val lit6 = Literal(Left(0, 4), 0, true, null)
// val cc3 = new FastCongruenceClosure
// cc3.initialize(5, Set(lit1, lit2, lit3, lit4, lit5, lit6))
// cc3.setTrue(lit2)
// cc3.setTrue(lit6) //add irrelevant literal in explanation
// cc3.setTrue(lit4)
// cc3.setTrue(lit1)
// cc3.backtrack(1)
// cc3.setTrue(lit1)
// val expl3 = cc3.explanation(lit5)
// assert(expl3.size === 3)
// assert(expl3.contains(lit2))
// assert(expl3.contains(lit4))
// assert(expl3.contains(lit1))
//    assert(!expl3.contains(lit6)) //explanation should not contain lit6
// cc3.backtrack(4)
// cc3.setTrue(lit2)
// cc3.setTrue(lit4)
// cc3.setTrue(lit1)
// val expl4 = cc3.explanation(lit5)
// assert(expl4.size === 3)
// assert(expl4.contains(lit2))
// assert(expl4.contains(lit4))
// assert(expl4.contains(lit1))
//    assert(!expl4.contains(lit6)) //explanation should not contain lit6
//
// val lit7 = Literal(Left(1, 3), 0, true, null)
// val cc4 = new FastCongruenceClosure
// cc4.initialize(4, Set(lit1, lit2, lit3, lit4, lit7))
// cc4.setTrue(lit1)
// cc4.setTrue(lit2)
// assert(cc4.isTrue(lit3))
// cc4.backtrack(1)
// cc4.setTrue(lit4)
// assert(cc4.isTrue(lit7))
// assert(!cc4.isTrue(lit3))
// val expl5 = cc4.explanation(lit7)
// assert(expl5.size === 2)
// assert(expl5.contains(lit1))
// assert(expl5.contains(lit4))
// }
//
// test("Theory propagation not redundant") {
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(1, 3), 1, true, null)
// val lit3 = Literal(Left(0, 2), 2, true, null)
// val lit4 = Literal(Left(2, 3), 3, true, null)
// val lit5 = Literal(Left(0, 3), 4, true, null)
// val lit6 = Literal(Left(2, 3), 5, false, null) //c != d
// val lit7 = Literal(Left(1, 2), 6, true, null) //b == c
// val lit8 = Literal(Left(0, 3), 4, false, null) //a != d
// val lit9 = Literal(Left(5, 4), 7, true, null) // f == e
// val lit10 = Literal(Left(2, 4), 8, true, null) // c == e
// val lit11 = Literal(Left(3, 4), 9, false, null) // d != e
// val lit12 = Literal(Left(4, 5), 10, true, null) // e == f
// val lit13 = Literal(Left(0, 5), 11, true, null) // a == f
//
// val cc1 = new FastCongruenceClosure
// cc1.initialize(6, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7, lit8, lit9, lit10, lit11, lit12, lit13))
// cc1.setTrue(lit1) //a == b
// cc1.setTrue(lit6) //c != d
// val csq1 = cc1.setTrue(lit7) //b == c
// assert(csq1.size === 2)
// assert(csq1.contains(lit3))
// assert(csq1.contains(lit8))
// cc1.setTrue(lit11) //e != d
// cc1.setTrue(lit12) //e == f
// val csq2 = cc1.setTrue(lit13) //a == f
// assert(!csq2.contains(lit3))
// assert(!csq2.contains(lit8))
// }
//
// test("Explanation no cycle") {
// val lit1 = Literal(Left(0, 1), 0, true, null)
// val lit2 = Literal(Left(1, 3), 1, true, null)
// val lit3 = Literal(Left(0, 2), 2, true, null)
// val lit4 = Literal(Left(2, 3), 3, true, null)
// val lit5 = Literal(Left(0, 3), 4, true, null)
// val cc1 = new FastCongruenceClosure
// cc1.initialize(4, Set(lit1, lit2, lit3, lit4, lit5))
// cc1.setTrue(lit1)
// cc1.setTrue(lit2)
// assert(cc1.isTrue(lit5))
// cc1.setTrue(lit3)
// cc1.setTrue(lit4)
// val expl1 = cc1.explanation(lit5)
// assert(expl1.size === 2)
// assert(expl1.contains(lit1))
// assert(expl1.contains(lit2))
// cc1.backtrack(2)
// cc1.setTrue(lit3)
// cc1.setTrue(lit4)
// val expl2 = cc1.explanation(lit5)
// assert(expl2.size === 2)
// assert(expl2.contains(lit1))
// assert(expl2.contains(lit2))
//
// val lit6 = Literal(Left(2, 3), 5, false, null) //c != d
// val lit7 = Literal(Left(1, 2), 6, true, null) //b == c
// val lit8 = Literal(Left(0, 3), 4, false, null) //a != d
// val lit9 = Literal(Left(5, 4), 7, true, null) // f == e
// val lit10 = Literal(Left(2, 4), 8, true, null) // c == e
// val lit11 = Literal(Left(3, 4), 9, false, null) // d != e
// val lit12 = Literal(Left(4, 5), 10, true, null) // e == f
// val lit13 = Literal(Left(0, 5), 11, true, null) // a == f
// val cc2 = new FastCongruenceClosure
// cc2.initialize(6, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7, lit8, lit9, lit10))
// cc2.setTrue(lit1) // a == b
// cc2.setTrue(lit6) // c != d
// cc2.setTrue(lit7) // b == c
// assert(cc2.isTrue(lit8)) // a != d
// val expl3 = cc2.explanation(lit8)
// assert(expl3.size === 3)
// assert(expl3.contains(lit1))
// assert(expl3.contains(lit6))
// assert(expl3.contains(lit7))
// cc2.setTrue(lit9)
// cc2.setTrue(lit10)
// val expl4 = cc2.explanation(lit8)
// assert(expl4.size === 3)
// assert(expl4.contains(lit1))
// assert(expl4.contains(lit6))
// assert(expl4.contains(lit7))
//
// val cc3 = new FastCongruenceClosure
// cc3.initialize(6, Set(lit1, lit2, lit3, lit4, lit5, lit6, lit7, lit8, lit9, lit10, lit11, lit12, lit13))
// cc3.setTrue(lit1) //a == b
// cc3.setTrue(lit6) //c != d
// cc3.setTrue(lit7) //b == c
// cc3.setTrue(lit11) //e != d
// cc3.setTrue(lit12) //e == f
// cc3.setTrue(lit13) //a == f
// val expl5 = cc2.explanation(lit8)
// assert(expl5.size === 3)
// assert(expl5.contains(lit1))
// assert(expl5.contains(lit6))
// assert(expl5.contains(lit7))
// }
//
// //TODO: test redundant setTrue (multiple same, or implied ones), with backtracking and explain
// //TODO: test with different literals id
//
//}
| regb/scabolic | src/test/scala/regolic/smt/qfeuf/FastCongruenceClosureSuite.scala | Scala | mit | 39,110 |
// These are meant to be typed into the REPL. You can also run
// scala -Xnojline < repl-session.scala to run them all at once.
val a = Array(1, 1, 2, 3, 5, 8, 13, 21, 34, 55)
for (i <- 0 until a.length)
println(i + ": " + a(i))
0 until a.length
0 until (a.length, 2)
(0 until a.length).reverse
for (elem <- a)
println(elem)
| P7h/ScalaPlayground | Scala for the Impatient/examples/ch03/sec03/repl-session.scala | Scala | apache-2.0 | 336 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import config.{BaseControllerComponents, FrontendAppConfig}
import featureswitch.core.config.ShortOrgName
import models.api._
import play.api.mvc.{Action, AnyContent}
import services.{ApplicantDetailsService, SessionProfile, SessionService, VatRegistrationService}
import uk.gov.hmrc.auth.core.AuthConnector
import uk.gov.hmrc.http.InternalServerException
import javax.inject.{Inject, Singleton}
import scala.concurrent.{ExecutionContext, Future}
@Singleton
class TradingNameResolverController @Inject()(val sessionService: SessionService,
val authConnector: AuthConnector,
vatRegistrationService: VatRegistrationService,
applicantDetailsService: ApplicantDetailsService
)(implicit val appConfig: FrontendAppConfig,
val executionContext: ExecutionContext,
baseControllerComponents: BaseControllerComponents)
extends BaseController with SessionProfile {
//scalastyle:off
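  // Routes to the appropriate name-capture page based on party type: sole
  // traders and NETPs get the mandatory trading name page, (Scottish)
  // partnerships the partnership name page, and incorporated entities the
  // trading name page when a company name is held (or the short-org-name page
  // when that name exceeds 105 characters and ShortOrgName is enabled),
  // falling back to a business or partnership name page when no name is held.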
def resolve: Action[AnyContent] = isAuthenticatedWithProfile() {
implicit request =>
implicit profile =>
vatRegistrationService.partyType.flatMap {
case Individual | NETP =>
Future.successful(Redirect(controllers.registration.business.routes.MandatoryTradingNameController.show))
case Partnership | ScotPartnership =>
Future.successful(Redirect(controllers.registration.business.routes.PartnershipNameController.show))
case UkCompany | RegSociety | CharitableOrg | Trust | UnincorpAssoc | NonUkNonEstablished =>
applicantDetailsService.getCompanyName.map {
case Some(companyName) if companyName.length > 105 & isEnabled(ShortOrgName) => Redirect(controllers.registration.business.routes.ShortOrgNameController.show)
case Some(_) => Redirect(controllers.registration.business.routes.TradingNameController.show)
case None => Redirect(controllers.registration.business.routes.BusinessNameController.show)
}
case ScotLtdPartnership | LtdPartnership | LtdLiabilityPartnership =>
applicantDetailsService.getCompanyName.map {
case Some(companyName) if companyName.length > 105 & isEnabled(ShortOrgName) => Redirect(controllers.registration.business.routes.ShortOrgNameController.show)
case Some(_) => Redirect(controllers.registration.business.routes.TradingNameController.show)
case None => Redirect(controllers.registration.business.routes.PartnershipNameController.show)
}
case pt => throw new InternalServerException(s"PartyType: $pt not supported")
}
}
}
| hmrc/vat-registration-frontend | app/controllers/TradingNameResolverController.scala | Scala | apache-2.0 | 3,469 |
package net.mtgto.garoon
import com.github.nscala_time.time.Imports._
import java.net.URI
import org.apache.axis2.Constants
import org.apache.axis2.addressing.EndpointReference
import org.apache.axis2.client.{Options, ServiceClient}
import org.apache.axiom.om.{OMAbstractFactory, OMElement}
import org.apache.axiom.soap.SOAP12Constants
import org.apache.axis2.transport.http.HTTPConstants
import org.apache.commons.httpclient.Header
import scala.util.Try
/**
 * Garoon SOAP API client.
 * @note This class is not thread-safe.
 *
 * @param uri base URI of the Garoon SOAP endpoint
 */
class GaroonClient(uri: URI) {
val factory = OMAbstractFactory.getOMFactory
private[this] val serviceClient = new ServiceClient
private[this] val scheme = uri.getScheme
private[this] val namespace = factory.createOMNamespace("http://wsdl.cybozu.co.jp/api/2008", "tns")
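  /**
   * Sends a SOAP request and returns the response element.
   *
   * @param actionName SOAP action name, also used as the request element name
   * @param actionPath path appended to the base URI for this action
   * @param parameters element holding the action parameters
   * @param auth password or session-cookie authentication
   * @param requestTokenOpt optional request token appended to the parameters
   */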
def sendReceive(actionName: String, actionPath: String, parameters: OMElement)
(implicit auth: Authentication, requestTokenOpt: Option[RequestToken] = None): Try[OMElement] = {
serviceClient.removeHeaders
val header = createHeader(actionName, DateTime.now, DateTime.now + 1.day, auth)
serviceClient.addHeader(header)
val option = createOption(actionName, actionPath, auth)
serviceClient.setOptions(option)
requestTokenOpt.foreach { requestToken =>
val tokenNode = factory.createOMElement("request_token", null)
tokenNode.setText(requestToken.value)
parameters.addChild(tokenNode)
}
val request = factory.createOMElement(actionName, namespace)
request.addChild(parameters)
Try(serviceClient.sendReceive(request))
}
private[this] def createHeader(actionName: String, createdTime: DateTime, expiredTime: DateTime, auth: Authentication): OMElement = {
val namespace = factory.createOMNamespace(SOAP12Constants.SOAP_ENVELOPE_NAMESPACE_URI, "soapenv")
val header = factory.createOMElement("Header", namespace)
header.addChild(createAction(actionName))
val security = auth match {
case Password(username, password) => createSecurity(username, password)
case _ => createSecurity("", "")
}
header.addChild(security)
header.addChild(createTimestamp(createdTime, expiredTime))
header
}
private[this] def createAction(actionName: String): OMElement = {
val namespace = factory.createOMNamespace("http://schemas.xmlsoap.org/ws/2003/03/addressing", "")
val actionElement = factory.createOMElement("Action", namespace)
actionElement.addChild(factory.createOMText(actionElement, actionName))
actionElement
}
private[this] def createSecurity(username: String, password: String): OMElement = {
val namespace = factory.createOMNamespace("http://schemas.xmlsoap.org/ws/2002/12/secext", "")
val securityElement = factory.createOMElement("Security", namespace)
val usernameTokenElement = factory.createOMElement("UsernameToken", namespace)
val usernameElement = factory.createOMElement("Username", namespace)
usernameElement.addChild(factory.createOMText(usernameElement, username))
val passwordElement = factory.createOMElement("Password", namespace)
passwordElement.addChild(factory.createOMText(passwordElement, password))
usernameTokenElement.addChild(usernameElement)
usernameTokenElement.addChild(passwordElement)
securityElement.addChild(usernameTokenElement)
securityElement
}
private[this] def createTimestamp(createdTime: DateTime, expiredTime: DateTime): OMElement = {
val namespace = factory.createOMNamespace("http://schemas.xmlsoap.org/ws/2002/07/utility", "")
val timestampElement = factory.createOMElement("Timestamp", namespace)
val created = factory.createOMElement("Created", namespace)
created.addChild(factory.createOMText(created, createdTime.toString()))
val expired = factory.createOMElement("Expires", namespace)
    expired.addChild(factory.createOMText(expired, expiredTime.toString()))
timestampElement.addChild(created)
timestampElement.addChild(expired)
timestampElement
}
private[this] def createOption(actionName: String, actionPath: String, auth: Authentication): Options = {
val options = new Options
val actionUri = uri.toString + actionPath
options.setTo(new EndpointReference(actionUri.toString))
options.setTransportInProtocol(scheme)
options.setProperty(HTTPConstants.CHUNKED, Constants.VALUE_FALSE)
options.setSoapVersionURI(SOAP12Constants.SOAP_ENVELOPE_NAMESPACE_URI)
options.setAction(actionName)
auth match {
case SessionCookie(cookie) => setCookieToOptions(options, cookie)
case _ => ()
}
options
}
private[this] def setCookieToOptions(options: Options, cookie: Cookie): Unit = {
val httpHeader = new Header("Cookie", "CBSESSID=" + cookie.value)
val httpHeaders = new java.util.ArrayList[Header]()
httpHeaders.add(httpHeader)
options.setProperty(HTTPConstants.HTTP_HEADERS, httpHeaders)
}
}
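// Usage sketch (hypothetical endpoint, action and credentials, for
// illustration only):
//   val client = new GaroonClient(new URI("https://example.cybozu.com/grn.exe"))
//   implicit val auth: Authentication = Password("user", "secret")
//   val params = client.factory.createOMElement("parameters", null)
//   val response = client.sendReceive("ScheduleGetEvents", "/cbpapi/schedule/api", params)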
| mtgto/garoon | src/main/scala/net/mtgto/garoon/GaroonClient.scala | Scala | gpl-3.0 | 5,008 |
/**
*
* TeeAPI
* Ledger wallet
*
* Created by Pierre Pollastri on 31/03/15.
*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.ledger.ledgerwallet.remote.api
import java.io.File
import android.content.Context
import android.os.Build
import com.ledger.ledgerwallet.remote.HttpClient
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Promise, Future}
import scala.util.{Try, Failure, Success}
class TeeAPI(context: Context, client: HttpClient = HttpClient.websocketInstance) {
private[this] var _lastResult: Option[Boolean] = None
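  /**
   * Checks whether this device is eligible for the TEE (Trusted Execution
   * Environment): a Mobicore device node must exist locally and the remote API
   * must report the device model as eligible. A successful lookup is cached.
   */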
def isDeviceEligible: Future[Boolean] = {
val p = Promise[Boolean]()
if (_lastResult.isDefined)
p.success(_lastResult.get)
else {
if (new File("/dev/mobicore").exists() || new File("/dev/mobicore-user").exists()) {
client.getJsonObject(s"/mobile/tee/${Build.MODEL}/is_eligible").future.onComplete {
case Success(json) =>
val value = Try(json.getBoolean("is_eligible"))
_lastResult = if (value.isSuccess) Option(value.get) else None
p.tryComplete(value)
case Failure(ex) => p.failure(ex)
}
} else {
p.success(false)
}
}
p.future
}
}
object TeeAPI {
private[this] var _defaultInstance: TeeAPI = _
def defaultInstance(implicit context: Context): TeeAPI = {
if (_defaultInstance == null)
_defaultInstance = new TeeAPI(context)
_defaultInstance
}
} | Morveus/ledger-wallet-android | app/src/main/scala/com/ledger/ledgerwallet/remote/api/TeeAPI.scala | Scala | mit | 2,574 |
package info.hargrave.composer.ui.cue
import info.hargrave.composer._
import info.hargrave.composer.backend.manager.projects.CUEProject
import info.hargrave.composer.ui.cue.FileDataView
import info.hargrave.composer.util.CUEUtilities._
import jwbroek.cuelib.{CueSheet, FileData, TrackData}
import scalafx.Includes._
import scalafx.geometry.Side
import scalafx.scene.control.TabPane.TabClosingPolicy
import scalafx.scene.control._
import scalafx.scene.layout._
/**
* Frontend implementation for [[CUEProject CUEProjects]]
*/
class CUEProjectUI(project: CUEProject) extends TabPane {
import info.hargrave.composer.ui.cue.CUESheetMemberTree.CueSheetMember
private def cueSheet: CueSheet = project.cueSheet match {
        case Some(sheet) => sheet
case None => throw new IllegalArgumentException("project has an invalid cuesheet")
}
rotateGraphic = true
tabClosingPolicy = TabClosingPolicy.UNAVAILABLE
side = Side.RIGHT
// Element Editor --------------------------------------------------------------------------------------------------
/*
* Contains the active cuesheet element
*/
private val elementsEditor = new SplitPane {
vgrow = Priority.Always
hgrow = Priority.Always
}
private val elementTree = new CUESheetMemberTree(cueSheet){
editable = true
vgrow = Priority.Always
}
elementsEditor.items.add(elementTree)
elementsEditor.items.add(new Pane)
elementsEditor.dividers(0).setPosition(.25)
elementTree.onSelectionChanged {(selection: Option[CueSheetMember]) =>
val newChild = selection match {
case Some(Left(fileData)) => new FileDataView(fileData) {
editable = true
vgrow = Priority.Always
hgrow = Priority.Always
}
case Some(Right(trackData)) => new TrackDataView(trackData) {
editable = true
vgrow = Priority.Always
hgrow = Priority.Always
}
case None => new Pane
}
/*
* Stop using intermediate panes, because debugging vstretch sucks.
*/
elementsEditor.getItems.set(1, newChild)
}
tabs += new Tab {
content = elementsEditor
text = t"ui.cue.member_data_editor"
}.delegate
// CUE Sheet Metadata Editor ---------------------------------------------------------------------------------------
private val sheetMetadataEditor = new MetaDataView(cueSheet) {
editable = true
vgrow = Priority.Always
hgrow = Priority.Always
}
tabs += new Tab {
content = sheetMetadataEditor
text = t"ui.cue.sheet_data_editor"
}.delegate
hgrow = Priority.Always
vgrow = Priority.Always
}
| RomanHargrave/CUEComposer | src/main/scala/info/hargrave/composer/ui/cue/CUEProjectUI.scala | Scala | gpl-3.0 | 2,900 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.Activity
import com.intel.analytics.bigdl.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{CaffeLoader, File}
import scala.reflect.ClassTag
object Module {
def load[T: ClassTag](path : String) : AbstractModule[Activity, Activity, T] = {
File.load[AbstractModule[Activity, Activity, T]](path)
}
def loadTorch[T: ClassTag](path : String) : AbstractModule[Activity, Activity, T] = {
File.loadTorch[AbstractModule[Activity, Activity, T]](path)
}
def loadCaffe[T: ClassTag](model: AbstractModule[Activity, Activity, T],
defPath: String, modelPath: String, matchAll: Boolean = true)(
implicit ev: TensorNumeric[T]): AbstractModule[Activity, Activity, T] = {
CaffeLoader.load[T](model, defPath, modelPath, matchAll)
}
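  /**
   * Flattens the given parameter tensors into one contiguous 1D tensor.
   * If the parameters already share a single compact storage, that storage is
   * reused as-is; otherwise their contents are copied into fresh storage and
   * each parameter tensor is re-pointed at its slice of the new storage.
   */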
def flatten[@specialized(Float, Double) T: ClassTag](parameters: Array[Tensor[T]])(
implicit ev: TensorNumeric[T]): Tensor[T] = {
val compactedTensor = isCompact(parameters)
if (compactedTensor != null) {
return compactedTensor
}
var i = 0
var length = 0
while (i < parameters.length) {
require(parameters(i).isContiguous())
length += parameters(i).nElement()
i += 1
}
val result = Tensor[T](length)
val resultStorage = result.storage()
i = 0
var offset = 0
while (i < parameters.length) {
System.arraycopy(parameters(i).storage().array(), parameters(i).storageOffset() - 1,
resultStorage.array(), offset, parameters(i).nElement())
parameters(i).set(resultStorage, offset + 1, parameters(i).size(), parameters(i).stride())
offset += parameters(i).nElement()
i += 1
}
result
}
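  /**
   * Returns a tensor wrapping the storage shared by all parameters when they
   * are backed by exactly one storage that they fully cover, or null otherwise.
   */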
  def isCompact[@specialized(Float, Double) T: ClassTag](parameters: Array[Tensor[T]])(
    implicit ev: TensorNumeric[T]): Tensor[T] = {
    require(parameters.length > 0)
    var i = 1
    val storage = parameters(0).storage()
    var length = parameters(0).nElement()
    while (i < parameters.length) {
      if (!storage.eq(parameters(i).storage())) {
        return null
      }
      length += parameters(i).nElement()
      i += 1
    }
    if (length != storage.array().length) {
      return null
    }
    return Tensor(storage)
  }
}
| psyyz10/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Module.scala | Scala | apache-2.0 | 3,015 |
package at.logic.gapt.examples
import org.specs2.mutable.Specification
class GapticExamplesTest extends Specification {
"examples" in { gapticExamples; ok }
"drinker" in { drinker; ok }
}
| gebner/gapt | tests/src/test/scala/at/logic/gapt/examples/GapticExamplesTest.scala | Scala | gpl-3.0 | 197 |
/*
* @author Philip Stutz
*
* Copyright 2012 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.signalcollect.triplerush.util
import com.signalcollect.Vertex
import com.signalcollect.interfaces.VertexStore
import com.signalcollect.triplerush.EfficientIndexPattern._
import scala.util.hashing.MurmurHash3._
object Hashing {
/**
* Inlined Murmur3, equivalent to:
* finalizeHash(mixLast(a, b), 7)
*/
@inline final def hash(a: Int, b: Int) = {
var k = b
k *= 0xcc9e2d51
k = (k << 15) | (k >>> -15)
k *= 0x1b873593
var h = a ^ k
h ^= 7
h ^= h >>> 16
h *= 0x85ebca6b
h ^= h >>> 13
h *= 0xc2b2ae35
h ^= h >>> 16
h
}
@inline final def avalanche(hash: Int): Int = {
var h = hash
h ^= h >>> 16
h *= 0x85ebca6b
h ^= h >>> 13
h *= 0xc2b2ae35
h ^= h >>> 16
h
}
@inline final def finalizeHash(h: Int, length: Int): Int = avalanche(h ^ length)
@inline final def rotateLeft(i: Int, distance: Int): Int = {
(i << distance) | (i >>> -distance)
}
@inline final def mixLast(a: Int, b: Int): Int = {
var k = b
k *= 0xcc9e2d51
k = rotateLeft(k, 15)
k *= 0x1b873593
a ^ k
}
}
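
// Added sketch (not in the original file): spot-checks the equivalence noted
// above between the inlined `hash` and the MurmurHash3 calls it replaces;
// finalizeHash and mixLast are already imported at the top of this file.
private object HashEquivalenceSketch {
  def holdsFor(a: Int, b: Int): Boolean =
    Hashing.hash(a, b) == finalizeHash(mixLast(a, b), 7)
}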
// A special adaptation of LongHashMap[Vertex[Long, _, Long, Any]].
// In TripleRush a vertex id is an efficient index pattern encoded in a
// Long, so the id itself can be stored directly as the key of an
// open-addressing table (an Array[Long], where 0 marks an empty slot).
// Because lookups compare the full 64-bit id, a collision in
// keyToPosition only causes linear probing and can never return the
// wrong vertex.
class TripleRushVertexMap(
initialSize: Int = 32768,
rehashFraction: Float = 0.75f) extends VertexStore[Long, Any] {
assert(initialSize > 0)
final var maxSize = nextPowerOfTwo(initialSize)
assert(1.0f >= rehashFraction && rehashFraction > 0.1f, "Unreasonable rehash fraction.")
assert(maxSize > 0 && maxSize >= initialSize, "Initial size is too large.")
private[this] final var maxElements: Int = (rehashFraction * maxSize).floor.toInt
private[this] final var values = new Array[Vertex[Long, _, Long, Any]](maxSize)
private[this] final var keys = new Array[Long](maxSize) // 0 means empty
private[this] final var mask = maxSize - 1
private[this] final var nextPositionToProcess = 0
final override def size: Long = numberOfElements
final def isEmpty: Boolean = numberOfElements == 0
private[this] final var numberOfElements = 0
def stream: Stream[Vertex[Long, _, Long, Any]] = {
def remainder(i: Int, elementsProcessed: Int): Stream[Vertex[Long, _, Long, Any]] = {
if (elementsProcessed == numberOfElements) {
Stream.empty
} else {
var index = i
var vertex = values(index)
while (vertex == null) {
index += 1
vertex = values(index)
}
Stream.cons(vertex, remainder(index + 1, elementsProcessed + 1))
}
}
remainder(0, 0)
}
final def clear {
values = new Array[Vertex[Long, _, Long, Any]](maxSize)
keys = new Array[Long](maxSize)
numberOfElements = 0
nextPositionToProcess = 0
}
final def foreach(f: Vertex[Long, _, Long, Any] => Unit) {
var i = 0
var elementsProcessed = 0
while (elementsProcessed < numberOfElements) {
val vertex = values(i)
if (vertex != null) {
f(vertex)
elementsProcessed += 1
}
i += 1
}
}
// Removes the vertices after they have been processed.
final def process(p: Vertex[Long, _, Long, Any] => Unit, numberOfVertices: Option[Int] = None): Int = {
val limit = math.min(numberOfElements, numberOfVertices.getOrElse(numberOfElements))
var elementsProcessed = 0
while (elementsProcessed < limit) {
val vertex = values(nextPositionToProcess)
if (vertex != null) {
p(vertex)
elementsProcessed += 1
keys(nextPositionToProcess) = 0
values(nextPositionToProcess) = null
numberOfElements -= 1
}
nextPositionToProcess = (nextPositionToProcess + 1) & mask
}
if (elementsProcessed > 0) {
optimizeFromPosition(nextPositionToProcess)
}
limit
}
// Removes the vertices after they have been processed.
final def processWithCondition(p: Vertex[Long, _, Long, Any] => Unit, breakCondition: () => Boolean): Int = {
val limit = numberOfElements
var elementsProcessed = 0
while (elementsProcessed < limit && !breakCondition()) {
val vertex = values(nextPositionToProcess)
if (vertex != null) {
p(vertex)
elementsProcessed += 1
keys(nextPositionToProcess) = 0
values(nextPositionToProcess) = null
numberOfElements -= 1
}
nextPositionToProcess = (nextPositionToProcess + 1) & mask
}
if (elementsProcessed > 0) {
optimizeFromPosition(nextPositionToProcess)
}
elementsProcessed
}
private[this] final def tryDouble {
// 1073741824 is the largest size and cannot be doubled anymore.
if (maxSize != 1073741824) {
val oldSize = maxSize
val oldValues = values
val oldKeys = keys
val oldNumberOfElements = numberOfElements
maxSize *= 2
maxElements = (rehashFraction * maxSize).floor.toInt
values = new Array[Vertex[Long, _, Long, Any]](maxSize)
keys = new Array[Long](maxSize)
mask = maxSize - 1
numberOfElements = 0
var i = 0
var elementsMoved = 0
while (elementsMoved < oldNumberOfElements) {
if (oldKeys(i) != 0) {
put(oldValues(i))
elementsMoved += 1
}
i += 1
}
}
}
final def remove(vertexId: Long) {
remove(vertexId, true)
}
private final def remove(vertexId: Long, optimize: Boolean) {
var position = keyToPosition(vertexId)
var keyAtPosition = keys(position)
while (keyAtPosition != 0 && vertexId != keyAtPosition) {
position = (position + 1) & mask
keyAtPosition = keys(position)
}
// We can only remove the entry if it was found.
if (keyAtPosition != 0) {
keys(position) = 0
values(position) = null
numberOfElements -= 1
if (optimize) {
optimizeFromPosition((position + 1) & mask)
}
}
}
// Try to reinsert all elements that are not optimally placed until an empty position is found.
// See http://stackoverflow.com/questions/279539/best-way-to-remove-an-entry-from-a-hash-table
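  // Worked micro-example (added): with mask = 7, suppose k1 and k2 both hash
  // to slot 3, so k2 was probed into slot 4. Removing k1 empties slot 3; the
  // loop below re-inserts k2 at slot 3, so a later lookup for k2 (which
  // starts probing at slot 3) still finds it.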
private[this] final def optimizeFromPosition(startingPosition: Int) {
var currentPosition = startingPosition
var keyAtPosition = keys(currentPosition)
while (isCurrentPositionOccupied) {
val perfectPositionForEntry = keyToPosition(keyAtPosition)
if (perfectPositionForEntry != currentPosition) {
// We try to optimize the placement of the entry by removing and then reinserting it.
val vertex = values(currentPosition)
removeCurrentEntry
putWithKey(keyAtPosition, vertex)
}
advance
}
def advance {
currentPosition = ((currentPosition + 1) & mask)
keyAtPosition = keys(currentPosition)
}
def isCurrentPositionOccupied = {
keyAtPosition != 0
}
def removeCurrentEntry {
keys(currentPosition) = 0
values(currentPosition) = null
numberOfElements -= 1
}
}
final def get(vertexId: Long): Vertex[Long, _, Long, Any] = {
var position = keyToPosition(vertexId)
var keyAtPosition = keys(position)
while (keyAtPosition != 0 && vertexId != keyAtPosition) {
position = (position + 1) & mask
keyAtPosition = keys(position)
}
if (keyAtPosition != 0) {
values(position)
} else {
null
}
}
// Only put if no vertex with the same id is present. If a vertex was put, return true.
final def put(vertex: Vertex[Long, _, Long, Any]): Boolean = {
val success = putWithKey(vertex.id, vertex)
success
}
private[this] final def putWithKey(key: Long, vertex: Vertex[Long, _, Long, Any]): Boolean = {
var position = keyToPosition(key)
var keyAtPosition = keys(position)
while (keyAtPosition != 0 && key != keyAtPosition) {
position = (position + 1) & mask
keyAtPosition = keys(position)
}
var doPut = keyAtPosition == 0
    // Only put if there is no vertex with the same id yet.
if (doPut) {
keys(position) = key
values(position) = vertex
numberOfElements += 1
if (numberOfElements >= maxElements) {
tryDouble
if (numberOfElements >= maxSize) {
throw new OutOfMemoryError("The hash map is full and cannot be expanded any further.")
}
}
}
doPut
}
private[this] final def keyToPosition(efficientIndexPattern: Long): Int = {
Hashing.hash(efficientIndexPattern.extractFirst, efficientIndexPattern.extractSecond) & mask
}
private[this] final def nextPowerOfTwo(x: Int): Int = {
var r = x - 1
r |= r >> 1
r |= r >> 2
r |= r >> 4
r |= r >> 8
r |= r >> 16
r + 1
}
}
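
// Hypothetical usage sketch (added): `v` stands for any concrete
// Vertex[Long, _, Long, Any] whose id is non-zero, since 0 marks an empty
// slot in the key array.
private object VertexMapSketch {
  def demo(v: Vertex[Long, _, Long, Any]): Unit = {
    val map = new TripleRushVertexMap()
    map.put(v)
    assert(map.get(v.id) eq v)
    map.process(_ => (), Some(1)) // processes and removes the vertex
    assert(map.size == 0)
  }
}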
| jacqueslk/triplerush-filter | src/main/scala/com/signalcollect/triplerush/util/TripleRushVertexMap.scala | Scala | apache-2.0 | 9,755 |
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\\
* @ @ *
* # # # # (c) 2017 CAB *
* # # # # # # *
* # # # # # # # # # # # # *
* # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* @ @ *
\\* * http://github.com/alexcab * * * * * * * * * * * * * * * * * * * * * * * * * */
package mathact.core.plumbing.fitting.pipes
import mathact.core.bricks.plumbing.fitting.Socket
import mathact.core.model.enums.DequeueAlgo
import mathact.core.plumbing.Pump
import mathact.core.plumbing.fitting.Pipe
import mathact.core.plumbing.fitting.flows.InflowLike
/** Wrapper for Inlet
* Created by CAB on 24.08.2016.
*/
private[core] class InPipe[H] (
private[core] val in: InflowLike[H],
private[core] val inletName: Option[String],
private[core] val pump: Pump,
private[core] val dequeue: DequeueAlgo)
extends Pipe[H] with Socket[H]{
//Construction
private[core] val (blockId, inletId) = pump.addInlet(this, inletName)
//Methods
  override def toString: String = s"InPipe(in: $in, inletName: $inletName, pump: $pump)"
def processValue(value: Any): Unit = in.processValue(value)}
| AlexCAB/MathAct | mathact_core/src/main/scala/mathact/core/plumbing/fitting/pipes/InPipe.scala | Scala | mit | 1,920 |
/**
* Copyright 2015 Frank Austin Nothaft
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fnothaft.ananas.debruijn
import net.fnothaft.ananas.models.{ CanonicalKmer, IntMer}
import org.apache.spark.graphx.Edge
import org.apache.spark.rdd.RDD
object TransientKmerVertex extends Serializable {
private[debruijn] def merge[T](v1: TransientKmerVertex[T],
v2: TransientKmerVertex[T]): TransientKmerVertex[T] = {
TransientKmerVertex(v1.forwardTerminals ++ v2.forwardTerminals,
v1.forwardStronglyConnected ++ v2.forwardStronglyConnected,
v1.forwardLinked ++ v2.forwardLinked,
v1.reverseTerminals ++ v2.reverseTerminals,
v1.reverseStronglyConnected ++ v2.reverseStronglyConnected,
v1.reverseLinked ++ v2.reverseLinked)
}
private[debruijn] def mergeCanon[T](k1: CanonicalKmer,
v1: TransientKmerVertex[T],
k2: CanonicalKmer,
v2: TransientKmerVertex[T]): (CanonicalKmer,
TransientKmerVertex[T]) = {
val i1 = k1.asInstanceOf[IntMer]
val i2 = k2.asInstanceOf[IntMer]
assert(k1.sameExceptForOrientation(k2),
"Asked to merge %s and %s, which are not canonical twins. I1: %d %d %s, I2: %d %d %s.".format(
k1, k2,
i1.kmer, i1.mask, i1.isOriginal,
i2.kmer, i2.mask, i2.isOriginal))
val (fwdKmer, fwdVertex, revVertex) = if (k1.isOriginal) {
(k1, v1, v2)
} else {
(k2, v2, v1)
}
(fwdKmer, TransientKmerVertex(fwdVertex.forwardTerminals ++ revVertex.reverseTerminals,
fwdVertex.forwardStronglyConnected ++ revVertex.reverseStronglyConnected,
fwdVertex.forwardLinked ++ revVertex.reverseLinked,
fwdVertex.reverseTerminals ++ revVertex.forwardTerminals,
fwdVertex.reverseStronglyConnected ++ revVertex.forwardStronglyConnected,
fwdVertex.reverseLinked ++ revVertex.forwardLinked))
}
private[debruijn] def toEdges[T](rdd: RDD[(CanonicalKmer, TransientKmerVertex[T])]): RDD[Edge[Unit]] = {
rdd.flatMap(kv => {
val (kmer, vertex) = kv
val srcId = kmer.longHash
// merge map values and eliminate dupes
(vertex.forwardStronglyConnected.values ++ vertex.forwardLinked.values ++
vertex.reverseStronglyConnected.values ++ vertex.reverseLinked.values)
.toSet
.map((v: Long) => (new Edge[Unit](srcId, v)))
})
}
}
case class TransientKmerVertex[T](forwardTerminals: Set[(T, Int)] = Set.empty[(T, Int)],
forwardStronglyConnected: Map[(T, Int), Long] = Map.empty[(T, Int), Long],
forwardLinked: Map[(T, Int), Long] = Map.empty[(T, Int), Long],
reverseTerminals: Set[(T, Int)] = Set.empty[(T, Int)],
reverseStronglyConnected: Map[(T, Int), Long] = Map.empty[(T, Int), Long],
reverseLinked: Map[(T, Int), Long] = Map.empty[(T, Int), Long]) {
}
| fnothaft/ananas | src/main/scala/net/fnothaft/ananas/debruijn/TransientKmerVertex.scala | Scala | apache-2.0 | 3,863 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.csv
import java.math.BigDecimal
import java.util.Locale
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
class UnivocityParserSuite extends SparkFunSuite {
private val parser = new UnivocityParser(
StructType(Seq.empty),
new CSVOptions(Map.empty[String, String], false, "GMT"))
private def assertNull(v: Any) = assert(v == null)
test("Can parse decimal type values") {
val stringValues = Seq("10.05", "1,000.01", "158,058,049.001")
val decimalValues = Seq(10.05, 1000.01, 158058049.001)
val decimalType = new DecimalType()
stringValues.zip(decimalValues).foreach { case (strVal, decimalVal) =>
val decimalValue = new BigDecimal(decimalVal.toString)
val options = new CSVOptions(Map.empty[String, String], false, "GMT")
assert(parser.makeConverter("_1", decimalType, options = options).apply(strVal) ===
Decimal(decimalValue, decimalType.precision, decimalType.scale))
}
}
test("Nullable types are handled") {
val types = Seq(ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType,
BooleanType, DecimalType.DoubleDecimal, TimestampType, DateType, StringType)
// Nullable field with nullValue option.
types.foreach { t =>
// Tests that a custom nullValue.
val nullValueOptions = new CSVOptions(Map("nullValue" -> "-"), false, "GMT")
val converter =
parser.makeConverter("_1", t, nullable = true, options = nullValueOptions)
assertNull(converter.apply("-"))
assertNull(converter.apply(null))
// Tests that the default nullValue is empty string.
val options = new CSVOptions(Map.empty[String, String], false, "GMT")
assertNull(parser.makeConverter("_1", t, nullable = true, options = options).apply(""))
}
// Not nullable field with nullValue option.
types.foreach { t =>
// Casts a null to not nullable field should throw an exception.
val options = new CSVOptions(Map("nullValue" -> "-"), false, "GMT")
val converter =
parser.makeConverter("_1", t, nullable = false, options = options)
var message = intercept[RuntimeException] {
converter.apply("-")
}.getMessage
assert(message.contains("null value found but field _1 is not nullable."))
message = intercept[RuntimeException] {
converter.apply(null)
}.getMessage
assert(message.contains("null value found but field _1 is not nullable."))
}
// If nullValue is different with empty string, then, empty string should not be casted into
// null.
Seq(true, false).foreach { b =>
val options = new CSVOptions(Map("nullValue" -> "null"), false, "GMT")
val converter =
parser.makeConverter("_1", StringType, nullable = b, options = options)
assert(converter.apply("") == UTF8String.fromString(""))
}
}
test("Throws exception for empty string with non null type") {
val options = new CSVOptions(Map.empty[String, String], false, "GMT")
val exception = intercept[RuntimeException]{
parser.makeConverter("_1", IntegerType, nullable = false, options = options).apply("")
}
assert(exception.getMessage.contains("null value found but field _1 is not nullable."))
}
test("Types are cast correctly") {
val options = new CSVOptions(Map.empty[String, String], false, "GMT")
assert(parser.makeConverter("_1", ByteType, options = options).apply("10") == 10)
assert(parser.makeConverter("_1", ShortType, options = options).apply("10") == 10)
assert(parser.makeConverter("_1", IntegerType, options = options).apply("10") == 10)
assert(parser.makeConverter("_1", LongType, options = options).apply("10") == 10)
assert(parser.makeConverter("_1", FloatType, options = options).apply("1.00") == 1.0)
assert(parser.makeConverter("_1", DoubleType, options = options).apply("1.00") == 1.0)
assert(parser.makeConverter("_1", BooleanType, options = options).apply("true") == true)
val timestampsOptions =
new CSVOptions(Map("timestampFormat" -> "dd/MM/yyyy hh:mm"), false, "GMT")
val customTimestamp = "31/01/2015 00:00"
val expectedTime = timestampsOptions.timestampFormat.parse(customTimestamp).getTime
val castedTimestamp =
parser.makeConverter("_1", TimestampType, nullable = true, options = timestampsOptions)
.apply(customTimestamp)
assert(castedTimestamp == expectedTime * 1000L)
val customDate = "31/01/2015"
val dateOptions = new CSVOptions(Map("dateFormat" -> "dd/MM/yyyy"), false, "GMT")
val expectedDate = dateOptions.dateFormat.parse(customDate).getTime
val castedDate =
parser.makeConverter("_1", DateType, nullable = true, options = dateOptions)
        .apply(customDate)
assert(castedDate == DateTimeUtils.millisToDays(expectedDate))
val timestamp = "2015-01-01 00:00:00"
assert(parser.makeConverter("_1", TimestampType, options = options).apply(timestamp) ==
DateTimeUtils.stringToTime(timestamp).getTime * 1000L)
assert(parser.makeConverter("_1", DateType, options = options).apply("2015-01-01") ==
DateTimeUtils.millisToDays(DateTimeUtils.stringToTime("2015-01-01").getTime))
}
test("Throws exception for casting an invalid string to Float and Double Types") {
val options = new CSVOptions(Map.empty[String, String], false, "GMT")
val types = Seq(DoubleType, FloatType)
val input = Seq("10u000", "abc", "1 2/3")
types.foreach { dt =>
input.foreach { v =>
val message = intercept[NumberFormatException] {
parser.makeConverter("_1", dt, options = options).apply(v)
}.getMessage
assert(message.contains(v))
}
}
}
test("Float NaN values are parsed correctly") {
val options = new CSVOptions(Map("nanValue" -> "nn"), false, "GMT")
val floatVal: Float = parser.makeConverter(
"_1", FloatType, nullable = true, options = options
).apply("nn").asInstanceOf[Float]
// Java implements the IEEE-754 floating point standard which guarantees that any comparison
// against NaN will return false (except != which returns true)
assert(floatVal != floatVal)
}
test("Double NaN values are parsed correctly") {
val options = new CSVOptions(Map("nanValue" -> "-"), false, "GMT")
val doubleVal: Double = parser.makeConverter(
"_1", DoubleType, nullable = true, options = options
).apply("-").asInstanceOf[Double]
assert(doubleVal.isNaN)
}
test("Float infinite values can be parsed") {
val negativeInfOptions = new CSVOptions(Map("negativeInf" -> "max"), false, "GMT")
val floatVal1 = parser.makeConverter(
"_1", FloatType, nullable = true, options = negativeInfOptions
).apply("max").asInstanceOf[Float]
assert(floatVal1 == Float.NegativeInfinity)
val positiveInfOptions = new CSVOptions(Map("positiveInf" -> "max"), false, "GMT")
val floatVal2 = parser.makeConverter(
"_1", FloatType, nullable = true, options = positiveInfOptions
).apply("max").asInstanceOf[Float]
assert(floatVal2 == Float.PositiveInfinity)
}
test("Double infinite values can be parsed") {
val negativeInfOptions = new CSVOptions(Map("negativeInf" -> "max"), false, "GMT")
val doubleVal1 = parser.makeConverter(
"_1", DoubleType, nullable = true, options = negativeInfOptions
).apply("max").asInstanceOf[Double]
assert(doubleVal1 == Double.NegativeInfinity)
val positiveInfOptions = new CSVOptions(Map("positiveInf" -> "max"), false, "GMT")
val doubleVal2 = parser.makeConverter(
"_1", DoubleType, nullable = true, options = positiveInfOptions
).apply("max").asInstanceOf[Double]
assert(doubleVal2 == Double.PositiveInfinity)
}
}
| bravo-zhang/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/UnivocityParserSuite.scala | Scala | apache-2.0 | 8,703 |
/*
* Copyright (C) 2011 Mathias Doenitz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.backchat.http
package parser
import org.parboiled.scala._
import BasicRules._
private[parser] trait AcceptRangesHeader {
this: Parser with ProtocolParameterRules ⇒
  def ACCEPT_RANGES = rule(
    RangeUnitsDef ~ EOI ~~> (HttpHeaders.`Accept-Ranges`(_)))
def RangeUnitsDef = rule {
NoRangeUnitsDef | zeroOrMore(RangeUnit, separator = ListSep)
}
def NoRangeUnitsDef = rule {
"none" ~ push(List.empty[RangeUnit])
}
} | backchatio/http-parsers | src/main/scala/io/backchat/http/parser/AcceptRangesHeader.scala | Scala | apache-2.0 | 1,051 |
package it.dtk.twitter.entities
/**
* Created by gigitsu on 30/06/15.
*/
case class Polygon(bounds: LineString, holes: Seq[LineString]) extends Shape
| DataToKnowledge/wheretolive-backend | entities/src/main/scala/it/dtk/twitter/entities/Polygon.scala | Scala | gpl-2.0 | 153 |
package breeze.stats.distributions
/*
Copyright 2009 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import org.scalatest._;
import org.scalatest.junit._;
import org.scalatest.prop._;
import org.scalacheck._;
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
class WaldTest extends FunSuite with Checkers with MomentsTestBase[Double] {
import Arbitrary.arbitrary
override val numSamples = 40000
def asDouble(x: Double) = x
def fromDouble(x: Double) = x
implicit def arbDistr = Arbitrary {
for(location <- arbitrary[Double].map{x => math.abs(x) % 5.0 + 1.1}; // Wald pdf at 0 not defined when location == 1
scale <- arbitrary[Double].map {x => math.abs(x) % 4.0 + 1.0}) yield new Wald(location,scale)(RandBasis.mt0)
}
}
| eponvert/breeze | src/test/scala/breeze/stats/distributions/WaldTest.scala | Scala | apache-2.0 | 1,277 |
package mesosphere.mesos
import mesosphere.marathon.state.AppDefinition
import scala.collection.immutable.Seq
import scala.collection.mutable
import scala.util.Random
object EnvironmentHelper {
val maxEnvironmentVarLength = 512
val labelEnvironmentKeyPrefix = "MARATHON_APP_LABEL_"
val maxVariableLength = maxEnvironmentVarLength - labelEnvironmentKeyPrefix.length
/**
* @param name is an optional port name
* @param port is either a container port (if port mappings are in use) or else a host port; may be zero
*/
case class PortRequest(name: Option[String], port: Int)
object PortRequest {
def apply(name: String, port: Int): PortRequest = PortRequest(Some(name), port)
def apply(port: Int): PortRequest = PortRequest(None, port)
}
// portsEnv generates $PORT{x} and $PORT_{y} environment variables, wherein `x` is an index into
// the portDefinitions or portMappings array and `y` is a non-zero port specifically requested by
// the application specification.
//
// @param requestedPorts are either declared container ports (if port mappings are specified) or host ports;
// may be 0's
// @param effectivePorts resolved non-dynamic host ports allocated from Mesos resource offers
// @return a dictionary of variables that should be added to a tasks environment
//scalastyle:off cyclomatic.complexity method.length
def portsEnv(
requestedPorts: Seq[PortRequest],
effectivePorts: Seq[Option[Int]]): Map[String, String] = {
if (effectivePorts.isEmpty) {
Map.empty
} else {
val env = Map.newBuilder[String, String]
val generatedPortsBuilder = Map.newBuilder[Int, Int] // index -> container port
object ContainerPortGenerator {
// track which port numbers are already referenced by PORT_xxx envvars
lazy val consumedPorts = mutable.Set(requestedPorts.map(_.port): _*) ++= effectivePorts.flatten
val maxPort: Int = 65535 - 1024
      // Carefully pick a container port that doesn't overlap with other
      // ports used by this container, and avoid ports in the range (0 - 1024).
def next: Int = {
val p = Random.nextInt(maxPort) + 1025
if (!consumedPorts.contains(p)) {
consumedPorts += p
p
} else next // TODO(jdef) **highly** unlikely, but still possible that the port range could be exhausted
}
}
effectivePorts.zipWithIndex.foreach {
// matches fixed or dynamic host port assignments
case (Some(effectivePort), portIndex) =>
env += (s"PORT$portIndex" -> effectivePort.toString)
// matches container-port-only mappings; no host port was defined for this mapping
case (None, portIndex) =>
requestedPorts.lift(portIndex) match {
case Some(PortRequest(_, containerPort)) if containerPort == AppDefinition.RandomPortValue =>
val randomPort = ContainerPortGenerator.next
generatedPortsBuilder += portIndex -> randomPort
env += (s"PORT$portIndex" -> randomPort.toString)
case Some(PortRequest(_, containerPort)) if containerPort != AppDefinition.RandomPortValue =>
env += (s"PORT$portIndex" -> containerPort.toString)
case _ => //ignore
}
}
val generatedPorts = generatedPortsBuilder.result
requestedPorts.zip(effectivePorts).zipWithIndex.foreach {
case ((PortRequest(_, requestedPort), Some(effectivePort)), _) if requestedPort != AppDefinition.RandomPortValue =>
env += (s"PORT_$requestedPort" -> effectivePort.toString)
case ((PortRequest(_, requestedPort), Some(effectivePort)), _) if requestedPort == AppDefinition.RandomPortValue =>
env += (s"PORT_$effectivePort" -> effectivePort.toString)
case ((PortRequest(_, requestedPort), None), _) if requestedPort != AppDefinition.RandomPortValue =>
env += (s"PORT_$requestedPort" -> requestedPort.toString)
case ((PortRequest(_, requestedPort), None), portIndex) if requestedPort == AppDefinition.RandomPortValue =>
val generatedPort = generatedPorts(portIndex)
env += (s"PORT_$generatedPort" -> generatedPort.toString)
}
requestedPorts.map(_.name).zip(effectivePorts).foreach {
case (Some(portName), Some(effectivePort)) =>
env += (s"PORT_${portName.toUpperCase}" -> effectivePort.toString)
// TODO(jdef) port name envvars for generated container ports
case _ =>
}
val allAssigned = effectivePorts.flatten ++ generatedPorts.values
allAssigned.headOption.foreach { port => env += ("PORT" -> port.toString) }
env += ("PORTS" -> allAssigned.mkString(","))
env.result()
}
}
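
  // Hedged example (added): one named mapping whose host port resolved to
  // 31000, plus one fully dynamic request (port 0). Per the logic above this
  // yields PORT0, PORT_8080 and PORT_HTTP -> 31000, PORT1 and a
  // PORT_<generated> entry for a random container port, PORT -> 31000 and
  // PORTS -> "31000,<generated>".
  private def portsEnvSketch: Map[String, String] =
    portsEnv(
      Seq(PortRequest("http", 8080), PortRequest(0)),
      Seq(Some(31000), None))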
def labelsToEnvVars(labels: Map[String, String]): Map[String, String] = {
def escape(name: String): String = name.replaceAll("[^a-zA-Z0-9_]+", "_").toUpperCase
val validLabels = labels.collect {
case (key, value) if key.length < maxVariableLength
&& value.length < maxEnvironmentVarLength => escape(key) -> value
}
val names = Map("MARATHON_APP_LABELS" -> validLabels.keys.mkString(" "))
val values = validLabels.map { case (key, value) => s"$labelEnvironmentKeyPrefix$key" -> value }
names ++ values
}
}
| Caerostris/marathon | src/main/scala/mesosphere/mesos/EnvironmentHelper.scala | Scala | apache-2.0 | 5,319 |
package com.eltimn.scamongo.field
/*
* Copyright 2010 Tim Nelson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
import _root_.net.liftweb.common.{Box, Empty, Failure, Full}
import _root_.net.liftweb.http.js.JE.{JsNull, Str}
import _root_.net.liftweb.json.JsonAST.{JNothing, JNull, JString, JValue}
import _root_.net.liftweb.record.{Field, Record}
import _root_.net.liftweb.record.FieldHelpers
import com.mongodb.{ObjectId, DBRef}
/*
* Field for storing an ObjectId
*/
class ObjectIdField[OwnerType <: MongoRecord[OwnerType]](rec: OwnerType)
extends Field[ObjectId, OwnerType] {
def asJs = valueBox.map(v => Str(v.toString)) openOr JsNull
def asJValue: JValue = valueBox.map(v => JString(v.toString)) openOr (JNothing: JValue)
def setFromJValue(jvalue: JValue): Box[ObjectId] = jvalue match {
case JNothing|JNull if optional_? => setBox(Empty)
case JString(s) => setFromString(s)
case other => setBox(FieldHelpers.expectedA("JString", other))
}
def asXHtml = <div></div>
def defaultValue = ObjectId.get
def setFromAny(in: Any): Box[ObjectId] = in match {
case oid: ObjectId => Full(set(oid))
case Some(oid: ObjectId) => Full(set(oid))
case Full(oid: ObjectId) => Full(set(oid))
    case seq: Seq[_] if !seq.isEmpty => setFromAny(seq.head)
case (s: String) :: _ => setFromString(s)
case null => Full(set(null))
case s: String => setFromString(s)
case None | Empty | Failure(_, _, _) => Full(set(null))
case o => setFromString(o.toString)
}
def setFromString(in: String): Box[ObjectId] = {
ObjectId.isValid(in) match {
case true => Full(set(new ObjectId(in)))
case false => Empty
}
}
def toForm = <div></div>
def owner = rec
}
| eltimn/scamongo | src/main/scala/com/eltimn/scamongo/field/ObjectIdField.scala | Scala | apache-2.0 | 2,223 |
package tools.extractors
import org.specs2.mutable._
import org.mockito.Matchers._
import org.specs2.mock._
import tools.Context
import play.api.Configuration
import java.io.File
import tools.storage.FileSystem
class PdfToPngSpec extends SpecificationWithJUnit with Specification with Mockito {
val base = new File(".").getCanonicalPath
def delete(f:File) {
if (f.isDirectory) for (file <- f.listFiles()) delete(file)
f.delete()
}
delete (new File(base.concat("/target/test-out")))
"Extract method " should {
"do nothing if storage/stream are not defined" in {
val conf = mock[Configuration]
conf.getString("storage.type", None) returns None
Context.configure(conf)
PdfToPng.extract("some") must_== None
}
"extract correctly on local file system" in {
Context.isProd = () => false
Context.isDev = () => false
Context.isTest = () => true
val conf = mock[Configuration]
conf.getString("storage.type", None) returns Some("fs")
conf.getString("fs.inbox", None) returns Some(base.concat("/test/assets"))
conf.getString("fs.outbox", None) returns Some(base.concat("/target/test-out"))
conf.getString("conversion.scale", None) returns Some("1.5")
conf.getString("conversion.preview_scale", None) returns Some("0.3")
conf.getBoolean("conversion.allow_config_display") returns Some(false)
Context.configure(conf)
Context.getStorage must_!= None
Context.getStorage must_== Some(FileSystem)
val result = PdfToPng.extract("file1.pdf")
result must_!= None
val meta = result.get
meta must_!= null
for (p <- meta.pages) {
val file = new File(p.key)
file.exists must_== true
file.length > 0 must_== true
}
meta.pages.size > 0 must_== true
}
}
}
| vba/grom | rest/test/tools/extractors/PdfToPngSpec.scala | Scala | apache-2.0 | 1,746 |
/***
* Copyright 2014 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker.util
import scala.collection.immutable.TreeMap
object CaseInsensitiveStringOrdering extends Ordering[String] {
override def compare(x: String, y: String): Int = x compareToIgnoreCase y
}
class HeaderMap private (val headers : TreeMap[String, List[String]])
extends Map[String, List[String]] {
def this() = this(new TreeMap[String, List[String]]()(CaseInsensitiveStringOrdering))
def addHeader (name : String, value : String) : HeaderMap = {
new HeaderMap(headers + (name -> (headers.getOrElse(name, List()) :+ value)))
}
def removeHeader (name : String, value : String) : HeaderMap = {
get(name) match {
case Some(l) => l.filter(a => a != value) match {
case h :: t => removeHeaders(name).addHeaders(name, h::t)
case Nil => removeHeaders(name)
}
case None => this
}
}
def addHeaders (name : String, values : List[String]) : HeaderMap = {
new HeaderMap(headers + (name -> (headers.getOrElse(name, List()) ::: values)))
}
def addHeaders (otherHeaders : HeaderMap) : HeaderMap = {
var retHeaders = this
otherHeaders.foreach (h_v => retHeaders = retHeaders.addHeaders(h_v._1, h_v._2))
retHeaders
}
def removeHeaders (name : String) : HeaderMap = {
(this - name).asInstanceOf[HeaderMap]
}
// Members declared in scala.collection.immutable.Map
override def +[B1 >: List[String]](kv: (String, B1)): scala.collection.immutable.Map[String,B1] = {
new HeaderMap((headers + kv).asInstanceOf[TreeMap[String, List[String]]])
}
// Members declared in scala.collection.MapLike
override def -(key: String): scala.collection.immutable.Map[String,List[String]] = {
new HeaderMap ((headers - key).asInstanceOf[TreeMap[String, List[String]]])
}
override def get(key: String): Option[List[String]] = headers.get(key)
override def iterator: Iterator[(String, List[String])] = headers.iterator
}
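
// Added usage sketch: lookups are case-insensitive thanks to
// CaseInsensitiveStringOrdering, and values accumulate per header name.
private object HeaderMapSketch {
  def demo(): Unit = {
    val h = new HeaderMap().addHeader("Accept", "a/b").addHeader("ACCEPT", "c/d")
    assert(h.get("accept") == Some(List("a/b", "c/d")))
    assert(h.removeHeader("Accept", "a/b").get("accept") == Some(List("c/d")))
  }
}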
| tylerroyal/api-checker | core/src/main/scala/com/rackspace/com/papi/components/checker/util/HeaderMap.scala | Scala | apache-2.0 | 2,580 |
// Copyright 2015 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.rogue.test
import com.mongodb.{BasicDBObjectBuilder, DB, DBCollection, DBObject, MongoClient, ServerAddress, WriteConcern}
import io.fsq.field.OptionalField
import io.fsq.rogue.{
DBCollectionFactory,
InitialState,
LegacyMongo,
MongoJavaDriverAdapter,
Query,
QueryExecutor,
QueryOptimizer,
Rogue,
RogueReadSerializer,
RogueWriteSerializer
}
import io.fsq.rogue.MongoHelpers.{AndCondition, MongoSelect}
import io.fsq.rogue.index.UntypedMongoIndex
import org.junit.{Before, Test}
import org.specs2.matcher.JUnitMustMatchers
/** A trivial ORM layer that implements the interfaces rogue needs. The goal is
* to make sure that rogue-core works without the assistance of rogue-lift.
* Ideally this would be even smaller; as it is, I needed to copy-paste some
* code from the Lift implementations. */
object TrivialORM {
trait Record {
type Self >: this.type <: Record
def meta: Meta[Self]
}
trait Meta[R] {
def collectionName: String
def fromDBObject(dbo: DBObject): R
def toDBObject(record: R): DBObject
}
lazy val mongoClient = {
val (host, port) = Option(System.getProperty("default.mongodb.server"))
.map({ str =>
val arr = str.split(':')
(arr(0), arr(1).toInt)
})
.getOrElse(("localhost", 27017))
new MongoClient(new ServerAddress(host, port))
}
type MB = Meta[_]
class MyDBCollectionFactory(db: DB) extends DBCollectionFactory[MB, Record] {
override def getDBCollection[M <: MB](query: Query[M, _, _]): DBCollection = {
db.getCollection(query.meta.collectionName)
}
override def getPrimaryDBCollection[M <: MB](query: Query[M, _, _]): DBCollection = {
db.getCollection(query.meta.collectionName)
}
override def getPrimaryDBCollection(record: Record): DBCollection = {
db.getCollection(record.meta.collectionName)
}
override def getInstanceName[M <: MB](query: Query[M, _, _]): String = {
db.getName
}
override def getInstanceName(record: Record): String = {
db.getName
}
override def getIndexes[M <: MB](query: Query[M, _, _]): Option[Seq[UntypedMongoIndex]] = {
None
}
}
class MyQueryExecutor extends QueryExecutor[Meta[_], Record] {
override val adapter = new MongoJavaDriverAdapter[Meta[_], Record](
new MyDBCollectionFactory(LegacyMongo.getDB(mongoClient, "test"))
)
override val optimizer = new QueryOptimizer
override val defaultWriteConcern: WriteConcern = WriteConcern.ACKNOWLEDGED
protected def readSerializer[M <: Meta[_], R](
meta: M,
select: Option[MongoSelect[M, R]]
): RogueReadSerializer[R] = new RogueReadSerializer[R] {
override def fromDBObject(dbo: DBObject): R = select match {
case Some(MongoSelect(Nil, transformer)) =>
// A MongoSelect clause exists, but has empty fields. Return null.
// This is used for .exists(), where we just want to check the number
// of returned results is > 0.
transformer(null)
case Some(MongoSelect(fields, transformer)) =>
transformer(fields.map(f => f.valueOrDefault(Option(dbo.get(f.field.name)))))
case None =>
meta.fromDBObject(dbo).asInstanceOf[R]
}
}
override protected def writeSerializer(record: Record): RogueWriteSerializer[Record] =
new RogueWriteSerializer[Record] {
override def toDBObject(record: Record): DBObject = {
val meta = record.meta
record.meta.toDBObject(record)
}
}
}
object Implicits extends Rogue {
implicit def meta2Query[M <: Meta[R], R](meta: M with Meta[R]): Query[M, R, InitialState] = {
Query[M, R, InitialState](
meta,
meta.collectionName,
None,
None,
None,
None,
AndCondition(Nil, None),
None,
None,
None
)
}
}
}
case class SimpleRecord(a: Int, b: String) extends TrivialORM.Record {
override type Self = SimpleRecord
override def meta: SimpleRecord.type = SimpleRecord
}
object SimpleRecord extends TrivialORM.Meta[SimpleRecord] {
val a = new OptionalField[Int, SimpleRecord.type] { override val owner = SimpleRecord; override val name = "a" }
val b = new OptionalField[String, SimpleRecord.type] { override val owner = SimpleRecord; override val name = "b" }
override val collectionName = "simple_records"
override def fromDBObject(dbo: DBObject): SimpleRecord = {
new SimpleRecord(dbo.get(a.name).asInstanceOf[Int], dbo.get(b.name).asInstanceOf[String])
}
override def toDBObject(record: SimpleRecord): DBObject = {
(BasicDBObjectBuilder.start
.add(a.name, record.a)
.add(b.name, record.b)
.get)
}
}
// TODO: Everything in the rogue-lift tests should move here, except for the lift-specific extensions.
class TrivialORMQueryTest extends JUnitMustMatchers {
val executor = new TrivialORM.MyQueryExecutor
import TrivialORM.Implicits._
@Before
  def cleanUpMongo: Unit = {
executor.bulkDelete_!!(SimpleRecord)
}
@Test
def canBuildQuery: Unit = {
(SimpleRecord: Query[SimpleRecord.type, SimpleRecord, InitialState])
.toString() must_== """db.simple_records.find({})"""
SimpleRecord.where(_.a eqs 1).toString() must_== """db.simple_records.find({"a": 1})"""
}
@Test
def canExecuteQuery: Unit = {
executor.fetch(SimpleRecord.where(_.a eqs 1)) must_== Nil
executor.count(SimpleRecord) must_== 0
}
@Test
def canUpsertAndGetResults: Unit = {
executor.count(SimpleRecord) must_== 0
executor.upsertOne(SimpleRecord.modify(_.a setTo 1).and(_.b setTo "foo"))
executor.count(SimpleRecord) must_== 1
val results = executor.fetch(SimpleRecord.where(_.a eqs 1))
results.size must_== 1
results(0).a must_== 1
results(0).b must_== "foo"
executor.fetch(SimpleRecord.where(_.a eqs 1).select(_.a)) must_== List(Some(1))
executor.fetch(SimpleRecord.where(_.a eqs 1).select(_.b)) must_== List(Some("foo"))
executor.fetch(SimpleRecord.where(_.a eqs 1).select(_.a, _.b)) must_== List((Some(1), Some("foo")))
}
}
| foursquare/fsqio | test/jvm/io/fsq/rogue/test/TrivialORMQueryTest.scala | Scala | apache-2.0 | 6,201 |
package com.twitter.finagle.toggle
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.finagle.toggle.Toggle.Metadata
import com.twitter.logging.Logger
import java.nio.charset.StandardCharsets.UTF_8
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicReference
import java.util.zip.CRC32
import scala.annotation.varargs
import scala.collection.JavaConverters._
import scala.collection.{breakOut, immutable, mutable}
import scala.util.hashing.MurmurHash3
/**
* A collection of Int-typed [[Toggle toggles]] which can be
* used to build a form of feature toggles which allow for modifying
* behavior without changing code.
*
* Expected usage is for code to have [[Toggle toggles]] passed into
* their constructors instead of dynamically creating new [[Toggle toggles]]
* on every call.
*
* @see [[Toggle]]
* @see [[ServiceLoadedToggleMap]] and [[StandardToggleMap]] for typical usage
* entry points.
* @see [[http://martinfowler.com/articles/feature-toggles.html Feature Toggles]]
* for detailed discussion on the topic.
*/
abstract class ToggleMap { self =>
/**
* Get a [[Toggle]] for this `id`.
*
* The `Toggle.isDefined` method should return `false` if the
* [[ToggleMap]] does not know about that [[Toggle]]
* or it is currently not "operating" on that `id`.
*
* @param id the identifying name of the `Toggle`.
* These should generally be fully qualified names to avoid conflicts
* between libraries. For example, "com.twitter.finagle.CoolThing".
*/
def apply(id: String): Toggle[Int]
def iterator: Iterator[Toggle.Metadata]
/**
* Creates a [[ToggleMap]] which uses `this` before `that`.
*
* [[apply]] returns a [[Toggle]] that uses the [[Toggle]] from `this`
* if it `isDefinedAt` for the input, before trying `that`.
*
* [[iterator]] includes metadata from both `self` and `that`,
* with `self`'s metadata taking precedence on conflicting ids.
* Note however that if a `ToggleMetadata.description` is not defined on `self`,
* the description from `that` will be preferred. This is done because many
* sources of `ToggleMaps` do not have a description defined and we want to
* surface that information.
*/
def orElse(that: ToggleMap): ToggleMap = {
new ToggleMap with ToggleMap.Composite {
override def toString: String =
s"${self.toString}.orElse(${that.toString})"
def apply(id: String): Toggle[Int] = {
self(id).orElse(that(id))
}
def iterator: Iterator[Metadata] = {
val byName = mutable.Map.empty[String, Toggle.Metadata]
that.iterator.foreach { md =>
byName.put(md.id, md)
}
self.iterator.foreach { md =>
val mdWithDesc = md.description match {
case Some(_) => md
case None => md.copy(description =
byName.get(md.id).flatMap(ToggleMap.MdDescFn))
}
byName.put(md.id, mdWithDesc)
}
byName.valuesIterator
}
def components: Seq[ToggleMap] = {
Seq(self, that)
}
}
}
}
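
// Added sketch: composing two maps and evaluating a toggle for a
// hypothetical id; `orElse` consults `a` first, then falls back to `b`.
private object ToggleMapUsageSketch {
  def evaluate(a: ToggleMap, b: ToggleMap, input: Int): Boolean = {
    val toggle = a.orElse(b)("com.example.UseNewPath")
    toggle.isDefinedAt(input) && toggle(input)
  }
}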
object ToggleMap {
// more or less picked out of thin air as the initial hashing value
private[this] val HashSeed = 1300476044
/**
* Used to create a `Toggle[Int]` that hashes its inputs to
* `apply` and `isDefinedAt` in order to promote a relatively even
* distribution even when the inputs do not have a good distribution.
*
* This allows users to get away with using a poor hashing function,
* such as `String.hashCode`.
*/
private def hashedToggle(
id: String,
pf: PartialFunction[Int, Boolean]
): Toggle[Int] = new Toggle[Int](id) {
override def toString: String = s"Toggle($id)"
private[this] def hash(i: Int): Int = {
val h = MurmurHash3.mix(HashSeed, i)
MurmurHash3.finalizeHash(h, 1)
}
def isDefinedAt(x: Int): Boolean = pf.isDefinedAt(hash(x))
def apply(x: Int): Boolean = pf(hash(x))
}
private[this] val MetadataOrdering: Ordering[Toggle.Metadata] =
new Ordering[Toggle.Metadata] {
def compare(x: Metadata, y: Metadata): Int = {
val ids = Ordering.String.compare(x.id, y.id)
if (ids != 0) ids
else Ordering.Double.compare(x.fraction, y.fraction)
}
}
/**
* Creates a [[ToggleMap]] with a `Gauge`, "checksum", which summarizes the
* current state of the `Toggles` which may be useful for comparing state
* across a cluster or over time.
*
* @param statsReceiver in typical usage by [[StandardToggleMap]], will be
* scoped to "toggles/\\$libraryName".
*/
def observed(toggleMap: ToggleMap, statsReceiver: StatsReceiver): ToggleMap = {
new Proxy with Composite {
private[this] val checksum = statsReceiver.addGauge("checksum") {
// crc32 is not a cryptographic hash, but good enough for our purposes
// of summarizing the current state of the ToggleMap. we only need it
// to be efficient to compute and have small changes to the input affect
// the output.
val crc32 = new CRC32()
// need a consistent ordering, forcing the sort before computation
iterator.toIndexedSeq.sorted(MetadataOrdering).foreach { md =>
crc32.update(md.id.getBytes(UTF_8))
// convert the md's fraction to a Long and then feed each
// byte into the crc
val f = java.lang.Double.doubleToLongBits(md.fraction)
crc32.update((0xff & f ).toInt)
crc32.update((0xff & (f >> 8) ).toInt)
crc32.update((0xff & (f >> 16)).toInt)
crc32.update((0xff & (f >> 24)).toInt)
crc32.update((0xff & (f >> 32)).toInt)
crc32.update((0xff & (f >> 40)).toInt)
crc32.update((0xff & (f >> 48)).toInt)
crc32.update((0xff & (f >> 56)).toInt)
}
crc32.getValue.toFloat
}
protected def underlying: ToggleMap = toggleMap
override def toString: String =
s"observed($toggleMap, $statsReceiver)"
def components: Seq[ToggleMap] =
Seq(underlying)
}
}
/**
* A marker interface in support of [[components(ToggleMap)]]
*/
private trait Composite {
def components: Seq[ToggleMap]
}
/**
* For some administrative purposes, it can be useful to get at the
* component `ToggleMaps` that may make up a [[ToggleMap]].
*
* For example:
* {{{
* val toggleMap1: ToggleMap = ...
* val toggleMap2: ToggleMap = ...
* val combined = toggleMap1.orElse(toggleMap2)
* assert(Seq(toggleMap1, toggleMap2) == ToggleMap.components(combined))
* }}}
*/
def components(toggleMap: ToggleMap): Seq[ToggleMap] = {
toggleMap match {
case composite: Composite =>
composite.components.flatMap(components)
case _ =>
Seq(toggleMap)
}
}
/**
* The [[ToggleMap]] interface is read only and this
* is the mutable side of it.
*
* Implementations are expected to be thread-safe.
*/
trait Mutable extends ToggleMap {
/**
* Add or replace the [[Toggle]] for this `id` with a
* [[Toggle]] that returns `true` for a `fraction` of the inputs.
*
* @param id the identifying name of the `Toggle`.
* These should generally be fully qualified names to avoid conflicts
* between libraries. For example, "com.twitter.finagle.CoolThing".
* @param fraction must be within `0.0–1.0`, inclusive. If not, the operation
* is ignored.
*/
def put(id: String, fraction: Double): Unit
/**
* Remove the [[Toggle]] for this `id`.
*
* This is a no-op for missing values.
*
* @param id the identifying name of the `Toggle`.
* These should generally be fully qualified names to avoid conflicts
* between libraries. For example, "com.twitter.finagle.CoolThing".
*/
def remove(id: String): Unit
}
/**
* Create a [[Toggle]] where `fraction` of the inputs will return `true.`
*
* @note that inputs to [[Toggle.apply]] will be modified to promote
* better distributions in the face of low entropy inputs.
*
* @param id the name of the Toggle which is used to mix
* where along the universe of Ints does the range fall.
* @param fraction the fraction, from 0.0 - 1.0 (inclusive), of Ints
* to return `true`. If outside of that range, a
* `java.lang.IllegalArgumentException` will be thrown.
*/
private[toggle] def fractional(id: String, fraction: Double): Toggle[Int] = {
Toggle.validateFraction(id, fraction)
// we want a continuous range within the space of Int.MinValue
// to Int.MaxValue, including overflowing Max.
// By mapping the range to a Long and then mapping this into the
// space of Ints we create a Toggle that is both space efficient
// as well as quick to respond to `apply`.
// within a range of [0, Int.MaxValue*2]
val range: Long = ((1L << 32) * fraction).toLong
// We want to use `id` as an input into the function so
// that ints have different likelihoods depending on the toggle's id.
// Without this, every Toggle's range would start at 0.
    // The input to many toggles may be something consistent per node,
    // say a machine name. So without the offset, nodes that hash close
    // to 0 would be much more likely to have most or all toggles turned
    // on. By using the id as an offset, we can shift this and make them
    // more evenly distributed.
val start = id.hashCode
val end: Int = (start + range).toInt
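    // Worked example (added): fraction = 0.5 gives range = 2^31. With
    // id.hashCode = 100, start = 100 and end wraps around to
    // Int.MinValue + 100, so the overflow branch below accepts
    // i >= 100 || i <= Int.MinValue + 100 (about half of all Ints).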
if (range == 0) {
Toggle.off(id) // 0%
} else if (start == end) {
Toggle.on(id) // 100%
} else if (start <= end) {
// the range is contiguous without overflows.
hashedToggle(id, { case i => i >= start && i <= end })
} else {
// the range overflows around Int.MaxValue
hashedToggle(id, { case i => i >= start || i <= end })
}
}
/**
* Create a [[ToggleMap]] out of the given [[ToggleMap ToggleMaps]].
*
* If `toggleMaps` is empty, [[NullToggleMap]] will be returned.
*/
@varargs
def of(toggleMaps: ToggleMap*): ToggleMap = {
val start: ToggleMap = NullToggleMap
toggleMaps.foldLeft(start) { case (acc, tm) =>
acc.orElse(tm)
}
}
/**
* A [[ToggleMap]] implementation based on immutable [[Toggle.Metadata]].
*/
class Immutable(
metadata: immutable.Seq[Toggle.Metadata])
extends ToggleMap {
private[this] val toggles: immutable.Map[String, Toggle[Int]] =
metadata.map { md =>
md.id -> fractional(md.id, md.fraction)
}(breakOut)
override def toString: String =
s"ToggleMap.Immutable@${System.identityHashCode(this)}"
def apply(id: String): Toggle[Int] =
toggles.get(id) match {
case Some(t) => t
case None => Toggle.Undefined
}
def iterator: Iterator[Toggle.Metadata] =
metadata.iterator
}
private[this] val log = Logger.get()
private[this] val NoFractionAndToggle = (Double.NaN, Toggle.Undefined)
private class MutableToggle(id: String) extends Toggle[Int](id) {
private[this] val fractionAndToggle =
new AtomicReference[(Double, Toggle[Int])](NoFractionAndToggle)
override def toString: String = s"MutableToggle($id)"
private[ToggleMap] def currentFraction: Double =
fractionAndToggle.get()._1
private[ToggleMap] def setFraction(fraction: Double): Unit = {
val fAndT: (Double, Toggle[Int]) = if (Toggle.isValidFraction(fraction)) {
(fraction, fractional(id, fraction))
} else {
NoFractionAndToggle
}
fractionAndToggle.set(fAndT)
}
def isDefinedAt(t: Int): Boolean =
fractionAndToggle.get()._2.isDefinedAt(t)
def apply(t: Int): Boolean =
fractionAndToggle.get()._2(t)
}
/**
* Create an empty [[Mutable]] instance with a default [[Metadata.source]]
* specified.
*
* @note that inputs to [[Toggle.apply]] will be modified to promote
* better distributions in the face of low entropy inputs.
*/
def newMutable(): Mutable =
newMutable(None)
/**
* Create an empty [[Mutable]] instance with the given [[Metadata.source]].
*
* @note that inputs to [[Toggle.apply]] will be modified to promote
* better distributions in the face of low entropy inputs.
*/
def newMutable(source: String): Mutable =
newMutable(Some(source))
private[this] def newMutable(source: Option[String]): Mutable = new Mutable {
override def toString: String = source match {
case Some(src) => src
case None => s"ToggleMap.Mutable@${Integer.toHexString(hashCode())}"
}
// There will be minimal updates, so we can use a low concurrency level,
// which makes the footprint smaller.
private[this] val toggles =
new ConcurrentHashMap[String, MutableToggle](32, 0.75f, 1)
private[this] def toggleFor(id: String): MutableToggle = {
val curr = toggles.get(id)
if (curr != null) {
curr
} else {
val newToggle = new MutableToggle(id)
val prev = toggles.putIfAbsent(id, newToggle)
if (prev == null)
newToggle
else
prev
}
}
def apply(id: String): Toggle[Int] =
toggleFor(id)
def iterator: Iterator[Toggle.Metadata] = {
val source = toString
toggles.asScala.collect {
case (id, toggle) if Toggle.isValidFraction(toggle.currentFraction) =>
Toggle.Metadata(id, toggle.currentFraction, None, source)
}.toIterator
}
def put(id: String, fraction: Double): Unit = {
if (Toggle.isValidFraction(fraction)) {
log.info(s"Mutable Toggle id='$id' set to fraction=$fraction")
toggleFor(id).setFraction(fraction)
} else {
log.warning(s"Mutable Toggle id='$id' ignoring invalid fraction=$fraction")
}
}
def remove(id: String): Unit = {
log.info(s"Mutable Toggle id='$id' removed")
toggleFor(id).setFraction(Double.NaN)
}
}
/**
* A [[ToggleMap]] that is backed by a `com.twitter.app.GlobalFlag`,
* [[flag.overrides]].
*
* Its [[Toggle Toggles]] will reflect changes to the underlying `Flag` which
* enables usage in tests.
*
* Fractions that are out of range (outside of `[0.0-1.0]`) will be
* ignored.
*
* @note that inputs to [[Toggle.apply]] will be modified to promote
* better distributions in the face of low entropy inputs.
*/
val flags: ToggleMap = new ToggleMap {
override def toString: String = "ToggleMap.Flags"
private[this] def fractions: Map[String, Double] =
flag.overrides()
private[this] class FlagToggle(id: String) extends Toggle[Int](id) {
private[this] val fractionAndToggle =
new AtomicReference[(Double, Toggle[Int])](NoFractionAndToggle)
override def toString: String = s"FlagToggle($id)"
def isDefinedAt(t: Int): Boolean =
fractions.get(id) match {
case Some(f) if Toggle.isValidFraction(f) => true
case _ => false
}
def apply(t: Int): Boolean = {
fractions.get(id) match {
case Some(f) if Toggle.isValidFraction(f) =>
val prev = fractionAndToggle.get()
val toggle =
if (f == prev._1) {
// we can use the cached toggle since the fraction matches
prev._2
} else {
val newToggle = fractional(id, f)
fractionAndToggle.compareAndSet(prev, (f, newToggle))
newToggle
}
toggle(t)
case _ =>
throw new IllegalStateException(s"$this not defined for input: $t")
}
}
}
def apply(id: String): Toggle[Int] =
new FlagToggle(id)
def iterator: Iterator[Toggle.Metadata] = {
val source = toString
fractions.iterator.collect { case (id, f) if Toggle.isValidFraction(f) =>
Toggle.Metadata(id, f, None, source)
}
}
}
/**
* A [[ToggleMap]] that proxies work to `underlying`.
*/
trait Proxy extends ToggleMap {
protected def underlying: ToggleMap
override def toString: String = underlying.toString
def apply(id: String): Toggle[Int] = underlying(id)
def iterator: Iterator[Metadata] = underlying.iterator
}
private val MdDescFn: Toggle.Metadata => Option[String] =
md => md.description
/**
* A [[ToggleMap]] which returns [[Toggle.on]] for all `ids`.
*
* @note [[ToggleMap.iterator]] will always be empty.
*/
val On: ToggleMap = new ToggleMap {
def apply(id: String): Toggle[Int] = Toggle.on(id)
def iterator: Iterator[Metadata] = Iterator.empty
}
/**
* A [[ToggleMap]] which returns [[Toggle.off]] for all `ids`.
*
* @note [[ToggleMap.iterator]] will always be empty.
*/
val Off: ToggleMap = new ToggleMap {
def apply(id: String): Toggle[Int] = Toggle.off(id)
def iterator: Iterator[Metadata] = Iterator.empty
}
}
| spockz/finagle | finagle-toggle/src/main/scala/com/twitter/finagle/toggle/ToggleMap.scala | Scala | apache-2.0 | 17,190 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import cascading.tuple.Fields
trait Sortable[+Self] {
// Perform an inner secondary sort
def sortBy(innerSort : Fields) : Self
def sorting : Option[Fields]
}
| rjhall/etsy.scalding | src/main/scala/com/twitter/scalding/Sortable.scala | Scala | apache-2.0 | 751 |
package io.coding.me.m2p2.core.actor.artifact
import io.coding.me.m2p2.core.analyzer.P2Unit
class P2MetadataAnalyzerTest extends ArtifactAnalyzerTest[P2Unit] {
override lazy val analyzerActor = system.actorOf(ArtifactAnalyzer.p2metadataProps(repositoryId))
override lazy val analyzerFile = getResourceFile("example-bundle-2.0.0-20150528.145642-258-p2metadata.xml")
} | tssp/maven-p2-view | core/src/test/scala/io/coding/me/m2p2/core/actor/artifact/P2MetadataAnalyzerTest.scala | Scala | mit | 376 |
package org.littlewings.javaee7.beanvalidation
import scala.beans.BeanProperty
@PostSalaryConstraint
class Employee {
  @BeanProperty var name: String = _
  @BeanProperty var post: String = _
  @BeanProperty var salary: Int = _
}
| kazuhira-r/javaee7-scala-examples | bean-validation-interrelation/src/test/scala/org/littlewings/javaee7/beanvalidation/Employee.scala | Scala | mit | 190 |
package demo.util
//<Factorial>
object Factorial {
def apply(n: Int): BigInt = {
if (n <= 1)
1
else
Factorial(n-1) * n
}
}
//</Factorial>
object test {
//<main>
def main(args: Array[String]) {
println(Factorial(5))
}
//</main>
}
| bhoward/Escalator | doc/demo/util/test.scala | Scala | apache-2.0 | 267 |
/* NSC -- new Scala compiler
* Copyright 2005-2012 LAMP/EPFL
* @author Martin Odersky
*/
package scala
package tools.nsc
package backend
package jvm
import scala.collection.immutable
import scala.tools.asm
/*
*
* @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
* @version 1.0
*
*/
abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
import global._
import bTypes._
import coreBTypes._
/*
* Functionality to lower `synchronized` and `try` expressions.
*/
abstract class SyncAndTryBuilder(cunit: CompilationUnit) extends PlainBodyBuilder(cunit) {
def genSynchronized(tree: Apply, expectedType: BType): BType = {
val Apply(fun, args) = tree
val monitor = locals.makeLocal(ObjectRef, "monitor")
val monCleanup = new asm.Label
// if the synchronized block returns a result, store it in a local variable.
// Just leaving it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks).
val hasResult = (expectedType != UNIT)
val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult") else null
/* ------ (1) pushing and entering the monitor, also keeping a reference to it in a local var. ------ */
genLoadQualifier(fun)
bc dup ObjectRef
locals.store(monitor)
emit(asm.Opcodes.MONITORENTER)
/* ------ (2) Synchronized block.
* Reached by fall-through from (1).
* Protected by:
* (2.a) the EH-version of the monitor-exit, and
* (2.b) whatever protects the whole synchronized expression.
* ------
*/
val startProtected = currProgramPoint()
registerCleanup(monCleanup)
genLoad(args.head, expectedType /* toTypeKind(tree.tpe.resultType) */)
unregisterCleanup(monCleanup)
if (hasResult) { locals.store(monitorResult) }
nopIfNeeded(startProtected)
val endProtected = currProgramPoint()
/* ------ (3) monitor-exit after normal, non-early-return, termination of (2).
* Reached by fall-through from (2).
* Protected by whatever protects the whole synchronized expression.
* ------
*/
locals.load(monitor)
emit(asm.Opcodes.MONITOREXIT)
if (hasResult) { locals.load(monitorResult) }
val postHandler = new asm.Label
bc goTo postHandler
/* ------ (4) exception-handler version of monitor-exit code.
* Reached upon abrupt termination of (2).
* Protected by whatever protects the whole synchronized expression.
* null => "any" exception in bytecode, like we emit for finally.
* Important not to use j/l/Throwable which dooms the method to a life of interpretation! (SD-233)
* ------
*/
protect(startProtected, endProtected, currProgramPoint(), null)
locals.load(monitor)
emit(asm.Opcodes.MONITOREXIT)
emit(asm.Opcodes.ATHROW)
/* ------ (5) cleanup version of monitor-exit code.
* Reached upon early-return from (2).
* Protected by whatever protects the whole synchronized expression.
* ------
*/
if (shouldEmitCleanup) {
markProgramPoint(monCleanup)
locals.load(monitor)
emit(asm.Opcodes.MONITOREXIT)
pendingCleanups()
}
/* ------ (6) normal exit of the synchronized expression.
* Reached after normal, non-early-return, termination of (3).
* Protected by whatever protects the whole synchronized expression.
* ------
*/
mnode visitLabel postHandler
lineNumber(tree)
expectedType
}
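    /*
     * Illustrative summary (an assumption for exposition, not from the original
     * source): for input like `monitor.synchronized { body }`, the emission order
     * realized above is: MONITORENTER; the protected body; the fall-through
     * MONITOREXIT; the exception-handler MONITOREXIT followed by ATHROW; and,
     * only when `body` contains an early return, a cleanup-section MONITOREXIT.
     */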
/*
* Detects whether no instructions have been emitted since label `lbl` and if so emits a NOP.
* Useful to avoid emitting an empty try-block being protected by exception handlers,
* which results in "java.lang.ClassFormatError: Illegal exception table range". See SI-6102.
*/
def nopIfNeeded(lbl: asm.Label) {
val noInstructionEmitted = isAtProgramPoint(lbl)
if (noInstructionEmitted) { emit(asm.Opcodes.NOP) }
}
/*
* Emitting try-catch is easy, emitting try-catch-finally not quite so.
* A finally-block (which always has type Unit, thus leaving the operand stack unchanged)
* affects control-transfer from protected regions, as follows:
*
* (a) `return` statement:
*
* First, the value to return (if any) is evaluated.
* Afterwards, all enclosing finally-blocks are run, from innermost to outermost.
* Only then is the return value (if any) returned.
*
* Some terminology:
* (a.1) Executing a return statement that is protected
* by one or more finally-blocks is called "early return"
* (a.2) the chain of code sections (a code section for each enclosing finally-block)
* to run upon early returns is called "cleanup chain"
*
* As an additional spin, consider a return statement in a finally-block.
* In this case, the value to return depends on how control arrived at that statement:
* in case it arrived via a previous return, the previous return enjoys priority:
* the value to return is given by that statement.
*
* (b) A finally-block protects both the try-clause and the catch-clauses.
*
* Sidenote:
* A try-clause may contain an empty block. On CLR, a finally-block has special semantics
* regarding Abort interruptions; but on the JVM it's safe to elide an exception-handler
* that protects an "empty" range ("empty" as in "containing NOPs only",
* see `asm.optimiz.DanglingExcHandlers` and SI-6720).
*
* This means a finally-block indicates instructions that can be reached:
* (b.1) Upon normal (non-early-returning) completion of the try-clause or a catch-clause
* In this case, the next-program-point is that following the try-catch-finally expression.
* (b.2) Upon early-return initiated in the try-clause or a catch-clause
* In this case, the next-program-point is the enclosing cleanup section (if any), otherwise return.
* (b.3) Upon abrupt termination (due to unhandled exception) of the try-clause or a catch-clause
* In this case, the unhandled exception must be re-thrown after running the finally-block.
*
* (c) finally-blocks are implicit to `synchronized` (a finally-block is added to just release the lock)
* that's why `genSynchronized()` too emits cleanup-sections.
*
* A number of code patterns can be emitted to realize the intended semantics.
*
* A popular alternative (GenICode, javac) consists in duplicating the cleanup-chain at each early-return position.
* The principle at work being that once control is transferred to a cleanup-section,
* control will always stay within the cleanup-chain.
* That is, barring an exception being thrown in a cleanup-section, in which case the enclosing try-block
* (reached via abrupt termination) takes over.
*
* The observations above hint at another code layout, less verbose, for the cleanup-chain.
*
* The code layout that GenBCode emits takes into account that once a cleanup section has been reached,
* jumping to the next cleanup-section (and so on, until the outermost one) realizes the correct semantics.
*
* There is still code duplication in that two cleanup-chains are needed (but this is unavoidable, anyway):
* one for normal control flow and another chain consisting of exception handlers.
* The in-line comments below refer to them as
* - "early-return-cleanups" and
* - "exception-handler-version-of-finally-block" respectively.
*
*/
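    /*
     * Worked example of the early-return semantics above (for exposition only,
     * not from the original source):
     *
     *   def f(): Int =
     *     try {
     *       try { return 1 }             // early return: both cleanup chains run
     *       finally { println("inner") } // innermost finally first
     *     } finally { println("outer") } // then the enclosing one
     *
     * f() prints "inner" then "outer" and evaluates to 1, per (a.1)/(a.2).
     */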
def genLoadTry(tree: Try): BType = {
val Try(block, catches, finalizer) = tree
val kind = tpeTK(tree)
val caseHandlers: List[EHClause] =
for (CaseDef(pat, _, caseBody) <- catches) yield {
pat match {
case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt).asClassBType, caseBody)
case Ident(nme.WILDCARD) => NamelessEH(jlThrowableRef, caseBody)
case Bind(_, _) => BoundEH (pat.symbol, caseBody)
}
}
// ------ (0) locals used later ------
/*
* `postHandlers` is a program point denoting:
* (a) the finally-clause conceptually reached via fall-through from try-catch-finally
* (in case a finally-block is present); or
* (b) the program point right after the try-catch
* (in case there's no finally-block).
* The name choice emphasizes that the code section lies "after all exception handlers",
* where "all exception handlers" includes those derived from catch-clauses as well as from finally-blocks.
*/
val postHandlers = new asm.Label
val hasFinally = (finalizer != EmptyTree)
/*
* used in the finally-clause reached via fall-through from try-catch, if any.
*/
val guardResult = hasFinally && (kind != UNIT) && mayCleanStack(finalizer)
/*
* please notice `tmp` has type tree.tpe, while `earlyReturnVar` has the method return type.
* Because those two types can be different, dedicated vars are needed.
*/
val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp") else null
/*
* upon early return from the try-body or one of its EHs (but not the EH-version of the finally-clause)
* AND hasFinally, a cleanup is needed.
*/
val finCleanup = if (hasFinally) new asm.Label else null
/* ------ (1) try-block, protected by:
* (1.a) the EHs due to case-clauses, emitted in (2),
* (1.b) the EH due to finally-clause, emitted in (3.A)
* (1.c) whatever protects the whole try-catch-finally expression.
* ------
*/
val startTryBody = currProgramPoint()
registerCleanup(finCleanup)
genLoad(block, kind)
unregisterCleanup(finCleanup)
nopIfNeeded(startTryBody)
val endTryBody = currProgramPoint()
bc goTo postHandlers
/**
* A return within a `try` or `catch` block where a `finally` is present ("early return")
* emits a store of the result to a local, jump to a "cleanup" version of the `finally` block,
* and sets `shouldEmitCleanup = true` (see [[PlainBodyBuilder.genReturn]]).
*
* If the try-catch is nested, outer `finally` blocks need to be emitted in a cleanup version
* as well, so the `shouldEmitCleanup` variable remains `true` until the outermost `finally`.
* Nested cleanup `finally` blocks jump to the next enclosing one. For the outermost, we emit
* a read of the local variable, a return, and we set `shouldEmitCleanup = false` (see
* [[pendingCleanups]]).
*
* Now, assume we have
*
* try { return 1 } finally {
* try { println() } finally { println() }
* }
*
* Here, the outer `finally` needs a cleanup version, but the inner one does not. The method
* here makes sure that `shouldEmitCleanup` is only propagated outwards, not inwards to
* nested `finally` blocks.
*/
def withFreshCleanupScope(body: => Unit) = {
val savedShouldEmitCleanup = shouldEmitCleanup
shouldEmitCleanup = false
body
shouldEmitCleanup = savedShouldEmitCleanup || shouldEmitCleanup
}
/* ------ (2) One EH for each case-clause (this does not include the EH-version of the finally-clause)
* An EH in (2) is reached upon abrupt termination of (1).
* An EH in (2) is protected by:
* (2.a) the EH-version of the finally-clause, if any.
* (2.b) whatever protects the whole try-catch-finally expression.
* ------
*/
for (ch <- caseHandlers) withFreshCleanupScope {
// (2.a) emit case clause proper
val startHandler = currProgramPoint()
var endHandler: asm.Label = null
var excType: ClassBType = null
registerCleanup(finCleanup)
ch match {
case NamelessEH(typeToDrop, caseBody) =>
bc drop typeToDrop
genLoad(caseBody, kind) // adapts caseBody to `kind`, thus it can be stored, if `guardResult`, in `tmp`.
nopIfNeeded(startHandler)
endHandler = currProgramPoint()
excType = typeToDrop
case BoundEH (patSymbol, caseBody) =>
// test/files/run/contrib674.scala , a local-var already exists for patSymbol.
// rather than creating on first-access, we do it right away to emit debug-info for the created local var.
val Local(patTK, _, patIdx, _) = locals.getOrMakeLocal(patSymbol)
bc.store(patIdx, patTK)
genLoad(caseBody, kind)
nopIfNeeded(startHandler)
endHandler = currProgramPoint()
emitLocalVarScope(patSymbol, startHandler, endHandler)
excType = patTK.asClassBType
}
unregisterCleanup(finCleanup)
// (2.b) mark the try-body as protected by this case clause.
protect(startTryBody, endTryBody, startHandler, excType)
// (2.c) emit jump to the program point where the finally-clause-for-normal-exit starts, or in effect `after` if no finally-clause was given.
bc goTo postHandlers
}
// Need to save the state of `shouldEmitCleanup` at this point: while emitting the first
// version of the `finally` block below, the variable may become true. But this does not mean
// that we need a cleanup version for the current block, only for the enclosing ones.
val currentFinallyBlockNeedsCleanup = shouldEmitCleanup
/* ------ (3.A) The exception-handler-version of the finally-clause.
* Reached upon abrupt termination of (1) or one of the EHs in (2).
* Protected only by whatever protects the whole try-catch-finally expression.
* ------
*/
// a note on terminology: this is not "postHandlers", despite appearances.
// "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts.
if (hasFinally) withFreshCleanupScope {
nopIfNeeded(startTryBody)
val finalHandler = currProgramPoint() // version of the finally-clause reached via unhandled exception.
protect(startTryBody, finalHandler, finalHandler, null)
val Local(eTK, _, eIdx, _) = locals(locals.makeLocal(jlThrowableRef, "exc"))
bc.store(eIdx, eTK)
emitFinalizer(finalizer, null, isDuplicate = true)
bc.load(eIdx, eTK)
emit(asm.Opcodes.ATHROW)
}
/* ------ (3.B) Cleanup-version of the finally-clause.
* Reached upon early RETURN from (1) or upon early RETURN from one of the EHs in (2)
     *              (and only from there, i.e. reached only upon early RETURN from
* program regions bracketed by registerCleanup/unregisterCleanup).
* Protected only by whatever protects the whole try-catch-finally expression.
*
* Given that control arrives to a cleanup section only upon early RETURN,
* the value to return (if any) is always available. Therefore, a further RETURN
* found in a cleanup section is always ignored (a warning is displayed, @see `genReturn()`).
* In order for `genReturn()` to know whether the return statement is enclosed in a cleanup section,
* the variable `insideCleanupBlock` is used.
* ------
*/
// this is not "postHandlers" either.
// `shouldEmitCleanup` can be set, and at the same time this try expression may lack a finally-clause.
// In other words, all combinations of (hasFinally, shouldEmitCleanup) are valid.
if (hasFinally && currentFinallyBlockNeedsCleanup) {
markProgramPoint(finCleanup)
      // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` is inserted.
emitFinalizer(finalizer, null, isDuplicate = true)
pendingCleanups()
}
/* ------ (4) finally-clause-for-normal-nonEarlyReturn-exit
* Reached upon normal, non-early-return termination of (1) or of an EH in (2).
* Protected only by whatever protects the whole try-catch-finally expression.
* TODO explain what happens upon RETURN contained in (4)
* ------
*/
markProgramPoint(postHandlers)
if (hasFinally) {
emitFinalizer(finalizer, tmp, isDuplicate = false) // the only invocation of emitFinalizer with `isDuplicate == false`
}
kind
} // end of genLoadTry()
/* if no more pending cleanups, all that remains to do is return. Otherwise jump to the next (outer) pending cleanup. */
private def pendingCleanups() {
cleanups match {
case Nil =>
if (earlyReturnVar != null) {
locals.load(earlyReturnVar)
bc.emitRETURN(locals(earlyReturnVar).tk)
} else {
bc emitRETURN UNIT
}
shouldEmitCleanup = false
case nextCleanup :: _ =>
bc goTo nextCleanup
}
}
def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: ClassBType) {
val excInternalName: String =
if (excType == null) null
else excType.internalName
assert(start != end, "protecting a range of zero instructions leads to illegal class format. Solution: add a NOP to that range.")
mnode.visitTryCatchBlock(start, end, handler, excInternalName)
}
/* `tmp` (if non-null) is the symbol of the local-var used to preserve the result of the try-body, see `guardResult` */
def emitFinalizer(finalizer: Tree, tmp: Symbol, isDuplicate: Boolean) {
var saved: immutable.Map[ /* LabelDef */ Symbol, asm.Label ] = null
if (isDuplicate) {
saved = jumpDest
for(ldef <- labelDefsAtOrUnder(finalizer)) {
jumpDest -= ldef.symbol
}
}
// when duplicating, the above guarantees new asm.Labels are used for LabelDefs contained in the finalizer (their vars are reused, that's ok)
if (tmp != null) { locals.store(tmp) }
genLoad(finalizer, UNIT)
if (tmp != null) { locals.load(tmp) }
if (isDuplicate) {
jumpDest = saved
}
}
/* Does this tree have a try-catch block? */
def mayCleanStack(tree: Tree): Boolean = tree exists { t => t.isInstanceOf[Try] }
trait EHClause
case class NamelessEH(typeToDrop: ClassBType, caseBody: Tree) extends EHClause
case class BoundEH (patSymbol: Symbol, caseBody: Tree) extends EHClause
}
}
| felixmulder/scala | src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala | Scala | bsd-3-clause | 19,508 |
package de.htwg.zeta.server.model.modelValidator.validator.rules.metaModelDependent
import scala.collection.immutable.Seq
import de.htwg.zeta.common.models.project.concept.elements.AttributeType.StringType
import de.htwg.zeta.common.models.project.concept.elements.AttributeValue
import de.htwg.zeta.common.models.project.concept.elements.AttributeValue.StringValue
import de.htwg.zeta.common.models.project.concept.elements.MAttribute
import de.htwg.zeta.common.models.project.concept.elements.MClass
import de.htwg.zeta.common.models.project.concept.Concept
import de.htwg.zeta.common.models.project.instance.elements.NodeInstance
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
class NodeAttributesGlobalUniqueTest extends AnyFlatSpec with Matchers {
val mClass1 = MClass("nodeType1", "", abstractness = false, Seq.empty, Seq.empty, Seq.empty, Seq[MAttribute](), Seq.empty)
val mClass2 = MClass("nodeType2", "", abstractness = false, Seq.empty, Seq.empty, Seq.empty, Seq[MAttribute](), Seq.empty)
val rule = new NodeAttributesGlobalUnique(Seq("nodeType1", "nodeType2"), "attributeType")
"check" should "return success validation results on correct attributes" in {
val attribute1: Map[String, List[AttributeValue]] = Map("attributeType" -> List(StringValue("value1")))
val node1 = NodeInstance.empty("", mClass1.name, Seq(), Seq()).copy(attributeValues = attribute1)
val attribute2: Map[String, List[AttributeValue]] = Map("attributeType" -> List(StringValue("value2")))
val node2 = NodeInstance.empty("", mClass1.name, Seq(), Seq()).copy(attributeValues = attribute2)
val attribute3: Map[String, List[AttributeValue]] = Map("attributeType" -> List(StringValue("value3")))
val node3 = NodeInstance.empty("", mClass2.name, Seq(), Seq()).copy(attributeValues = attribute3)
val results = rule.check(Seq(node1, node2, node3))
results.size should be(3)
results.forall(_.valid) should be(true)
}
it should "return failure validation results on invalid attributes" in {
val attribute1: Map[String, List[AttributeValue]] = Map("attributeType" -> List(StringValue("duplicateValue")))
val node1 = NodeInstance.empty("", mClass1.name, Seq(), Seq()).copy(attributeValues = attribute1)
val attribute2: Map[String, List[AttributeValue]] = Map("attributeType" -> List(StringValue("value")))
val node2 = NodeInstance.empty("", mClass1.name, Seq(), Seq()).copy(attributeValues = attribute2)
val attribute3: Map[String, List[AttributeValue]] = Map("attributeType" -> List(StringValue("duplicateValue")))
val node3 = NodeInstance.empty("", mClass2.name, Seq(), Seq()).copy(attributeValues = attribute3)
val results = rule.check(Seq(node1, node2, node3))
results.size should be(3)
results.head.valid should be(false)
results(1).valid should be(true)
results(2).valid should be(false)
}
"dslStatement" should "return the correct string" in {
rule.dslStatement should be(
"""Attributes ofType "attributeType" inNodes Seq("nodeType1", "nodeType2") areGlobalUnique ()""")
}
"generateFor" should "generate this rule from the meta model" in {
val globalUniqueAttribute = MAttribute("attributeName", globalUnique = true, localUnique = false, StringType, StringValue(""), constant = false,
singleAssignment = false, "", ordered = false, transient = false)
val nonGlobalUniqueAttribute = MAttribute("attributeName2", globalUnique = false, localUnique = false, StringType, StringValue(""), constant = false,
singleAssignment = false, "", ordered = false, transient = false)
val mClass = MClass("class", "", abstractness = false, superTypeNames = Seq.empty, Seq.empty, Seq.empty, Seq[MAttribute]
(nonGlobalUniqueAttribute, globalUniqueAttribute), Seq.empty)
val metaModel = Concept.empty.copy(classes = Seq(mClass))
val result = NodeAttributesGlobalUnique.generateFor(metaModel)
result.size should be(1)
result.head match {
case rule: NodeAttributesGlobalUnique =>
rule.nodeTypes should be(Seq("class"))
rule.attributeType should be("attributeName")
case _ => fail
}
}
}
| Zeta-Project/zeta | api/server/test/de/htwg/zeta/server/model/modelValidator/validator/rules/metaModelDependent/NodeAttributesGlobalUniqueTest.scala | Scala | bsd-2-clause | 4,190 |
package repository.anormimpl
import anorm.SqlParser._
import anorm._
import models.Event
import models.shared.{Page, QueryUtils}
import play.api.Play.current
import play.api.db.DB
import repository.EventRepositoryComponent
/**
* @author adelfiri, " .. "
* @since 07 February 2016
*/
trait EventRepositoryAnormComponent extends EventRepositoryComponent {
def eventLocator = new EventLocatorAnorm
def eventUpdator = new EventUpdatorAnorm
/** generic insert */
/*def insert(table: String, ps: Seq[(Any, ParameterValue)]) = DB.withConnection { implicit c =>
SQL("INSERT INTO " + table + paramsToInsert(ps)).on(ps).execute
}*/
/** re-usable mapping */
def toParams(event: Event): Seq[NamedParameter] = Seq(
"name" -> event.name,
"description" -> event.description,
"status" -> event.status,
"website" -> event.webSite,
"path" -> event.path)
class EventUpdatorAnorm extends EventUpdator {
def saveEvent(event: Event) = {
val params = toParams(event)
val sql = "INSERT INTO event" + QueryUtils.paramsToInsert(params)
//println(sql)
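      // Illustrative note (an assumption about QueryUtils, not from the original
      // source): paramsToInsert presumably renders a fragment like
      //   " (name, description, status, website, path) values ({name}, {description}, ...)"
      // so that .on(params: _*) binds each NamedParameter by name.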
val id = DB.withConnection { implicit connection =>
SQL(sql).on(params: _*).executeInsert()
}
}
def updateEvent(event: Event) = {
val fieldParams = toParams(event)
val sql = "UPDATE event" + QueryUtils.paramsToUpdate(fieldParams) + " where id = {id}"
val advanced: Seq[NamedParameter] = Seq("id" -> event.id)
val params: Seq[NamedParameter] = fieldParams ++ advanced
//println(sql)
val id = DB.withConnection { implicit connection =>
SQL(sql).on(params: _*).executeUpdate()
}
}
def deleteEvent(eventId: Long) = {
val id = DB.withTransaction { implicit connection =>
SQL("delete from Guest where event_id = {event_id}").on("event_id" -> eventId).executeUpdate()
SQL("delete from Event where id = {event_id}").on("event_id" -> eventId).executeUpdate()
}
}
}
class EventLocatorAnorm extends EventLocator {
def findEventList(pageNumber: Int, pageSize: Int, direction: String, column: String): Page[Event] = {
val offset = pageSize * pageNumber
val listParams = QueryUtils.listParams(pageSize, offset, column, direction)
//println(listParams)
val sql = "select * from event" + QueryUtils.paramsToList(column, direction)
//println(sql)
DB.withConnection { implicit connection =>
val events: List[Event] =
SQL(sql).on(listParams: _*).as(Event.simple2.*)
val totalRows = SQL("select count(*) from event").as(scalar[Long].single)
val totalPages = QueryUtils.getTotalPages(pageSize, totalRows)
Page[Event](events, pageNumber, offset, totalPages)
}
}
def findOne(eventId: Long): Option[Event] = {
Event.findById(eventId)
}
}
}
| Nectarius/taffeit | app/repository/anormimpl/EventRepositoryAnormComponent.scala | Scala | mit | 2,880 |
/*
* Copyright 2015-2019 Snowflake Computing
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.snowflake.spark.snowflake
import java.sql._
import java.util.TimeZone
import net.snowflake.spark.snowflake.Utils.SNOWFLAKE_SOURCE_NAME
import net.snowflake.spark.snowflake.test.TestHook
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.plans.logical.Expand
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.IntegerType
import scala.reflect.internal.util.TableDef
// scalastyle:off println
class PushdownEnhancement02 extends IntegrationSuiteBase {
private var thisConnectorOptionsNoTable: Map[String, String] = Map()
private val test_table_basic: String = s"test_basic_$randomSuffix"
private val test_table_number = s"test_table_number_$randomSuffix"
private val test_table_date = s"test_table_date_$randomSuffix"
private val test_table_rank = s"test_table_rank_$randomSuffix"
override def afterAll(): Unit = {
try {
jdbcUpdate(s"drop table if exists $test_table_basic")
jdbcUpdate(s"drop table if exists $test_table_number")
jdbcUpdate(s"drop table if exists $test_table_date")
jdbcUpdate(s"drop table if exists $test_table_rank")
} finally {
TestHook.disableTestHook()
super.afterAll()
SnowflakeConnectorUtils.disablePushdownSession(sparkSession)
}
}
override def beforeAll(): Unit = {
super.beforeAll()
    // There is a bug in Date.equals() when comparing Dates across timezones,
    // so fix the timezone to work around it.
val gmtTimezone = TimeZone.getTimeZone("GMT")
TimeZone.setDefault(gmtTimezone)
connectorOptionsNoTable.foreach(tup => {
thisConnectorOptionsNoTable += tup
})
}
test("test pushdown basic: OR, BIT operation") {
jdbcUpdate(s"create or replace table $test_table_basic(name String, value1 Integer, value2 Integer)")
jdbcUpdate(s"insert into $test_table_basic values ('Ray', 1, 9), ('Ray', 2, 8), ('Ray', 3, 7), ('Emily', 4, 6), ('Emily', 5, 5)")
val tmpDF = sparkSession.read
.format(SNOWFLAKE_SOURCE_NAME)
.options(thisConnectorOptionsNoTable)
.option("dbtable", test_table_basic)
.load()
      // Spark doesn't support these bit operations on Decimal, so we convert them.
.withColumn("value1", col("value1").cast(IntegerType))
.withColumn("value2", col("value2").cast(IntegerType))
tmpDF.printSchema()
tmpDF.createOrReplaceTempView("test_table_basic")
val resultDF =
sparkSession
.sql(s"select name, value1 as v1, value2 as v2," +
s"(value1 & value2) as bitand, (value1 | value2) as bitor," +
s"(value1 ^ value2) as xor , ( ~value1 ) as bitnot" +
" from test_table_basic where name = 'Ray' or name = 'Emily'")
resultDF.show(10, false)
val expectedResult = Seq(
Row("Ray" , 1, 9, 1, 9, 8, -2),
Row("Ray" , 2, 8, 0, 10, 10, -3),
Row("Ray" , 3, 7, 3, 7, 4, -4),
Row("Emily", 4, 6, 4, 6, 2, -5),
Row("Emily", 5, 5, 5, 5, 0, -6)
)
testPushdown(
s"""SELECT ( "SUBQUERY_1"."NAME" ) AS "SUBQUERY_2_COL_0" ,
|( CAST ( "SUBQUERY_1"."VALUE1" AS NUMBER ) ) AS "SUBQUERY_2_COL_1" ,
|( CAST ( "SUBQUERY_1"."VALUE2" AS NUMBER ) ) AS "SUBQUERY_2_COL_2" ,
|( BITAND ( CAST ( "SUBQUERY_1"."VALUE1" AS NUMBER ) ,
|CAST ( "SUBQUERY_1"."VALUE2" AS NUMBER ) ) ) AS "SUBQUERY_2_COL_3" ,
|( BITOR ( CAST ( "SUBQUERY_1"."VALUE1" AS NUMBER ) ,
|CAST ( "SUBQUERY_1"."VALUE2" AS NUMBER ) ) ) AS "SUBQUERY_2_COL_4" ,
|( BITXOR ( CAST ( "SUBQUERY_1"."VALUE1" AS NUMBER ) ,
|CAST ( "SUBQUERY_1"."VALUE2" AS NUMBER ) ) ) AS "SUBQUERY_2_COL_5" ,
|( BITNOT ( CAST ( "SUBQUERY_1"."VALUE1" AS NUMBER ) ) )
|AS "SUBQUERY_2_COL_6" FROM ( SELECT * FROM (
|SELECT * FROM ( $test_table_basic ) AS "SF_CONNECTOR_QUERY_ALIAS" )
|AS "SUBQUERY_0" WHERE ( ( "SUBQUERY_0"."NAME" = 'Ray' ) OR
|( "SUBQUERY_0"."NAME" = 'Emily' ) ) ) AS "SUBQUERY_1"
|""".stripMargin,
resultDF,
expectedResult
)
}
test("test pushdown boolean functions: NOT/Contains/EndsWith/StartsWith") {
jdbcUpdate(s"create or replace table $test_table_basic(name String, value1 Integer, value2 Integer)")
jdbcUpdate(s"insert into $test_table_basic values ('Ray', 1, 9), ('Ray', 2, 8), ('Ray', 3, 7), ('Emily', 4, 6), ('Emily', 5, 5)")
val tmpDF = sparkSession.read
.format(SNOWFLAKE_SOURCE_NAME)
.options(thisConnectorOptionsNoTable)
.option("dbtable", test_table_basic)
.load()
tmpDF.printSchema()
tmpDF.createOrReplaceTempView("test_table_basic")
val resultDF =
sparkSession
.sql(s"select * " +
" from test_table_basic where name != 'Ray'" +
" OR (name like '%ay')" +
" OR (name like 'Emi%')" +
" OR (name like '%i%')" +
" OR (not (value1 >= 5))" +
" OR (not (value1 <= 6))" +
" OR (not (value2 > 7))" +
" OR (not (value2 < 8))" )
resultDF.show(10, false)
val expectedResult = Seq(
Row("Ray" , 1, 9),
Row("Ray" , 2, 8),
Row("Ray" , 3, 7),
Row("Emily", 4, 6),
Row("Emily", 5, 5)
)
testPushdown(
s"""SELECT * FROM ( SELECT * FROM ( $test_table_basic )
| AS "SF_CONNECTOR_QUERY_ALIAS" ) AS "SUBQUERY_0"
| WHERE ( ( ( (
| "SUBQUERY_0"."NAME" != 'Ray' )
| OR "SUBQUERY_0"."NAME" LIKE '%ay' )
| OR ( "SUBQUERY_0"."NAME" LIKE 'Emi%'
| OR "SUBQUERY_0"."NAME" LIKE '%i%' ) )
| OR ( ( ( "SUBQUERY_0"."VALUE1" < 5 )
| OR ( "SUBQUERY_0"."VALUE1" > 6 ) )
| OR ( ( "SUBQUERY_0"."VALUE2" <= 7 )
| OR ( "SUBQUERY_0"."VALUE2" >= 8 ) ) ) )
|""".stripMargin,
resultDF,
expectedResult, false, true
)
}
test("test pushdown number functions: PI() and Round()/Random") {
// Don't run test with use_copy_unload because COPY UNLOAD converts
// PI value 3.141592653589793 to 3.141592654
if (!params.useCopyUnload) {
jdbcUpdate(s"create or replace table $test_table_number " +
s"(d1 decimal(38, 10), f1 float)")
jdbcUpdate(s"insert into $test_table_number values " +
s"(-1.9, -1.9), (-1.1, -1.1), (0, 0), (1.1, 1.1), (1.9, 1.9)")
val tmpDF = sparkSession.read
.format(SNOWFLAKE_SOURCE_NAME)
.options(thisConnectorOptionsNoTable)
.option("dbtable", test_table_number)
.load()
tmpDF.createOrReplaceTempView("test_table_number")
val resultDF =
sparkSession
.sql(s"select round(d1), round(f1), PI()" +
" from test_table_number")
resultDF.printSchema()
resultDF.show(10, false)
val PI = 3.141592653589793
val expectedResult = Seq(
Row(BigDecimal(-2), (-2).toDouble, PI),
Row(BigDecimal(-1), (-1).toDouble, PI),
Row(BigDecimal(0), (0).toDouble, PI),
Row(BigDecimal(1), (1).toDouble, PI),
Row(BigDecimal(2), (2).toDouble, PI)
)
testPushdown(
s"""SELECT ( ROUND ( "SUBQUERY_0"."D1" , 0 ) ) AS "SUBQUERY_1_COL_0",
|( ROUND ( "SUBQUERY_0"."F1" , 0 ) ) AS "SUBQUERY_1_COL_1",
|( 3.141592653589793 ) AS "SUBQUERY_1_COL_2" FROM
|( SELECT * FROM ( $test_table_number ) AS
|"SF_CONNECTOR_QUERY_ALIAS" ) AS "SUBQUERY_0"
|""".stripMargin,
resultDF,
expectedResult
)
// Can't assert the returned value for random(). So just run it.
sparkSession
.sql(s"select d1, random(100), random() from test_table_number")
.show()
}
}
test("test pushdown functions date_add/date_sub") {
jdbcUpdate(s"create or replace table $test_table_date " +
s"(d1 date)")
jdbcUpdate(s"insert into $test_table_date values " +
s"('2020-07-28')")
val tmpDF = sparkSession.read
.format(SNOWFLAKE_SOURCE_NAME)
.options(thisConnectorOptionsNoTable)
.option("dbtable", test_table_date)
.load()
val resultDF = tmpDF.select(
col("d1"),
date_add(col("d1"),4).as("date_add"),
date_sub(col("d1"),4).as("date_sub")
)
val expectedResult = Seq(
Row(
Date.valueOf("2020-07-28"),
Date.valueOf("2020-08-01"),
Date.valueOf("2020-07-24"))
)
testPushdown(
s"""SELECT (
| "SUBQUERY_0"."D1" ) AS "SUBQUERY_1_COL_0" ,
| ( DATEADD(day, 4 , "SUBQUERY_0"."D1" ) ) AS "SUBQUERY_1_COL_1" ,
| ( DATEADD(day, (0 - ( 4 )), "SUBQUERY_0"."D1" ) ) AS "SUBQUERY_1_COL_2"
|FROM (
| SELECT * FROM (
| $test_table_date
| ) AS "SF_CONNECTOR_QUERY_ALIAS"
|) AS "SUBQUERY_0"
|""".stripMargin,
resultDF,
expectedResult
)
}
test("test pushdown functions date_add/date_sub on Feb last day in leap and non-leap year") {
jdbcUpdate(s"create or replace table $test_table_date " +
s"(leap_02_29 date, leap_03_01 date, non_leap_02_28 date, non_leap_03_01 date)")
jdbcUpdate(s"insert into $test_table_date values " +
s"('2016-02-29', '2016-03-01', '2015-02-28', '2015-03-01')")
val tmpDF = sparkSession.read
.format(SNOWFLAKE_SOURCE_NAME)
.options(thisConnectorOptionsNoTable)
.option("dbtable", test_table_date)
.load()
val resultDF = tmpDF.select(
col("leap_02_29"),
date_add(col("leap_02_29"),1), // Add +1
date_sub(col("leap_02_29"),-1), // Sub -1
date_add(col("leap_03_01"),-1), // Add -1
date_sub(col("leap_03_01"),1), // Sub +1
date_add(col("non_leap_02_28"),1), // Add +1
date_sub(col("non_leap_02_28"),-1), // Sub -1
date_add(col("non_leap_03_01"),-1), // Add -1
date_sub(col("non_leap_03_01"),1) // Sub +1
)
val expectedResult = Seq(
Row(
Date.valueOf("2016-02-29"),
Date.valueOf("2016-03-01"),
Date.valueOf("2016-03-01"),
Date.valueOf("2016-02-29"),
Date.valueOf("2016-02-29"),
Date.valueOf("2015-03-01"),
Date.valueOf("2015-03-01"),
Date.valueOf("2015-02-28"),
Date.valueOf("2015-02-28"))
)
testPushdown(
s"""SELECT
| ( "SUBQUERY_0"."LEAP_02_29" ) AS "SUBQUERY_1_COL_0" ,
| ( DATEADD ( day, 1 , "SUBQUERY_0"."LEAP_02_29" ) ) AS "SUBQUERY_1_COL_1" ,
| ( DATEADD ( day, (0 - ( -1 ) ), "SUBQUERY_0"."LEAP_02_29" ) ) AS "SUBQUERY_1_COL_2" ,
| ( DATEADD ( day, -1 , "SUBQUERY_0"."LEAP_03_01" ) ) AS "SUBQUERY_1_COL_3" ,
| ( DATEADD ( day, (0 - ( 1 ) ), "SUBQUERY_0"."LEAP_03_01" ) ) AS "SUBQUERY_1_COL_4" ,
| ( DATEADD ( day, 1 , "SUBQUERY_0"."NON_LEAP_02_28" ) ) AS "SUBQUERY_1_COL_5" ,
| ( DATEADD ( day, (0 - ( -1 ) ), "SUBQUERY_0"."NON_LEAP_02_28" ) ) AS "SUBQUERY_1_COL_6" ,
| ( DATEADD ( day, -1 , "SUBQUERY_0"."NON_LEAP_03_01" ) ) AS "SUBQUERY_1_COL_7" ,
| ( DATEADD ( day, (0 - ( 1 ) ), "SUBQUERY_0"."NON_LEAP_03_01" ) ) AS "SUBQUERY_1_COL_8"
|FROM (
| SELECT * FROM (
| $test_table_date
| ) AS "SF_CONNECTOR_QUERY_ALIAS"
|) AS "SUBQUERY_0"
|""".stripMargin,
resultDF,
expectedResult
)
}
test("test pushdown WindowExpression: Rank without PARTITION BY") {
jdbcUpdate(s"create or replace table $test_table_rank" +
s"(state String, bushels_produced Integer)")
jdbcUpdate(s"insert into $test_table_rank values" +
s"('Iowa', 130), ('Iowa', 120), ('Iowa', 120)," +
s"('Kansas', 100), ('Kansas', 100), ('Kansas', 90)")
val tmpDF = sparkSession.read
.format(SNOWFLAKE_SOURCE_NAME)
.options(thisConnectorOptionsNoTable)
.option("dbtable", test_table_rank)
.load()
tmpDF.printSchema()
tmpDF.createOrReplaceTempView("test_table_rank")
val resultDF =
sparkSession
.sql(s"select state, bushels_produced," +
" rank() over (order by bushels_produced desc) as total_rank" +
" from test_table_rank")
resultDF.show(10, false)
val expectedResult = Seq(
Row("Iowa", 130, 1),
Row("Iowa", 120, 2),
Row("Iowa", 120, 2),
Row("Kansas", 100, 4),
Row("Kansas", 100, 4),
Row("Kansas", 90, 6)
)
if (params.useCopyUnload) {
// COPY UNLOAD doesn't support rank()/dense_rank(). Refer to SNOW-177604
// The COPY UNLOAD supported function list can be found at
// https://docs.snowflake.com/en/user-guide/data-load-transform.html#supported-functions
testPushdown(
s"""SELECT * FROM ( $test_table_rank ) AS "SF_CONNECTOR_QUERY_ALIAS"
|""".stripMargin,
resultDF,
expectedResult
)
} else {
testPushdown(
s"""SELECT ( "SUBQUERY_0"."STATE" ) AS "SUBQUERY_1_COL_0" ,
|( "SUBQUERY_0"."BUSHELS_PRODUCED" ) AS "SUBQUERY_1_COL_1" ,
|( RANK () OVER ( ORDER BY ( "SUBQUERY_0"."BUSHELS_PRODUCED" ) DESC
| ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW ) )
| AS "SUBQUERY_1_COL_2"
|FROM ( SELECT * FROM ( $test_table_rank )
| AS "SF_CONNECTOR_QUERY_ALIAS" ) AS "SUBQUERY_0"
|""".stripMargin,
resultDF,
expectedResult
)
}
}
test("test pushdown WindowExpression: Rank with PARTITION BY") {
jdbcUpdate(s"create or replace table $test_table_rank" +
s"(state String, bushels_produced Integer)")
jdbcUpdate(s"insert into $test_table_rank values" +
s"('Iowa', 130), ('Iowa', 120), ('Iowa', 120)," +
s"('Kansas', 100), ('Kansas', 100), ('Kansas', 90)")
val tmpDF = sparkSession.read
.format(SNOWFLAKE_SOURCE_NAME)
.options(thisConnectorOptionsNoTable)
.option("dbtable", test_table_rank)
.load()
tmpDF.printSchema()
tmpDF.createOrReplaceTempView("test_table_rank")
val resultDF =
sparkSession
.sql(s"select state, bushels_produced," +
" rank() over (partition by state " +
" order by bushels_produced desc) as group_rank" +
" from test_table_rank")
resultDF.show(10, false)
val expectedResult = Seq(
Row("Iowa", 130, 1),
Row("Iowa", 120, 2),
Row("Iowa", 120, 2),
Row("Kansas", 100, 1),
Row("Kansas", 100, 1),
Row("Kansas", 90, 3)
)
if (params.useCopyUnload) {
// COPY UNLOAD doesn't support rank()/dense_rank(). Refer to SNOW-177604
// The COPY UNLOAD supported function list can be found at
// https://docs.snowflake.com/en/user-guide/data-load-transform.html#supported-functions
testPushdown(
s"""SELECT * FROM ( $test_table_rank ) AS "SF_CONNECTOR_QUERY_ALIAS"
|""".stripMargin,
resultDF,
expectedResult
)
} else {
testPushdown(
s"""SELECT ( "SUBQUERY_0"."STATE" ) AS "SUBQUERY_1_COL_0" ,
|( "SUBQUERY_0"."BUSHELS_PRODUCED" ) AS "SUBQUERY_1_COL_1" ,
|( RANK () OVER ( PARTITION BY "SUBQUERY_0"."STATE"
| ORDER BY ( "SUBQUERY_0"."BUSHELS_PRODUCED" ) DESC
| ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW ) )
| AS "SUBQUERY_1_COL_2"
|FROM ( SELECT * FROM ( $test_table_rank )
| AS "SF_CONNECTOR_QUERY_ALIAS" ) AS "SUBQUERY_0"
|""".stripMargin,
resultDF,
expectedResult
)
}
}
test("test pushdown WindowExpression: DenseRank without PARTITION BY") {
jdbcUpdate(s"create or replace table $test_table_rank" +
s"(state String, bushels_produced Integer)")
jdbcUpdate(s"insert into $test_table_rank values" +
s"('Iowa', 130), ('Iowa', 120), ('Iowa', 120)," +
s"('Kansas', 100), ('Kansas', 100), ('Kansas', 90)")
val tmpDF = sparkSession.read
.format(SNOWFLAKE_SOURCE_NAME)
.options(thisConnectorOptionsNoTable)
.option("dbtable", test_table_rank)
.load()
tmpDF.printSchema()
tmpDF.createOrReplaceTempView("test_table_rank")
val resultDF =
sparkSession
.sql(s"select state, bushels_produced," +
" dense_rank() over (order by bushels_produced desc) as total_rank" +
" from test_table_rank")
resultDF.show(10, false)
val expectedResult = Seq(
Row("Iowa", 130, 1),
Row("Iowa", 120, 2),
Row("Iowa", 120, 2),
Row("Kansas", 100, 3),
Row("Kansas", 100, 3),
Row("Kansas", 90, 4)
)
if (params.useCopyUnload) {
// COPY UNLOAD doesn't support rank()/dense_rank(). Refer to SNOW-177604
// The COPY UNLOAD supported function list can be found at
// https://docs.snowflake.com/en/user-guide/data-load-transform.html#supported-functions
testPushdown(
s"""SELECT * FROM ( $test_table_rank ) AS "SF_CONNECTOR_QUERY_ALIAS"
|""".stripMargin,
resultDF,
expectedResult
)
} else {
testPushdown(
s"""SELECT ( "SUBQUERY_0"."STATE" ) AS "SUBQUERY_1_COL_0" ,
|( "SUBQUERY_0"."BUSHELS_PRODUCED" ) AS "SUBQUERY_1_COL_1" ,
|( DENSE_RANK () OVER ( ORDER BY ( "SUBQUERY_0"."BUSHELS_PRODUCED" ) DESC
| ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW ) )
| AS "SUBQUERY_1_COL_2"
|FROM ( SELECT * FROM ( $test_table_rank )
| AS "SF_CONNECTOR_QUERY_ALIAS" ) AS "SUBQUERY_0"
|""".stripMargin,
resultDF,
expectedResult
)
}
}
test("test pushdown WindowExpression: DenseRank with PARTITION BY") {
jdbcUpdate(s"create or replace table $test_table_rank" +
s"(state String, bushels_produced Integer)")
jdbcUpdate(s"insert into $test_table_rank values" +
s"('Iowa', 130), ('Iowa', 120), ('Iowa', 120)," +
s"('Kansas', 100), ('Kansas', 100), ('Kansas', 90)")
val tmpDF = sparkSession.read
.format(SNOWFLAKE_SOURCE_NAME)
.options(thisConnectorOptionsNoTable)
.option("dbtable", test_table_rank)
.load()
tmpDF.printSchema()
tmpDF.createOrReplaceTempView("test_table_rank")
val resultDF =
sparkSession
.sql(s"select state, bushels_produced," +
" dense_rank() over (partition by state " +
" order by bushels_produced desc) as group_rank" +
" from test_table_rank")
resultDF.show(10, false)
val expectedResult = Seq(
Row("Iowa", 130, 1),
Row("Iowa", 120, 2),
Row("Iowa", 120, 2),
Row("Kansas", 100, 1),
Row("Kansas", 100, 1),
Row("Kansas", 90, 2)
)
if (params.useCopyUnload) {
// COPY UNLOAD doesn't support rank()/dense_rank(). Refer to SNOW-177604
// The COPY UNLOAD supported function list can be found at
// https://docs.snowflake.com/en/user-guide/data-load-transform.html#supported-functions
testPushdown(
s"""SELECT * FROM ( $test_table_rank ) AS "SF_CONNECTOR_QUERY_ALIAS"
|""".stripMargin,
resultDF,
expectedResult
)
} else {
testPushdown(
s"""SELECT ( "SUBQUERY_0"."STATE" ) AS "SUBQUERY_1_COL_0" ,
|( "SUBQUERY_0"."BUSHELS_PRODUCED" ) AS "SUBQUERY_1_COL_1" ,
|( DENSE_RANK () OVER ( PARTITION BY "SUBQUERY_0"."STATE"
| ORDER BY ( "SUBQUERY_0"."BUSHELS_PRODUCED" ) DESC
| ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW ) )
| AS "SUBQUERY_1_COL_2"
|FROM ( SELECT * FROM ( $test_table_rank )
| AS "SF_CONNECTOR_QUERY_ALIAS" ) AS "SUBQUERY_0"
|
|""".stripMargin,
resultDF,
expectedResult
)
}
}
override def beforeEach(): Unit = {
super.beforeEach()
}
}
// scalastyle:on println
| snowflakedb/spark-snowflakedb | src/it/scala/net/snowflake/spark/snowflake/PushdownEnhancement02.scala | Scala | apache-2.0 | 20,373 |
package com.twitter.zk.coordination
import scala.jdk.CollectionConverters._
import org.apache.zookeeper.ZooDefs.Ids.OPEN_ACL_UNSAFE
import org.scalatestplus.mockito.MockitoSugar
import com.twitter.conversions.DurationOps._
import com.twitter.util.{Await, Future, JavaTimer}
import com.twitter.zk.{NativeConnector, RetryPolicy, ZkClient}
import org.scalatest.wordspec.AnyWordSpec
class ShardCoordinatorTest extends AnyWordSpec with MockitoSugar {
"ShardCoordinator" should {
val path = "/testing/twitter/service/charm/shards/test"
Option { System.getProperty("com.twitter.zk.TEST_CONNECT") } foreach { connectString =>
def withClient(f: (ZkClient) => Unit) = {
implicit val timer = new JavaTimer(true)
val connector = NativeConnector(connectString, 5.seconds, 10.minutes)
val zk = ZkClient(connector)
.withRetryPolicy(RetryPolicy.Basic(3))
.withAcl(OPEN_ACL_UNSAFE.asScala.toSeq)
Await.result(Future { f(zk) } ensure { zk.release })
}
def acquire(coord: ShardCoordinator) = {
coord.acquire within (new JavaTimer(true), 1.second)
}
"provide shards" in {
withClient { zk =>
val coord = new ShardCoordinator(zk, path, 5)
val shard0 = Await.result(acquire(coord))
assert(shard0.id == 0)
val shard1 = Await.result(acquire(coord))
assert(shard1.id == 1)
val shard2 = Await.result(acquire(coord))
assert(shard2.id == 2)
val shard3 = Await.result(acquire(coord))
assert(shard3.id == 3)
val shard4 = Await.result(acquire(coord))
assert(shard4.id == 4)
val fshard5 = acquire(coord)
assert(fshard5.isDefined == (false))
shard3.release
val shard5 = Await.result(fshard5)
assert(shard5.id == 3)
shard0.release
shard1.release
shard2.release
shard4.release
shard5.release
}
}
}
}
}
| twitter/util | util-zk/src/test/scala/com/twitter/zk/coordination/ShardCoordinatorTest.scala | Scala | apache-2.0 | 2,022 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package expr
import _root_.org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult
import com.intellij.psi._
import com.intellij.psi.search.GlobalSearchScope
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScSimpleTypeElement
import org.jetbrains.plugins.scala.lang.psi.types.result.{Success, TypingContext}
import org.jetbrains.plugins.scala.lang.psi.types.{ScParameterizedType, ScType}
/**
* @author Alexander Podkhalyuzin
* Date: 07.03.2008
*/
trait ScAnnotations extends ScalaPsiElement with PsiReferenceList {
def getReferenceElements = Array[PsiJavaCodeReferenceElement]()
def foldFuns(initial: Any)(fail: Any)(l: List[PartialFunction[Any, _]]): Any = l match {
case h :: t => if (h.isDefinedAt(initial)) foldFuns(h(initial))(fail)(t) else fail
case Nil => initial
}
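  // Illustrative sketch (for exposition only, not from the original source):
  // foldFuns threads `initial` through each partial function while it remains
  // defined, short-circuiting to `fail` otherwise, e.g.
  //
  //   foldFuns(2)(-1)(List(
  //     { case i: Int if i > 0 => i * 10 },
  //     { case i: Int => i + 1 }
  //   )) == 21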
// todo rewrite via continuations
private def getExceptionTypes: Array[PsiClassType] = {
val annotations = getAnnotations
annotations.map(extractExceptionType _).filter(_ != null)
}
private def extractExceptionType(a: ScAnnotation): PsiClassType = {
val constr = a.annotationExpr.constr
constr.typeElement match {
case te: ScSimpleTypeElement =>
te.reference match {
case Some(ref) =>
ref.bind() match {
case Some(r: ScalaResolveResult) if r.getActualElement.isInstanceOf[PsiClass] &&
r.getActualElement.asInstanceOf[PsiClass].qualifiedName == "scala.throws" =>
constr.args match {
case Some(args) if args.exprs.length == 1 =>
args.exprs(0).getType(TypingContext.empty) match {
case Success(ScParameterizedType(tp, arg), _) if arg.length == 1 =>
ScType.extractClass(tp, Some(getProject)) match {
case Some(clazz) if clazz.qualifiedName == "java.lang.Class" =>
ScType.extractClass(arg(0), Some(getProject)) match {
case Some(p) =>
JavaPsiFacade.getInstance(getProject).getElementFactory.
createTypeByFQClassName(p.qualifiedName, GlobalSearchScope.allScope(getProject))
case _ => null
}
case _ => null
}
case _ => null
}
case _ => null
}
case _ => null
}
case _ => null
}
case _ => null
}
}
def getReferencedTypes = getExceptionTypes
//todo return appropriate roles
def getRole = PsiReferenceList.Role.THROWS_LIST
def getAnnotations: Array[ScAnnotation]
} | LPTK/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScAnnotations.scala | Scala | apache-2.0 | 2,903 |
/**
 * Saves venue records as LZO-compressed protocol buffer blocks using Elephant Bird.
*/
package com.oreilly.learningsparkexamples.scala
import com.oreilly.learningsparkexamples.proto.Places
import org.apache.spark._
import org.apache.spark.SparkContext._
import org.apache.hadoop.io.Text
import com.twitter.elephantbird.mapreduce.io.ProtobufWritable
import com.twitter.elephantbird.mapreduce.output.LzoProtobufBlockOutputFormat
import org.apache.hadoop.conf.Configuration
object BasicSaveProtoBuf {
def main(args: Array[String]) {
val master = args(0)
val outputFile = args(1)
val sc = new SparkContext(master, "BasicSaveProtoBuf", System.getenv("SPARK_HOME"))
val conf = new Configuration()
LzoProtobufBlockOutputFormat.setClassConf(classOf[Places.Venue], conf);
val dnaLounge = Places.Venue.newBuilder()
dnaLounge.setId(1);
dnaLounge.setName("DNA Lounge")
dnaLounge.setType(Places.Venue.VenueType.CLUB)
val data = sc.parallelize(List(dnaLounge.build()))
val outputData = data.map{ pb =>
val protoWritable = ProtobufWritable.newInstance(classOf[Places.Venue]);
protoWritable.set(pb)
(null, protoWritable)
}
outputData.saveAsNewAPIHadoopFile(outputFile, classOf[Text], classOf[ProtobufWritable[Places.Venue]],
classOf[LzoProtobufBlockOutputFormat[ProtobufWritable[Places.Venue]]], conf)
}
}
| gaoxuesong/learning-spark | src/main/scala/com/oreilly/learningsparkexamples/scala/BasicSaveProtoBuf.scala | Scala | mit | 1,404 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.test.util
import java.util.{Locale, TimeZone}
import scala.collection.JavaConverters._
import org.apache.spark.sql.{DataFrame, Row, SQLContext}
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.test.TestQueryExecutor
import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
class QueryTest extends PlanTest {
val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
// Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
// Add Locale setting
Locale.setDefault(Locale.US)
/**
   * Runs the plan and makes sure the answer either contains all of the keywords
   * or contains none of them.
   * @param df the [[DataFrame]] to be executed
   * @param exists true to make sure the keywords are listed in the output, otherwise
   *               to make sure none of the keywords are listed in the output
   * @param keywords the keywords to look for, as a string array
*/
def checkExistence(df: DataFrame, exists: Boolean, keywords: String*) {
val outputs = df.collect().map(_.mkString).mkString
for (key <- keywords) {
if (exists) {
assert(outputs.contains(key), s"Failed for $df ($key doesn't exist in result)")
} else {
assert(!outputs.contains(key), s"Failed for $df ($key existed in the result)")
}
}
}
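  // Usage sketch (an assumption for exposition, not from the original source):
  //   checkExistence(sql("EXPLAIN SELECT * FROM t"), exists = true, "TableScan")
  // asserts that each keyword appears somewhere in the collected output rows.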
def sqlTest(sqlString: String, expectedAnswer: Seq[Row])(implicit sqlContext: SQLContext) {
test(sqlString) {
checkAnswer(sqlContext.sql(sqlString), expectedAnswer)
}
}
/**
* Runs the plan and makes sure the answer matches the expected result.
* @param df the [[DataFrame]] to be executed
* @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
*/
protected def checkAnswer(df: DataFrame, expectedAnswer: Seq[Row]): Unit = {
QueryTest.checkAnswer(df, expectedAnswer) match {
case Some(errorMessage) => fail(errorMessage)
case None =>
}
}
protected def checkAnswer(df: DataFrame, expectedAnswer: Row): Unit = {
checkAnswer(df, Seq(expectedAnswer))
}
protected def checkAnswer(df: DataFrame, expectedAnswer: DataFrame): Unit = {
checkAnswer(df, expectedAnswer.collect())
}
def sql(sqlText: String): DataFrame = TestQueryExecutor.INSTANCE.sql(sqlText)
val sqlContext: SQLContext = TestQueryExecutor.INSTANCE.sqlContext
lazy val storeLocation = CarbonProperties.getInstance().
getProperty(CarbonCommonConstants.STORE_LOCATION)
val resourcesPath = TestQueryExecutor.resourcesPath
val integrationPath = TestQueryExecutor.integrationPath
}
object QueryTest {
def checkAnswer(df: DataFrame, expectedAnswer: java.util.List[Row]): String = {
checkAnswer(df, expectedAnswer.asScala) match {
case Some(errorMessage) => errorMessage
case None => null
}
}
/**
* Runs the plan and makes sure the answer matches the expected result.
   * If there was an exception during the execution, or the contents of the DataFrame do not
   * match the expected result, an error message will be returned. Otherwise, [[None]] will
   * be returned.
* @param df the [[DataFrame]] to be executed
* @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
*/
def checkAnswer(df: DataFrame, expectedAnswer: Seq[Row]): Option[String] = {
val isSorted = df.logicalPlan.collect { case s: logical.Sort => s }.nonEmpty
def prepareAnswer(answer: Seq[Row]): Seq[Row] = {
// Converts data to types that we can do equality comparison using Scala collections.
// For BigDecimal type, the Scala type has a better definition of equality test (similar to
// Java's java.math.BigDecimal.compareTo).
// For binary arrays, we convert it to Seq to avoid of calling java.util.Arrays.equals for
// equality test.
val converted: Seq[Row] = answer.map { s =>
Row.fromSeq(s.toSeq.map {
case d: java.math.BigDecimal => BigDecimal(d)
case b: Array[Byte] => b.toSeq
case o => o
})
}
if (!isSorted) converted.sortBy(_.toString()) else converted
}
val sparkAnswer = try df.collect().toSeq catch {
case e: Exception =>
val errorMessage =
s"""
|Exception thrown while executing query:
|${df.queryExecution}
|== Exception ==
|$e
|${org.apache.spark.sql.catalyst.util.stackTraceToString(e)}
""".stripMargin
return Some(errorMessage)
}
if (prepareAnswer(expectedAnswer) != prepareAnswer(sparkAnswer)) {
val errorMessage =
s"""
|Results do not match for query:
|${df.queryExecution}
|== Results ==
|${
sideBySide(
s"== Correct Answer - ${expectedAnswer.size} ==" +:
prepareAnswer(expectedAnswer).map(_.toString()),
s"== Spark Answer - ${sparkAnswer.size} ==" +:
prepareAnswer(sparkAnswer).map(_.toString())).mkString("\n")
}
""".stripMargin
return Some(errorMessage)
}
return None
}
}
| HuaweiBigData/carbondata | integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala | Scala | apache-2.0 | 6,183 |
package akka.contrib.persistence.mongodb
import akka.actor.Props
import akka.persistence.query.EventEnvelope
import com.mongodb.DBObject
import com.mongodb.casbah.Imports._
object AllPersistenceIds {
def props(driver: CasbahMongoDriver): Props = Props(new AllPersistenceIds(driver))
}
class AllPersistenceIds(val driver: CasbahMongoDriver) extends SyncActorPublisher[String, Stream[String]] {
import CasbahSerializers._
override protected def initialCursor: Stream[String] =
driver.journal
.distinct(PROCESSOR_ID, MongoDBObject())
.toStream
.collect { case s:String => s }
override protected def next(c: Stream[String], atMost: Long): (Vector[String], Stream[String]) = {
val (buf,remainder) = c.splitAt(atMost.toIntWithoutWrapping)
(buf.toVector, remainder)
}
override protected def isCompleted(c: Stream[String]): Boolean = {
c.isEmpty
}
override protected def discard(c: Stream[String]): Unit = ()
}
object AllEvents {
def props(driver: CasbahMongoDriver): Props = Props(new AllEvents(driver))
}
class AllEvents(val driver: CasbahMongoDriver) extends SyncActorPublisher[EventEnvelope, Stream[EventEnvelope]] {
import CasbahSerializers._
override protected def initialCursor: Stream[EventEnvelope] =
driver.journal
.find(MongoDBObject())
.sort(MongoDBObject(PROCESSOR_ID -> 1, SEQUENCE_NUMBER -> 1))
.toStream
.flatMap(_.getAs[MongoDBList](EVENTS))
.flatMap(lst => lst.collect {case x:DBObject => x} )
.map(driver.deserializeJournal)
.zipWithIndex
.map { case(e,i) => e.toEnvelope(i) }
override protected def next(c: Stream[EventEnvelope], atMost: Long): (Vector[EventEnvelope], Stream[EventEnvelope]) = {
val (buf,remainder) = c.splitAt(atMost.toIntWithoutWrapping)
(buf.toVector, remainder)
}
override protected def isCompleted(c: Stream[EventEnvelope]): Boolean = c.isEmpty
override protected def discard(c: Stream[EventEnvelope]): Unit = ()
}
object EventsByPersistenceId {
def props(driver: CasbahMongoDriver, persistenceId: String, fromSeq: Long, toSeq: Long): Props =
Props(new EventsByPersistenceId(driver, persistenceId, fromSeq, toSeq))
}
class EventsByPersistenceId(val driver: CasbahMongoDriver, persistenceId: String, fromSeq: Long, toSeq: Long) extends SyncActorPublisher[EventEnvelope, Stream[EventEnvelope]] {
import CasbahSerializers._
override protected def initialCursor: Stream[EventEnvelope] =
driver.journal
.find((PROCESSOR_ID $eq persistenceId) ++ (FROM $gte fromSeq) ++ (FROM $lte toSeq))
.sort(MongoDBObject(PROCESSOR_ID -> 1, FROM -> 1))
.toStream
.flatMap(_.getAs[MongoDBList](EVENTS))
.flatMap(lst => lst.collect {case x:DBObject => x} )
.filter(dbo => dbo.getAs[Long](SEQUENCE_NUMBER).exists(sn => sn >= fromSeq && sn <= toSeq))
.map(driver.deserializeJournal)
.zipWithIndex
.map { case(e,i) => e.toEnvelope(i) }
override protected def next(c: Stream[EventEnvelope], atMost: Long): (Vector[EventEnvelope], Stream[EventEnvelope]) = {
val (buf,remainder) = c.splitAt(atMost.toIntWithoutWrapping)
(buf.toVector, remainder)
}
override protected def isCompleted(c: Stream[EventEnvelope]): Boolean = c.isEmpty
override protected def discard(c: Stream[EventEnvelope]): Unit = ()
}
class CasbahPersistenceReadJournaller(driver: CasbahMongoDriver) extends MongoPersistenceReadJournallingApi {
override def allPersistenceIds: Props = AllPersistenceIds.props(driver)
override def allEvents: Props = AllEvents.props(driver)
override def eventsByPersistenceId(persistenceId: String, fromSeq: Long, toSeq: Long): Props =
EventsByPersistenceId.props(driver,persistenceId,fromSeq,toSeq)
}
| tomzhang/akka-persistence-mongo | casbah/src/main/scala/akka/contrib/persistence/mongodb/CasbahPersistenceReadJournaller.scala | Scala | apache-2.0 | 3,789 |
package courier
import java.util.Properties
import javax.mail.Provider
import org.jvnet.mock_javamail.{Mailbox, MockTransport}
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
class MockedSMTPProvider extends Provider(Provider.Type.TRANSPORT, "mocked", classOf[MockTransport].getName, "Mock", null)
class MailSpec extends munit.FunSuite {
private val mockedSession = javax.mail.Session.getDefaultInstance(new Properties() {{
put("mail.transport.protocol.rfc822", "mocked")
}})
mockedSession.setProvider(new MockedSMTPProvider)
test("the mailer should send an email") {
val mailer = Mailer(mockedSession)
val future = mailer(Envelope.from("[email protected]".addr)
.to("[email protected]".addr)
.cc("[email protected]".addr)
.subject("miss you")
.content(Text("hi mom")))
Await.ready(future, 5.seconds)
val momsInbox = Mailbox.get("[email protected]")
assertEquals(momsInbox.size, 1)
val momsMsg = momsInbox.get(0)
assertEquals(momsMsg.getContent, "hi mom")
assertEquals(momsMsg.getSubject, "miss you")
}
} | softprops/courier | src/test/scala/mailspec.scala | Scala | mit | 1,163 |
package test
// Specs2
import org.specs2.mutable.Specification
import java.io.File
import com.gensler.scalavro.types.AvroType
import com.gensler.scalavro.io.AvroTypeIO
import scala.util.{ Try, Success, Failure }
import Direction._
import test._
class EnumSpec extends Specification {
"A generated case class with an `scala.Enumeration` field" should {
"serialize and deserialize correctly" in {
println(AvroType[Direction].schema)
val compassIO = AvroType[Compass].io
val outputStream = new java.io.ByteArrayOutputStream
val record = Compass(Direction.NORTH)
compassIO.write(record, outputStream)
val bytes = outputStream.toByteArray
val inputStream = new java.io.ByteArrayInputStream(bytes)
val Success(sameRecord) = compassIO.read(inputStream)
sameRecord must ===(record)
}
}
}
| ppearcy/avrohugger | avrohugger-core/src/sbt-test/projects/GenericSerializationTests/src/test/scala/scalavro/EnumSpec.scala | Scala | apache-2.0 | 872 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.util.{ArrayData, MapData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
/**
* A mutable wrapper that makes two rows appear as a single concatenated row. Designed to
* be instantiated once per thread and reused.
*/
class JoinedRow extends InternalRow {
private[this] var row1: InternalRow = _
private[this] var row2: InternalRow = _
def this(left: InternalRow, right: InternalRow) = {
this()
row1 = left
row2 = right
}
  /** Updates this JoinedRow to point at two new base rows. Returns itself. */
def apply(r1: InternalRow, r2: InternalRow): JoinedRow = {
row1 = r1
row2 = r2
this
}
/** Updates this JoinedRow by updating its left base row. Returns itself. */
def withLeft(newLeft: InternalRow): JoinedRow = {
row1 = newLeft
this
}
/** Updates this JoinedRow by updating its right base row. Returns itself. */
def withRight(newRight: InternalRow): JoinedRow = {
row2 = newRight
this
}
/** Gets this JoinedRow's left base row. */
def getLeft: InternalRow = {
row1
}
/** Gets this JoinedRow's right base row. */
def getRight: InternalRow = {
row2
}
override def toSeq(fieldTypes: Seq[DataType]): Seq[Any] = {
assert(fieldTypes.length == row1.numFields + row2.numFields)
val (left, right) = fieldTypes.splitAt(row1.numFields)
row1.toSeq(left) ++ row2.toSeq(right)
}
override def numFields: Int = row1.numFields + row2.numFields
override def get(i: Int, dt: DataType): AnyRef =
if (i < row1.numFields) row1.get(i, dt) else row2.get(i - row1.numFields, dt)
override def isNullAt(i: Int): Boolean =
if (i < row1.numFields) row1.isNullAt(i) else row2.isNullAt(i - row1.numFields)
override def getBoolean(i: Int): Boolean =
if (i < row1.numFields) row1.getBoolean(i) else row2.getBoolean(i - row1.numFields)
override def getByte(i: Int): Byte =
if (i < row1.numFields) row1.getByte(i) else row2.getByte(i - row1.numFields)
override def getShort(i: Int): Short =
if (i < row1.numFields) row1.getShort(i) else row2.getShort(i - row1.numFields)
override def getInt(i: Int): Int =
if (i < row1.numFields) row1.getInt(i) else row2.getInt(i - row1.numFields)
override def getLong(i: Int): Long =
if (i < row1.numFields) row1.getLong(i) else row2.getLong(i - row1.numFields)
override def getFloat(i: Int): Float =
if (i < row1.numFields) row1.getFloat(i) else row2.getFloat(i - row1.numFields)
override def getDouble(i: Int): Double =
if (i < row1.numFields) row1.getDouble(i) else row2.getDouble(i - row1.numFields)
override def getDecimal(i: Int, precision: Int, scale: Int): Decimal = {
if (i < row1.numFields) {
row1.getDecimal(i, precision, scale)
} else {
row2.getDecimal(i - row1.numFields, precision, scale)
}
}
override def getUTF8String(i: Int): UTF8String =
if (i < row1.numFields) row1.getUTF8String(i) else row2.getUTF8String(i - row1.numFields)
override def getBinary(i: Int): Array[Byte] =
if (i < row1.numFields) row1.getBinary(i) else row2.getBinary(i - row1.numFields)
override def getArray(i: Int): ArrayData =
if (i < row1.numFields) row1.getArray(i) else row2.getArray(i - row1.numFields)
override def getInterval(i: Int): CalendarInterval =
if (i < row1.numFields) row1.getInterval(i) else row2.getInterval(i - row1.numFields)
override def getMap(i: Int): MapData =
if (i < row1.numFields) row1.getMap(i) else row2.getMap(i - row1.numFields)
override def getStruct(i: Int, numFields: Int): InternalRow = {
if (i < row1.numFields) {
row1.getStruct(i, numFields)
} else {
row2.getStruct(i - row1.numFields, numFields)
}
}
override def anyNull: Boolean = row1.anyNull || row2.anyNull
override def setNullAt(i: Int): Unit = {
if (i < row1.numFields) {
row1.setNullAt(i)
} else {
row2.setNullAt(i - row1.numFields)
}
}
override def update(i: Int, value: Any): Unit = {
if (i < row1.numFields) {
row1.update(i, value)
} else {
row2.update(i - row1.numFields, value)
}
}
override def copy(): InternalRow = {
val copy1 = row1.copy()
val copy2 = row2.copy()
new JoinedRow(copy1, copy2)
}
override def toString: String = {
// Make sure toString never throws NullPointerException.
if ((row1 eq null) && (row2 eq null)) {
"[ empty row ]"
} else if (row1 eq null) {
row2.toString
} else if (row2 eq null) {
row1.toString
} else {
s"{${row1.toString} + ${row2.toString}}"
}
}
}
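// Illustrative usage sketch (not part of the original file; `leftIter` and `right`
// are hypothetical inputs): the row is allocated once per operator and re-pointed
// for every incoming pair rather than allocating a fresh row per match, e.g.
//   val joined = new JoinedRow
//   leftIter.map(left => joined(left, right))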
| ueshin/apache-spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/JoinedRow.scala | Scala | apache-2.0 | 5,578 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package java.lang
import scala.scalajs.js
import js.Dynamic.global
import js.typedarray
/** Manipulating the bits of floating point numbers. */
private[lang] object FloatingPointBits {
import scala.scalajs.LinkingInfo.assumingES6
private[this] val _areTypedArraysSupported = {
// Here we use `assumingES6` to dce the 4 subsequent tests
assumingES6 || {
js.typeOf(global.ArrayBuffer) != "undefined" &&
js.typeOf(global.Int32Array) != "undefined" &&
js.typeOf(global.Float32Array) != "undefined" &&
js.typeOf(global.Float64Array) != "undefined"
}
}
@inline
private def areTypedArraysSupported: scala.Boolean = {
/* We have a forwarder to the internal `val _areTypedArraysSupported` to
* be able to inline it. This achieves the following:
* * If we emit ES6, dce `|| _areTypedArraysSupported` and replace
* `areTypedArraysSupported` by `true` in the calling code, allowing
* polyfills in the calling code to be dce'ed in turn.
* * If we emit ES5, replace `areTypedArraysSupported` by
* `_areTypedArraysSupported` so we do not calculate it multiple times.
*/
assumingES6 || _areTypedArraysSupported
}
private val arrayBuffer =
if (areTypedArraysSupported) new typedarray.ArrayBuffer(8)
else null
private val int32Array =
if (areTypedArraysSupported) new typedarray.Int32Array(arrayBuffer, 0, 2)
else null
private val float32Array =
if (areTypedArraysSupported) new typedarray.Float32Array(arrayBuffer, 0, 2)
else null
private val float64Array =
if (areTypedArraysSupported) new typedarray.Float64Array(arrayBuffer, 0, 1)
else null
private val areTypedArraysBigEndian = {
if (areTypedArraysSupported) {
int32Array(0) = 0x01020304
(new typedarray.Int8Array(arrayBuffer, 0, 8))(0) == 0x01
} else {
true // as good a value as any
}
}
private val highOffset = if (areTypedArraysBigEndian) 0 else 1
private val lowOffset = if (areTypedArraysBigEndian) 1 else 0
/** Hash code of a number (excluding Longs).
*
* Because of the common encoding for integer and floating point values,
* the hashCode of Floats and Doubles must align with that of Ints for the
* common values.
*
* For other values, we use the hashCode specified by the JavaDoc for
* *Doubles*, even for values which are valid Float values. Because of the
* previous point, we cannot align completely with the Java specification,
* so there is no point trying to be a bit more aligned here. Always using
* the Double version should typically be faster on VMs without fround
* support because we avoid several fround operations.
*/
def numberHashCode(value: scala.Double): Int = {
val iv = rawToInt(value)
if (iv == value && 1.0/value != scala.Double.NegativeInfinity) iv
else doubleToLongBits(value).hashCode()
}
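  /* Illustrative (not in the original): integral doubles hash like Ints, e.g.
   * numberHashCode(42.0) == 42, whereas numberHashCode(1.5) falls back to
   * doubleToLongBits(1.5).hashCode().
   */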
def intBitsToFloat(bits: Int): scala.Float = {
if (areTypedArraysSupported) {
int32Array(0) = bits
float32Array(0)
} else {
intBitsToFloatPolyfill(bits).toFloat
}
}
def floatToIntBits(value: scala.Float): Int = {
if (areTypedArraysSupported) {
float32Array(0) = value
int32Array(0)
} else {
floatToIntBitsPolyfill(value.toDouble)
}
}
def longBitsToDouble(bits: scala.Long): scala.Double = {
if (areTypedArraysSupported) {
int32Array(highOffset) = (bits >>> 32).toInt
int32Array(lowOffset) = bits.toInt
float64Array(0)
} else {
longBitsToDoublePolyfill(bits)
}
}
def doubleToLongBits(value: scala.Double): scala.Long = {
if (areTypedArraysSupported) {
float64Array(0) = value
((int32Array(highOffset).toLong << 32) |
(int32Array(lowOffset).toLong & 0xffffffffL))
} else {
doubleToLongBitsPolyfill(value)
}
}
/* --- Polyfills for floating point bit manipulations ---
*
* Originally inspired by
* https://github.com/inexorabletash/polyfill/blob/a682f42c1092280bb01907c245979fb07219513d/typedarray.js#L150-L255
*
* Note that if typed arrays are not supported, it is almost certain that
* fround is not supported natively, so Float operations are extremely slow.
*
* We therefore do all computations in Doubles here, which is also more
* predictable, since the results do not depend on strict floats semantics.
*/
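  /* Worked example (illustrative, not in the original): 1.0f is encoded as
   * 0x3F800000 -- sign s = 0, biased exponent e = 127 (bias = 127 for ebits = 8),
   * fraction f = 0 -- so floatToIntBits(1.0f) == 0x3F800000 and
   * intBitsToFloat(0x3F800000) == 1.0f, with or without typed array support.
   */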
private def intBitsToFloatPolyfill(bits: Int): scala.Double = {
val ebits = 8
val fbits = 23
val s = bits < 0
val e = (bits >> fbits) & ((1 << ebits) - 1)
val f = bits & ((1 << fbits) - 1)
decodeIEEE754(ebits, fbits, s, e, f)
}
private def floatToIntBitsPolyfill(value: scala.Double): Int = {
val ebits = 8
val fbits = 23
val (s, e, f) = encodeIEEE754(ebits, fbits, value)
(if (s) 0x80000000 else 0) | (e << fbits) | rawToInt(f)
}
private def longBitsToDoublePolyfill(bits: scala.Long): scala.Double = {
import js.JSNumberOps._
val ebits = 11
val fbits = 52
val hifbits = fbits-32
val hi = (bits >>> 32).toInt
val lo = bits.toInt.toUint
val s = hi < 0
val e = (hi >> hifbits) & ((1 << ebits) - 1)
val f = (hi & ((1 << hifbits) - 1)).toDouble * 0x100000000L.toDouble + lo
decodeIEEE754(ebits, fbits, s, e, f)
}
private def doubleToLongBitsPolyfill(value: scala.Double): scala.Long = {
val ebits = 11
val fbits = 52
val hifbits = fbits-32
val (s, e, f) = encodeIEEE754(ebits, fbits, value)
val hif = rawToInt(f / 0x100000000L.toDouble)
val hi = (if (s) 0x80000000 else 0) | (e << hifbits) | hif
val lo = rawToInt(f)
(hi.toLong << 32) | (lo.toLong & 0xffffffffL)
}
@inline private def decodeIEEE754(ebits: Int, fbits: Int,
s: scala.Boolean, e: Int, f: scala.Double): scala.Double = {
import Math.pow
val bias = (1 << (ebits-1)) - 1 // constant
if (e == (1 << ebits) - 1) {
// Special
if (f != 0.0) scala.Double.NaN
else if (s) scala.Double.NegativeInfinity
else scala.Double.PositiveInfinity
} else if (e > 0) {
// Normalized
val x = pow(2, e-bias) * (1 + f / pow(2, fbits))
if (s) -x else x
} else if (f != 0.0) {
// Subnormal
val x = pow(2, -(bias-1)) * (f / pow(2, fbits))
if (s) -x else x
} else {
// Zero
if (s) -0.0 else 0.0
}
}
@inline private def encodeIEEE754(ebits: Int, fbits: Int,
v: scala.Double): (scala.Boolean, Int, scala.Double) = {
import Math._
val bias = (1 << (ebits-1)) - 1 // constant
if (v.isNaN) {
// http://dev.w3.org/2006/webapi/WebIDL/#es-type-mapping
(false, (1 << ebits) - 1, pow(2, fbits-1))
} else if (v.isInfinite) {
(v < 0, (1 << ebits) - 1, 0.0)
} else if (v == 0.0) {
(1 / v == scala.Double.NegativeInfinity, 0, 0.0)
} else {
val LN2 = 0.6931471805599453
val s = v < 0
val av = if (s) -v else v
if (av >= pow(2, 1-bias)) {
val twoPowFbits = pow(2, fbits)
var e = min(rawToInt(floor(log(av) / LN2)), 1023)
var twoPowE = pow(2, e)
/* #2911: When av is very close under a power of 2 (e.g.,
* 9007199254740991.0 == 2^53 - 1), `log(av) / LN2` will already round
* *up* to an `e` which is 1 too much. The `floor()` afterwards comes
* too late to fix that.
* We now decrement `e` if it ends up being too big.
*/
if (twoPowE > av) {
e -= 1
twoPowE /= 2
}
var f = roundToEven(av / twoPowE * twoPowFbits)
if (f / twoPowFbits >= 2) {
e = e + 1
f = 1
}
if (e > bias) {
// Overflow
e = (1 << ebits) - 1
f = 0
} else {
// Normalized
e = e + bias
f = f - twoPowFbits
}
(s, e, f)
} else {
// Subnormal
(s, 0, roundToEven(av / pow(2, 1-bias-fbits)))
}
}
}
@inline private def rawToInt(x: scala.Double): Int =
(x.asInstanceOf[js.Dynamic] | 0.asInstanceOf[js.Dynamic]).asInstanceOf[Int]
@inline private def roundToEven(n: scala.Double): scala.Double = {
val w = Math.floor(n)
val f = n - w
if (f < 0.5) w
else if (f > 0.5) w + 1
else if (w % 2 != 0) w + 1
else w
}
}
| nicolasstucki/scala-js | javalanglib/src/main/scala/java/lang/FloatingPointBits.scala | Scala | apache-2.0 | 8,623 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.bridge.scala.internal
import org.apache.flink.annotation.Internal
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.internal.AbstractStreamTableEnvironmentImpl
import org.apache.flink.table.api.bridge.scala.{StreamStatementSet, StreamTableEnvironment}
import org.apache.flink.table.catalog._
import org.apache.flink.table.connector.ChangelogMode
import org.apache.flink.table.delegation.{Executor, Planner}
import org.apache.flink.table.expressions.Expression
import org.apache.flink.table.factories.PlannerFactoryUtil
import org.apache.flink.table.functions.{AggregateFunction, TableAggregateFunction, TableFunction, UserDefinedFunctionHelper}
import org.apache.flink.table.module.ModuleManager
import org.apache.flink.table.operations._
import org.apache.flink.table.sources.{TableSource, TableSourceValidation}
import org.apache.flink.table.types.AbstractDataType
import org.apache.flink.table.types.utils.TypeConversions
import org.apache.flink.types.Row
import org.apache.flink.util.Preconditions
import java.util.Optional
import scala.collection.JavaConverters._
/**
* The implementation for a Scala [[StreamTableEnvironment]]. This enables conversions from/to
* [[DataStream]]. It is bound to a given [[StreamExecutionEnvironment]].
*/
@Internal
class StreamTableEnvironmentImpl (
catalogManager: CatalogManager,
moduleManager: ModuleManager,
functionCatalog: FunctionCatalog,
config: TableConfig,
scalaExecutionEnvironment: StreamExecutionEnvironment,
planner: Planner,
executor: Executor,
isStreaming: Boolean,
userClassLoader: ClassLoader)
extends AbstractStreamTableEnvironmentImpl(
catalogManager,
moduleManager,
config,
executor,
functionCatalog,
planner,
isStreaming,
userClassLoader,
scalaExecutionEnvironment.getWrappedStreamExecutionEnvironment)
with StreamTableEnvironment {
override def fromDataStream[T](dataStream: DataStream[T]): Table = {
Preconditions.checkNotNull(dataStream, "Data stream must not be null.")
fromStreamInternal(dataStream.javaStream, null, null, ChangelogMode.insertOnly())
}
override def fromDataStream[T](dataStream: DataStream[T], schema: Schema): Table = {
Preconditions.checkNotNull(dataStream, "Data stream must not be null.")
Preconditions.checkNotNull(schema, "Schema must not be null.")
fromStreamInternal(dataStream.javaStream, schema, null, ChangelogMode.insertOnly())
}
override def fromChangelogStream(dataStream: DataStream[Row]): Table = {
Preconditions.checkNotNull(dataStream, "Data stream must not be null.")
fromStreamInternal(dataStream.javaStream, null, null, ChangelogMode.all())
}
override def fromChangelogStream(dataStream: DataStream[Row], schema: Schema): Table = {
Preconditions.checkNotNull(dataStream, "Data stream must not be null.")
Preconditions.checkNotNull(schema, "Schema must not be null.")
fromStreamInternal(dataStream.javaStream, schema, null, ChangelogMode.all())
}
override def fromChangelogStream(
dataStream: DataStream[Row],
schema: Schema,
changelogMode: ChangelogMode)
: Table = {
Preconditions.checkNotNull(dataStream, "Data stream must not be null.")
Preconditions.checkNotNull(schema, "Schema must not be null.")
fromStreamInternal(dataStream.javaStream, schema, null, changelogMode)
}
override def createTemporaryView[T](
path: String,
dataStream: DataStream[T]): Unit = {
Preconditions.checkNotNull(dataStream, "Data stream must not be null.")
createTemporaryView(
path,
fromStreamInternal(dataStream.javaStream, null, path, ChangelogMode.insertOnly()))
}
override def createTemporaryView[T](
path: String,
dataStream: DataStream[T],
schema: Schema): Unit = {
Preconditions.checkNotNull(dataStream, "Data stream must not be null.")
Preconditions.checkNotNull(schema, "Schema must not be null.")
createTemporaryView(
path,
fromStreamInternal(dataStream.javaStream, schema, path, ChangelogMode.insertOnly()))
}
override def toDataStream(table: Table): DataStream[Row] = {
Preconditions.checkNotNull(table, "Table must not be null.")
// include all columns of the query (incl. metadata and computed columns)
val sourceType = table.getResolvedSchema.toSourceRowDataType
toDataStream(table, sourceType)
}
override def toDataStream[T](table: Table, targetClass: Class[T]): DataStream[T] = {
Preconditions.checkNotNull(table, "Table must not be null.")
Preconditions.checkNotNull(targetClass, "Target class must not be null.")
if (targetClass == classOf[Row]) {
// for convenience, we allow the Row class here as well
return toDataStream(table).asInstanceOf[DataStream[T]]
}
toDataStream(table, DataTypes.of(targetClass))
}
override def toDataStream[T](table: Table, targetDataType: AbstractDataType[_]): DataStream[T] = {
Preconditions.checkNotNull(table, "Table must not be null.")
Preconditions.checkNotNull(targetDataType, "Target data type must not be null.")
val schemaTranslationResult = SchemaTranslator.createProducingResult(
catalogManager.getDataTypeFactory,
table.getResolvedSchema,
targetDataType)
new DataStream[T](toStreamInternal(table, schemaTranslationResult, ChangelogMode.insertOnly()))
}
override def toChangelogStream(table: Table): DataStream[Row] = {
Preconditions.checkNotNull(table, "Table must not be null.")
val schemaTranslationResult = SchemaTranslator.createProducingResult(
table.getResolvedSchema,
null)
new DataStream[Row](toStreamInternal(table, schemaTranslationResult, null))
}
override def toChangelogStream(table: Table, targetSchema: Schema): DataStream[Row] = {
Preconditions.checkNotNull(table, "Table must not be null.")
Preconditions.checkNotNull(targetSchema, "Target schema must not be null.")
val schemaTranslationResult = SchemaTranslator.createProducingResult(
table.getResolvedSchema,
targetSchema)
new DataStream[Row](toStreamInternal(table, schemaTranslationResult, null))
}
override def toChangelogStream(
table: Table,
targetSchema: Schema,
changelogMode: ChangelogMode)
: DataStream[Row] = {
Preconditions.checkNotNull(table, "Table must not be null.")
Preconditions.checkNotNull(targetSchema, "Target schema must not be null.")
Preconditions.checkNotNull(changelogMode, "Changelog mode must not be null.")
val schemaTranslationResult = SchemaTranslator.createProducingResult(
table.getResolvedSchema,
targetSchema)
new DataStream[Row](toStreamInternal(table, schemaTranslationResult, changelogMode))
}
override def createStatementSet(): StreamStatementSet = {
new StreamStatementSetImpl(this)
}
override def fromDataStream[T](dataStream: DataStream[T], fields: Expression*): Table = {
val queryOperation = asQueryOperation(dataStream.javaStream, Optional.of(fields.toList.asJava))
createTable(queryOperation)
}
override def registerDataStream[T](name: String, dataStream: DataStream[T]): Unit = {
registerTable(name, fromDataStream(dataStream))
}
override def registerDataStream[T](
name: String,
dataStream: DataStream[T],
fields: Expression*)
: Unit = {
registerTable(name, fromDataStream(dataStream, fields: _*))
}
override def toAppendStream[T: TypeInformation](table: Table): DataStream[T] = {
val returnType = createTypeInformation[T]
val modifyOperation = new OutputConversionModifyOperation(
table.getQueryOperation,
TypeConversions.fromLegacyInfoToDataType(returnType),
OutputConversionModifyOperation.UpdateMode.APPEND)
new DataStream[T](toStreamInternal[T](table, modifyOperation))
}
override def toRetractStream[T: TypeInformation](table: Table): DataStream[(Boolean, T)] = {
val returnType = createTypeInformation[(Boolean, T)]
val modifyOperation = new OutputConversionModifyOperation(
table.getQueryOperation,
TypeConversions.fromLegacyInfoToDataType(returnType),
OutputConversionModifyOperation.UpdateMode.RETRACT)
new DataStream[(Boolean, T)](toStreamInternal(table, modifyOperation))
}
override def registerFunction[T: TypeInformation](name: String, tf: TableFunction[T]): Unit = {
val typeInfo = UserDefinedFunctionHelper
.getReturnTypeOfTableFunction(tf, implicitly[TypeInformation[T]])
functionCatalog.registerTempSystemTableFunction(
name,
tf,
typeInfo
)
}
override def registerFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
f: AggregateFunction[T, ACC])
: Unit = {
val typeInfo = UserDefinedFunctionHelper
.getReturnTypeOfAggregateFunction(f, implicitly[TypeInformation[T]])
val accTypeInfo = UserDefinedFunctionHelper
.getAccumulatorTypeOfAggregateFunction(f, implicitly[TypeInformation[ACC]])
functionCatalog.registerTempSystemAggregateFunction(
name,
f,
typeInfo,
accTypeInfo
)
}
override def registerFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
f: TableAggregateFunction[T, ACC])
: Unit = {
val typeInfo = UserDefinedFunctionHelper
.getReturnTypeOfAggregateFunction(f, implicitly[TypeInformation[T]])
val accTypeInfo = UserDefinedFunctionHelper
.getAccumulatorTypeOfAggregateFunction(f, implicitly[TypeInformation[ACC]])
functionCatalog.registerTempSystemAggregateFunction(
name,
f,
typeInfo,
accTypeInfo
)
}
override protected def validateTableSource(tableSource: TableSource[_]): Unit = {
super.validateTableSource(tableSource)
// check that event-time is enabled if table source includes rowtime attributes
if (TableSourceValidation.hasRowtimeAttribute(tableSource) &&
scalaExecutionEnvironment.getStreamTimeCharacteristic != TimeCharacteristic.EventTime) {
throw new TableException(String.format(
"A rowtime attribute requires an EventTime time characteristic in stream " +
"environment. But is: %s}", scalaExecutionEnvironment.getStreamTimeCharacteristic))
}
}
override def createTemporaryView[T](
path: String,
dataStream: DataStream[T],
fields: Expression*): Unit = {
createTemporaryView(path, fromDataStream(dataStream, fields: _*))
}
}
object StreamTableEnvironmentImpl {
def create(
executionEnvironment: StreamExecutionEnvironment,
settings: EnvironmentSettings,
tableConfig: TableConfig)
: StreamTableEnvironmentImpl = {
// temporary solution until FLINK-15635 is fixed
val classLoader = Thread.currentThread.getContextClassLoader
val moduleManager = new ModuleManager
val catalogManager = CatalogManager.newBuilder
.classLoader(classLoader)
.config(tableConfig.getConfiguration)
.defaultCatalog(
settings.getBuiltInCatalogName,
new GenericInMemoryCatalog(
settings.getBuiltInCatalogName,
settings.getBuiltInDatabaseName))
.executionConfig(executionEnvironment.getConfig)
.build
val functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager)
val executor = AbstractStreamTableEnvironmentImpl.lookupExecutor(
classLoader, settings.getExecutor, executionEnvironment.getWrappedStreamExecutionEnvironment)
val planner = PlannerFactoryUtil.createPlanner(settings.getPlanner, executor, tableConfig,
moduleManager, catalogManager, functionCatalog)
new StreamTableEnvironmentImpl(
catalogManager,
moduleManager,
functionCatalog,
tableConfig,
executionEnvironment,
planner,
executor,
settings.isStreamingMode,
classLoader
)
}
}
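// Illustrative usage sketch, not part of the original file. Assumptions: a Scala
// StreamExecutionEnvironment `env` and a case class `Order(user: Long, product: String)`.
//   val tableEnv = StreamTableEnvironment.create(env)
//   val orders = tableEnv.fromDataStream(env.fromElements(Order(1L, "beer")))
//   tableEnv.toDataStream(orders).print()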
| lincoln-lil/flink | flink-table/flink-table-api-scala-bridge/src/main/scala/org/apache/flink/table/api/bridge/scala/internal/StreamTableEnvironmentImpl.scala | Scala | apache-2.0 | 12,967 |
package sp.system
import akka.actor.{ ExtendedActorSystem, Extension, ExtensionKey }
object SPSettings extends ExtensionKey[SPSettings]
/**
* The settings for Sequence planner read from the config file:
* core settings are read into predfined vals, but if you add
* your own settings, access them via
*/
class SPSettings(system: ExtendedActorSystem) extends Extension {
/**
* Return Config to access settings not defined in this file. See com.typesafe.config
* Core SP configs should be defined as vals instead
*/
val config = system.settings.config
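  // Illustrative (not in the original): a custom entry such as `sp.my.timeout = 30`
  // in the config file would be read here as `config getInt "sp.my.timeout"`.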
/**
* The network interface the SP gui gets bound to, e.g. `"localhost"`.
*/
val interface: String = system.settings.config getString "sp.interface"
/**
   * The port the SP gui gets bound to, e.g. `8080`.
*/
val port: Int = system.settings.config getInt "sp.port"
val webFolder: String = system.settings.config getString "sp.webFolder"
val devFolder: String = system.settings.config getString "sp.devFolder"
val buildFolder: String = system.settings.config getString "sp.buildFolder"
val devMode: Boolean = system.settings.config getBoolean "sp.devMode"
val activeMQ: String = system.settings.config getString "sp.activeMQ"
val activeMQPort: Int = system.settings.config getInt "sp.activeMQPort"
val activeMQTopic: String = system.settings.config getString "sp.activeMQTopic"
val rcaEmitFakeEvents = system.settings.config getBoolean "sp.robotCycleAnalysis.emitFakeEvents"
} | kristoferB/SP | sp1/src/main/scala/sp/system/SPSettings.scala | Scala | mit | 1,494 |
package controllers.s_your_partner
import controllers.s_about_you.GYourDetails._
import models.{NationalInsuranceNumber, DayMonthYear}
import play.api.Play._
import play.api.data.validation.{Valid, ValidationError, Invalid, Constraint}
import gov.dwp.carers.xml.validation.CommonValidation
import language.reflectiveCalls
import play.api.data.{FormError, Form}
import play.api.data.Forms.mapping
import play.api.data.Forms.nonEmptyText
import play.api.data.Forms.text
import play.api.data.Forms.optional
import play.api.mvc.{Request, AnyContent, Controller, Action}
import controllers.mappings.Mappings._
import controllers.mappings.NINOMappings._
import models.domain._
import models.view.{Navigable, CachedClaim}
import utils.helpers.CarersForm.formBinding
import YourPartner.presentConditionally
import controllers.CarersForms._
import models.view.ClaimHandling.ClaimResult
import controllers.mappings.Mappings
import play.api.i18n._
object GYourPartnerPersonalDetails extends Controller with CachedClaim with Navigable with I18nSupport {
  override val messagesApi: MessagesApi = current.injector.instanceOf[MessagesApi]
def form(implicit claim: Claim, request: Request[AnyContent]): Form[YourPartnerPersonalDetails] = Form(mapping(
"title" -> optional(carersNonEmptyText(maxLength = Mappings.twenty)),
"firstName" -> optional(nonEmptyText(maxLength = CommonValidation.FIRSTNAME_MAX_LENGTH).verifying(YourDetails.validName)),
"middleName" -> optional(text(maxLength = CommonValidation.MIDDLENAME_MAX_LENGTH).verifying(YourDetails.validName)),
"surname" -> optional(nonEmptyText(maxLength = CommonValidation.SURNAME_MAX_LENGTH).verifying(YourDetails.validName)),
"otherNames" -> optional(text(maxLength = CommonValidation.SURNAME_MAX_LENGTH).verifying(YourDetails.validName)),
"nationalInsuranceNumber" -> optional(nino.verifying(stopOnFirstFail (validNino, isSameNinoAsDPOrPartner))),
"dateOfBirth" -> optional(dayMonthYear.verifying(validDateOfBirth)),
"partner.nationality" -> optional(carersNonEmptyText(maxLength = CommonValidation.NATIONALITY_MAX_LENGTH)),
"separated.fromPartner" -> optional(nonEmptyText.verifying(validYesNo)),
"isPartnerPersonYouCareFor" -> optional(nonEmptyText.verifying(validYesNo)),
"hadPartnerSinceClaimDate" -> nonEmptyText.verifying(validYesNo)
)(YourPartnerPersonalDetails.apply)(YourPartnerPersonalDetails.unapply)
.verifying("title.required", YourPartnerPersonalDetails.validateTitle _)
.verifying("firstName.required", YourPartnerPersonalDetails.validateFirstName _)
.verifying("surname.required", YourPartnerPersonalDetails.validateSurName _)
.verifying("dateOfBirth.required", YourPartnerPersonalDetails.validateDateOfBirth _)
.verifying("separated.fromPartner.required", YourPartnerPersonalDetails.validateSeperatedFromPartner _)
.verifying("isPartnerPersonYouCareFor.required", YourPartnerPersonalDetails.validatePartnerPersonYoucareFor _)
.verifying("partner.nationality.required", YourPartnerPersonalDetails.validateNationalityIfPresent _)
)
def present: Action[AnyContent] = claimingWithCheck { implicit claim => implicit request => implicit request2lang =>
presentConditionally(yourPartnerPersonalDetails)
}
private def yourPartnerPersonalDetails(implicit claim: Claim, request: Request[AnyContent]): ClaimResult = {
track(YourPartnerPersonalDetails) { implicit claim => Ok(views.html.s_your_partner.g_yourPartnerPersonalDetails(form.fill(YourPartnerPersonalDetails))) }
}
def submit: Action[AnyContent] = claimingWithCheck { implicit claim => implicit request => implicit request2lang =>
form.bindEncrypted.fold(
formWithErrors => {
val formWithErrorsUpdate = formWithErrors
.replaceError("", "title.required", FormError("title", errorRequired))
.replaceError("", "firstName.required", FormError("firstName", errorRequired))
.replaceError("", "surname.required", FormError("surname", errorRequired))
.replaceError("", "dateOfBirth.required", FormError("dateOfBirth", errorRequired))
.replaceError("", "separated.fromPartner.required", FormError("separated.fromPartner", errorRequired))
.replaceError("", "isPartnerPersonYouCareFor.required", FormError("isPartnerPersonYouCareFor", errorRequired))
.replaceError("", "partner.nationality.required", FormError("partner.nationality", errorRequired))
BadRequest(views.html.s_your_partner.g_yourPartnerPersonalDetails(formWithErrorsUpdate))
},
f => {
val preUpdatedClaim = clearTheirPersonalDetailsIfPartnerQuestionChanged(claim, f)
val updatedDpClaim = updateDpDetails(preUpdatedClaim, f)
updatedDpClaim.update(f) -> Redirect(controllers.s_care_you_provide.routes.GTheirPersonalDetails.present())
}
)
}.withPreviewConditionally(goToPreviewCondition)
private def goToPreviewCondition(details: (Option[YourPartnerPersonalDetails], YourPartnerPersonalDetails), c: (Option[Claim], Claim)) = {
val currentClaim = c._2
val previewData = details._1
val formData = details._2
val matchingValue = previewData -> formData.isPartnerPersonYouCareFor
currentClaim.questionGroup[TheirPersonalDetails] match {
case None => false
case _ =>
matchingValue match {
case (Some(data), Some(isPartnerPerson))
if data.isPartnerPersonYouCareFor.nonEmpty && data.isPartnerPersonYouCareFor.get != isPartnerPerson => false
case (Some(data), None)
if data.isPartnerPersonYouCareFor.nonEmpty => false
case (Some(YourPartnerPersonalDetails(_, _, _, _, _, _, _, _, _, None, _)), Some(_)) => false
case _ => true
}
}
}
def clearTheirPersonalDetailsIfPartnerQuestionChanged(claim: Claim, formData: YourPartnerPersonalDetails) = {
claim.questionGroup[YourPartnerPersonalDetails] -> claim.questionGroup[TheirPersonalDetails] match {
case (Some(oldData), Some(theirPersonalDetails)) =>
val tupleData = (oldData.hadPartnerSinceClaimDate -> formData.hadPartnerSinceClaimDate) ->
(oldData.isPartnerPersonYouCareFor -> formData.isPartnerPersonYouCareFor)
tupleData match {
case (("no", "yes"), (None, Some("no"))) =>
//This case is when we change the partner question from no -> yes and we specify the DP is not our partner
//In this case we don't want to wipe the data
claim
case ((oldQ, newQ), _) if oldQ != newQ =>
wipeTheirPersonalDetailsData(claim, theirPersonalDetails)
case (_, (Some(oldQ), Some(newQ))) if oldQ != newQ =>
wipeTheirPersonalDetailsData(claim, theirPersonalDetails)
case _ => claim
}
case _ => claim
}
}
def wipeTheirPersonalDetailsData(claim: Claim, theirPersonalDetails: TheirPersonalDetails) = {
claim.delete(TheirPersonalDetails)
}
def updateDpDetails(claim: Claim, formData: YourPartnerPersonalDetails) = {
    formData.isPartnerPersonYouCareFor match {
      case Some("yes") =>
        val dp = claim.questionGroup[TheirPersonalDetails].getOrElse(TheirPersonalDetails())
        val newDp = dp.copy(
          title = formData.title.getOrElse(""),
          firstName = formData.firstName.getOrElse(""),
          middleName = formData.middleName,
          surname = formData.surname.getOrElse(""),
          nationalInsuranceNumber = formData.nationalInsuranceNumber,
          dateOfBirth = formData.dateOfBirth.getOrElse(DayMonthYear(1, 1, 1900))
        )
        claim.update(newDp)
      case _ => claim
    }
}
private def isSameNinoAsDPOrPartner(implicit request: Request[AnyContent]): Constraint[NationalInsuranceNumber] = Constraint[NationalInsuranceNumber]("constraint.nino") {
case nino@NationalInsuranceNumber(Some(_)) => checkSameValues(nino.nino.get.toUpperCase.replace(" ", ""), request)
case _ => Invalid(ValidationError("error.nationalInsuranceNumber"))
}
private def checkSameValues(nino: String, request: Request[AnyContent]) = {
val claim = fromCache(request).getOrElse(new Claim("xxxx"))
val theirPersonalDetails = claim.questionGroup[TheirPersonalDetails].getOrElse(TheirPersonalDetails())
val partnerDetails = claim.questionGroup[YourPartnerPersonalDetails].getOrElse(YourPartnerPersonalDetails())
val yourDetails = claim.questionGroup[YourDetails].getOrElse(YourDetails())
if (yourNINO(yourDetails) == nino) Invalid(ValidationError("error.you.and.partner.nationalInsuranceNumber", yourName(yourDetails), pageName(request)))
else if (dpNINO(theirPersonalDetails) == nino && getValueFromRequest(request, "isPartnerPersonYouCareFor") == Mappings.no
&& partnerDetails.isPartnerPersonYouCareFor.getOrElse(Mappings.no) == Mappings.no) Invalid(ValidationError("error.dp.and.partner.nationalInsuranceNumber", dpName(theirPersonalDetails), pageName(request)))
else Valid
}
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/app/controllers/s_your_partner/GYourPartnerPersonalDetails.scala | Scala | mit | 8,979 |
package org.bitcoins.dlc
import org.bitcoins.core.protocol.tlv.OracleParamsV0TLV
import org.bitcoins.testkitcore.dlc.DLCTest
import org.bitcoins.testkitcore.util.BitcoinSJvmTest
class DLCAdaptorPointComputerTest extends BitcoinSJvmTest with DLCTest {
behavior of "DLCAdaptorPointComputer"
it should "compute sigpoints correctly" in {
runTestsForParam(Vector(4, 6, 8)) { numDigitsOrOutcomes =>
runTestsForParam(Vector(true, false)) { isNumeric =>
runTestsForParam(Vector((1, 1), (2, 3), (3, 5))) {
case (threshold, numOracles) =>
runTestsForParam(
Vector(None,
Some(
OracleParamsV0TLV(numDigitsOrOutcomes / 2 + 1,
numDigitsOrOutcomes / 2,
maximizeCoverage = true)))) {
oracleParams =>
val contractParams = SingleContractParams(numDigitsOrOutcomes,
isNumeric,
threshold,
numOracles,
oracleParams)
val (client, _, _) = constructDLCClients(contractParams)
val contract = client.offer.contractInfo
val outcomes = contract.allOutcomes
val adaptorPoints = contract.adaptorPoints
val expectedAdaptorPoints = outcomes.map(_.sigPoint)
assert(adaptorPoints == expectedAdaptorPoints)
}
}
}
}
}
}
| bitcoin-s/bitcoin-s | dlc-test/src/test/scala/org/bitcoins/dlc/DLCAdaptorPointComputerTest.scala | Scala | mit | 1,663 |
package models
import java.sql.Date.{valueOf => date}
import java.sql.Connection
case class CreateTax(taxType: Int, localeId: Long, name: String, rate: BigDecimal) {
def save()(
implicit conn: Connection,
taxRepo: TaxRepo,
taxNameRepo: TaxNameRepo,
localeInfoRepo: LocaleInfoRepo,
taxHistoryRepo: TaxHistoryRepo
): Unit = {
val tax = taxRepo.createNew
taxNameRepo.createNew(tax, localeInfoRepo(localeId), name)
taxHistoryRepo.createNew(tax, TaxType.byIndex(taxType), rate, Until.EverInstant)
}
}
| ruimo/store2 | app/models/CreateTax.scala | Scala | apache-2.0 | 537 |
package org.jetbrains.plugins.scala
package lang.refactoring.rename.inplace
import java.util
import com.intellij.lang.Language
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.{Pair, TextRange}
import com.intellij.psi.search.LocalSearchScope
import com.intellij.psi.{PsiDocumentManager, PsiElement, PsiFile, PsiNamedElement}
import com.intellij.refactoring.RefactoringActionHandler
import com.intellij.refactoring.rename.inplace.VariableInplaceRenamer
import com.intellij.refactoring.util.TextOccurrencesUtil.processUsagesInStringsAndComments
import org.jetbrains.annotations.NotNull
import org.jetbrains.plugins.scala.extensions.ObjectExt
import org.jetbrains.plugins.scala.lang.refactoring.ScalaNamesValidator
import org.jetbrains.plugins.scala.lang.refactoring.rename.ScalaRenameUtil
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
/**
* Nikolay.Tropin
* 1/20/14
*/
class ScalaLocalInplaceRenamer(elementToRename: PsiNamedElement, editor: Editor, project: Project, initialName: String, oldName: String)
extends VariableInplaceRenamer(elementToRename, editor, project, initialName, oldName) {
private val elementRange = editor.getDocument.createRangeMarker(elementToRename.getTextRange)
def this(@NotNull elementToRename: PsiNamedElement, editor: Editor) =
this(elementToRename, editor, elementToRename.getProject,
ScalaNamesUtil.scalaName(elementToRename), ScalaNamesUtil.scalaName(elementToRename))
override def collectAdditionalElementsToRename(stringUsages: util.List[Pair[PsiElement, TextRange]]): Unit = {
val stringToSearch: String = ScalaNamesUtil.scalaName(elementToRename)
if (stringToSearch == null)
return
val localScope =
currentFile.map(new LocalSearchScope(_))
.getOrElse(LocalSearchScope.EMPTY)
processUsagesInStringsAndComments(elementToRename, localScope, stringToSearch, true,
(psiElement: PsiElement, textRange: TextRange) => {
stringUsages.add(Pair.create(psiElement, textRange))
true
}
)
}
override def isIdentifier(newName: String, language: Language): Boolean =
ScalaNamesValidator.isIdentifier(newName)
override def startsOnTheSameElement(handler: RefactoringActionHandler, element: PsiElement): Boolean = {
handler match {
case _: ScalaLocalInplaceRenameHandler => ScalaRenameUtil.sameElement(elementRange, element)
case _ => false
}
}
override def checkLocalScope(): PsiElement =
currentFile.getOrElse(super.checkLocalScope())
private def currentFile: Option[PsiFile] =
PsiDocumentManager.getInstance(myProject).getPsiFile(myEditor.getDocument).toOption
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaLocalInplaceRenamer.scala | Scala | apache-2.0 | 2,730 |
package HackerRank.Training.Sorting
import java.io.{ByteArrayInputStream, IOException, InputStream, PrintWriter}
import java.util.InputMismatchException
import scala.collection.SeqLike
import scala.collection.generic.CanBuildFrom
import scala.language.higherKinds
/**
* Copyright (c) 2017 A. Roberto Fischer
*
* @author A. Roberto Fischer <[email protected]> on 4/23/2017
*/
private[this] object QuickSort2 {
import Reader._
import Writer._
private[this] val TEST_INPUT: Option[String] = None
//------------------------------------------------------------------------------------------//
// Solution
//------------------------------------------------------------------------------------------//
private[this] def solve(): Unit = {
val n = nextInt()
val array = nextInt[Vector](n)
quickSort(array)
}
def quickSort[T, Coll](xs: Coll)
(implicit c2s: Coll <:< SeqLike[T, Coll],
cbf: CanBuildFrom[Coll, T, Coll],
ordering: Ordering[T]): Coll = {
import ordering._
if (xs.length <= 1) {
xs
} else {
def pivot(list: Coll): (Coll, T, Coll) = {
val (left, middle, right) = list.tail.foldLeft((cbf(), list.head, cbf())) {
(result, current) =>
val (left, pivot, right) = result
if (current < pivot) (left += current, pivot, right) else (left, pivot, right += current)
}
(left.result(), middle, right.result())
}
val (left, middle, right) = pivot(xs)
val builder = cbf()
builder.sizeHint(xs.size)
builder ++= quickSort(left) += middle ++= quickSort(right)
val result = builder.result()
println(result.mkString(" "))
result
}
}
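  /* Illustrative (not in the original): quickSort(Vector(5, 8, 1, 3, 7, 9, 2))
   * partitions around the head element, recurses, and prints each combined
   * sub-result bottom-up ("2 3", "1 2 3", "7 8 9", ...), ending with the fully
   * sorted line "1 2 3 5 7 8 9".
   */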
//------------------------------------------------------------------------------------------//
// Run
//------------------------------------------------------------------------------------------//
@throws[Exception]
def main(args: Array[String]): Unit = {
val s = System.currentTimeMillis
solve()
flush()
if (TEST_INPUT.isDefined) System.out.println(System.currentTimeMillis - s + "ms")
}
//------------------------------------------------------------------------------------------//
// Input
//------------------------------------------------------------------------------------------//
  private[this] object Reader {
private[this] implicit val in: InputStream = TEST_INPUT.fold(System.in)(s => new ByteArrayInputStream(s.getBytes))
def nextSeq[T, Coll[_]](reader: => Seq[T], n: Int)
(implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder ++= reader
}
builder.result()
}
def next[T, Coll[_]](reader: => T, n: Int)
(implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += reader
}
builder.result()
}
def nextWithIndex[T, Coll[_]](reader: => T, n: Int)
(implicit cbf: CanBuildFrom[Coll[(T, Int)], (T, Int), Coll[(T, Int)]]): Coll[(T, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((reader, i))
}
builder.result()
}
def nextDouble[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Double], Double, Coll[Double]]): Coll[Double] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextDouble()
}
builder.result()
}
def nextDoubleWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Double, Int)], (Double, Int), Coll[(Double, Int)]]): Coll[(Double, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextDouble(), i))
}
builder.result()
}
def nextChar[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Char], Char, Coll[Char]]): Coll[Char] = {
val builder = cbf()
builder.sizeHint(n)
var b = skip
var p = 0
while (p < n && !isSpaceChar(b)) {
builder += b.toChar
p += 1
b = readByte().toInt
}
builder.result()
}
def nextCharWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Char, Int)], (Char, Int), Coll[(Char, Int)]]): Coll[(Char, Int)] = {
val builder = cbf()
builder.sizeHint(n)
var b = skip
var p = 0
while (p < n && !isSpaceChar(b)) {
builder += ((b.toChar, p))
p += 1
b = readByte().toInt
}
builder.result()
}
def nextInt[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Int], Int, Coll[Int]]): Coll[Int] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextInt()
}
builder.result()
}
def nextIntWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Int, Int)], (Int, Int), Coll[(Int, Int)]]): Coll[(Int, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextInt(), i))
}
builder.result()
}
def nextLong[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Long], Long, Coll[Long]]): Coll[Long] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextLong()
}
builder.result()
}
def nextLongWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Long, Int)], (Long, Int), Coll[(Long, Int)]]): Coll[(Long, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextLong(), i))
}
builder.result()
}
def nextString[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[String], String, Coll[String]]): Coll[String] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextString()
}
builder.result()
}
def nextStringWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(String, Int)], (String, Int), Coll[(String, Int)]]): Coll[(String, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextString(), i))
}
builder.result()
}
def nextMultiLine(n: Int, m: Int): Array[Array[Char]] = {
val map = new Array[Array[Char]](n)
var i = 0
while (i < n) {
map(i) = nextChar[Array](m)
i += 1
}
map
}
def nextDouble(): Double = nextString().toDouble
def nextChar(): Char = skip.toChar
def nextString(): String = {
var b = skip
val sb = new java.lang.StringBuilder
while (!isSpaceChar(b)) {
sb.appendCodePoint(b)
b = readByte().toInt
}
sb.toString
}
def nextInt(): Int = {
var num = 0
var b = 0
var minus = false
while ( {
b = readByte().toInt
b != -1 && !((b >= '0' && b <= '9') || b == '-')
}) {}
if (b == '-') {
minus = true
b = readByte().toInt
}
while (true) {
if (b >= '0' && b <= '9') {
num = num * 10 + (b - '0')
} else {
if (minus) return -num else return num
}
b = readByte().toInt
}
throw new IOException("Read Int")
}
def nextLong(): Long = {
var num = 0L
var b = 0
var minus = false
while ( {
b = readByte().toInt
b != -1 && !((b >= '0' && b <= '9') || b == '-')
}) {}
if (b == '-') {
minus = true
b = readByte().toInt
}
while (true) {
if (b >= '0' && b <= '9') {
num = num * 10 + (b - '0')
} else {
if (minus) return -num else return num
}
b = readByte().toInt
}
throw new IOException("Read Long")
}
private[this] val inputBuffer = new Array[Byte](1024)
private[this] var lenBuffer = 0
private[this] var ptrBuffer = 0
private[this] def readByte()(implicit in: java.io.InputStream): Byte = {
if (lenBuffer == -1) throw new InputMismatchException
if (ptrBuffer >= lenBuffer) {
ptrBuffer = 0
try {
lenBuffer = in.read(inputBuffer)
} catch {
case _: IOException =>
throw new InputMismatchException
}
if (lenBuffer <= 0) return -1
}
inputBuffer({
ptrBuffer += 1
ptrBuffer - 1
})
}
private[this] def isSpaceChar(c: Int) = !(c >= 33 && c <= 126)
private[this] def skip = {
var b = 0
while ( {
b = readByte().toInt
b != -1 && isSpaceChar(b)
}) {}
b
}
}
//------------------------------------------------------------------------------------------//
// Output
//------------------------------------------------------------------------------------------//
  private[this] object Writer {
private[this] val out = new PrintWriter(System.out)
def flush(): Unit = out.flush()
def println(x: Any): Unit = out.println(x)
def print(x: Any): Unit = out.print(x)
}
} | robertoFischer/hackerrank | src/main/scala/HackerRank/Training/Sorting/QuickSort2.scala | Scala | mit | 9,461 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package types
import com.intellij.lang.PsiBuilder
import com.intellij.psi.tree.IElementType
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import org.jetbrains.plugins.scala.lang.parser.util.ParserUtils
/**
* @author Alexander Podkhalyuzin
* Date: 28.02.2008
*/
/*
* InfixType ::= CompoundType {id [nl] CompoundType}
*/
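/*
 * Associativity mirrors term-level operators (illustrative): an operator id not
 * ending in ':' is left-associative, so `A Op B Op C` parses as `(A Op B) Op C`,
 * while an id ending in ':' is right-associative, so `A ::: B ::: C` parses as
 * `A ::: (B ::: C)`.
 */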
object InfixType extends InfixType {
override protected def componentType = CompoundType
override protected def errorMessage = ScalaBundle.message("compound.type.expected")
}
trait InfixType {
protected def componentType: Type
protected def errorMessage: String
def parse(builder: ScalaPsiBuilder): Boolean = parse(builder, star = false)
  def parse(builder: ScalaPsiBuilder, star: Boolean): Boolean = parse(builder, star, isPattern = false)
def parse(builder: ScalaPsiBuilder, star: Boolean, isPattern: Boolean): Boolean = {
var couldBeVarArg = false
var infixTypeMarker = builder.mark
var markerList = List[PsiBuilder.Marker]() //This list consist of markers for right-associated op
var count = 0
markerList = infixTypeMarker :: markerList
builder.getTokenType match {
case ScalaTokenTypes.tUNDER => //wildcard is possible for infix types, like for parameterized. No bounds possible
val typeMarker = builder.mark()
builder.advanceLexer()
typeMarker.done(ScalaElementTypes.WILDCARD_TYPE)
builder.getTokenText match {
case "<:" | ">:" =>
infixTypeMarker.rollbackTo()
return false
case _ =>
}
case _ =>
if (!componentType.parse(builder, star, isPattern)) {
infixTypeMarker.rollbackTo()
return false
}
}
var assoc: Int = 0 //this mark associativity: left - 1, right - -1
while (builder.getTokenType == ScalaTokenTypes.tIDENTIFIER && (!builder.newlineBeforeCurrentToken) &&
(!star || builder.getTokenText != "*") && (!isPattern || builder.getTokenText != "|")) {
      count += 1
      //need to know associativity
      val s = builder.getTokenText
      couldBeVarArg = count == 1 && s == "*"
s.charAt(s.length-1) match {
case ':' =>
assoc match {
case 0 => assoc = -1
case 1 => builder error ScalaBundle.message("wrong.type.associativity")
case -1 =>
}
case _ =>
assoc match {
case 0 => assoc = 1
case 1 =>
case -1 => builder error ScalaBundle.message("wrong.type.associativity")
}
}
parseId(builder)
if (assoc == -1) {
val newMarker = builder.mark
markerList = newMarker :: markerList
}
if (builder.twoNewlinesBeforeCurrentToken) {
builder.error(errorMessage)
}
builder.getTokenType match {
case ScalaTokenTypes.tUNDER => //wildcard is possible for infix types, like for parameterized. No bounds possible
val typeMarker = builder.mark()
builder.advanceLexer()
typeMarker.done(ScalaElementTypes.WILDCARD_TYPE)
case _ =>
if (!componentType.parse(builder, star, isPattern)) builder error errorMessage else couldBeVarArg = false
}
if (assoc == 1) {
val newMarker = infixTypeMarker.precede
infixTypeMarker.done(ScalaElementTypes.INFIX_TYPE)
infixTypeMarker = newMarker
}
}
//final ops closing
if (count>0) {
if (assoc == 1) {
if (couldBeVarArg && builder.lookBack() == ScalaTokenTypes.tIDENTIFIER && count == 1) {
infixTypeMarker.rollbackTo()
parseId(builder)
return false
} else infixTypeMarker.drop()
}
else {
markerList.head.drop()
for (x: PsiBuilder.Marker <- markerList.tail) x.done(ScalaElementTypes.INFIX_TYPE)
}
}
else {
if (assoc == 1) {
infixTypeMarker.drop()
}
else {
for (x: PsiBuilder.Marker <- markerList) x.drop()
}
}
true
}
protected def parseId(builder: ScalaPsiBuilder, elementType: IElementType = ScalaElementTypes.REFERENCE) {
val idMarker = builder.mark
builder.advanceLexer() //Ate id
idMarker.done(elementType)
}
} | jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/parser/parsing/types/InfixType.scala | Scala | apache-2.0 | 4,433 |
package com.twitter.finagle.memcached.protocol.text.server
import org.jboss.netty.channel._
import com.twitter.util.StateMachine
import org.jboss.netty.buffer.ChannelBuffer
import com.twitter.finagle.memcached.protocol.text._
import com.twitter.finagle.memcached.protocol.ClientError
import com.twitter.finagle.memcached.util.ChannelBufferUtils._
import com.twitter.finagle.memcached.util.ParserUtils
class Decoder(storageCommands: collection.Set[ChannelBuffer]) extends AbstractDecoder with StateMachine {
import ParserUtils._
case class AwaitingCommand() extends State
case class AwaitingData(tokens: Seq[ChannelBuffer], bytesNeeded: Int) extends State
final protected[memcached] def start() {
state = AwaitingCommand()
}
override def exceptionCaught(ctx: ChannelHandlerContext, e: ExceptionEvent) {
super.exceptionCaught(ctx, e)
}
def decode(ctx: ChannelHandlerContext, channel: Channel, buffer: ChannelBuffer): Decoding = {
state match {
case AwaitingCommand() =>
decodeLine(buffer, needsData(_)) { tokens =>
Tokens(tokens)
}
case AwaitingData(tokens, bytesNeeded) =>
decodeData(bytesNeeded, buffer) { data =>
TokensWithData(tokens, data)
}
}
}
final protected[memcached] def awaitData(tokens: Seq[ChannelBuffer], bytesNeeded: Int) = {
state = AwaitingData(tokens, bytesNeeded)
needMoreData
}
private[this] def needsData(tokens: Seq[ChannelBuffer]) = {
val commandName = tokens.head
val args = tokens.tail
if (storageCommands.contains(commandName)) {
validateStorageCommand(args)
val bytesNeeded = tokens(4).toInt
Some(bytesNeeded)
} else None
}
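  // Illustrative (not in the original): for a command line "set mykey 0 0 5",
  // tokens(4) == "5" is the payload length, so the decoder switches to
  // AwaitingData until the five data bytes (e.g. "hello") have arrived.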
  private[this] val needMoreData: Decoding = null
private[this] def validateStorageCommand(tokens: Seq[ChannelBuffer]) = {
if (tokens.size < 4) throw new ClientError("Too few arguments")
if (tokens.size > 5) throw new ClientError("Too many arguments")
if (!tokens(3).matches(DIGITS)) throw new ClientError("Bad frame length")
}
}
| enachb/finagle_2.9_durgh | finagle-memcached/src/main/scala/com/twitter/finagle/memcached/protocol/text/server/Decoder.scala | Scala | apache-2.0 | 2,044 |
package wandou.math.random
import java.security.SecureRandom
/**
* <p>{@link SeedGenerator} implementation that uses Java's bundled
* {@link SecureRandom} RNG to generate random seed data.</p>
*
* <p>The advantage of using SecureRandom for seeding but not as the
* primary RNG is that we can use it to seed RNGs that are much faster
* than SecureRandom.</p>
*
* <p>This is the only seeding strategy that is guaranteed to work on all
* platforms and therefore is provided as a fall-back option should
* none of the other provided {@link SeedGenerator} implementations be
 * usable.</p>
* @author Daniel Dyer
*/
object SecureRandomSeedGenerator extends SeedGenerator {
private val SOURCE = new SecureRandom()
@throws(classOf[SeedException])
def generateSeed(length: Int): Array[Byte] = {
SecureRandomSeedGenerator.SOURCE.generateSeed(length)
}
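  // Illustrative usage (not in the original): obtain 16 seed bytes for a faster PRNG.
  //   val seedBytes: Array[Byte] = SecureRandomSeedGenerator.generateSeed(16)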
override def toString = {
"java.security.SecureRandom"
}
}
| wandoulabs/wandou-math | wandou-math/src/main/scala/wandou/math/random/SecureRandomSeedGenerator.scala | Scala | apache-2.0 | 938 |
package co.blocke.scalajack
package yaml
package primitives.plain
import model.ClassNameHintModifier
import TestUtil._
import munit._
import munit.internal.console
class Misc() extends FunSuite:
val sj = ScalaJack(YamlFlavor())
test("Read/write null into object") {
describe(
"-------------------------------\\n: Misc Tests (Plain - YAML) :\\n-------------------------------", Console.BLUE
)
assert(null == sj.read[PlayerMix]("null".asInstanceOf[YAML]) )
assertEquals("""null
|""".stripMargin, sj.render[PlayerMix](null).asInstanceOf[String] )
}
test("Handles type members with modifier") {
val prependHintMod = ClassNameHintModifier(
(hint: String) => "co.blocke.scalajack.yaml.primitives.plain." + hint,
(cname: String) => cname.split('.').last
)
val sj2 = sj.withTypeValueModifier(prependHintMod)
val yaml =
"""flower: Flower
|rose:
| thing: 5
| other: 6
|""".stripMargin.asInstanceOf[YAML]
val inst = sj2.read[WrapTrait[TraitBase]](yaml)
assertEquals(inst.rose.isInstanceOf[Flower], true)
assertEquals(sj2.render(inst), yaml)
}
test("Fails if no hint for type member") {
val yaml =
"""rose:
| thing: 5
| other: 6""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 0: Did not find required type member(s): flower"){
sj.read[WrapTrait[TraitBase]](yaml)
}
}
test("Must accept missing default constructor values") {
val yaml =
"""foobar: 3
|quatro: 4
|dontForget: 1""".stripMargin.asInstanceOf[YAML]
val inst = sj.read[InheritSimpleBase](yaml)
assertEquals(inst.one, "blather")
}
test("Must accept missing optional constructor values") {
val yaml = """{}""".asInstanceOf[YAML]
val inst = sj.read[OptConst](yaml)
assertEquals(inst.a, None)
assertEquals(inst.b, Some(3))
}
test("Must ignore unneeded type members") {
val inst = new UnneededType[String]()
inst.a = 9
assertEquals(sj.render(inst).asInstanceOf[String], """a: 9
|""".stripMargin)
}
test("Must require Java classes to have an empty constructor") {
val inst = new Unsupported("Foo")
interceptMessage[ScalaJackError]("""ScalaJack does not support Java classes with a non-empty constructor."""){
sj.render(inst)
}
}
test("Must handle MapName on Java setter") {
val yaml = "dos: 9".asInstanceOf[YAML]
val inst = sj.read[OnSetter](yaml)
assertEquals(inst.getTwo, 9)
  }
| gzoller/ScalaJack | core/src/test/scala/co.blocke.scalajack/yaml/primitives.plain/Misc.scala | Scala | mit | 2577 |
package approximation
import approximation.TwoDGrid._
import org.specs2._
import piecewise._
class Coefficients extends Specification { def is = s2"""
Patched coefficient ${test}
Not patched coefficient ${test0}
"""
def test = {
val xD = new XDim[Radial](1.0, x => x + 1.0, 10.0)
val yD = new YDim[Ortho](1.0, y => y + 1.0, 12.0)
val coef0 = VarXCoef(xD,
(x0: Double, x1: Double) => Spline.const(1.0),
(x0: Double, x1: Double) => Spline.const(1E6),
(x0: Double, x1: Double) => Spline.const(1E-6)
)
val coef1 = PatchXCoef(xD,
(x0: Double, x1: Double) => Spline.const(2.0),
(x0: Double, x1: Double) => Spline.const(1E6),
(x0: Double, x1: Double) => Spline.const(2E-6),
-1, 3, -1, 8
)
val coef = new PatchedXCoef(coef0, coef1)
val grid = TwoDGrid(xD, yD)(One, Temp)(One, Temp)(One, Temp)(One, Temp)(coef)
val rC = grid.rowCoefs(5).toSet
rC.size must_== 2
}
def test0 = {
val xD = new XDim[Radial](1.0, x => x + 1.0, 10.0)
val yD = new YDim[Ortho](1.0, y => y + 1.0, 12.0)
val coef0 = VarYCoef(yD,
(y0: Double, y1: Double) => Spline.const(1.0),
(y0: Double, y1: Double) => Spline.const(1E6),
(y0: Double, y1: Double) => Spline.const(1E-6)
)
val grid = TwoDGrid(xD, yD)(One, Temp)(One, Temp)(One, Temp)(One, Temp)(coef0)
val rC = grid.rowCoefs(5).toSet
rC.size must_== 1
}
}
| daniil-timofeev/gridsplines | approximation/src/test/scala/approximation/Coefficients.scala | Scala | apache-2.0 | 1,422 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.scala.examples
import java.io.File
import org.apache.commons.io.FileUtils
import org.apache.flink.core.fs.FileSystem.WriteMode
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.examples.iteration.util.IterateExampleData
import org.apache.flink.streaming.scala.examples.iteration.IterateExample
import org.apache.flink.streaming.scala.examples.join.WindowJoin
import org.apache.flink.streaming.scala.examples.join.WindowJoin.{Grade, Salary}
import org.apache.flink.streaming.scala.examples.windowing.{SessionWindowing, WindowWordCount}
import org.apache.flink.streaming.scala.examples.wordcount.WordCount
import org.apache.flink.streaming.test.examples.join.WindowJoinData
import org.apache.flink.test.testdata.WordCountData
import org.apache.flink.test.util.{AbstractTestBase, TestBaseUtils}
import org.junit.Test
/**
* Integration test for streaming programs in Scala examples.
*/
class StreamingExamplesITCase extends AbstractTestBase {
@Test
def testIterateExample(): Unit = {
val inputPath = createTempFile("fibonacciInput.txt", IterateExampleData.INPUT_PAIRS)
val resultPath = getTempDirPath("result")
// the example is inherently non-deterministic. The iteration timeout of 5000 ms
// is frequently not enough to make the test run stable on CI infrastructure
// with very small containers, so we cannot do a validation here
IterateExample.main(Array(
"--input", inputPath,
"--output", resultPath
))
}
@Test
def testWindowJoin(): Unit = {
val resultPath = File.createTempFile("result-path", "dir").toURI.toString
try {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val grades = env
.fromCollection(WindowJoinData.GRADES_INPUT.split("\\n"))
.map( line => {
val fields = line.split(",")
Grade(fields(1), fields(2).toInt)
})
val salaries = env
.fromCollection(WindowJoinData.SALARIES_INPUT.split("\\n"))
.map( line => {
val fields = line.split(",")
Salary(fields(1), fields(2).toInt)
})
WindowJoin.joinStreams(grades, salaries, 100)
.writeAsText(resultPath, WriteMode.OVERWRITE)
env.execute()
      TestBaseUtils.checkLinesAgainstRegexp(resultPath, "^Person\\([a-z]+,(\\d),(\\d)+\\)")
}
finally try
FileUtils.deleteDirectory(new File(resultPath))
catch {
case _: Throwable =>
}
}
@Test
def testSessionWindowing(): Unit = {
val resultPath = getTempDirPath("result")
SessionWindowing.main(Array("--output", resultPath))
}
@Test
def testWindowWordCount(): Unit = {
val windowSize = "25"
val slideSize = "15"
val textPath = createTempFile("text.txt", WordCountData.TEXT)
val resultPath = getTempDirPath("result")
WindowWordCount.main(Array(
"--input", textPath,
"--output", resultPath,
"--window", windowSize,
"--slide", slideSize,
"--execution-mode", "AUTOMATIC"
))
    // Since the parallel tokenizers might run at different speeds, the exact
    // output cannot be checked; we only verify that it is well-formed.
    // Checks that the result lines look like e.g. (faust, 2)
    TestBaseUtils.checkLinesAgainstRegexp(resultPath, "^\\([a-z]+,(\\d)+\\)")
}
@Test
def testWordCount(): Unit = {
val textPath = createTempFile("text.txt", WordCountData.TEXT)
val resultPath = getTempDirPath("result")
WordCount.main(Array(
"--input", textPath,
"--output", resultPath,
"--execution-mode", "automatic"
))
TestBaseUtils.compareResultsByLinesInMemory(
WordCountData.COUNTS_AS_TUPLES,
resultPath)
}
}
| godfreyhe/flink | flink-examples/flink-examples-streaming/src/test/scala/org/apache/flink/streaming/scala/examples/StreamingExamplesITCase.scala | Scala | apache-2.0 | 4,659 |
package org.jetbrains.plugins.scala.compiler.data
import org.jetbrains.jps.incremental.scala.{compilerVersionIn, containsScala3}
import org.jetbrains.plugins.scala.util.JarUtil
import org.jetbrains.plugins.scala.util.JarUtil.JarFileWithName
import java.io.File
object CompilerJarsFactory {
sealed trait CompilerJarsResolveError
object CompilerJarsResolveError {
case class NotFound(kind: String) extends CompilerJarsResolveError
case class DuplicatesFound(kind: String, duplicates: Seq[JarFileWithName]) extends CompilerJarsResolveError
case class FilesDoNotExist(files: Seq[File]) extends CompilerJarsResolveError
}
def fromFiles(scalacJars: Seq[File]): Either[CompilerJarsResolveError, CompilerJars] = {
val withName = JarUtil.collectJars(scalacJars)
fromJarFiles(withName)
}
def fromJarFiles(scalacJars: Seq[JarFileWithName]): Either[CompilerJarsResolveError, CompilerJars] = {
val ioFiles = scalacJars.map(_.file)
val isScala3 = containsScala3(ioFiles)
val compilerPrefix =
if (isScala3) "scala3"
else "scala"
val init: Either[CompilerJarsResolveError, Seq[JarFileWithName]] = Right(Seq.empty)
val libraryJarsE = Set("scala-library", s"$compilerPrefix-library").foldLeft(init) { (acc, kind) =>
for {
jars <- acc
jar <- find(scalacJars, kind)
} yield jars :+ jar
}
for {
libraryJars <- libraryJarsE
compilerJars = scalacJars.filterNot(libraryJars.contains)
compilerJar <- find(scalacJars, s"$compilerPrefix-compiler")
_ <- scalaReflectIfRequired(compilerJar, compilerJars)
} yield CompilerJars(
libraryJars = libraryJars.map(_.file),
compilerJars = compilerJars.map(_.file),
compilerJar = compilerJar.file
)
}
private def find(files: Seq[JarFileWithName], kind: String): Either[CompilerJarsResolveError, JarFileWithName] = {
val filesOfKind = files.filter(_.name.startsWith(kind)).distinct
filesOfKind match {
case Seq(file) => Right(file)
case Seq() => Left(CompilerJarsResolveError.NotFound(kind))
case duplicates => Left(CompilerJarsResolveError.DuplicatesFound(kind, duplicates))
}
}
private def scalaReflectIfRequired(compiler: JarFileWithName, compilerJars: Seq[JarFileWithName]): Either[CompilerJarsResolveError, Unit] =
if (compilerVersionIn(compiler.file, "2.10")) find(compilerJars, "scala-reflect").map(_ => ())
else Right(())
}
| JetBrains/intellij-scala | scala/compiler-shared/src/org/jetbrains/plugins/scala/compiler/data/CompilerJarsFactory.scala | Scala | apache-2.0 | 2,461 |
/*
* Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/agpl.html.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package lancet.core
trait Core_TIR extends Base_TIR {
implicit def unit(x: Boolean): Rep[Boolean] = liftConst(x)
implicit def unit(x: Byte): Rep[Byte] = liftConst(x)
implicit def unit(x: Char): Rep[Char] = liftConst(x)
implicit def unit(x: Short): Rep[Short] = liftConst(x)
implicit def unit(x: Int): Rep[Int] = liftConst(x)
implicit def unit(x: Long): Rep[Long] = liftConst(x)
implicit def unit(x: Float): Rep[Float] = liftConst(x)
implicit def unit(x: Double): Rep[Double] = liftConst(x)
def unit(x: Null): Rep[Object] = liftConst(null)
def unit(x: Object): Rep[Object] = liftConst(x)
case class PrimConvert[A:TypeRep,B:TypeRep](x:Rep[A]) extends Def[B]
case class PrimNegate[A:TypeRep](x: Rep[A]) extends Def[A]
case class PrimPlus[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[A]
case class PrimMinus[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[A]
case class PrimTimes[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[A]
case class PrimDiv[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[A]
case class PrimMod[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[A]
case class PrimAnd[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[A]
case class PrimOr[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[A]
case class PrimXor[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[A]
case class PrimShiftLeft[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[A]
case class PrimShiftRight[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[A]
case class PrimShiftRightUnsigned[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[A]
case class PrimLess[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[Boolean]
case class PrimLessEqual[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[Boolean]
case class PrimGreater[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[Boolean]
case class PrimGreaterEqual[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[Boolean]
case class PrimEqual[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[Boolean]
case class PrimNotEqual[A:TypeRep](x: Rep[A], y: Rep[A]) extends Def[Boolean]
case class ObjectEqual(x: Rep[Object], y: Rep[Object]) extends Def[Boolean]
case class ObjectNotEqual(x: Rep[Object], y: Rep[Object]) extends Def[Boolean]
case class ObjectAsInstanceOf[T:TypeRep](x: Rep[Object]) extends Def[T]
case class ObjectIsInstanceOf[T:TypeRep](x: Rep[Object]) extends Def[Boolean]
  case class IfThenElse[T:TypeRep](x: Rep[Boolean], y: Block[T], z: Block[T]) extends Def[T]
override def mirrorDef[A:TypeRep](d: Def[A], f: Transformer): Def[A] = (d match {
case PrimConvert(x) => PrimConvert(f(x))(typeRep[Any], typeRep[A]) /// FIXME!!!! ---> GADTS
case PrimNegate(x) => PrimNegate(f(x))
case PrimPlus(x, y) => PrimPlus(f(x), f(y))
case PrimMinus(x, y) => PrimMinus(f(x), f(y))
case PrimTimes(x, y) => PrimTimes(f(x), f(y))
case PrimDiv(x, y) => PrimDiv(f(x), f(y))
case PrimMod(x, y) => PrimMod(f(x), f(y))
case PrimAnd(x, y) => PrimAnd(f(x), f(y))
case PrimOr(x, y) => PrimOr(f(x), f(y))
case PrimXor(x, y) => PrimXor(f(x), f(y))
case PrimShiftLeft(x, y) => PrimShiftLeft(f(x), f(y))
case PrimShiftRight(x, y) => PrimShiftRight(f(x), f(y))
case PrimShiftRightUnsigned(x, y) => PrimShiftRightUnsigned(f(x), f(y))
case PrimLess(x, y) => PrimLess(f(x), f(y))
case PrimLessEqual(x, y) => PrimLessEqual(f(x), f(y))
case PrimGreater(x, y) => PrimGreater(f(x), f(y))
case PrimGreaterEqual(x, y) => PrimGreaterEqual(f(x), f(y))
case PrimEqual(x, y) => PrimEqual(f(x), f(y))
case PrimNotEqual(x, y) => PrimNotEqual(f(x), f(y))
case ObjectEqual(x, y) => ObjectEqual(f(x), f(y))
case ObjectNotEqual(x, y) => ObjectNotEqual(f(x), f(y))
case ObjectAsInstanceOf(x) => ObjectAsInstanceOf(f(x))(typeRep[A])
case ObjectIsInstanceOf(x) => ObjectIsInstanceOf(f(x))(typeRep[A]) // <------- NOT A!!
case IfThenElse(x, y, z) => IfThenElse(f(x), f(y), f(z))
case _ => super.mirrorDef(d, f)
}).asInstanceOf[Def[A]]
override def emitScala[A:TypeRep](d: Def[A], f: CodeGen): Unit = d match {
case PrimConvert(x) => Console.print(x+".to"+typeRep[A])
case PrimNegate(x) => Console.print("-"+x)
case PrimPlus(x, y) => Console.print(x+" + "+y)
case PrimMinus(x, y) => Console.print(x+" - "+y)
case PrimTimes(x, y) => Console.print(x+" * "+y)
case PrimDiv(x, y) => Console.print(x+" / "+y)
case PrimMod(x, y) => Console.print(x+" % "+y)
case PrimAnd(x, y) => Console.print(x+" & "+y)
case PrimOr(x, y) => Console.print(x+" | "+y)
case PrimXor(x, y) => Console.print(x+" ^ "+y)
case PrimShiftLeft(x, y) => Console.print(x+" << "+y)
case PrimShiftRight(x, y) => Console.print(x+" >> "+y)
case PrimShiftRightUnsigned(x, y) => Console.print(x+" >>> "+y)
case PrimLess(x, y) => Console.print(x+" < "+y)
case PrimLessEqual(x, y) => Console.print(x+" <= "+y)
case PrimGreater(x, y) => Console.print(x+" > "+y)
case PrimGreaterEqual(x, y) => Console.print(x+" >= "+y)
case PrimEqual(x, y) => Console.print(x+" == "+y)
case PrimNotEqual(x, y) => Console.print(x+" != "+y)
case ObjectEqual(x, y) => Console.print(x+" eq "+y)
case ObjectNotEqual(x, y) => Console.print(x+" ne "+y)
case ObjectAsInstanceOf(x) => Console.print(x+".asInstanceOf["+typeRep[A]+"]")
case ObjectIsInstanceOf(x) => Console.print(x+".isInstanceOf["+typeRep[A]+"]")
case IfThenElse(x, y, z) => Console.print("if ("+x+") ")
emitScalaBlock(y,f)
Console.print(" else ")
emitScalaBlock(z,f)
case _ => super.emitScala(d, f)
}
override def quickString[A:TypeRep](d: Def[A]): String = d match {
case PrimConvert(x) => x+".to"+typeRep[A]
case PrimNegate(x) => "-"+x
case PrimPlus(x, y) => x+" + "+y
case PrimMinus(x, y) => x+" - "+y
case PrimTimes(x, y) => x+" * "+y
case PrimDiv(x, y) => x+" / "+y
case PrimMod(x, y) => x+" % "+y
case PrimAnd(x, y) => x+" & "+y
case PrimOr(x, y) => x+" | "+y
case PrimXor(x, y) => x+" ^ "+y
case PrimShiftLeft(x, y) => x+" << "+y
case PrimShiftRight(x, y) => x+" >> "+y
case PrimShiftRightUnsigned(x, y) => x+" >>> "+y
case PrimLess(x, y) => x+" < "+y
case PrimLessEqual(x, y) => x+" <= "+y
case PrimGreater(x, y) => x+" > "+y
case PrimGreaterEqual(x, y) => x+" >= "+y
case PrimEqual(x, y) => x+" == "+y
case PrimNotEqual(x, y) => x+" != "+y
case ObjectEqual(x, y) => x+" eq "+y
case ObjectNotEqual(x, y) => x+" ne "+y
case ObjectAsInstanceOf(x) => x+".asInstanceOf["+typeRep[A]+"]"
case ObjectIsInstanceOf(x) => x+".isInstanceOf["+typeRep[A]+"]"
//case IfThenElse(x, y, z) => do default
case _ => super.quickString(d)
}
/*
override def mirror[T](d: Def[T]): Rep[T] = x match {
case _ => super.mirror(x,d)
}
*/
def byteToInt(x: Rep[Byte]): Rep[Int] = reflect[Int](PrimConvert[Byte,Int](x))
def charToInt(x: Rep[Char]): Rep[Int] = reflect[Int](PrimConvert[Char,Int](x))
def shortToInt(x: Rep[Short]): Rep[Int] = reflect[Int](PrimConvert[Short,Int](x))
def intToByte(x: Rep[Int]): Rep[Byte] = reflect[Byte](PrimConvert[Int,Byte](x))
def intToChar(x: Rep[Int]): Rep[Char] = reflect[Char](PrimConvert[Int,Char](x))
def intToShort(x: Rep[Int]): Rep[Short] = reflect[Short](PrimConvert[Int,Short](x))
def intToInt(x: Rep[Int]): Rep[Int] = reflect[Int](PrimConvert[Int,Int](x))
def intToLong(x: Rep[Int]): Rep[Long] = reflect[Long](PrimConvert[Int,Long](x))
def intToFloat(x: Rep[Int]): Rep[Float] = reflect[Float](PrimConvert[Int,Float](x))
def intToDouble(x: Rep[Int]): Rep[Double] = reflect[Double](PrimConvert[Int,Double](x))
def intNegate(x: Rep[Int]): Rep[Int] = reflect[Int](PrimNegate[Int](x))
def intPlus(x: Rep[Int], y: Rep[Int]): Rep[Int] = reflect[Int](PrimPlus[Int](x,y))
def intMinus(x: Rep[Int], y: Rep[Int]): Rep[Int] = reflect[Int](PrimMinus[Int](x,y))
def intTimes(x: Rep[Int], y: Rep[Int]): Rep[Int] = reflect[Int](PrimTimes[Int](x,y))
def intDiv(x: Rep[Int], y: Rep[Int]): Rep[Int] = reflect[Int](PrimDiv[Int](x,y))
def intMod(x: Rep[Int], y: Rep[Int]): Rep[Int] = reflect[Int](PrimMod[Int](x,y))
def intAnd(x: Rep[Int], y: Rep[Int]): Rep[Int] = reflect[Int](PrimAnd[Int](x,y))
def intOr(x: Rep[Int], y: Rep[Int]): Rep[Int] = reflect[Int](PrimOr[Int](x,y))
def intXor(x: Rep[Int], y: Rep[Int]): Rep[Int] = reflect[Int](PrimXor[Int](x,y))
def intShiftLeft(x: Rep[Int], y: Rep[Int]): Rep[Int] = reflect[Int](PrimShiftLeft[Int](x,y))
def intShiftRight(x: Rep[Int], y: Rep[Int]): Rep[Int] = reflect[Int](PrimShiftRight[Int](x,y))
def intShiftRightUnsigned(x: Rep[Int], y: Rep[Int]): Rep[Int] = reflect[Int](PrimShiftRightUnsigned[Int](x,y))
def intLess(x: Rep[Int], y: Rep[Int]): Rep[Boolean] = reflect[Boolean](PrimLess[Int](x,y))
def intLessEqual(x: Rep[Int], y: Rep[Int]): Rep[Boolean] = reflect[Boolean](PrimLessEqual[Int](x,y))
def intGreater(x: Rep[Int], y: Rep[Int]): Rep[Boolean] = reflect[Boolean](PrimGreater[Int](x,y))
def intGreaterEqual(x: Rep[Int], y: Rep[Int]): Rep[Boolean] = reflect[Boolean](PrimGreaterEqual[Int](x,y))
def intEqual(x: Rep[Int], y: Rep[Int]): Rep[Boolean] = reflect[Boolean](PrimEqual[Int](x,y))
def intNotEqual(x: Rep[Int], y: Rep[Int]): Rep[Boolean] = reflect[Boolean](PrimNotEqual[Int](x,y))
def longToByte(x: Rep[Long]): Rep[Byte] = reflect[Byte](PrimConvert[Long,Byte](x))
def longToChar(x: Rep[Long]): Rep[Char] = reflect[Char](PrimConvert[Long,Char](x))
def longToShort(x: Rep[Long]): Rep[Short] = reflect[Short](PrimConvert[Long,Short](x))
def longToInt(x: Rep[Long]): Rep[Int] = reflect[Int](PrimConvert[Long,Int](x))
def longToLong(x: Rep[Long]): Rep[Long] = reflect[Long](PrimConvert[Long,Long](x))
def longToFloat(x: Rep[Long]): Rep[Float] = reflect[Float](PrimConvert[Long,Float](x))
def longToDouble(x: Rep[Long]): Rep[Double] = reflect[Double](PrimConvert[Long,Double](x))
def longNegate(x: Rep[Long]): Rep[Long] = reflect[Long](PrimNegate[Long](x))
def longPlus(x: Rep[Long], y: Rep[Long]): Rep[Long] = reflect[Long](PrimPlus[Long](x,y))
def longMinus(x: Rep[Long], y: Rep[Long]): Rep[Long] = reflect[Long](PrimMinus[Long](x,y))
def longTimes(x: Rep[Long], y: Rep[Long]): Rep[Long] = reflect[Long](PrimTimes[Long](x,y))
def longDiv(x: Rep[Long], y: Rep[Long]): Rep[Long] = reflect[Long](PrimDiv[Long](x,y))
def longMod(x: Rep[Long], y: Rep[Long]): Rep[Long] = reflect[Long](PrimMod[Long](x,y))
def longAnd(x: Rep[Long], y: Rep[Long]): Rep[Long] = reflect[Long](PrimAnd[Long](x,y))
def longOr(x: Rep[Long], y: Rep[Long]): Rep[Long] = reflect[Long](PrimOr[Long](x,y))
def longXor(x: Rep[Long], y: Rep[Long]): Rep[Long] = reflect[Long](PrimXor[Long](x,y))
def longShiftLeft(x: Rep[Long], y: Rep[Long]): Rep[Long] = reflect[Long](PrimShiftLeft[Long](x,y))
def longShiftRight(x: Rep[Long], y: Rep[Long]): Rep[Long] = reflect[Long](PrimShiftRight[Long](x,y))
def longShiftRightUnsigned(x: Rep[Long], y: Rep[Long]): Rep[Long] = reflect[Long](PrimShiftRightUnsigned[Long](x,y))
def longLess(x: Rep[Long], y: Rep[Long]): Rep[Boolean] = reflect[Boolean](PrimLess[Long](x,y))
def longLessEqual(x: Rep[Long], y: Rep[Long]): Rep[Boolean] = reflect[Boolean](PrimLessEqual[Long](x,y))
def longGreater(x: Rep[Long], y: Rep[Long]): Rep[Boolean] = reflect[Boolean](PrimGreater[Long](x,y))
def longGreaterEqual(x: Rep[Long], y: Rep[Long]): Rep[Boolean] = reflect[Boolean](PrimGreaterEqual[Long](x,y))
def longEqual(x: Rep[Long], y: Rep[Long]): Rep[Boolean] = reflect[Boolean](PrimEqual[Long](x,y))
def longNotEqual(x: Rep[Long], y: Rep[Long]): Rep[Boolean] = reflect[Boolean](PrimNotEqual[Long](x,y))
def floatToByte(x: Rep[Float]): Rep[Byte] = reflect[Byte](PrimConvert[Float,Byte](x))
def floatToChar(x: Rep[Float]): Rep[Char] = reflect[Char](PrimConvert[Float,Char](x))
def floatToShort(x: Rep[Float]): Rep[Short] = reflect[Short](PrimConvert[Float,Short](x))
def floatToInt(x: Rep[Float]): Rep[Int] = reflect[Int](PrimConvert[Float,Int](x))
def floatToLong(x: Rep[Float]): Rep[Long] = reflect[Long](PrimConvert[Float,Long](x))
def floatToFloat(x: Rep[Float]): Rep[Float] = reflect[Float](PrimConvert[Float,Float](x))
def floatToDouble(x: Rep[Float]): Rep[Double] = reflect[Double](PrimConvert[Float,Double](x))
def floatNegate(x: Rep[Float]): Rep[Float] = reflect[Float](PrimNegate[Float](x))
def floatPlus(x: Rep[Float], y: Rep[Float]): Rep[Float] = reflect[Float](PrimPlus[Float](x,y))
def floatMinus(x: Rep[Float], y: Rep[Float]): Rep[Float] = reflect[Float](PrimMinus[Float](x,y))
def floatTimes(x: Rep[Float], y: Rep[Float]): Rep[Float] = reflect[Float](PrimTimes[Float](x,y))
def floatDiv(x: Rep[Float], y: Rep[Float]): Rep[Float] = reflect[Float](PrimDiv[Float](x,y))
def floatMod(x: Rep[Float], y: Rep[Float]): Rep[Float] = reflect[Float](PrimMod[Float](x,y))
def floatLess(x: Rep[Float], y: Rep[Float]): Rep[Boolean] = reflect[Boolean](PrimLess[Float](x,y))
def floatLessEqual(x: Rep[Float], y: Rep[Float]): Rep[Boolean] = reflect[Boolean](PrimLessEqual[Float](x,y))
def floatGreater(x: Rep[Float], y: Rep[Float]): Rep[Boolean] = reflect[Boolean](PrimGreater[Float](x,y))
def floatGreaterEqual(x: Rep[Float], y: Rep[Float]): Rep[Boolean] = reflect[Boolean](PrimGreaterEqual[Float](x,y))
def floatEqual(x: Rep[Float], y: Rep[Float]): Rep[Boolean] = reflect[Boolean](PrimEqual[Float](x,y))
def floatNotEqual(x: Rep[Float], y: Rep[Float]): Rep[Boolean] = reflect[Boolean](PrimNotEqual[Float](x,y))
def doubleToByte(x: Rep[Double]): Rep[Byte] = reflect[Byte](PrimConvert[Double,Byte](x))
def doubleToChar(x: Rep[Double]): Rep[Char] = reflect[Char](PrimConvert[Double,Char](x))
def doubleToShort(x: Rep[Double]): Rep[Short] = reflect[Short](PrimConvert[Double,Short](x))
def doubleToInt(x: Rep[Double]): Rep[Int] = reflect[Int](PrimConvert[Double,Int](x))
def doubleToLong(x: Rep[Double]): Rep[Long] = reflect[Long](PrimConvert[Double,Long](x))
def doubleToFloat(x: Rep[Double]): Rep[Float] = reflect[Float](PrimConvert[Double,Float](x))
def doubleToDouble(x: Rep[Double]): Rep[Double] = reflect[Double](PrimConvert[Double,Double](x))
def doubleNegate(x: Rep[Double]): Rep[Double] = reflect[Double](PrimNegate[Double](x))
def doublePlus(x: Rep[Double], y: Rep[Double]): Rep[Double] = reflect[Double](PrimPlus[Double](x,y))
def doubleMinus(x: Rep[Double], y: Rep[Double]): Rep[Double] = reflect[Double](PrimMinus[Double](x,y))
def doubleTimes(x: Rep[Double], y: Rep[Double]): Rep[Double] = reflect[Double](PrimTimes[Double](x,y))
def doubleDiv(x: Rep[Double], y: Rep[Double]): Rep[Double] = reflect[Double](PrimDiv[Double](x,y))
def doubleMod(x: Rep[Double], y: Rep[Double]): Rep[Double] = reflect[Double](PrimMod[Double](x,y))
def doubleLess(x: Rep[Double], y: Rep[Double]): Rep[Boolean] = reflect[Boolean](PrimLess[Double](x,y))
def doubleLessEqual(x: Rep[Double], y: Rep[Double]): Rep[Boolean] = reflect[Boolean](PrimLessEqual[Double](x,y))
def doubleGreater(x: Rep[Double], y: Rep[Double]): Rep[Boolean] = reflect[Boolean](PrimGreater[Double](x,y))
def doubleGreaterEqual(x: Rep[Double], y: Rep[Double]): Rep[Boolean] = reflect[Boolean](PrimGreaterEqual[Double](x,y))
def doubleEqual(x: Rep[Double], y: Rep[Double]): Rep[Boolean] = reflect[Boolean](PrimEqual[Double](x,y))
def doubleNotEqual(x: Rep[Double], y: Rep[Double]): Rep[Boolean] = reflect[Boolean](PrimNotEqual[Double](x,y))
def objectEqual(x: Rep[Object], y: Rep[Object]): Rep[Boolean] = reflect[Boolean](ObjectEqual(x,y))
def objectNotEqual(x: Rep[Object], y: Rep[Object]): Rep[Boolean] = reflect[Boolean](ObjectNotEqual(x,y))
def objectAsInstanceOf[T:TypeRep](x: Rep[Object]): Rep[T] = reflect[T](ObjectAsInstanceOf[T](x))
def objectIsInstanceOf[T:TypeRep](x: Rep[Object]): Rep[Boolean] = reflect[Boolean](ObjectIsInstanceOf[T](x))
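  // Usage sketch: if_ reifies both branches into Blocks so the IR records the
  // conditional rather than evaluating it eagerly, e.g.
  //   if_(intLess(a, b))(liftConst(1))(liftConst(0))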
def if_[T:TypeRep](x: Rep[Boolean])(y: =>Rep[T])(z: =>Rep[T]): Rep[T] = {
val save = exprs
// TODO: state lub; reset exprs for both branches!
    val r = reflect[T](IfThenElse(x,reify(y),reify(z)))
exprs = save
r
}
}
| TiarkRompf/lancet | src/main/scala/lancet/core/Core_TIR.scala | Scala | agpl-3.0 | 18,277 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.simulations.deprecated
import io.gatling.core.Predef._
import org.apache.usergrid.datagenerators.FeederGenerator
import org.apache.usergrid.scenarios.UserScenarios
import org.apache.usergrid.settings.Settings
import scala.concurrent.duration._
class GetEntitySimulation extends Simulation {
// Target settings
val httpConf = Settings.httpAppConf
// Simulation settings
val numUsers:Int = Settings.rampUsers
val numEntities:Int = Settings.numEntities
val rampTime:Int = Settings.rampTime
val throttle:Int = Settings.throttle
val feeder = FeederGenerator.generateEntityNameFeeder("user", numEntities)
val scnToRun = scenario("GET entity")
.exec(UserScenarios.getRandomUser)
setUp(scnToRun.inject(atOnceUsers(numUsers)).throttle(reachRps(throttle) in (rampTime.seconds)).protocols(httpConf)).maxDuration(Settings.holdDuration)
}
| mdunker/usergrid | tests/performance/src/main/scala/org/apache/usergrid/simulations/deprecated/GetEntitySimulation.scala | Scala | apache-2.0 | 1,691 |
package com.cloudera.sa.apptrans.streaming.ingestion.kudu
import com.cloudera.sa.apptrans.model.{AccountMart, AppEvent, AppEventBuilder, AppEventConst}
import kafka.serializer.StringDecoder
import org.apache.solr.common.cloud.ZooKeeperException
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}
import org.kududb.client.{KuduClient, Operation}
import org.kududb.client.SessionConfiguration.FlushMode
import org.kududb.spark.kudu.KuduContext
object SparkStreamingAppEventToKudu {
def main(args: Array[String]): Unit = {
println("Java Version:" + System.getProperty("java.version"))
println("Java Home:" + System.getProperties().getProperty("java.home"))
val v: ZooKeeperException = null
if (args.length == 0) {
println("Args: <KafkaBrokerList> " +
"<kafkaTopicList> " +
"<numberOfSeconds>" +
"<runLocal>" +
"<kuduMaster>" +
"<kuduAccountMartTable>",
"<kuduAppEventTable",
"<checkPointFolder>")
return
}
val kafkaBrokerList = args(0)
val kafkaTopicList = args(1)
val numberOfSeconds = args(2).toInt
val runLocal = args(3).equals("l")
val kuduMaster = args(4)
val kuduAccountMartTable = args(5)
val kuduAppEventTable = args(6)
val checkPointFolder = args(7)
println("kafkaBrokerList:" + kafkaBrokerList)
println("kafkaTopicList:" + kafkaTopicList)
println("numberOfSeconds:" + numberOfSeconds)
println("runLocal:" + runLocal)
println("kuduMaster:" + kuduMaster)
println("kuduAccountMartTable:" + kuduAccountMartTable)
println("kuduAppEventTable:" + kuduAppEventTable)
println("checkPointFolder:" + checkPointFolder)
val sc: SparkContext = if (runLocal) {
val sparkConfig = new SparkConf()
sparkConfig.set("spark.broadcast.compress", "false")
sparkConfig.set("spark.shuffle.compress", "false")
sparkConfig.set("spark.shuffle.spill.compress", "false")
new SparkContext("local[2]", "TableStatsSinglePathMain", sparkConfig)
} else {
val sparkConf = new SparkConf().setAppName("Spark Streaming Ingestion to Kudu")
new SparkContext(sparkConf)
}
val ssc = new StreamingContext(sc, Seconds(numberOfSeconds))
val topicsSet = kafkaTopicList.split(",").toSet
val kafkaParams = Map[String, String]("metadata.broker.list" -> kafkaBrokerList)
val messageStream = KafkaUtils.
createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topicsSet)
val kuduContext = new KuduContext(kuduMaster)
val appEventDStream = messageStream.map { case (key, value) =>
AppEventBuilder.build(value)
}
appEventDStream.foreachRDD(rdd => {
rdd.foreachPartition(it => {
sendEntityToKudu(kuduAppEventTable, it, kuduContext.syncClient)
})
})
val mapDStream = appEventDStream.map(appEvent =>
(appEvent.accountId + "," + appEvent.appId, appEvent.toAccountMart()))
    val aggDStream = mapDStream.updateStateByKey[AccountMart]((a: Seq[AccountMart], b: Option[AccountMart]) => {
      // updateStateByKey also fires for keys that have prior state but no new
      // values in this batch, so guard against reducing an empty sequence.
      if (a.isEmpty) {
        b
      } else {
        val aSum: AccountMart = a.reduce((a1, a2) => a1 + a2)
        if (b.isEmpty) Option(aSum) else Option(aSum + b.get)
      }
    })
aggDStream.map(r => r._2).foreachRDD(rdd => {
rdd.foreachPartition(it => {
        sendMartToKudu(kuduAccountMartTable, it, kuduContext.syncClient)
})
})
println("--Starting Spark Streaming")
ssc.checkpoint(checkPointFolder)
ssc.start()
ssc.awaitTermination()
}
  def sendMartToKudu(kuduAccountMartTable: String, it: Iterator[AccountMart], kuduClient: KuduClient): Unit = {
    val table = kuduClient.openTable(kuduAccountMartTable)
val session = kuduClient.newSession()
session.setFlushMode(FlushMode.AUTO_FLUSH_BACKGROUND)
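    // AUTO_FLUSH_BACKGROUND batches upserts and flushes them asynchronously;
    // the explicit session.flush() below drains anything still buffered.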
it.foreach(accountMart => {
val operation: Operation = table.newUpsert()
if (operation != null) {
val row = operation.getRow()
row.addString("account_id", accountMart.accountId)
row.addString("app_id", accountMart.appId)
row.addLong("sign_on_count", accountMart.signOnCount)
row.addLong("win_count", accountMart.winCount)
row.addLong("lose_count", accountMart.loseCount)
row.addDouble("purchase_total", accountMart.purchaseTotal)
row.addDouble("payment_credit_total", accountMart.paymentCreditTotal)
row.addLong("payment_credit_count", accountMart.paymentCreditCount)
row.addDouble("payment_debit_total", accountMart.paymentDebitTotal)
row.addLong("payment_debit_count", accountMart.paymentDebitCount)
row.addDouble("payment_paypal_total", accountMart.paymentPaypalTotal)
row.addLong("payment_paypal_count", accountMart.paymentPaypalCount)
session.apply(operation)
}
})
session.flush()
session.close()
}
def sendEntityToKudu(kuduAppEventTable: String, it: Iterator[AppEvent], kuduClient: KuduClient): Unit = {
val table = kuduClient.openTable(kuduAppEventTable)
val session = kuduClient.newSession()
session.setFlushMode(FlushMode.AUTO_FLUSH_BACKGROUND)
it.foreach(appEvent => {
val operation: Operation = table.newUpsert()
if (operation != null) {
val row = operation.getRow()
row.addString("account_id", appEvent.accountId)
row.addString("app_id", appEvent.appId)
row.addLong("event_timestamp", appEvent.eventTimestamp)
row.addString("event_id", appEvent.eventId)
row.addString("event_type", appEvent.eventType)
row.addDouble("purchase", appEvent.purchase)
row.addString("payment_type", appEvent.paymentType)
row.addString("session_id", appEvent.sessionId)
row.addDouble("latitude", appEvent.latitude)
row.addDouble("longitude", appEvent.longitude)
session.apply(operation)
}
})
session.flush()
session.close()
}
}
| tmalaska/AppTrans | src/main/scala/com/cloudera/sa/apptrans/streaming/ingestion/kudu/SparkStreamingAppEventToKudu.scala | Scala | apache-2.0 | 6,053 |
/*
* Copyright 2010 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter
package conversions
import com.twitter.util.StorageUnit
object storage {
class RichWholeNumber(wrapped: Long) {
def byte = bytes
def bytes = new StorageUnit(wrapped)
def kilobyte = kilobytes
def kilobytes = new StorageUnit(wrapped * 1024)
def megabyte = megabytes
def megabytes = new StorageUnit(wrapped * 1024 * 1024)
def gigabyte = gigabytes
def gigabytes = new StorageUnit(wrapped * 1024 * 1024 * 1024)
def terabyte = terabytes
def terabytes = new StorageUnit(wrapped * 1024 * 1024 * 1024 * 1024)
def petabyte = petabytes
def petabytes = new StorageUnit(wrapped * 1024 * 1024 * 1024 * 1024 * 1024)
def thousand = wrapped * 1000
def million = wrapped * 1000 * 1000
def billion = wrapped * 1000 * 1000 * 1000
}
implicit def intToStorageUnitableWholeNumber(i: Int) = new RichWholeNumber(i)
implicit def longToStorageUnitableWholeNumber(l: Long) = new RichWholeNumber(l)
}
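// Usage sketch: with the implicit conversions in scope, sizes read naturally.
//   import com.twitter.conversions.storage._
//   val limit: StorageUnit = 16.megabytes
//   val count: Long = 2.million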
| luciferous/util | util-core/src/main/scala/com/twitter/conversions/storage.scala | Scala | apache-2.0 | 1,580 |
package net.hotelling.harold.audiowidget
import javax.sound.sampled.DataLine.Info
import javax.sound.sampled._
import scala.collection.mutable.ArrayBuffer
/**
* Convert raw data coming in into a sequence of amplitude values.
*/
class Audio(val format: AudioFormat, val timeWindowMillis: Double = 50.0) {
if (format.getEncoding != AudioFormat.Encoding.PCM_UNSIGNED &&
format.getEncoding != AudioFormat.Encoding.PCM_SIGNED) {
throw new RuntimeException(s"Not sure how to deal with audio format with unsupported encoding: $format")
}
val bitsPerFrame: Int = format.getSampleSizeInBits * format.getChannels
if (bitsPerFrame % 8 != 0) {
// Frames may span across several adjacent bytes so we need to split up data that way.
throw new RuntimeException(s"Weird sized audio input frames are not supported: $format")
}
val framesInWindow: Int = (format.getFrameRate * timeWindowMillis / 1000.0).toInt
val bufferSize: Int = {
    val desiredSize = (framesInWindow * bitsPerFrame / 16.0).toInt
// Make sure we ask for a multiple of the frame size in bytes:
(desiredSize / format.getFrameSize) * format.getFrameSize
}
private[this] val buffer: Array[Byte] = Array.fill[Byte](bufferSize)(0)
def printInfo() {
println(s"format: $format")
println(s"bitsPerFrame: $bitsPerFrame")
println(s"framesIn50ms: $framesInWindow")
println(s"buffer size: $bufferSize")
}
def readFrames(line: TargetDataLine): Array[Int] = {
// By monitoring line.available I discovered that the backlog of input data
// kept growing and growing and I needed to flush the data each time. This
// way each sample is current and the UI does not being to lag behind.
line.flush
line.start()
val bytesRead = line.read(buffer, 0, bufferSize)
line.stop()
decode(buffer, bytesRead)
}
/** Decode the input samples into Int values.
*
* Only takes in the first channel, i.e. the left channel for stereo data.
   * Compensates for 8- or 16-bit samples and big- or little-endian 16-bit values.
   * TODO: handle signed versus unsigned encodings.
*/
def decode(data: Array[Byte], numBytes: Int): Array[Int] = {
val result = new ArrayBuffer[Int]()
// Each frame is an integer number of bytes so we can just split up the bytes into frames.
    var offsetBytes: Int = 0
    while (offsetBytes + format.getFrameSize <= numBytes) {
      // For now, assume we're working with 8 bit or 16 bit data.
      val sample: Int = if (format.getSampleSizeInBits == 16) {
        // Just take 1 channel for now
        val byte1 = data(offsetBytes)
        val byte2 = data(offsetBytes + 1)
        // Mask the low byte so its sign bit does not corrupt the combined value.
        if (format.isBigEndian) (byte1 << 8) | (byte2 & 0xff) else (byte2 << 8) | (byte1 & 0xff)
      } else if (format.getSampleSizeInBits == 8) {
        // Just take 1 channel for now
        data(offsetBytes).toInt
      } else {
        throw new RuntimeException(s"Not sure how to deal with audio format: $format")
      }
      result.append(sample)
      // Advance after reading so the first frame is not skipped.
      offsetBytes += format.getFrameSize
    }
result.toArray
}
}
object Audio {
def getMixerByName(name: String = "Primary Sound Capture Driver"): Mixer = {
val mixerInfo = AudioSystem.getMixerInfo.filter { _.getName == name }.head
println("Found mixer info: " + mixerInfo)
AudioSystem.getMixer(mixerInfo)
}
def getSomeInputLine(mixer: Mixer): TargetDataLine = {
val line = mixer.getTargetLineInfo.map(mixer.getLine)
.filter(_.isInstanceOf[TargetDataLine]).map(_.asInstanceOf[TargetDataLine]).head
line.open()
line
}
def getLineFormats(line: TargetDataLine): Array[AudioFormat] = {
line.getLineInfo.asInstanceOf[DataLine.Info].getFormats
}
def supportsInput(mixer: Mixer, lineInfo: Line.Info): Boolean = {
val line = mixer.getLine(lineInfo)
line match {
case tdl: TargetDataLine => !tdl.getLineInfo.asInstanceOf[Info].getFormats.isEmpty
case _ => false
}
}
def supportsInput(mixerInfo: Mixer.Info): Boolean = {
val mixer: Mixer = AudioSystem.getMixer(mixerInfo)
mixer.getTargetLineInfo.exists(supportsInput(mixer, _))
}
def mixerNamesWithInputSupport(): Array[String] =
AudioSystem.getMixerInfo.toList.filter(supportsInput).map(_.getName).toArray
def dumpInfo(): Unit = {
val mixers = AudioSystem.getMixerInfo
mixers.foreach { mixerInfo =>
println(mixerInfo.getName)
println(mixerInfo.getDescription)
println(mixerInfo.getVendor)
println(mixerInfo.getVersion)
val mixer: Mixer = AudioSystem.getMixer(mixerInfo)
mixer.getSourceLineInfo.foreach { sourceLineInfo =>
println(s" $sourceLineInfo")
val line = mixer.getLine(sourceLineInfo)
line match {
case sdl: SourceDataLine =>
sdl.getLineInfo.asInstanceOf[Info].getFormats.foreach { format =>
println(s" output: $format")
}
case _ =>
}
}
mixer.getTargetLineInfo.foreach { targetLineInfo =>
println(s" $targetLineInfo")
val line = mixer.getLine(targetLineInfo)
line match {
case tdl: TargetDataLine =>
tdl.getLineInfo.asInstanceOf[Info].getFormats.foreach { format =>
println(s" input: $format")
}
case _ =>
}
}
println()
val targetLines = mixer.getTargetLines
targetLines.foreach { targetLine => println(targetLine.getLineInfo) }
println(s"targetLines: ${targetLines.size}")
println()
}
val mixer = AudioSystem.getMixer(mixers(0))
val targetLines = mixer.getTargetLines
targetLines.foreach { targetLine => println(targetLine.getLineInfo) }
println(s"targetLines: ${targetLines.size}")
}
}
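  // Usage sketch (assumes a capture-capable mixer is present; the default
  // mixer name used by getMixerByName is platform-specific).
  def demoCapture(): Unit = {
    val mixer = getMixerByName()
    val line = getSomeInputLine(mixer)
    val audio = new Audio(line.getFormat)
    audio.printInfo()
    val amplitudes = audio.readFrames(line)
    println(s"read ${amplitudes.length} samples")
    line.close()
  }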
| haroldl/audiowidget | src/main/scala/net/hotelling/harold/audiowidget/Audio.scala | Scala | apache-2.0 | 5,749 |
package cortex.model
/**
 * Binds the symbols in implicit scope to the capture groups of an endpoint regex.
 */
case class ActionContext(endpoint: String)(implicit symbols: Seq[Symbol]) {
val coercedEndpoint =
if (endpoint.startsWith("/")) {
endpoint
} else {
s"/$endpoint"
}
val regex = s"$coercedEndpoint".r
def map(cs: CharSequence): Map[String, String] = {
val list = regex.unapplySeq(cs).getOrElse(List())
(symbols.map(_.name) zip list).toMap
}
}
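// Usage sketch (illustrative route): capture groups bind to the implicit symbols.
//   implicit val symbols: Seq[Symbol] = Seq('id)
//   val ctx = ActionContext("""/users/(\w+)""")
//   ctx.map("/users/42") // Map("id" -> "42")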
| jsflax/cortex | src/main/scala/cortex/model/ActionContext.scala | Scala | mit | 412 |
package akka.persistence.hbase.common
import akka.persistence.SnapshotSelectionCriteria
/**
 * Grouped events which will be sent to the `eventStream` if `publish-testing-events` is enabled.
*/
object TestingEventProtocol {
private[hbase] case class FinishedWrites(written: Int)
private[hbase] case class DeletedSnapshotsFor(processorId: String, criteria: SnapshotSelectionCriteria)
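  // Usage sketch (test code): subscribe a probe to observe these events, e.g.
  //   system.eventStream.subscribe(probe.ref, classOf[FinishedWrites])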
}
| hossx/akka-persistence-hbase | src/main/scala/akka/persistence/hbase/common/TestingEventProtocol.scala | Scala | apache-2.0 | 395 |
| hossx/akka-persistence-hbase | src/main/scala/akka/persistence/hbase/common/TestingEventProtocol.scala | Scala | apache-2.0 | 395 |
package latis.time
import latis.time.TimeScaleType.TimeScaleType
import latis.units.UnitOfMeasure
import latis.util.LatisServiceException
import latis.util.RegEx
import java.util.Date
import java.util.GregorianCalendar
import java.util.TimeZone
/**
* Model a time scale as the number of TimeUnit-s from an epoch (start date, i.e. time zero).
* The TimeScaleType will determine how leap seconds are dealt with (or other calendar differences?).
*/
class TimeScale(val epoch: Date, val unit: TimeUnit, val tsType: TimeScaleType) extends UnitOfMeasure("TODO") {
//TODO: consider using millis for epoch instead of Date
private var format: TimeFormat = null
override def toString(): String = {
if (format != null) format.toString
else {
val sb = new StringBuilder()
tsType.toString match {
case "UTC" => sb.append("UTC ")
case "TAI" => sb.append("TAI ")
case _ => //don't label native units
}
sb.append(unit.name)
sb.append(" since ")
sb.append(TimeFormat.DATE.format(epoch.getTime)) //TODO: include time, e.g. Julian Date starts at noon
//TODO: override for Julian Date?
sb.toString()
}
}
}
object TimeScale {
//Note, using def instead of lazy val to support tests with varying default TimeScaleType.
def JAVA: TimeScale = new TimeScale(new Date(0), TimeUnit.MILLISECOND, TimeScaleType.default)
/**
* Define a special case for Julian date: days since noon Jan 1, 4713 BC.
* Because Java's default calendar jumps from 1 BC to 1 AD, we need to use year -4712.
* This seems to work for the times we care about.
*/
lazy val JULIAN_DATE = {
val cal = new GregorianCalendar(-4712, 0, 1, 12, 0);
cal.setTimeZone(TimeZone.getTimeZone("GMT"));
TimeScale(cal.getTime, TimeUnit.DAY, TimeScaleType.default)
}
def apply(epoch: Date, unit: TimeUnit, tstype: TimeScaleType): TimeScale = {
new TimeScale(epoch, unit, tstype)
}
def apply(epoch: String, unit: TimeUnit, tsType: TimeScaleType): TimeScale = {
new TimeScale(new Date(TimeFormat.fromIsoValue(epoch).parse(epoch)), unit, tsType)
}
/**
* Make new TimeScale from an existing TimeScale but with a specified TimeScaleType.
*/
def apply(ts: TimeScale, tsType: TimeScaleType): TimeScale = {
new TimeScale(ts.epoch, ts.unit, tsType)
}
/**
   * Make TimeScale from "(tsType) unit since epoch", special name, or time format String adhering to java.util.SimpleDateFormat.
* "tsType" is the optional (case-insensitive) TimeScaleType: "UTC", "TAI" or "NATIVE".
* If tsType is not specified for a numeric unit (unit since epoch), NATIVE will be used. Note, the time.scale.type property will not be used
* because units are considered to be a property of the dataset. The property is designed to specify behavior in LaTiS.
   * e.g. how to interpret a time selection. If a "naively" written dataset were interpreted as UTC, unexpected leap second offsets would be introduced.
* Special names supported include:
* Julian Date
* Formatted time "units" with use the time.scale.type property for now since there is currently no mechanism to specify the type in the format string.
* This will likely need to change. (LATIS-322)
*/
def apply(scale: String): TimeScale = {
scale.split(" since ") match {
      case Array(s, epoch) => s.split("""\s""") match {
case Array(tstype, unit) => TimeScale(epoch, TimeUnit.withName(unit.toLowerCase), TimeScaleType.withName(tstype.toUpperCase))
case Array(unit) => TimeScale(epoch, TimeUnit.withName(unit.toLowerCase), TimeScaleType.NATIVE)
case _ => throw new LatisServiceException("Time unit has more than 2 components before the 'since': " + scale)
}
case Array(s) => s match {
case s: String if (s.toLowerCase.startsWith("julian")) => JULIAN_DATE //TODO: can we interpret JD as UTC?
case _ => {
//formatted
val ts = TimeScale.JAVA //will get tsType from time.scale.type property
//hack format into TimeScale so we can use it when printing units
ts.format = TimeFormat(s)
ts
}
}
case _ => throw new LatisServiceException("Time unit contains 'since' more than once: " + scale)
}
}
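  // Usage sketch (unit strings are illustrative; names must match TimeUnit):
  //   TimeScale("milliseconds since 1970-01-01")  // NATIVE numeric scale
  //   TimeScale("UTC seconds since 2000-01-01")   // UTC-typed numeric scale
  //   TimeScale("yyyy-MM-dd")                     // formatted, backed by JAVA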
}
| dlindhol/LaTiS | src/main/scala/latis/time/TimeScale.scala | Scala | epl-1.0 | 4277 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.kafka
import java.io.OutputStream
import java.lang.{Integer => JInt, Long => JLong, Number => JNumber}
import java.nio.charset.StandardCharsets
import java.util.{List => JList, Locale, Map => JMap, Set => JSet}
import scala.collection.JavaConverters._
import scala.reflect.ClassTag
import kafka.common.TopicAndPartition
import kafka.message.MessageAndMetadata
import kafka.serializer.{Decoder, DefaultDecoder, StringDecoder}
import net.razorvine.pickle.{IObjectPickler, Opcodes, Pickler}
import org.apache.spark.{SparkContext, SparkException}
import org.apache.spark.api.java.{JavaPairRDD, JavaRDD, JavaSparkContext}
import org.apache.spark.api.java.function.{Function => JFunction}
import org.apache.spark.api.python.SerDeUtil
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.api.java._
import org.apache.spark.streaming.dstream.{DStream, InputDStream, ReceiverInputDStream}
import org.apache.spark.streaming.util.WriteAheadLogUtils
@deprecated("Update to Kafka 0.10 integration", "2.3.0")
object KafkaUtils {
/**
* Create an input stream that pulls messages from Kafka Brokers.
* @param ssc StreamingContext object
* @param zkQuorum Zookeeper quorum (hostname:port,hostname:port,..)
* @param groupId The group id for this consumer
* @param topics Map of (topic_name to numPartitions) to consume. Each partition is consumed
* in its own thread
* @param storageLevel Storage level to use for storing the received objects
* (default: StorageLevel.MEMORY_AND_DISK_SER_2)
* @return DStream of (Kafka message key, Kafka message value)
*/
def createStream(
ssc: StreamingContext,
zkQuorum: String,
groupId: String,
topics: Map[String, Int],
storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK_SER_2
): ReceiverInputDStream[(String, String)] = {
val kafkaParams = Map[String, String](
"zookeeper.connect" -> zkQuorum, "group.id" -> groupId,
"zookeeper.connection.timeout.ms" -> "10000")
createStream[String, String, StringDecoder, StringDecoder](
ssc, kafkaParams, topics, storageLevel)
}
/**
* Create an input stream that pulls messages from Kafka Brokers.
* @param ssc StreamingContext object
* @param kafkaParams Map of kafka configuration parameters,
* see http://kafka.apache.org/08/configuration.html
* @param topics Map of (topic_name to numPartitions) to consume. Each partition is consumed
* in its own thread.
* @param storageLevel Storage level to use for storing the received objects
* @tparam K type of Kafka message key
* @tparam V type of Kafka message value
* @tparam U type of Kafka message key decoder
* @tparam T type of Kafka message value decoder
* @return DStream of (Kafka message key, Kafka message value)
*/
def createStream[K: ClassTag, V: ClassTag, U <: Decoder[_]: ClassTag, T <: Decoder[_]: ClassTag](
ssc: StreamingContext,
kafkaParams: Map[String, String],
topics: Map[String, Int],
storageLevel: StorageLevel
): ReceiverInputDStream[(K, V)] = {
val walEnabled = WriteAheadLogUtils.enableReceiverLog(ssc.conf)
new KafkaInputDStream[K, V, U, T](ssc, kafkaParams, topics, walEnabled, storageLevel)
}
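  // Usage sketch (ZooKeeper address, group id and topic name are illustrative):
  //   val lines = KafkaUtils.createStream(ssc, "zk1:2181", "my-group", Map("events" -> 1))
  //   lines.map(_._2).count().print()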
/**
* Create an input stream that pulls messages from Kafka Brokers.
* Storage level of the data will be the default StorageLevel.MEMORY_AND_DISK_SER_2.
* @param jssc JavaStreamingContext object
* @param zkQuorum Zookeeper quorum (hostname:port,hostname:port,..)
* @param groupId The group id for this consumer
* @param topics Map of (topic_name to numPartitions) to consume. Each partition is consumed
* in its own thread
* @return DStream of (Kafka message key, Kafka message value)
*/
def createStream(
jssc: JavaStreamingContext,
zkQuorum: String,
groupId: String,
topics: JMap[String, JInt]
): JavaPairReceiverInputDStream[String, String] = {
createStream(jssc.ssc, zkQuorum, groupId, Map(topics.asScala.mapValues(_.intValue()).toSeq: _*))
}
/**
* Create an input stream that pulls messages from Kafka Brokers.
* @param jssc JavaStreamingContext object
* @param zkQuorum Zookeeper quorum (hostname:port,hostname:port,..).
* @param groupId The group id for this consumer.
* @param topics Map of (topic_name to numPartitions) to consume. Each partition is consumed
* in its own thread.
* @param storageLevel RDD storage level.
* @return DStream of (Kafka message key, Kafka message value)
*/
def createStream(
jssc: JavaStreamingContext,
zkQuorum: String,
groupId: String,
topics: JMap[String, JInt],
storageLevel: StorageLevel
): JavaPairReceiverInputDStream[String, String] = {
createStream(jssc.ssc, zkQuorum, groupId, Map(topics.asScala.mapValues(_.intValue()).toSeq: _*),
storageLevel)
}
/**
* Create an input stream that pulls messages from Kafka Brokers.
* @param jssc JavaStreamingContext object
* @param keyTypeClass Key type of DStream
   * @param valueTypeClass value type of DStream
* @param keyDecoderClass Type of kafka key decoder
* @param valueDecoderClass Type of kafka value decoder
* @param kafkaParams Map of kafka configuration parameters,
* see http://kafka.apache.org/08/configuration.html
* @param topics Map of (topic_name to numPartitions) to consume. Each partition is consumed
* in its own thread
* @param storageLevel RDD storage level.
* @tparam K type of Kafka message key
* @tparam V type of Kafka message value
* @tparam U type of Kafka message key decoder
* @tparam T type of Kafka message value decoder
* @return DStream of (Kafka message key, Kafka message value)
*/
def createStream[K, V, U <: Decoder[_], T <: Decoder[_]](
jssc: JavaStreamingContext,
keyTypeClass: Class[K],
valueTypeClass: Class[V],
keyDecoderClass: Class[U],
valueDecoderClass: Class[T],
kafkaParams: JMap[String, String],
topics: JMap[String, JInt],
storageLevel: StorageLevel
): JavaPairReceiverInputDStream[K, V] = {
implicit val keyCmt: ClassTag[K] = ClassTag(keyTypeClass)
implicit val valueCmt: ClassTag[V] = ClassTag(valueTypeClass)
implicit val keyCmd: ClassTag[U] = ClassTag(keyDecoderClass)
implicit val valueCmd: ClassTag[T] = ClassTag(valueDecoderClass)
createStream[K, V, U, T](
jssc.ssc,
kafkaParams.asScala.toMap,
Map(topics.asScala.mapValues(_.intValue()).toSeq: _*),
storageLevel)
}
/** get leaders for the given offset ranges, or throw an exception */
private def leadersForRanges(
kc: KafkaCluster,
offsetRanges: Array[OffsetRange]): Map[TopicAndPartition, (String, Int)] = {
val topics = offsetRanges.map(o => TopicAndPartition(o.topic, o.partition)).toSet
val leaders = kc.findLeaders(topics)
KafkaCluster.checkErrors(leaders)
}
/** Make sure offsets are available in kafka, or throw an exception */
private def checkOffsets(
kc: KafkaCluster,
offsetRanges: Array[OffsetRange]): Unit = {
val topics = offsetRanges.map(_.topicAndPartition).toSet
val result = for {
low <- kc.getEarliestLeaderOffsets(topics).right
high <- kc.getLatestLeaderOffsets(topics).right
} yield {
offsetRanges.filterNot { o =>
low(o.topicAndPartition).offset <= o.fromOffset &&
o.untilOffset <= high(o.topicAndPartition).offset
}
}
val badRanges = KafkaCluster.checkErrors(result)
if (!badRanges.isEmpty) {
throw new SparkException("Offsets not available on leader: " + badRanges.mkString(","))
}
}
private[kafka] def getFromOffsets(
kc: KafkaCluster,
kafkaParams: Map[String, String],
topics: Set[String]
): Map[TopicAndPartition, Long] = {
val reset = kafkaParams.get("auto.offset.reset").map(_.toLowerCase(Locale.ROOT))
val result = for {
topicPartitions <- kc.getPartitions(topics).right
leaderOffsets <- (if (reset == Some("smallest")) {
kc.getEarliestLeaderOffsets(topicPartitions)
} else {
kc.getLatestLeaderOffsets(topicPartitions)
}).right
} yield {
leaderOffsets.map { case (tp, lo) =>
(tp, lo.offset)
}
}
KafkaCluster.checkErrors(result)
}
/**
* Create an RDD from Kafka using offset ranges for each topic and partition.
*
* @param sc SparkContext object
* @param kafkaParams Kafka <a href="http://kafka.apache.org/documentation.html#configuration">
* configuration parameters</a>. Requires "metadata.broker.list" or "bootstrap.servers"
* to be set with Kafka broker(s) (NOT zookeeper servers) specified in
* host1:port1,host2:port2 form.
* @param offsetRanges Each OffsetRange in the batch corresponds to a
* range of offsets for a given Kafka topic/partition
* @tparam K type of Kafka message key
* @tparam V type of Kafka message value
* @tparam KD type of Kafka message key decoder
* @tparam VD type of Kafka message value decoder
* @return RDD of (Kafka message key, Kafka message value)
*/
def createRDD[
K: ClassTag,
V: ClassTag,
KD <: Decoder[K]: ClassTag,
VD <: Decoder[V]: ClassTag](
sc: SparkContext,
kafkaParams: Map[String, String],
offsetRanges: Array[OffsetRange]
): RDD[(K, V)] = sc.withScope {
val messageHandler = (mmd: MessageAndMetadata[K, V]) => (mmd.key, mmd.message)
val kc = new KafkaCluster(kafkaParams)
val leaders = leadersForRanges(kc, offsetRanges)
checkOffsets(kc, offsetRanges)
new KafkaRDD[K, V, KD, VD, (K, V)](sc, kafkaParams, offsetRanges, leaders, messageHandler)
}
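  // Usage sketch (broker address and offsets are illustrative):
  //   val ranges = Array(OffsetRange("events", 0, fromOffset = 0L, untilOffset = 100L))
  //   val rdd = KafkaUtils.createRDD[String, String, StringDecoder, StringDecoder](
  //     sc, Map("metadata.broker.list" -> "broker1:9092"), ranges)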
/**
* Create an RDD from Kafka using offset ranges for each topic and partition. This allows you
* specify the Kafka leader to connect to (to optimize fetching) and access the message as well
* as the metadata.
*
* @param sc SparkContext object
* @param kafkaParams Kafka <a href="http://kafka.apache.org/documentation.html#configuration">
* configuration parameters</a>. Requires "metadata.broker.list" or "bootstrap.servers"
* to be set with Kafka broker(s) (NOT zookeeper servers) specified in
* host1:port1,host2:port2 form.
* @param offsetRanges Each OffsetRange in the batch corresponds to a
* range of offsets for a given Kafka topic/partition
* @param leaders Kafka brokers for each TopicAndPartition in offsetRanges. May be an empty map,
* in which case leaders will be looked up on the driver.
* @param messageHandler Function for translating each message and metadata into the desired type
* @tparam K type of Kafka message key
* @tparam V type of Kafka message value
* @tparam KD type of Kafka message key decoder
* @tparam VD type of Kafka message value decoder
* @tparam R type returned by messageHandler
* @return RDD of R
*/
def createRDD[
K: ClassTag,
V: ClassTag,
KD <: Decoder[K]: ClassTag,
VD <: Decoder[V]: ClassTag,
R: ClassTag](
sc: SparkContext,
kafkaParams: Map[String, String],
offsetRanges: Array[OffsetRange],
leaders: Map[TopicAndPartition, Broker],
messageHandler: MessageAndMetadata[K, V] => R
): RDD[R] = sc.withScope {
val kc = new KafkaCluster(kafkaParams)
val leaderMap = if (leaders.isEmpty) {
leadersForRanges(kc, offsetRanges)
} else {
// This could be avoided by refactoring KafkaRDD.leaders and KafkaCluster to use Broker
leaders.map {
case (tp: TopicAndPartition, Broker(host, port)) => (tp, (host, port))
}
}
val cleanedHandler = sc.clean(messageHandler)
checkOffsets(kc, offsetRanges)
new KafkaRDD[K, V, KD, VD, R](sc, kafkaParams, offsetRanges, leaderMap, cleanedHandler)
}
/**
* Create an RDD from Kafka using offset ranges for each topic and partition.
*
* @param jsc JavaSparkContext object
* @param kafkaParams Kafka <a href="http://kafka.apache.org/documentation.html#configuration">
* configuration parameters</a>. Requires "metadata.broker.list" or "bootstrap.servers"
* to be set with Kafka broker(s) (NOT zookeeper servers) specified in
* host1:port1,host2:port2 form.
* @param offsetRanges Each OffsetRange in the batch corresponds to a
* range of offsets for a given Kafka topic/partition
* @param keyClass type of Kafka message key
* @param valueClass type of Kafka message value
* @param keyDecoderClass type of Kafka message key decoder
* @param valueDecoderClass type of Kafka message value decoder
* @tparam K type of Kafka message key
* @tparam V type of Kafka message value
* @tparam KD type of Kafka message key decoder
* @tparam VD type of Kafka message value decoder
* @return RDD of (Kafka message key, Kafka message value)
*/
def createRDD[K, V, KD <: Decoder[K], VD <: Decoder[V]](
jsc: JavaSparkContext,
keyClass: Class[K],
valueClass: Class[V],
keyDecoderClass: Class[KD],
valueDecoderClass: Class[VD],
kafkaParams: JMap[String, String],
offsetRanges: Array[OffsetRange]
): JavaPairRDD[K, V] = jsc.sc.withScope {
implicit val keyCmt: ClassTag[K] = ClassTag(keyClass)
implicit val valueCmt: ClassTag[V] = ClassTag(valueClass)
implicit val keyDecoderCmt: ClassTag[KD] = ClassTag(keyDecoderClass)
implicit val valueDecoderCmt: ClassTag[VD] = ClassTag(valueDecoderClass)
new JavaPairRDD(createRDD[K, V, KD, VD](
jsc.sc, Map(kafkaParams.asScala.toSeq: _*), offsetRanges))
}
/**
* Create an RDD from Kafka using offset ranges for each topic and partition. This allows you
   * to specify the Kafka leader to connect to (to optimize fetching) and access the message as well
* as the metadata.
*
* @param jsc JavaSparkContext object
* @param kafkaParams Kafka <a href="http://kafka.apache.org/documentation.html#configuration">
* configuration parameters</a>. Requires "metadata.broker.list" or "bootstrap.servers"
* to be set with Kafka broker(s) (NOT zookeeper servers) specified in
* host1:port1,host2:port2 form.
* @param offsetRanges Each OffsetRange in the batch corresponds to a
* range of offsets for a given Kafka topic/partition
* @param leaders Kafka brokers for each TopicAndPartition in offsetRanges. May be an empty map,
* in which case leaders will be looked up on the driver.
* @param messageHandler Function for translating each message and metadata into the desired type
* @tparam K type of Kafka message key
* @tparam V type of Kafka message value
* @tparam KD type of Kafka message key decoder
* @tparam VD type of Kafka message value decoder
* @tparam R type returned by messageHandler
* @return RDD of R
*/
def createRDD[K, V, KD <: Decoder[K], VD <: Decoder[V], R](
jsc: JavaSparkContext,
keyClass: Class[K],
valueClass: Class[V],
keyDecoderClass: Class[KD],
valueDecoderClass: Class[VD],
recordClass: Class[R],
kafkaParams: JMap[String, String],
offsetRanges: Array[OffsetRange],
leaders: JMap[TopicAndPartition, Broker],
messageHandler: JFunction[MessageAndMetadata[K, V], R]
): JavaRDD[R] = jsc.sc.withScope {
implicit val keyCmt: ClassTag[K] = ClassTag(keyClass)
implicit val valueCmt: ClassTag[V] = ClassTag(valueClass)
implicit val keyDecoderCmt: ClassTag[KD] = ClassTag(keyDecoderClass)
implicit val valueDecoderCmt: ClassTag[VD] = ClassTag(valueDecoderClass)
implicit val recordCmt: ClassTag[R] = ClassTag(recordClass)
val leaderMap = Map(leaders.asScala.toSeq: _*)
createRDD[K, V, KD, VD, R](
jsc.sc, Map(kafkaParams.asScala.toSeq: _*), offsetRanges, leaderMap, messageHandler.call(_))
}
/**
* Create an input stream that directly pulls messages from Kafka Brokers
* without using any receiver. This stream can guarantee that each message
* from Kafka is included in transformations exactly once (see points below).
*
* Points to note:
* - No receivers: This stream does not use any receiver. It directly queries Kafka
* - Offsets: This does not use Zookeeper to store offsets. The consumed offsets are tracked
* by the stream itself. For interoperability with Kafka monitoring tools that depend on
* Zookeeper, you have to update Kafka/Zookeeper yourself from the streaming application.
* You can access the offsets used in each batch from the generated RDDs (see
* [[org.apache.spark.streaming.kafka.HasOffsetRanges]]).
* - Failure Recovery: To recover from driver failures, you have to enable checkpointing
* in the `StreamingContext`. The information on consumed offset can be
* recovered from the checkpoint. See the programming guide for details (constraints, etc.).
   * - End-to-end semantics: This stream ensures that every record is effectively received and
* transformed exactly once, but gives no guarantees on whether the transformed data are
* outputted exactly once. For end-to-end exactly-once semantics, you have to either ensure
* that the output operation is idempotent, or use transactions to output records atomically.
* See the programming guide for more details.
*
* @param ssc StreamingContext object
* @param kafkaParams Kafka <a href="http://kafka.apache.org/documentation.html#configuration">
* configuration parameters</a>. Requires "metadata.broker.list" or "bootstrap.servers"
* to be set with Kafka broker(s) (NOT zookeeper servers) specified in
* host1:port1,host2:port2 form.
* @param fromOffsets Per-topic/partition Kafka offsets defining the (inclusive)
* starting point of the stream
* @param messageHandler Function for translating each message and metadata into the desired type
* @tparam K type of Kafka message key
* @tparam V type of Kafka message value
* @tparam KD type of Kafka message key decoder
* @tparam VD type of Kafka message value decoder
* @tparam R type returned by messageHandler
* @return DStream of R
*/
def createDirectStream[
K: ClassTag,
V: ClassTag,
KD <: Decoder[K]: ClassTag,
VD <: Decoder[V]: ClassTag,
R: ClassTag] (
ssc: StreamingContext,
kafkaParams: Map[String, String],
fromOffsets: Map[TopicAndPartition, Long],
messageHandler: MessageAndMetadata[K, V] => R
): InputDStream[R] = {
val cleanedHandler = ssc.sc.clean(messageHandler)
new DirectKafkaInputDStream[K, V, KD, VD, R](
ssc, kafkaParams, fromOffsets, cleanedHandler)
}
/**
* Create an input stream that directly pulls messages from Kafka Brokers
* without using any receiver. This stream can guarantee that each message
* from Kafka is included in transformations exactly once (see points below).
*
* Points to note:
* - No receivers: This stream does not use any receiver. It directly queries Kafka
* - Offsets: This does not use Zookeeper to store offsets. The consumed offsets are tracked
* by the stream itself. For interoperability with Kafka monitoring tools that depend on
* Zookeeper, you have to update Kafka/Zookeeper yourself from the streaming application.
* You can access the offsets used in each batch from the generated RDDs (see
* [[org.apache.spark.streaming.kafka.HasOffsetRanges]]).
* - Failure Recovery: To recover from driver failures, you have to enable checkpointing
* in the `StreamingContext`. The information on consumed offset can be
* recovered from the checkpoint. See the programming guide for details (constraints, etc.).
   * - End-to-end semantics: This stream ensures that every record is effectively received and
* transformed exactly once, but gives no guarantees on whether the transformed data are
* outputted exactly once. For end-to-end exactly-once semantics, you have to either ensure
* that the output operation is idempotent, or use transactions to output records atomically.
* See the programming guide for more details.
*
* @param ssc StreamingContext object
* @param kafkaParams Kafka <a href="http://kafka.apache.org/documentation.html#configuration">
* configuration parameters</a>. Requires "metadata.broker.list" or "bootstrap.servers"
* to be set with Kafka broker(s) (NOT zookeeper servers), specified in
* host1:port1,host2:port2 form.
* If not starting from a checkpoint, "auto.offset.reset" may be set to "largest" or "smallest"
* to determine where the stream starts (defaults to "largest")
* @param topics Names of the topics to consume
* @tparam K type of Kafka message key
* @tparam V type of Kafka message value
* @tparam KD type of Kafka message key decoder
* @tparam VD type of Kafka message value decoder
* @return DStream of (Kafka message key, Kafka message value)
*/
def createDirectStream[
K: ClassTag,
V: ClassTag,
KD <: Decoder[K]: ClassTag,
VD <: Decoder[V]: ClassTag] (
ssc: StreamingContext,
kafkaParams: Map[String, String],
topics: Set[String]
): InputDStream[(K, V)] = {
val messageHandler = (mmd: MessageAndMetadata[K, V]) => (mmd.key, mmd.message)
val kc = new KafkaCluster(kafkaParams)
val fromOffsets = getFromOffsets(kc, kafkaParams, topics)
new DirectKafkaInputDStream[K, V, KD, VD, (K, V)](
ssc, kafkaParams, fromOffsets, messageHandler)
}
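  // Illustrative usage sketch (not part of the original source): the topic and
  // broker names are assumptions for the example only.
  //
  //   import kafka.serializer.StringDecoder
  //   val kafkaParams = Map(
  //     "metadata.broker.list" -> "broker1:9092,broker2:9092",
  //     "auto.offset.reset" -> "smallest")
  //   val stream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
  //     ssc, kafkaParams, Set("events"))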
/**
* Create an input stream that directly pulls messages from Kafka Brokers
* without using any receiver. This stream can guarantee that each message
* from Kafka is included in transformations exactly once (see points below).
*
* Points to note:
* - No receivers: This stream does not use any receiver. It directly queries Kafka
* - Offsets: This does not use Zookeeper to store offsets. The consumed offsets are tracked
* by the stream itself. For interoperability with Kafka monitoring tools that depend on
* Zookeeper, you have to update Kafka/Zookeeper yourself from the streaming application.
* You can access the offsets used in each batch from the generated RDDs (see
* [[org.apache.spark.streaming.kafka.HasOffsetRanges]]).
* - Failure Recovery: To recover from driver failures, you have to enable checkpointing
* in the `StreamingContext`. The information on consumed offset can be
* recovered from the checkpoint. See the programming guide for details (constraints, etc.).
   * - End-to-end semantics: This stream ensures that every record is effectively received and
* transformed exactly once, but gives no guarantees on whether the transformed data are
* outputted exactly once. For end-to-end exactly-once semantics, you have to either ensure
* that the output operation is idempotent, or use transactions to output records atomically.
* See the programming guide for more details.
*
* @param jssc JavaStreamingContext object
* @param keyClass Class of the keys in the Kafka records
* @param valueClass Class of the values in the Kafka records
* @param keyDecoderClass Class of the key decoder
* @param valueDecoderClass Class of the value decoder
* @param recordClass Class of the records in DStream
* @param kafkaParams Kafka <a href="http://kafka.apache.org/documentation.html#configuration">
* configuration parameters</a>. Requires "metadata.broker.list" or "bootstrap.servers"
* to be set with Kafka broker(s) (NOT zookeeper servers), specified in
* host1:port1,host2:port2 form.
* @param fromOffsets Per-topic/partition Kafka offsets defining the (inclusive)
* starting point of the stream
* @param messageHandler Function for translating each message and metadata into the desired type
* @tparam K type of Kafka message key
* @tparam V type of Kafka message value
* @tparam KD type of Kafka message key decoder
* @tparam VD type of Kafka message value decoder
* @tparam R type returned by messageHandler
* @return DStream of R
*/
def createDirectStream[K, V, KD <: Decoder[K], VD <: Decoder[V], R](
jssc: JavaStreamingContext,
keyClass: Class[K],
valueClass: Class[V],
keyDecoderClass: Class[KD],
valueDecoderClass: Class[VD],
recordClass: Class[R],
kafkaParams: JMap[String, String],
fromOffsets: JMap[TopicAndPartition, JLong],
messageHandler: JFunction[MessageAndMetadata[K, V], R]
): JavaInputDStream[R] = {
implicit val keyCmt: ClassTag[K] = ClassTag(keyClass)
implicit val valueCmt: ClassTag[V] = ClassTag(valueClass)
implicit val keyDecoderCmt: ClassTag[KD] = ClassTag(keyDecoderClass)
implicit val valueDecoderCmt: ClassTag[VD] = ClassTag(valueDecoderClass)
implicit val recordCmt: ClassTag[R] = ClassTag(recordClass)
val cleanedHandler = jssc.sparkContext.clean(messageHandler.call _)
createDirectStream[K, V, KD, VD, R](
jssc.ssc,
Map(kafkaParams.asScala.toSeq: _*),
Map(fromOffsets.asScala.mapValues(_.longValue()).toSeq: _*),
cleanedHandler
)
}
/**
* Create an input stream that directly pulls messages from Kafka Brokers
* without using any receiver. This stream can guarantee that each message
* from Kafka is included in transformations exactly once (see points below).
*
* Points to note:
* - No receivers: This stream does not use any receiver. It directly queries Kafka
* - Offsets: This does not use Zookeeper to store offsets. The consumed offsets are tracked
* by the stream itself. For interoperability with Kafka monitoring tools that depend on
* Zookeeper, you have to update Kafka/Zookeeper yourself from the streaming application.
* You can access the offsets used in each batch from the generated RDDs (see
* [[org.apache.spark.streaming.kafka.HasOffsetRanges]]).
* - Failure Recovery: To recover from driver failures, you have to enable checkpointing
* in the `StreamingContext`. The information on consumed offset can be
* recovered from the checkpoint. See the programming guide for details (constraints, etc.).
   * - End-to-end semantics: This stream ensures that every record is effectively received and
* transformed exactly once, but gives no guarantees on whether the transformed data are
* outputted exactly once. For end-to-end exactly-once semantics, you have to either ensure
* that the output operation is idempotent, or use transactions to output records atomically.
* See the programming guide for more details.
*
* @param jssc JavaStreamingContext object
* @param keyClass Class of the keys in the Kafka records
* @param valueClass Class of the values in the Kafka records
* @param keyDecoderClass Class of the key decoder
   * @param valueDecoderClass Class of the value decoder
* @param kafkaParams Kafka <a href="http://kafka.apache.org/documentation.html#configuration">
* configuration parameters</a>. Requires "metadata.broker.list" or "bootstrap.servers"
* to be set with Kafka broker(s) (NOT zookeeper servers), specified in
* host1:port1,host2:port2 form.
* If not starting from a checkpoint, "auto.offset.reset" may be set to "largest" or "smallest"
* to determine where the stream starts (defaults to "largest")
* @param topics Names of the topics to consume
* @tparam K type of Kafka message key
* @tparam V type of Kafka message value
* @tparam KD type of Kafka message key decoder
* @tparam VD type of Kafka message value decoder
* @return DStream of (Kafka message key, Kafka message value)
*/
def createDirectStream[K, V, KD <: Decoder[K], VD <: Decoder[V]](
jssc: JavaStreamingContext,
keyClass: Class[K],
valueClass: Class[V],
keyDecoderClass: Class[KD],
valueDecoderClass: Class[VD],
kafkaParams: JMap[String, String],
topics: JSet[String]
): JavaPairInputDStream[K, V] = {
implicit val keyCmt: ClassTag[K] = ClassTag(keyClass)
implicit val valueCmt: ClassTag[V] = ClassTag(valueClass)
implicit val keyDecoderCmt: ClassTag[KD] = ClassTag(keyDecoderClass)
implicit val valueDecoderCmt: ClassTag[VD] = ClassTag(valueDecoderClass)
createDirectStream[K, V, KD, VD](
jssc.ssc,
Map(kafkaParams.asScala.toSeq: _*),
Set(topics.asScala.toSeq: _*)
)
}
}
/**
 * This is a helper class that wraps KafkaUtils.createStream() into a more
 * Python-friendly class and function so that it can be easily
* instantiated and called from Python's KafkaUtils.
*
* The zero-arg constructor helps instantiate this class from the Class object
* classOf[KafkaUtilsPythonHelper].newInstance(), and the createStream()
* takes care of known parameters instead of passing them from Python
*/
private[kafka] class KafkaUtilsPythonHelper {
import KafkaUtilsPythonHelper._
def createStream(
jssc: JavaStreamingContext,
kafkaParams: JMap[String, String],
topics: JMap[String, JInt],
storageLevel: StorageLevel): JavaPairReceiverInputDStream[Array[Byte], Array[Byte]] = {
KafkaUtils.createStream[Array[Byte], Array[Byte], DefaultDecoder, DefaultDecoder](
jssc,
classOf[Array[Byte]],
classOf[Array[Byte]],
classOf[DefaultDecoder],
classOf[DefaultDecoder],
kafkaParams,
topics,
storageLevel)
}
def createRDDWithoutMessageHandler(
jsc: JavaSparkContext,
kafkaParams: JMap[String, String],
offsetRanges: JList[OffsetRange],
leaders: JMap[TopicAndPartition, Broker]): JavaRDD[(Array[Byte], Array[Byte])] = {
val messageHandler =
(mmd: MessageAndMetadata[Array[Byte], Array[Byte]]) => (mmd.key, mmd.message)
new JavaRDD(createRDD(jsc, kafkaParams, offsetRanges, leaders, messageHandler))
}
def createRDDWithMessageHandler(
jsc: JavaSparkContext,
kafkaParams: JMap[String, String],
offsetRanges: JList[OffsetRange],
leaders: JMap[TopicAndPartition, Broker]): JavaRDD[Array[Byte]] = {
val messageHandler = (mmd: MessageAndMetadata[Array[Byte], Array[Byte]]) =>
new PythonMessageAndMetadata(
mmd.topic, mmd.partition, mmd.offset, mmd.key(), mmd.message())
val rdd = createRDD(jsc, kafkaParams, offsetRanges, leaders, messageHandler).
mapPartitions(picklerIterator)
new JavaRDD(rdd)
}
private def createRDD[V: ClassTag](
jsc: JavaSparkContext,
kafkaParams: JMap[String, String],
offsetRanges: JList[OffsetRange],
leaders: JMap[TopicAndPartition, Broker],
messageHandler: MessageAndMetadata[Array[Byte], Array[Byte]] => V): RDD[V] = {
KafkaUtils.createRDD[Array[Byte], Array[Byte], DefaultDecoder, DefaultDecoder, V](
jsc.sc,
kafkaParams.asScala.toMap,
offsetRanges.toArray(new Array[OffsetRange](offsetRanges.size())),
leaders.asScala.toMap,
messageHandler
)
}
def createDirectStreamWithoutMessageHandler(
jssc: JavaStreamingContext,
kafkaParams: JMap[String, String],
topics: JSet[String],
fromOffsets: JMap[TopicAndPartition, JNumber]): JavaDStream[(Array[Byte], Array[Byte])] = {
val messageHandler =
(mmd: MessageAndMetadata[Array[Byte], Array[Byte]]) => (mmd.key, mmd.message)
new JavaDStream(createDirectStream(jssc, kafkaParams, topics, fromOffsets, messageHandler))
}
def createDirectStreamWithMessageHandler(
jssc: JavaStreamingContext,
kafkaParams: JMap[String, String],
topics: JSet[String],
fromOffsets: JMap[TopicAndPartition, JNumber]): JavaDStream[Array[Byte]] = {
val messageHandler = (mmd: MessageAndMetadata[Array[Byte], Array[Byte]]) =>
new PythonMessageAndMetadata(mmd.topic, mmd.partition, mmd.offset, mmd.key(), mmd.message())
val stream = createDirectStream(jssc, kafkaParams, topics, fromOffsets, messageHandler).
mapPartitions(picklerIterator)
new JavaDStream(stream)
}
private def createDirectStream[V: ClassTag](
jssc: JavaStreamingContext,
kafkaParams: JMap[String, String],
topics: JSet[String],
fromOffsets: JMap[TopicAndPartition, JNumber],
messageHandler: MessageAndMetadata[Array[Byte], Array[Byte]] => V): DStream[V] = {
val currentFromOffsets = if (!fromOffsets.isEmpty) {
val topicsFromOffsets = fromOffsets.keySet().asScala.map(_.topic)
if (topicsFromOffsets != topics.asScala.toSet) {
throw new IllegalStateException(
s"The specified topics: ${topics.asScala.toSet.mkString(" ")} " +
s"do not equal to the topic from offsets: ${topicsFromOffsets.mkString(" ")}")
}
Map(fromOffsets.asScala.mapValues { _.longValue() }.toSeq: _*)
} else {
val kc = new KafkaCluster(Map(kafkaParams.asScala.toSeq: _*))
KafkaUtils.getFromOffsets(
kc, Map(kafkaParams.asScala.toSeq: _*), Set(topics.asScala.toSeq: _*))
}
KafkaUtils.createDirectStream[Array[Byte], Array[Byte], DefaultDecoder, DefaultDecoder, V](
jssc.ssc,
Map(kafkaParams.asScala.toSeq: _*),
Map(currentFromOffsets.toSeq: _*),
messageHandler)
}
def createOffsetRange(topic: String, partition: JInt, fromOffset: JLong, untilOffset: JLong
): OffsetRange = OffsetRange.create(topic, partition, fromOffset, untilOffset)
def createTopicAndPartition(topic: String, partition: JInt): TopicAndPartition =
TopicAndPartition(topic, partition)
def createBroker(host: String, port: JInt): Broker = Broker(host, port)
def offsetRangesOfKafkaRDD(rdd: RDD[_]): JList[OffsetRange] = {
val parentRDDs = rdd.getNarrowAncestors
val kafkaRDDs = parentRDDs.filter(rdd => rdd.isInstanceOf[KafkaRDD[_, _, _, _, _]])
require(
kafkaRDDs.length == 1,
"Cannot get offset ranges, as there may be multiple Kafka RDDs or no Kafka RDD associated" +
"with this RDD, please call this method only on a Kafka RDD.")
val kafkaRDD = kafkaRDDs.head.asInstanceOf[KafkaRDD[_, _, _, _, _]]
kafkaRDD.offsetRanges.toSeq.asJava
}
}
private object KafkaUtilsPythonHelper {
private var initialized = false
def initialize(): Unit = {
SerDeUtil.initialize()
synchronized {
if (!initialized) {
new PythonMessageAndMetadataPickler().register()
initialized = true
}
}
}
initialize()
def picklerIterator(iter: Iterator[Any]): Iterator[Array[Byte]] = {
new SerDeUtil.AutoBatchedPickler(iter)
}
case class PythonMessageAndMetadata(
topic: String,
partition: JInt,
offset: JLong,
key: Array[Byte],
message: Array[Byte])
class PythonMessageAndMetadataPickler extends IObjectPickler {
private val module = "pyspark.streaming.kafka"
def register(): Unit = {
Pickler.registerCustomPickler(classOf[PythonMessageAndMetadata], this)
Pickler.registerCustomPickler(this.getClass, this)
}
def pickle(obj: Object, out: OutputStream, pickler: Pickler) {
if (obj == this) {
out.write(Opcodes.GLOBAL)
out.write(s"$module\\nKafkaMessageAndMetadata\\n".getBytes(StandardCharsets.UTF_8))
} else {
pickler.save(this)
val msgAndMetaData = obj.asInstanceOf[PythonMessageAndMetadata]
out.write(Opcodes.MARK)
pickler.save(msgAndMetaData.topic)
pickler.save(msgAndMetaData.partition)
pickler.save(msgAndMetaData.offset)
pickler.save(msgAndMetaData.key)
pickler.save(msgAndMetaData.message)
out.write(Opcodes.TUPLE)
out.write(Opcodes.REDUCE)
}
}
}
}
| bravo-zhang/spark | external/kafka-0-8/src/main/scala/org/apache/spark/streaming/kafka/KafkaUtils.scala | Scala | apache-2.0 | 36,775 |
import dotty.tools.dotc.ast.Trees.*
import dotty.tools.dotc.core.Types.*
object Patterns {
val d: Object = null
private def rebase(tp: NamedType): Type = {
def rebaseFrom(prefix: Type): Type = ???
tp.prefix match {
case SkolemType(rt) => rebaseFrom(rt)
case pre: ThisType => rebaseFrom(pre)
case _ => tp
}
}
d match {
case WildcardType(bounds: TypeBounds) => bounds.lo
case a @ Assign(Ident(id), rhs) => id
case a: Object => a
}
('1', "1") match {
case (digit, str) => true
case _ => false
}
def foo2(x: AnyRef) = x match { case x: Function0[Any] => x() }
object Breakdown {
def unapplySeq(x: Int): Some[List[String]] = Some(List("", "there"))
}
object Test2 {
42 match {
case a@Breakdown(f@"") => // needed to trigger bug
case b@Breakdown(d@"foo") => // needed to trigger bug
case c@Breakdown(e@"", who) => println ("hello " + who)
}
}
val names = List("a", "b", "c")
object SeqExtractors {
val y = names match {
case List(x, z) => x
case List(x) => x
case List() => ""
case x @ _ => "wildcard"
}
val yy: String = y
}
val xs = List('2' -> "ABC", '3' -> "DEF")
xs filter {
case (digit, str) => true
case _ => false
}
(xs: Any) match {
case x: Int @unchecked => true
case xs: List[Int @ unchecked] => true
case _ => false
}
def sum(xs: List[Int]): Int = xs match {
case Nil => 0
case x :: xs1 => x + sum(xs1)
}
def len[T](xs: List[T]): Int = xs match {
case _ :: xs1 => 1 + len(xs1)
case Nil => 0
}
final def sameLength[T](xs: List[T], ys: List[T]): Boolean = xs match {
    case _ :: xs1 =>
      ys match {
case _ :: ys1 => sameLength(xs1, ys1)
case _ => false
}
case _ => ys.isEmpty
}
class A{
class B
}
val a1 = new A
val a2 = new A
d match {
case t: a1.B =>
t
case t: a2.B =>
t
}
class caseWithPatternVariableHelper1[A]
class caseWithPatternVariableHelper2[A]
def caseWithPatternVariable(x: Any) = x match {
case a: caseWithPatternVariableHelper1[_] => ()
case b: caseWithPatternVariableHelper2[_] => ()
}
}
object NestedPattern {
val xss: List[List[String]] = ???
val List(List(x)) = xss
}
// Tricky case (exercised by Scala parser combinators) where we use
// both get/isEmpty and product-based pattern matching in different
// matches on the same types.
object ProductAndGet {
trait Result[+T]
case class Success[+T](in: String, x: T) extends Result[T] {
def isEmpty = false
def get: T = x
}
case class Failure[+T](in: String, msg: String) extends Result[T] {
def isEmpty = false
def get: String = msg
}
val r: Result[Int] = ???
r match {
case Success(in, x) => x
case Failure(in, msg) => -1
}
r match {
case Success(x) => x
case Failure(msg) => -1
}
}
| dotty-staging/dotty | tests/pos-with-compiler/Patterns.scala | Scala | apache-2.0 | 2,936 |
class I0 { Map() { i1: I2 { type I3 } } }
| som-snytt/dotty | tests/pending/fuzzy/AE-68729466e791245d18cb32f1e30b38e9953f15ab.scala | Scala | apache-2.0 | 42 |
package com.twitter.finagle.thrift
import com.twitter.finagle.stats.{Counter, LazyStatsReceiver, NullStatsReceiver, StatsReceiver}
object ThriftMethodStats {
def apply(stats: StatsReceiver): ThriftMethodStats = {
val wrapped = new LazyStatsReceiver(stats)
ThriftMethodStats(
wrapped.counter("requests"),
wrapped.counter("success"),
wrapped.counter("failures"),
wrapped.scope("failures")
)
}
private[this] val NullThriftMethodStats = apply(NullStatsReceiver)
/**
* An instance of [[ThriftMethodStats]] that is backed by a
* `NullStatsReceiver`.
*
* This can be used as a sentinel instance where everything is a no-op.
*/
def Null: ThriftMethodStats = NullThriftMethodStats
}
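// Illustrative usage sketch (assumption, not part of the original file): callers can
// fall back to the no-op sentinel when metrics are disabled, e.g.
//
//   val stats =
//     if (metricsEnabled) ThriftMethodStats(statsReceiver.scope("search"))
//     else ThriftMethodStats.Null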
case class ThriftMethodStats private (
requestsCounter: Counter,
successCounter: Counter,
failuresCounter: Counter,
failuresScope: StatsReceiver)
| twitter/finagle | finagle-thrift/src/main/scala/com/twitter/finagle/thrift/ThriftMethodStats.scala | Scala | apache-2.0 | 895 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.parser
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.TableIdentifier
class TableIdentifierParserSuite extends SparkFunSuite {
import CatalystSqlParser._
// Add "$elem$", "$value$" & "$key$"
val hiveNonReservedKeyword = Array("add", "admin", "after", "analyze", "archive", "asc", "before",
"bucket", "buckets", "cascade", "change", "cluster", "clustered", "clusterstatus", "collection",
"columns", "comment", "compact", "compactions", "compute", "concatenate", "continue", "cost",
"data", "day", "databases", "datetime", "dbproperties", "deferred", "defined", "delimited",
"dependency", "desc", "directories", "directory", "disable", "distribute",
"enable", "escaped", "exclusive", "explain", "export", "fields", "file", "fileformat", "first",
"format", "formatted", "functions", "hold_ddltime", "hour", "idxproperties", "ignore", "index",
"indexes", "inpath", "inputdriver", "inputformat", "items", "jar", "keys", "key_type", "last",
"limit", "offset", "lines", "load", "location", "lock", "locks", "logical", "long", "mapjoin",
"materialized", "metadata", "minus", "minute", "month", "msck", "noscan", "no_drop", "nulls",
"offline", "option", "outputdriver", "outputformat", "overwrite", "owner", "partitioned",
"partitions", "plus", "pretty", "principals", "protection", "purge", "read", "readonly",
"rebuild", "recordreader", "recordwriter", "reload", "rename", "repair", "replace",
"replication", "restrict", "rewrite", "role", "roles", "schemas", "second",
"serde", "serdeproperties", "server", "sets", "shared", "show", "show_database", "skewed",
"sort", "sorted", "ssl", "statistics", "stored", "streamtable", "string", "struct", "tables",
"tblproperties", "temporary", "terminated", "tinyint", "touch", "transactions", "unarchive",
"undo", "uniontype", "unlock", "unset", "unsigned", "uri", "use", "utc", "utctimestamp",
"view", "while", "year", "work", "transaction", "write", "isolation", "level",
"snapshot", "autocommit", "all", "alter", "array", "as", "authorization", "between", "bigint",
"binary", "boolean", "both", "by", "create", "cube", "current_date", "current_timestamp",
"cursor", "date", "decimal", "delete", "describe", "double", "drop", "exists", "external",
"false", "fetch", "float", "for", "grant", "group", "grouping", "import", "in",
"insert", "int", "into", "is", "lateral", "like", "local", "none", "null",
"of", "order", "out", "outer", "partition", "percent", "procedure", "range", "reads", "revoke",
"rollup", "row", "rows", "set", "smallint", "table", "timestamp", "to", "trigger",
"true", "truncate", "update", "user", "using", "values", "with", "regexp", "rlike",
"bigint", "binary", "boolean", "current_date", "current_timestamp", "date", "double", "float",
"int", "smallint", "timestamp", "at")
val hiveStrictNonReservedKeyword = Seq("anti", "full", "inner", "left", "semi", "right",
"natural", "union", "intersect", "except", "database", "on", "join", "cross", "select", "from",
"where", "having", "from", "to", "table", "with", "not")
test("table identifier") {
// Regular names.
assert(TableIdentifier("q") === parseTableIdentifier("q"))
assert(TableIdentifier("q", Option("d")) === parseTableIdentifier("d.q"))
// Illegal names.
Seq("", "d.q.g", "t:", "${some.var.x}", "tab:1").foreach { identifier =>
intercept[ParseException](parseTableIdentifier(identifier))
}
}
test("quoted identifiers") {
assert(TableIdentifier("z", Some("x.y")) === parseTableIdentifier("`x.y`.z"))
assert(TableIdentifier("y.z", Some("x")) === parseTableIdentifier("x.`y.z`"))
assert(TableIdentifier("z", Some("`x.y`")) === parseTableIdentifier("```x.y```.z"))
assert(TableIdentifier("`y.z`", Some("x")) === parseTableIdentifier("x.```y.z```"))
assert(TableIdentifier("x.y.z", None) === parseTableIdentifier("`x.y.z`"))
}
test("table identifier - strict keywords") {
// SQL Keywords.
hiveStrictNonReservedKeyword.foreach { keyword =>
assert(TableIdentifier(keyword) === parseTableIdentifier(keyword))
assert(TableIdentifier(keyword) === parseTableIdentifier(s"`$keyword`"))
assert(TableIdentifier(keyword, Option("db")) === parseTableIdentifier(s"db.`$keyword`"))
}
}
test("table identifier - non reserved keywords") {
// Hive keywords are allowed.
hiveNonReservedKeyword.foreach { nonReserved =>
assert(TableIdentifier(nonReserved) === parseTableIdentifier(nonReserved))
}
}
test("SPARK-17364 table identifier - contains number") {
assert(parseTableIdentifier("123_") == TableIdentifier("123_"))
assert(parseTableIdentifier("1a.123_") == TableIdentifier("123_", Some("1a")))
// ".123" should not be treated as token of type DECIMAL_VALUE
assert(parseTableIdentifier("a.123A") == TableIdentifier("123A", Some("a")))
// ".123E3" should not be treated as token of type SCIENTIFIC_DECIMAL_VALUE
assert(parseTableIdentifier("a.123E3_LIST") == TableIdentifier("123E3_LIST", Some("a")))
// ".123D" should not be treated as token of type DOUBLE_LITERAL
assert(parseTableIdentifier("a.123D_LIST") == TableIdentifier("123D_LIST", Some("a")))
// ".123BD" should not be treated as token of type BIGDECIMAL_LITERAL
assert(parseTableIdentifier("a.123BD_LIST") == TableIdentifier("123BD_LIST", Some("a")))
}
test("SPARK-17832 table identifier - contains backtick") {
val complexName = TableIdentifier("`weird`table`name", Some("`d`b`1"))
assert(complexName === parseTableIdentifier("```d``b``1`.```weird``table``name`"))
assert(complexName === parseTableIdentifier(complexName.quotedString))
intercept[ParseException](parseTableIdentifier(complexName.unquotedString))
    // Table identifiers containing consecutive backticks should be treated correctly.
val complexName2 = TableIdentifier("x``y", Some("d``b"))
assert(complexName2 === parseTableIdentifier(complexName2.quotedString))
}
}
| wangyixiaohuihui/spark2-annotation | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala | Scala | apache-2.0 | 6,998 |
package one.lockstep.monolock.protocol
import scodec._
import scodec.codecs._
import one.lockstep.util._
import one.lockstep.util.crypto._
import one.lockstep.lock._
import one.lockstep.util.protocol._
import one.lockstep.archive.v2
case class EnrollmentRequest(ticket: Encoded[LockTicket],
timestamp: Long,
newSeriesIdDigest: Digest,
latestSeal: Option[EncryptedLockSeal]) extends MonolockRequest {
override def lockId: LockId = ticket.decoded.lockId
override def failure(error: MonolockResponse.Error): MonolockResponse = EnrollmentResponse.Failure(error)
}
object EnrollmentRequest {
implicit lazy val codec: Codec[EnrollmentRequest] = (
Codec[Encoded[LockTicket]] ::
int64 ::
Codec[Digest] ::
optional[EncryptedLockSeal](bool(8), Codec[EncryptedLockSeal])
).as[EnrollmentRequest]
implicit lazy val protocol = Protocol[EnrollmentRequest](Label("enrollment-request"), Versions.current, since = Versions.v3)
.withUpgrade[v2.EnrollmentRequest] { older =>
val ticket = Protocol.upgrade[v2.LockTicket, LockTicket].apply(older.ticket)
EnrollmentRequest(Protocol.encoded(ticket), older.timestamp, older.newSeriesIdDigest, older.latestSeal)
}
}
| lockstep-one/vault | vault-common/src/main/scala/one/lockstep/monolock/protocol/EnrollmentRequest.scala | Scala | agpl-3.0 | 1,289 |
/*
* Copyright 2009-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package textapp.server
import com.sun.net.httpserver.{HttpHandler, HttpExchange}
import ccf.session.ChannelId
import ccf.server.ServerEngine
import ccf.transport.json.JsonCodec
import io.Source
import textapp.{TextAppOperationDecoder, TextDocument}
class TextAppRequestHandler extends HttpHandler {
private val page404 =
<html>
<head><title>404 - Page not found</title></head>
<body>
<h1>404 - Page not found</h1>
</body>
</html>
private val page500 =
<html>
<head><title>500 - Internal server error</title></head>
<body>
<h1>500 - Internal server error</h1>
</body>
</html>
private val document = new TextDocument("")
private val interceptor = new TextAppOperationInterceptor(document)
private val serverEngine = new ServerEngine(codec = JsonCodec, operationInterceptor = interceptor, operationDecoder = new TextAppOperationDecoder)
private val defaultChannel = ChannelId.randomId
def handle(exchange: HttpExchange) {
try {
val uri = exchange.getRequestURI
println("Serving '%s' using %s %s".format(uri, exchange.getProtocol, exchange.getRequestMethod))
val request = Source.fromInputStream(exchange.getRequestBody).getLines.toList.foldLeft("\\n")(_+_)
val reply = serverEngine.processRequest(request)
exchange.sendResponseHeaders(200, reply.length)
exchange.getResponseBody.write(reply.getBytes)
} catch {
      case e: Exception =>
println("=== Exception while handling request ===")
e.printStackTrace
exchange.sendResponseHeaders(500, 0)
exchange.getResponseBody.write(page500.toString.getBytes)
} finally {
exchange.getResponseBody.close
}
}
}
| akisaarinen/ccf | app/src/main/scala/textapp/server/TextAppRequestHandler.scala | Scala | apache-2.0 | 2,341 |
import scala.reflect.macros.whitebox.Context
import scala.language.experimental.macros
import scala.annotation.StaticAnnotation
object helloMacro {
def impl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = {
import c.universe._
import Flag._
val result = {
annottees.map(_.tree).toList match {
case ModuleDef(mods, name, Template(parents, self, body)) :: Nil =>
val helloMethod = DefDef(NoMods, TermName("hello"), List(), List(List()), TypeTree(), Literal(Constant("hello")))
ModuleDef(mods, name, Template(parents, self, body :+ helloMethod))
}
}
c.Expr[Any](result)
}
}
class hello extends StaticAnnotation {
def macroTransform(annottees: Any*): Any = macro helloMacro.impl
}
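// Illustrative usage sketch (assumption, not part of the original file): annotating
// an object triggers the macro expansion above, which appends a `hello` method, so
//
//   @hello
//   object Greeter
//
// is expected to satisfy Greeter.hello == "hello".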
| mdemarne/scalahost | tests/src/main/scala/HelloMacros.scala | Scala | bsd-3-clause | 749 |
/*
* This file is part of Evo2DSim.
*
* Evo2DSim is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Evo2DSim is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Evo2DSim. If not, see <http://www.gnu.org/licenses/>.
*/
package org.vastness.evo2dsim.core.gui
import java.awt
import org.vastness.evo2dsim.macros.utils.Enum
/**
* Singelton that abstracts away the actual color implementation from the runtime
*/
sealed trait Color { def underlying: awt.Color }
object Color extends Enum[Color] {
case object BLACK extends Color { val underlying = awt.Color.BLACK }
case object RED extends Color { val underlying = awt.Color.RED }
case object BLUE extends Color { val underlying = awt.Color.BLUE }
case object CYAN extends Color { val underlying = awt.Color.CYAN }
}
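// Illustrative usage sketch (assumption, not part of the original file):
//
//   val c: Color = Color.RED
//   val awt: java.awt.Color = c.underlying // hand off to AWT only when rendering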
| vchuravy/Evo2DSim | core/src/main/scala/org/vastness/evo2dsim/core/gui/Color.scala | Scala | mit | 1,263 |
package Ch04
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.types.{FloatType, LongType, StructField, StructType}
import org.apache.spark.{SparkConf, SparkContext}
import org.stjohns.utils.StatisticsUtil.SummaryStat
import org.stjohns.utils.{DataFrameUtil, StatisticsUtil}
/**
* Created by Giancarlo on 10/18/2016.
*/
object Ouliers {
def processRow(row:Row, sStats:Map[Int, SummaryStat]): Row ={
val newRowArray = new Array[Any](row.size)
for (idx <- 0 to row.length-1) {
val zScore = (row.getFloat(idx)-sStats(idx).AVG)/sStats(idx).SD
newRowArray(idx) = zScore
}
Row.fromSeq(newRowArray.toSeq)
}
def isOutlier(row:Row,lowerBound:Double,upperBound:Double): Boolean = {
var result = false
for (idx <- 0 to row.length-1) {
      val value = try { row.getDouble(idx) } catch { case e: Exception => row.getFloat(idx).toDouble }
if (value<lowerBound || value>upperBound)
result = true
}
result
}
def main(args:Array[String]){
val conf = new SparkConf()
.setMaster("local[2]")
.setAppName("Data Preprocessing")
.set("spark.executor.memory","2g")
val sc = new SparkContext(conf)
val sqlContext = new SQLContext(sc)
// Read the dataset
val df = sqlContext.read.format("com.databricks.spark.csv").option("header", "true").load("c:/Spark/data/04-DataPreprocessing/01_Abalone - Missing.csv")
// Convert the schema to the appropriate types
val columns = Array("Sex","Length","Diameter","Height","WholeWeight","ShuckedWeight","Vweight","ShellWeight","Rings")
val types = Array("String","Float","Float","Float","Float","Float","Float","Float","int")
val dataset = DataFrameUtil.setTypes(df,columns,types)
// Extracting statistics
val rowsRDD = dataset.select("Length","Height").rdd
val stats = StatisticsUtil.extractStat(rowsRDD)
// Remove null values
val schema = StructType(Array(StructField("Length",FloatType,true),StructField("Height",FloatType,true)))
val dNew = sqlContext.createDataFrame(rowsRDD,schema).na.drop()
// OUTLIERS USING Z-SCORES
// Extracting statistics on Length
val lengthRDD = dataset.map(row=>row.getFloat(1).toDouble)
val lengthStats = lengthRDD.stats()
val mean = lengthStats.mean
val stdev = lengthStats.stdev
// Identifying outliers with z-Scores
val zScores = lengthRDD.map(value=> (value-mean)/stdev)
val zOutliers = zScores.filter(value => ((-3.0d > value)||(value > 3.0d)))
zOutliers.collect()
// Display outliers
zOutliers.foreach(println)
// OUTLIERS USING INTERQUARTILE RANGE
// Getting the array of values for Length
    val lengthArray = dataset.select("Length").sort("Length").map(row => row.getFloat(0).toDouble).collect()
// Retrieve the first and third percentile
val q1Idx:Int = (lengthArray.size*0.25).toInt
val q3Idx:Int = (lengthArray.size*0.75).toInt
val Q1 = lengthArray(q1Idx)
val Q3 = lengthArray(q3Idx)
val IQR = Q3-Q1
val lower = Q1-1.5d*IQR
val upper = Q3+1.5d*IQR
// Identify and display outliers
val iqrOutliers = dataset.select("Length").rdd.filter(row=> isOutlier(row,lower,upper)).collect()
println("Outlier Detection (IQR):")
println(" Q1: "+Q1)
println(" Q3: "+Q3)
println(" IQR: "+IQR)
println(" LowerBound: "+lower)
println(" UpperBound: "+upper)
iqrOutliers.foreach(println)
}
}
| gcrocetti/SeriesOnBigData | ChapterFour/Ouliers.scala | Scala | mit | 3,552 |
package com.dataintuitive.luciusapi.functions
import com.dataintuitive.luciuscore.genes._
import com.dataintuitive.luciuscore.Model.CompoundAnnotations
import com.dataintuitive.luciuscore.Model.DbRow
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Dataset
import org.apache.spark.sql.SparkSession
import scala.collection.immutable.Map
object TargetToCompoundsFunctions extends SessionFunctions {
import com.dataintuitive.luciusapi.Common.Variables._
case class JobData(db: Dataset[DbRow],
genes: GenesDB,
version: String,
targets: List[String],
limit: Int)
type JobOutput = Array[Map[String, Any]]
import com.dataintuitive.luciuscore.lenses.CompoundAnnotationsLenses._
def extractFeatures(r:CompoundAnnotations, features:List[String]) = features.map{
_ match {
case x if COMPOUND_ID contains x => safeIdLens.get(r)
case x if COMPOUND_SMILES contains x => safeSmilesLens.get(r)
case x if COMPOUND_INCHIKEY contains x => safeInchikeyLens.get(r)
case x if COMPOUND_NAME contains x => safeNameLens.get(r)
case x if COMPOUND_TYPE contains x => safeCtypeLens.get(r)
case x if COMPOUND_TARGETS contains x => safeKnownTargetsLens.get(r)
case _ => "Feature not found"
}}
val helpMsg =
s"""Returns a list of compounds matching a target query (list).
|
| Input:
| - query: List of targets to match (exact string match)
| - version: v1, v2 or t1 (optional, default is `v1`)
""".stripMargin
def info(data:JobData) = s"Result for target query ${data.targets}"
def header(data:JobData) = "All relevant data"
def result(data:JobData)(implicit sparkSession: SparkSession) = {
val JobData(db, genesDB, version, targetQuery, limit) = data
implicit val genes = genesDB
// I could distinguish on version as well, but this makes more sense
// This way, the same function can be reused for v1 and v2
    def isMatch(targets: Seq[String], query: List[String]): Boolean = {
      // Exact match on one of the entries in the query
      val querySet = query.toSet
      targets.exists(querySet.contains)
    }
val features = List(
"compound_id",
"compound_smiles",
"compound_inchikey",
"compound_name",
"compound_type",
"compound_targets"
)
val resultRDD =
db.rdd
.map(_.compoundAnnotations)
.filter{compoundAnnotations =>
compoundAnnotations.knownTargets.map(isMatch(_, targetQuery)).getOrElse(false)
}
.distinct
.sortBy(compoundAnnotations => compoundAnnotations.knownTargets.map(_.size).getOrElse(0))
val limitOutput = (resultRDD.count > limit)
// Should we limit the result set?
val result =
limitOutput match {
case true => resultRDD.take(limit)
case false => resultRDD.collect
}
// Array(Map( "test" -> targetQuery))
result
.map(entry => extractFeatures(entry, features) )
.map(_.zip(features).map(_.swap).toMap)
}
// def targetToCompounds = result _
}
| data-intuitive/LuciusAPI | src/main/scala/com/dataintuitive/luciusapi/functions/targetToCompoundsFunctions.scala | Scala | apache-2.0 | 3,172 |
package src.main.scala.types
import scala.collection.mutable.ListBuffer
import com.vividsolutions._
import org.geotools.geometry.jts.JTSFactoryFinder
import org.geoscript.geometry._
/* This case-class represents a speed in a geometry of New York
* City, as returned from its OpenData URL
*
* In a similar sense, it is a simplification of the WEKA instance
* that represents that speed, and it is smaller to fit into RAM
*/
case class SpeedInPolygonalSection(speed: Double,
geometry: jts.geom.Geometry,
polygon_encoded: String,
centroid: Point,
well_known_addr: String) {
// Handy alias to access the "geometry: jts.geom.Geometry" field, by alias
// "geom":
val geom = geometry
// The polygon associated to this geometry (it does assumes so far that it
// is a "MultiLine" geometry, as those that the NYC LION Single-Line Street
// GeoDB and the NYC Traffic Speed contain).
val polyg: jts.geom.Polygon = {
if (geometry != null) {
val coords = geometry.getCoordinates()
if (coords.size >= 3) {
/* If it has more than three coordinates, then it can
* be made a polygon by adding the first coordinate
* again as the last one, ie., by closing the coords
*/
val geomFactory = JTSFactoryFinder.getGeometryFactory()
var hull_coords = new ListBuffer[jts.geom.Coordinate]()
hull_coords ++= coords
// append first coord at the end, closing the coords
hull_coords += hull_coords(0)
val hull = geomFactory.createLinearRing(hull_coords.toArray)
// create the polygon
geomFactory.createPolygon(hull, null)
} else
null
} else
null
} // end of "val polyg : jts.geom.Polygon = { ..."
}
| je-nunez/DataMining_NewYork_Traffic_Speed | src/main/scala/types/SpeedInPolygonalSection.scala | Scala | gpl-2.0 | 1,884 |
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: Spiros Tzavellas
*/
package com.tzavellas.coeus.i18n.locale
import java.util.Locale
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
/**
* Resolves the user's locale from the primary locale specified in the
* <i>accept-language</i> header of the HTTP request.
*
* @see HttpServletRequest#getLocale()
*/
class AcceptHeaderLocaleResolver extends LocaleResolver {
/**
* Return the locale specified in the <em>accept-language</em> header
* of the HTTP request.
*/
def resolve(request: HttpServletRequest) = request.getLocale
/**
* This <code>LocaleResolver</code> always uses the locale specified in the
   * <i>accept-language</i> header of the HTTP request so this method is not
* supported.
*
* @throws UnsupportedOperationException
*/
def setLocale(request: HttpServletRequest, response: HttpServletResponse, locale: Locale) {
throw new UnsupportedOperationException
}
} | sptz45/coeus | src/main/scala/com/tzavellas/coeus/i18n/locale/AcceptHeaderLocaleResolver.scala | Scala | apache-2.0 | 1,063 |
/* Copyright 2009-2021 EPFL, Lausanne */
package stainless
package genc
import ir.Operators._
import ir.PrimitiveTypes._
import ir.Literals._
/*
* Here are defined classes used to represent AST of C programs.
*
* NOTE on char and string:
* because the C character and string literals encoding sets are
* highly dependent on platforms and compilers, only basic single-byte
* characters from the ASCII set are supported at the moment.
*
* Details on such literals can be found in the C99 standard in §3.7,
* §6.4.4.4 and §6.4.5, and more.
*
* NOTE Because types in union shall not be partially defined we need to
* keep track of the dependencies between Struct's and Union's in a
* Prog's types. We do this lazily by requiring the `types` field to
* be sorted appropriately. Also, it shall only contains Struct's
* and Union's, no other kind of Type's.
*/
object CAST { // C Abstract Syntax Tree
sealed abstract class Tree {
override def toString = {
val sb = new StringBuffer()
new CPrinter("stainless.h", true, Set(), Seq(), sb).print(this)
sb.toString
}
def size(using inox.Context): Int = {
var result = 0
new CASTTraverser {
override def traverse(t: Tree): Unit = { result += 1; super.traverse(t) }
}.traverse(this)
result
}
}
/* ----------------------------------------------------- Definitions ----- */
abstract class Def extends Tree
case class Include(file: String) extends Def {
require(file.nonEmpty && isASCII(file))
}
case class Prog(
headerIncludes: Set[Include],
cIncludes: Set[Include],
decls: Seq[(Decl, Seq[DeclarationMode])],
typeDefs: Set[TypeDef],
enums: Set[Enum],
types: Seq[DataType], // Both structs and unions, order IS important! See NOTE above.
functions: Set[Fun]
) extends Def {
require(types.length == types.distinct.length) // no duplicates in `types`
}
// Manually defined function through the cCode.function annotation have a string
// for signature+body instead of the usual Stmt AST exclusively for the body
case class Fun(id: Id, returnType: Type, params: Seq[Var], body: Either[Block, String], isExported: Boolean, isPure: Boolean) extends Def
case class Id(name: String) extends Def {
// TODO add check on name's domain for conformance
// `|` is used as the margin delimiter and can cause trouble in some situations,
// for example when name start with a `|`.
def fixMargin =
if (name.size > 0 && name(0) == '|') "| " + name
else name
}
case class Var(id: Id, typ: Type) extends Def
/* ------------------------------------------------------------ Types ----- */
abstract class Type extends Tree
abstract class DataType extends Type {
val id: Id
val fields: Seq[(Var, Seq[DeclarationMode])]
val isExported: Boolean
}
case class TypeDef(orig: Id, alias: Id, isExported: Boolean) extends Type
case class Primitive(pt: PrimitiveType) extends Type
case class Pointer(base: Type) extends Type
case class FunType(ret: Type, params: Seq[Type]) extends Type
case class Struct(id: Id, fields: Seq[(Var, Seq[DeclarationMode])], isExported: Boolean, isPacked: Boolean) extends DataType {
require(fields.nonEmpty, s"Fields of struct $id should be non empty")
}
case class Union(id: Id, fields: Seq[(Var, Seq[DeclarationMode])], isExported: Boolean) extends DataType {
require(fields.nonEmpty, s"Fields of union $id should be non empty")
}
case class Enum(id: Id, literals: Seq[EnumLiteral]) extends Type {
require(literals.nonEmpty, s"Literals in enum $id should be non empty")
}
case class FixedArrayType(base: Type, length: Int) extends Type
/* ------------------------------------------------------ Expressions ----- */
abstract class Expr extends Tree
case class Block(exprs: Seq[Expr]) extends Expr // Can be empty
case class Lit(lit: Literal) extends Expr
case class EnumLiteral(id: Id) extends Expr
case class MemSet(pointer: Expr, value: Expr, size: Expr) extends Expr
case class SizeOf(tpe: Type) extends Expr
case class Decl(id: Id, typ: Type, optValue: Option[Expr]) extends Expr {
require(optValue.forall(_.isValue), s"Initialisation $id = ${optValue.get} should be done with a value")
}
case class DeclArrayStatic(id: Id, base: Type, length: Int, values: Seq[Expr]) extends Expr {
require(values forall { _.isValue },
s"Array initialisation of $id with ${values.mkString("{", ", ", "}")} should be done with values"
)
}
case class ArrayStatic(base: Type, values: Seq[Expr]) extends Expr {
require(values forall { _.isValue },
s"Array ${values.mkString("{", ", ", "}")} should contain only values"
)
}
case class DeclArrayVLA(id: Id, base: Type, length: Expr, defaultExpr: Expr) extends Expr {
require(length.isValue, s"Length $length of array $id should be a value")
require(defaultExpr.isValue, s"Default expression $defaultExpr of of array $id should be a value")
}
// Initialise all the fields of a struct, in the same order as they are declared.
case class StructInit(struct: Struct, values: Seq[Expr]) extends Expr {
require(values.length == struct.fields.length,
s"Wrong number of arguments for initialisation of struct $struct with values $values (expected ${struct.fields.length})"
)
require(values forall { _.isValue },
s"Struct initialisation of $struct with ${values.mkString("{", ", ", "}")} should be done with values"
)
}
// Initialise one of the fields of the union
case class UnionInit(union: Union, fieldId: Id, value: Expr) extends Expr {
require(union.fields exists { case (vd, modes) => vd.id == fieldId },
s"Field $fieldId must exist in union $union"
)
require(value.isValue,
s"Initialisation of union $union with $value should be done with a value"
)
}
case class Call(callable: Expr, args: Seq[Expr]) extends Expr {
require(args forall { _.isValue },
s"Call of $callable with arguments ${args.mkString("{", ", ", "}")} should be done with values"
)
}
case class Binding(id: Id) extends Expr
case class FieldAccess(obj: Expr, fieldId: Id) extends Expr {
require(obj.isValue, s"Field access on $obj must be done on a value")
}
case class ArrayAccess(array: Expr, index: Expr) extends Expr {
require(array.isValue, s"Array access of $array must be done on a value")
}
case class Ref(e: Expr) extends Expr {
require(e.isValue, s"Referencing ($e) must be done on values")
}
case class Deref(e: Expr) extends Expr {
require(e.isValue, s"Dereferencing ($e) must be done on values")
}
case class Assign(lhs: Expr, rhs: Expr) extends Expr {
require(lhs.isValue, s"Assignment left-hand-side ($lhs) must be a value")
require(rhs.isValue, s"Assignment right-hand-side ($rhs) must be a value")
}
case class BinOp(op: BinaryOperator, lhs: Expr, rhs: Expr) extends Expr {
require(lhs.isValue, s"Left-hand-side of operation $op ($lhs) must be a value")
require(rhs.isValue, s"Right-hand-side of operation $op ($rhs) must be a value")
}
case class UnOp(op: UnaryOperator, expr: Expr) extends Expr {
require(expr.isValue, s"Unary operations $op must be done on values")
}
case class If(cond: Expr, thenn: Block) extends Expr {
require(cond.isValue, s"Condition ($cond) of if expression must be a value")
}
case class IfElse(cond: Expr, thenn: Block, elze: Block) extends Expr {
require(cond.isValue, s"Condition ($cond) of if-then-else expression must be a value")
}
case class While(cond: Expr, body: Block) extends Expr {
require(cond.isValue, s"Condition ($cond) of while loop must be a value")
}
case object Break extends Expr
case class Return(value: Expr) extends Expr {
require(value.isValue, s"Return expressions ($value) must be values")
}
case class Assert(e: Expr) extends Expr
// This can represent any C cast, however unsafe it can be.
case class Cast(expr: Expr, typ: Type) extends Expr
/* ---------------------------------------------------------- Helpers ----- */
// Flatten blocks together and remove `()` literals
def buildBlock(exprs: Seq[Expr]): Block = {
val block = (exprs filterNot isUnitLit).foldLeft(Seq.empty[Expr]) {
case (acc, e) => e match {
case Block(exprs) => acc ++ exprs
case expr => acc :+ expr
}
}
Block(block)
}
def buildBlock(expr: Expr): Block = buildBlock(Seq(expr))
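  // Illustrative sketch (assumption, not part of the original file): top-level unit
  // literals are dropped and nested blocks are spliced in, so
  //
  //   buildBlock(Seq(Lit(UnitLit), Block(Seq(Break)), Return(True)))
  //
  // is expected to yield Block(Seq(Break, Return(True))).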
object FreshId {
private val counter = new utils.UniqueCounter[String]()
def apply(prefix: String): Id = {
val idx = counter.next(prefix)
Id("stainless_" + prefix + "_" + idx)
}
}
val True = Lit(BoolLit(true))
/* ---------------------------------------------------------- Details ----- */
// String & char limitations, see NOTE above
private def isASCII(c: Char): Boolean = { c >= 0 && c <= 127 }
private def isASCII(s: String): Boolean = s forall isASCII
private def isUnitLit(e: Expr): Boolean = e match {
case Lit(UnitLit) => true
case _ => false
}
/* ---------------------------------------------- Sanitisation Helper ----- */
extension (e: Expr) {
private def isValue = e match {
case _: Binding | _: Lit | _: EnumLiteral | _: StructInit | _: ArrayStatic |
_: UnionInit | _: Call | _: FieldAccess | _: ArrayAccess |
_: Ref | _: Deref | _: BinOp | _: UnOp | _: Cast => true
case _ => false
}
private def isReference = e match {
case _: Ref => true
case _ => false
}
}
}
| epfl-lara/stainless | core/src/main/scala/stainless/genc/CAST.scala | Scala | apache-2.0 | 9,696 |
package com.jaroop.anorm.relational
/** A Function-like class to describe how to copy a list of child objects into
* their respective parents.
* @tparam A The type of the parent object in a one-to-many relation.
* @tparam B The type of the child object in a one-to-many relation.
* @param f A function that takes a parent and a list of it's children and returns the parent with the children nested inside.
*/
case class RowFlattener[A, B](f: (A, List[B]) => A) {
/** Syntactic sugar for applying the `RowFlattener` function, without collision with implicit functions.
* @param parent The parent object in a one-to-many relation.
* @param children The children that should be copied into the parent object.
* @return A parent containing the children in a nested list.
*/
def apply(parent: A, children: List[B]): A = f(parent, children)
}
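// Illustrative usage sketch (assumption, not part of the original file): `Author`
// and `Book` are hypothetical domain types for a one-to-many join.
//
//   case class Book(id: Long, title: String)
//   case class Author(id: Long, name: String, books: List[Book])
//
//   val flattener = RowFlattener[Author, Book] { (author, books) =>
//     author.copy(books = books)
//   }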
/** Includes extra `apply` methods for each `RowFlattener` arity. */
object RowFlattener {
def apply[A, B1, B2](f: (A, List[B1], List[B2]) => A): RowFlattener2[A, B1, B2] = RowFlattener2[A, B1, B2](f)
def apply[A, B1, B2, B3](f: (A, List[B1], List[B2], List[B3]) => A): RowFlattener3[A, B1, B2, B3] = RowFlattener3[A, B1, B2, B3](f)
def apply[A, B1, B2, B3, B4](f: (A, List[B1], List[B2], List[B3], List[B4]) => A): RowFlattener4[A, B1, B2, B3, B4] = RowFlattener4[A, B1, B2, B3, B4](f)
def apply[A, B1, B2, B3, B4, B5](f: (A, List[B1], List[B2], List[B3], List[B4], List[B5]) => A): RowFlattener5[A, B1, B2, B3, B4, B5] = RowFlattener5[A, B1, B2, B3, B4, B5](f)
}
case class RowFlattener2[A, B1, B2](f: (A, List[B1], List[B2]) => A) {
def apply(parent: A, c1: List[B1], c2: List[B2]): A = f(parent, c1, c2)
}
case class RowFlattener3[A, B1, B2, B3](f: (A, List[B1], List[B2], List[B3]) => A) {
def apply(parent: A, c1: List[B1], c2: List[B2], c3: List[B3]): A = f(parent, c1, c2, c3)
}
case class RowFlattener4[A, B1, B2, B3, B4](f: (A, List[B1], List[B2], List[B3], List[B4]) => A) {
def apply(parent: A, c1: List[B1], c2: List[B2], c3: List[B3], c4: List[B4]): A = f(parent, c1, c2, c3, c4)
}
case class RowFlattener5[A, B1, B2, B3, B4, B5](f: (A, List[B1], List[B2], List[B3], List[B4], List[B5]) => A) {
def apply(parent: A, c1: List[B1], c2: List[B2], c3: List[B3], c4: List[B4], c5: List[B5]): A = f(parent, c1, c2, c3, c4, c5)
} | mhzajac/anorm-relational | src/main/scala/com/jaroop/anorm/relational/RowFlattener.scala | Scala | apache-2.0 | 2,316 |
package cz.vse.easyminer.miner
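/** Builds a [[Dataset]] from the given database connection details and table name,
* passes it to the supplied function, and returns that function's result.
* (Description inferred from the signature.)
*/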
trait DatasetBuilder {
def buildAndExecute[T](dbServer: String, dbName: String, dbUser: String, dbPass: String, dbTableName: String)(dbq: Dataset => T): T
}
| KIZI/EasyMiner-Apriori-R | src/main/scala/cz/vse/easyminer/miner/DatasetBuilder.scala | Scala | bsd-3-clause | 191 |
package services
import org.specs2.mutable.Specification
import scala.util.Try
class OptimiserWithFlexibleProcessorsSpec extends Specification {
val oneDesk: Seq[Int] = Seq.fill(30)(1)
val oneBank: Seq[Int] = Seq.fill(30)(1)
private val zeroWaitFor30Minutes: Seq[Int] = Seq.fill(30)(0)
"Crunch with desk workload processors" >> {
"Given 1 minutes incoming workload per minute, and desks fixed at 1 per minute" >> {
"I should see all the workload completed each minute, leaving zero wait times" >> {
val oneMinuteWorkloadFor30Minutes = Seq.fill(30)(1d)
val result: Try[OptimizerCrunchResult] = OptimiserWithFlexibleProcessors.crunch(
workloads = oneMinuteWorkloadFor30Minutes,
minDesks = oneDesk,
maxDesks = oneDesk,
config = OptimiserConfig(20, DeskWorkloadProcessors))
result.get.waitTimes === zeroWaitFor30Minutes
}
}
"Given 2 minutes incoming workload per minute, and desks fixed at 1 per minute" >> {
"I should see workload spilling over each minute, leaving increasing wait times" >> {
val twoMinuteWorkloadFor30Minutes = Seq.fill(30)(2d)
val result: Try[OptimizerCrunchResult] = OptimiserWithFlexibleProcessors.crunch(
workloads = twoMinuteWorkloadFor30Minutes,
minDesks = oneDesk,
maxDesks = oneDesk,
config = OptimiserConfig(20, DeskWorkloadProcessors))
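// 2 minutes of work arrives each minute but 1 desk clears only 1 minute of it,
// so the backlog grows by 1 minute every minute and the wait rises 1 minute every 2 minutes.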
val increasingWaitTimes = Seq(1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15)
result.get.waitTimes === increasingWaitTimes
}
}
}
"Crunch with egate workload processors" >> {
val tenMinutesWorkloadFor30Minutes = List.fill(30)(10d)
"Given 10 minutes incoming workload per minute, and egate banks of size 10 gates fixed at 1 bank per minute" >> {
"I should see all the workload completed each minute, leaving zero wait times" >> {
val bankSizes = Iterable(10)
val result: Try[OptimizerCrunchResult] = OptimiserWithFlexibleProcessors.crunch(
workloads = tenMinutesWorkloadFor30Minutes,
minDesks = oneBank,
maxDesks = oneBank,
config = OptimiserConfig(20, EGateWorkloadProcessors(bankSizes)))
result.get.waitTimes === zeroWaitFor30Minutes
}
}
"Given 10 minutes incoming workload per minute, and egate banks of size 5 gates fixed at 1 bank per minute" >> {
"I should see wait times creeping up by a minute every 2 minutes" >> {
val bankSizes = Iterable(5)
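// A single bank of 5 gates clears 5 of the 10 minutes of work arriving each minute,
// so half of each minute's work queues and the wait climbs 1 minute every 2 minutes.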
val result: Try[OptimizerCrunchResult] = OptimiserWithFlexibleProcessors.crunch(
workloads = tenMinutesWorkloadFor30Minutes,
minDesks = oneBank,
maxDesks = oneBank,
config = OptimiserConfig(20, EGateWorkloadProcessors(bankSizes)))
val increasingWaitTimes = Seq(1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15)
result.get.waitTimes === increasingWaitTimes
}
}
"Given 20 minutes incoming workload per minute, and egate banks of size 15 gates fixed at 1 bank per minute" >> {
"I should see wait times creeping up by a minute every 4 minutes" >> {
val twentyMinutesWorkloadFor30Minutes = List.fill(30)(20d)
val bankSizes = Iterable(15)
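// 15 gates clear 15 of the 20 minutes of work arriving each minute,
// so a quarter of each minute's work queues and the wait climbs 1 minute every 4 minutes.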
val result: Try[OptimizerCrunchResult] = OptimiserWithFlexibleProcessors.crunch(
workloads = twentyMinutesWorkloadFor30Minutes,
minDesks = oneBank,
maxDesks = oneBank,
config = OptimiserConfig(20, EGateWorkloadProcessors(bankSizes)))
val increasingWaitTimes = Seq(1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8)
result.get.waitTimes === increasingWaitTimes
}
}
val oneBankFor15Minutes: Seq[Int] = Seq.fill(15)(1)
val twoBanksFor15Minutes: Seq[Int] = Seq.fill(15)(2)
"Given 10 minutes incoming workload per minute, and egate banks of sizes 5 & 5 gates fixed at 1 bank for 15 mins followed by 2 banks for 15 mins" >> {
"I should see wait times creeping up by a minute every 2 minutes for the first 15 minutes and then holding steady for the remaining time" >> {
val bankSizes = Iterable(5, 5)
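// Once the second bank opens at minute 15, the 10 open gates exactly match the
// 10 minutes of work arriving per minute, so the wait stops growing and holds at 8 minutes.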
val result: Try[OptimizerCrunchResult] = OptimiserWithFlexibleProcessors.crunch(
tenMinutesWorkloadFor30Minutes,
oneBankFor15Minutes ++ twoBanksFor15Minutes,
oneBankFor15Minutes ++ twoBanksFor15Minutes,
OptimiserConfig(20, EGateWorkloadProcessors(bankSizes)))
val increasingWaitTimes = Seq(1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8)
result.get.waitTimes === increasingWaitTimes
}
}
"Given 10 minutes incoming workload per minute, and egate banks of sizes 5 & 10 gates fixed at 1 bank for 15 mins followed by 2 banks for 15 mins" >> {
"I should see wait times creeping up by a minute every 2 minutes for the first 15 minutes and then falling for the remaining time" >> {
val bankSizes = Iterable(5, 10)
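// From minute 15 the two banks provide 15 gates against 10 minutes of incoming work,
// so the backlog drains and the wait falls by 1 minute every 2 minutes down to zero.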
val result: Try[OptimizerCrunchResult] = OptimiserWithFlexibleProcessors.crunch(
tenMinutesWorkloadFor30Minutes,
oneBankFor15Minutes ++ twoBanksFor15Minutes,
oneBankFor15Minutes ++ twoBanksFor15Minutes,
OptimiserConfig(20, EGateWorkloadProcessors(bankSizes)))
val increasingWaitTimes = Seq(1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 7, 7, 6, 6, 5, 5, 4, 4, 3, 3, 2, 2, 1, 1, 0)
result.get.waitTimes === increasingWaitTimes
}
}
"Given 10 minutes incoming workload per minute, and egate banks of sizes 5 & 10 gates with min 1 and max 3, and small SLA of 5 minutes" >> {
"The optimiser should decide on 2 banks (9 gates) for 15 minutes followed by 3 banks (11 gates), with wait times slowly climbing and then slowly falling" >> {
val bankSizes = Iterable(6, 3, 2)
val threeDesksOrGates = Seq.fill(30)(3)
val result: Try[OptimizerCrunchResult] = OptimiserWithFlexibleProcessors.crunch(
tenMinutesWorkloadFor30Minutes,
oneBank,
threeDesksOrGates,
OptimiserConfig(5, EGateWorkloadProcessors(bankSizes)))
val expected = OptimizerCrunchResult(
Vector(2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3),
Seq(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0))
result.get === expected
}
}
}
"rollingFairXmax with egate workload processors" >> {
val oneGateFor60Minutes: IndexedSeq[Int] = IndexedSeq.fill(60)(1)
"Given 60 minutes of 3.5 minutes work per minute" >> {
"When comparing the original rollingFairXmax to the new one given banks of size 1" >> {
"Both results should be the same, 3 desks" >> {
val bankSizes = Iterable(1, 1, 1, 1, 1, 1, 1, 1)
val workPerMinute = 3.5
val workloadFor60Minutes = IndexedSeq.fill(60)(workPerMinute)
val result: IndexedSeq[Int] = OptimiserWithFlexibleProcessors.rollingFairXmax(workloadFor60Minutes, oneGateFor60Minutes, 5, 15, 60, 120, EGateWorkloadProcessors(bankSizes))
val threeBanksFor60Minutes = Seq.fill(60)(3)
result === threeBanksFor60Minutes
}
}
}
"Given 60 minutes of 3.6 minutes work per minute" >> {
"When comparing the original rollingFairXmax to the new one given banks of size 1" >> {
"Both results should be the same, 4 desks" >> {
val bankSizes = Iterable(1, 1, 1, 1, 1, 1, 1, 1)
val workPerMinute = 3.6
val workloadFor60Minutes = IndexedSeq.fill(60)(workPerMinute)
val result: IndexedSeq[Int] = OptimiserWithFlexibleProcessors.rollingFairXmax(workloadFor60Minutes, oneGateFor60Minutes, 5, 15, 60, 120, EGateWorkloadProcessors(bankSizes))
val fourBanksFor60Minutes = Seq.fill(60)(4)
result === fourBanksFor60Minutes
}
}
}
"Given 60 minutes of 3 minutes work per minute, and bank sizes of 3, 5, 5" >> {
"When asking for the rolling fair xmax" >> {
"The result should be 1 bank, since 3 gates can clear 3 minutes of work per minute" >> {
val bankSizes = Iterable(3, 5, 5)
val workPerMinute = 3d
val workloadFor60Minutes = IndexedSeq.fill(60)(workPerMinute)
val result: IndexedSeq[Int] = OptimiserWithFlexibleProcessors.rollingFairXmax(workloadFor60Minutes, oneGateFor60Minutes, 5, 15, 60, 120, EGateWorkloadProcessors(bankSizes))
val oneBankFor60Minutes = Seq.fill(60)(1)
result === oneBankFor60Minutes
}
}
}
"Given 60 minutes of 6 minutes work per minute, and bank sizes of 3, 5, 5" >> {
"When asking for the rolling fair xmax" >> {
"The result should be 2 banks, since 3 gates is insufficient, but 8 (3 + 5) would be enough" >> {
val bankSizes = Iterable(3, 5, 5)
val workPerMinute = 6d
val workloadFor60Minutes = IndexedSeq.fill(60)(workPerMinute)
val result: IndexedSeq[Int] = OptimiserWithFlexibleProcessors.rollingFairXmax(workloadFor60Minutes, oneGateFor60Minutes, 5, 15, 60, 120, EGateWorkloadProcessors(bankSizes))
val twoBanksFor60Minutes = Seq.fill(60)(2)
result === twoBanksFor60Minutes
}
}
}
}
"churn penalty" >> {
"Given a fluctuating number of desks" >> {
val churnStart = 1
val desks = IndexedSeq(5, 10, 5, 15, 12, 14, 9, 5, 3, 7, 3, 9, 2, 10, 10, 14, 11, 16, 50, 25, 15, 10)
"When I calculate the churn" >> {
val churn = OptimiserWithFlexibleProcessors.totalDesksOpeningFromClosed(churnStart, desks)
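// 82 is the sum of every positive step-up in the desk series, counting the initial
// step from churnStart = 1 up to the first value of 5: 4 + 5 + 10 + 2 + 4 + 6 + 8 + 4 + 5 + 34 = 82.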
val expected = 82
"It should be the sum of the number of desks that had to open from closed across the period" >> {
churn === expected
}
}
}
}
"Processing work" >> {
"Given a workload and capacity containing some zeros" >> {
val workload = IndexedSeq.fill(60)(5d)
val capacity = IndexedSeq.fill(30)(5) ++ IndexedSeq.fill(30)(0)
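// The second half-hour has zero capacity; a naive utilisation calculation would divide
// by zero there, so this checks that no NaN leaks into the utilisation results.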
"When I ask for the ProcessedWork" >> {
val processed = OptimiserWithFlexibleProcessors.tryProcessWork(workload, capacity, 25, IndexedSeq(), DeskWorkloadProcessors)
"I should not find any NaNs" >> {
processed.get.util.exists(_.isNaN) === false
}
}
}
}
}
| UKHomeOffice/drt-scalajs-spa-exploration | server/src/test/scala/services/OptimiserWithFlexibleProcessorsSpec.scala | Scala | apache-2.0 | 10,522 |
package com.rockymadden.stringmetric.phonetic
import com.rockymadden.stringmetric.{StringAlgorithm, StringFilter}
import com.rockymadden.stringmetric.Alphabet.{Alpha, LowercaseVowel}
import scala.annotation.{switch, tailrec}
/** An implementation of the refined NYSIIS algorithm. */
class RefinedNysiisAlgorithm extends StringAlgorithm[DummyImplicit, String] { this: StringFilter =>
final override def compute(charArray: Array[Char])(implicit di: DummyImplicit): Option[Array[Char]] = {
val fca = filter(charArray)
if (fca.length == 0 || !(Alpha isSuperset fca.head)) None
else {
val lfca = fca.map(_.toLower)
val tlh = transcodeLast(transcodeHead(lfca.head +: cleanLast(lfca.tail, Set('s', 'z'))))
val t = transcode(Array.empty[Char], tlh.head, tlh.tail, Array.empty[Char])
if (t.length == 1) Some(t)
else Some(deduplicate(t.head +: cleanTerminal(cleanLast(t.tail, Set('a')))))
}
}
final override def compute(string: String)(implicit di: DummyImplicit): Option[String] =
compute(string.toCharArray).map(_.mkString)
private[this] def cleanLast(ca: Array[Char], s: Set[Char]) =
if (ca.length == 0) ca
else if (s.contains(ca.last)) ca.dropRight(ca.reverseIterator.takeWhile(c => s.contains(c)).length)
else ca
private[this] def cleanTerminal(ca: Array[Char]) =
if (ca.length >= 2 && ca.last == 'y' && ca(ca.length - 2) == 'a') ca.dropRight(2) :+ 'y'
else ca
private[this] def deduplicate(ca: Array[Char]) =
if (ca.length <= 1) ca
else ca.sliding(2).withFilter(a => a(0) != a(1)).map(a => a(0)).toArray[Char] :+ ca.last
@tailrec
private[this] def transcode(l: Array[Char], c: Char, r: Array[Char], o: Array[Char]): Array[Char] = {
if (c == '\0' && r.length == 0) o
else {
def shift(d: Int, ca: Array[Char]) = {
val sca = r.splitAt(d - 1)
(
if (sca._1.length > 0) (l :+ c) ++ sca._1 else l :+ c,
if (sca._2.length > 0) sca._2.head else '\0',
if (sca._2.length > 1) sca._2.tail else Array.empty[Char],
ca
)
}
val t = {
(c: @switch) match {
case 'a' | 'i' | 'o' | 'u' =>
if (l.length == 0) shift(1, o :+ c)
else shift(1, o :+ 'a')
case 'b' | 'c' | 'f' | 'j' | 'l' | 'n' | 'r' | 't' | 'v' | 'x' => shift(1, o :+ c)
case 'd' =>
if (r.length >= 1 && r.head == 'g') shift(2, o :+ 'g') else shift(1, o :+ c)
case 'e' =>
if (l.length == 0) shift(1, o :+ c)
else if (r.length >= 1 && r.head == 'v') shift(2, o ++ Array('a', 'f'))
else shift(1, o :+ 'a')
case 'g' =>
if (r.length >= 2 && r.head == 'h' && r(1) == 't') shift(3, o ++ Array('g', 't'))
else shift(1, o :+ c)
case 'h' =>
if (l.length == 0) shift(1, o :+ c)
else if (!(LowercaseVowel isSuperset l.last) || (r.length >= 1 && !(LowercaseVowel isSuperset r.head))) shift(1, o)
else shift(1, o :+ c)
case 'k' => if (r.length >= 1 && r.head == 'n') shift(2, o :+ 'n') else shift(1, o :+ 'c')
case 'm' => if (l.length == 0) shift(1, o :+ c) else shift(1, o :+ 'n')
case 'p' => if (r.length >= 1 && r.head == 'h') shift(2, o :+ 'f') else shift(1, o :+ c)
case 'q' => if (l.length == 0) shift(1, o :+ c) else shift(1, o :+ 'g')
case 's' =>
if (r.length >= 2 && r.head == 'c' && r(1) == 'h') shift(3, o :+ c)
else if (r.length >= 1 && r.head == 'h') shift(2, o :+ c)
else shift(1, o :+ c)
case 'w' =>
if (l.length >= 1 && (LowercaseVowel isSuperset l.last)) shift(1, o)
else if (r.length >= 1 && r.head == 'r') shift(2, o :+ 'r')
else shift(1, o :+ c)
case 'y' =>
if (l.length >= 1 && r.length >= 2 && r.head == 'w') shift(2, o :+ 'a')
else if (r.length >= 1 && r.head == 'w') shift(2, o :+ c)
else if (l.length >= 1 && r.length >= 1) shift(1, o :+ 'a')
else shift(1, o :+ c)
case 'z' => if (l.length == 0) shift(1, o :+ c) else shift(1, o :+ 's')
case _ => shift(1, o)
}
}
transcode(t._1, t._2, t._3, t._4)
}
}
private[this] def transcodeHead(ca: Array[Char]) = {
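// Rewrites known prefixes before transcoding: "mac" -> "mc", "pf" -> "f".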
if (ca.length == 0) ca
else
(ca.head: @switch) match {
case 'm' if (ca.length >= 3 && ca(1) == 'a' && ca(2) == 'c') => Array('m', 'c') ++ ca.takeRight(ca.length - 3)
case 'p' if (ca.length >= 2 && ca(1) == 'f') => 'f' +: ca.takeRight(ca.length - 2)
case _ => ca
}
}
private[this] def transcodeLast(ca: Array[Char]) = {
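// Rewrites known endings: "nd"/"rd" -> "d", "ee"/"ie"/"ye" -> "y",
// "dt"/"nt"/"rt" -> "d", "ex" -> "ec", "ix" -> "ic".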
if (ca.length >= 2) {
val lc = ca(ca.length - 1)
val lcm1 = ca(ca.length - 2)
lazy val t2 = ca.take(ca.length - 2)
(lc: @switch) match {
case 'd' if (lcm1 == 'n' || lcm1 == 'r') => t2 :+ 'd'
case 'e' if (lcm1 == 'e' || lcm1 == 'i' || lcm1 == 'y') => t2 :+ 'y'
case 't' if (lcm1 == 'd' || lcm1 == 'n' || lcm1 == 'r') => t2 :+ 'd'
case 'x' if (lcm1 == 'e') => t2 ++ Array('e', 'c')
case 'x' if (lcm1 == 'i') => t2 ++ Array('i', 'c')
case _ => ca
}
} else ca
}
}
object RefinedNysiisAlgorithm {
private lazy val self = apply()
def apply(): RefinedNysiisAlgorithm = new RefinedNysiisAlgorithm with StringFilter
def compute(charArray: Array[Char]) = self.compute(charArray)
def compute(string: String) = self.compute(string)
}
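// Usage sketch (illustrative; the resulting phonetic code is not asserted here):
//   val code: Option[String] = RefinedNysiisAlgorithm.compute("Alexander")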
| cocoxu/multip | src/main/scala/com/rockymadden/stringmetric/phonetic/RefinedNysiisAlgorithm.scala | Scala | gpl-3.0 | 5,144 |