code | repo_name | path | language | license | size
---|---|---|---|---|---
/*
* Tranquility.
* Copyright 2013, 2014, 2015 Metamarkets Group, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.metamx.tranquility.test
import backtype.storm.Config
import backtype.storm.task.IMetricsContext
import backtype.storm.topology.TopologyBuilder
import com.metamx.common.scala.Logging
import com.metamx.common.scala.Predef._
import com.metamx.common.scala.timekeeper.TestingTimekeeper
import com.metamx.tranquility.storm.BeamBolt
import com.metamx.tranquility.storm.BeamFactory
import com.metamx.tranquility.storm.common.SimpleKryoFactory
import com.metamx.tranquility.storm.common.SimpleSpout
import com.metamx.tranquility.storm.common.StormRequiringSuite
import com.metamx.tranquility.test.StormDruidTest._
import com.metamx.tranquility.test.common.CuratorRequiringSuite
import com.metamx.tranquility.test.common.DruidIntegrationSuite
import com.metamx.tranquility.test.common.JulUtils
import java.{util => ju}
import org.apache.curator.framework.CuratorFrameworkFactory
import org.apache.curator.retry.BoundedExponentialBackoffRetry
import org.junit.runner.RunWith
import org.scala_tools.time.Imports._
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
object StormDruidTest
{
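// Each beam builds its own Curator client ("aDifferentCurator"), so the Storm
// workers never share a ZooKeeper connection with the test harness.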
def newBeamFactory(zkConnect: String, now: DateTime): BeamFactory[SimpleEvent] = {
new BeamFactory[SimpleEvent]
{
override def makeBeam(conf: ju.Map[_, _], metrics: IMetricsContext) = {
val aDifferentCurator = CuratorFrameworkFactory.newClient(
zkConnect,
new BoundedExponentialBackoffRetry(100, 1000, 5)
)
aDifferentCurator.start()
DirectDruidTest.newBuilder(
aDifferentCurator, new TestingTimekeeper withEffect {
timekeeper =>
timekeeper.now = now
}
).buildBeam()
}
}
}
}
@RunWith(classOf[JUnitRunner])
class StormDruidTest
extends FunSuite with DruidIntegrationSuite with CuratorRequiringSuite with StormRequiringSuite with Logging
{
JulUtils.routeJulThroughSlf4j()
test("Storm to Druid") {
withDruidStack {
(curator, broker, overlord) =>
val zkConnect = curator.getZookeeperClient.getCurrentConnectionString
val now = new DateTime().hourOfDay().roundFloorCopy()
withLocalStorm {
storm =>
val inputs = DirectDruidTest.generateEvents(now)
val spout = SimpleSpout.create(inputs)
val conf = new Config
conf.setKryoFactory(classOf[SimpleKryoFactory])
val builder = new TopologyBuilder
builder.setSpout("events", spout)
builder
.setBolt("beam", new BeamBolt[SimpleEvent](newBeamFactory(zkConnect, now)))
.shuffleGrouping("events")
storm.submitTopology("test", conf, builder.createTopology())
runTestQueriesAndAssertions(
broker, new TestingTimekeeper withEffect {
timekeeper =>
timekeeper.now = now
}
)
}
}
}
}
| deepikakhera/spark-tranquility | storm/src/test/scala/com/metamx/tranquility/test/StormDruidTest.scala | Scala | apache-2.0 | 3,569 |
package org.testobjects
trait ATestInt {
def withValue(int: Int): Int = int
val nonNull: Int = 0
}
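// Usage sketch (hypothetical): instantiate the trait anonymously to obtain
// deterministic test ints.
//
//   val fixed: Int = new ATestInt {}.withValue(42) // 42
//   val zero: Int  = new ATestInt {}.nonNull       // 0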
| test-objects/test-objects-for-scala-jvm1.7 | src/main/scala/org/testobjects/ATestInt.scala | Scala | apache-2.0 | 107 |
package yuuto.enhancedinventories.block
import java.util.List
import net.minecraftforge.common.util.ForgeDirection
import net.minecraft.tileentity.TileEntity
import yuuto.enhancedinventories.tile.TileCabinet
import net.minecraft.world.World
import net.minecraft.block.material.Material
import net.minecraft.nbt.NBTTagCompound
import net.minecraft.creativetab.CreativeTabs
import yuuto.enhancedinventories.materials.DecorationHelper
import net.minecraft.init.Blocks
import cpw.mods.fml.relauncher.SideOnly
import net.minecraft.item.Item
import net.minecraft.item.ItemStack
import yuuto.enhancedinventories.materials.ETier
import net.minecraft.entity.player.EntityPlayer
import yuuto.enhancedinventories.compat.refinedrelocation.RefinedRelocationHelper
import yuuto.enhancedinventories.EnhancedInventories
import net.minecraft.world.IBlockAccess
import net.minecraft.util.AxisAlignedBB
import yuuto.enhancedinventories.block.traits.TBlockSecurable
import yuuto.enhancedinventories.block.traits.TBlockDecorative
import yuuto.enhancedinventories.block.traits.TBlockInventoryUpgradeable
import yuuto.enhancedinventories.block.base.BlockBaseEI
import cpw.mods.fml.relauncher.Side
import yuuto.enhancedinventories.block.traits.TBlockReverseable
import net.minecraft.util.IIcon
import net.minecraft.client.renderer.texture.IIconRegister
import yuuto.enhancedinventories.config.EIConfiguration
/**
* @author Jacob
*/
class BlockCabinet(name:String) extends BlockBaseEI(Material.wood, name)
with TBlockReverseable with TBlockDecorative with TBlockInventoryUpgradeable with TBlockSecurable{
this.setHardness(2.1f);
this.setResistance(10);
override def createNewTileEntity(world:World, meta:Int):TileEntity=new TileCabinet(meta);
@SideOnly(Side.CLIENT)
override def getSubBlocks(item:Item, tab:CreativeTabs, subItems:List[_]) {
//Sets a random wood and wool color for the chest
var nbt:NBTTagCompound = new NBTTagCompound();
DecorationHelper.setCoreBlock(nbt, Blocks.planks, this.getRandom().nextInt(4));
DecorationHelper.setWool(nbt, this.getRandom().nextInt(16));
//For each tier of chest set a random frame material and add chest to the tab
for (ix <- 0 until 8) {
val stack:ItemStack = new ItemStack(this, 1, ix);
DecorationHelper.setFrame(nbt, ETier.values()(ix).getRandomFrameMaterial(this.getRandom()));
stack.setTagCompound(nbt);
subItems.asInstanceOf[List[ItemStack]].add(stack);
nbt = nbt.copy().asInstanceOf[NBTTagCompound];
}
}
//Opens the gui
override def onBlockActivated(world:World, x:Int, y:Int, z:Int, player:EntityPlayer, side:Int, hitX:Float, hitY:Float, hitZ:Float):Boolean={
val tileEntity:TileEntity = world.getTileEntity(x, y, z);
if (tileEntity == null || player.isSneaking()) {
return false;
}
if(!tileEntity.isInstanceOf[TileCabinet])
return false;
if(tileEntity.asInstanceOf[TileCabinet].canPlayerAccess(player)){
//Attempts to apply sorting upgrade
if(EIConfiguration.moduleRefinedRelocation && player.getHeldItem() != null && RefinedRelocationHelper.isSortingUpgrade(player.getHeldItem())){
val l:TileCabinet = tileEntity.asInstanceOf[TileCabinet];
if(l.isUpgradeValid(player.getHeldItem(), player)){
if(l.addUpgrade(player.getHeldItem(), player)){
if(!player.capabilities.isCreativeMode)
player.getHeldItem().stackSize-=1;
}
}
return true;
}
if(world.isRemote)
return true;
//if the top is clicked open a crafting grid
if(side == 1)
player.openGui(EnhancedInventories, 1, world, x, y, z);
//Otherwise open the inventory
else
player.openGui(EnhancedInventories, 0, world, x, y, z);
return true;
}
return false;
}
//Makes sure the chest can be placed
def canPlaceBlockAt(itemBlock:ItemStack, world:World, x:Int, y:Int, z:Int):Boolean={
val dirs:Array[ForgeDirection] = TileCabinet.validDirections;
var chests:Int = 0;
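// A cabinet may touch at most one compatible, not-yet-connected cabinet;
// touching an already-connected one, or two or more candidates, blocks placement.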
for(dir <- dirs){
val tile:TileEntity = world.getTileEntity(x+dir.offsetX, y+dir.offsetY, z+dir.offsetZ);
if(tile == null || !tile.isInstanceOf[TileCabinet]){}
else{
val chest:TileCabinet = tile.asInstanceOf[TileCabinet];
if(!chest.isValidForConnection(itemBlock)){}
else if(chest.isConnected()){return false;}
else{chests+=1;}
}
}
if(chests > 1)
return false;
return true;
}
@SideOnly(Side.CLIENT)
override def registerBlockIcons(reg:IIconRegister)={}
@SideOnly(Side.CLIENT)
override def getIcon(side:Int, meta:Int):IIcon={
return Blocks.planks.getIcon(side, meta);
}
} | AnimeniacYuuto/EnhancedInventories | src/main/scala/yuuto/enhancedinventories/block/BlockCabinet.scala | Scala | gpl-2.0 | 4,720 |
package pl.touk.nussknacker.sql.utils
import io.circe.generic.JsonCodec
import pl.touk.nussknacker.engine.api._
import pl.touk.nussknacker.engine.api.process.{EmptyProcessConfigCreator, _}
import pl.touk.nussknacker.engine.lite.api.utils.sinks.LazyParamSink
import pl.touk.nussknacker.engine.requestresponse.utils.JsonRequestResponseSourceFactory
//TODO: extract to a separate tests module
class RequestResponseConfigCreator extends EmptyProcessConfigCreator {
private val Category = "Test"
override def sourceFactories(processObjectDependencies: ProcessObjectDependencies): Map[String, WithCategories[SourceFactory]] = {
Map(
"request" -> WithCategories(new JsonRequestResponseSourceFactory[TestRequest], Category))
}
override def sinkFactories(processObjectDependencies: ProcessObjectDependencies): Map[String, WithCategories[SinkFactory]] = {
Map(
"response" -> WithCategories(ResponseSinkFactory, Category))
}
}
@JsonCodec case class TestRequest(id: Int)
@JsonCodec case class TestResponse(name: String, count: Option[Long] = None) extends DisplayJsonWithEncoder[TestResponse]
object ResponseSinkFactory extends SinkFactory {
@MethodToInvoke
def invoke(@ParamName("name") name: LazyParameter[String], @ParamName("count") count: LazyParameter[Option[Long]]): Sink = new ResponseSink(name, count)
}
class ResponseSink(nameParam: LazyParameter[String], countParam: LazyParameter[Option[Long]]) extends LazyParamSink[AnyRef] {
override def prepareResponse(implicit evaluateLazyParameter: LazyParameterInterpreter): LazyParameter[AnyRef] =
nameParam.product(countParam).map {
case (name, count) => TestResponse(name, count)
}
}
| TouK/nussknacker | components/sql/src/test/scala/pl/touk/nussknacker/sql/utils/RequestResponseConfigCreator.scala | Scala | apache-2.0 | 1,686 |
package org.jetbrains.plugins.scala
package lang
package completion
package filters.definitions
import com.intellij.psi._
import com.intellij.psi.filters.ElementFilter
import org.jetbrains.annotations.NonNls
import org.jetbrains.plugins.scala.lang.completion.ScalaCompletionUtil._
import org.jetbrains.plugins.scala.lang.lexer._
import org.jetbrains.plugins.scala.lang.parser._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScCaseClause
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates._
/**
* @author Alexander Podkhalyuzin
* Date: 22.05.2008
*/
class DefinitionsFilter extends ElementFilter {
def isAcceptable(element: Object, context: PsiElement): Boolean = {
if (context.isInstanceOf[PsiComment]) return false
val leaf = getLeafByOffset(context.getTextRange.getStartOffset, context)
if (leaf != null) {
val parent = leaf.getParent
parent match {
case _: ScClassParameter =>
return true
case _: ScReferenceExpression =>
case _ => return false
}
def findParent(p: PsiElement): PsiElement = {
if (p == null) return null
p.getParent match {
case parent@(_: ScBlock | _: ScCaseClause | _: ScTemplateBody | _: ScClassParameter | _: ScalaFile) => {
parent match {
case clause: ScCaseClause =>
clause.funType match {
case Some(elem) => if (leaf.getTextRange.getStartOffset <= elem.getTextRange.getStartOffset) return null
case _ => return null
}
case _ =>
}
if (!parent.isInstanceOf[ScalaFile] || parent.asInstanceOf[ScalaFile].isScriptFile())
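// Only accept positions that do not directly follow a `def` keyword and do not
// sit after an incomplete `match` statement.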
if ((leaf.getPrevSibling == null || leaf.getPrevSibling.getPrevSibling == null ||
leaf.getPrevSibling.getPrevSibling.getNode.getElementType != ScalaTokenTypes.kDEF) &&
(parent.getPrevSibling == null || parent.getPrevSibling.getPrevSibling == null ||
(parent.getPrevSibling.getPrevSibling.getNode.getElementType != ScalaElementTypes.MATCH_STMT ||
!parent.getPrevSibling.getPrevSibling.getLastChild.isInstanceOf[PsiErrorElement])))
return p
null
}
case _ => findParent(p.getParent)
}
}
val otherParent = findParent(parent)
if (otherParent != null && otherParent.getTextRange.getStartOffset == parent.getTextRange.getStartOffset)
return true
}
false
}
def isClassAcceptable(hintClass: java.lang.Class[_]): Boolean = {
true
}
@NonNls
override def toString: String = {
"val, var keyword filter"
}
} | LPTK/intellij-scala | src/org/jetbrains/plugins/scala/lang/completion/filters/definitions/DefinitionsFilter.scala | Scala | apache-2.0 | 2,878 |
package com.houseofmoran.zeitgeist
import org.json4s.JValue
trait Snapshotable {
def toJSON(): JValue
}
| mikemoraned/zeitgeist | src/main/scala/com/houseofmoran/zeitgeist/Snapshotable.scala | Scala | mit | 108 |
package io.iohk.ethereum.ets.blockchain
import akka.util.ByteString
import io.iohk.ethereum.consensus.Protocol
import io.iohk.ethereum.consensus.ethash.validators.ValidatorsExecutor
import io.iohk.ethereum.domain.{Address, UInt256}
import io.iohk.ethereum.utils.{BlockchainConfig, DaoForkConfig, MonetaryPolicyConfig}
import org.bouncycastle.util.encoders.Hex
// scalastyle:off magic.number
object BlockchainTestConfig {
val BaseBlockchainConfig = BlockchainConfig(
frontierBlockNumber = Long.MaxValue,
homesteadBlockNumber = Long.MaxValue,
//Enabling maxGasLimit in all Configs and all blocks
eip106BlockNumber = 0,
eip150BlockNumber = Long.MaxValue,
eip155BlockNumber = Long.MaxValue,
eip160BlockNumber = Long.MaxValue,
eip161BlockNumber = Long.MaxValue,
byzantiumBlockNumber = Long.MaxValue,
constantinopleBlockNumber = Long.MaxValue,
petersburgBlockNumber = Long.MaxValue,
istanbulBlockNumber = Long.MaxValue,
// unused
maxCodeSize = None,
difficultyBombPauseBlockNumber = 3000000,
difficultyBombContinueBlockNumber = 5000000,
difficultyBombRemovalBlockNumber = 5900000,
chainId = 0x1.toByte,
networkId = 1,
customGenesisFileOpt = Some("test-genesis.json"),
monetaryPolicyConfig =
MonetaryPolicyConfig(5000000, 0.2, BigInt("5000000000000000000"), BigInt("3000000000000000000")),
daoForkConfig = None,
accountStartNonce = UInt256.Zero,
bootstrapNodes = Set(),
// TODO: only place where this was supposed to be used but now it seems it's not, remove? Issue: EC-312
gasTieBreaker = false,
ethCompatibleStorage = true,
atlantisBlockNumber = Long.MaxValue,
aghartaBlockNumber = Long.MaxValue,
phoenixBlockNumber = Long.MaxValue,
ecip1098BlockNumber = Long.MaxValue,
treasuryAddress = Address(0),
ecip1097BlockNumber = Long.MaxValue,
ecip1099BlockNumber = Long.MaxValue
)
val FrontierConfig = BaseBlockchainConfig.copy(
frontierBlockNumber = 0
)
val HomesteadConfig = BaseBlockchainConfig.copy(
frontierBlockNumber = -1,
homesteadBlockNumber = 0
)
val Eip150Config = BaseBlockchainConfig.copy(
frontierBlockNumber = -1,
homesteadBlockNumber = -1,
eip150BlockNumber = 0
)
val FrontierToHomesteadAt5 = BaseBlockchainConfig.copy(
frontierBlockNumber = 0,
homesteadBlockNumber = 5
)
val HomesteadToEIP150At5 = BaseBlockchainConfig.copy(
frontierBlockNumber = -1,
homesteadBlockNumber = 0,
eip150BlockNumber = 5
)
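// Reproduces the ETH DAO hard fork at block 5: fork block hash, extra data,
// refund contract and the full list of drained accounts.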
val HomesteadToDaoAt5 = BaseBlockchainConfig.copy(
frontierBlockNumber = -1,
homesteadBlockNumber = 0,
daoForkConfig = Some(
new DaoForkConfig {
override val forkBlockNumber: BigInt = 5
override val forkBlockHash =
ByteString(Hex.decode("f6d7ef1087b5fd94eada533cf8a563f78c3944a2f8ae850e80935d20dc3b7315"))
override val blockExtraData = Some(ByteString(Hex.decode("64616f2d686172642d666f726b")))
override val range = 10
override val refundContract = Some(Address("bf4ed7b27f1d666546e30d74d50d173d20bca754"))
override val drainList = Seq(
Address("d4fe7bc31cedb7bfb8a345f31e668033056b2728"),
Address("b3fb0e5aba0e20e5c49d252dfd30e102b171a425"),
Address("2c19c7f9ae8b751e37aeb2d93a699722395ae18f"),
Address("ecd135fa4f61a655311e86238c92adcd779555d2"),
Address("1975bd06d486162d5dc297798dfc41edd5d160a7"),
Address("a3acf3a1e16b1d7c315e23510fdd7847b48234f6"),
Address("319f70bab6845585f412ec7724b744fec6095c85"),
Address("06706dd3f2c9abf0a21ddcc6941d9b86f0596936"),
Address("5c8536898fbb74fc7445814902fd08422eac56d0"),
Address("6966ab0d485353095148a2155858910e0965b6f9"),
Address("779543a0491a837ca36ce8c635d6154e3c4911a6"),
Address("2a5ed960395e2a49b1c758cef4aa15213cfd874c"),
Address("5c6e67ccd5849c0d29219c4f95f1a7a93b3f5dc5"),
Address("9c50426be05db97f5d64fc54bf89eff947f0a321"),
Address("200450f06520bdd6c527622a273333384d870efb"),
Address("be8539bfe837b67d1282b2b1d61c3f723966f049"),
Address("6b0c4d41ba9ab8d8cfb5d379c69a612f2ced8ecb"),
Address("f1385fb24aad0cd7432824085e42aff90886fef5"),
Address("d1ac8b1ef1b69ff51d1d401a476e7e612414f091"),
Address("8163e7fb499e90f8544ea62bbf80d21cd26d9efd"),
Address("51e0ddd9998364a2eb38588679f0d2c42653e4a6"),
Address("627a0a960c079c21c34f7612d5d230e01b4ad4c7"),
Address("f0b1aa0eb660754448a7937c022e30aa692fe0c5"),
Address("24c4d950dfd4dd1902bbed3508144a54542bba94"),
Address("9f27daea7aca0aa0446220b98d028715e3bc803d"),
Address("a5dc5acd6a7968a4554d89d65e59b7fd3bff0f90"),
Address("d9aef3a1e38a39c16b31d1ace71bca8ef58d315b"),
Address("63ed5a272de2f6d968408b4acb9024f4cc208ebf"),
Address("6f6704e5a10332af6672e50b3d9754dc460dfa4d"),
Address("77ca7b50b6cd7e2f3fa008e24ab793fd56cb15f6"),
Address("492ea3bb0f3315521c31f273e565b868fc090f17"),
Address("0ff30d6de14a8224aa97b78aea5388d1c51c1f00"),
Address("9ea779f907f0b315b364b0cfc39a0fde5b02a416"),
Address("ceaeb481747ca6c540a000c1f3641f8cef161fa7"),
Address("cc34673c6c40e791051898567a1222daf90be287"),
Address("579a80d909f346fbfb1189493f521d7f48d52238"),
Address("e308bd1ac5fda103967359b2712dd89deffb7973"),
Address("4cb31628079fb14e4bc3cd5e30c2f7489b00960c"),
Address("ac1ecab32727358dba8962a0f3b261731aad9723"),
Address("4fd6ace747f06ece9c49699c7cabc62d02211f75"),
Address("440c59b325d2997a134c2c7c60a8c61611212bad"),
Address("4486a3d68fac6967006d7a517b889fd3f98c102b"),
Address("9c15b54878ba618f494b38f0ae7443db6af648ba"),
Address("27b137a85656544b1ccb5a0f2e561a5703c6a68f"),
Address("21c7fdb9ed8d291d79ffd82eb2c4356ec0d81241"),
Address("23b75c2f6791eef49c69684db4c6c1f93bf49a50"),
Address("1ca6abd14d30affe533b24d7a21bff4c2d5e1f3b"),
Address("b9637156d330c0d605a791f1c31ba5890582fe1c"),
Address("6131c42fa982e56929107413a9d526fd99405560"),
Address("1591fc0f688c81fbeb17f5426a162a7024d430c2"),
Address("542a9515200d14b68e934e9830d91645a980dd7a"),
Address("c4bbd073882dd2add2424cf47d35213405b01324"),
Address("782495b7b3355efb2833d56ecb34dc22ad7dfcc4"),
Address("58b95c9a9d5d26825e70a82b6adb139d3fd829eb"),
Address("3ba4d81db016dc2890c81f3acec2454bff5aada5"),
Address("b52042c8ca3f8aa246fa79c3feaa3d959347c0ab"),
Address("e4ae1efdfc53b73893af49113d8694a057b9c0d1"),
Address("3c02a7bc0391e86d91b7d144e61c2c01a25a79c5"),
Address("0737a6b837f97f46ebade41b9bc3e1c509c85c53"),
Address("97f43a37f595ab5dd318fb46e7a155eae057317a"),
Address("52c5317c848ba20c7504cb2c8052abd1fde29d03"),
Address("4863226780fe7c0356454236d3b1c8792785748d"),
Address("5d2b2e6fcbe3b11d26b525e085ff818dae332479"),
Address("5f9f3392e9f62f63b8eac0beb55541fc8627f42c"),
Address("057b56736d32b86616a10f619859c6cd6f59092a"),
Address("9aa008f65de0b923a2a4f02012ad034a5e2e2192"),
Address("304a554a310c7e546dfe434669c62820b7d83490"),
Address("914d1b8b43e92723e64fd0a06f5bdb8dd9b10c79"),
Address("4deb0033bb26bc534b197e61d19e0733e5679784"),
Address("07f5c1e1bc2c93e0402f23341973a0e043f7bf8a"),
Address("35a051a0010aba705c9008d7a7eff6fb88f6ea7b"),
Address("4fa802324e929786dbda3b8820dc7834e9134a2a"),
Address("9da397b9e80755301a3b32173283a91c0ef6c87e"),
Address("8d9edb3054ce5c5774a420ac37ebae0ac02343c6"),
Address("0101f3be8ebb4bbd39a2e3b9a3639d4259832fd9"),
Address("5dc28b15dffed94048d73806ce4b7a4612a1d48f"),
Address("bcf899e6c7d9d5a215ab1e3444c86806fa854c76"),
Address("12e626b0eebfe86a56d633b9864e389b45dcb260"),
Address("a2f1ccba9395d7fcb155bba8bc92db9bafaeade7"),
Address("ec8e57756626fdc07c63ad2eafbd28d08e7b0ca5"),
Address("d164b088bd9108b60d0ca3751da4bceb207b0782"),
Address("6231b6d0d5e77fe001c2a460bd9584fee60d409b"),
Address("1cba23d343a983e9b5cfd19496b9a9701ada385f"),
Address("a82f360a8d3455c5c41366975bde739c37bfeb8a"),
Address("9fcd2deaff372a39cc679d5c5e4de7bafb0b1339"),
Address("005f5cee7a43331d5a3d3eec71305925a62f34b6"),
Address("0e0da70933f4c7849fc0d203f5d1d43b9ae4532d"),
Address("d131637d5275fd1a68a3200f4ad25c71a2a9522e"),
Address("bc07118b9ac290e4622f5e77a0853539789effbe"),
Address("47e7aa56d6bdf3f36be34619660de61275420af8"),
Address("acd87e28b0c9d1254e868b81cba4cc20d9a32225"),
Address("adf80daec7ba8dcf15392f1ac611fff65d94f880"),
Address("5524c55fb03cf21f549444ccbecb664d0acad706"),
Address("40b803a9abce16f50f36a77ba41180eb90023925"),
Address("fe24cdd8648121a43a7c86d289be4dd2951ed49f"),
Address("17802f43a0137c506ba92291391a8a8f207f487d"),
Address("253488078a4edf4d6f42f113d1e62836a942cf1a"),
Address("86af3e9626fce1957c82e88cbf04ddf3a2ed7915"),
Address("b136707642a4ea12fb4bae820f03d2562ebff487"),
Address("dbe9b615a3ae8709af8b93336ce9b477e4ac0940"),
Address("f14c14075d6c4ed84b86798af0956deef67365b5"),
Address("ca544e5c4687d109611d0f8f928b53a25af72448"),
Address("aeeb8ff27288bdabc0fa5ebb731b6f409507516c"),
Address("cbb9d3703e651b0d496cdefb8b92c25aeb2171f7"),
Address("6d87578288b6cb5549d5076a207456a1f6a63dc0"),
Address("b2c6f0dfbb716ac562e2d85d6cb2f8d5ee87603e"),
Address("accc230e8a6e5be9160b8cdf2864dd2a001c28b6"),
Address("2b3455ec7fedf16e646268bf88846bd7a2319bb2"),
Address("4613f3bca5c44ea06337a9e439fbc6d42e501d0a"),
Address("d343b217de44030afaa275f54d31a9317c7f441e"),
Address("84ef4b2357079cd7a7c69fd7a37cd0609a679106"),
Address("da2fef9e4a3230988ff17df2165440f37e8b1708"),
Address("f4c64518ea10f995918a454158c6b61407ea345c"),
Address("7602b46df5390e432ef1c307d4f2c9ff6d65cc97"),
Address("bb9bc244d798123fde783fcc1c72d3bb8c189413"),
Address("807640a13483f8ac783c557fcdf27be11ea4ac7a")
)
}
)
)
val Eip158Config = BaseBlockchainConfig.copy(
frontierBlockNumber = -1,
homesteadBlockNumber = -1,
eip150BlockNumber = -1,
eip155BlockNumber = -1,
eip160BlockNumber = -1,
eip161BlockNumber = 0,
maxCodeSize = Some(24576)
)
val ByzantiumConfig = BaseBlockchainConfig.copy(
frontierBlockNumber = -1,
homesteadBlockNumber = -1,
eip150BlockNumber = -1,
eip155BlockNumber = -1,
eip160BlockNumber = -1,
eip161BlockNumber = -1,
maxCodeSize = Some(24576),
byzantiumBlockNumber = 0,
monetaryPolicyConfig =
MonetaryPolicyConfig(5000000, 0.2, BigInt("5000000000000000000"), BigInt("3000000000000000000"))
)
val ConstantinopleConfig = BaseBlockchainConfig.copy(
frontierBlockNumber = -1,
homesteadBlockNumber = -1,
eip150BlockNumber = -1,
eip155BlockNumber = -1,
eip160BlockNumber = -1,
eip161BlockNumber = -1,
maxCodeSize = Some(24576),
byzantiumBlockNumber = -1,
constantinopleBlockNumber = 0,
monetaryPolicyConfig = MonetaryPolicyConfig(
5000000,
0.2,
BigInt("5000000000000000000"),
BigInt("3000000000000000000"),
BigInt("2000000000000000000")
)
)
val ConstantinopleFixConfig = BaseBlockchainConfig.copy(
frontierBlockNumber = -1,
homesteadBlockNumber = -1,
eip150BlockNumber = -1,
eip155BlockNumber = -1,
eip160BlockNumber = -1,
eip161BlockNumber = -1,
maxCodeSize = Some(24576),
byzantiumBlockNumber = -1,
constantinopleBlockNumber = -1,
petersburgBlockNumber = 0,
monetaryPolicyConfig = MonetaryPolicyConfig(
5000000,
0.2,
BigInt("5000000000000000000"),
BigInt("3000000000000000000"),
BigInt("2000000000000000000")
)
)
val IstanbulConfig = BaseBlockchainConfig.copy(
frontierBlockNumber = -1,
homesteadBlockNumber = -1,
eip150BlockNumber = -1,
eip155BlockNumber = -1,
eip160BlockNumber = -1,
eip161BlockNumber = -1,
maxCodeSize = Some(24576),
byzantiumBlockNumber = -1,
constantinopleBlockNumber = -1,
petersburgBlockNumber = -1,
istanbulBlockNumber = 0,
monetaryPolicyConfig = MonetaryPolicyConfig(
5000000,
0.2,
BigInt("5000000000000000000"),
BigInt("3000000000000000000"),
BigInt("2000000000000000000")
)
)
val Eip158ToByzantiumAt5Config = BaseBlockchainConfig.copy(
frontierBlockNumber = -1,
homesteadBlockNumber = -1,
eip150BlockNumber = -1,
eip155BlockNumber = -1,
eip160BlockNumber = -1,
eip161BlockNumber = 0,
maxCodeSize = Some(24576),
byzantiumBlockNumber = 5,
monetaryPolicyConfig =
MonetaryPolicyConfig(5000000, 0.2, BigInt("5000000000000000000"), BigInt("3000000000000000000"))
)
val ByzantiumToConstantinopleAt5 = BaseBlockchainConfig.copy(
frontierBlockNumber = -1,
homesteadBlockNumber = -1,
eip150BlockNumber = -1,
eip155BlockNumber = -1,
eip160BlockNumber = -1,
eip161BlockNumber = -1,
maxCodeSize = Some(24576),
byzantiumBlockNumber = 0,
constantinopleBlockNumber = 5,
monetaryPolicyConfig = MonetaryPolicyConfig(
5000000,
0.2,
BigInt("5000000000000000000"),
BigInt("3000000000000000000"),
BigInt("2000000000000000000")
)
)
}
object Validators {
import BlockchainTestConfig._
val frontierValidators = ValidatorsExecutor(FrontierConfig, Protocol.Ethash)
val homesteadValidators = ValidatorsExecutor(HomesteadConfig, Protocol.Ethash)
val eip150Validators = ValidatorsExecutor(Eip150Config, Protocol.Ethash)
val frontierToHomesteadValidators = ValidatorsExecutor(FrontierToHomesteadAt5, Protocol.Ethash)
val homesteadToEipValidators = ValidatorsExecutor(HomesteadToEIP150At5, Protocol.Ethash)
val homesteadToDaoValidators = ValidatorsExecutor(HomesteadToDaoAt5, Protocol.Ethash)
val eip158Validators = ValidatorsExecutor(Eip158Config, Protocol.Ethash)
val byzantiumValidators = ValidatorsExecutor(ByzantiumConfig, Protocol.Ethash)
val constantinopleValidators = ValidatorsExecutor(ConstantinopleConfig, Protocol.Ethash)
val constantinopleFixValidators = ValidatorsExecutor(ConstantinopleFixConfig, Protocol.Ethash)
val istanbulValidators = ValidatorsExecutor(IstanbulConfig, Protocol.Ethash)
val eip158ToByzantiumValidators = ValidatorsExecutor(Eip158ToByzantiumAt5Config, Protocol.Ethash)
val byzantiumToConstantinopleAt5 = ValidatorsExecutor(ByzantiumToConstantinopleAt5, Protocol.Ethash)
}
// Connected with: https://github.com/ethereum/tests/issues/480
object ValidatorsWithSkippedPoW {
import BlockchainTestConfig._
val frontierValidators = ValidatorsExecutor(FrontierConfig, new EthashTestBlockHeaderValidator(FrontierConfig))
val homesteadValidators = ValidatorsExecutor(HomesteadConfig, new EthashTestBlockHeaderValidator(HomesteadConfig))
val eip150Validators = ValidatorsExecutor(Eip150Config, new EthashTestBlockHeaderValidator(Eip150Config))
val frontierToHomesteadValidators =
ValidatorsExecutor(FrontierToHomesteadAt5, new EthashTestBlockHeaderValidator(FrontierToHomesteadAt5))
val homesteadToEipValidators =
ValidatorsExecutor(HomesteadToEIP150At5, new EthashTestBlockHeaderValidator(HomesteadToEIP150At5))
val homesteadToDaoValidators =
ValidatorsExecutor(HomesteadToDaoAt5, new EthashTestBlockHeaderValidator(HomesteadToDaoAt5))
val eip158Validators = ValidatorsExecutor(Eip158Config, new EthashTestBlockHeaderValidator(Eip158Config))
val byzantiumValidators = ValidatorsExecutor(ByzantiumConfig, new EthashTestBlockHeaderValidator(ByzantiumConfig))
val constantinopleValidators =
ValidatorsExecutor(ConstantinopleConfig, new EthashTestBlockHeaderValidator(ConstantinopleConfig))
val constantinopleFixValidators =
ValidatorsExecutor(ConstantinopleFixConfig, new EthashTestBlockHeaderValidator(ConstantinopleFixConfig))
val istanbulValidators = ValidatorsExecutor(IstanbulConfig, new EthashTestBlockHeaderValidator(IstanbulConfig))
val eip158ToByzantiumValidators =
ValidatorsExecutor(Eip158ToByzantiumAt5Config, new EthashTestBlockHeaderValidator(Eip158ToByzantiumAt5Config))
val byzantiumToConstantinopleAt5 =
ValidatorsExecutor(ByzantiumToConstantinopleAt5, new EthashTestBlockHeaderValidator(ByzantiumToConstantinopleAt5))
}
| input-output-hk/etc-client | src/ets/scala/io/iohk/ethereum/ets/blockchain/BlockchainTestConfig.scala | Scala | mit | 16,764 |
package models.daos
import org.scalatest._
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import org.scalatest.concurrent._
import scala.concurrent.duration._
import scala.concurrent._
import models.Gathering
import models.Goal
import slick.driver.PostgresDriver.api._
import java.util.UUID
import java.text.SimpleDateFormat
import java.util.Calendar
class GatheringsDAOSpec extends DatabaseSpec with Matchers with OptionValues with BeforeAndAfter {
var dao: GatheringDAO = null
before {
dao = new GatheringDAOImpl()
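// wipe dependent tables in foreign-key order so the deletes don't violate constraints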
val f = for {
_ <- db.run(sqlu"delete from goals")
_ <- db.run(sqlu"delete from accumulations")
_ <- db.run(sqlu"delete from gatherings")
_ <- db.run(sqlu"delete from mantra")
} yield ()
whenReady(f) { _ =>
}
}
after {
}
"Saving a new non-existant Gathering" should "save and return Gathering with the primary key" taggedAs (DbTest) in {
val gathering = Gathering(None, UUID.randomUUID(), "A gathering", "dedicated to all", false, false, 2015, 8, 12)
whenReady(dao.save(gathering)) { updatedGathering =>
val id: Option[Long] = updatedGathering.id
id.value should be >= 1L
}
}
"Saving an existing Gathering" should "save and return Gathering with the primary key" taggedAs (DbTest) in {
val gathering = Gathering(None, UUID.randomUUID(), "A gathering", "dedicated to all", false, false, 2015, 8, 12)
whenReady(dao.save(gathering)) { updatedGathering =>
val id: Option[Long] = updatedGathering.id
val idValue = id.value
val accumulation2 = updatedGathering.copy(name = "updated name")
whenReady(dao.save(accumulation2)) { updatedGathering2 =>
val id2: Option[Long] = updatedGathering2.id
assert(id2.value === idValue)
}
}
}
"Finding Gatherings" should "return empty list when none are found" taggedAs (DbTest) in {
whenReady(dao.find()) { found =>
found.length shouldBe (0)
}
}
it should "only find non-archived gatherings" taggedAs (DbTest) in {
cleanInsert("GatheringsDAOSpec")
whenReady(dao.find()) { found =>
found.length shouldBe (6)
}
}
"Finding Gatherings by mantra" should "only return Gatherings related to the mantra" taggedAs (DbTest) in {
cleanInsert("GatheringsDAOSpec")
whenReady(dao.find(3L)) { foundGatherings =>
foundGatherings.length shouldBe (4)
}
}
"Exists" should "return false if no gathering exists with given name" taggedAs (DbTest) in {
whenReady(dao.exists("jdjaljlj")) { result =>
result shouldBe (false)
}
}
it should "return true if gathering exists with given name" taggedAs (DbTest) in {
val name = "A gathering"
val gathering = Gathering(None, UUID.randomUUID(), name, "dedicated to all", false, false, 2015, 8, 12)
whenReady(dao.save(gathering)) { updatedGathering =>
whenReady(dao.exists(name)) { result =>
result shouldBe (true)
}
}
}
"Deleting an existant Gathering" should "set is achieved to 1" taggedAs (DbTest) in {
cleanInsert("GatheringsDAOSpec")
whenReady(dao.delete(1L)) { isDeleted =>
isDeleted shouldBe (true)
}
}
it should "not be available in find results" taggedAs (DbTest) in {
cleanInsert("GatheringsDAOSpec")
whenReady(dao.delete(1L)) { isDeleted =>
whenReady(dao.find(2L)) { found =>
found.length shouldBe (0)
}
}
}
it should "rename mantra to include date archived" taggedAs (DbTest) in {
cleanInsert("GatheringsDAOSpec")
whenReady(dao.delete(1L)) { result =>
result shouldBe (true)
val f = for {
name <- db.run(sql"select name from gatherings where id = 1".as[String])
} yield name
whenReady(f) { name =>
val f = new SimpleDateFormat("dd-MM-YYYY kk:hh")
val nowStr = f.format(Calendar.getInstance.getTime)
assert(name.head.contains(nowStr))
}
}
}
"Finding a gathering by id and mantra" should "return gathering when found" taggedAs (DbTest) in {
cleanInsert("GatheringsDAOSpec")
whenReady(dao.find(1L, 2L)) { result =>
result shouldBe (Gathering(Some(1), UUID.fromString("600ba5de-01ff-4cb5-9b7b-ec4c5521f6e3"), "A gathering", "dedicated to all", false, false, 2015, 8, 12))
}
}
} | leannenorthrop/play-mantra-accumulations | test/models/daos/GatheringsDAOSpec.scala | Scala | apache-2.0 | 4,318 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
* Performance test for the Auspraegung entity.
*/
class AuspraegungGatlingTest extends Simulation {
val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
// Log all HTTP requests
//context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
// Log failed HTTP requests
//context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))
val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""
val httpConf = http
.baseURL(baseURL)
.inferHtmlResources()
.acceptHeader("*/*")
.acceptEncodingHeader("gzip, deflate")
.acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
.connectionHeader("keep-alive")
.userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")
val headers_http = Map(
"Accept" -> """application/json"""
)
val headers_http_authentication = Map(
"Content-Type" -> """application/json""",
"Accept" -> """application/json"""
)
val headers_http_authenticated = Map(
"Accept" -> """application/json""",
"Authorization" -> "${access_token}"
)
val scn = scenario("Test the Auspraegung entity")
.exec(http("First unauthenticated request")
.get("/api/account")
.headers(headers_http)
.check(status.is(401))).exitHereIfFailed
.pause(10)
.exec(http("Authentication")
.post("/api/authenticate")
.headers(headers_http_authentication)
.body(StringBody("""{"username":"admin", "password":"admin"}""")).asJSON
.check(header.get("Authorization").saveAs("access_token"))).exitHereIfFailed
.pause(1)
.exec(http("Authenticated request")
.get("/api/account")
.headers(headers_http_authenticated)
.check(status.is(200)))
.pause(10)
.repeat(2) {
exec(http("Get all auspraegungs")
.get("/api/auspraegungs")
.headers(headers_http_authenticated)
.check(status.is(200)))
.pause(10 seconds, 20 seconds)
.exec(http("Create new auspraegung")
.post("/api/auspraegungs")
.headers(headers_http_authenticated)
.body(StringBody("""{"id":null, "bezeichnung":"SAMPLE_TEXT"}""")).asJSON
.check(status.is(201))
.check(headerRegex("Location", "(.*)").saveAs("new_auspraegung_url"))).exitHereIfFailed
.pause(10)
.repeat(5) {
exec(http("Get created auspraegung")
.get("${new_auspraegung_url}")
.headers(headers_http_authenticated))
.pause(10)
}
.exec(http("Delete created auspraegung")
.delete("${new_auspraegung_url}")
.headers(headers_http_authenticated))
.pause(10)
}
val users = scenario("Users").exec(scn)
setUp(
users.inject(rampUsers(Integer.getInteger("users", 100)) over (Integer.getInteger("ramp", 1) minutes))
).protocols(httpConf)
}
| t08094a/ffManagementSuite | src/test/gatling/user-files/simulations/AuspraegungGatlingTest.scala | Scala | gpl-3.0 | 3,396 |
package akka
import akka.event.Logging
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.stream.QueueOfferResult
import akka.stream.QueueOfferResult.Enqueued
import akka.stream.scaladsl.SourceQueueWithComplete
import akka.testkit.{TestActorRef, TestProbe}
import com.omearac.consumers.{DataConsumer, EventConsumer}
import com.omearac.http.routes.{ConsumerCommands, ProducerCommands}
import com.omearac.producers.DataProducer
import org.scalatest.{Matchers, WordSpec}
import scala.concurrent.Future
class HTTPInterfaceSpec extends WordSpec
with Matchers with ScalatestRouteTest
with ConsumerCommands with ProducerCommands {
val log = Logging(system, this.getClass.getName)
//Mocks for DataConsumer Tests
val dataConsumer = TestActorRef(new DataConsumer)
val manager = TestProbe()
dataConsumer.underlyingActor.consumerStreamManager = manager.ref
//Mocks for EventConsumer Tests
val eventConsumer = TestActorRef(new EventConsumer)
eventConsumer.underlyingActor.consumerStreamManager = manager.ref
//Mocks for DataProducer Tests
val dataProducer = TestActorRef(new DataProducer)
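// Stub of the Kafka-bound stream: every offer is accepted as Enqueued, so the
// producer logic can be exercised without a running broker.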
val mockProducerStream: SourceQueueWithComplete[Any] = new SourceQueueWithComplete[Any] {
override def complete(): Unit = println("complete")
override def fail(ex: Throwable): Unit = println("fail")
override def offer(elem: Any): Future[QueueOfferResult] = Future{Enqueued}
override def watchCompletion(): Future[Done] = Future{Done}
}
"The HTTP interface to control the DataConsumerStream" should {
"return a Already Stopped message for GET requests to /data_consumer/stop" in {
Get("/data_consumer/stop") ~> dataConsumerHttpCommands ~> check {
responseAs[String] shouldEqual "Data Consumer Stream Already Stopped"
}
}
"return a Stream Started response for GET requests to /data_consumer/start" in {
Get("/data_consumer/start") ~> dataConsumerHttpCommands ~> check {
responseAs[String] shouldEqual "Data Consumer Stream Started"
}
}
}
"The HTTP interface to control the EventConsumerStream" should {
"return a Already Stopped message for GET requests to /event_consumer/stop" in {
Get("/event_consumer/stop") ~> eventConsumerHttpCommands ~> check {
responseAs[String] shouldEqual "Event Consumer Stream Already Stopped"
}
}
"return a Stream Started response for GET requests to /data_consumer/start" in {
Get("/event_consumer/start") ~> eventConsumerHttpCommands ~> check {
responseAs[String] shouldEqual "Event Consumer Stream Started"
}
}
}
"The HTTP interface to tell the DataProducer Actor to publish messages to Kafka" should {
"return a Messages Produced message for GET requests to /data_producer/produce/10" in {
dataProducer.underlyingActor.producerStream = mockProducerStream
val producing = dataProducer.underlyingActor.publishData
dataProducer.underlyingActor.context.become(producing)
Get("/data_producer/produce/10") ~> producerHttpCommands ~> check {
responseAs[String] shouldEqual "10 messages Produced as Ordered, Boss!"
}
}
}
}
| omearac/reactive-kafka-microservice-template | src/test/scala/akka/HTTPInterfaceSpec.scala | Scala | apache-2.0 | 3,384 |
package eu.ace_design.island.geom
import eu.ace_design.island.util.{LogSilos, Logger}
/**
* This file is part of the Island project.
* @author mosser
**/
/**
* A MeshBuilder is used to generate an Island mesh based on a given set of Points.
*
* Remark: this class exploits the JTS Topology Suite for Voronoi and Delaunay computations
*
* @param size the size of the map (a square of size x size)
*/
class MeshBuilder(val size: Int) extends Logger {
val silo = LogSilos.MESH_GEN
import com.vividsolutions.jts.geom.{Polygon, GeometryCollection}
/**
* Create a Mesh by applying a builder to a given set of points
* @param sites the points used to generate the mesh
* @return the associated mesh
*/
def apply(sites: Set[Point]): Mesh = {
// introduce points added by the computation of the Voronoi diagram for this site
val voronoiMesh = this.voronoi(sites) clip size
val mesh = buildDelaunayNeighborhood(voronoiMesh)
mesh
}
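// Usage sketch (hypothetical map size and point count):
//
//   val sites = (1 to 600).map(_ => Point(math.random * 1024, math.random * 1024)).toSet
//   val mesh  = new MeshBuilder(1024)(sites)
//
// The result contains the clipped Voronoi faces plus the Delaunay-based
// neighborhood relation computed by buildDelaunayNeighborhood below.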
/**
* Exploit a Voronoi diagram to build the different area of the maps
* @param sites a distribution of points used as inputs for the Voronoi Builder
* @return a complete mesh (based on voronoi algorithm) with the associated Faces, Edges and Vertex.
*/
private def voronoi(sites: Set[Point]): Mesh = {
import com.vividsolutions.jts.geom.{Coordinate, GeometryCollection, GeometryFactory}
import com.vividsolutions.jts.triangulate.VoronoiDiagramBuilder
import scala.collection.JavaConversions._
// Transform the Points into JTS coordinates
val coordinates = sites.par map { p => new Coordinate(p.x, p.y) }
// Instantiate a DiagramBuilder, associated to the computed coordinates.
info("Generating Voronoi diagram")
val builder = new VoronoiDiagramBuilder()
builder.setSites(coordinates.seq)
//builder.setClipEnvelope(new Envelope(0,size,0,size))
val polygons = buildPolygons(builder.getDiagram(new GeometryFactory()).asInstanceOf[GeometryCollection])
// Compute the contents of the mesh
val vertexRegistry = buildVertexRegistry(polygons)
val edgeRegistry = buildEdgeRegistry(polygons, vertexRegistry)
val faceRegistry = buildFaceRegistry(polygons, vertexRegistry, edgeRegistry)
// Return the mesh
Mesh(vertices = vertexRegistry, edges = edgeRegistry, faces = faceRegistry)
}
/**
* Compute a sequence of Polygons based on a GeometryCollection obtained as the output of a Voronoi Builder
* It aims to restricts the geometry to coordinates compatible with the Mesh to be built
* @param geometry the output of a voronoi builder
* @return a sequence of polygons compatible with a Mesh (\\in [0, SIZE] x [0,SIZE])
*/
private def buildPolygons(geometry: GeometryCollection): Seq[Polygon] = {
import com.vividsolutions.jts.geom.Coordinate
info("Building polygons")
val rect = geometry.getFactory.createPolygon(Array(new Coordinate(0,0), new Coordinate(0,size),
new Coordinate(size, size), new Coordinate(size, 0),
new Coordinate(0,0)))
val polySeq = geometryCollectionToPolygonSequence(geometry)
(polySeq.par map { _.intersection(rect).asInstanceOf[Polygon] }).seq
}
/**
* Transform a GeometryCollection into a sequence of polygons
* @param g the collection to transform
* @return an associated sequence of Polygons
*/
private def geometryCollectionToPolygonSequence(g: GeometryCollection): Seq[Polygon] = {
val iterator = (0 until g.getNumGeometries).par
val r = (Seq[Polygon]() /: iterator) { (polygons, idx) =>
val p: Polygon = g.getGeometryN(idx).asInstanceOf[Polygon]
polygons :+ p
}
r
}
/**
* Compute a vertex registry that contains all the vertices used in the given polygons
* @param polygons the polygons to work on
* @return A vertex registry containing all the vertices in init + the one defined in the given polygons
*/
private def buildVertexRegistry(polygons: Seq[Polygon]): VertexRegistry = {
info("Building VertexRegistry")
(VertexRegistry() /: polygons.par) { (r, poly) =>
val coordinates = poly.getBoundary.getCoordinates
val points = coordinates map { c => Point(c.x, c.y) }
// We add the points located in the border + its centroid (to be used as the center of the associated face)
(r /: points) { (acc, point) => acc + point } + getCentroid(poly)
}
}
/**
* Compute as a Point the centroid of a JTS polygon
* @param p the polygon to handle
* @return a Point
*/
def getCentroid(p: Polygon): Point = {
val tmp = p.getCentroid
Point(tmp.getX, tmp.getY)
}
/**
* Compute and EdgeRegistry based on the given polygons and a vertex registry containing the associated vertices
* @param polygons the polygons to work on
* @param vertices the vertices used by these polygons
* @return the associated EdgeRegistry
*/
private def buildEdgeRegistry(polygons: Seq[Polygon], vertices: VertexRegistry): EdgeRegistry = {
info("Building EdgeRegistry")
(EdgeRegistry() /: polygons.par) { (r, poly) =>
val edges = extractEdges(vertices, poly)
edges.foldLeft(r) { (reg, e) => reg + e }
}
}
/**
* Compute a FaceRegistry based on the given polygons, the sites used to compute these polygons,
* and preexisting registries
* @param polygons the polygons to work on
* @param vReg the vertexRegistry used to store the vertices
* @param eReg the edgeRegistry used to store the edges
* @return a FaceRegistry
*/
private def buildFaceRegistry(polygons: Seq[Polygon], vReg: VertexRegistry, eReg: EdgeRegistry): FaceRegistry = {
info("Building Face Registry")
(FaceRegistry() /: polygons.par) { (reg, poly) =>
val centerRef = vReg(getCentroid(poly)).get
val edgeRefs = extractEdges(vReg, poly) map { e => eReg(e).get }
reg + Face(center = centerRef, edges = edgeRefs)
}
}
/**
* Transform a given polygon into a sequence of Edges
* @param vReg the VertexRegistry containing the associated vertices
* @param poly the polygon to transform
* @return the associated sequence of edges
*/
private def extractEdges(vReg: VertexRegistry, poly: Polygon): Seq[Edge] = {
trace(s"Extracting edges for $poly")
def loop(points: Array[Point]): Seq[Edge] = points match {
case Array() => Seq()
case Array(p) => Seq()
case Array(p1, p2, _*) => Edge(vReg(p1).get, vReg(p2).get) +: loop(points.slice(1, points.length))
}
loop(poly.getBoundary.getCoordinates map { c => Point(c.x, c.y) })
}
/**
* Implements the computation of the neighborhood relationship between faces by leveraging a Delaunay triangulation.
* It is actually a good enough method to compute neighbors (wrong for border polygons, but we don't care as
* we are not using such polygons in our Island - always ocean).
* @param mesh the mesh to be used to compute the neighborhood relations
* @return a new mesh (faces updated to store their neighbors as an immutable set of face references)
*/
def buildDelaunayNeighborhood(mesh: Mesh): Mesh = {
import com.vividsolutions.jts.triangulate.DelaunayTriangulationBuilder
import com.vividsolutions.jts.geom.{Coordinate, GeometryFactory}
import scala.collection.JavaConversions._
def buildTriangles(m: Mesh): Seq[Polygon] = {
info("Building Delaunay triangulation for neighborhood")
val sites = m.faces.values.par map { f =>
val center = m.vertices(f.center)
new Coordinate(center.x, center.y)
}
val builder = new DelaunayTriangulationBuilder()
builder.setSites(sites.seq)
val geom = builder.getTriangles(new GeometryFactory()).asInstanceOf[GeometryCollection]
geometryCollectionToPolygonSequence(geom)
}
type Neighborhood = Map[Int, Set[Int]]
def addToNeighbors(key: Int, neighbor: Int, data: Neighborhood): Neighborhood = data.get(key) match {
case None => data + (key -> Set(neighbor))
case Some(existing) => data - key + (key -> (existing + neighbor))
}
val triangles: Seq[Polygon] = buildTriangles(mesh)
val emptyNeighborhood: Neighborhood = Map()
info("Processing the Delaunay triangulation ("+triangles.length+" triangles)")
val neighborhood = (emptyNeighborhood /: triangles.par) { (acc, t) =>
// polygons are "closed", thus the start point is a duplicate of the last one (=> distinct is used)
val centerRefs = (t.getCoordinates map { c => mesh.vertices(Point(c.x, c.y)).get }).distinct
// We transform these center references into faces
val faceRefs = centerRefs map { mesh.faces.lookFor(_).get}
// we build the neighborhood pairs based on the triangle contents
val pairs = for(i <- faceRefs; j<- faceRefs) yield (i,j)
// we update the accumulator wit the pairs
(acc /: pairs) { (res,p) => addToNeighbors(p._1, p._2, res) }
}
info("Updating the FaceRegistry with the neighborhood relation")
val updatedFaces = (mesh.faces /: neighborhood.par) { (reg, info) =>
val face = reg(info._1).copy(neighbors = Some(info._2))
reg.update(info._1, face)
}
mesh.copy(faces = updatedFaces)
}
}
| ace-design/island | engine/src/main/scala/eu/ace_design/island/geom/MeshBuilder.scala | Scala | lgpl-3.0 | 9,291 |
package com.innoq.leanpubclient
import akka.NotUsed
import akka.stream.scaladsl.Source
import com.innoq.leanpubclient.ResponseHandler._
import play.api.libs.json._
import play.api.libs.ws.{StandaloneWSClient, StandaloneWSRequest}
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
/** LeanPubClient is a client for interacting with the Leanpub API.
*
* @param wsClient an implementation of [[StandaloneWSClient]] used to perform HTTP requests
* @param apiKey Leanpub API key giving access to your books
* @param requestTimeout [[FiniteDuration]] defining the request timeout
* @param executionContext implicit [[ExecutionContext]] for Futures
*/
class LeanPubClient(wsClient: StandaloneWSClient, apiKey: String, requestTimeout: FiniteDuration)(implicit executionContext: ExecutionContext) {
private val host = "https://leanpub.com"
/** Sends a POST request to trigger a preview of the book.
*
* @param slug, usually book's title
* @return Future of type [[Result]], which can be either a Success or an Error.
*/
def triggerPreview(slug: String): Future[Result] = postFormParams(s"$host/$slug/preview.json")
/** Sends a POST request to trigger the book's publishing.
*
* Also triggers sending an email to your book's readers if you provide an emailText.
* No email is sent if the emailText param is omitted.
* @param slug, usually book's title
* @param emailText is optional.
* @return Future of type [[Result]], which can be either a Success or an Error.
*/
def triggerPublish(slug: String, emailText: Option[String]): Future[Result] = {
val formParams = emailText match {
case Some(text) => Map("publish[email_readers]" -> Seq("true"), "publish[release_notes]" -> Seq(text))
case None => Map.empty[String, Seq[String]]
}
postFormParams(s"$host/$slug/publish.json", formParams)
}
/** Sends a POST request to create a coupon for the given book.
*
* @param slug, usually book's title
* @param coupon provide a [[CreateCoupon]] for the book
* @return Future of type [[Result]], which can be either a Success or an Error.
*/
def createCoupon(slug: String, coupon: CreateCoupon): Future[Result] = {
postJson(s"$host/$slug/coupons.json", coupon)
}
/** Sends a PUT request to update a coupon.
*
* @param slug, usually book's title
* @param couponCode name of the coupon you would like to update
* @param coupon [[UpdateCoupon]] contains the attributes you would like to update
* @return Future of type [[Result]], which can be either a Success or an Error.
*/
def updateCoupon(slug: String, couponCode: String, coupon: UpdateCoupon): Future[Result] = {
putJson(s"$host/$slug/coupons/$couponCode.json", coupon)
}
/** Sends a GET request to retrieve all coupons for a book.
*
* @param slug, usually book's title
* @return Future of Option which may contain a List of [[Coupon]]
*/
def getCoupons(slug: String): Future[Option[List[Coupon]]] = {
get(s"$host/$slug/coupons.json").map { response =>
response.map { json => json.as[List[Coupon]] }
}
}
/** Sends a GET request to retrieve general information about the book.
*
* @param slug, usually book's title
* @return Future of Option which may contain a [[BookInfo]] object
*/
def getSummary(slug: String): Future[Option[BookInfo]] = {
get(s"$host/$slug.json").map { response =>
response.map { json => json.as[BookInfo] }
}
}
/** Sends a GET request to retrieve information on a book's sales.
*
* @param slug, usually book's title
* @return Future of Option which may contain a [[Sales]] Object
*/
def getSales(slug: String): Future[Option[Sales]] = {
get(s"$host/$slug/sales.json").map { response =>
response.map { json => json.as[Sales] }
}
}
/** Sends a GET request to retrieve detailed information on a book's sales.
*
* This method only retrieves one page per method call. Please use the method
* [[getIndividualPurchaseSource]] if you would like to get all Individual Purchases.
* @param slug, usually book's title
* @param page page to load
* @return Future of Option which may contain a List of [[IndividualPurchase]]
*/
def getIndividualPurchases(slug: String, page: Int = 1): Future[Option[List[IndividualPurchase]]] = {
getWithPagination(s"$host/$slug/individual_purchases.json", page).map { response =>
response.map {
case a: JsArray => a.as[List[IndividualPurchase]]
case _ => List.empty[IndividualPurchase]
}
}
}
/** Creates a source which emits objects of [[IndividualPurchase]]
*
* Use this source to retrieve detailed sales information on a given book.
* @param slug, usually book's title
* @return an akka [[Source]] of [[IndividualPurchase]]
*/
def getIndividualPurchaseSource(slug: String): Source[IndividualPurchase, NotUsed] = {
val startPage = 1
Source.unfoldAsync(startPage) { pageNum =>
val futurePage: Future[Option[List[IndividualPurchase]]] = getIndividualPurchases(slug, pageNum)
val next = futurePage.map {
case Some(Nil) => None
case Some(list) => Some((pageNum + 1, list))
case _ => None
}
next
}.mapConcat(identity)
}
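// Usage sketch (assumes an implicit akka.stream.Materializer in scope):
//
//   client.getIndividualPurchaseSource("my-book")
//     .runForeach(println)
//
// Pages are fetched lazily: unfoldAsync only requests page n + 1 after page n
// has been emitted downstream, and the stream completes on the first empty page.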
private def postFormParams(url: String, formParams: Map[String, Seq[String]] = Map.empty): Future[Result] = {
val body = formParams.updated("api_key", Seq(apiKey))
val request = buildBasicRequest(url)
.post(body)
request.map { response => handleResponseToPost(url, response) }
}
private def postJson[A](url: String, a: A)(implicit writes: Writes[A]): Future[Result] = {
val query = "api_key" -> apiKey
val data = Json.toJson(a)
val request = buildBasicRequest(url)
.withQueryString(query)
.post(data)
request.map { response => handleResponseToPost(url, response) }
}
private def putJson[A](url: String, a: A)(implicit writes: Writes[A]): Future[Result] = {
val query = "api_key" -> apiKey
val data = Json.toJson(a)
val request = buildBasicRequest(url)
.withQueryString(query)
.put(data)
request.map { response => handleResponseToPost(url, response) }
}
private def get(url: String): Future[Option[JsValue]] = {
val query = "api_key" -> apiKey
val request = buildBasicRequest(url)
.withQueryString(query)
.get()
request.map { response => handleResponseToGet(url, response) }
}
private def getWithPagination(url: String, page: Int): Future[Option[JsValue]] = {
val query1 = "api_key" -> apiKey
val query2 = "page" -> page.toString
val request = buildBasicRequest(url)
.withQueryString(query1, query2)
.get()
request.map { response => handleResponseToGet(url, response) }
}
private def buildBasicRequest(url: String): StandaloneWSRequest = {
wsClient.url(url).withRequestTimeout(requestTimeout)
}
}
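// Construction sketch (hypothetical wiring, not part of this file): any
// StandaloneWSClient implementation will do, e.g. Play's AHC standalone client.
//
//   implicit val ec = scala.concurrent.ExecutionContext.global
//   val client = new LeanPubClient(wsClient, sys.env("LEANPUB_API_KEY"), 10.seconds)
//   client.getSummary("my-book").foreach(println)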
| innoq/leanpub-client | src/main/scala/com/innoq/leanpubclient/LeanPubClient.scala | Scala | mit | 7,036 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package statements
package params
import com.intellij.lang.ASTNode
import com.intellij.psi._
import com.intellij.psi.scope.PsiScopeProcessor
import org.jetbrains.plugins.scala.JavaArrayFactoryUtil.ScTypeParamFactory
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import org.jetbrains.plugins.scala.lang.psi.stubs.ScTypeParamClauseStub
import org.jetbrains.plugins.scala.lang.resolve.processor.BaseProcessor
/**
* @author Alexander Podkhalyuzin
* @since 22.02.2008
*/
class ScTypeParamClauseImpl private (stub: ScTypeParamClauseStub, node: ASTNode)
extends ScalaStubBasedElementImpl(stub, TYPE_PARAM_CLAUSE, node) with ScTypeParamClause {
def this(node: ASTNode) = this(null, node)
def this(stub: ScTypeParamClauseStub) = this(stub, null)
override def toString: String = "TypeParameterClause"
def getTextByStub: String = byStubOrPsi(_.typeParameterClauseText)(getText)
def typeParameters: Seq[ScTypeParam] = getStubOrPsiChildren(TYPE_PARAM, ScTypeParamFactory)
override def processDeclarations(processor: PsiScopeProcessor, state: ResolveState, lastParent: PsiElement, place: PsiElement): Boolean = {
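// Scala's own BaseProcessor resolves type parameters through other paths;
// plain PSI processors (e.g. Java resolve) get the type parameters fed directly.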
if (!processor.isInstanceOf[BaseProcessor]) {
for (param <- typeParameters) {
if (!processor.execute(param, state)) return false
}
}
true
}
} | loskutov/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/statements/params/ScTypeParamClauseImpl.scala | Scala | apache-2.0 | 1,460 |
package gie.app.gbb.db
//////////////////////////////////////////////////////////////////////////////////
import gie.app.gbb.SlickProfile.profile.simple._
import com.github.nscala_time.time.Imports._
import gie.UUIDMapper._
import gie.DateTimeMapper._
//////////////////////////////////////////////////////////////////////////////////
case class Board(id:Option[Long],
title: String,
description: String,
path: String)
object BoardsTable extends Table[Board]("boards"){
def id = column[Long]("id", O.DBType("IDENTITY"), O.PrimaryKey, O.AutoInc)
def title = column[String]("title")
def description = column[String]("description")
def path = column[String]("path")
def * = id.? ~ title ~ description ~ path <> (Board, Board.unapply _)
def forInsert = title ~ description ~ path <> ({v=>Board(None,v._1, v._2, v._3)}, {v:Board=>Some(v.title, v.description, v.path)})
}
//////////////////////////////////////////////////////////////////////////////////
case class Message(id: Option[Long] = None,
boardId: Long,
threadId: Option[Long] = None,
childCount: Int = 0,
name: Option[String] = None,
email: Option[String] = None,
subject: String,
comment: String,
file: Option[Long] = None,
cDate: DateTime = DateTime.now,
mDate: DateTime = DateTime.now)
object MessagesTable extends Table[Message]("messages"){
val C_MODIFICATION_DATE = "modification_date"
val C_CHILD_COUNT = "child_count"
val C_ID = "id"
def id = column[Long](C_ID, O.DBType("IDENTITY"), O.PrimaryKey, O.AutoInc)
def boardId = column[Long]("board_id")
def threadId = column[Long]("thread_id", O.Nullable)
def childCount = column[Int](C_CHILD_COUNT, O.Default(0))
def name = column[String]("name", O.Nullable)
def email = column[String]("email", O.Nullable)
def subject = column[String]("subject")
def comment = column[String]("message")
def file = column[Long]("file_id", O.Nullable)
def cDate = column[DateTime]("creation_date")
def mDate = column[DateTime](C_MODIFICATION_DATE)
def cDateIdx = index("messages_cDate_idx", cDate)
def mDateIdx = index("messages_mDate_idx", mDate)
def topicFK = foreignKey("messages_threadId_fk", threadId, MessagesTable)(_.id)
def boardFK = foreignKey("messages_boardId_fk", boardId, BoardsTable)(_.id)
def fileFK = foreignKey("file_id_fk", file, FileRecordsTable)(_.id)
def * = id.? ~ boardId ~ threadId.? ~ childCount ~ name.? ~ email.? ~ subject ~ comment ~ file.? ~ cDate ~ mDate <> (Message, Message.unapply _)
}
//////////////////////////////////////////////////////////////////////////////////
case class FileRecord(id: Option[Long] = None,
uuid: gie.UUID,
mime: String,
thumbnailId: Option[Long] = None)
object FileRecordsTable extends Table[FileRecord]("file_records"){
def id = column[Long]("id", O.DBType("IDENTITY"), O.PrimaryKey, O.AutoInc)
def uuid = column[gie.UUID]("file_uuid")//(UUIDMapper.uuidMap)
def mime = column[String]("mime")
def thumbnailId = column[Long]("thumbnail_id", O.Nullable)
def thumbnailFK = foreignKey(s"${this.tableName}_thumbnailId_fk", thumbnailId, FileRecordsTable)(_.id)
def uuidIdx = index(s"${this.tableName}_uuid_fk", uuid, unique = true)
def * = id.? ~ uuid ~ mime ~ thumbnailId.? <> (FileRecord, FileRecord.unapply _)
}
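//////////////////////////////////////////////////////////////////////////////////
// Hedged usage sketch (added for illustration, not part of the original file).
// It relies on the Slick 1.x lifted-embedding API imported above; the Session
// is assumed to be provided elsewhere, e.g. via Database.forURL(...).withSession.
object SchemaUsageSketch {
  def boardByPath(path: String)(implicit session: Session): Option[Board] =
    Query(BoardsTable).filter(_.path === path).firstOption

  def threadsOnBoard(boardId: Long)(implicit session: Session): List[Message] =
    Query(MessagesTable).filter(m => m.boardId === boardId && m.threadId.isNull).list
}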
| igorge/gbb | src/main/scala/code/db/ddl.scala | Scala | gpl-2.0 | 3,273 |
package nz.wicker.autoencoder.neuralnet
import nz.wicker.autoencoder.math.matrix._
import nz.wicker.autoencoder.math.matrix.Mat
import nz.wicker.autoencoder.math.structure.VectorSpace
import nz.wicker.autoencoder.visualization.Visualizable
import java.awt.Image
import java.awt.image.BufferedImage
/**
 * Represents a single layer of a neural network. The sort of layer meant
 * here does not necessarily contain any neurons; instead we think of a layer
 * as a filter in a long pipe: it can contain neurons that are activated
 * with some activation function, but it can also contain only connections
 * between the layer below and the layer above.
 *
 * Common to all layers is that they are parameterized by something that
 * is isomorphic to Real^n, that they
 * know how to transform their input into output (or transmit the signal in
 * the opposite direction), and that they can calculate the entries of the
 * gradient of the error function that correspond to their parameters, given
 * the partial derivatives wrt. their output passed from above. In other
 * words: each layer knows how to propagate all signals in a feed-forward
 * manner, and how to propagate errors backwards.
 */
trait Layer extends VectorSpace[Layer] with Visualizable {
// signal propagation methods
/**
* Returns the output given the input. This method can cache data
* that could be useful on the second pass of the backpropagation.
*
* The input contains one example in each row, the output shall have the
* same layout.
*/
def propagate(input: Mat): Mat
/**
* Returns the result of signal propagation in reverse direction
*/
def reversePropagate(output: Mat): Mat
/**
* Returns the gradient (Layer-valued) and the backpropagated
* error, which is passed to the layer below.
*
* This method can rely on the fact that the `propagate` method
* already has been called in the first pass.
*
* @param backpropagatedError error propagated from above, formatted
* the same way (one row for each example) as input and output
* @return gradient (Layer-valued) and the next backpropagated error
*/
def gradAndBackpropagationError(backpropagatedError: Mat): (Layer, Mat)
/**
* Creates a new independent layer that has the same type as this one,
* but propagates the information in reverse direction
*/
def reverseLayer: Layer
def inputDimension: Int
def outputDimension: Int
/**
* Optionally, one can specify how to reshape the neuron activities for
* visualization (height, width).
*/
def activityShape: Option[(Int, Int)] = None
/**
* Color map for the activities
*/
def activityColorscheme: Double => Int =
nz.wicker.autoencoder.visualization.defaultColorscheme
def visualizeActivity(activity: Mat): BufferedImage = {
activityShape match {
case None => activity.toImage(activityColorscheme)
case Some((h, w)) => {
val numExamples = activity.height
val result =
new BufferedImage(h, w * numExamples, BufferedImage.TYPE_INT_RGB)
val g = result.getGraphics()
for (r <- 0 until activity.width) {
val img = activity(r ::: (r + 1), 0 ::: end).
reshape(h, w).
toImage(activityColorscheme)
g.drawImage(img, w * r, 0, w, h, null)
}
result
}
}
}
}
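/**
 * Hedged sketch (added for illustration, not part of the original trait): the
 * intended calling order for a stack of layers. The forward pass lets every
 * layer cache whatever it needs; the backward pass then walks the stack in
 * reverse, threading the backpropagated error and collecting the Layer-valued
 * gradients. `LayerUsageSketch` and its parameters are hypothetical.
 */
object LayerUsageSketch {
  def gradients(layers: List[Layer], input: Mat, errorFromLoss: Mat): List[Layer] = {
    // forward pass: the outputs are discarded here, only the internal caches matter
    layers.foldLeft(input) { (signal, layer) => layer.propagate(signal) }
    // backward pass: prepending keeps the collected gradients in layer order
    val (grads, _) = layers.reverse.foldLeft((List.empty[Layer], errorFromLoss)) {
      case ((acc, err), layer) =>
        val (grad, nextErr) = layer.gradAndBackpropagationError(err)
        (grad :: acc, nextErr)
    }
    grads
  }
}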
| joergwicker/autoencoder | src/main/scala/nz/wicker/autoencoder/neuralnet/Layer.scala | Scala | gpl-3.0 | 3,378 |
package com.advancedspark.pmml.spark.ml
import java.io.File
import scala.collection.JavaConverters._
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.Predictor
import org.apache.spark.ml.classification.DecisionTreeClassifier
import org.apache.spark.ml.feature.RFormula
import org.apache.spark.ml.regression.DecisionTreeRegressor
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.DataFrameReader
import org.apache.spark.sql.types.StructType
import org.dmg.pmml.FieldName
import org.dmg.pmml.DataField
import org.xml.sax.InputSource
import org.apache.commons.io.IOUtils
import java.net.URL
import java.nio.charset.Charset
/*
age,workclass,education,education_num,marital_status,occupation,relationship,race,sex,capital_gain,capital_loss,hours_per_week,native_country,income
*/
case class Census(
age: Integer, workclass: String, education: String, education_num: Integer, marital_status: String,
occupation: String, relationship: String, race: String, sex: String, capital_gain: Integer,
capital_loss: Integer, hours_per_week: Integer, native_country: String, income: String
)
object GeneratePMML {
def main(args: Array[String]) = {
val sparkConf: SparkConf = new SparkConf()
val sc: SparkContext = new SparkContext(sparkConf)
val sqlContext = new SQLContext(sc)
val datasetUrl = "https://raw.githubusercontent.com/fluxcapacitor/datapalooza.ml/master/R/census.csv"
val datasetLines = IOUtils.toString(new URL(datasetUrl), "utf8")
val datasetRDD = sc.parallelize(datasetLines.split("\\n"))
import sqlContext.implicits._
val censusDF = datasetRDD.map(s => s.split(","))
.filter(s => s(0) != "age").map(s =>
Census(s(0).toInt,
s(1),
s(2),
s(3).toInt,
s(4),
s(5),
s(6),
s(7),
s(8),
s(9).toInt,
s(10).toInt,
s(11).toInt,
s(12),
s(13))
).toDF()
import org.apache.spark.ml.classification.DecisionTreeClassificationModel
val formulaStr: String = "income ~ ."
val schema: StructType = censusDF.schema
System.out.println(schema.treeString)
val formula: RFormula = new RFormula().setFormula(formulaStr)
var predictor: Predictor[_, _, _] = new DecisionTreeClassifier().setMinInstancesPerNode(10)
predictor.setLabelCol(formula.getLabelCol)
predictor.setFeaturesCol(formula.getFeaturesCol)
val pipeline = new Pipeline().setStages(Array[PipelineStage](formula, predictor))
val pipelineModel = pipeline.fit(censusDF)
val predictorModel = pipelineModel.stages(1).asInstanceOf[DecisionTreeClassificationModel]
System.out.println(predictorModel.toDebugString)
import org.jpmml.sparkml.ConverterUtil
val pmml = ConverterUtil.toPMML(schema, pipelineModel)
System.out.println(pmml.getModels().get(0).toString())
import org.jpmml.model.ImportFilter
import org.jpmml.model.JAXBUtil
import org.jpmml.model.MetroJAXBUtil
val pmmlOutput: File = new File(s"census.pmml")
val os = new java.io.FileOutputStream(pmmlOutput.getAbsolutePath())
MetroJAXBUtil.marshalPMML(pmml, os)
val baos = new java.io.ByteArrayOutputStream()
MetroJAXBUtil.marshalPMML(pmml, baos)
/*
import org.apache.http.client.methods.HttpPost
import org.apache.http.entity.StringEntity
import org.apache.http.impl.client.DefaultHttpClient // TODO: this is deprecated
// create an HttpPost object
println("--- HTTP POST UPDATED PMML ---")
val post = new HttpPost(s"http://demo.pipeline.io:9040/update-pmml/${pmmlName}")
// set the Content-type
post.setHeader("Content-type", "application/xml")
// add the JSON as a StringEntity
post.setEntity(new StringEntity(baos.toString()))
// send the post request
val response = (new DefaultHttpClient).execute(post)
// print the response headers
println("--- HTTP RESPONSE HEADERS ---")
    response.getAllHeaders.foreach(arg => println(arg))
val is = new java.io.FileInputStream(pmmlOutput.getAbsolutePath())
val transformedSource = ImportFilter.apply(new InputSource(is))
val pmml2 = JAXBUtil.unmarshalPMML(transformedSource)
import org.jpmml.evaluator.Evaluator
import org.jpmml.evaluator.FieldValue
import org.jpmml.evaluator.ModelEvaluatorFactory
import org.jpmml.evaluator.EvaluatorUtil
val modelEvaluatorFactory = ModelEvaluatorFactory.newInstance()
val modelEvaluator: Evaluator = modelEvaluatorFactory.newModelEvaluator(pmml2)
System.out.println("Mining function: " + modelEvaluator.getMiningFunction())
val inputFields = modelEvaluator.getInputFields().asScala
System.out.println("Input schema:");
System.out.println("\\t" + "Input fields: " + inputFields)
System.out.println("Output schema:");
System.out.println("\\t" + "Target fields: " + modelEvaluator.getTargetFields())
System.out.println("\\t" + "Output fields: " + modelEvaluator.getOutputFields())
val inputs: Map[String, _] = Map("age" -> 39,
"workclass" -> "State-gov",
"education" -> "Bachelors",
"education_num" -> 13,
"marital_status" -> "Never-married",
"occupation" -> "Adm-clerical",
"relationship" -> "Not-in-family",
"race" -> "White",
"sex" -> "Male",
"capital_gain" -> 2174,
"capital_loss" -> 0,
"hours_per_week" -> 40,
"native_country" -> "United-States")
val arguments =
( for(inputField <- inputFields)
// The raw value is passed through:
// 1) outlier treatment,
// 2) missing value treatment,
// 3) invalid value treatment
// 4) type conversion
yield (inputField.getName -> inputField.prepare(inputs(inputField.getName.getValue)))
).toMap.asJava
val results = modelEvaluator.evaluate(arguments)
val targetField = modelEvaluator.getTargetFields().asScala(0)
val targetValue = results.get(targetField)
System.out.println(s"**** Predicted value for '${targetField.getName}': ${targetValue} ****")
*/
}
}
| fluxcapacitor/source.ml | apachespark.ml/demos/pmml/src/main/scala/com/advancedspark/pmml/spark/ml/GeneratePMML.scala | Scala | apache-2.0 | 6,665 |
/*
* Copyright 2011-2017 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.cdevreeze.yaidom.queryapi
import java.net.URI
/**
* Minimal API for Documents, having a type parameter for the element type.
*
* This is a purely abstract API trait. It can be useful in generic code abstracting over multiple element implementations.
*
* @author Chris de Vreeze
*/
trait DocumentApi extends AnyDocumentApi {
type ThisDoc <: DocumentApi
/** Returns the document element */
def documentElement: DocElemType
/** Returns the optional document URI, wrapped in an Option */
def uriOption: Option[URI]
}
object DocumentApi {
/**
* This document API type, restricting the type members to the passed type parameters.
*
* @tparam D The document type itself
* @tparam E The document element type
*/
type Aux[D, E] = DocumentApi { type ThisDoc = D; type DocElemType = E }
}
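// Hedged usage sketch (added for illustration, not part of the original file):
// the Aux alias pins both type members, so generic code can demand matching
// document/element implementations without naming a concrete one, e.g.
//
//   def docUri[D <: DocumentApi, E](doc: DocumentApi.Aux[D, E]): Option[URI] =
//     doc.uriOption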
| dvreeze/yaidom | shared/src/main/scala/eu/cdevreeze/yaidom/queryapi/DocumentApi.scala | Scala | apache-2.0 | 1,432 |
/*
* Copyright 2016 Actian Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.actian.spark_vector
import scala.collection.immutable.List
import scala.language.implicitConversions
import com.actian.spark_vector.util.Logging
/** An accumulator for profiling. Contains a label and the accumulated elapsed time in nanoseconds (`acc`). */
case class ProfAcc(val name: String, var acc: Long = 0) {
def print(logFcn: (=> String) => Unit, msg: String): Unit = logFcn(f"$msg ${acc.toDouble / 1000000000}%.6fs")
def print(logFcn: (=> String) => Unit): Unit = print(logFcn, s"#PROF $name =")
}
/** Contains all [[ProfAcc]]s defined and which of those are currently accumulating/measuring time (as a stack) */
case class ProfAccMap(accs: Map[String, ProfAcc], var started: List[ProfAcc] = Nil)
/**
* Trait to be used when profiling is needed. To profile a section of the code, the following steps should be followed:
* - Call [[profileInit]](<label_for_first_section>, <label_for_second_section>, ...) and store it into an implicit value
* - Use [[profile]](<section_name>) and [[profileEnd]] in a bracket opening/closing fashion, where the code between a
* [[profile]] call and its corresponding [[profileEnd]] will have its execution time measured and stored into its accumulator
* - Call [[profilePrint]] at the end to log the profiling information gathered
*/
trait Profiling {
this: Logging =>
/**
* Start measuring time and record it in `acc`.
*
* @note This function should always be used in combination with [[profileEnd]] in a similar way with opening and closing
* a sequence of brackets, where the code between a [[profile]] call and its corresponding [[profileEnd]] will have its
* execution time measured and stored into `acc`
*/
def profile(acc: ProfAcc)(implicit pmap: ProfAccMap): Unit = {
acc.acc -= System.nanoTime()
pmap.started = acc :: pmap.started
}
/** Finish profiling the current section of code, as determined by the most recent [[profile]] call */
  def profileEnd(implicit pmap: ProfAccMap): Unit = {
    val last = pmap.started.head
    pmap.started = pmap.started.tail // pop the stack so sections pair up like brackets
    last.acc += System.nanoTime()
  }
/** Initialize profiling */
def profileInit(names: String*): ProfAccMap = ProfAccMap(names.map { case name => (name, ProfAcc(name)) }.toMap)
/** Print profile information using `log` */
def profilePrint(implicit pmap: ProfAccMap): Unit = pmap.accs.map(_._2).foreach { _.print(logDebug) }
implicit def nameToAcc(name: String)(implicit pmap: ProfAccMap): ProfAcc = pmap.accs(name)
}
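/**
 * Hedged usage sketch (added for illustration, not part of the original file):
 * follows the bracket-style protocol described above. It assumes a concrete
 * Logging implementation can be mixed in; the accumulator names and the work
 * inside the sections are illustrative.
 */
object ProfilingUsageSketch extends Profiling with Logging {
  def run(): Unit = {
    implicit val accs: ProfAccMap = profileInit("read", "transform")
    profile("read")
    // ... timed section 1 ...
    profileEnd
    profile("transform")
    // ... timed section 2 ...
    profileEnd
    profilePrint
  }
}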
| ActianCorp/spark-vector | src/main/scala/com/actian/spark_vector/Profiling.scala | Scala | apache-2.0 | 3,050 |
package mesosphere.marathon.core.storage.store.impl.cache
import java.util.UUID
import com.codahale.metrics.MetricRegistry
import mesosphere.AkkaUnitTest
import mesosphere.marathon.core.storage.store.PersistenceStoreTest
import mesosphere.marathon.core.storage.store.impl.InMemoryTestClass1Serialization
import mesosphere.marathon.core.storage.store.impl.memory.InMemoryPersistenceStore
import mesosphere.marathon.core.storage.store.impl.zk.{ ZkPersistenceStore, ZkTestClass1Serialization }
import mesosphere.marathon.integration.setup.ZookeeperServerTest
import mesosphere.marathon.metrics.Metrics
import mesosphere.marathon.storage.store.InMemoryStoreSerialization
import scala.concurrent.duration.Duration
class LazyCachingPersistenceStoreTest extends AkkaUnitTest
with PersistenceStoreTest with ZkTestClass1Serialization with ZookeeperServerTest
with InMemoryStoreSerialization with InMemoryTestClass1Serialization {
private def cachedInMemory = {
implicit val metrics = new Metrics(new MetricRegistry)
new LazyCachingPersistenceStore(new InMemoryPersistenceStore())
}
def zkStore: ZkPersistenceStore = {
implicit val metrics = new Metrics(new MetricRegistry)
val root = UUID.randomUUID().toString
val client = zkClient(namespace = Some(root))
new ZkPersistenceStore(client, Duration.Inf, 8)
}
private def cachedZk = new LazyCachingPersistenceStore(zkStore)
behave like basicPersistenceStore("LazyCache(InMemory)", cachedInMemory)
behave like basicPersistenceStore("LazyCache(Zk)", cachedZk)
// TODO: Mock out the backing store.
}
| timcharper/marathon | src/test/scala/mesosphere/marathon/core/storage/store/impl/cache/LazyCachingPersistenceStoreTest.scala | Scala | apache-2.0 | 1,592 |
package io.surfkit.typebus.actors
import akka.actor.{Actor, ActorLogging, Props}
import io.surfkit.typebus.AvroByteStreams
import io.surfkit.typebus.bus.Publisher
import io.surfkit.typebus.event._
/**
  * ProducerActor - wraps the bus and publishes requests to it (Kafka / Kinesis).
  * Note that we wrap all messages on the bus in a PublishedEvent.
  * @param producer - the underlying bus provider (Kafka / Kinesis)
  */
class ProducerActor(producer: Publisher) extends Actor with ActorLogging with AvroByteStreams {
def receive = {
case x:PublishedEvent =>
try {
log.info(s"[ProducerActor] publish ${x.meta.eventType}")
producer.publish(x)(context.system)
}catch{
case t: Throwable =>
log.error(t, "Error trying to publish event.")
}
case x =>
log.warning(s"ProducerActor does not know how to handle type[${x.getClass.getSimpleName}] containing: ${x} ...WTF WTF WTF !!!!!!!!")
}
override def postStop() {
log.debug(s"ProducerActor ACTOR STOP !!! ${self.path.toStringWithoutAddress}")
}
}
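/**
  * Hedged sketch (added for illustration, not part of the original file):
  * a conventional Props factory for creating this actor.
  */
object ProducerActor {
  def props(producer: Publisher): Props = Props(new ProducerActor(producer))
}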
| coreyauger/typebus | typebus/src/main/scala/io/surfkit/typebus/actors/ProducerActor.scala | Scala | mit | 1,074 |
// Copyright 2013 Sean Wellington
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sqs.sean8223
import akka.testkit.TestKit
import akka.testkit.ImplicitSender
import org.scalatest.matchers.MustMatchers
import org.scalatest.BeforeAndAfterAll
import akka.actor.ActorSystem
import org.scalatest.WordSpec
class SQSPollerSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender with WordSpec with MustMatchers with BeforeAndAfterAll {
def this() = this(ActorSystem())
override def afterAll {
TestKit.shutdownActorSystem(system)
}
} | sean8223/akka-sqs | src/test/scala/sqs/sean8223/SQSPollerSpec.scala | Scala | apache-2.0 | 1,074 |
package scscene
import java.awt.Graphics2D
import java.io.File
import java.net.URI
import java.util.{ Vector => JVector }
import com.kitfox.svg._
import scgeom._
object SvgFigure {
def load(file:File):SVGDiagram = load(file.toURI)
def load(uri:URI):SVGDiagram = SVGCache.getSVGUniverse getDiagram uri
}
case class SvgFigure(
clip:Option[Clip],
transform:SgAffineTransform,
svg:SVGDiagram
) extends Figure {
lazy val globalBounds:SgRectangle =
SgRectangle fromRectangle2D (transform transformShape svg.getViewRect).getBounds2D inset SgRectangleInsets.one.inverse
final def globalPicked(at:SgPoint):Boolean =
(clip forall { _ globalPicked at }) &&
(transform.inverse exists { t =>
!(svg pick (t transformPoint2D at.toPoint2D, new JVector) isEmpty)
})
def paintImpl(g:Graphics2D) {
for (c <- clip) {
g clip c.globalShape
}
g transform transform.toAffineTransform
svg.render(g)
}
}
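// Hedged usage sketch (added for illustration, not part of the original file;
// the identity-transform accessor on SgAffineTransform is an assumption about
// the scgeom API):
//
//   val diagram = SvgFigure load new File("logo.svg")
//   val figure  = SvgFigure(clip = None, transform = SgAffineTransform.identity, svg = diagram)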
| ritschwumm/scscene | src/main/scala/scscene/SvgFigure.scala | Scala | bsd-2-clause | 927 |
package me.frmr.kafka.detective
import java.util.Properties
import org.apache.kafka.common._
import org.apache.kafka.clients.consumer._
import org.scalatest._
import org.scalatest.concurrent._
import scala.collection.JavaConverters._
class CallbackKafkaConsumerSpec extends FlatSpec with Matchers with Eventually {
"CallbackKafkaConsumer" should "init with running and hasBeenUsed as false" in withFixtures { (mockConsumer, callbackConsumer) =>
callbackConsumer.getRunning shouldBe false
callbackConsumer.getHasBeenUsed shouldBe false
}
it should "invoke the callback when it receives records" in withFixtures { (mockConsumer, callbackConsumer) =>
@volatile var sawMessage: Option[(String, String)] = None
callbackConsumer.doAssignments()
mockConsumer.addRecord(newRecord("abc", "def"))
callbackConsumer.addCallback { (record) =>
sawMessage = Some((record.key(), record.value()))
}
callbackConsumer.start()
eventually {
sawMessage shouldBe Some(("abc", "def"))
}
}
it should "invoke the batch callback when it receives records" in withFixtures { (mockConsumer, callbackConsumer) =>
@volatile var sawMessage: Option[(String, String)] = None
callbackConsumer.doAssignments()
mockConsumer.addRecord(newRecord("abc", "def"))
callbackConsumer.addBatchCallback { (records) =>
records.asScala.toList.foreach { record =>
sawMessage = Some((record.key(), record.value()))
}
}
callbackConsumer.start()
eventually {
sawMessage shouldBe Some(("abc", "def"))
}
}
it should "report itself as not running and used after stop" in withFixtures { (mockConsumer, callbackConsumer) =>
callbackConsumer.start()
callbackConsumer.stop()
eventually {
callbackConsumer.getRunning shouldBe false
callbackConsumer.getHasBeenUsed shouldBe true
}
}
it should "throw an exception if you attempt to use it more than once" in withFixtures { (mockConsumer, callbackConsumer) =>
callbackConsumer.start()
Thread.sleep(5)
val thrown = the [RuntimeException] thrownBy callbackConsumer.start()
thrown.getMessage should equal ("This callback kafka consumer has already been used. Please instantiate a new one.")
}
it should "properly handle a reset request" in withFixtures { (mockConsumer, callbackConsumer) =>
@volatile var sawReset: Boolean = false
@volatile var sawMessage: Option[(String, String)] = None
callbackConsumer.addBatchCallback { (records) =>
records.asScala.toList.foreach { record =>
sawMessage = Some((record.key(), record.value()))
}
}
callbackConsumer.doAssignments()
mockConsumer.addRecord(newRecord("abc", "def"))
callbackConsumer.addResetCallback( () => sawReset = true )
callbackConsumer.start()
eventually {
withClue("Didn't consume message") {
callbackConsumer.getPositionAheadOfStart should be > 0L
}
}
callbackConsumer.reset()
eventually {
withClue("Didn't see reset flag flip") {
sawReset shouldBe true
}
sawMessage shouldBe Some(("abc", "def"))
callbackConsumer.getPositionAheadOfStart shouldBe 0L
}
}
it should "give reset priority over backpressure" in withFixtures { (mockConsumer, callbackConsumer) =>
@volatile var sawReset: Boolean = false
callbackConsumer.addResetCallback( () => sawReset = true )
callbackConsumer.setBackpressureFn( Some(() => true) )
callbackConsumer.start()
callbackConsumer.reset()
eventually {
withClue("Didn't see reset flag flip") {
sawReset shouldBe true
}
withClue("Didn't see backpressure flag flip") {
callbackConsumer.isBackpressuring shouldBe true
}
}
}
var msgOffset = 0L
def newRecord(key: String, value: String): ConsumerRecord[String, String] = {
val newOffset = msgOffset + 1L
msgOffset = newOffset
new ConsumerRecord("test", 0, msgOffset, key, value)
}
def withFixtures(testCode: (MockConsumer[String, String], CallbackKafkaConsumer[String, String])=>Any) = {
val mockConsumer = new MockConsumer[String, String](OffsetResetStrategy.EARLIEST)
val windowingConsumer = new CallbackKafkaConsumer[String, String]("test", Seq(0), new Properties()) {
override lazy val consumer: Consumer[String, String] = mockConsumer
override def doAssignments: Unit = {
mockConsumer.assign(Seq(new TopicPartition("test", 0)).asJava)
mockConsumer.updateBeginningOffsets(Map(new TopicPartition("test", 0) -> new java.lang.Long(0)).asJava)
}
}
try {
testCode(mockConsumer, windowingConsumer)
} finally {
windowingConsumer.stop()
}
}
}
| farmdawgnation/kafka-detective | daemon/src/test/scala/me/frmr/kafka/detective/CallbackKafkaConsumerSpec.scala | Scala | apache-2.0 | 4,745 |
package views.html
import play.templates._
import play.templates.TemplateMagic._
import play.api.templates._
import play.api.templates.PlayMagic._
import models._
import controllers._
import play.api.i18n._
import play.api.mvc._
import play.api.data._
import views.html._
/* userCountDown Template File */
object userCountDown extends BaseScalaTemplate[play.api.templates.HtmlFormat.Appendable,Format[play.api.templates.HtmlFormat.Appendable]](play.api.templates.HtmlFormat) with play.api.templates.Template2[scala.Tuple2[Option[String], String],RequestHeader,play.api.templates.HtmlFormat.Appendable] {
/* userCountDown Template File */
def apply/*2.2*/(phnos: (Option[String], String))(implicit request: RequestHeader):play.api.templates.HtmlFormat.Appendable = {
_display_ {
Seq[Any](format.raw/*2.68*/("""
"""),_display_(/*4.2*/main("Call")/*4.14*/{_display_(Seq[Any](format.raw/*4.15*/("""
<legend>Missed Call Panel</legend>
<center>
<div class="alert alert-success">
"""),_display_(/*9.5*/phnos/*9.10*/._1/*9.13*/ match/*9.19*/{/*10.5*/case Some(number) =>/*10.25*/ {_display_(Seq[Any](format.raw/*10.27*/("""
<h1>Call """),_display_(/*11.16*/number),format.raw/*11.22*/("""</h1>
""")))}/*13.5*/case None =>/*13.17*/ {_display_(Seq[Any](format.raw/*13.19*/("""
<h1>Serious error ocurred. try to reload !!!</h1>
""")))}}),format.raw/*16.5*/("""
</div>
<div class="well">
<h1 id="start"></h1>
</div>
</center>
<script type="text/javascript">
$(document).ready(function() """),format.raw/*27.32*/("""{"""),format.raw/*27.33*/("""
var count = 120;
setTimeout(function() """),format.raw/*29.29*/("""{"""),format.raw/*29.30*/(""" count = 0; window.location = """"),_display_(/*29.62*/routes/*29.68*/.Application.redirect()),format.raw/*29.91*/(""""; """),format.raw/*29.94*/("""}"""),format.raw/*29.95*/(""", 60 * 2 * 1000);
window.setInterval(function()"""),format.raw/*30.37*/("""{"""),format.raw/*30.38*/("""
count = count - 1;
$("#start").text(count)
if(count <= 0) """),format.raw/*33.24*/("""{"""),format.raw/*33.25*/("""$("#start").text("Refresh to try again"); return;"""),format.raw/*33.74*/("""}"""),format.raw/*33.75*/("""
"""),format.raw/*34.8*/("""}"""),format.raw/*34.9*/(""", 1000)
"""),format.raw/*35.7*/("""}"""),format.raw/*35.8*/(""");
ws = new WebSocket(""""),_display_(/*37.27*/routes/*37.33*/.Application.callTrigger(phnos._2).webSocketURL()),format.raw/*37.82*/("""")
ws.onmessage = function(msg)"""),format.raw/*38.34*/("""{"""),format.raw/*38.35*/("""
window.location = """"),_display_(/*39.27*/routes/*39.33*/.Application.redirect()),format.raw/*39.56*/("""";
"""),format.raw/*40.6*/("""}"""),format.raw/*40.7*/("""
</script>
""")))}),format.raw/*43.2*/("""
"""))}
}
def render(phnos:scala.Tuple2[Option[String], String],request:RequestHeader): play.api.templates.HtmlFormat.Appendable = apply(phnos)(request)
def f:((scala.Tuple2[Option[String], String]) => (RequestHeader) => play.api.templates.HtmlFormat.Appendable) = (phnos) => (request) => apply(phnos)(request)
def ref: this.type = this
}
/*
-- GENERATED --
DATE: Tue Jul 01 01:00:33 IST 2014
SOURCE: /home/nagarjuna/FooService/app/views/userCountDown.scala.html
HASH: da80830e51fbadfd2b147b6541fec72124258317
MATRIX: 666->35|826->101|854->104|874->116|912->117|1027->207|1040->212|1051->215|1065->221|1074->227|1103->247|1143->249|1186->265|1213->271|1243->288|1264->300|1304->302|1397->369|1576->520|1605->521|1682->570|1711->571|1770->603|1785->609|1829->632|1860->635|1889->636|1972->691|2001->692|2112->775|2141->776|2218->825|2247->826|2282->834|2310->835|2351->849|2379->850|2436->880|2451->886|2521->935|2585->971|2614->972|2668->999|2683->1005|2727->1028|2762->1036|2790->1037|2836->1053
LINES: 19->2|22->2|24->4|24->4|24->4|29->9|29->9|29->9|29->9|29->10|29->10|29->10|30->11|30->11|31->13|31->13|31->13|33->16|44->27|44->27|46->29|46->29|46->29|46->29|46->29|46->29|46->29|47->30|47->30|50->33|50->33|50->33|50->33|51->34|51->34|52->35|52->35|54->37|54->37|54->37|55->38|55->38|56->39|56->39|56->39|57->40|57->40|60->43
-- GENERATED --
*/
| pamu/FooService | FooService2/target/scala-2.10/src_managed/main/views/html/userCountDown.template.scala | Scala | apache-2.0 | 4,406 |
package sbtmarathon
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicInteger
import scala.collection.JavaConverters._
import com.twitter.finagle.Service
import com.twitter.finagle.http._
import com.twitter.finagle.http.path._
import com.twitter.finagle.http.service.RoutingService
import com.twitter.util.Future
import org.json4sbt._
import org.json4sbt.jackson.JsonMethods._
case class MockMarathonService() extends Service[Request, Response] {
private implicit val formats = DefaultFormats
private val applicationInstances = new ConcurrentHashMap[String, AtomicInteger]
private val underlying = RoutingService.byMethodAndPathObject[Request] {
case (Method.Post, Root / "v2" / "apps") => startRequestHandler
case (Method.Delete, Root / "v2" / "apps" / applicationId) => destroyRequestHandler(applicationId)
case (Method.Delete, Root / "v2" / "apps") => systemErrorHandler
}
def apply(request: Request): Future[Response] = {
underlying(request)
}
val startRequestHandler = new Service[Request, Response] {
def apply(request: Request): Future[Response] = {
val jsonContent = parse(request.contentString)
val applicationId = (jsonContent \\ "id").extract[String]
val instances = (jsonContent \\ "instances").extractOpt[Int].getOrElse(1)
if (!applicationInstances.containsKey(applicationId)) {
applicationInstances.put(applicationId, new AtomicInteger(instances))
val response = Response(request.version, Status.Created)
Future.value(response)
} else {
val response = Response(request.version, Status.BadRequest)
response.contentType = "application/json"
response.write(s"""{"message":"An app with id [/$applicationId] already exists."}""")
Future.value(response)
}
}
}
def destroyRequestHandler(applicationId: String) = new Service[Request, Response] {
def apply(request: Request): Future[Response] = {
val response = Response(request.version, Status.Ok)
response.contentType = "application/json"
response.write(s"""{"$applicationId":"destroyed"}""")
Future.value(response)
}
}
val systemErrorHandler = new Service[Request, Response] {
def apply(request: Request): Future[Response] = {
val response = Response(request.version, Status.InternalServerError)
Future.value(response)
}
}
}
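/**
 * Hedged usage sketch (added for illustration, not part of the original file):
 * drives the mock directly with a Finagle request; the JSON payload mirrors
 * what startRequestHandler expects, and the application id is hypothetical.
 */
object MockMarathonServiceExample {
  def createApp(service: MockMarathonService): Future[Response] = {
    val request = Request(Method.Post, "/v2/apps")
    request.contentType = "application/json"
    request.contentString = """{"id":"my-app","instances":2}"""
    service(request)
  }
}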
| Tapad/sbt-marathon | marathon/src/test/scala/sbtmarathon/MockMarathonService.scala | Scala | bsd-3-clause | 2,419 |
/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.model
import com.netflix.atlas.core.model.DataExpr.AggregateFunction
import com.netflix.atlas.core.stacklang.SimpleWord
import com.netflix.atlas.core.stacklang.StandardVocabulary.Macro
import com.netflix.atlas.core.stacklang.Vocabulary
import com.netflix.atlas.core.stacklang.Word
object MathVocabulary extends Vocabulary {
import com.netflix.atlas.core.model.ModelExtractors._
val name: String = "math"
val dependsOn: List[Vocabulary] = List(DataVocabulary)
val words: List[Word] = List(
GroupBy,
Const,
Random,
Time,
CommonQuery,
Abs, Negate, Sqrt, PerStep,
Add, Subtract, Multiply, Divide,
GreaterThan,
GreaterThanEqual,
LessThan,
LessThanEqual,
FAdd, FSubtract, FMultiply, FDivide,
And, Or,
Sum, Count, Min, Max,
Macro("avg", List(":dup", ":sum", ":swap", ":count", ":div"), List("name,sps,:eq,(,nf.cluster,),:by")),
Macro("pct", List(":dup", ":sum", ":div", "100", ":mul"), List("name,sps,:eq,(,nf.cluster,),:by")),
Macro("dist-avg", List(
"statistic", "(", "totalTime", "totalAmount", ")", ":in", ":sum",
"statistic", "count", ":eq", ":sum",
":div",
":swap", ":cq"
),
List("name,playback.startLatency,:eq")),
Macro("dist-max", List(
"statistic", "max", ":eq", ":max",
":swap", ":cq"
),
List("name,playback.startLatency,:eq")),
Macro("dist-stddev", List(
// N
"statistic", "count", ":eq", ":sum",
// sum(x^2)
"statistic", "totalOfSquares", ":eq", ":sum",
// N * sum(x^2)
":mul",
// sum(x)
"statistic", "(", "totalAmount", "totalTime", ")", ":in", ":sum",
// sum(x)^2
":dup", ":mul",
// N * sum(x^2) - sum(x)^2
":sub",
// N^2
"statistic", "count", ":eq", ":sum", ":dup", ":mul",
// v = (N * sum(x^2) - sum(x)^2) / N^2
":div",
// stddev = sqrt(v)
":sqrt",
// Swap and use :cq to apply a common query
":swap", ":cq"
),
List("name,playback.startLatency,:eq"))
)
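  // Derivation note for the dist-stddev macro above (added for clarity): with
  // N = count, S1 = sum(x) and S2 = sum(x^2),
  //   variance = E[x^2] - E[x]^2 = S2/N - (S1/N)^2 = (N*S2 - S1^2) / N^2
  // which is exactly the quantity the macro assembles before applying :sqrt.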
case object GroupBy extends SimpleWord {
override def name: String = "by"
protected def matcher: PartialFunction[List[Any], Boolean] = {
case StringListType(_) :: TimeSeriesType(t) :: _ =>
t.dataExprs.forall(_.isInstanceOf[DataExpr.AggregateFunction])
}
protected def executor: PartialFunction[List[Any], List[Any]] = {
case StringListType(keys) :: TimeSeriesType(t) :: stack =>
val f = t.rewrite {
case af: AggregateFunction => DataExpr.GroupBy(af, keys)
}
f :: stack
}
override def summary: String =
"""
|Apply a common group by to all aggregation functions in the expression.
""".stripMargin.trim
override def signature: String = "TimeSeriesExpr keys:List -- TimeSeriesExpr"
override def examples: List[String] = List("name,sps,:eq,:avg,(,nf.cluster,)")
}
case object Const extends SimpleWord {
override def name: String = "const"
protected def matcher: PartialFunction[List[Any], Boolean] = {
case (_: String) :: _ => true
}
protected def executor: PartialFunction[List[Any], List[Any]] = {
case (v: String) :: stack => MathExpr.Constant(v.toDouble) :: stack
}
override def summary: String =
"""
|Generates a line where each datapoint is a constant value.
""".stripMargin.trim
override def signature: String = "Double -- TimeSeriesExpr"
override def examples: List[String] = List("42")
}
case object Random extends SimpleWord {
override def name: String = "random"
protected def matcher: PartialFunction[List[Any], Boolean] = { case _ => true }
protected def executor: PartialFunction[List[Any], List[Any]] = {
case s => MathExpr.Random :: s
}
override def summary: String =
"""
|Generates a line where each datapoint is a random value between 0.0 and 1.0.
""".stripMargin.trim
override def signature: String = " -- TimeSeriesExpr"
override def examples: List[String] = List("")
}
case object Time extends SimpleWord {
override def name: String = "time"
protected def matcher: PartialFunction[List[Any], Boolean] = {
case (_: String) :: _ => true
}
protected def executor: PartialFunction[List[Any], List[Any]] = {
case (v: String) :: stack => MathExpr.Time(v) :: stack
}
override def summary: String =
"""
|Generates a line based on the current time. Supported modes are secondOfMinute,
|secondOfDay, minuteOfHour, minuteOfDay, hourOfDay, dayOfWeek, dayOfMonth, dayOfYear,
|monthOfYear, yearOfCentury, yearOfEra, seconds (since epoch), or days (since epoch). The
|mode can also be one of the [ChronoField values]
|(https://docs.oracle.com/javase/8/docs/api/java/time/temporal/ChronoField.html).
""".stripMargin.trim
override def signature: String = "String -- TimeSeriesExpr"
override def examples: List[String] = List("hourOfDay", "HOUR_OF_DAY")
}
case object CommonQuery extends SimpleWord {
override def name: String = "cq"
protected def matcher: PartialFunction[List[Any], Boolean] = {
case (_: Query) :: (_: Expr) :: _ => true
}
protected def executor: PartialFunction[List[Any], List[Any]] = {
case (q2: Query) :: (expr: Expr) :: stack =>
val newExpr = expr.rewrite {
case q1: Query => Query.And(q1, q2)
}
newExpr :: stack
}
override def summary: String =
"""
|Recursively AND a common query to all queries in an expression.
""".stripMargin.trim
override def signature: String = "Expr Query -- Expr"
override def examples: List[String] = List(
"name,ssCpuUser,:eq,name,DiscoveryStatus_UP,:eq,:mul,nf.app,alerttest,:eq")
}
sealed trait UnaryWord extends SimpleWord {
protected def matcher: PartialFunction[List[Any], Boolean] = {
case TimeSeriesType(_) :: _ => true
}
def newInstance(t: TimeSeriesExpr): TimeSeriesExpr
protected def executor: PartialFunction[List[Any], List[Any]] = {
case TimeSeriesType(t) :: stack => newInstance(t) :: stack
}
override def signature: String = "TimeSeriesExpr -- TimeSeriesExpr"
override def examples: List[String] = List("0", "64", "-64")
}
case object Abs extends UnaryWord {
override def name: String = "abs"
def newInstance(t: TimeSeriesExpr): TimeSeriesExpr = MathExpr.Abs(t)
override def summary: String =
"""
|Compute a new time series where each interval has the absolute value of the input time
|series.
""".stripMargin.trim
}
case object Negate extends UnaryWord {
override def name: String = "neg"
def newInstance(t: TimeSeriesExpr): TimeSeriesExpr = MathExpr.Negate(t)
override def summary: String =
"""
|Compute a new time series where each interval has the negated value of the input time
|series.
""".stripMargin.trim
}
case object Sqrt extends UnaryWord {
override def name: String = "sqrt"
def newInstance(t: TimeSeriesExpr): TimeSeriesExpr = MathExpr.Sqrt(t)
override def summary: String =
"""
|Compute a new time series where each interval has the square root of the value from the
|input time series.
""".stripMargin.trim
}
case object PerStep extends UnaryWord {
override def name: String = "per-step"
def newInstance(t: TimeSeriesExpr): TimeSeriesExpr = MathExpr.PerStep(t)
override def summary: String =
"""
|Converts a line from a rate per second to a rate based on the step size of the graph.
|This is useful for getting an estimate of the raw number of events for a given
|interval.
""".stripMargin.trim
}
sealed trait BinaryWord extends SimpleWord {
protected def matcher: PartialFunction[List[Any], Boolean] = {
case TimeSeriesType(_) :: TimeSeriesType(_) :: _ => true
}
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr
protected def executor: PartialFunction[List[Any], List[Any]] = {
case TimeSeriesType(t2) :: TimeSeriesType(t1) :: stack => newInstance(t1, t2) :: stack
}
override def signature: String = "TimeSeriesExpr TimeSeriesExpr -- TimeSeriesExpr"
override def examples: List[String] = List(
"name,sps,:eq,42",
"name,sps,:eq,:sum,name,requestsPerSecond,:eq,:max,(,name,),:by")
}
case object Add extends BinaryWord {
override def name: String = "add"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.Add(t1, t2)
}
override def summary: String =
"""
|Compute a new time series where each interval has the value `(a addNaN b)` where `a`
| and `b` are the corresponding intervals in the input time series. Sample:
|
|| :add | 3.0 | 0.0 | 1.0 | 1.0 | NaN |
||---------|-----|-----|-----|-----|-----|
|| Input 1 | 1.0 | 0.0 | 1.0 | 1.0 | NaN |
|| Input 2 | 2.0 | 0.0 | 0.0 | NaN | NaN |
|
|Use the [fadd](math-fadd) operator to get strict floating point behavior.
""".stripMargin.trim
}
case object Subtract extends BinaryWord {
override def name: String = "sub"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.Subtract(t1, t2)
}
override def summary: String =
"""
|Compute a new time series where each interval has the value `(a subtractNaN b)` where `a`
| and `b` are the corresponding intervals in the input time series.
|
|| :sub | 1.0 | 0.0 | 1.0 | 1.0 | NaN |
||---------|-----|-----|-----|-----|-----|
|| Input 1 | 2.0 | 0.0 | 1.0 | 1.0 | NaN |
|| Input 2 | 1.0 | 0.0 | 0.0 | NaN | NaN |
|
|Use the [fsub](math-fsub) operator to get strict floating point behavior.
""".stripMargin.trim
}
case object Multiply extends BinaryWord {
override def name: String = "mul"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.Multiply(t1, t2)
}
override def summary: String =
"""
|Compute a new time series where each interval has the value `(a * b)` where `a`
| and `b` are the corresponding intervals in the input time series.
""".stripMargin.trim
}
case object Divide extends BinaryWord {
override def name: String = "div"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.Divide(t1, t2)
}
override def summary: String =
"""
|Compute a new time series where each interval has the value `(a / b)` where `a`
| and `b` are the corresponding intervals in the input time series. If `a` and `b` are 0,
| then 0 will be returned for the interval. If only `b` is 0, then NaN will be returned as
| the value for the interval. Sample data:
|
|| :div | 0.5 | 0.0 | NaN | NaN | NaN |
||---------|-----|-----|-----|-----|-----|
|| Input 1 | 1.0 | 0.0 | 1.0 | 1.0 | NaN |
|| Input 2 | 2.0 | 0.0 | 0.0 | NaN | NaN |
|
|Use the [fdiv](math-fdiv) operator to get strict floating point behavior.
""".stripMargin.trim
}
case object GreaterThan extends BinaryWord {
override def name: String = "gt"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.GreaterThan(t1, t2)
}
override def summary: String =
"""
|Compute a new time series where each interval has the value `(a > b)` where `a`
| and `b` are the corresponding intervals in the input time series.
""".stripMargin.trim
}
case object GreaterThanEqual extends BinaryWord {
override def name: String = "ge"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.GreaterThanEqual(t1, t2)
}
override def summary: String =
"""
|Compute a new time series where each interval has the value `(a >= b)` where `a`
| and `b` are the corresponding intervals in the input time series.
""".stripMargin.trim
}
case object LessThan extends BinaryWord {
override def name: String = "lt"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.LessThan(t1, t2)
}
override def summary: String =
"""
|Compute a new time series where each interval has the value `(a < b)` where `a`
| and `b` are the corresponding intervals in the input time series.
""".stripMargin.trim
}
case object LessThanEqual extends BinaryWord {
override def name: String = "le"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.LessThanEqual(t1, t2)
}
override def summary: String =
"""
|Compute a new time series where each interval has the value `(a <= b)` where `a`
| and `b` are the corresponding intervals in the input time series.
""".stripMargin.trim
}
case object FAdd extends BinaryWord {
override def name: String = "fadd"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.FAdd(t1, t2)
}
override def summary: String =
"""
|Floating point addition operator. Compute a new time series where each interval has the
| value `(a + b)` where `a` and `b` are the corresponding intervals in the input time
| series.
|
|| :fadd | 3.0 | 0.0 | 1.0 | NaN | NaN |
||---------|-----|-----|-----|-----|-----|
|| Input 1 | 2.0 | 0.0 | 1.0 | 1.0 | NaN |
|| Input 2 | 1.0 | 0.0 | 0.0 | NaN | NaN |
|
|Note in many cases `NaN` will appear in data, e.g., if a node was brought up and started
|reporting in the middle of the time window for the graph. This can lead to confusing
|behavior if added to a line that does have data as the result will be `NaN`. Use the
|[add](math-add) operator to treat `NaN` values as zero for combining with other time
|series.
""".stripMargin.trim
}
case object FSubtract extends BinaryWord {
override def name: String = "fsub"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.FSubtract(t1, t2)
}
override def summary: String =
"""
|Floating point subtraction operator. Compute a new time series where each interval has the
| value `(a - b)` where `a` and `b` are the corresponding intervals in the input time
| series.
|
|| :fsub | 1.0 | 0.0 | 1.0 | NaN | NaN |
||---------|-----|-----|-----|-----|-----|
|| Input 1 | 2.0 | 0.0 | 1.0 | 1.0 | NaN |
|| Input 2 | 1.0 | 0.0 | 0.0 | NaN | NaN |
|
|Note in many cases `NaN` will appear in data, e.g., if a node was brought up and started
|reporting in the middle of the time window for the graph. This can lead to confusing
|behavior if added to a line that does have data as the result will be `NaN`. Use the
|[sub](math-sub) operator to treat `NaN` values as zero for combining with other time
|series.
""".stripMargin.trim
}
case object FMultiply extends BinaryWord {
override def name: String = "fmul"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.FMultiply(t1, t2)
}
override def summary: String =
"""
|Compute a new time series where each interval has the value `(a * b)` where `a`
| and `b` are the corresponding intervals in the input time series.
""".stripMargin.trim
}
case object FDivide extends BinaryWord {
override def name: String = "fdiv"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.FDivide(t1, t2)
}
override def summary: String =
"""
|Floating point division operator. Compute a new time series where each interval has the
| value `(a / b)` where `a` and `b` are the corresponding intervals in the input time
| series. If `b` is 0, then NaN will be returned as the value for the interval.
|
|| :fdiv | 2.0 | NaN | Inf | NaN | NaN |
||---------|-----|-----|-----|-----|-----|
|| Input 1 | 2.0 | 0.0 | 1.0 | 1.0 | NaN |
|| Input 2 | 1.0 | 0.0 | 0.0 | NaN | NaN |
|
|Note in many cases `NaN` will appear in data, e.g., if a node was brought up and started
|reporting in the middle of the time window for the graph. Zero divided by zero can also
|occur due to lack of activity in some windows. Unless you really need strict floating
|point behavior, use the [div](math-div) operator to get behavior more appropriate for
|graphs.
""".stripMargin.trim
}
case object And extends BinaryWord {
override def name: String = "and"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.And(t1, t2)
}
override def summary: String =
"""
|Compute a new time series where each interval has the value `(a AND b)` where `a`
| and `b` are the corresponding intervals in the input time series.
""".stripMargin.trim
}
case object Or extends BinaryWord {
override def name: String = "or"
def newInstance(t1: TimeSeriesExpr, t2: TimeSeriesExpr): TimeSeriesExpr = {
MathExpr.Or(t1, t2)
}
override def summary: String =
"""
|Compute a new time series where each interval has the value `(a OR b)` where `a`
| and `b` are the corresponding intervals in the input time series.
""".stripMargin.trim
}
sealed trait AggrWord extends SimpleWord {
protected def matcher: PartialFunction[List[Any], Boolean] = {
case (_: TimeSeriesExpr) :: _ => true
}
def newInstance(t: TimeSeriesExpr): TimeSeriesExpr
protected def executor: PartialFunction[List[Any], List[Any]] = {
case DataExpr.GroupBy(a: DataExpr.Sum, _) :: stack if this == Sum => a :: stack
case DataExpr.GroupBy(a: DataExpr.Min, _) :: stack if this == Min => a :: stack
case DataExpr.GroupBy(a: DataExpr.Max, _) :: stack if this == Max => a :: stack
case (a: DataExpr.AggregateFunction) :: stack if this != Count => a :: stack
case (t: TimeSeriesExpr) :: stack => newInstance(t) :: stack
}
override def signature: String = "TimeSeriesExpr -- TimeSeriesExpr"
override def examples: List[String] = List(
"name,sps,:eq,:sum",
"name,sps,:eq,:max,(,nf.cluster,),:by")
}
case object Sum extends AggrWord {
override def name: String = "sum"
def newInstance(t: TimeSeriesExpr): TimeSeriesExpr = MathExpr.Sum(t)
override def summary: String =
"""
|Compute the sum of all the time series that result from the previous expression.
""".stripMargin.trim
}
case object Count extends AggrWord {
override def name: String = "count"
def newInstance(t: TimeSeriesExpr): TimeSeriesExpr = MathExpr.Count(t)
override def summary: String =
"""
|Compute the count of all the time series that result from the previous expression.
""".stripMargin.trim
}
case object Min extends AggrWord {
override def name: String = "min"
def newInstance(t: TimeSeriesExpr): TimeSeriesExpr = MathExpr.Min(t)
override def summary: String =
"""
|Compute the min of all the time series that result from the previous expression.
""".stripMargin.trim
}
case object Max extends AggrWord {
override def name: String = "max"
def newInstance(t: TimeSeriesExpr): TimeSeriesExpr = MathExpr.Max(t)
override def summary: String =
"""
|Compute the max of all the time series that result from the previous expression.
""".stripMargin.trim
}
}
| rspieldenner/atlas | atlas-core/src/main/scala/com/netflix/atlas/core/model/MathVocabulary.scala | Scala | apache-2.0 | 20,824 |
/*
* Copyright 2014 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.server.middleware
import cats.effect.IO
import cats.effect.Ref
import com.comcast.ip4s._
import org.http4s.Request.Connection
import org.http4s._
import org.http4s.dsl.io._
import org.http4s.syntax.all._
import org.typelevel.vault.Vault
class ErrorActionSuite extends Http4sSuite {
val remote = Ipv4Address.fromBytes(192, 168, 0, 1)
def httpRoutes(error: Throwable = new RuntimeException()) =
HttpRoutes.of[IO] { case GET -> Root / "error" =>
IO.raiseError(error)
}
val req = Request[IO](
GET,
uri"/error",
attributes = Vault.empty.insert(
Request.Keys.ConnectionInfo,
Connection(
SocketAddress(Ipv4Address.fromBytes(127, 0, 0, 1), port"80"),
SocketAddress(remote, port"80"),
false,
),
),
)
def testApp(app: Ref[IO, Vector[String]] => HttpApp[IO], expected: Vector[String])(
req: Request[IO]
) =
(for {
logsRef <- Ref.of[IO, Vector[String]](Vector.empty)
_ <- app(logsRef).run(req).attempt
logs <- logsRef.get
} yield logs).assertEquals(expected)
def testHttpRoutes(
httpRoutes: Ref[IO, Vector[String]] => HttpRoutes[IO],
expected: Vector[String],
) =
testApp(logsRef => httpRoutes(logsRef).orNotFound, expected)(_)
test("run the given function when an error happens") {
testApp(
logsRef =>
ErrorAction(
httpRoutes().orNotFound,
(_: Request[IO], _) => logsRef.getAndUpdate(_ :+ "Error was handled").void,
),
Vector("Error was handled"),
)(req)
}
test("be created via httpApp constructor") {
testApp(
logsRef =>
ErrorAction.httpApp(
httpRoutes().orNotFound,
(_: Request[IO], _) => logsRef.getAndUpdate(_ :+ "Error was handled").void,
),
Vector("Error was handled"),
)(req)
}
test("be created via httRoutes constructor") {
testHttpRoutes(
logsRef =>
ErrorAction.httpRoutes(
httpRoutes(),
(_: Request[IO], _) => logsRef.getAndUpdate(_ :+ "Error was handled").void,
),
Vector("Error was handled"),
)(req)
}
test("provide prebaked error message in case of a runtime error") {
testApp(
logsRef =>
ErrorAction.log(
httpRoutes().orNotFound,
(_, _) => IO.unit,
(_, message) => logsRef.getAndUpdate(_ :+ message).void,
),
Vector(s"Error servicing request: GET /error from $remote"),
)(req)
}
test("provide prebaked error message in case of a message failure") {
testApp(
logsRef =>
ErrorAction.log(
httpRoutes(ParseFailure("some-erroneous-message", "error")).orNotFound,
(_, message) => logsRef.getAndUpdate(_ :+ message).void,
(_, _) => IO.unit,
),
Vector(s"Message failure handling request: GET /error from $remote"),
)(req)
}
test("should be created via httpApp.log constructor") {
testHttpRoutes(
logsRef =>
ErrorAction.httpRoutes.log(
httpRoutes(),
(_, _) => IO.unit,
(_, message) => logsRef.getAndUpdate(_ :+ message).void,
),
Vector(s"Error servicing request: GET /error from $remote"),
)(req)
}
}
| http4s/http4s | server/shared/src/test/scala/org/http4s/server/middleware/ErrorActionSuite.scala | Scala | apache-2.0 | 3,847 |
/**
* This file is part of mycollab-web.
*
* mycollab-web is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-web is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-web. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.module.project.view.parameters
import com.esofthead.mycollab.vaadin.mvp.ScreenData
/**
* @author MyCollab Ltd.
* @since 5.0.3
*/
object ProjectModuleScreenData {
class GotoModule(params: Array[String]) extends ScreenData[Array[String]](params) {}
}
| uniteddiversity/mycollab | mycollab-web/src/main/scala/com.esofthead.mycollab.module.project.view.parameters/ProjectModuleScreenData.scala | Scala | agpl-3.0 | 986 |
/**
* Magmanics Licensing. This web application allows for centralized control
* of client application activation, with optional configuration parameters
* to control licensable features, and storage of supplementary information
* about the client machine. Client applications may interface with this
* central server (for activation) using libraries licenced under an
* alternative licence.
*
* Copyright (C) 2010 James Baxter <j.w.baxter(at)gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.magmanics.licensing.endpoint
import com.magmanics.licensing.service.CustomerRepository
import com.magmanics.licensing.model.Customer
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.web.bind.annotation._
/**
* @author James Baxter - 16/08/2014.
*/
@RestController
@RequestMapping(Array("/customers"))
class CustomerEndpoint {
@Autowired
var customerRepository: CustomerRepository = _
/**
* Create a Customer, returning the persistent Customer (id populated)
*/
@RequestMapping(method = Array(RequestMethod.POST))
def create(@RequestBody customer: Customer): Customer = customerRepository.create(customer)
/**
* Get enabled Customers
*/
@RequestMapping(method = Array(RequestMethod.GET), params = Array("enabled"))
def getEnabled(@RequestParam enabled: Boolean): Set[Customer] = customerRepository.getEnabled(enabled)
/**
* Get all Customers within the system
*/
@RequestMapping(method = Array(RequestMethod.GET))
def get(): Set[Customer] = customerRepository.get()
/**
* Gets a Customer given the specified id
*/
@RequestMapping(method = Array(RequestMethod.GET), params = Array("id"))
def get(@RequestParam id: Long): Option[Customer] = customerRepository.get(id)
/**
* Update the given Customer
*/
@RequestMapping(method = Array(RequestMethod.PUT))
def update(@RequestBody customer: Customer) = customerRepository.update(customer)
}
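// Hedged usage notes (added for illustration, not part of the original file):
// the HTTP surface this controller exposes, assuming the default mapping:
//   POST /customers                with a Customer JSON body -> persisted Customer
//   GET  /customers                                          -> all customers
//   GET  /customers?enabled=true                             -> enabled customers only
//   GET  /customers?id=42                                    -> Option[Customer]
//   PUT  /customers                with a Customer JSON body -> updates the Customer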
| manicmonkey/licensing | Licensing-Server/src/main/scala/com/magmanics/licensing/endpoint/CustomerEndpoint.scala | Scala | gpl-3.0 | 2,576 |
package common.models
import anorm.NamedParameter
import common.api.PermissionLevel.PermissionLevel
import common.api.{Model, PermissionLevel}
case class CustomCommand(guildId: Long, commandName: String, permissionLevel: Int = PermissionLevel.EVERYONE.id, commandContent: String = "") extends Model {
override val namedParameters: Seq[NamedParameter] = Seq(
'guild_id -> guildId,
'command_name -> commandName,
'permission_level -> permissionLevel,
'command_content -> commandContent
)
val getPermissionLevel: PermissionLevel = PermissionLevel(permissionLevel)
}
| Cobbleopolis/MonsterTruckBot | modules/common/app/common/models/CustomCommand.scala | Scala | mit | 611 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.sbt
object Colors {
import play.runsupport.{ Colors => RunColors }
lazy val isANSISupported = RunColors.isANSISupported
def red(str: String): String = RunColors.red(str)
def blue(str: String): String = RunColors.blue(str)
def cyan(str: String): String = RunColors.cyan(str)
def green(str: String): String = RunColors.green(str)
def magenta(str: String): String = RunColors.magenta(str)
def white(str: String): String = RunColors.white(str)
def black(str: String): String = RunColors.black(str)
def yellow(str: String): String = RunColors.yellow(str)
def bold(str: String): String = RunColors.bold(str)
}
| mkurz/playframework | dev-mode/sbt-plugin/src/main/scala/play/sbt/Colors.scala | Scala | apache-2.0 | 730 |
package org.scalatra.test
import org.eclipse.jetty.server.Server
import org.eclipse.jetty.servlet.ServletContextHandler
trait EmbeddedJettyContainer extends JettyContainer {
/**
* Sets the port to listen on. 0 means listen on any available port.
*/
def port: Int = 0
/**
* The port of the currently running Jetty. May differ from port if port is 0.
*
* @return Some port if Jetty is currently listening, or None if it is not.
*/
def localPort: Option[Int] = server.getConnectors.headOption map { _.getLocalPort }
lazy val server = new Server(port)
lazy val servletContextHandler = {
val handler = new ServletContextHandler(ServletContextHandler.SESSIONS)
handler.setContextPath("/")
handler
}
def start(): Unit = {
server.setHandler(servletContextHandler)
server.start()
}
def stop(): Unit = server.stop()
def baseUrl: String =
server.getConnectors.headOption match {
case Some(conn) =>
val host = Option(conn.getHost) getOrElse "localhost"
val port = conn.getLocalPort
"http://%s:%d".format(host, port)
case None =>
sys.error("can't calculate base URL: no connector")
}
}
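// Usage sketch (hypothetical suite): with port = 0 Jetty picks an ephemeral port, so read
// the effective address back through localPort/baseUrl after start():
//   class MyServletSpec extends EmbeddedJettyContainer {
//     start()
//     val url = baseUrl // e.g. "http://localhost:49152"
//     stop()
//   }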
| louk/scalatra | test/src/main/scala/org/scalatra/test/EmbeddedJettyContainer.scala | Scala | bsd-2-clause | 1,198 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.charts.stats
import java.io.InputStream
import java.nio.ByteBuffer
import scala.collection.{ breakOut, mutable }
import scala.io.Source
import io.gatling.charts.stats.buffers.{ CountsBuffer, GeneralStatsBuffer, PercentilesBuffers }
import io.gatling.commons.stats._
import io.gatling.commons.stats.assertion.Assertion
import io.gatling.commons.util.PathHelper._
import io.gatling.commons.util.StringHelper._
import io.gatling.core.config.GatlingConfiguration
import io.gatling.core.config.GatlingFiles.simulationLogDirectory
import io.gatling.core.stats._
import io.gatling.core.stats.writer._
import boopickle.Default._
import com.typesafe.scalalogging.StrictLogging
import jodd.util.Base64
object LogFileReader {
val LogStep = 100000
val SecMillisecRatio = 1000.0
  val SimulationFilesNamePattern = """.*\.log"""
}
class LogFileReader(runUuid: String)(implicit configuration: GatlingConfiguration) extends GeneralStatsSource with StrictLogging {
import LogFileReader._
println("Parsing log file(s)...")
val inputFiles = simulationLogDirectory(runUuid, create = false).files
.collect { case file if file.filename.matches(SimulationFilesNamePattern) => file.path }
logger.info(s"Collected $inputFiles from $runUuid")
require(inputFiles.nonEmpty, "simulation directory doesn't contain any log file.")
private def parseInputFiles[T](f: Iterator[String] => T): T = {
def multipleFileIterator(streams: Seq[InputStream]): Iterator[String] =
streams.map(Source.fromInputStream(_)(configuration.core.codec).getLines()).reduce((first, second) => first ++ second)
val streams = inputFiles.map(_.inputStream)
try f(multipleFileIterator(streams))
finally streams.foreach(_.close)
}
case class FirstPassData(runStart: Long, runEnd: Long, runMessage: RunMessage, assertions: List[Assertion])
private def firstPass(records: Iterator[String]): FirstPassData = {
logger.info("First pass")
var count = 0
var runStart = Long.MaxValue
var runEnd = Long.MinValue
def updateRunLimits(eventStart: Long, eventEnd: Long): Unit = {
runStart = math.min(runStart, eventStart)
runEnd = math.max(runEnd, eventEnd)
}
val runMessages = mutable.ListBuffer.empty[RunMessage]
val assertions = mutable.LinkedHashSet.empty[Assertion]
records.foreach { line =>
count += 1
if (count % LogStep == 0) logger.info(s"First pass, read $count lines")
line.split(LogFileDataWriter.Separator) match {
case RawRequestRecord(array) =>
updateRunLimits(array(5).toLong, array(6).toLong)
case RawUserRecord(array) =>
updateRunLimits(array(4).toLong, array(5).toLong)
case RawGroupRecord(array) =>
updateRunLimits(array(4).toLong, array(5).toLong)
case RawRunRecord(array) =>
runMessages += RunMessage(array(1), array(2).trimToOption, array(3), array(4).toLong, array(5).trim)
case RawAssertionRecord(array) =>
val assertion: Assertion = {
val base64String = array(1)
val bytes = Base64.decode(base64String)
Unpickle[Assertion].fromBytes(ByteBuffer.wrap(bytes))
}
assertions += assertion
case RawErrorRecord(array) =>
case _ =>
logger.debug(s"Record broken on line $count: $line")
}
}
logger.info(s"First pass done: read $count lines")
FirstPassData(runStart, runEnd, runMessages.head, assertions.toList)
}
val FirstPassData(runStart, runEnd, runMessage, assertions) = parseInputFiles(firstPass)
val step = StatsHelper.step(math.floor(runStart / SecMillisecRatio).toInt, math.ceil(runEnd / SecMillisecRatio).toInt, configuration.charting.maxPlotsPerSeries) * SecMillisecRatio
val buckets = StatsHelper.buckets(0, runEnd - runStart, step)
val bucketFunction = StatsHelper.timeToBucketNumber(runStart, step, buckets.length)
private def secondPass(records: Iterator[String]): ResultsHolder = {
logger.info("Second pass")
val resultsHolder = new ResultsHolder(runStart, runEnd, buckets)
var count = 0
val requestRecordParser = new RequestRecordParser(bucketFunction)
val groupRecordParser = new GroupRecordParser(bucketFunction)
records
.foreach { line =>
count += 1
if (count % LogStep == 0) logger.info(s"Second pass, read $count lines")
line.split(LogFileDataWriter.Separator) match {
case requestRecordParser(record) => resultsHolder.addRequestRecord(record)
case groupRecordParser(record) => resultsHolder.addGroupRecord(record)
case UserRecordParser(record) => resultsHolder.addUserRecord(record)
case ErrorRecordParser(record) => resultsHolder.addErrorRecord(record)
case _ =>
}
}
resultsHolder.endOrphanUserRecords()
logger.info(s"Second pass: read $count lines")
resultsHolder
}
val resultsHolder = parseInputFiles(secondPass)
println("Parsing log file(s) done")
val statsPaths: List[StatsPath] =
resultsHolder.groupAndRequestsNameBuffer.map.toList.map {
case (path @ RequestStatsPath(request, group), time) => (path, (time, group.map(_.hierarchy.size + 1).getOrElse(0)))
case (path @ GroupStatsPath(group), time) => (path, (time, group.hierarchy.size))
case _ => throw new UnsupportedOperationException
}.sortBy(_._2).map(_._1)
def requestNames: List[String] = statsPaths.collect { case RequestStatsPath(request, _) => request }
def scenarioNames: List[String] = resultsHolder.scenarioNameBuffer
.map
.toList
.sortBy(_._2)
.map(_._1)
def numberOfActiveSessionsPerSecond(scenarioName: Option[String]): Seq[IntVsTimePlot] = resultsHolder
.getSessionDeltaPerSecBuffers(scenarioName)
.distribution
private def toNumberPerSec(value: Int) = (value / step * SecMillisecRatio).round.toInt
private def countBuffer2IntVsTimePlots(buffer: CountsBuffer): Seq[CountsVsTimePlot] =
buffer
.distribution
.map(plot => plot.copy(oks = toNumberPerSec(plot.oks), kos = toNumberPerSec(plot.kos)))
.toSeq
.sortBy(_.time)
def numberOfRequestsPerSecond(requestName: Option[String], group: Option[Group]): Seq[CountsVsTimePlot] =
countBuffer2IntVsTimePlots(resultsHolder.getRequestsPerSecBuffer(requestName, group))
def numberOfResponsesPerSecond(requestName: Option[String], group: Option[Group]): Seq[CountsVsTimePlot] =
countBuffer2IntVsTimePlots(resultsHolder.getResponsesPerSecBuffer(requestName, group))
private def distribution(maxPlots: Int, allBuffer: GeneralStatsBuffer, okBuffers: GeneralStatsBuffer, koBuffer: GeneralStatsBuffer): (Seq[PercentVsTimePlot], Seq[PercentVsTimePlot]) = {
    // get min and max for request/all status
val size = allBuffer.stats.count
val ok = okBuffers.distribution
val ko = koBuffer.distribution
val min = allBuffer.stats.min
val max = allBuffer.stats.max
def percent(s: Int) = s * 100.0 / size
if (max - min <= maxPlots) {
// use exact values
def plotsToPercents(plots: Iterable[IntVsTimePlot]) = plots.map(plot => PercentVsTimePlot(plot.time, percent(plot.value))).toSeq.sortBy(_.time)
(plotsToPercents(ok), plotsToPercents(ko))
} else {
// use buckets
val step = StatsHelper.step(min, max, maxPlots)
val buckets = StatsHelper.buckets(min, max, step)
val halfStep = step / 2
val bucketFunction = (t: Int) => {
val value = t min (max - 1)
(value - (value - min) % step + halfStep).round.toInt
}
def process(buffer: Iterable[IntVsTimePlot]): Seq[PercentVsTimePlot] = {
val bucketsWithValues: Map[Int, Double] = buffer
.map(record => (bucketFunction(record.time), record))
.groupBy(_._1)
.map {
case (responseTimeBucket, recordList) =>
val bucketSize = recordList.foldLeft(0) {
(partialSize, record) => partialSize + record._2.value
}
(responseTimeBucket, percent(bucketSize))
}(breakOut)
buckets.map {
bucket => PercentVsTimePlot(bucket, bucketsWithValues.getOrElse(bucket, 0.0))
}
}
(process(ok), process(ko))
}
}
def responseTimeDistribution(maxPlots: Int, requestName: Option[String], group: Option[Group]): (Seq[PercentVsTimePlot], Seq[PercentVsTimePlot]) =
distribution(
maxPlots,
resultsHolder.getRequestGeneralStatsBuffers(requestName, group, None),
resultsHolder.getRequestGeneralStatsBuffers(requestName, group, Some(OK)),
resultsHolder.getRequestGeneralStatsBuffers(requestName, group, Some(KO))
)
def groupCumulatedResponseTimeDistribution(maxPlots: Int, group: Group): (Seq[PercentVsTimePlot], Seq[PercentVsTimePlot]) =
distribution(
maxPlots,
resultsHolder.getGroupCumulatedResponseTimeGeneralStatsBuffers(group, None),
resultsHolder.getGroupCumulatedResponseTimeGeneralStatsBuffers(group, Some(OK)),
resultsHolder.getGroupCumulatedResponseTimeGeneralStatsBuffers(group, Some(KO))
)
def groupDurationDistribution(maxPlots: Int, group: Group): (Seq[PercentVsTimePlot], Seq[PercentVsTimePlot]) =
distribution(
maxPlots,
resultsHolder.getGroupDurationGeneralStatsBuffers(group, None),
resultsHolder.getGroupDurationGeneralStatsBuffers(group, Some(OK)),
resultsHolder.getGroupDurationGeneralStatsBuffers(group, Some(KO))
)
def requestGeneralStats(requestName: Option[String], group: Option[Group], status: Option[Status]): GeneralStats = resultsHolder
.getRequestGeneralStatsBuffers(requestName, group, status)
.stats
def groupCumulatedResponseTimeGeneralStats(group: Group, status: Option[Status]): GeneralStats = resultsHolder
.getGroupCumulatedResponseTimeGeneralStatsBuffers(group, status)
.stats
def groupDurationGeneralStats(group: Group, status: Option[Status]): GeneralStats = resultsHolder
.getGroupDurationGeneralStatsBuffers(group, status)
.stats
def numberOfRequestInResponseTimeRange(requestName: Option[String], group: Option[Group]): Seq[(String, Int)] = {
val counts = resultsHolder.getResponseTimeRangeBuffers(requestName, group)
val lowerBound = configuration.charting.indicators.lowerBound
val higherBound = configuration.charting.indicators.higherBound
List(
(s"t < $lowerBound ms", counts.low),
(s"$lowerBound ms < t < $higherBound ms", counts.middle),
(s"t > $higherBound ms", counts.high),
("failed", counts.ko)
)
}
def responseTimePercentilesOverTime(status: Status, requestName: Option[String], group: Option[Group]): Iterable[PercentilesVsTimePlot] =
resultsHolder.getResponseTimePercentilesBuffers(requestName, group, status).percentiles
private def timeAgainstGlobalNumberOfRequestsPerSec(buffer: PercentilesBuffers, status: Status, requestName: String, group: Option[Group]): Seq[IntVsTimePlot] = {
val globalCountsByBucket = resultsHolder.getRequestsPerSecBuffer(None, None).counts
buffer.digests.view.zipWithIndex
.collect {
case (Some(digest), bucketNumber) =>
val count = globalCountsByBucket(bucketNumber)
IntVsTimePlot(toNumberPerSec(count.total), digest.quantile(0.95).toInt)
}
.sortBy(_.time)
}
def responseTimeAgainstGlobalNumberOfRequestsPerSec(status: Status, requestName: String, group: Option[Group]): Seq[IntVsTimePlot] = {
val percentilesBuffer = resultsHolder.getResponseTimePercentilesBuffers(Some(requestName), group, status)
timeAgainstGlobalNumberOfRequestsPerSec(percentilesBuffer, status, requestName, group)
}
def groupCumulatedResponseTimePercentilesOverTime(status: Status, group: Group): Iterable[PercentilesVsTimePlot] =
resultsHolder.getGroupCumulatedResponseTimePercentilesBuffers(group, status).percentiles
def groupDurationPercentilesOverTime(status: Status, group: Group): Iterable[PercentilesVsTimePlot] =
resultsHolder.getGroupDurationPercentilesBuffers(group, status).percentiles
def errors(requestName: Option[String], group: Option[Group]): Seq[ErrorStats] = {
val buff = resultsHolder.getErrorsBuffers(requestName, group)
val total = buff.foldLeft(0)(_ + _._2)
buff.toSeq.map { case (name, count) => ErrorStats(name, count, total) }.sortWith(_.count > _.count)
}
}
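// Usage sketch (hypothetical run id; the configuration-loading call is an assumption):
//   implicit val configuration: GatlingConfiguration = GatlingConfiguration.load()
//   val reader = new LogFileReader("my-run-20170101")
//   reader.requestNames                              // every request seen in the logs
//   reader.requestGeneralStats(None, None, Some(OK)) // global stats for successful requests
// Note that parsing happens eagerly in the constructor (two passes over every log file).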
| timve/gatling | gatling-charts/src/main/scala/io/gatling/charts/stats/LogFileReader.scala | Scala | apache-2.0 | 13,126 |
// Copyright 2014 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package au.com.cba.omnia.ebenezer.test
import scalaz.Scalaz._
import au.com.cba.omnia.thermometer.hive.ThermometerHiveSpec
import au.com.cba.omnia.ebenezer.scrooge.hive.Hive
object HiveTestSupportSpec
extends ThermometerHiveSpec
with HiveTestSupport
with HiveMatchers { def is = s2"""
Set up an existing Hive Table
=============================
Single data file test, with columns given as DDL $ddlFilePipeline
Partitioned data test, with columns given as DDL $ddlPartitionedPipeline
Single data file test, with columns given as Thrift $thriftFilePipeline
Partitioned data test, with columns given as Thrift $thriftPartitionedPipeline
Default delimiter test, with columns given as Thrift $thriftFileDefaultDelimitedPipeline
User defined delimiter test, with columns given as Thrift $thriftFileDelimitedPipeline
"""
def ddlFilePipeline = {
val result = for {
_ <- setupHiveTestTableDDL(
"testdb", "testtable",
List(("id", "INT"), ("name", "STRING")),
List(),
"/hive-test/testdata"
)
n <- Hive.query("SELECT COUNT(*) FROM testdb.testtable")
} yield n
result must beValue(List("4"))
}
def ddlPartitionedPipeline = {
val result = for {
_ <- setupHiveTestTableDDL(
"testdb2", "testtable",
List(("id", "INT"), ("name", "STRING")),
List(("year", "STRING"), ("month", "STRING"), ("day", "STRING")),
"/hive-test/partitions/"
)
n <- Hive.query("SELECT COUNT(*) FROM testdb2.testtable")
} yield n
result must beValue(List("4"))
}
def thriftFilePipeline = {
val result = for {
_ <- setupHiveTestTable[TestHive](
"testdb3", "testtable",
List(),
"/hive-test/testdata"
)
n <- Hive.query("SELECT COUNT(*) FROM testdb3.testtable")
} yield n
result must beValue(List("4"))
}
def thriftPartitionedPipeline = {
val result = for {
_ <- setupHiveTestTable[TestHive](
"testdb4", "testtable",
List(("year", "string"), ("month", "string"), ("day", "string")),
"/hive-test/partitions/"
)
n <- Hive.query("SELECT COUNT(*) FROM testdb4.testtable")
} yield n
result must beValue(List("4"))
}
def thriftFileDefaultDelimitedPipeline = {
val result = for {
_ <- setupHiveTestTable[TestHive]("testdb5", "testtable", List(), "/hive-test/testdata")
n <- Hive.query("SELECT name FROM testdb5.testtable")
} yield n
result must beValue(List("red", "green", "yellow", "orange"))
}
def thriftFileDelimitedPipeline = {
val result = for {
_ <- setupHiveTestTable[TestHive]("testdb6", "testtable", List(), "/hive-test/testdata1", ",")
n <- Hive.query("SELECT name FROM testdb6.testtable")
} yield n
result must beValue(List("red", "green", "yellow", "orange"))
}
}
| shyam334/ebenezer | test/src/test/scala/au/com/cba/omnia/ebenezer/test/HiveTestSupportSpec.scala | Scala | apache-2.0 | 3,616 |
package org.http4s.server.middleware
import cats.effect._
import org.http4s.{Http4sSpec, HttpRoutes, Request, Status}
import org.http4s.server.{MockRoute, Router}
import org.http4s.Uri.uri
class AutoSlashSpec extends Http4sSpec {
val route = MockRoute.route()
val pingRoutes = {
import org.http4s.dsl.io._
HttpRoutes.of[IO] {
case GET -> Root / "ping" => Ok()
}
}
"AutoSlash" should {
"Auto remove a trailing slash" in {
val req = Request[IO](uri = uri("/ping/"))
route.orNotFound(req) must returnStatus(Status.NotFound)
AutoSlash(route).orNotFound(req) must returnStatus(Status.Ok)
}
"Match a route defined with a slash" in {
AutoSlash(route).orNotFound(Request[IO](uri = uri("/withslash"))) must returnStatus(Status.Ok)
AutoSlash(route).orNotFound(Request[IO](uri = uri("/withslash/"))) must returnStatus(
Status.Accepted)
}
"Respect an absent trailing slash" in {
val req = Request[IO](uri = uri("/ping"))
route.orNotFound(req) must returnStatus(Status.Ok)
AutoSlash(route).orNotFound(req) must returnStatus(Status.Ok)
}
"Not crash on empty path" in {
val req = Request[IO](uri = uri(""))
AutoSlash(route).orNotFound(req) must returnStatus(Status.NotFound)
}
"Work when nested in Router" in {
// See https://github.com/http4s/http4s/issues/1378
val router = Router("/public" -> AutoSlash(pingRoutes))
router.orNotFound(Request[IO](uri = uri("/public/ping"))) must returnStatus(Status.Ok)
router.orNotFound(Request[IO](uri = uri("/public/ping/"))) must returnStatus(Status.Ok)
}
"Work when Router is nested in AutoSlash" in {
// See https://github.com/http4s/http4s/issues/1947
val router = AutoSlash(Router("/public" -> pingRoutes))
router.orNotFound(Request[IO](uri = uri("/public/ping"))) must returnStatus(Status.Ok)
router.orNotFound(Request[IO](uri = uri("/public/ping/"))) must returnStatus(Status.Ok)
}
}
}
| aeons/http4s | server/src/test/scala/org/http4s/server/middleware/AutoSlashSpec.scala | Scala | apache-2.0 | 2,019 |
package org.jetbrains.plugins.scala.lang.parser.parsing.base
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.ErrMsg
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
/**
* @author adkozlov
*/
/*
 * AccessQualifier ::= '[' (id | 'this') ']'
*/
object AccessQualifier {
def parse(builder: ScalaPsiBuilder): Boolean = builder.getTokenType match {
case ScalaTokenTypes.tLSQBRACKET =>
builder.advanceLexer() // Ate [
builder.disableNewlines()
builder.getTokenType match {
case ScalaTokenTypes.tIDENTIFIER | ScalaTokenTypes.kTHIS =>
builder.advanceLexer() // Ate identifier or this
case _ => builder.error(ErrMsg("identifier.expected"))
}
builder.getTokenType match {
case ScalaTokenTypes.tRSQBRACKET => builder.advanceLexer() // Ate ]
case _ => builder.error(ErrMsg("rsqbracket.expected"))
}
builder.restoreNewlinesState()
true
case _ => false
}
}
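// Examples of the qualifier this parses (the bracketed part of an access modifier):
//   private[this] object Foo   -> '[' kTHIS ']'
//   protected[impl] def bar()  -> '[' tIDENTIFIER ']'
// A missing identifier or ']' is reported as a parse error, but the newline state is
// always restored and parse still returns true once '[' has been consumed.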
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/parser/parsing/base/AccessQualifier.scala | Scala | apache-2.0 | 1,042 |
package controllers
import java.util.UUID
import javax.inject.Inject
import com.mohiva.play.silhouette.api._
import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository
import com.mohiva.play.silhouette.api.util.{PasswordHasherRegistry, PasswordInfo}
import com.mohiva.play.silhouette.impl.providers.CredentialsProvider
import forms.ResetPasswordForm
import models.services.{AuthTokenService, UserService}
import play.api.Logger
import play.api.i18n.{I18nSupport, Messages}
import play.api.libs.json.Json
import play.api.mvc.{AbstractController, ControllerComponents}
import utils.auth.DefaultEnv
import scala.concurrent.{ExecutionContext, Future}
/**
* The `Reset Password` controller.
*
* @param components ControllerComponents
* @param silhouette The Silhouette stack.
* @param userService The user service implementation.
* @param authInfoRepository The auth info repository.
* @param passwordHasherRegistry The password hasher registry.
* @param authTokenService The auth token service implementation.
*/
class ResetPasswordController @Inject() (
components: ControllerComponents,
silhouette: Silhouette[DefaultEnv],
userService: UserService,
authInfoRepository: AuthInfoRepository,
passwordHasherRegistry: PasswordHasherRegistry,
authTokenService: AuthTokenService)(
implicit val ex: ExecutionContext)
extends AbstractController(components) with I18nSupport {
val logger: Logger = Logger(this.getClass())
/**
* Resets the password.
*
* @param token The token to identify a user.
* @return The result to display.
*/
def submit(token: UUID) = silhouette.UnsecuredAction.async { implicit request =>
authTokenService.validate(token).flatMap { maybeToken =>
logger.info(s"Token returned: $maybeToken")
maybeToken match {
case Some(authToken) =>
ResetPasswordForm.form.bindFromRequest.fold(
form => Future.successful(BadRequest(Json.obj("errors" -> form.errors.map {
_.messages.mkString(", ")
}))),
password => userService.retrieveById(authToken.userID).flatMap { maybeUser =>
logger.info(s"Maybe user returned: $maybeUser")
maybeUser match {
case Some(user) if user.profiles.find(_.providerID == CredentialsProvider.ID).isDefined =>
val passwordInfo = passwordHasherRegistry.current.hash(password)
val loginInfo = user.profiles.find(_.providerID == CredentialsProvider.ID).get
authInfoRepository.update[PasswordInfo](loginInfo, passwordInfo).map { _ =>
Ok
}
case _ => Future.successful(BadRequest(Json.obj("error" -> Messages("invalid.reset.link"))))
}
}
)
case None => Future.successful(BadRequest(Json.obj("error" -> Messages("invalid.reset.link"))))
}
}
}
}
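// Hypothetical conf/routes wiring for the endpoint above (the path name is illustrative):
//   POST /password/reset/:token controllers.ResetPasswordController.submit(token: java.util.UUID)
// A valid, unexpired token plus a well-formed form yields 200; anything else a 400 with JSON errors.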
| epot/Gifter | app/controllers/ResetPasswordController.scala | Scala | mit | 3,095 |
package controllers.backend
import java.util.Date
import org.mockito.ArgumentMatchers
import org.specs2.mock.Mockito
import scala.concurrent.ExecutionContext.Implicits._
import scala.concurrent.Future
import com.overviewdocs.query.Field
import com.overviewdocs.searchindex.SearchWarning
import models.{InMemorySelection,Selection,SelectionRequest,SelectionWarning}
class NullSelectionBackendSpec extends NullBackendSpecification with Mockito {
trait BaseScope extends NullScope {
def resultIds: Vector[Long] = Vector.empty
def warnings: List[SelectionWarning] = Nil
val dsBackend = mock[DocumentSelectionBackend]
val backend = new NullSelectionBackend(dsBackend)
dsBackend.createSelection(any[SelectionRequest], any[Double => Unit]) returns Future.successful(InMemorySelection(resultIds, warnings))
val userEmail: String = "[email protected]"
val documentSetId: Long = 1L
}
"NullSelectionBackend" should {
"#create" should {
trait CreateScope extends BaseScope {
lazy val request = SelectionRequest(documentSetId)
def create = await(backend.create(userEmail, request, _ => ()))
lazy val result = create
}
"return a Selection with the returned document IDs" in new CreateScope {
override def resultIds = Vector(1L, 2L, 3L)
await(result.getAllDocumentIds) must beEqualTo(Vector(1L, 2L, 3L))
}
"return warnings" in new CreateScope {
override def warnings = SelectionWarning.SearchIndexWarning(SearchWarning.TooManyExpansions(Field.Text, "foo", 2)) :: Nil
result.warnings must beEqualTo(warnings)
}
"return a different Selection each time" in new CreateScope {
create
create
there were two(dsBackend).createSelection(any, any)
}
"pass the SelectionRequest to the dsBackend" in new CreateScope {
create
there was one(dsBackend).createSelection(ArgumentMatchers.eq(request), any[Double => Unit])
}
"pass a failure back" in new CreateScope {
val t = new Throwable("moo")
dsBackend.createSelection(any[SelectionRequest], any[Double => Unit]) returns Future.failed(t)
create must throwA[Throwable](message="moo")
}
}
}
}
| overview/overview-server | web/test/controllers/backend/NullSelectionBackendSpec.scala | Scala | agpl-3.0 | 2,257 |
package com.jasperdenkers.play.auth
import org.scalatestplus.play._
class CapabilitySpec extends PlaySpec {
"Capabilities" must {
object TokenA extends Token
object TokenB extends Token
object CapabilityByTokenA extends CapabilityByToken {
def predicate = _ == TokenA
}
object CapabilityByTokenB extends CapabilityByToken {
def predicate = _ == TokenB
}
"be composable into conjuctions with the implicit && operator" in {
(CapabilityByTokenA && CapabilityByTokenB) mustBe Conjunction(CapabilityByTokenA, CapabilityByTokenB)
}
"be composable into disjunctions with the implicit || operator" in {
(CapabilityByTokenA || CapabilityByTokenB) mustBe Disjunction(CapabilityByTokenA, CapabilityByTokenB)
}
}
}
| jasperdenkers/play-auth | core/test/com/jasperdenkers/play/auth/CapabilitySpec.scala | Scala | mit | 780 |
package pt.up.fe.iart.proj1.solver
import pt.up.fe.iart.proj1.collections.QueueLike
import scala.annotation.tailrec
import scala.collection.mutable
import scala.collection.immutable
/* Possible search results that various search algorithms can return */
sealed abstract class SearchResult[+A]
case class Success[A](result: A, numberExpanded: Int) extends SearchResult[A]
case object Failure extends SearchResult[Nothing]
case object CutOff extends SearchResult[Nothing]
object GraphSearch {
def apply[S, A](problem: Problem[S, A], frontier: QueueLike[Node[S, A]]) = {
@tailrec
def loop(frontier: QueueLike[Node[S, A]], explored: immutable.HashSet[S], numberExpanded: Int): SearchResult[List[A]] = {
val n = frontier.removeFirst()
n match {
case None => Failure
case Some(node) if problem.goalTest(node.state) =>
Success(node.solution, numberExpanded + 1)
case Some(node) =>
                    if (explored.contains(node.state))
loop(frontier, explored, numberExpanded)
else {
val acts = problem.actions(node.state)
acts.foreach((a: A) => frontier.insert(Node.childNode(problem, node, a)))
loop(frontier, explored + node.state, numberExpanded + 1)
}
}
}
loop(frontier.insert(Node(problem.initialState)), scala.collection.immutable.HashSet.empty, 0)
}
}
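// Usage sketch: the queue discipline of `frontier` selects the strategy, as the wrappers
// below show: FIFO gives breadth-first, LIFO depth-first, and a priority queue ordered by
// f(n) gives uniform-cost/greedy/A*. For some concrete myProblem: Problem[S, A]:
//   GraphSearch(myProblem, new mutable.Queue[Node[S, A]]()) match {
//     case Success(actions, expanded) => println(s"$expanded nodes expanded: $actions")
//     case Failure | CutOff           => println("no solution")
//   }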
object AStarSearch {
def apply[S, A](problem: Problem[S, A]): SearchResult[List[A]] =
GraphSearch(problem, new mutable.PriorityQueue[Node[S, A]]()(Ordering.by[Node[S, A], Double](AStarHeuristic(_, problem)).reverse))
private def AStarHeuristic[S, A](node: Node[S, A], problem: Problem[S, A]): Double = {
val g = node.pathCost
val h = problem.estimatedCostToGoal(node.state)
g + h //f(n) = g(n) + h(n)
}
}
object GreedySearch {
def apply[S, A](problem: Problem[S, A]): SearchResult[List[A]] =
GraphSearch(problem, new mutable.PriorityQueue[Node[S, A]]()(Ordering.by[Node[S, A], Double](GreedySearchHeuristic(_, problem)).reverse))
private def GreedySearchHeuristic[S, A](node: Node[S, A], problem: Problem[S, A]) =
problem.estimatedCostToGoal(node.state) //f(n) = h(n)
}
object UniformCostSearch {
def apply[S, A](problem: Problem[S, A]): SearchResult[List[A]] =
GraphSearch(problem, new mutable.PriorityQueue[Node[S, A]]()(Ordering.by[Node[S, A], Double](UniformCostHeuristic(_, problem)).reverse))
private def UniformCostHeuristic[S, A](node: Node[S, A], problem: Problem[S, A]) =
node.pathCost //f(n) = g(n)
}
object BreadthFirstSearch {
def apply[S, A](problem: Problem[S, A]): SearchResult[List[A]] =
GraphSearch(problem, new mutable.Queue[Node[S, A]]())
}
object DepthFirstSearch {
def apply[S, A](problem: Problem[S, A]): SearchResult[List[A]] =
GraphSearch(problem, new mutable.Stack[Node[S, A]]())
}
object DepthLimitedSearch {
def apply[S, A](problem: Problem[S, A], limit: Int) =
recursiveDLS(Node[S, A](problem.initialState), problem, limit, 0)
private def recursiveDLS[S, A](node: Node[S, A], problem: Problem[S, A], limit: Int, numberExpanded: Int): SearchResult[List[A]] = {
if (problem.goalTest(node.state))
Success(node.solution, numberExpanded + 1)
else if (node.depth == limit)
CutOff
else {
def loop(nodes: List[Node[S, A]], cutoffOccured: Boolean): SearchResult[List[A]] =
nodes match {
case Nil if cutoffOccured => CutOff
case Nil => Failure
case n :: rest =>
recursiveDLS(n, problem, limit, numberExpanded + 1) match {
case Failure => loop(rest, cutoffOccured)
case CutOff => loop(rest, cutoffOccured = true)
case ret => ret
}
}
loop(problem.actions(node.state).map(Node.childNode(problem, node, _)), cutoffOccured = false)
}
}
}
object IterativeDeepeningSearch {
def apply[S, A](problem: Problem[S, A]) = {
def loop(depth: Int): SearchResult[List[A]] =
DepthLimitedSearch(problem, depth) match {
case CutOff => loop(depth + 1)
case res => res
}
loop(0)
}
} | migulorama/feup-iart-2014 | src/main/scala/pt/up/fe/iart/proj1/solver/Search.scala | Scala | mit | 4,555 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.doperations.examples
import io.deepsense.deeplang.doperations.spark.wrappers.transformers.Binarize
class BinarizeExample extends AbstractOperationExample[Binarize] {
override def dOperation: Binarize = {
val op = new Binarize()
op.transformer
.setSingleColumn("hum", "hum_bin")
.setThreshold(0.5)
op.set(op.transformer.extractParamMap())
}
override def fileNames: Seq[String] = Seq("example_datetime_windspeed_hum_temp")
}
| deepsense-io/seahorse-workflow-executor | deeplang/src/it/scala/io/deepsense/deeplang/doperations/examples/BinarizeExample.scala | Scala | apache-2.0 | 1,085 |
package com.featurefm.riversong
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.testkit.ScalatestRouteTest
import com.featurefm.riversong.message.Message
import com.featurefm.riversong.routes.LifecycleRouting
import com.typesafe.config.ConfigFactory
import org.json4s.JsonAST.JObject
import org.scalatest.{Matchers, FlatSpec}
import org.json4s.jackson.JsonMethods._
/**
* Created by yardena on 8/13/15.
*/
class LifecycleRouteSpec extends FlatSpec with Matchers with ScalatestRouteTest with Json4sProtocol {
def actorRefFactory = system
lazy val routing = new LifecycleRouting
"GET /status" should "return success" in {
Get("/status") ~> routing.routes ~> check {
status shouldEqual StatusCodes.OK
responseAs[Message].message should startWith ("Server is up")
}
}
"GET /config" should "return full config" in {
Get("/config") ~> routing.routes ~> check {
status shouldEqual StatusCodes.OK
val str = compact(render(responseAs[JObject]))
val config = ConfigFactory.parseString(str)
val myConfig = CoreConfig.getConfig()
config.getInt("akka.http.server.listen_port") shouldEqual myConfig.getInt("akka.http.server.listen_port")
}
}
"GET /config/akka/http/server" should "return server config" in {
Get("/config/akka/http/server") ~> routing.routes ~> check {
status shouldEqual StatusCodes.OK
val str = compact(render(responseAs[JObject]))
val config = ConfigFactory.parseString(str)
val myConfig = CoreConfig.getConfig()
config.getInt("listen_port") shouldEqual myConfig.getInt("akka.http.server.listen_port")
}
}
"GET /config/akka/http/server/listen_port" should "return listen_port" in {
Get("/config/akka/http/server") ~> routing.routes ~> check {
status shouldEqual StatusCodes.OK
val str = compact(render(responseAs[JObject]))
val config = ConfigFactory.parseString(str)
val myConfig = CoreConfig.getConfig()
config.getInt("listen_port") shouldEqual myConfig.getInt("akka.http.server.listen_port")
}
}
"GET /metrics" should "return success" in {
Get("/metrics") ~> routing.routes ~> check {
status shouldEqual StatusCodes.OK
}
}
}
| ListnPlay/RiverSong | src/test/scala/com/featurefm/riversong/LifecycleRouteSpec.scala | Scala | mit | 2,239 |
/*
* OverviewUser.scala
*
* Overview
* Created by Jonas Karlsson, Aug 2012
*/
package models
import com.github.t3hnar.bcrypt._
import java.sql.Timestamp
import java.util.Date
import models.tables.Users
import com.overviewdocs.database.HasBlockingDatabase
import com.overviewdocs.models.UserRole
/**
* A user that exists in the database
*/
trait OverviewUser {
val id: Long
val email: String
val requestedEmailSubscription: Boolean
val treeTooltipsEnabled: Boolean
def passwordMatches(password: String): Boolean
/** @return None if the user has no open confirmation request */
def withConfirmationRequest: Option[OverviewUser with ConfirmationRequest]
/** @return None if the user has not confirmed */
def asConfirmed: Option[OverviewUser with Confirmation]
/** @return The same user, with a new reset-password token. Save to commit. */
def withResetPasswordRequest: OverviewUser with ResetPasswordRequest
/** @return The same user, with a different email */
def withEmail(email: String): OverviewUser
/** @return True if the user has permission to administer the website */
def isAdministrator: Boolean
/** Returns a User for storage in the database. */
def toUser: User
}
/**
* A user that has an open confirmation request (confirmationToken exists)
*/
trait ConfirmationRequest {
val confirmationToken: String
val confirmationSentAt: Date
/**
* After confirming, the values in ConfirmationRequest will still
* be available, though the actual user will no longer have
* a confirmationToken. It's better to work with the returned OverviewUser
* to avoid any inconsistencies. The user must be saved before confirm
* takes effect.
*/
def confirm: OverviewUser
}
/**
* A user that has an open password-reset token
*/
trait ResetPasswordRequest {
val resetPasswordToken: String
val resetPasswordSentAt: Date
/**
* Converts this OverviewUser to one with the new password. Save the return
* value to make the change permanent.
*/
def withNewPassword(password: String): OverviewUser
}
/**
* A confirmed user (who has logged in at least once)
*/
trait Confirmation {
val confirmedAt: Date
}
/**
* A user that may or may not exist yet, and in an unknown state.
* Different methods attempt to convert the user into a known state,
* returning None if conversion can't be completed.
*/
case class PotentialUser(val email: String, val password: String, private val user: Option[OverviewUser]) {
/**
   * @return the OverviewUser if one exists, without checking the password
*/
def withRegisteredEmail: Option[OverviewUser] = {
user
}
/**
* @return OverviewUser if password is correct
*/
def withValidCredentials: Option[OverviewUser] = {
user.find(u => u.passwordMatches(password))
}
/**
* @return OverviewUser with ConfirmationRequest if the user has an active confirmation request.
*/
def withConfirmationRequest: Option[OverviewUser with ConfirmationRequest] = {
user match {
case Some(u) => u.withConfirmationRequest
case None => None
}
}
}
object PotentialUser {
def apply(email: String, password: String): PotentialUser = {
PotentialUser(email, password, OverviewUser.findByEmail(email))
}
}
/**
* Helpers to get new or existing OverviewUsers
*/
object OverviewUser extends HasBlockingDatabase {
import database.api._
private val TokenLength = 26
val BcryptRounds = 7
private def generateToken = scala.util.Random.alphanumeric.take(TokenLength).mkString
private def generateTimestamp = new Timestamp(new Date().getTime())
def findByEmail(email: String) : Option[OverviewUser] = {
blockingDatabase.option(Users.filter(_.email === email)).map(OverviewUser.apply)
}
def findByResetPasswordTokenAndMinDate(token: String, minDate: Date): Option[OverviewUser with ResetPasswordRequest] = {
blockingDatabase.option(
Users
.filter(_.resetPasswordToken === token)
.filter(_.resetPasswordSentAt >= new java.sql.Timestamp(minDate.getTime))
).map(new UserWithResetPasswordRequest(_))
}
def findByConfirmationToken(token: String): Option[OverviewUser with ConfirmationRequest] = {
blockingDatabase.option(
Users
.filter(_.confirmationToken === token)
).map(new UnconfirmedUser(_))
}
def prepareNewRegistration(email: String, password: String, emailSubscriber: Boolean): OverviewUser with ConfirmationRequest = {
val confirmationToken = generateToken
val confirmationSentAt = generateTimestamp
val user = User(
email = email,
passwordHash = password.bcrypt(BcryptRounds),
emailSubscriber = emailSubscriber,
confirmationToken = Some(confirmationToken),
confirmationSentAt = Some(confirmationSentAt)
)
new UnconfirmedUser(user)
}
private def apply(userData: Option[User]): Option[OverviewUser] = {
userData.map(new OverviewUserImpl(_))
}
def apply(user: User): OverviewUser = new OverviewUserImpl(user)
/**
* Underlying implementation that manages the User object that is the conduit to the
* database. As the user state is transformed, the underlying User is modified and
* passed along
*/
private case class OverviewUserImpl(user: User) extends OverviewUser {
override def toUser = user
override val id = user.id
override val email = user.email
override val requestedEmailSubscription = user.emailSubscriber
override val treeTooltipsEnabled = user.treeTooltipsEnabled
def passwordMatches(password: String): Boolean = {
password.isBcrypted(user.passwordHash)
}
def withConfirmationRequest: Option[OverviewUser with ConfirmationRequest] = {
if (user.confirmationToken.isDefined) Some(new UnconfirmedUser(user))
else None
}
def withResetPasswordRequest: OverviewUser with ResetPasswordRequest = {
new UserWithResetPasswordRequest(user.copy(
resetPasswordToken = Some(generateToken),
resetPasswordSentAt = Some(generateTimestamp)))
}
def asConfirmed: Option[OverviewUser with Confirmation] = {
user.confirmedAt.map(d => new ConfirmedUser(user, d))
}
def withEmail(email: String): OverviewUser = {
new OverviewUserImpl(user.copy(email = email))
}
def isAdministrator = user.role == UserRole.Administrator
}
/**
* A User with an active confirmation request
*/
private class UnconfirmedUser(user: User) extends OverviewUserImpl(user) with ConfirmationRequest {
override val confirmationToken = user.confirmationToken.get
override val confirmationSentAt = user.confirmationSentAt.get
override def confirm: OverviewUser = {
OverviewUserImpl(user.copy(
confirmationToken = None,
confirmedAt = Some(generateTimestamp)
))
}
}
/**
* A User who has confirmed
*/
private class ConfirmedUser(user: User, val confirmedAt: Date) extends OverviewUserImpl(user) with Confirmation {
}
private class UserWithResetPasswordRequest(user: User) extends OverviewUserImpl(user) with ResetPasswordRequest {
    override val resetPasswordToken = user.resetPasswordToken.getOrElse(throw new Exception("logic error: user has no resetPasswordToken"))
    override val resetPasswordSentAt = user.resetPasswordSentAt.getOrElse(throw new Exception("logic error: user has no resetPasswordSentAt"))
override def withNewPassword(password: String): OverviewUser = {
OverviewUserImpl(user.copy(
resetPasswordToken = None,
resetPasswordSentAt = None,
passwordHash = password.bcrypt(BcryptRounds)))
}
}
}
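// Registration/confirmation flow sketch (persisting toUser happens elsewhere):
//   val pending = OverviewUser.prepareNewRegistration("[email protected]", "s3cret", emailSubscriber = false)
//   // email pending.confirmationToken to the user, store pending.toUser ...
//   val confirmed: Option[OverviewUser] =
//     OverviewUser.findByConfirmationToken(token).map(_.confirm) // token comes from the link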
| overview/overview-server | web/app/models/OverviewUser.scala | Scala | agpl-3.0 | 7,552 |
package picfg
import java.text.SimpleDateFormat
import java.util.Date
import sodium.StreamSink
object log {
  sealed trait Log {
val ts = new Date()
}
case class Info(msg: String) extends Log
case class Error(msg: String, e: Exception) extends Log
private object LogSupport {
// stream for log events
val log = new StreamSink[Log]
}
trait LogSupport {
    def onLogMsg(f: Log => Unit): Unit = {
      // register f as a side-effecting listener; Stream.map(f) with the result discarded
      // risks the derived stream being collected so that f silently never fires
      LogSupport.log.listen(l => f(l))
    }
def logInfo(msg: String) = LogSupport.log.send(Info(msg))
def logError(msg: String, e: Exception) = LogSupport.log.send(Error(msg, e))
}
}
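// Usage sketch: mix in LogSupport, register one handler, then publish events:
//   object App extends LogSupport {
//     onLogMsg {
//       case Info(msg)     => println(s"INFO $msg")
//       case Error(msg, e) => println(s"ERROR $msg: ${e.getMessage}")
//     }
//     logInfo("started")
//   }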
| j-keck/picfg | src/main/scala/picfg/log.scala | Scala | mit | 613 |
/*
* Copyright 2013 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
import cats.kernel.Eq
import org.typelevel.ci.CIString
final case class Protocol(name: CIString, version: Option[CIString]) {
override def toString(): String = name.toString + version.map(v => "/" + v.toString).getOrElse("")
}
object Protocol {
implicit val catsEqInstance: Eq[Protocol] = Eq.fromUniversalEquals
}
| http4s/http4s | core/shared/src/main/scala/org/http4s/Protocol.scala | Scala | apache-2.0 | 935 |
def curry[A,B,C](f: (A, B) => C): A => (B => C) = { a: A => b: B => f(a, b)}
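// Example: val add = curry((a: Int, b: Int) => a + b); add(1)(2) == 3
// Partial application then comes for free: val inc = add(1); inc(41) == 42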
| hiroqn/scalaz-study | src/p2-3.scala | Scala | mit | 77 |
package de.frosner.broccoli.routes
import javax.inject.Inject
import com.google.inject.Provider
import de.frosner.broccoli.controllers._
import play.api.mvc.{Action, Results}
import play.api.routing.Router.Routes
import play.api.routing.SimpleRouter
import play.api.routing.sird._
/**
* Routes for Broccoli REST API.
*
* @param templates Controller for templates
* @param instances Controller for instances
* @param about Controller for application information
* @param status Controller for status application
* @param security Controller for authentication
*/
class ApiV1Router @Inject()(
templates: Provider[TemplateController],
instances: Provider[InstanceController],
about: Provider[AboutController],
status: Provider[StatusController],
security: Provider[SecurityController]
) extends SimpleRouter {
override def routes: Routes = {
// Templates
case GET(p"/templates") => templates.get.list
case GET(p"/templates/$id") => templates.get.show(id)
// Instances
case GET(p"/instances" ? q_o"templateId=$id") => instances.get.list(id)
case GET(p"/instances/$id") => instances.get.show(id)
case POST(p"/instances") => instances.get.create
case POST(p"/instances/$id") => instances.get.update(id)
case DELETE(p"/instances/$id") => instances.get.delete(id)
case GET(p"/instances/$id/tasks") => instances.get.tasks(id)
// About & status
case GET(p"/about") => about.get.about
case GET(p"/status") => status.get.status
// Authentication
case POST(p"/auth/login") => security.get.login
case POST(p"/auth/logout") => security.get.logout
case GET(p"/auth/verify") => security.get.verify
// Do not fall back to other routes for API requests, but return 404 directly
case _ => Action(Results.NotFound)
}
}
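// Matching sketch for the sird extractors above:
//   GET /instances?templateId=web -> instances.get.list(Some("web"))
//   GET /instances                -> instances.get.list(None) (q_o makes the parameter optional)
//   DELETE /instances/42          -> instances.get.delete("42") ($id binds the path segment as a String)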
| FRosner/cluster-broccoli | server/src/main/scala/de/frosner/broccoli/routes/ApiV1Router.scala | Scala | apache-2.0 | 1,907 |
package com.twitter.querulous.async
import java.sql.Connection
import concurrent._
import com.twitter.querulous.{StatsCollector, NullStatsCollector }
import com.twitter.querulous.database.{Database, DatabaseFactory}
import com.twitter.querulous.config
class BlockingDatabaseWrapperFactory(
contextFactory :() => ExecutionContext,
factory :DatabaseFactory,
stats :StatsCollector = NullStatsCollector )
extends AsyncDatabaseFactory {
def apply(
hosts :List[String],
name :String,
username :String,
password :String,
urlOptions :Map[String, String],
driverName :String ): AsyncDatabase = {
new BlockingDatabaseWrapper(
factory(hosts, name, username, password, urlOptions, driverName),
stats
)( contextFactory() )
}
}
/*private object AsyncConnectionCheckout {
lazy val checkoutTimer = new JavaTimer(true)
}*/
class BlockingDatabaseWrapper(
protected[async] val database: Database,
stats :StatsCollector = NullStatsCollector )( implicit context :ExecutionContext )
extends AsyncDatabase {
//import AsyncConnectionCheckout._
val dbStr = database.hosts.mkString(",") + "-" + database.name
// Note: Our executor is similar to what you'd get via Executors.newFixedThreadPool(), but the latter
// returns an ExecutorService, which unfortunately doesn't give us as much visibility into stats as
// the ThreadPoolExecutor, so we create one ourselves. We use a LinkedBlockingQueue for memory efficiency
// since maxWaiters can be very high (configuration default is Int.MaxValue, i.e. unbounded).
/*private val executor = {
val e = new ThreadPoolExecutor(workPoolSize, workPoolSize, 0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue[Runnable](maxWaiters),
new DaemonThreadFactory("asyncWorkPool-" + dbStr));
stats.addGauge("db-async-waiters-" + dbStr)(e.getQueue.size)
e
}*/
//private val workPool = FuturePool(executor)
//private val openTimeout = database.openTimeout
// We cache the connection checked out from the underlying database in a thread local so
// that each workPool thread can hold on to a connection for its lifetime. This saves expensive
// context switches in borrowing/returning connections from the underlying database per request.
private lazy val tlConnection = new ThreadLocal[Connection] {
override def initialValue() = {
stats.incr("db-async-cached-connection-acquire-total", 1)
stats.incr("db-async-cached-connection-acquire-" + dbStr, 1)
database.open()
}
}
// Basically all we need to do is offload the real work to workPool. However, there is one
// complication - enforcement of DB open timeout. If a connection is not available, most
// likely neither is a thread to do the work, so requests would queue up in the future pool.
// We need to ensure requests don't stick around in the queue for more than openTimeout duration.
// To do this, we use a trick implemented in the ExecutorServiceFuturePool for cancellation - i.e.,
// setup a timeout and cancel the request iff it hasn't already started executing, coordinating
  // via an AtomicBoolean. (Note: that timeout/cancellation machinery is currently
  // commented out below, so openTimeout is effectively not enforced by this wrapper.)
def withConnection[R](f: Connection => R): Future[R] = future {
//val startCoordinator = new AtomicBoolean(true)
//future {//workPool {
//val isRunnable = startCoordinator.compareAndSet(true, false)
//if (isRunnable) {
val connection = tlConnection.get()
try {
f(connection)
} catch {
case e => {
// An exception occurred. To be safe, we return our cached connection back to the pool. This
// protects us in case either the connection has been killed or our thread is going to be
// terminated with an unhandled exception. If neither is the case (e.g. this was a benign
// exception like a SQL constraint violation), it still doesn't hurt much to return/re-borrow
// the connection from the underlying database, given that this should be rare.
// TODO: Handle possible connection leakage if this thread is destroyed in some other way.
// (Note that leaking an exception from here will not kill the thread since the FuturePool
// will swallow it and wrap with a Throw()).
stats.incr("db-async-cached-connection-release-total", 1)
stats.incr("db-async-cached-connection-release-" + dbStr, 1)
database.close(connection)
tlConnection.remove()
throw e
}
}
}
// If openTimeout elapsed and our task has still not started, cancel it and return the
// exception. If not, rescue the exception with the *original* future, as if nothing
// happened. Any other exception - just propagate unchanged.
/* future.within(checkoutTimer, openTimeout) rescue {
case e: TimeoutException => {
val isCancellable = startCoordinator.compareAndSet(true, false)
if (isCancellable) {
stats.incr("db-async-open-timeout-total", 1)
stats.incr("db-async-open-timeout-" + dbStr, 1)
future.cancel()
Future.exception(e)
} else {
future // note: this is the original future not bounded by within().
}
}
// Track stats for max waiters exceeded.
case e: RejectedExecutionException => {
stats.incr("db-async-max-waiters-exceeded-total", 1)
stats.incr("db-async-max-waiters-exceeded-" + dbStr, 1)
Future.exception(e)
}
} */
//}
// Equality overrides.
override def equals(other: Any) = other match {
case other: BlockingDatabaseWrapper => database eq other.database
case _ => false
}
override def hashCode = database.hashCode
}
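// Usage sketch (database and context built elsewhere): each worker thread caches one
// connection, so blocking JDBC work stays confined to the implicit execution context.
//   implicit val context: ExecutionContext = ... // sized to match the underlying pool
//   val db = new BlockingDatabaseWrapper(database)
//   val names: Future[Seq[String]] = db.withConnection { c =>
//     val rs = c.createStatement().executeQuery("SELECT name FROM users")
//     ... // drain the ResultSet into a Seq
//   }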
| kievbs/querulous210 | src/main/scala/com/twitter/querulous/async/BlockingDatabaseWrapper.scala | Scala | apache-2.0 | 5,856 |
package edu.gemini.osgi.tools
/** A version per R4 spec section 3.1.4. The qualifier is unused. */
case class Version(major: Int, minor: Int, micro: Int) extends Ordered[Version] {
implicit class ZeroOrElse(n: Int) {
def orElse(m: Int) =
if (n != 0) n else m
}
def compare(other: Version) =
(major - other.major) orElse (minor - other.minor) orElse (micro - other.micro)
override def toString =
if (this == Version.MaxValue) "Infinity" else "%d.%d.%d".format(major, minor, micro)
}
object Version {
def parse(s: String): Version =
Option(s).map {
case "Infinity" => MaxValue
case _ =>
val parts = (s.split("\\.").take(3).map(_.toInt) ++ Array(0, 0, 0)).take(3)
Version(parts(0), parts(1), parts(2))
} getOrElse MinValue
val MinValue = Version(0, 0, 0)
val MaxValue = Version(Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE)
}
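// Examples: parse pads missing segments with zeros and special-cases null/"Infinity":
//   Version.parse("1.2")      == Version(1, 2, 0)
//   Version.parse("Infinity") == Version.MaxValue
//   Version.parse(null)       == Version.MinValue
//   Version(1, 0, 0) < Version(1, 0, 1) // compare falls through major, minor, micro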
| arturog8m/ocs | project/src/main/scala/edu/gemini/osgi/tools/Version.scala | Scala | bsd-3-clause | 915 |
package com.twitter.finagle.mysql
import com.twitter.finagle.mysql.transport.{MysqlBuf, Packet}
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.Stack
import com.twitter.util.{Future, Return, Throw, Try}
/**
* A base class for exceptions related to client incompatibility with an
* upstream MySQL server.
*/
class IncompatibleServerError(msg: String) extends Exception(msg)
/**
* Indicates that the server to which the client is connected is running
* a version of MySQL that the client is incompatible with.
*/
case object IncompatibleVersion
extends IncompatibleServerError(
"This client is only compatible with MySQL version 4.1 and later"
)
/**
* Indicates that the server to which the client is connected is configured to use
* a charset that the client is incompatible with.
*/
case object IncompatibleCharset
extends IncompatibleServerError(
"This client is only compatible with UTF-8 and Latin-1 charset encoding"
)
/**
* A `Handshake` is responsible for using an open `Transport`
* to a MySQL server to establish a MySQL session by performing
* the `Connection Phase` of the MySQL Client/Server Protocol.
*
* https://dev.mysql.com/doc/internals/en/connection-phase.html
*
* @param params The collection `Stack` params necessary to create the
* desired MySQL session.
*
* @param transport A `Transport` connected to a MySQL server which
* understands the reading and writing of MySQL packets.
*/
private[mysql] abstract class Handshake(
params: Stack.Params,
transport: Transport[Packet, Packet]) {
protected final val settings = HandshakeSettings(params)
private[this] def isCompatibleVersion(init: HandshakeInit): Try[Boolean] =
if (init.serverCapabilities.has(Capability.Protocol41)) Return.True
else Throw(IncompatibleVersion)
private[this] def isCompatibleCharset(init: HandshakeInit): Try[Boolean] =
if (MysqlCharset.isCompatible(init.charset)) Return.True
else Throw(IncompatibleCharset)
protected final def verifyCompatibility(handshakeInit: HandshakeInit): Future[HandshakeInit] =
LostSyncException.const(isCompatibleVersion(handshakeInit)).flatMap { _ =>
LostSyncException.const(isCompatibleCharset(handshakeInit)).map(_ => handshakeInit)
}
protected final def messageDispatch(msg: ProtocolMessage): Future[Result] =
transport.write(msg.toPacket).flatMap(_ => transport.read().flatMap(decodeSimpleResult))
protected final def decodeSimpleResult(packet: Packet): Future[Result] =
MysqlBuf.peek(packet.body) match {
case Some(Packet.OkByte) => LostSyncException.const(OK(packet))
case Some(Packet.ErrorByte) =>
LostSyncException.const(Error(packet)).flatMap { err =>
Future.exception(ServerError(err.code, err.sqlState, err.message))
}
case _ => LostSyncException.AsFuture
}
protected final def readHandshakeInit(): Future[HandshakeInit] =
transport
.read()
.flatMap(packet => LostSyncException.const(HandshakeInit(packet)))
.flatMap(verifyCompatibility)
/**
* Performs the connection phase. The phase should only be performed
* once before any other exchange between the client/server. A failure
* to handshake renders a service unusable.
* [[https://dev.mysql.com/doc/internals/en/connection-phase.html]]
*/
def connectionPhase(): Future[Result]
}
private[mysql] object Handshake {
/**
* Creates a `Handshake` based on the specific `Stack` params and `Transport` passed in.
* If the `Transport.ClientSsl` param is set, then a `SecureHandshake` will be returned.
   * Otherwise a `PlainHandshake` is returned.
*/
def apply(params: Stack.Params, transport: Transport[Packet, Packet]): Handshake =
if (params[Transport.ClientSsl].sslClientConfiguration.isDefined)
new SecureHandshake(params, transport)
else new PlainHandshake(params, transport)
}
| luciferous/finagle | finagle-mysql/src/main/scala/com/twitter/finagle/mysql/Handshake.scala | Scala | apache-2.0 | 3,925 |
package io.circe.cursor
import cats.Functor
import io.circe.{ Context, Cursor, Json }
private[circe] case class CJson(focus: Json) extends Cursor {
def context: List[Context] = Nil
def up: Option[Cursor] = None
def delete: Option[Cursor] = None
def withFocus(f: Json => Json): Cursor = CJson(f(focus))
def withFocusM[F[_]](f: Json => F[Json])(implicit F: Functor[F]): F[Cursor] =
F.map(f(focus))(CJson.apply)
}
| alexarchambault/circe | core/shared/src/main/scala/io/circe/cursor/CJson.scala | Scala | apache-2.0 | 427 |
package free
import simple.Console._
object ConsoleSimple {
def main(args: Array[String]) {
val instrs =
GetLine {
line =>
PutLine("we got: " ++ line, Stop)
}
execute(instrs)
}
}
| YoEight/psug-free | src/main/scala/free/ConsoleSimpleExample.scala | Scala | mit | 224 |
package com.bwsw.tstreamstransactionserver.configProperties
import com.bwsw.tstreamstransactionserver.netty.ExecutionContext
import scala.concurrent.ExecutionContextExecutorService
class ServerExecutionContext(rocksWriteNThreads: Int, rocksReadNThreads: Int) {
private val commitLogExecutionContext = ExecutionContext("CommitLogPool-%d")
private val serverWriteExecutionContext = ExecutionContext(rocksWriteNThreads, "ServerWritePool-%d")
private val serverReadExecutionContext = ExecutionContext(rocksReadNThreads, "ServerReadPool-%d")
val commitLogContext: ExecutionContextExecutorService = commitLogExecutionContext.getContext
val serverWriteContext: ExecutionContextExecutorService = serverWriteExecutionContext.getContext
val serverReadContext: ExecutionContextExecutorService = serverReadExecutionContext.getContext
def stopAccessNewTasksAndAwaitAllCurrentTasksAreCompleted(): Unit = {
val contexts = collection.immutable.Seq(
commitLogExecutionContext,
serverReadExecutionContext,
serverWriteExecutionContext
)
contexts foreach (context => context.stopAccessNewTasks())
contexts foreach (context => context.awaitAllCurrentTasksAreCompleted())
}
}
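// Usage sketch: pools are sized per RocksDB access pattern; shutdown first refuses new
// tasks on every pool, then waits for the in-flight tasks to finish:
//   val ec = new ServerExecutionContext(rocksWriteNThreads = 2, rocksReadNThreads = 4)
//   scala.concurrent.Future { /* read RocksDB */ }(ec.serverReadContext)
//   ec.stopAccessNewTasksAndAwaitAllCurrentTasksAreCompleted()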
| bwsw/tstreams-transaction-server | src/main/scala/com/bwsw/tstreamstransactionserver/configProperties/ServerExecutionContext.scala | Scala | apache-2.0 | 1,209 |
package com.rikmuld.camping.features.blocks.logseat
import com.rikmuld.camping.CampingMod
import com.rikmuld.corerm.network.PacketSender
import net.minecraft.client.Minecraft
import net.minecraft.entity.Entity
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.nbt.NBTTagCompound
import net.minecraft.util.math.BlockPos
import net.minecraft.world.World
class EntityMountable(worldIn: World) extends Entity(worldIn) {
  var pos: BlockPos = _
  var checkDeath: Int = 0
setSize(0.25F, 0.25F)
  def setPos(pos: BlockPos): Unit = {
this.pos = pos
setPosition(pos.getX + 0.5F, pos.getY + 0.1F, pos.getZ + 0.5F)
}
protected override def entityInit() {}
def tryAddPlayer(player: EntityPlayer) {
if (getPassengers.size() == 0) player.startRiding(this)
}
override def onUpdate() {
super.onUpdate()
    if (pos != null) {
      if (world.getBlockState(pos).getBlock != CampingMod.OBJ.logSeat)
        setDead() // TODO: move this check into the log seat code
if (this.getPassengers.size() > 0 && world.isRemote && Minecraft.getMinecraft.gameSettings.keyBindSneak.isPressed && Minecraft.getMinecraft.inGameHasFocus) {
PacketSender.sendToServer(new PacketExitLog(pos.getX, pos.getY, pos.getZ))
this.getPassengers.get(0).dismountRidingEntity()
}
}
}
protected override def readEntityFromNBT(tag: NBTTagCompound) {}
protected override def writeEntityToNBT(tag: NBTTagCompound) {}
} | Rikmuld/MC-Camping | scala/com/rikmuld/camping/features/blocks/logseat/EntityMountable.scala | Scala | gpl-3.0 | 1,440 |
package org.jetbrains.plugins.scala.debugger.evaluation.evaluator
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl
import com.intellij.debugger.engine.evaluation.expression.{DisableGC, Evaluator}
import com.intellij.debugger.engine.{DebugProcess, DebugProcessImpl, JVMName}
import com.intellij.debugger.impl.DebuggerUtilsEx
import com.intellij.debugger.{DebuggerBundle, SourcePosition}
import com.sun.jdi._
import com.sun.tools.jdi.{ConcreteMethodImpl, TypeComponentImpl}
import org.jetbrains.plugins.scala.debugger.ScalaPositionManager
import org.jetbrains.plugins.scala.debugger.evaluation.EvaluationException
import org.jetbrains.plugins.scala.debugger.evaluation.util.DebuggerUtil
import org.jetbrains.plugins.scala.extensions.inReadAction
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.util.{Success, Try}
/**
* User: Alefas
* Date: 12.10.11
*/
case class ScalaMethodEvaluator(objectEvaluator: Evaluator, _methodName: String, signature: JVMName,
argumentEvaluators: Seq[Evaluator], traitImplementation: Option[JVMName] = None,
methodPosition: Set[SourcePosition] = Set.empty, localMethodIndex: Int = -1) extends Evaluator {
val methodName: String = DebuggerUtil.withoutBackticks(_methodName)
private val localMethod = localMethodIndex > 0
private val localMethodName = methodName + "$" + localMethodIndex
private var prevProcess: DebugProcess = _
private val jdiMethodsCache = mutable.HashMap[ReferenceType, Option[Method]]()
private def initCache(process: DebugProcess): Unit = {
if (process != null) {
prevProcess = process
jdiMethodsCache.clear()
}
}
def evaluate(context: EvaluationContextImpl): AnyRef = {
val debugProcess: DebugProcessImpl = context.getDebugProcess
if (!debugProcess.isAttached) return null
if (debugProcess != prevProcess) {
initCache(debugProcess)
}
val requiresSuperObject: Boolean = objectEvaluator.isInstanceOf[ScSuperEvaluator] ||
(objectEvaluator.isInstanceOf[DisableGC] &&
objectEvaluator.asInstanceOf[DisableGC].getDelegate.isInstanceOf[ScSuperEvaluator])
    val obj: AnyRef = DebuggerUtil.unwrapScalaRuntimeRef {
objectEvaluator.evaluate(context)
}
if (obj == null) {
throw EvaluationException(new NullPointerException)
}
val args: Seq[Value] = argumentEvaluators.flatMap { ev =>
ev.evaluate(context) match {
case Some(res) => Some(res.asInstanceOf[Value])
case None => None
case res => Some(res.asInstanceOf[Value])
}
}
try {
def findClass(name: String) = debugProcess.findClass(context, name, context.getClassLoader)
def findMethod(referenceType: ReferenceType): Option[Method] = {
lazy val sortedMethodCandidates: List[Method] = {
val allMethods = referenceType.allMethods()
allMethods.asScala.collect {
case method if !localMethod && method.name() == methodName => (method, 1)
case method if !localMethod && method.name().endsWith("$$" + methodName) => (method, 1) //private method, maybe from parent class
case method if localMethod && method.name() == localMethodName => (method, 1)
case method if localMethod && method.name.startsWith(methodName + "$") => (method, 2)
case method if localMethod && method.name.contains(methodName + "$") => (method, 3)
}
.sortBy(_._2)
.map(_._1)
.toList
}
def concreteMethodByName(mName: String, signature: JVMName): Option[Method] = {
val sgn = signature.getName(debugProcess)
referenceType match {
case classType: ClassType =>
Option(classType.concreteMethodByName(mName, sgn))
case it: InterfaceType =>
it.methodsByName(mName, sgn).asScala.find(_.isInstanceOf[ConcreteMethodImpl])
}
}
def findWithSignature(): Option[Method] = {
if (signature == null) None
else {
if (!localMethod) concreteMethodByName(methodName, signature)
else {
sortedMethodCandidates.toStream
.flatMap(m => concreteMethodByName(m.name(), signature))
.headOption
}
}
}
def findWithoutSignature(): Option[Method] = {
def sameParamNumber(m: Method) = {
try {
val argsCount = m.argumentTypeNames().size()
if (m.isVarArgs) args.length >= argsCount
else args.length == argsCount || args.length == argsCount - 1
}
catch {
case _: AbsentInformationException => true
}
}
def linesIntersects(m: Method): Boolean = inReadAction {
Try {
val lines = methodPosition.map(_.getLine)
m.allLineLocations().asScala.exists(l => lines.contains(ScalaPositionManager.checkedLineNumber(l)))
}.getOrElse(true)
}
if (sortedMethodCandidates.length > 1) {
val withSameParamNumber = sortedMethodCandidates.filter(sameParamNumber)
if (withSameParamNumber.isEmpty) sortedMethodCandidates.headOption
else if (withSameParamNumber.length == 1) withSameParamNumber.headOption
else {
val withSameLines = withSameParamNumber.filter(linesIntersects)
withSameLines.headOption.orElse(withSameParamNumber.headOption)
}
}
else sortedMethodCandidates.headOption
}
def doFind() = findWithSignature() orElse findWithoutSignature()
jdiMethodsCache.getOrElseUpdate(referenceType, doFind())
}
def invokeStaticMethod(referenceType: ReferenceType, jdiMethod: Method): AnyRef = {
def fixArguments(): Seq[Value] = {
def correctArgType(arg: AnyRef, typeName: String) = arg match {
case objRef: ObjectReference => DebuggerUtilsEx.isAssignableFrom(typeName, objRef.referenceType())
case primValue: PrimitiveValue => primValue.`type`().name() == typeName
case _ => true
}
val paramTypeNames = jdiMethod.argumentTypeNames()
if (paramTypeNames.size() == 0) Seq.empty
else {
val needObj = args.isEmpty || !correctArgType(args.head, paramTypeNames.get(0))
if (needObj) unwrappedArgs(obj +: args, jdiMethod)
else unwrappedArgs(args, jdiMethod)
}
}
referenceType match {
case ct: ClassType =>
debugProcess.invokeMethod(context, ct, jdiMethod, fixArguments().asJava)
case it: InterfaceType =>
debugProcess.invokeMethod(context, it, jdiMethod, fixArguments().asJava)
}
}
def invokeConstructor(referenceType: ReferenceType, method: Method): AnyRef = {
referenceType match {
case ct: ClassType if methodName == "<init>" =>
debugProcess.newInstance(context, ct, method, unwrappedArgs(args, method).asJava)
case _ => throw EvaluationException(s"Couldn't found appropriate constructor for ${referenceType.name()}")
}
}
def invokeInstanceMethod(objRef: ObjectReference, jdiMethod: Method): AnyRef = {
if (requiresSuperObject)
debugProcess.invokeInstanceMethod(context, objRef, jdiMethod, unwrappedArgs(args, jdiMethod).asJava, ObjectReference.INVOKE_NONVIRTUAL)
else
debugProcess.invokeMethod(context, objRef, jdiMethod, unwrappedArgs(args, jdiMethod).asJava)
}
def invokeInterfaceMethod(objRef: ObjectReference, jdiMethod: Method): AnyRef = {
def togglePrivate(method: Method): Unit = {
try {
method match {
case mImpl: TypeComponentImpl =>
val field = classOf[TypeComponentImpl].getDeclaredField("modifiers")
field.setAccessible(true)
val value = field.get(mImpl).asInstanceOf[Integer].toInt
val privateModifierMask = 2
field.set(mImpl, value ^ privateModifierMask)
case _ =>
}
} catch {
case _: Throwable =>
}
}
if (jdiMethod.isAbstract) throw EvaluationException(s"Cannot invoke abstract interface method ${jdiMethod.name()}")
//see SCL-10132
if (!jdiMethod.isDefault && jdiMethod.isPrivate) {
togglePrivate(jdiMethod)
val result = debugProcess.invokeInstanceMethod(context, objRef, jdiMethod, unwrappedArgs(args, jdiMethod).asJava, ObjectReference.INVOKE_NONVIRTUAL)
togglePrivate(jdiMethod)
result
} else {
debugProcess.invokeMethod(context, objRef, jdiMethod, unwrappedArgs(args, jdiMethod).asJava)
}
}
def classWithMethod(c: ReferenceType) = findMethod(c).map(m => (c, m))
def findInSuperClass(classType: ClassType): Option[(ReferenceType, Method)] = {
val superClass = classType.superclass()
classWithMethod(superClass)
.orElse {
traitImplementation.flatMap(ti => Option(ti.getName(context.getDebugProcess))) match {
case Some(traitImplName) =>
Try(findClass(traitImplName)) match {
case Success(c: ClassType) => classWithMethod(c)
case _ =>
val traitName = traitImplName.stripSuffix("$class")
Try(findClass(traitName)).toOption.flatMap(classWithMethod)
}
case _ => None
}
}
}
val typeAndMethod: Option[(ReferenceType, Method)] = obj match {
case objRef: ObjectReference =>
val objType = findClass(objRef.referenceType().name())
          if (objType.isInstanceOf[ArrayType]) throw EvaluationException(s"Method $methodName cannot be invoked on an array")
val classType = objType.asInstanceOf[ClassType]
if (requiresSuperObject) findInSuperClass(classType)
else classWithMethod(classType)
case rt: ReferenceType =>
classWithMethod(rt)
case _ =>
throw EvaluationException(DebuggerBundle.message("evaluation.error.evaluating.method", methodName))
}
if (typeAndMethod.isEmpty) throw EvaluationException(DebuggerBundle.message("evaluation.error.evaluating.method", methodName))
typeAndMethod match {
case Some((tp, m)) if m.isConstructor =>
invokeConstructor(tp, m)
case Some((tp, m)) if m.isStatic =>
invokeStaticMethod(tp, m)
case Some((_, m)) =>
obj match {
case objRef: ObjectReference if m.declaringType().isInstanceOf[InterfaceType] =>
invokeInterfaceMethod(objRef, m)
case objRef: ObjectReference =>
invokeInstanceMethod(objRef, m)
case _ =>
throw EvaluationException(DebuggerBundle.message("evaluation.error.evaluating.method", methodName))
}
case _ => throw EvaluationException(DebuggerBundle.message("evaluation.error.evaluating.method", methodName))
}
}
catch {
case e: Exception => throw EvaluationException(e)
}
}
private def unwrappedArgs(args: Seq[AnyRef], jdiMethod: Method): Seq[Value] = {
val argTypeNames = jdiMethod.argumentTypeNames()
args.zipWithIndex.map {
case (DebuggerUtil.scalaRuntimeRefTo(value), idx) if !DebuggerUtil.isScalaRuntimeRef(argTypeNames.get(idx)) => value.asInstanceOf[Value]
case (arg, _) => arg.asInstanceOf[Value]
}
}
}
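
// Hedged usage sketch (commented out; the receiver evaluator and evaluation
// context are assumptions, normally supplied by the surrounding debugger
// session): composing an evaluator for the expression `receiver.size()`.
//
//   val sizeEvaluator = ScalaMethodEvaluator(
//     objectEvaluator    = receiverEvaluator, // evaluates `receiver`
//     _methodName        = "size",
//     signature          = null,              // null => resolve by name only
//     argumentEvaluators = Seq.empty
//   )
//   val result = sizeEvaluator.evaluate(evaluationContext)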
| triplequote/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaMethodEvaluator.scala | Scala | apache-2.0 | 11,717 |
package org.scaladebugger.language.models
sealed trait Function extends BaseValue {
val parameters: Seq[Identifier]
val documentation: Option[String]
override def toScalaValue: AnyRef =
s"Parameters: ${parameters.map(_.name).mkString(",")}"
}
sealed trait CallableFunction extends Function
/**
* Represents a function created through the interpreter. This implementation
* is missing the closure that will be filled in elsewhere.
*
* @param parameters The parameter names for the function
* @param body The body of the interpreted function
* @param documentation Optional documentation to associate with the function
*/
case class IncompleteInterpretedFunction(
parameters: Seq[Identifier],
body: Expression,
documentation: Option[String] = None
) extends Function
/**
* Represents a function created through the interpreter.
*
* @param parameters The parameter names for the function
* @param closure The enclosing scope of the function when defined
* @param body The body of the interpreted function
* @param documentation Optional documentation to associate with the function
*/
case class InterpretedFunction(
parameters: Seq[Identifier],
closure: Scope,
body: Expression,
documentation: Option[String] = None
) extends CallableFunction {
override def toScalaValue: AnyRef = "<INTERPRETED> Function | " + super.toScalaValue
}
/**
* Creates a function created outside of the interpreter.
*
* @param parameters The parameter names for the function
* @param implementation The function implementation
* @param documentation Optional documentation to associate with the function
*/
case class NativeFunction(
parameters: Seq[Identifier],
implementation: (Map[Identifier, Expression], Scope) => Expression,
documentation: Option[String] = None
) extends CallableFunction {
override def toScalaValue: AnyRef = "<NATIVE> Function | " + super.toScalaValue
}
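
// Hedged sketch (commented out): how a host might expose a native function to
// the interpreter. `Identifier`, `Expression`, and `Scope` come from this
// models package; the exact constructor shapes below are assumptions.
//
//   val echo = NativeFunction(
//     parameters     = Seq(Identifier("text")),
//     implementation = (args, _) => args(Identifier("text")),
//     documentation  = Some("Returns its argument unchanged")
//   )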
| chipsenkbeil/scala-debugger | scala-debugger-language/src/main/scala/org/scaladebugger/language/models/Function.scala | Scala | apache-2.0 | 1,913 |
/*******************************************************************************
Copyright (c) 2013-2014, KAIST.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.bug_detector
import scala.collection.mutable.{ HashMap => MHashMap, ListBuffer => MListBuffer }
import kr.ac.kaist.jsaf.analysis.cfg._
import kr.ac.kaist.jsaf.analysis.typing._
import kr.ac.kaist.jsaf.analysis.typing.CallContext._
import kr.ac.kaist.jsaf.analysis.typing.{ SemanticsExpr => SE }
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.widl.{WIDLTypeMap, WIDLToString, WIDLHelper}
import kr.ac.kaist.jsaf.nodes._
import kr.ac.kaist.jsaf.nodes_util.{ IRFactory, NodeRelation, Span }
import kr.ac.kaist.jsaf.scala_src.nodes._
import kr.ac.kaist.jsaf.scala_src.useful.Lists._
import java.util.{List => JList}
// libraries = ["webapis.tv.channel", ...]
class WIDLChecker(bugDetector: BugDetector) extends Walker {
////////////////////////////////////////////////////////////////////////////////
// Analysis
////////////////////////////////////////////////////////////////////////////////
val cfg = bugDetector.cfg
val typing = bugDetector.typing
val semantics = bugDetector.semantics
val stateManager = bugDetector.stateManager
val bugStorage = bugDetector.bugStorage
val bugOption = bugDetector.bugOption
var argObj: Obj = null
var argState: State = null
////////////////////////////////////////////////////////////////////////////////
// Soundness for argument type checking
////////////////////////////////////////////////////////////////////////////////
val soundness = false
////////////////////////////////////////////////////////////////////////////////
// ...
////////////////////////////////////////////////////////////////////////////////
def getType(typ: WType): Option[String] = typ match {
case SWNamedType(_, _, n) => Some(n)
case SWArrayType(_, _, t) => getType(t) match {
case Some(n) => Some(n+"[]")
case _ => Some("[]")
}
    case _ => None // Not yet implemented: other type forms (e.g. union, sequence)
}
def isErrorCallback(typ: WType): Boolean = getType(typ) match {
case Some(t) => t.endsWith("ErrorCallback")
case None => false
}
def isOptional(attr: WEAttribute): Boolean = attr match {
case SWEAOptional(_) => true
case _ => false
}
// check if the [name]d property is in the [dic]tionary
def contains(dic: WDictionary, name: String): Boolean = {
for (i <- 0 until dic.getMembers.size)
if (dic.getMembers.get(i).getName == name) return true
false
}
// check if the [name]d value is in the [enum] list
def contains(enum: WEnum, name: String): Boolean = {
for (i <- 0 until enum.getEnumValueList.size)
if (enum.getEnumValueList.get(i).getStr == name) return true
false
}
  // retrieve the [name]d property from the [dic]tionary
def getFromDic(dic: WDictionary, name: String): Option[WDictionaryMember] = {
val lst: JList[WDictionaryMember] = dic.getMembers()
for (i <- 0 until lst.size){
val mem: WDictionaryMember = lst.get(i)
if (mem.getName == name) return Some(mem)
}
None
}
// return Some(_) if concrete value exists
// None if we cannot know
  // null if we want to report a bug (functions, arrays)
def toDOMString(given: PropValue): Option[String] = {
    var result: Option[String] = null // null means no concrete value has been seen yet
    def setResult(str: String): Unit = result match {
      case Some(old) => if (old != str) result = None // conflicting concrete values => unknown
      case None => result = None
      case _ => result = Some(str) // first concrete value seen
}
val undefval = given.objval.value.pvalue.undefval
val nullval = given.objval.value.pvalue.nullval
val boolval = given.objval.value.pvalue.boolval
val strval = given.objval.value.pvalue.strval
val numval = given.objval.value.pvalue.numval
val locset = given.objval.value.locset
if (undefval.isConcrete) setResult("undefined")
if (nullval.isConcrete) setResult("null")
boolval match {
case BoolTop => result = None // cannot know
case BoolTrue => setResult("true")
case BoolFalse => setResult("false")
case BoolBot =>
}
strval match {
case StrTop => result = None
case StrBot =>
case _ => strval.getSingle match {
case Some(s) => setResult(s)
case _ => result = None
}
}
numval match {
case NumBot =>
case PosInf => setResult("Infinity")
case NegInf => setResult("-Infinity")
case NaN => setResult("NaN")
case _ => numval.getSingle match {
case Some(n) => setResult(n.toString)
case _ => result = None
}
}
for (loc <- locset) {
argState.heap(loc)("@class")._1.value.pvalue.strval.getSingle match {
case Some(str) => str match {
case "Function" => // TODO: get function body as string if possible
case "Array" => {
          /* commented out because absent properties cannot be handled well.
argState.heap(loc)("length").objval.value.pvalue.numval match {
case UIntSingle(v) => { // array to string
var arr: String = ""
var comma: Boolean = false
for (i <- 0 until v.toInt){
if (comma) arr += ","
argState.heap(loc)(i.toString) match {
case Obj.bottom => // absent property
case obj =>
obj._2 match {
case AbsentTop => result = None // property can be absent
case AbsentBot => toDOMString(obj._1) match {
case Some(str) => if (str != "undefined" && str != "null") arr += str
case _ => result = None
}
}
}
comma = true
}
setResult(arr)
}
case tmp@_ => result = None
}
*/
}
case _ => setResult("[object Object]")
}
case _ => result = None
}
}
result
}
// used in overload resolution algorithm
def isUndefined(v: Option[(PropValue, Absent)]): Boolean = v match {
case Some((p, _)) =>
val undefval = p.objval.value.pvalue.undefval
val nullval = p.objval.value.pvalue.nullval
val boolval = p.objval.value.pvalue.boolval
val strval = p.objval.value.pvalue.strval
val numval = p.objval.value.pvalue.numval
val locset = p.objval.value.locset
val notAllBottom = !undefval.isBottom || !nullval.isBottom || !boolval.isBottom || !strval.isBottom || !numval.isBottom || !locset.isEmpty
if (notAllBottom) undefval.isTop
else true
case _ => true
}
/*
* overload resolution algorithm (from WIDL 4.4.1.1)
* input - S: an effective overload set
* - arg[0...n-1]: a list of ECMAScript values
* output - one of S's entries
* - a list of IDL values
*/
  def overloadResolution(_S: List[JList[WArgument]], argObj: Obj): Set[JList[WArgument]] = { // TODO: refactor the argument types
// 1. Let maxarg be the length of the longest type list of the entries in S.
val maxarg: Int = _S.foldLeft(0)((res, arg) =>
if (arg.size > res) arg.size else res)
// 2. Initialize argcount to be min(maxarg, n).
val n: Int = argObj("length")._1._1._1._4.getSingle match {
case Some(len) => len.toInt
case _ =>
System.out.println("* WARNING: the length of argument is not single number *")
maxarg // TODO: is it possible?
}
val argcount: Int = if (maxarg < n) maxarg else n
/*
// 3. Initialize j to n−1.
var j: Int = n - 1
// 4. While j ≥ 0 and argj is undefined:
while (j >= 0 && isUndefined(argObj.map.get(j.toString))) {
System.out.println(j)
// 4. 1. If S has one or more entries whose type list is of length j, then:
if (_S.exists(arg => arg.size == j)) {
// 4. 1. 1. If S has one or more entries whose type list is longer than j,
// and at least one of those entries has the [TreatUndefinedAs=Missing] extended attribute on its arguments at indices j and above,
// then set argcount to j.
for (entry <- _S) {
val m: Int = entry.size
System.out.println(j until m)
if (m >= j){
for (i <- j until m){
val attr_list: JList[WEAttribute] = entry.get(j).getAttributes
for (attr_index <- 0 until attr_list.size){
val attr: WEAttribute = attr_list.get(attr_index)
attr match {
case SWAttribute(_, _, typ, na, _) =>
// TODO: there is no element which has [TreatUndefinedAs=Missing] extended attribute.
case _ => System.out.println("Hi?") // DEBUG
}
}
}
}
}
}
// 4. 2. Set j to j−1.
j = j - 1
}
*/
// 5. Initialize k to argcount.
val k: Int = argcount
// 6. Remove from S all entries whose type list is not of length argcount.
val S: Set[JList[WArgument]] = _S.filter(arg => arg.size == argcount).toSet
// 7. If S is empty, then throw a TypeError.
    if (S.isEmpty) return Set() // TODO: report that no matching overload was found
// 8. Initialize d to −1.
var d: Int = -1
// 9. If there is more than one entry in S, then set d to be the distinguishing argument index for the entries of S.
if (S.size > 1) {
// TODO: 9
d = argcount
}
// 10. Initialize values to be an empty list of IDL values.
var paramss: Set[JList[WArgument]] = Set()
// 11. Initialize i to 0.
var i: Int = 0
// 12. While i < d:
while (i < d) {
// 12. 1. Let V be argi.
val V = argObj(i.toString)
// 12. 2. Let type be the type at index i in the type list of any entry in S.
      // (paramss is always empty at this point, so take the type from an entry of S instead)
      S.head.get(i) match {
        case SWArgument(_, attrs, t, _, _) =>
          // 12. 3. Append to values the result of converting V to IDL type type.
          checkType(V, t) // TODO: should raise a type error on mismatch
      }
// 12. 4. Set i to i + 1.
i = i + 1
}
var resS: Set[JList[WArgument]] = Set()
// 13. If i = d, then:
// 13. 1. Let V be argi.
val V = argObj(i.toString)
val undefval = V.objval.value.pvalue.undefval
val nullval = V.objval.value.pvalue.nullval
val boolval = V.objval.value.pvalue.boolval
val strval = V.objval.value.pvalue.strval
val numval = V.objval.value.pvalue.numval
val locset = V.objval.value.locset
// 13. 2. If V is null or undefined, and any entry in S has a nullable type or a union type
// that includes a nullable type at position i of its type list, then remove from S all other entries.
if (undefval.isTop || nullval.isTop) {
resS ++= S.filter(params => params.get(i) match {
case SWArgument(_, attrs, t, _, _) if attrs.exists(attr => attr.isInstanceOf[WEAQuestion]) =>
true
case _ =>
false
})
}
// 13. 3. Otherwise: if V is a platform object – but not a platform array object – and there is an entry in S that has one of the following types at position i of its type list,
// * an interface type that V implements
// * object
// * a nullable version of any of the above types
// * a union type or a nullable union type that has one of the above types in its flattened member types
// then remove from S all other entries.
// 13. 4. Otherwise: if V is a platform array object, a native Array object, or a platform object that supports indexed properties; and there is an entry in S that has one of the following types at position i of its type list,
// * an array type
// * a sequence type
// * object
// * a nullable version of any of the above types
// * a union type or nullable union type that has one of the above types in its flattened member types
// then remove from S all other entries.
// 13. 5. Otherwise: if V is a Date object and there is an entry in S that has one of the following types at position i of its type list,
// * Date
// * object
// * a nullable version of either of the above types
// * a union type or nullable union type that has one of the above types in its flattened member types
// then remove from S all other entries.
// 13. 6. Otherwise: if V is any other type of object and there is an entry in S that has one of the following types at position i of its type list,
// * a callback interface type
// * a callback function type
// * a dictionary type
// * object
// * a nullable version of any of the above types
// * a union type or nullable union type that has one of the above types in its flattened member types
// then remove from S all other entries.
// 13. 7. Otherwise: if there is an entry in S that has one of the following types at position i of its type list,
// * DOMString
// * an enumeration type
// * a nullable version of any of the above types
// * a union type or nullable union type that has one of the above types in its flattened member types
// then remove from S all other entries.
// 13. 8. Otherwise: if there is an entry in S that has one of the following types at position i of its type list,
// * a primitive type
// * a nullable primitive type
// * a union type or nullable union type that has one of the above types in its flattened member types
// then remove from S all other entries.
// 13. 9. Otherwise: if there is an entry in S that has any at position i of its type list, then remove from S all other entries.
// 13. 10. Otherwise: throw a TypeError.
// 14. While i < argcount:
// 14. 1. Let V be argi.
// 14. 2. Let type be the type at index i in the type list of the single entry in S.
// 14. 3. Append to values the result of converting V to IDL type type.
// 14. 4. Set i to i + 1.
// 15. Let callable be the operation or extended attribute of the single entry in S.
// 16. While i is less than the length of the number of arguments callable is declared to take, and the argument at index i is declared with a default value:
// 16. 1. Append to values that default value.
// 16. 2. Set i to be i + 1.
// 17. Return the pair <callable, values>.
    // NOTE: steps 13.3-17 above are not implemented yet; the remaining
    // candidate set is returned as-is (resS is currently unused).
    S
}
/*
* checkType: check if [given] type is matched with [expected] type
* Actually, argState, argObj are arguments of checkType
*/
def checkType(given: PropValue, expected: WType): (Boolean, String) = { // TODO: detect given type
val undefval = given.objval.value.pvalue.undefval
val nullval = given.objval.value.pvalue.nullval
val boolval = given.objval.value.pvalue.boolval
val strval = given.objval.value.pvalue.strval
val numval = given.objval.value.pvalue.numval
val locset = given.objval.value.locset
val notAllBottom = !undefval.isBottom || !nullval.isBottom || !boolval.isBottom || !strval.isBottom || !numval.isBottom || !locset.isEmpty
val notInf = !(numval == PosInf || numval == NegInf || numval == NaN || numval == Infinity)
val notOtherStr = !(strval.isAllOthers)
val intResult = // TODO: consider object case (it depends the result of toString() or valueOf()
if (soundness)
locset.isEmpty
else
notAllBottom
val floatResult = // TODO: consider object case (it depends the result of toString() or valueOf(), and empty string
if (soundness)
undefval.isBottom && !(strval </ NumStr) && notInf && locset.isEmpty
else
!nullval.isBottom || !boolval.isBottom || (!strval.isBottom && notOtherStr) || !numval.isBottom || !locset.isEmpty
if (WIDLHelper.isNullable(expected) && !nullval.isBottom) return (true, "Null")
def checkArrayType(elty: WType): Boolean = {
var result = !locset.isEmpty
val h: Heap = argState.heap
for (loc <- locset) {
val locres: Boolean = (BoolTrue <= Helper.IsArray(h, loc)) match {
case true => {
val _arrSize = h(loc)("length").objval.value.pvalue.numval
var locres = true
if (_arrSize.isBottom) {
// not an array
locres = false
} else {
try {
val arrSize = _arrSize.getConcreteValueAsString("0").toString.toInt
                  // TODO: this needs to be checked.
locres = locres && checkType(h(loc)(Str_default_number), elty)._1
for (idx <- (0 until arrSize)) {
// if (AbsentTop </ h(loc).lookup(idx.toString)._2) {
locres = locres && checkType(h(loc)(idx.toString), elty)._1
// }
}
} catch {
case e: Throwable => {
// if (AbsentTop </ h(loc).lookup(Str_default_number)._2)
locres = locres && checkType(h(loc)(Str_default_number), elty)._1
for (prop <- argState.heap(loc).getProps) {
try {
prop.toInt // check if prop is integer or not
locres = locres && checkType(h(loc)(prop), elty)._1
} catch {
case e: Throwable =>
}
}
}
}
}
locres
}
case false => false // not an array
}
result = result && locres // we report errors only if there are errors in all possible locations
}
result
}
expected match {
case SWAnyType(info, suffix) => (true, "DEBUG")
case SWNamedType(info, suffix, name2) => name2 match {
case "any" => (true, "any")
case "boolean" => (true, "boolean")
case "byte" => (intResult, "byte")
case "octet" => (intResult, "octet")
case "short" => (intResult, "short")
case "unsigned short" => (intResult, "unsigned short")
case "long" => (intResult, "long")
case "unsigned long" => (intResult, "unsigned long")
case "long long" => (intResult, "long long")
case "unsigned long long" => (intResult, "unsigned long long")
case "float" => (floatResult, "float")
case "unrestricted float" => (floatResult, "unrestricted float")
case "double" => (floatResult, "double")
case "unrestricted double" => (floatResult, "unrestricted double")
case "DOMString" => (true, "DOMString")
case "Date" => {
var result: Boolean = true
for (loc <- locset) {
if (AbsString.alpha("Date") </ argState.heap(loc)("@class")._2._1._5) {
result = false
}
}
(result, "Date")
}
case _enum_ if WIDLTypeMap.enumMap.contains(_enum_) => {
//println("enum " + _enum_)
val enum = WIDLTypeMap.enumMap(_enum_)
toDOMString(given) match {
case Some(str) =>
(contains(enum, str), _enum_)
case None => // cannot know
(!soundness, _enum_)
case null => // buggy case
(false, _enum_)
}
}
case _dic_ if WIDLTypeMap.dictionaryMap.contains(_dic_) => {
//println("dic " + _dic_)
val dic = WIDLTypeMap.dictionaryMap(_dic_)
var result = true
for (loc <- locset) {
var locres: Boolean = true // check if there are absent properties in this loc
for (prop <- argState.heap(loc).getProps) {
getFromDic(dic, prop) match {
case None => // no property named "[prop]" in the [dic]tionary
locres = false
case Some(mem) => // check if the type of the [mem]ber fits with the specification
locres = locres && checkType(argState.heap(loc)(prop), mem.getTyp)._1
}
}
result = result && locres // we report errors only if there are errors in all possible locations
}
(result, _dic_)
}
case _interface_ if WIDLTypeMap.interfaceMap.contains(_interface_) => { // callback without FunctionOnly
//println("interface " + _interface_)
val interface = WIDLTypeMap.interfaceMap(_interface_)
var result = false
val interfaceMemberTypeMap = new MHashMap[String, Option[WType]]
for (mem <- toList(interface.getMembers)) {
mem match {
case SWConst(_, _, t, n, _) => interfaceMemberTypeMap.update(n, Some(t))
case SWAttribute(_, _, t, n, _) => interfaceMemberTypeMap.update(n, Some(t))
case SWOperation(_, _, _, t, n, args, _) => n match {
case Some(n) => interfaceMemberTypeMap.update(n, None)
case _ =>
}
case _ =>
}
}
/* checkWithProto:
* follows every possible prototype chain, and checks if every specified property exists
* it returns false if every possible prototype chain has faults
*/
def checkWithProto(ls: LocSet, props: Set[String]): Boolean = {
if (ls.isEmpty) {
!interfaceMemberTypeMap.keySet.exists(attr => !props.contains(attr))
} else {
ls.foldLeft[Boolean](false)((res: Boolean, loc: Loc) =>
argState.heap(loc).getProps.foldLeft[Boolean](true)((typecorrect:Boolean, prop:String) =>
interfaceMemberTypeMap.get(prop) match {
case Some(typ) => typ match {
case Some(typ) =>
typecorrect && checkType(argState.heap(loc)(prop), typ)._1
case None =>
typecorrect // TODO? check function type
} // type check for properties that the current location has
case None => typecorrect
}) match { // if the current object has no problem, check the prototype objects
case true => res || checkWithProto(argState.heap(loc)("@proto").objval.value.locset, props ++ argState.heap(loc).getProps)
case false => false
})
}
}
result = checkWithProto(locset, Set())
(result, _interface_)
}
case _callback_ if WIDLTypeMap.callbackMap.contains(_callback_) => { // callback with functionOnly
//println("callback " + _callback_)
val callback = WIDLTypeMap.callbackMap(_callback_)
var result = false // report a bug only if every locset has bug
for (loc <- locset) {
if (BoolTrue <= Helper.IsCallable(argState.heap, loc)) result = true // acceptable if [loc] is a function
}
(result, _callback_)
}
case _typedef_ if WIDLTypeMap.typedefMap.contains(_typedef_) => {
//println("typedef " + _typedef_)
val typedef = WIDLTypeMap.typedefMap(_typedef_)
(checkType(given, typedef.getTyp)._1, _typedef_)
}
      case typ => { // if the type in the specification is not primitive
(true, typ)
}
}
case typ@SWArrayType(info, suffix, type2) =>
(checkArrayType(type2), getType(typ).getOrElse("[]"))
case typ@SWSequenceType(info, suffix, type2) =>
(checkArrayType(type2), getType(typ).getOrElse("[]"))
case SWUnionType(info, suffix, types) => (true, "DEBUG")
}
}
def dotToStr(dot: LHS): Option[String] = dot match {
case SDot(_, d: Dot, SId(_, x, _, _)) => dotToStr(d) match {
case Some(str) => Some(str + "." + x)
case None => None
}
case SDot(_, SVarRef(_, SId(_, o, _, _)), SId(_, x, _, _)) => Some(o + "." + x)
case SVarRef(_, SId(_, x, _, _)) => Some(x)
case SBracket(_, o, i) => i match {
case SStringLiteral(_, _, idxS) => dotToStr(o) match {
case Some(objS) => Some(objS + "." + idxS)
case _ => None
}
case _ => None
}
case _ => None
}
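  // e.g. for the AST of `webapis.tv.channel`, dotToStr returns
  // Some("webapis.tv.channel"); shapes it cannot flatten (calls, computed
  // indices, etc.) yield None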
// consumes a set and provides an enumeration of that (ex. Set(1, 2, 3) => "1, 2, 3")
// corner case: empty set Set() => ""
def setToStr(st: Set[String]): String = {
if (st.isEmpty) ""
    else {
      var res: String = ""
      var flag: Boolean = false
      for (str <- st) {
        if (flag) res += ", "
        res += str
        flag = true // a separator is needed before every subsequent element
      }
res
}
}
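  // e.g. setToStr(Set("byte", "octet")) == "byte, octet", setToStr(Set()) == ""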
private var nestedTries = 0
def initAll = {
// Initialize variables
nestedTries = 0
}
def doit(_program: Program, libraries: List[String] = null) = {
var program = _program
// Set libraries (database)
if(libraries != null) WIDLTypeMap.setLibraries(libraries)
// Check
walkUnit(program)
}
/* error-related things */
val errSpace = "\\n "
def reportMessages(res: (Boolean, Boolean, List[() => Unit])): Unit = {
res._3.foreach(msg => msg())
}
def isAPI(obj: LHS): Boolean = obj match {
case SDot(_, dot, _) => isAPI(dot)
case SVarRef(_, SId(_, x, _, _)) if x.equals("webapis") =>
WIDLTypeMap.typedbs.contains(("WindowWebAPIs", "webapis"))
case SVarRef(_, SId(_, x, _, _)) if x.equals("tizen") =>
WIDLTypeMap.typedbs.contains(("TizenObject", "tizen"))
case _ => false
}
/*
   * checkArgs: checks whether the [arg]uments fit the definition of the [param]eters in the WIDL spec
   * and produces 1. a boolean indicating whether any errors occurred
   *              2. a boolean indicating whether any warnings exist
   *              3. a list of thunks that emit the error / warning messages
*/
def checkArgsCommon(span: Span, name: String, argEnvs: List[Pair[Obj, State]], _S: List[JList[WArgument]])
: (Boolean, Boolean, List[() => Unit]) = {
for (env <- argEnvs) {
argObj = env._1
argState = env._2
val S = overloadResolution(_S, argObj)
// TODO: implement
}
(false, false, List())
}
def checkArgsCommon(span: Span, name: String, argEnvs: List[Pair[Obj, State]], params: JList[WArgument])
: (Boolean, Boolean, List[() => Unit]) = {
var errors: Boolean = false
var warnings: Boolean = false
var msgs: List[() => Unit] = List()
var numOfOptional: Int = 0
val numOfParameter: Int = params.size
val numOfArgument: Int = argEnvs.foldLeft(-1)((base, env) => env._1("length")._1._1._1._4.getSingle match {
case Some(d) => d.toInt
case _ => base
})
for (i <- 0 until numOfParameter) params.get(i) match {
case SWArgument(_, attrs, t, _, _) =>
if (i < numOfArgument) {
// check for every locset
          var global_result: Boolean = soundness || argEnvs.size == 0 // true => this parameter type-checks, so no bug is reported
var global_detected: Set[String] = Set() // detected types for this parameter
val expected_type: Option[String] = getType(t)
for (env <- argEnvs) {
argObj = env._1
argState = env._2
var local_result = (false, "?")
if (argObj != null) {
val objTuple = argObj(i.toString)
if (objTuple </ PropValueBot) local_result = checkType(objTuple, t)
else local_result = (false, getType(t).get)
}
if (soundness && !local_result._1) {
// if soundness==true, reports error if a locset has error
// report
global_result = false
global_detected += local_result._2
}
if (!soundness) {
// if soundness==false, reports error only if every locset has error
if (local_result._1) {
// not report (existing normal cases)
global_result = true
} else {
global_detected += local_result._2
}
}
}
if (!global_result) {
errors = true
msgs :+= (() => {
bugStorage.addMessage(span, WebAPIWrongArgType, null, null, i + 1, name, expected_type.getOrElse(""))
})
//expected_type.getOrElse(""), setToStr(global_detected), i+1))
}
}
        if (attrs.exists(isOptional)) { // the parameter is declared optional
          numOfOptional = numOfOptional + 1
          // warn if an optional error callback was omitted by the caller
          if (i >= numOfArgument && isErrorCallback(t)) {
warnings = true
msgs :+= (() => { bugStorage.addMessage(span, WebAPIMissingErrorCB, null, null, name) })
}
}
}
if (numOfArgument != -1 && (numOfParameter - numOfOptional > numOfArgument || numOfArgument > numOfParameter)){
errors = true
if (numOfOptional != 0)
msgs :+= (() => { bugStorage.addMessage(span, WebAPIWrongArgs, null, null, name, numOfArgument, "from %s to %s".format(numOfParameter - numOfOptional, numOfParameter)) })
else
msgs :+= (() => { bugStorage.addMessage(span, WebAPIWrongArgs, null, null, name, numOfArgument, "of %s".format(numOfParameter)) })
}
(errors, warnings, msgs)
}
def checkArgs(fa: LHS, name: String, params: List[JList[WArgument]])
: (Boolean, Boolean, List[() => Unit]) = {
val span: Span = fa.getInfo.getSpan
var argEnvs: List[Pair[Obj, State]] = List()
NodeRelation.ast2cfgMap.get(fa) match {
case Some(cfgList) => {
def aux(inst: CFGInst, arguments: CFGExpr) = {
val cfgNode = cfg.findEnclosingNode(inst)
val cstate = stateManager.getInputCState(cfgNode, inst.getInstId, _MOST_SENSITIVE)
for ((callContext, state) <- cstate) {
val argLocSet = SE.V(arguments, state.heap, state.context)._1.locset
for (argLoc <- argLocSet) {
argEnvs :+= (state.heap(argLoc), state)
}
}
}
for (cfgInst <- cfgList) {
cfgInst match {
case inst@CFGCall(_, _, _, _, arguments, _, _) =>
aux(inst, arguments)
case inst@CFGConstruct(_, _, _, _, arguments, _, _) =>
aux(inst, arguments)
case _ =>
}
}
}
case None =>
}
checkArgsCommon(span, name, argEnvs, params)
}
def checkArgs(fa: LHS, name: String, params: JList[WArgument], args: List[Expr])
: (Boolean, Boolean, List[() => Unit]) = {
val span: Span = fa.getInfo.getSpan
var argEnvs: List[Pair[Obj, State]] = List()
NodeRelation.ast2cfgMap.get(fa) match {
case Some(cfgList) => {
def aux(inst: CFGInst, arguments: CFGExpr) = {
val cfgNode = cfg.findEnclosingNode(inst)
val cstate = stateManager.getInputCState(cfgNode, inst.getInstId, _MOST_SENSITIVE)
for ((callContext, state) <- cstate) {
val argLocSet = SE.V(arguments, state.heap, state.context)._1.locset
for (argLoc <- argLocSet) {
argEnvs :+= (state.heap(argLoc), state)
}
}
}
for (cfgInst <- cfgList) {
cfgInst match {
case inst@CFGCall(_, _, _, _, arguments, _, _) =>
aux(inst, arguments)
case inst@CFGConstruct(_, _, _, _, arguments, _, _) =>
aux(inst, arguments)
case _ =>
}
}
}
case None =>
}
checkArgsCommon(span, name, argEnvs, params)
}
def checkArgs(span: Span, name: String, args: CFGExpr, cstate: CState)
: Unit = {
if (name.endsWith(".constructor")) return
var argEnvs: List[Pair[Obj, State]] = List()
for ((callContext, state) <- cstate) {
val argLocSet = SE.V(args, state.heap, state.context)._1.locset
for (argLoc <- argLocSet) {
argEnvs :+= (state.heap(argLoc), state)
}
}
val splitByDot: Array[String] = name.split('.')
val interface: String = splitByDot(0)
val func: String = splitByDot(splitByDot.size - 1)
val winterf: Option[(String, WInterfaceMember)] = WIDLTypeMap.getMembers(interface).find(p => p._2._1.equals(func)) match {
case Some(pair) => Some(pair._2)
case None => None
}
val params: JList[WArgument] = winterf match {
// Now, we get the arguments of the API
case Some((_, op: WOperation)) => op.getArgs
case _ => null
}
val result = checkArgsCommon(span, name, argEnvs, params)
reportMessages(result)
}
def ConstructorToStr(lst: MListBuffer[WEAConstructor], space: String): String = {
var result: String = ""
for (constructor <- lst) {
result += space + WIDLToString.walk(constructor)
}
result
}
def isWrappedByTry(node: kr.ac.kaist.jsaf.nodes.Node): Boolean = NodeRelation.astParentMap.get(node) match {
case Some(par) => par match {
case STry(_, _, _, _) => true
case _ => isWrappedByTry(par)
}
case None => false
}
def exceptionHandlingCheck(span: Span, node: FunApp, funName: String): Unit = {
if (funName.endsWith(".constructor")) return
val splitByDot: Array[String] = funName.split('.')
val interface: String = splitByDot(0)
val func: String = splitByDot(splitByDot.size - 1)
val winterf: Option[(String, WInterfaceMember)] = WIDLTypeMap.getMembers(interface).find(p => p._2._1.equals(func)) match {
case Some(pair) => Some(pair._2)
case None => None
}
winterf match {
case Some((_, op: WOperation)) if !op.getExns.isEmpty =>
if (!isWrappedByTry(node)) {
bugStorage.addMessage(span, WebAPINoExceptionHandling, null, null, funName)
}
case _ =>
}
}
override def walkUnit(node: Any): Unit = node match {
/*
* webapis.tv.channel.tuneUp
*
* WindowWebAPIs defines webapis of type WebAPIs
* WebAPIs implements { WebAPIsTVObject }
* WebAPIsTVObject defines tv of type TV
* TV implements { WebAPIsTVChannelManager }
* WebAPIsTVChannelManager defines channel of type TVChannelManager
* TVChannelManager defines { tuneUp, ... }
*/
case fa@SDot(span, obj, SId(_, x, _, _)) if (isAPI(obj)) => {
val api = dotToStr(obj) match {
case Some(apis) => apis
case _ => obj.toString
}
WIDLTypeMap.getAPI(obj, x) match {
case Some(typ) =>
WIDLTypeMap.getMembers(typ).find(p => p._2._1.equals(x)) match {
case Some(pair) =>
case None => bugStorage.addMessage(span.getSpan, WebAPIInvalidNamespace, null, null, x, typ)
}
case _ =>
// obj is not an API
//bugStorage.addMessage(span.getSpan, WebAPIInvalidNamespace, null, null, x, api)
}
}
case fa@SFunApp(span, fun@SDot(_, obj, SId(_, x, _, _)), args) => WIDLTypeMap.getAPI(obj, x) match {
case Some(typ) => WIDLTypeMap.getMembers(typ).find(p => p._2._1.equals(x)) match {
case Some(pair) => pair._2 match {
case (propName, op: WOperation) =>
val funName = op.getName.isSome match {
case true =>
val isCallback = WIDLHelper.isCallback(WIDLTypeMap.interfaceMap(typ))
val isStatic = WIDLHelper.isStatic(op)
(isCallback | isStatic) match {
case true => typ + "." + propName
case false => typ + ".prototype." + propName
}
case _ => typ + "." + propName
}
if (!op.getExns.isEmpty && nestedTries == 0)
bugStorage.addMessage(span.getSpan, WebAPINoExceptionHandling, null, null, funName)
args.foreach(walkUnit)
}
case _ => super.walkUnit(fa)
}
case _ => super.walkUnit(fa)
}
case nw@SNew(span, fa@SFunApp(_, SVarRef(_, SId(_, f, _, _)), args))
if f.startsWith("<>webapis_") || f.startsWith("<>tizen_") => {
val fname = if (f.startsWith("<>webapis_")) f.drop(10) else if (f.startsWith("<>tizen_")) f.drop(8) else f
WIDLTypeMap.constructorMap.get(fname) match {
case Some(constructorList) =>
var result: (Boolean, Boolean, List[() => Unit]) = null
//checkArgs(nw, fname, constructorList.map(f => f.getArgs).toList)
for (constructor <- constructorList){
val temp: (Boolean, Boolean, List[() => Unit]) = checkArgs(nw, fname, constructor.getArgs, args)
if (!temp._1 || constructorList.size == 1){ // if there is no error or #(possible constructor) is 1
if (result == null)
result = temp
//else
// System.out.format("* Warning: many constructors are possible for %s\\n", fname)
}
}
if (result != null){ // there is a matching constructor
reportMessages(result)
} else {
bugStorage.addMessage(span.getSpan, WebAPIWrongConstructor, null, null, fname, ConstructorToStr(constructorList, errSpace + " "))
}
case None =>
if (f.startsWith("<>webapis_"))
bugStorage.addMessage(span.getSpan, WebAPIInvalidNamespace, null, null, fname, "webapis")
else if (f.startsWith("<>tizen_"))
bugStorage.addMessage(span.getSpan, WebAPIInvalidNamespace, null, null, fname, "tizen")
}
args.foreach(walkUnit)
}
case STry(_, body, catchB, _) => {
nestedTries += 1
walkUnit(body)
nestedTries -= 1
walkUnit(catchB)
}
case _: Comment =>
case _ => super.walkUnit(node)
}
}
| darkrsw/safe | src/main/scala/kr/ac/kaist/jsaf/bug_detector/WIDLChecker.scala | Scala | bsd-3-clause | 37,903 |
package io.ecumene.worker
import org.msgpack.core.{ MessagePacker, MessageUnpacker }
import io.ecumene.core._
import io.ecumene.core.Implicits._
final class EcumeneFunctionImpl9[-T1: CanUnpack, -T2: CanUnpack, -T3: CanUnpack, -T4: CanUnpack, -T5: CanUnpack, -T6: CanUnpack, -T7: CanUnpack, -T8: CanUnpack, -T9: CanUnpack, +R: CanPack](
ecmKey: String,
localEndpoint: String,
publicEndpoint: String,
val func: (T1, T2, T3, T4, T5, T6, T7, T8, T9) => R
) extends EcumeneFunctionImpl(
ecmKey,
localEndpoint,
publicEndpoint,
{ (unpacker, packer) =>
implicit val unpk = unpacker
if (unpacker.unpackArrayHeader != 9) {
throw new IllegalArgumentException
}
val r = func(unpack[T1].get, unpack[T2].get, unpack[T3].get, unpack[T4].get, unpack[T5].get, unpack[T6].get, unpack[T7].get, unpack[T8].get, unpack[T9].get)
implicit val pk = packer
pack(r)
}
) with Function9[T1, T2, T3, T4, T5, T6, T7, T8, T9, R] {
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9): R = func(v1, v2, v3, v4, v5, v6, v7, v8, v9)
}
| ZizhengTai/ecumene-scala | src/main/scala/EcumeneFunctionImpl9.scala | Scala | lgpl-3.0 | 1,084 |
package com.twitter.finagle.mux.lease
import java.lang.management.GarbageCollectorMXBean
/**
* This is the experimental package of mux.lease. Right now, this is all
* experimental code around leasing, especially leasing around garbage
* collections. We haven't hammered out exactly what the api will be, so it's
* in large part private and is subject to change.
*
* If you want to experiment with GC avoidance, you need to turn on the flag
* `com.twitter.finagle.mux.lease.exp.drainerEnabled` to enable GC avoidance,
* and right now it only works with parallel old and CMS. So far, we've seen
* promising results with GC avoidance locally, but haven't had the opportunity
* to run scientific load tests when simulating production load.
*
* It can be useful to turn on the
* `com.twitter.finagle.mux.lease.exp.drainerDebug` flag to get much more
* granular data about what is going on. Turning on this flag will log stats
* for each garbage collection avoidance attempt. Especially interesting is to
* see how long it takes for a server to drain, how many outstanding requests
* there still are, and whether any GCs were natural instead of triggered. If
* there are still outstanding requests when the GC is triggered, it might make
* sense to turn on nacking, or allow more time for draining.
*
* You can turn on nacking for a server after the lease has expired by turning
* on the flag `com.twitter.finagle.mux.lease.exp.nackOnExpiredLease`. The way
* this behaves is that it nacks all requests which come in after a 0 lease has
* been issued. This has two effects--it corrects behavior for clients which are
* slow to respect leases and turns on failure accrual for clients which
* continue to send requests. One possible area of future work would be
* allowing consumers to configure whether or not Finagle's failure accrual
* mechanism considers these nacks to be failures. Another possible improvement
* would be to nack requests which are already partially completed, instead of
* just new incoming requets.
*
* The `com.twitter.finagle.mux.lease.exp.drainerDiscountRange` flag modulates
* how the server chooses the discount, which is when it expires the lease. The
* left is the absolute lower bound on the range from which it will select
* randomly. The right is the absolute upper bound on the range from which it
* will select randomly. It chooses a uniform random number of bytes from
* between the upper bound and the max of the percentile number of bytes
* generated in a single request and the lower bound. The absolute lower bound
* does double duty--it is also used for choosing when to stop waiting for
* stragglers and run a GC regardless.
*
* The `com.twitter.finagle.mux.lease.exp.drainerPercentile` flag specifies the
* percentile of the incoming request distribution that will be chosen for
* deciding how long it takes to handle a single request.
*
* If you're not working with a JDK which supports System.minorGc, GCA will not
* work properly, since it will not just run a minor GC when you trigger
* System.gc. Although the behavior will continue to be correct, it will be
* less efficient. Assuming you're draining properly, your pause times won't
* affect your latency, but you will have lower throughput, since your server
 * will spend more time not receiving traffic. One possible direction for
* further work is to allocate big arrays until you trigger a minor GC, but this
* must be done slightly delicately to avoid triggering a major collection.
*
* The drainerDiscountRange is of insufficient fidelity. The biggest problem is
* that it doesn't make sense to conflate the hard cutoff for triggering a GC
* with the drainerDiscountRange minimum, so it should probably be separated
* out.
*
* Observing leases on the client side can be made slightly smarter too. It
* might make sense to have some kind of communication which encourages clients
* to stop sending requests that it guesses will soon fail (for leases that will
* soon expire).
*
* NB: large parts of this package might suddenly end up in util-jvm
*/
package object exp {
implicit def gcMxBeanToGc(coll: GarbageCollectorMXBean): GarbageCollectorAddable =
new GarbageCollectorAddable(coll)
class GarbageCollectorAddable(self: GarbageCollectorMXBean) {
def +(other: GarbageCollectorMXBean): GarbageCollectorMXBean = new GarbageCollectorMXBean {
def getCollectionCount() =
self.getCollectionCount() + other.getCollectionCount()
def getCollectionTime() =
self.getCollectionTime() + other.getCollectionTime()
def getMemoryPoolNames() =
Array.concat(self.getMemoryPoolNames(), other.getMemoryPoolNames())
def getName() = self.getName() + "+" + other.getName()
def isValid() = self.isValid || other.isValid
def getObjectName = throw new UnsupportedOperationException
}
}
}
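
// Hedged usage sketch (illustrative, not part of Finagle): summing two
// collector beans via the implicit `+` above to read aggregate GC counts,
// e.g. across the young- and old-generation collectors.
object GcBeanSumSketch extends App {
  import java.lang.management.ManagementFactory
  import scala.collection.JavaConverters._
  import com.twitter.finagle.mux.lease.exp._

  ManagementFactory.getGarbageCollectorMXBeans.asScala.toList match {
    case a :: b :: _ =>
      val combined = a + b // via the gcMxBeanToGc implicit conversion
      println(s"${combined.getName}: ${combined.getCollectionCount} collections " +
        s"in ${combined.getCollectionTime} ms")
    case _ =>
      println("fewer than two collectors reported by this JVM")
  }
}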
| latur19318/finagle | finagle-mux/src/main/scala/com/twitter/finagle/mux/lease/exp/package.scala | Scala | apache-2.0 | 4,927 |
/*
* This file is part of Kiama.
*
* Copyright (C) 2008-2015 Anthony M Sloane, Macquarie University.
*
* Kiama is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* Kiama is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see
* <http://www.gnu.org/licenses/>.
*/
package org.kiama
package example.dataflow
/**
* Imperative language AST for dataflow example.
*/
object DataflowTree {
import org.kiama.relation.Tree
type DataflowTree = Tree[Stm,Stm]
type Var = String
abstract class Stm extends Product
case class Assign (left : Var, right : Var) extends Stm
case class While (cond : Var, body : Stm) extends Stm
case class If (cond : Var, tru : Stm, fls : Stm) extends Stm
case class Block (stms : List[Stm]) extends Stm
case class Return (ret : Var) extends Stm
case class Empty () extends Stm
}
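
// Hedged sketch (illustrative): building a tiny statement with the AST above
// and wrapping it in a DataflowTree. Assumes Kiama's `Tree(root)` constructor.
object DataflowTreeSketch {
  import org.kiama.example.dataflow.DataflowTree._
  import org.kiama.relation.Tree

  // while (c) { a := b; return a }
  val prog: Stm = While("c", Block(List(Assign("a", "b"), Return("a"))))
  val tree: DataflowTree = new Tree[Stm, Stm](prog)
}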
| solomono/kiama | library/src/org/kiama/example/dataflow/DataflowTree.scala | Scala | gpl-3.0 | 1,384 |
package fr.inria.spirals.sigma.ttc14.fixml.objlang.support
import fr.inria.spirals.sigma.ttc14.fixml.objlang.StringLiteral;
import fr.unice.i3s.sigma.support.EMFProxyBuilder;
import fr.unice.i3s.sigma.support.EMFScalaSupport;
trait StringLiteralScalaSupport extends EMFScalaSupport {
type StringLiteral = fr.inria.spirals.sigma.ttc14.fixml.objlang.StringLiteral
protected implicit val _stringliteralProxyBuilder = new EMFProxyBuilder[StringLiteral](ObjLang._objlangBuilder)
object StringLiteral {
def apply(value: String = null): StringLiteral = {
val _instance = ObjLang._objlangBuilder.create[StringLiteral]
if (value != null) _instance.setValue(value)
_instance
}
def unapply(that: StringLiteral): Option[(String)] =
Some((that.getValue))
}
}
object StringLiteralScalaSupport extends StringLiteralScalaSupport
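
// Hedged usage sketch (commented out): the factory and extractor above allow
//
//   import StringLiteralScalaSupport._
//   val lit = StringLiteral(value = "FIX.4.4")
//   lit match { case StringLiteral(v) => println(v) }
//
// where the underlying instance is created through ObjLang's EMF builder.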
| fikovnik/ttc14-fixml-sigma | ttc14-fixml-extension-3/src-gen/fr/inria/spirals/sigma/ttc14/fixml/objlang/support/StringLiteralScalaSupport.scala | Scala | epl-1.0 | 886 |
/**
* MIT License
*
* Copyright (c) 2016-2018 James Sherwood-Jones <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.jsherz.luskydive.util
import java.util.UUID
import spray.json.{DeserializationException, JsString, JsValue, RootJsonFormat}
/**
* Used to (de)serialize [[UUID]]s.
*/
object UuidJsonFormat extends RootJsonFormat[UUID] {
/**
* Take a raw UUID string and parse it into a UUID.
*
    * @param json a [[JsString]] holding the UUID in its canonical text form
    * @return the parsed [[UUID]]
*/
override def read(json: JsValue): UUID = json match {
case JsString(rawUuid) => UUID.fromString(rawUuid)
case _ => throw new DeserializationException("UUID must be a string in the format 98774829-b6a5-4299-b87c-d6038bacaf8e")
}
/**
* Produces a JsString in the following format:
*
* 98774829-b6a5-4299-b87c-d6038bacaf8e
*
    * @param obj the UUID to serialize
    * @return a [[JsString]] containing the UUID's canonical text form
*/
override def write(obj: UUID): JsValue = {
JsString(obj.toString)
}
}
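
// Hedged usage sketch (illustrative): round-tripping a UUID through the format
// above. Assumes spray-json on the classpath.
object UuidJsonFormatSketch extends App {
  import spray.json._

  implicit val uuidFormat: RootJsonFormat[UUID] = UuidJsonFormat

  val id = UUID.fromString("98774829-b6a5-4299-b87c-d6038bacaf8e")
  val json = id.toJson // JsString("98774829-b6a5-4299-b87c-d6038bacaf8e")
  assert(json.convertTo[UUID] == id)
  println(json.compactPrint)
}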
| jSherz/lsd-members | backend/src/main/scala/com/jsherz/luskydive/util/UuidJsonFormat.scala | Scala | mit | 2,019 |
/* Copyright 2017-19, Emmanouil Antonios Platanios. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.platanios.tensorflow.api.io.events
import org.platanios.tensorflow.api.io.DirectoryLoader
import org.platanios.tensorflow.api.utilities.Reservoir
import org.platanios.tensorflow.proto._
import com.google.protobuf.ByteString
import com.typesafe.scalalogging.Logger
import org.slf4j.LoggerFactory
import java.nio.file.{Files, Path}
import scala.collection.compat._
import scala.collection.mutable
import scala.jdk.CollectionConverters._
/** Accumulates event values collected from the provided path.
*
* The [[EventAccumulator]] is intended to provide a convenient interface for loading event data written during a
* TensorFlow run (or otherwise). TensorFlow writes out event ProtoBuf objects, which have a timestamp and step number
* associated with them, and often also contain a [[Summary]]. Summaries can store different kinds of data like a
* scalar value, an image, audio, or a histogram. Each summary also has a tag associated with it, which we use to
* organize logically related data. The [[EventAccumulator]] supports retrieving the event and summary data by their
* tags.
*
* Calling `tags` returns a map from event types to the associated tags for those types, that were found in the loaded
* event files. Then, various functional endpoints (e.g., `scalars(tag)`) allow for the retrieval of all data
* associated with each tag.
*
* The `reload()` method synchronously loads all of the data written so far.
*
* @param path Path to a directory containing TensorFlow events files, or a single TensorFlow
* events file. The accumulator will load events from this path.
* @param sizeGuidance Information on how much data the event accumulator should store in memory. The
* default size guidance tries not to store too much so as to avoid consuming all of
* the client's memory. The `sizeGuidance` should be a map from event types to integers
* representing the number of items to keep in memory, per tag for items of that event
* type. If the size is `0`, then all events are stored. Images, audio, and histograms
* tend to be very large and thus storing all of them is not recommended.
* @param histogramCompressionBps Information on how the event accumulator should compress histogram data for the
* [[CompressedHistogramEventType]] event type.
* @param purgeOrphanedData Boolean value indicating whether to discard any events that were "orphaned" by a
* TensorFlow restart.
*
* @author Emmanouil Antonios Platanios
*/
case class EventAccumulator(
path: Path,
sizeGuidance: Map[EventType, Int] = EventAccumulator.DEFAULT_SIZE_GUIDANCE,
histogramCompressionBps: Seq[Int] = EventAccumulator.DEFAULT_HISTOGRAM_COMPRESSION_BPS,
purgeOrphanedData: Boolean = true) {
private[this] val eventLoader: () => Iterator[Event] = EventAccumulator.eventLoaderFromPath(path)
private[this] object EventLoaderLock
private[this] var _firstEventTimeStamp: Double = -1.0
private[this] var _fileVersion : Float = -1.0f
private[this] var _mostRecentWallTime: Double = -1L
private[this] var _mostRecentStep : Long = -1L
private[this] val _actualSizeGuidance = EventAccumulator.DEFAULT_SIZE_GUIDANCE ++ sizeGuidance
  private[this] val _reservoirs: Map[EventType, Reservoir[String, _ <: EventRecord[_]]] = Map(
    ScalarEventType -> Reservoir[String, ScalarEventRecord](_actualSizeGuidance(ScalarEventType)),
    ImageEventType -> Reservoir[String, ImageEventRecord](_actualSizeGuidance(ImageEventType)),
    AudioEventType -> Reservoir[String, AudioEventRecord](_actualSizeGuidance(AudioEventType)),
    HistogramEventType -> Reservoir[String, HistogramEventRecord](_actualSizeGuidance(HistogramEventType)),
    // A tensor reservoir is required by `tensors(tag)`, `tags`, and `processEvent`.
    TensorEventType -> Reservoir[String, TensorEventRecord](_actualSizeGuidance(TensorEventType)),
    CompressedHistogramEventType -> Reservoir[String, CompressedHistogramEventRecord](
      _actualSizeGuidance(CompressedHistogramEventType), alwaysKeepLast = false)
  )
private[this] var _graphDef : ByteString = _
private[this] var _graphFromMetaGraph: Boolean = false
private[this] var _metaGraphDef : ByteString = _
private[this] var _taggedRunMetadata : Map[String, ByteString] = Map.empty[String, ByteString]
private[this] var _summaryMetadata : Map[String, SummaryMetadata] = Map.empty[String, SummaryMetadata]
// Keep a mapping from plugin name to a map from tag to plugin data content obtained from the summary metadata for
// that plugin (this is not the entire summary metadata proto - only the content for that plugin). The summary writer
// only keeps the content on the first event encountered per tag, and so we must store that first instance of content
// for each tag.
private[this] val _pluginTagContent: mutable.Map[String, mutable.Map[String, String]] = mutable.Map.empty
/** Loads all events added since the last call to `reload()` and returns this event accumulator. If `reload()` was
* never called before, then it loads all events in the path. */
def reload(): EventAccumulator = EventLoaderLock synchronized {
eventLoader().foreach(processEvent)
this
}
/** Returns the timestamp (in seconds) of the first event.
*
 * If the first event has been loaded (either by this method or by `reload()`), then this method returns immediately.
* Otherwise, it loads the first event and then returns. Note that this means that calling `reload()` will cause this
* method to block until `reload()` has finished. */
def firstEventTimeStamp: Double = {
if (_firstEventTimeStamp >= 0) {
_firstEventTimeStamp
} else {
EventLoaderLock synchronized {
try {
processEvent(eventLoader().next())
_firstEventTimeStamp
} catch {
case t: Throwable => throw new IllegalStateException("No event timestamp could be found.", t)
}
}
}
}
/** Returns all scalar events associated with the provided summary tag. */
def scalars(tag: String): List[ScalarEventRecord] = {
_reservoirs(ScalarEventType).asInstanceOf[Reservoir[String, ScalarEventRecord]].items(tag)
}
/** Returns all image events associated with the provided summary tag. */
def images(tag: String): List[ImageEventRecord] = {
_reservoirs(ImageEventType).asInstanceOf[Reservoir[String, ImageEventRecord]].items(tag)
}
/** Returns all audio events associated with the provided summary tag. */
def audio(tag: String): List[AudioEventRecord] = {
_reservoirs(AudioEventType).asInstanceOf[Reservoir[String, AudioEventRecord]].items(tag)
}
/** Returns all histogram events associated with the provided summary tag. */
def histograms(tag: String): List[HistogramEventRecord] = {
_reservoirs(HistogramEventType).asInstanceOf[Reservoir[String, HistogramEventRecord]].items(tag)
}
/** Returns all compressed histogram events associated with the provided summary tag. */
def compressedHistograms(tag: String): List[CompressedHistogramEventRecord] = {
_reservoirs(CompressedHistogramEventType).asInstanceOf[Reservoir[String, CompressedHistogramEventRecord]].items(tag)
}
/** Returns all tensor events associated with the provided summary tag. */
def tensors(tag: String): List[TensorEventRecord] = {
_reservoirs(TensorEventType).asInstanceOf[Reservoir[String, TensorEventRecord]].items(tag)
}
/** Returns the graph definition, if there is one.
*
* If the graph is stored directly, the method returns it. If no graph is stored directly, but a meta-graph is stored
* containing a graph, the method returns that graph. */
@throws[IllegalStateException]
def graph: GraphDef = {
if (_graphDef != null)
GraphDef.parseFrom(_graphDef)
else
throw new IllegalStateException("There is no graph in this event accumulator.")
}
/** Returns the meta-graph definition, if there is one. */
@throws[IllegalStateException]
def metaGraph: MetaGraphDef = {
if (_metaGraphDef != null)
MetaGraphDef.parseFrom(_metaGraphDef)
else
throw new IllegalStateException("There is no meta-graph in this event accumulator.")
}
/** Returns the run metadata associated with the provided summary tag. */
@throws[IllegalArgumentException]
def runMetadata(tag: String): RunMetadata = {
if (!_taggedRunMetadata.contains(tag))
throw new IllegalArgumentException("There is no run metadata for the provided tag name.")
RunMetadata.parseFrom(_taggedRunMetadata(tag))
}
/** Returns the summary metadata associated with the provided summary tag. */
def summaryMetadata(tag: String): SummaryMetadata = {
_summaryMetadata(tag)
}
/** Returns a map from tags to content specific to the specified plugin. */
def pluginTagToContent(pluginName: String): Option[Map[String, String]] = {
_pluginTagContent.get(pluginName).map(_.toMap)
}
/** Returns a sequence with paths to all the registered assets for the provided plugin name.
*
* If a plugins directory does not exist in the managed directory, then this method returns an empty list. This
* maintains compatibility with old log directories that contain no plugin sub-directories.
*/
def pluginAssets(pluginName: String): Seq[Path] = {
EventPluginUtilities.listPluginAssets(path, pluginName)
}
/** Retrieves a particular plugin asset from the managed directory and returns it as a string. */
def retrievePluginAsset(pluginName: String, assetName: String): String = {
EventPluginUtilities.retrievePluginAsset(path, pluginName, assetName)
}
/** Returns a map from event types to all corresponding tags that have been accumulated. */
def tags: Map[EventType, Seq[String]] = Map(
ScalarEventType -> _reservoirs(ScalarEventType).keys.toSeq,
ImageEventType -> _reservoirs(ImageEventType).keys.toSeq,
AudioEventType -> _reservoirs(AudioEventType).keys.toSeq,
HistogramEventType -> _reservoirs(HistogramEventType).keys.toSeq,
CompressedHistogramEventType -> _reservoirs(CompressedHistogramEventType).keys.toSeq,
TensorEventType -> _reservoirs(TensorEventType).keys.toSeq,
// We use a heuristic here: if a meta-graph is available, but a graph is not, then we assume that the meta-graph
// contains the graph.
    // Graph and meta-graph "tags" are pseudo-tags: a single boolean-valued entry that indicates
    // whether the corresponding definition has been loaded.
GraphEventType -> Seq((_graphDef != null).toString),
MetaGraphEventType -> Seq((_metaGraphDef != null).toString),
RunMetadataEventType -> _taggedRunMetadata.keys.toSeq
)
/** Processes a newly-loaded event. */
private[this] def processEvent(event: Event): Unit = {
if (_firstEventTimeStamp < 0)
_firstEventTimeStamp = event.getWallTime
if (event.getWhatCase == Event.WhatCase.FILE_VERSION) {
val newFileVersion = {
val tokens = event.getFileVersion.split("brain.Event:")
try {
tokens.last.toFloat
} catch {
// This should never happen according to the definition of the file version field specified in event.proto.
case _: NumberFormatException =>
EventAccumulator.logger.warn(
"Invalid event.proto file_version. Defaulting to use of out-of-order event.step logic " +
"for purging expired events.")
-1f
}
}
if (_fileVersion >= 0 && _fileVersion != newFileVersion) {
// This should not happen.
EventAccumulator.logger.warn(
"Found new file version for event. This will affect purging logic for TensorFlow restarts. " +
s"Old: ${_fileVersion}. New: $newFileVersion.")
}
_fileVersion = newFileVersion
}
maybePurgeOrphanedData(event)
// Process the event.
event.getWhatCase match {
case Event.WhatCase.GRAPH_DEF =>
// GraphDef and MetaGraphDef are handled in a special way: If no GraphDef event is available, but a MetaGraphDef is,
// and it contains a GraphDef, then we use that GraphDef for our graph. If a GraphDef event is available, then we
// always prefer it to the GraphDef inside the MetaGraphDef.
if (_graphDef != null)
EventAccumulator.logger.warn(
"Found more than one graph event per run, or there was a meta-graph containing a graph definition, " +
"as well as one or more graph events. Overwriting the graph with the newest event.")
_graphDef = event.getGraphDef
_graphFromMetaGraph = false
case Event.WhatCase.META_GRAPH_DEF =>
if (_metaGraphDef != null)
EventAccumulator.logger.warn(
"Found more than one meta-graph event per run. Overwriting the meta-graph with the newest event.")
_metaGraphDef = event.getMetaGraphDef
if (_graphDef == null || _graphFromMetaGraph) {
// We may have a GraphDef in the meta-graph. If so, and no GraphDef is directly available, we use this one
// instead.
val metaGraphDef = MetaGraphDef.parseFrom(_metaGraphDef)
if (metaGraphDef.hasGraphDef) {
if (_graphDef != null)
EventAccumulator.logger.warn(
"Found multiple meta-graphs containing graph definitions, but did not find any graph events. " +
"Overwriting the graph with the newest meta-graph version.")
_graphDef = metaGraphDef.getGraphDef.toByteString
_graphFromMetaGraph = true
}
}
case Event.WhatCase.TAGGED_RUN_METADATA =>
val tag = event.getTaggedRunMetadata.getTag
if (_taggedRunMetadata.contains(tag))
EventAccumulator.logger.warn(
s"Found more than one run metadata event with tag '$tag'. Overwriting it with the newest event.")
_taggedRunMetadata += tag -> event.getTaggedRunMetadata.getRunMetadata
case Event.WhatCase.SUMMARY =>
event.getSummary.getValueList.asScala.foreach(value => {
if (value.hasMetadata) {
val tag = value.getTag
// We only store the first instance of the metadata. This check is important: the `FileWriter` does strip
// metadata from all values except the first one per each tag. However, a new `FileWriter` is created every
// time a training job stops and restarts. Hence, we must also ignore non-initial metadata in this logic.
if (!_summaryMetadata.contains(tag)) {
_summaryMetadata += tag -> value.getMetadata
val pluginData = value.getMetadata.getPluginData
              // Protobuf string fields are never null; an unset plugin name is the empty string.
              if (pluginData.getPluginName.nonEmpty) {
_pluginTagContent
.getOrElseUpdate(pluginData.getPluginName, mutable.Map.empty[String, String])
.update(tag, pluginData.getContent.toStringUtf8)
} else {
EventAccumulator.logger.warn(s"The summary with tag '$tag' is oddly not associated with any plugin.")
}
}
}
value.getValueCase match {
case Summary.Value.ValueCase.SIMPLE_VALUE =>
val record = ScalarEventRecord(event.getWallTime, event.getStep, value.getSimpleValue)
_reservoirs(ScalarEventType).asInstanceOf[Reservoir[String, ScalarEventRecord]].add(value.getTag, record)
case Summary.Value.ValueCase.IMAGE =>
val image = value.getImage
val imageValue = ImageValue(
image.getEncodedImageString, image.getWidth, image.getHeight, image.getColorspace)
val record = ImageEventRecord(event.getWallTime, event.getStep, imageValue)
_reservoirs(ImageEventType).asInstanceOf[Reservoir[String, ImageEventRecord]].add(value.getTag, record)
case Summary.Value.ValueCase.AUDIO =>
val audio = value.getAudio
val audioValue = AudioValue(
audio.getEncodedAudioString, audio.getContentType, audio.getSampleRate, audio.getNumChannels,
audio.getLengthFrames)
val record = AudioEventRecord(event.getWallTime, event.getStep, audioValue)
_reservoirs(AudioEventType).asInstanceOf[Reservoir[String, AudioEventRecord]].add(value.getTag, record)
case Summary.Value.ValueCase.HISTO =>
val histogram = value.getHisto
val histogramValue = HistogramValue(
histogram.getMin, histogram.getMax, histogram.getNum, histogram.getSum, histogram.getSumSquares,
histogram.getBucketLimitList.asScala.map(_.toDouble).toSeq,
histogram.getBucketList.asScala.map(_.toDouble).toSeq)
val record = HistogramEventRecord(event.getWallTime, event.getStep, histogramValue)
_reservoirs(HistogramEventType).asInstanceOf[Reservoir[String, HistogramEventRecord]].add(value.getTag, record)
// TODO: [EVENTS] Compress histogram and add to the compressed histograms reservoir.
case Summary.Value.ValueCase.TENSOR =>
val tag = {
                // Protobuf string fields default to "" rather than null when unset.
                if (value.getTag.isEmpty) {
// This tensor summary was created using the old method that used plugin assets.
// We must still continue to support it.
value.getNodeName
} else {
value.getTag
}
}
val record = TensorEventRecord(event.getWallTime, event.getStep, value.getTensor)
_reservoirs(TensorEventType).asInstanceOf[Reservoir[String, TensorEventRecord]].add(tag, record)
case _ => EventAccumulator.logger.warn(s"Unrecognized value type (${value.getValueCase}) is ignored.")
}
})
case _ => ()
}
}
//region Purging Methods
/** Purges orphaned data due to a TensorFlow crash, if that is deemed necessary.
*
* When TensorFlow crashes at step `T+O` and restarts at step `T`, any events written after step `T` are now
* "orphaned" and will be at best misleading if they are included. This method attempts to determine if there is
* orphaned data, and purge it if it is found.
*
* @param event Event to use as reference for the purge.
*/
private[this] def maybePurgeOrphanedData(event: Event): Unit = {
if (purgeOrphanedData) {
// Check if the event happened after a crash, and purge expired tags.
if (_fileVersion >= 2) {
// If the file version is recent enough, we can use the session log events to check for restarts.
checkForRestartAndMaybePurge(event)
} else {
// If there is no file version or if the file version is too old, we default to the old logic of checking for
// out of order steps.
checkForOutOfOrderStepAndMaybePurge(event)
}
}
}
/** Checks and discards expired events using `SessionLog.START`.
*
* Checks for a `SessionLog.START` event and purges all previously seen events with larger steps, because they are
* out of date. It is possible that this logic will cause the first few event messages to be discarded because the
* TensorFlow supervisor threading does not guarantee that the `START` message is deterministically written first.
*
* This method is preferred over `checkForOutOfOrderStepAndMaybePurge` which can inadvertently discard events due to
* the TensorFlow supervisor threading behavior.
*
* @param event Event to use as reference for the purge.
*/
private[this] def checkForRestartAndMaybePurge(event: Event): Unit = {
    if (event.getWhatCase == Event.WhatCase.SESSION_LOG &&
        event.getSessionLog.getStatus == SessionLog.SessionStatus.START)
purge(event, byTags = false)
}
/** Checks for an out-of-order event step and discards any expired events.
*
* Checks if the provided event is out of order relative to the global most recent step. If it is, then the method
 * purges any outdated summaries for tags that the event contains.
*
* @param event Event to use as reference for the purge.
*/
private[this] def checkForOutOfOrderStepAndMaybePurge(event: Event): Unit = {
if (event.getStep < _mostRecentStep && event.getWhatCase == Event.WhatCase.SUMMARY) {
purge(event, byTags = true)
} else {
_mostRecentWallTime = event.getWallTime
_mostRecentStep = event.getStep
}
}
/** Purges all events that have occurred after the provided event step.
*
* If `byTags` is `true`, then the method purges all events that occurred after the provided event step, but only for
* the tags that the event has. Non-sequential event steps suggest that a TensorFlow restart occurred, and we discard
* the out-of-order events in order to obtain a consistent view of the data.
*
* Discarding by tags is the safer method, when we are unsure whether a restart has occurred, given that threading in
* the TensorFlow supervisor can cause events with different tags to arrive with un-synchronized step values.
*
* If `byTags` is `false`, then the method purges all events with step greater than the provided event step. This can
* be used when we are certain that a TensorFlow restart has occurred and these events can be discarded.
*
* @param event Event to use as reference for the purge.
* @param byTags Boolean value indicating whether to purge all out-of-order events or only those that are associated
* with the provided reference event.
*/
private[this] def purge(event: Event, byTags: Boolean): Unit = {
// Keep data that has a step less than the event step in the reservoirs.
val notExpired = (e: EventRecord[_]) => e.step < event.getStep
val expiredPerType = {
if (byTags) {
val tags = event.getSummary.getValueList.asScala.map(_.getTag)
_reservoirs.view.mapValues(r => tags.map(t => r.filter(notExpired, Some(t))).sum).toMap
} else {
_reservoirs.view.mapValues(_.filter(notExpired)).toMap
}
}
if (expiredPerType.values.sum > 0) {
EventAccumulator.logger.warn(
"Detected out of order event step likely caused by a TensorFlow restart." +
s"Purging expired events between the previous step " +
s"(${_mostRecentStep} - timestamp = ${_mostRecentWallTime}) and the current step " +
s"(${event.getStep} - timestamp = ${event.getWallTime}). " +
s"Removing ${expiredPerType(ScalarEventType)} scalars, ${expiredPerType(ImageEventType)} images, " +
s"${expiredPerType(AudioEventType)} audio, ${expiredPerType(HistogramEventType)} histograms, and " +
s"${expiredPerType(CompressedHistogramEventType)}} compressed histograms.")
}
}
//endregion Purging Methods
}
object EventAccumulator {
private[EventAccumulator] val logger: Logger = Logger(LoggerFactory.getLogger("Event Accumulator"))
/** Default size guidance to use. */
private[events] val DEFAULT_SIZE_GUIDANCE: Map[EventType, Int] = Map(
ScalarEventType -> 10000,
ImageEventType -> 4,
AudioEventType -> 4,
HistogramEventType -> 1,
CompressedHistogramEventType -> 500,
TensorEventType -> 10,
GraphEventType -> 1,
MetaGraphEventType -> 1,
RunMetadataEventType -> 1
)
/** Default histogram compression BPS to use. The Normal CDF for standard deviations:
* (-Inf, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, Inf) naturally gives bands around the median of width 1 std dev, 2 std dev,
* 3 std dev, and then the long tail. */
private[events] val DEFAULT_HISTOGRAM_COMPRESSION_BPS: Seq[Int] = {
Seq(0, 668, 1587, 3085, 5000, 6915, 8413, 9332, 10000)
}
/** Returns an events file reader for the provided path. */
private[EventAccumulator] def eventLoaderFromPath(path: Path): () => Iterator[Event] = {
if (Files.isRegularFile(path) && path.getFileName.toString.contains("tfevents")) {
() => EventFileReader(path).load()
} else {
() => DirectoryLoader(path, EventFileReader(_), p => p.getFileName.toString.contains("tfevents")).load()
}
}
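  /** Minimal usage sketch (illustrative addition, not part of the original API): loads all
    * events under `logDir` and prints every scalar record per tag. Assumes `logDir` points to
    * a directory containing TensorFlow `tfevents` files. */
  private[events] def usageSketch(logDir: Path): Unit = {
    val accumulator = EventAccumulator(logDir).reload()
    accumulator.tags(ScalarEventType).foreach { tag =>
      accumulator.scalars(tag).foreach(record => println(s"$tag -> $record"))
    }
  }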
}
| eaplatanios/tensorflow_scala | modules/api/src/main/scala/org/platanios/tensorflow/api/io/events/EventAccumulator.scala | Scala | apache-2.0 | 24,900 |
package org.openurp.teach.action
import scala.collection.mutable.ListBuffer
import org.beangle.data.model.Entity
import org.beangle.webmvc.api.context.ContextHolder
import org.beangle.webmvc.api.view.View
import org.beangle.webmvc.entity.action.{ AbstractEntityAction, RestfulAction }
import org.openurp.teach.code.GradeType
import org.openurp.teach.core.Student
import org.openurp.teach.grade.CourseGrade
import org.openurp.teach.lesson.Lesson
class LessonGradeReportAction extends AbstractEntityAction {
def index(): String = {
val lessonNo = "1958"
val lesson = entityDao.findBy(classOf[Lesson], "no", List(lessonNo))
val grades = entityDao.findBy(classOf[CourseGrade], "lessonNo", List(lessonNo))
val gradeTypes = entityDao.findBy(classOf[GradeType], "code", List("0003", "0002", "0007"))
put("lessonGradeReports", List(LessonReport(lesson.head, grades, gradeTypes)))
val lglist = new ListBuffer[LevelGrade]()
lglist.append(new LevelGrade("优", 90, 100))
lglist.append(new LevelGrade("良", 80, 89.9F))
lglist.append(new LevelGrade("中", 70, 79.9F))
lglist.append(new LevelGrade("及格", 60, 69.9F))
lglist.append(new LevelGrade("不及格", 0, 59.9F))
val gradeType = gradeTypes.find(gt => { "0007".equals(gt.code) }).get
grades.foreach(grade => {
lglist.foreach(lg => {
val score = grade.getGrade(gradeType).score
if (lg.min <= score && score <= lg.max) {
lg.count = lg.count + 1
}
})
})
put("lglist", lglist)
forward("index_" + ContextHolder.context.locale.getLanguage)
}
def statTask(): String = {
val lessonNo = "1958"
val lesson = entityDao.findBy(classOf[Lesson], "no", List(lessonNo)).head
val grades = entityDao.findBy(classOf[CourseGrade], "lessonNo", List(lessonNo))
val gradeTypes = entityDao.findBy(classOf[GradeType], "code", List("0002", "0007"))
val courseStat = new GradeSegStats(0)
courseStat.courseGrades ++= grades
courseStat.lesson = lesson
courseStat.scoreSegments.append(new FloatSegment(90, 100))
courseStat.scoreSegments.append(new FloatSegment(80, 89))
courseStat.scoreSegments.append(new FloatSegment(70, 79))
courseStat.scoreSegments.append(new FloatSegment(60, 69))
courseStat.scoreSegments.append(new FloatSegment(50, 59))
courseStat.scoreSegments.append(new FloatSegment(0, 49))
courseStat.stat(gradeTypes)
put("courseStats", List(courseStat))
forward()
}
def reportForExam(): String = {
val lessonNo = "1958"
val lesson = entityDao.findBy(classOf[Lesson], "no", List(lessonNo)).head
val grades = entityDao.findBy(classOf[CourseGrade], "lessonNo", List(lessonNo))
val gradeTypes = entityDao.findBy(classOf[GradeType], "code", List("0002"))
val courseStat = new GradeSegStats(0)
courseStat.courseGrades ++= grades
courseStat.lesson = lesson
courseStat.scoreSegments.append(new FloatSegment(90, 100))
courseStat.scoreSegments.append(new FloatSegment(80, 89))
courseStat.scoreSegments.append(new FloatSegment(70, 79))
courseStat.scoreSegments.append(new FloatSegment(60, 69))
courseStat.scoreSegments.append(new FloatSegment(50, 59))
courseStat.scoreSegments.append(new FloatSegment(0, 49))
courseStat.stat(gradeTypes)
put("courseStats", List(courseStat))
forward()
}
}
//case class CourseStat(lesson: Lesson, grades:Seq[CourseGrade],gradeType: Seq[GradeType],scoreSegments: ListBuffer[FloatSegment], gradeSegStat: ListBuffer[GradeSegStat]) {
//
//
//}
case class LessonReport(lesson: Lesson, grades: Seq[CourseGrade], gradeTypes: Seq[GradeType]) {
}
class LevelGrade(val name: String, val min: Float, val max: Float) {
var count: Int = _
}
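// Illustrative sketch (hypothetical, not part of the original action): how LevelGrade
// buckets accumulate counts for a list of raw scores, mirroring the loop in `index()`.
//
//   val levels = Seq(new LevelGrade("优", 90, 100), new LevelGrade("及格", 60, 89.9f))
//   Seq(95f, 72f, 61f).foreach { s =>
//     levels.find(l => l.min <= s && s <= l.max).foreach(_.count += 1)
//   }
//   // levels.map(_.count) == Seq(1, 2)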
| openurp/edu-core | grade/webapp/src/main/scala/org/openurp/teach/action/LessonGradeReportAction.scala | Scala | gpl-3.0 | 3,745 |
package org.jetbrains.plugins.scala
package lang.refactoring.ui
import com.intellij.refactoring.ui.ComboBoxVisibilityPanel
import org.jetbrains.plugins.scala.lang.refactoring.ui.ScalaComboBoxVisibilityPanel._
/**
* Nikolay.Tropin
* 2014-09-01
*/
class ScalaComboBoxVisibilityPanel(additional: String*)
extends ComboBoxVisibilityPanel[String](options(additional), names(additional))
object ScalaComboBoxVisibilityPanel {
private def modifiers(additional: Seq[String]) =
(Seq("private[this]", "private", "protected[this]", "protected") ++ additional.diff(Seq("", "public"))).sorted.distinct
def options(additional: Seq[String]): Array[String] = (modifiers(additional) :+ "").toArray
def names(additional: Seq[String]): Array[String] = (modifiers(additional) :+ "public").toArray
}
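// Worked example (computed from the definitions above, for illustration):
//
//   ScalaComboBoxVisibilityPanel.names(Seq("case"))
//   // -> Array("case", "private", "private[this]", "protected", "protected[this]", "public")
//   ScalaComboBoxVisibilityPanel.options(Seq("case"))
//   // -> the same modifiers, with "" (no modifier) in place of "public"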
| loskutov/intellij-scala | src/org/jetbrains/plugins/scala/lang/refactoring/ui/ScalaComboBoxVisibilityPanel.scala | Scala | apache-2.0 | 804 |
/*
* Copyright 2018 TWO SIGMA OPEN SOURCE, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twosigma.beakerx.scala.chart.xychart
class NanoPlot extends com.twosigma.beakerx.chart.xychart.NanoPlot with NanoPlotProperties
trait NanoPlotProperties extends TimePlotProperties {
this: com.twosigma.beakerx.chart.xychart.NanoPlot =>
// No new properties
}
| jpallas/beakerx | kernel/scala/src/main/scala/com/twosigma/beakerx/scala/chart/xychart/NanoPlot.scala | Scala | apache-2.0 | 891 |
package darkpool.tools
import java.util.UUID
import akka.actor._
import com.github.nscala_time.time.Imports._
import darkpool.actors.{MatchingEngineActor, QueryActor}
import darkpool.book.OrderBook
import darkpool.engine.commands._
import darkpool.models.Trade
import darkpool.models.orders._
import scala.concurrent.duration._
import scala.util.Random
object RobotTrader {
case object GenerateOrder
class TradeGenerator extends Actor with ActorLogging {
val random = new Random()
val engine = context.actorSelection("../engine")
override def receive: Actor.Receive = {
case GenerateOrder => generateOrder
case _ =>
}
    private def generateOrder(): Unit = {
      engine ! Add(randomOrder)
    }
private def randomThreshold = random.nextInt(100) + (random.nextInt(10) / 10.0)
private def randomQuantity = random.nextInt(100) + 1
private def randomOrder: Order = {
val sideSwitch = random.nextInt(100)
// Pick a Side
val side = if(sideSwitch % 2 == 0) {
BuyOrder
} else {
SellOrder
}
LimitOrder(side, randomQuantity, randomThreshold, UUID.randomUUID(), UUID.randomUUID())
}
}
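  // Hypothetical entry point (not in the original file): wires a TradeGenerator into an actor
  // system and fires GenerateOrder once per second. The "engine" actor that TradeGenerator
  // resolves via actorSelection("../engine") is assumed to be started elsewhere (e.g., a
  // MatchingEngineActor registered as "engine"); only the scheduling side is sketched here.
  def main(args: Array[String]): Unit = {
    val system = ActorSystem("dark-pool")
    val generator = system.actorOf(Props[TradeGenerator], "generator")
    import system.dispatcher
    system.scheduler.schedule(1.second, 1.second, generator, GenerateOrder)
  }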
}
| film42/dark-pool | src/main/scala/darkpool/tools/RobotTrader.scala | Scala | mit | 1,183 |
package tv.camfire.media_server.model
/**
* User: jonathan
* Date: 7/12/13
* Time: 1:26 PM
*/
class StreamInfo() {
var id: java.lang.Long = null
var created: java.lang.Long = System.currentTimeMillis()
}
| jgrowl/camfire-signaling | signaling-server/src/main/scala/tv/camfire/media_server/model/StreamInfo.scala | Scala | mit | 214 |
package com.shocktrade.models.contest
import java.util.Date
import com.shocktrade.models.contest.ContestStatuses.ContestStatus
import com.shocktrade.util.BSONHelper._
import play.api.libs.functional.syntax._
import play.api.libs.json.Reads._
import play.api.libs.json.{Reads, Writes, __}
import play.modules.reactivemongo.json.BSONFormats._
import reactivemongo.bson.{BSONDocument, BSONDocumentReader, BSONDocumentWriter, BSONObjectID, _}
/**
* Represents a contest
* @author [email protected]
*/
case class Contest(id: BSONObjectID = BSONObjectID.generate,
name: String,
creator: PlayerRef,
creationTime: Date,
startTime: Option[Date] = None,
expirationTime: Option[Date] = None,
startingBalance: BigDecimal,
messages: List[Message] = Nil,
participants: List[Participant] = Nil,
status: ContestStatus = ContestStatuses.ACTIVE,
levelCap: Option[Int] = None,
friendsOnly: Boolean = false,
invitationOnly: Boolean = false,
perksAllowed: Boolean = false,
robotsAllowed: Boolean = false,
asOfDate: Option[Date] = None)
/**
* Contest Singleton
* @author [email protected]
*/
object Contest {
val MaxPlayers = 24
implicit val contestReads: Reads[Contest] = (
(__ \\ "_id").read[BSONObjectID] and
(__ \\ "name").read[String] and
(__ \\ "creator").read[PlayerRef] and
(__ \\ "creationTime").read[Date] and
(__ \\ "startTime").readNullable[Date] and
(__ \\ "expirationTime").readNullable[Date] and
(__ \\ "startingBalance").read[BigDecimal] and
(__ \\ "messages").readNullable[List[Message]].map(_.getOrElse(Nil)) and
(__ \\ "participants").readNullable[List[Participant]].map(_.getOrElse(Nil)) and
(__ \\ "status").read[ContestStatus] and
(__ \\ "levelCap").readNullable[Int] and
(__ \\ "friendsOnly").read[Boolean] and
(__ \\ "invitationOnly").read[Boolean] and
(__ \\ "perksAllowed").read[Boolean] and
(__ \\ "robotsAllowed").read[Boolean] and
(__ \\ "asOfDate").readNullable[Date])(Contest.apply _)
implicit val contestWrites: Writes[Contest] = (
(__ \\ "_id").write[BSONObjectID] and
(__ \\ "name").write[String] and
(__ \\ "creator").write[PlayerRef] and
(__ \\ "creationTime").write[Date] and
(__ \\ "startTime").writeNullable[Date] and
(__ \\ "expirationTime").writeNullable[Date] and
(__ \\ "startingBalance").write[BigDecimal] and
(__ \\ "messages").write[List[Message]] and
(__ \\ "participants").write[List[Participant]] and
(__ \\ "status").write[ContestStatus] and
(__ \\ "levelCap").writeNullable[Int] and
(__ \\ "friendsOnly").write[Boolean] and
(__ \\ "invitationOnly").write[Boolean] and
(__ \\ "perksAllowed").write[Boolean] and
(__ \\ "robotsAllowed").write[Boolean] and
(__ \\ "asOfDate").writeNullable[Date])(unlift(Contest.unapply))
implicit object ContestReader extends BSONDocumentReader[Contest] {
def read(doc: BSONDocument) = Contest(
doc.getAs[BSONObjectID]("_id").get,
doc.getAs[String]("name").get,
doc.getAs[PlayerRef]("creator").get,
doc.getAs[Date]("creationTime").getOrElse(new Date()),
doc.getAs[Date]("startTime"),
doc.getAs[Date]("expirationTime"),
doc.getAs[BigDecimal]("startingBalance").get,
doc.getAs[List[Message]]("messages").getOrElse(Nil),
doc.getAs[List[Participant]]("participants").getOrElse(Nil),
doc.getAs[ContestStatus]("status").get,
doc.getAs[Int]("levelCap"),
doc.getAs[Boolean]("friendsOnly").contains(true),
doc.getAs[Boolean]("invitationOnly").contains(true),
doc.getAs[Boolean]("perksAllowed").contains(true),
doc.getAs[Boolean]("robotsAllowed").contains(true),
doc.getAs[Date]("asOfDate")
)
}
implicit object ContestWriter extends BSONDocumentWriter[Contest] {
def write(contest: Contest) = BSONDocument(
"_id" -> contest.id,
"name" -> contest.name,
"creator" -> contest.creator,
"creationTime" -> contest.creationTime,
"startTime" -> contest.startTime,
"expirationTime" -> contest.expirationTime,
"startingBalance" -> contest.startingBalance,
"messages" -> contest.messages,
"participants" -> contest.participants,
"status" -> contest.status,
"levelCap" -> contest.levelCap,
"friendsOnly" -> contest.friendsOnly,
"invitationOnly" -> contest.invitationOnly,
"perksAllowed" -> contest.perksAllowed,
"robotsAllowed" -> contest.robotsAllowed,
"asOfDate" -> contest.asOfDate,
"playerCount" -> contest.participants.length
)
}
} | ldaniels528/shocktrade-server | app-server/app/com/shocktrade/models/contest/Contest.scala | Scala | apache-2.0 | 4,875 |
package edu.msstate.dasi.csb.workload.neo4j
import edu.msstate.dasi.csb.workload.Workload
import org.apache.spark.graphx.Graph
import scala.reflect.ClassTag
/**
* Collects the neighbors for each vertex.
*
* @note Vertices with no edges are ignored.
*/
class Neighbors(engine: Neo4jEngine) extends Workload {
val name = "Neighbors"
/**
* Collects the neighbors for each vertex.
*
* @note Vertices with no edges are ignored.
*/
def run[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED]): Unit = {
val query = "MATCH (n)--(m) RETURN n, collect(m);"
engine.run(query)
}
}
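// For comparison (illustrative only): the same neighbor collection expressed directly on the
// GraphX `graph` parameter, which this Neo4j-backed workload deliberately ignores:
//
//   import org.apache.spark.graphx.EdgeDirection
//   graph.collectNeighbors(EdgeDirection.Either) // VertexRDD[Array[(VertexId, VD)]]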
| msstate-dasi/csb | csb/src/main/scala/edu/msstate/dasi/csb/workload/neo4j/Neighbors.scala | Scala | gpl-3.0 | 604 |
package eu.devtty.ipld
import eu.devtty.ipfs.{DagGetResult, IpfsNode}
import scala.concurrent.Future
package object util {
implicit class IPLDLinkUtils(val link: IPLDLink) extends AnyVal {
@inline
def get(implicit ipfs: IpfsNode): Future[DagGetResult] = {
ipfs.dag.get(link./)
}
}
}
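// Usage sketch (illustrative; `myLink` and the implicit `node` are assumed values):
//
//   import eu.devtty.ipld.util._
//   implicit val node: IpfsNode = ???
//   val result: Future[DagGetResult] = myLink.get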
| magik6k/scalajs-ipfs-api | core-api/src/main/scala/eu/devtty/ipld/util/package.scala | Scala | mit | 310 |
package io.vamp.model.reader
import io.vamp.model.notification.{ EmptyImportError, ImportDefinitionError }
import io.vamp.model.reader.YamlSourceReader._
case class Import(base: Map[String, Any], references: List[ImportReference])
case class ImportReference(name: String, kind: String) {
override def toString = s"$kind/$name"
}
object ImportReader extends YamlReader[Import] {
override protected def expand(implicit source: YamlSourceReader): YamlSourceReader = {
expandToList("import")
source
}
override protected def parse(implicit source: YamlSourceReader): Import = {
val references = <<?[Any]("import") match {
case None ⇒ Nil
case Some(list: List[_]) ⇒
list.map {
case str: String if str.isEmpty ⇒ throwException(EmptyImportError)
case str: String ⇒
str.split('/').toList match {
case s :: Nil ⇒ ImportReference(s, "templates")
case k :: s :: Nil ⇒ ImportReference(s, k)
case _ ⇒ throwException(ImportDefinitionError)
}
case _ ⇒ throwException(ImportDefinitionError)
}
case Some(_) ⇒ throwException(ImportDefinitionError)
}
Import(source.flatten({ entry ⇒ entry != "import" }), references)
}
}
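// Illustrative input (hypothetical YAML; names are placeholders). The reader accepts both
// bare names, which default to the "templates" kind, and "<kind>/<name>" references:
//
//   import:
//     - my-template        # -> ImportReference("my-template", "templates")
//     - breeds/my-breed    # -> ImportReference("my-breed", "breeds")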
| dragoslav/vamp | model/src/main/scala/io/vamp/model/reader/ImportReader.scala | Scala | apache-2.0 | 1,305 |
/*
* Copyright (C) 2016 Language Technology Group and Interactive Graphics Systems Group, Technische Universität Darmstadt, Germany
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package models
/** Provides classes for accessing the underlying data. */
package object services
| thorbenwiese/newsleak-frontend | app/models/services/package.scala | Scala | agpl-3.0 | 906 |
package cookbook.core
// fintrospect-core
object Serving_Static_Content_Example extends App {
import com.twitter.finagle.Http
import com.twitter.finagle.http.path.Root
import com.twitter.util.Await.ready
import io.fintrospect.{Module, ResourceLoader, StaticModule}
val module: Module = StaticModule(Root, ResourceLoader.Directory("."))
ready(Http.serve(":9999", module.toService))
}
//curl -v http://localhost:9999/package.json | daviddenton/fintrospect | src/test/scala/cookbook/core/Serving_Static_Content_Example.scala | Scala | apache-2.0 | 445 |
package org.sisioh.aws4s.eb.model
import com.amazonaws.services.elasticbeanstalk.model.{ OptionSpecification, DescribeConfigurationOptionsRequest }
import org.sisioh.aws4s.PimpedType
import scala.collection.JavaConverters._
object DescribeConfigurationOptionsRequestFactory {
def create(): DescribeConfigurationOptionsRequest = new DescribeConfigurationOptionsRequest()
}
class RichDescribeConfigurationOptionsRequest(val underlying: DescribeConfigurationOptionsRequest)
extends AnyVal with PimpedType[DescribeConfigurationOptionsRequest] {
def applicationNameOpt: Option[String] = Option(underlying.getApplicationName)
def applicationNameOpt_=(value: Option[String]): Unit =
underlying.setApplicationName(value.orNull)
def withApplicationNameOpt(value: Option[String]): DescribeConfigurationOptionsRequest =
underlying.withApplicationName(value.orNull)
// ---
def templateNameOpt: Option[String] = Option(underlying.getTemplateName)
def templateNameOpt_=(value: Option[String]): Unit =
underlying.setTemplateName(value.orNull)
def withTemplateNameOpt(value: Option[String]): DescribeConfigurationOptionsRequest =
underlying.withTemplateName(value.orNull)
// ---
def environmentNameOpt: Option[String] = Option(underlying.getEnvironmentName)
def environmentNameOpt_=(value: Option[String]): Unit =
underlying.setEnvironmentName(value.orNull)
def withEnvironmentNameOpt(value: Option[String]): DescribeConfigurationOptionsRequest =
underlying.withEnvironmentName(value.orNull)
// ---
def solutionStackNameOpt: Option[String] = Option(underlying.getSolutionStackName)
def solutionStackNameOpt_=(value: Option[String]): Unit =
underlying.setSolutionStackName(value.orNull)
def withSolutionStackNameOpt(value: Option[String]): DescribeConfigurationOptionsRequest =
underlying.withSolutionStackName(value.orNull)
// ---
def options: Seq[OptionSpecification] = underlying.getOptions.asScala.toVector
def options_=(value: Seq[OptionSpecification]): Unit =
underlying.setOptions(value.asJava)
def withOptions(value: Seq[OptionSpecification]): DescribeConfigurationOptionsRequest =
underlying.withOptions(value.asJava)
}
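// Usage sketch (illustrative; assumes an implicit conversion from
// DescribeConfigurationOptionsRequest to RichDescribeConfigurationOptionsRequest is in scope):
//
//   val request = DescribeConfigurationOptionsRequestFactory.create()
//     .withApplicationNameOpt(Some("my-app"))
//     .withSolutionStackNameOpt(None)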
| everpeace/aws4s | aws4s-eb/src/main/scala/org/sisioh/aws4s/eb/model/RichDescribeConfigurationOptionsRequest.scala | Scala | mit | 2,222 |
class i1[i1](override val i1: i1[i1]) {
def i1: i1[i1] = i1(null)
}
object i1 {
implicit def i2[i2](i2: i2[i1]): i1[i2] = ???
}
| som-snytt/dotty | tests/pending/fuzzy/SOE-bb0e91970b9373329dfd57a639c7d9211ebf3139.scala | Scala | apache-2.0 | 132 |
/**
* Copyright 2013, 2016 Gianluca Amato <[email protected]>
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.ui.cli
import org.rogach.scallop._
import it.unich.jandom.targets.parameters.NarrowingStrategy
import it.unich.jandom.targets.parameters.WideningScope
import it.unich.jandom.ui.NarrowingStrategies
import it.unich.jandom.ui.WideningScopes
/**
* The class for command line parameters.
*/
class Conf(arguments: Seq[String]) extends ScallopConf(arguments) {
def enumConverter(e: Enumeration) = singleArgConverter(e.withName(_))
  // we need to factor out common code here
val wideningScope = opt[WideningScope.Value]("widening", default = Some(WideningScopes.default.value))(enumConverter(WideningScope))
val narrowingStrategy = opt[NarrowingStrategy.Value]("narrowing", default = Some(NarrowingStrategies.default.value))(enumConverter(NarrowingStrategy))
val file = opt[String]("input", required = true)
verify()
}
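// Usage sketch (illustrative): parsing command-line arguments and reading the options.
//
//   val conf = new Conf(Seq("--input", "examples/program.t"))
//   println(conf.file())           // "examples/program.t"
//   println(conf.wideningScope())  // the default widening scope unless --widening is given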
| amato-gianluca/Jandom | core/src/main/scala/it/unich/jandom/ui/cli/Conf.scala | Scala | lgpl-3.0 | 1,632 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.batch.table
import org.apache.flink.api.scala._
import org.apache.flink.table.api.DataTypes._
import org.apache.flink.table.api._
import org.apache.flink.table.data.DecimalDataUtils
import org.apache.flink.table.functions.ScalarFunction
import org.apache.flink.table.planner.expressions.utils._
import org.apache.flink.table.planner.runtime.utils.TestData._
import org.apache.flink.table.planner.runtime.utils.{BatchTableEnvUtil, BatchTestBase, CollectionBatchExecTable, UserDefinedFunctionTestUtils}
import org.apache.flink.table.planner.utils.DateTimeTestUtil.localDateTime
import org.apache.flink.table.utils.LegacyRowResource
import org.apache.flink.test.util.TestBaseUtils
import org.apache.flink.test.util.TestBaseUtils.compareResultAsText
import org.apache.flink.types.Row
import org.junit.Assert.assertEquals
import org.junit.{Before, Rule, Test}
import java.sql.{Date, Time, Timestamp}
import java.time.LocalDateTime
import java.util
import scala.collection.JavaConverters._
import scala.collection.{Seq, mutable}
class CalcITCase extends BatchTestBase {
@Rule
def usesLegacyRows: LegacyRowResource = LegacyRowResource.INSTANCE
@Before
override def before(): Unit = {
super.before()
registerCollection("Table3", data3, type3, "a, b, c", nullablesOfData3)
}
@Test
def testSimpleSelectAll(): Unit = {
val t = CollectionBatchExecTable.get3TupleDataSet(tEnv).select('_1, '_2, '_3)
val expected = "1,1,Hi\\n" + "2,2,Hello\\n" + "3,2,Hello world\\n" +
"4,3,Hello world, how are you?\\n" + "5,3,I am fine.\\n" + "6,3,Luke Skywalker\\n" +
"7,4,Comment#1\\n" + "8,4,Comment#2\\n" + "9,4,Comment#3\\n" + "10,4,Comment#4\\n" +
"11,5,Comment#5\\n" + "12,5,Comment#6\\n" + "13,5,Comment#7\\n" + "14,5,Comment#8\\n" +
"15,5,Comment#9\\n" + "16,6,Comment#10\\n" + "17,6,Comment#11\\n" + "18,6,Comment#12\\n" +
"19,6,Comment#13\\n" + "20,6,Comment#14\\n" + "21,6,Comment#15\\n"
val results = executeQuery(t)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testSimpleSelectAllWithAs(): Unit = {
val t = CollectionBatchExecTable.get3TupleDataSet(tEnv, "a, b, c").select('a, 'b, 'c)
val expected = "1,1,Hi\\n" + "2,2,Hello\\n" + "3,2,Hello world\\n" +
"4,3,Hello world, how are you?\\n" + "5,3,I am fine.\\n" + "6,3,Luke Skywalker\\n" +
"7,4,Comment#1\\n" + "8,4,Comment#2\\n" + "9,4,Comment#3\\n" + "10,4,Comment#4\\n" +
"11,5,Comment#5\\n" + "12,5,Comment#6\\n" + "13,5,Comment#7\\n" + "14,5,Comment#8\\n" +
"15,5,Comment#9\\n" + "16,6,Comment#10\\n" + "17,6,Comment#11\\n" + "18,6,Comment#12\\n" +
"19,6,Comment#13\\n" + "20,6,Comment#14\\n" + "21,6,Comment#15\\n"
val results = executeQuery(t)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testSimpleSelectWithNaming(): Unit = {
val t = CollectionBatchExecTable.get3TupleDataSet(tEnv)
.select('_1 as 'a, '_2 as 'b, '_1 as 'c)
.select('a, 'b)
val expected = "1,1\\n" + "2,2\\n" + "3,2\\n" + "4,3\\n" + "5,3\\n" + "6,3\\n" + "7,4\\n" +
"8,4\\n" + "9,4\\n" + "10,4\\n" + "11,5\\n" + "12,5\\n" + "13,5\\n" + "14,5\\n" + "15,5\\n" +
"16,6\\n" + "17,6\\n" + "18,6\\n" + "19,6\\n" + "20,6\\n" + "21,6\\n"
val results = executeQuery(t)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testSimpleSelectRenameAll(): Unit = {
val t = CollectionBatchExecTable.get3TupleDataSet(tEnv)
.select('_1 as 'a, '_2 as 'b, '_3 as 'c)
.select('a, 'b)
val expected = "1,1\\n" + "2,2\\n" + "3,2\\n" + "4,3\\n" + "5,3\\n" + "6,3\\n" + "7,4\\n" +
"8,4\\n" + "9,4\\n" + "10,4\\n" + "11,5\\n" + "12,5\\n" + "13,5\\n" + "14,5\\n" + "15,5\\n" +
"16,6\\n" + "17,6\\n" + "18,6\\n" + "19,6\\n" + "20,6\\n" + "21,6\\n"
val results = executeQuery(t)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testSelectStar(): Unit = {
val t = CollectionBatchExecTable.get3TupleDataSet(tEnv, "a, b, c").select('*)
val expected = "1,1,Hi\\n" + "2,2,Hello\\n" + "3,2,Hello world\\n" +
"4,3,Hello world, how are you?\\n" + "5,3,I am fine.\\n" + "6,3,Luke Skywalker\\n" +
"7,4,Comment#1\\n" + "8,4,Comment#2\\n" + "9,4,Comment#3\\n" + "10,4,Comment#4\\n" +
"11,5,Comment#5\\n" + "12,5,Comment#6\\n" + "13,5,Comment#7\\n" + "14,5,Comment#8\\n" +
"15,5,Comment#9\\n" + "16,6,Comment#10\\n" + "17,6,Comment#11\\n" + "18,6,Comment#12\\n" +
"19,6,Comment#13\\n" + "20,6,Comment#14\\n" + "21,6,Comment#15\\n"
val results = executeQuery(t)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testAllRejectingFilter(): Unit = {
val ds = CollectionBatchExecTable.get3TupleDataSet(tEnv, "a, b, c")
val filterDs = ds.filter(false)
val expected = "\\n"
val results = executeQuery(filterDs)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testAllPassingFilter(): Unit = {
val ds = CollectionBatchExecTable.get3TupleDataSet(tEnv, "a, b, c")
val filterDs = ds.filter(true)
val expected = "1,1,Hi\\n" + "2,2,Hello\\n" + "3,2,Hello world\\n" + "4,3,Hello world, " +
"how are you?\\n" + "5,3,I am fine.\\n" + "6,3,Luke Skywalker\\n" + "7,4," +
"Comment#1\\n" + "8,4,Comment#2\\n" + "9,4,Comment#3\\n" + "10,4,Comment#4\\n" + "11,5," +
"Comment#5\\n" + "12,5,Comment#6\\n" + "13,5,Comment#7\\n" + "14,5,Comment#8\\n" + "15,5," +
"Comment#9\\n" + "16,6,Comment#10\\n" + "17,6,Comment#11\\n" + "18,6,Comment#12\\n" + "19," +
"6,Comment#13\\n" + "20,6,Comment#14\\n" + "21,6,Comment#15\\n"
val results = executeQuery(filterDs)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testFilterOnStringTupleField(): Unit = {
val ds = CollectionBatchExecTable.get3TupleDataSet(tEnv, "a, b, c")
val filterDs = ds.filter('c.like("%world%"))
val expected = "3,2,Hello world\\n" + "4,3,Hello world, how are you?\\n"
val results = executeQuery(filterDs)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testFilterOnIntegerTupleField(): Unit = {
val ds = CollectionBatchExecTable.get3TupleDataSet(tEnv, "a, b, c")
val filterDs = ds.filter('a % 2 === 0)
val expected = "2,2,Hello\\n" + "4,3,Hello world, how are you?\\n" +
"6,3,Luke Skywalker\\n" + "8,4," + "Comment#2\\n" + "10,4,Comment#4\\n" +
"12,5,Comment#6\\n" + "14,5,Comment#8\\n" + "16,6," +
"Comment#10\\n" + "18,6,Comment#12\\n" + "20,6,Comment#14\\n"
val results = executeQuery(filterDs)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testNotEquals(): Unit = {
val ds = CollectionBatchExecTable.get3TupleDataSet(tEnv, "a, b, c")
val filterDs = ds.filter('a % 2 !== 0)
val expected = "1,1,Hi\\n" + "3,2,Hello world\\n" +
"5,3,I am fine.\\n" + "7,4,Comment#1\\n" + "9,4,Comment#3\\n" +
"11,5,Comment#5\\n" + "13,5,Comment#7\\n" + "15,5,Comment#9\\n" +
"17,6,Comment#11\\n" + "19,6,Comment#13\\n" + "21,6,Comment#15\\n"
val results = executeQuery(filterDs)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testDisjunctivePredicate(): Unit = {
val ds = CollectionBatchExecTable.get3TupleDataSet(tEnv, "a, b, c")
val filterDs = ds.filter('a < 2 || 'a > 20)
val expected = "1,1,Hi\\n" + "21,6,Comment#15\\n"
val results = executeQuery(filterDs)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testConsecutiveFilters(): Unit = {
val ds = CollectionBatchExecTable.get3TupleDataSet(tEnv, "a, b, c")
val filterDs = ds.filter('a % 2 !== 0).filter('b % 2 === 0)
val expected = "3,2,Hello world\\n" + "7,4,Comment#1\\n" +
"9,4,Comment#3\\n" + "17,6,Comment#11\\n" +
"19,6,Comment#13\\n" + "21,6,Comment#15\\n"
val results = executeQuery(filterDs)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testFilterBasicType(): Unit = {
val ds = CollectionBatchExecTable.getStringDataSet(tEnv)
val filterDs = ds.filter('f0.like("H%"))
val expected = "Hi\\n" + "Hello\\n" + "Hello world\\n" + "Hello world, how are you?\\n"
val results = executeQuery(filterDs)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testFilterOnCustomType(): Unit = {
val filterDs = CollectionBatchExecTable.getCustomTypeDataSet(tEnv)
.filter('myString.like("%a%"))
val expected = "3,3,Hello world, how are you?\\n" + "3,4,I am fine.\\n" + "3,5,Luke Skywalker\\n"
val results = executeQuery(filterDs)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testSimpleCalc(): Unit = {
val t = CollectionBatchExecTable.get3TupleDataSet(tEnv)
.select('_1, '_2, '_3)
.where('_1 < 7)
.select('_1, '_3)
val expected = "1,Hi\\n" + "2,Hello\\n" + "3,Hello world\\n" +
"4,Hello world, how are you?\\n" + "5,I am fine.\\n" + "6,Luke Skywalker\\n"
val results = executeQuery(t)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testCalcWithTwoFilters(): Unit = {
val t = CollectionBatchExecTable.get3TupleDataSet(tEnv)
.select('_1, '_2, '_3)
.where('_1 < 7 && '_2 === 3)
.select('_1, '_3)
.where('_1 === 4)
.select('_1)
val expected = "4\\n"
val results = executeQuery(t)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testCalcWithAggregation(): Unit = {
val t = CollectionBatchExecTable.get3TupleDataSet(tEnv)
.select('_1, '_2, '_3)
.where('_1 < 15)
.groupBy('_2)
.select('_1.min, '_2.count as 'cnt)
.where('cnt > 3)
val expected = "7,4\\n" + "11,4\\n"
val results = executeQuery(t)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testCalcJoin(): Unit = {
val ds1 = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv, "a, b, c")
val ds2 = CollectionBatchExecTable.get5TupleDataSet(tEnv, "d, e, f, g, h")
val joinT = ds1.select('a, 'b).join(ds2).where('b === 'e).select('a, 'b, 'd, 'e, 'f)
.where('b > 1).select('a, 'd).where('d === 2)
val expected = "2,2\\n" + "3,2\\n"
val results = executeQuery(joinT)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testAdvancedDataTypes(): Unit = {
val bd1 = BigDecimal("78.454654654654654").bigDecimal
val bd2 = BigDecimal("4E+16").bigDecimal
val t = BatchTableEnvUtil.fromCollection(tEnv,
Seq((bd1, bd2, Date.valueOf("1984-07-12"),
Time.valueOf("14:34:24"),
Timestamp.valueOf("1984-07-12 14:34:24"))), "_1, _2, _3, _4, _5")
.select('_1, '_2, '_3, '_4, '_5, BigDecimal("11.2"), BigDecimal("11.2").bigDecimal,
Date.valueOf("1984-07-12"), Time.valueOf("14:34:24"),
Timestamp.valueOf("1984-07-12 14:34:24"))
// inferred Decimal(p,s) from BigDecimal.class
val bd1x = bd1.setScale(DecimalDataUtils.DECIMAL_SYSTEM_DEFAULT.getScale)
val bd2x = bd2.setScale(DecimalDataUtils.DECIMAL_SYSTEM_DEFAULT.getScale)
val expected = s"$bd1x,$bd2x,1984-07-12,14:34:24,1984-07-12T14:34:24," +
"11.2,11.2,1984-07-12,14:34:24,1984-07-12T14:34:24"
val results = executeQuery(t)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testUserDefinedScalarFunction() {
registerFunction("hashCode", HashCode)
val table = BatchTableEnvUtil.fromElements(tEnv, "a", "b", "c")
val result = table.select("f0.hashCode()")
val results = executeQuery(result)
val expected = "97\\n98\\n99"
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testNumericAutocastInArithmetic() {
val table = BatchTableEnvUtil.fromElements(tEnv,
(1.toByte, 1.toShort, 1, 1L, 1.0f, 1.0d, 1L, 1001.1))
.select('_1 + 1, '_2 + 1, '_3 + 1L, '_4 + 1.0f,
'_5 + 1.0d, '_6 + 1, '_7 + 1.0d, '_8 + '_1)
val results = executeQuery(table)
val expected = "2,2,2,2.0,2.0,2.0,2.0,1002.1"
compareResultAsText(results.asJava, expected)
}
@Test
def testNumericAutocastInComparison() {
val table = BatchTableEnvUtil.fromCollection(tEnv,
Seq(
(1.toByte, 1.toShort, 1, 1L, 1.0f, 1.0d),
(2.toByte, 2.toShort, 2, 2L, 2.0f, 2.0d)),
"a, b, c, d, e, f"
).filter('a > 1 && 'b > 1 && 'c > 1L && 'd > 1.0f && 'e > 1.0d && 'f > 1)
val results = executeQuery(table)
val expected: String = "2,2,2,2,2.0,2.0"
compareResultAsText(results.asJava, expected)
}
@Test
def testCasting() {
val table = BatchTableEnvUtil.fromElements(tEnv, (1, 0.0, 1L, true))
.select(
// * -> String
'_1.cast(STRING), '_2.cast(STRING), '_3.cast(STRING), '_4.cast(STRING),
// NUMERIC TYPE -> Boolean
'_1.cast(BOOLEAN), '_2.cast(BOOLEAN), '_3.cast(BOOLEAN),
// NUMERIC TYPE -> NUMERIC TYPE
'_1.cast(DOUBLE), '_2.cast(INT), '_3.cast(SMALLINT),
// Boolean -> NUMERIC TYPE
'_4.cast(DOUBLE),
// identity casting
'_1.cast(INT), '_2.cast(DOUBLE), '_3.cast(BIGINT), '_4.cast(BOOLEAN))
val results = executeQuery(table)
val expected = "1,0.0,1,true," + "true,false,true," +
"1.0,0,1," + "1.0," + "1,0.0,1,true\\n"
compareResultAsText(results.asJava, expected)
}
@Test
def testCastFromString() {
val table = BatchTableEnvUtil.fromElements(tEnv, ("1", "true", "2.0"))
.select('_1.cast(TINYINT), '_1.cast(SMALLINT), '_1.cast(INT), '_1.cast(BIGINT),
'_3.cast(DOUBLE), '_3.cast(FLOAT), '_2.cast(BOOLEAN))
val results = executeQuery(table)
val expected = "1,1,1,1,2.0,2.0,true\\n"
compareResultAsText(results.asJava, expected)
}
@Test
def testUserDefinedScalarFunctionWithParameter(): Unit = {
registerFunction("RichFunc2", new RichFunc2)
UserDefinedFunctionTestUtils.setJobParameters(env, Map("string.value" -> "ABC"))
val ds = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv, "a, b, c")
tEnv.registerTable("t1", ds)
val sqlQuery = "SELECT c FROM t1 where RichFunc2(c)='ABC#Hello'"
val result = tEnv.sqlQuery(sqlQuery)
val expected = "Hello"
val results = executeQuery(result)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testUserDefinedScalarFunctionWithDistributedCache(): Unit = {
val words = "Hello\\nWord"
val filePath = UserDefinedFunctionTestUtils.writeCacheFile("test_words", words)
env.registerCachedFile(filePath, "words")
registerFunction("RichFunc3", new RichFunc3)
val ds = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv, "a, b, c")
tEnv.registerTable("t1", ds)
val sqlQuery = "SELECT c FROM t1 where RichFunc3(c)=true"
val result = tEnv.sqlQuery(sqlQuery)
val expected = "Hello"
val results = executeQuery(result)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testMultipleUserDefinedScalarFunctions(): Unit = {
registerFunction("RichFunc1", new RichFunc1)
registerFunction("RichFunc2", new RichFunc2)
UserDefinedFunctionTestUtils.setJobParameters(env, Map("string.value" -> "Abc"))
val ds = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv, "a, b, c")
tEnv.registerTable("t1", ds)
val sqlQuery = "SELECT c FROM t1 where " +
"RichFunc2(c)='Abc#Hello' or RichFunc1(a)=3 and b=2"
val result = tEnv.sqlQuery(sqlQuery)
val expected = "Hello\\nHello world"
val results = executeQuery(result)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testScalarFunctionConstructorWithParams(): Unit = {
val data = List(
(1, 1L, "Jack#22"),
(2, 2L, "John#19"),
(3, 2L, "Anna#44"),
(4, 3L, "nosharp"))
val in = BatchTableEnvUtil.fromCollection(tEnv, data, "a, b, c")
val func0 = new Func13("default")
val func1 = new Func13("Sunny")
val func2 = new Func13("kevin2")
val result = in.select(func0('c), func1('c), func2('c))
val results = executeQuery(result)
val expected = "default-Anna#44,Sunny-Anna#44,kevin2-Anna#44\\n" +
"default-Jack#22,Sunny-Jack#22,kevin2-Jack#22\\n" +
"default-John#19,Sunny-John#19,kevin2-John#19\\n" +
"default-nosharp,Sunny-nosharp,kevin2-nosharp"
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testRowType(): Unit = {
val data = new mutable.MutableList[(Int, Long, String)]
data.+=((1, 1L, "Jack#22"))
data.+=((2, 2L, "John#19"))
data.+=((3, 2L, "Anna#44"))
data.+=((4, 3L, "nosharp"))
val in = BatchTableEnvUtil.fromCollection(tEnv, data, "a, b, c")
// literals
val result1 = in.select(row(1, "Hi", true))
executeQuery(result1).foreach { record =>
val row = record.getField(0).asInstanceOf[Row]
assertEquals(1, row.getField(0))
assertEquals("Hi", row.getField(1))
assertEquals(true, row.getField(2))
}
// primitive type
val result2 = in.select(row(1, 'a, 'b))
executeQuery(result2).zipWithIndex.foreach { case (record, idx) =>
val row = record.getField(0).asInstanceOf[Row]
assertEquals(1, row.getField(0))
assertEquals(data(idx)._1, row.getField(1))
assertEquals(data(idx)._2, row.getField(2))
}
// non-primitive type
val d = DecimalDataUtils.castFrom(2.0002, 5, 4)
val result3 = in.select(row(BigDecimal(2.0002), 'a, 'c))
executeQuery(result3).zipWithIndex.foreach { case (record, idx) =>
val row = record.getField(0).asInstanceOf[Row]
assertEquals(d.toBigDecimal, row.getField(0))
assertEquals(data(idx)._1, row.getField(1))
assertEquals(data(idx)._3, row.getField(2))
}
}
@Test
def testArrayType(): Unit = {
val in = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv)
// literals
val t1 = in.select(array("Hi", "Hello", "How are you"))
val result1 = executeQuery(t1)
val expected1 = "[Hi, Hello, How are you]\\n" +
"[Hi, Hello, How are you]\\n" +
"[Hi, Hello, How are you]\\n"
TestBaseUtils.compareResultAsText(result1.asJava, expected1)
// primitive type
val t2 = in.select(array(30, '_1, 10))
val result2 = executeQuery(t2)
val expected2 = "[30, 1, 10]\\n" +
"[30, 2, 10]\\n" +
"[30, 3, 10]\\n"
TestBaseUtils.compareResultAsText(result2.asJava, expected2)
// non-primitive type
val t3 = in.select(array("Test", '_3))
val result3 = executeQuery(t3)
val expected3 = "[Test, Hi]\\n" +
"[Test, Hello]\\n" +
"[Test, Hello world]\\n"
TestBaseUtils.compareResultAsText(result3.asJava, expected3)
}
@Test
def testMapType(): Unit = {
val in = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv)
// literals
val t1 = in.select(map(1, "Hello", 2, "Hi"))
val result1 = executeQuery(t1)
val expected1 = "{1=Hello, 2=Hi}\\n" +
"{1=Hello, 2=Hi}\\n" +
"{1=Hello, 2=Hi}\\n"
TestBaseUtils.compareResultAsText(result1.asJava, expected1)
// primitive type
val t2 = in.select(map('_2, 30, 10L, '_1))
val result2 = executeQuery(t2)
val expected2 = "{1=30, 10=1}\\n" +
"{2=30, 10=2}\\n" +
"{2=30, 10=3}\\n"
TestBaseUtils.compareResultAsText(result2.asJava, expected2)
// non-primitive type
val t3 = in.select(map('_1, '_3))
val result3 = executeQuery(t3)
val expected3 = "{1=Hi}\\n" +
"{2=Hello}\\n" +
"{3=Hello world}\\n"
TestBaseUtils.compareResultAsText(result3.asJava, expected3)
val data = new mutable.MutableList[(String, BigDecimal, String, BigDecimal)]
data.+=(("AAA", BigDecimal.valueOf(123.45), "BBB", BigDecimal.valueOf(234.56)))
data.+=(("CCC", BigDecimal.valueOf(345.67), "DDD", BigDecimal.valueOf(456.78)))
data.+=(("EEE", BigDecimal.valueOf(567.89), "FFF", BigDecimal.valueOf(678.99)))
val t4 = BatchTableEnvUtil.fromCollection(tEnv, data, "a, b, c, d")
.select(map('a, 'b, 'c, 'd))
val result4 = executeQuery(t4)
val expected4 = "{AAA=123.45, BBB=234.56}\\n" +
"{CCC=345.67, DDD=456.78}\\n" +
"{EEE=567.89, FFF=678.99}\\n"
TestBaseUtils.compareResultAsText(result4.asJava, expected4)
}
@Test
def testValueConstructor(): Unit = {
val data = new mutable.MutableList[(String, Int, LocalDateTime)]
data.+=(("foo", 12, localDateTime("1984-07-12 14:34:24")))
val t = BatchTableEnvUtil.fromCollection(tEnv, data, "a, b, c").select(
row('a, 'b, 'c),
array(12, 'b),
map('a, 'c))
val result = executeQuery(t)
val nestedRow = result.head.getField(0).asInstanceOf[Row]
assertEquals(data.head._1, nestedRow.getField(0))
assertEquals(data.head._2, nestedRow.getField(1))
assertEquals(data.head._3, nestedRow.getField(2))
val arr = result.head.getField(1).asInstanceOf[Array[Integer]]
assertEquals(12, arr(0))
assertEquals(data.head._2, arr(1))
val hashMap = result.head.getField(2).asInstanceOf[util.HashMap[String, Timestamp]]
assertEquals(data.head._3, hashMap.get(data.head._1.asInstanceOf[String]))
}
@Test
def testSelectStarFromNestedTable(): Unit = {
val table = BatchTableEnvUtil.fromCollection(tEnv, Seq(
((0, 0), "0"),
((1, 1), "1"),
((2, 2), "2")
)).select('*)
val results = executeQuery(table)
results.zipWithIndex.foreach {
case (row, i) =>
val nestedRow = row.getField(0).asInstanceOf[(Int, Int)]
assertEquals(i, nestedRow._1)
assertEquals(i, nestedRow._2)
assertEquals(i.toString, row.getField(1))
}
}
@Test
def testFunctionWithUnicodeParameters(): Unit = {
val data = List(
("a\\u0001b", "c\\"d", "e\\\\\\"\\u0004f"), // uses Java/Scala escaping
("x\\u0001y", "y\\"z", "z\\\\\\"\\u0004z")
)
val splitUDF0 = new SplitUDF(deterministic = true)
val splitUDF1 = new SplitUDF(deterministic = false)
// uses Java/Scala escaping
val ds = BatchTableEnvUtil.fromCollection(tEnv, data, "a, b, c")
.select(
        splitUDF0('a, "\u0001", 0) as 'a0,
        splitUDF1('a, "\u0001", 0) as 'a1,
        splitUDF0('b, "\"", 1) as 'b0,
        splitUDF1('b, "\"", 1) as 'b1,
        splitUDF0('c, "\\\"\u0004", 0) as 'c0,
        splitUDF1('c, "\\\"\u0004", 0) as 'c1)
val results = executeQuery(ds)
    val expected = List("a,a,d,d,e,e", "x,x,z,z,z,z").mkString("\n")
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testSplitFieldsOnCustomType(): Unit = {
tEnv.getConfig.setMaxGeneratedCodeLength(1) // splits fields
val ds = CollectionBatchExecTable.getCustomTypeDataSet(tEnv, "myInt, myLong, myString")
.filter('myString.like("%a%") && 'myString.charLength() > 12)
.select('myInt, 'myLong, 'myString.charLength())
val expected = "3,3,25\\n" + "3,5,14\\n"
val results = executeQuery(ds)
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
}
@SerialVersionUID(1L)
object HashCode extends ScalarFunction {
def eval(s: String): Int = s.hashCode
}
@SerialVersionUID(1L)
object OldHashCode extends ScalarFunction {
def eval(s: String): Int = -1
}
| tillrohrmann/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/table/CalcITCase.scala | Scala | apache-2.0 | 23,921 |
/*
* Copyright (c) 2014-2016
* nonblocking.at gmbh [http://www.nonblocking.at]
*
* This file is part of Cliwix.
*
* Cliwix is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package at.nonblocking.cliwix.core.util
import javax.xml.bind.Unmarshaller
import at.nonblocking.cliwix.model._
import com.typesafe.scalalogging.slf4j.LazyLogging
import java.{util => jutil}
private[core] class UseMapDbCollectionForPotentialHugeListsUnmarshalListener(potentialHugeLists: List[Class[_]], resourceAwareCollectionFactory: ResourceAwareCollectionFactory)
extends Unmarshaller.Listener with LazyLogging {
override def beforeUnmarshal(target: scala.Any, parent: scala.Any): Unit = {
target match {
case list: ListType[_] =>
if (list.getList == null && potentialHugeLists.contains(target.getClass)) {
val listMethod = target.getClass.getDeclaredMethod("getList")
val listType = listMethod.getReturnType
val listField = target.getClass.getDeclaredFields.find(_.getType == listType)
if (listField.isDefined) {
listField.get.setAccessible(true)
val map = resourceAwareCollectionFactory.createMap()
val wrapper = classOf[MapValuesListWrapper[_, _]].getDeclaredConstructor(classOf[jutil.Map[_, _]]).newInstance(map)
listField.get.set(target, wrapper)
} else {
logger.warn("Couldn't determine list field of type: {}", target.getClass)
}
}
case _ =>
}
}
}
| nonblocking/cliwix | cliwix-core/src/main/scala/at/nonblocking/cliwix/core/util/UseMapDbCollectionForPotentialHugeListsUnmarshalListener.scala | Scala | agpl-3.0 | 2,111 |
package org.velvia.filo.vectors
import org.scalatest.{FunSpec, Matchers}
import org.velvia.filo.{FiloVector, GrowableVector, VectorTooSmall}
class IntBinaryVectorTest extends FunSpec with Matchers {
describe("IntAppendingVector") {
it("should append a mix of Ints and read them all back") {
val builder = IntBinaryVector.appendingVectorNoNA(4)
val orig = Seq(1, 2, -5, 101)
orig.foreach(builder.addData)
builder.length should equal (4)
builder.freeze().toSeq should equal (orig)
}
it("should append 16-bit Ints and read them back") {
val builder = IntBinaryVector.appendingVectorNoNA(5)
val orig = Seq(1, 0, -127, Short.MaxValue, Short.MinValue)
orig.foreach(builder.addData)
builder.length should equal (5)
builder.freeze().toSeq should equal (orig)
}
it("should append bytes and read them back") {
val builder = IntBinaryVector.appendingVectorNoNA(4)
val orig = Seq(1, 0, -128, 127)
orig.foreach(builder.addData)
builder.length should equal (4)
builder.freeze().toSeq should equal (orig)
}
it("should be able to create new FiloVector from frozen appending vector") {
// Make sure it can freeze when primaryMaxBytes is much greater.
val builder = IntBinaryVector.appendingVectorNoNA(1000)
val orig = Seq(1, 0, -128, 127)
orig.foreach(builder.addData)
val readVect = IntBinaryVector(builder.base, builder.offset, builder.numBytes)
readVect.length should equal (4)
readVect.toSeq should equal (orig)
builder.frozenSize should equal (20)
val frozen = builder.freeze()
frozen.length should equal (4)
frozen.toSeq should equal (orig)
}
it("should throw error if not enough space to add new items") {
val builder = IntBinaryVector.appendingVectorNoNA(4)
val orig = Seq(1, 2, -5, 101)
orig.foreach(builder.addData)
intercept[VectorTooSmall] { builder.addNA() }
}
}
describe("IntBinaryVector 2/4 bit") {
it("should append and read back list with nbits=4") {
val builder = IntBinaryVector.appendingVectorNoNA(10, nbits=4, signed=false)
builder.length should equal (0)
builder.addData(2)
builder.numBytes should equal (5)
builder.toSeq should equal (Seq(2))
builder.addData(4)
builder.addData(3)
builder.length should equal (3)
builder.toSeq should equal (Seq(2, 4, 3))
builder.frozenSize should equal (6)
val frozen = builder.freeze()
frozen.length should equal (3)
frozen.toSeq should equal (Seq(2, 4, 3))
val intVect = FiloVector[Int](builder.toFiloBuffer)
intVect.toSeq should equal (Seq(2, 4, 3))
}
it("should append and read back list with nbits=2") {
val builder = IntBinaryVector.appendingVectorNoNA(10, nbits=2, signed=false)
val orig = Seq(0, 2, 1, 3, 2)
orig.foreach(builder.addData)
builder.toSeq should equal (orig)
builder.numBytes should equal (6)
val intVect = FiloVector[Int](builder.toFiloBuffer)
intVect.toSeq should equal (orig)
}
}
describe("MaskedIntAppendingVector") {
it("should append a list of all NAs and read all NAs back") {
val builder = IntBinaryVector.appendingVector(100)
builder.addNA
builder.isAllNA should be (true)
builder.noNAs should be (false)
val sc = builder.optimize()
sc.base should not equal (builder.base)
sc.length should equal (1)
sc(0) // Just to make sure this does not throw an exception
sc.isAvailable(0) should equal (false)
sc.toList should equal (Nil)
sc.optionIterator.toSeq should equal (Seq(None))
}
it("should encode a mix of NAs and Ints and decode iterate and skip NAs") {
val cb = IntBinaryVector.appendingVector(5)
cb.addNA
cb.addData(101)
cb.addData(102)
cb.addData(103)
cb.addNA
cb.isAllNA should be (false)
cb.noNAs should be (false)
val sc = cb.optimize()
sc.base should not equal (cb.base)
sc.length should equal (5)
sc.isAvailable(0) should equal (false)
sc.isAvailable(1) should equal (true)
sc.isAvailable(4) should equal (false)
sc(1) should equal (101)
sc.boxed(2) should equal (102)
sc.boxed(2) shouldBe a [java.lang.Integer]
sc.get(0) should equal (None)
sc.get(-1) should equal (None)
sc.get(2) should equal (Some(102))
sc.toList should equal (List(101, 102, 103))
}
it("should be able to append lots of ints and grow vector") {
val numInts = 1000
val builder = IntBinaryVector.appendingVector(numInts / 2)
(0 until numInts).foreach(builder.addData)
builder.length should equal (numInts)
builder.isOffheap shouldEqual false
builder.isAllNA should be (false)
builder.noNAs should be (true)
}
it("should be able to append lots of ints off-heap and grow vector") {
val numInts = 1000
val builder = IntBinaryVector.appendingVector(numInts / 2, offheap=true)
(0 until numInts).foreach(builder.addData)
builder.length should equal (numInts)
builder.isOffheap shouldEqual true
builder.isAllNA should be (false)
builder.noNAs should be (true)
}
it("should be able to grow vector even if adding all NAs") {
val numInts = 1000
val builder = IntBinaryVector.appendingVector(numInts / 2)
(0 until numInts).foreach(i => builder.addNA)
builder.length should equal (numInts)
builder.isAllNA should be (true)
builder.noNAs should be (false)
}
it("should be able to return minMax accurately with NAs") {
val cb = IntBinaryVector.appendingVector(5)
cb.addNA
cb.addData(101)
cb.addData(102)
cb.addData(103)
cb.addNA
val inner = cb.asInstanceOf[GrowableVector[Int]].inner.asInstanceOf[MaskedIntAppendingVector]
inner.minMax should equal ((101, 103))
}
it("should be able to freeze() and minimize bytes used") {
val builder = IntBinaryVector.appendingVector(100)
// Test numBytes to make sure it's accurate
builder.numBytes should equal (4 + 16 + 4) // 2 long words needed for 100 bits
(0 to 4).foreach(builder.addData)
builder.numBytes should equal (4 + 16 + 4 + 20)
val frozen = builder.freeze()
frozen.numBytes should equal (4 + 8 + 4 + 20) // bitmask truncated
frozen.length shouldEqual 5
frozen.toSeq should equal (0 to 4)
}
it("should toFiloBuffer and read back using FiloVector.apply") {
val cb = IntBinaryVector.appendingVector(5)
cb.addNA
cb.addData(101)
cb.addData(102)
cb.addData(103)
cb.addNA
val buffer = cb.optimize().toFiloBuffer
val readVect = FiloVector[Int](buffer)
readVect.toSeq should equal (Seq(101, 102, 103))
}
it("should toFiloBuffer from offheap and read back using FiloVector.apply") {
val cb = IntBinaryVector.appendingVector(5, offheap=true)
cb.addNA
cb.addData(101)
cb.addData(102)
cb.addData(103)
cb.addNA
val buffer = cb.optimize().toFiloBuffer
val readVect = FiloVector[Int](buffer)
readVect.toSeq should equal (Seq(101, 102, 103))
}
it("should support resetting and optimizing AppendableVector multiple times") {
val cb = IntBinaryVector.appendingVector(5)
// Use large numbers on purpose so cannot optimized to less than 32 bits
val orig = Seq(100000, 200001, 300002)
cb.addNA()
orig.foreach(cb.addData)
cb.toSeq should equal (orig)
val optimized = cb.optimize()
assert(optimized.base != cb.base) // just compare instances
val readVect1 = FiloVector[Int](optimized.toFiloBuffer)
readVect1.toSeq should equal (orig)
// Now the optimize should not have damaged original vector
cb.toSeq should equal (orig)
cb.reset()
val orig2 = orig.map(_ * 2)
orig2.foreach(cb.addData)
val readVect2 = FiloVector[Int](cb.optimize().toFiloBuffer)
readVect2.toSeq should equal (orig2)
cb.toSeq should equal (orig2)
}
it("should be able to optimize a 32-bit appending vector to smaller size") {
val builder = IntBinaryVector.appendingVector(100)
(0 to 4).foreach(builder.addData)
val optimized = builder.optimize()
optimized.length shouldEqual 5
optimized.toSeq should equal (0 to 4)
optimized.numBytes should equal (4 + 3) // nbits=4, so only 3 extra bytes
}
it("should be able to optimize a 32-bit offheap vector to smaller size") {
val builder = IntBinaryVector.appendingVector(100, offheap=true)
(0 to 4).foreach(builder.addData)
val optimized = builder.optimize()
optimized.length shouldEqual 5
optimized.isOffheap shouldEqual true
optimized.toSeq should equal (0 to 4)
optimized.numBytes should equal (4 + 3) // nbits=4, so only 3 extra bytes
}
it("should be able to optimize constant ints to an IntConstVector") {
val builder = IntBinaryVector.appendingVector(100)
(0 to 4).foreach(n => builder.addData(999))
val buf = builder.optimize().toFiloBuffer
val readVect = FiloVector[Int](buf)
readVect shouldBe a[IntConstVector]
readVect.toSeq should equal (Seq(999, 999, 999, 999, 999))
}
}
}
| velvia/filo | filo-scala/src/test/scala/org.velvia.filo/vectors/IntBinaryVectorTest.scala | Scala | apache-2.0 | 9,421 |
package ir.ast.debug
import ir.Type
import ir.ast.{FPattern, Lambda, Pattern}
import ir.interpreter.Interpreter._
/**
* A pattern for debugging Lift code.
* PrintView adds a comment into the OpenCL kernel with the optional message and
* a string representation of the View at the point in expression where PrintView
* is inserted.
*/
case class PrintView(msg: String = "", f: Lambda) extends Pattern(arity = f.arity)
with FPattern {
override def checkType(argType: Type,
setType: Boolean): Type = {
f.checkType(argType, setType)
}
override def eval(valueMap: ValueMap, args: Any*): Any = {
assert(args.length == arity)
f.eval(valueMap, args:_*)
}
override def copy(f: Lambda): Pattern = PrintView(msg, f)
}
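
// Usage sketch (hypothetical expression; `plusOne` is an assumed user function):
// wrapping a stage in PrintView is purely observational -- checkType above just
// delegates to the wrapped lambda, so adding or removing the wrapper never
// changes the expression's type or result.
//
//   val debugged = fun(x => PrintView("after map", MapGlb(plusOne)) $ x)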
| lift-project/lift | src/main/ir/ast/debug/PrintView.scala | Scala | mit | 821 |
package ersirjs
import ersir.shared.Posting
import ersirjs.ErsirJS.Postings
import org.scalajs.dom.{CanvasRenderingContext2D, FileReader, UIEvent, html}
import rescala.extra.Tags._
import rescala.default._
import scalatags.JsDom
import scalatags.JsDom.all._
import scalatags.JsDom.tags2.{article, main}
class Index(connected: Signal[String]) {
val textinput = textarea.render
val imageinput = input(`type` := "file", accept := "image/*", attr("capture") := "camera", "Take a picture").render
val addPost = Events.fromCallback[Posting] { postCB =>
onclick := { _: UIEvent =>
if (
scala.scalajs.js.isUndefined(imageinput.files) || scala.scalajs.js.isUndefined(
imageinput.files(0)
)
) {
val posting = Posting.parse(textinput.value.toString)
textinput.value = ""
postCB(posting)
} else {
val reader = new FileReader()
println(s"reading ${imageinput.files(0)}")
reader.onload = { _ =>
val dataUrl = reader.result.asInstanceOf[String]
println(s"loaded $dataUrl")
val imageTag = img.render
imageTag.onload = { _ =>
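            // Downscale client-side before upload: draw onto a 300px-wide canvas
            // (height keeps the aspect ratio), then re-encode as JPEG at quality
            // 0.5 so the posted data URL stays small.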
val canvasTag = canvas.render
val ctx = canvasTag.getContext("2d").asInstanceOf[CanvasRenderingContext2D]
val width = 300
val height = Math.floor(width.toDouble * imageTag.height / imageTag.width).toInt
canvasTag.width = width
canvasTag.height = height
ctx.drawImage(imageTag, 0, 0, width.toDouble, height.toDouble)
val encoded = canvasTag.toDataURL("image/jpeg", 0.5)
println(s"printing $encoded")
val posting = Posting.parse(textinput.value.toString, encoded)
textinput.value = ""
imageinput.value = imageinput.defaultValue
postCB(posting)
}
imageTag.src = dataUrl
}
reader.readAsDataURL(imageinput.files(0))
}
}
}
val reset = Events.fromCallback[UIEvent] { onclick := _ }
def gen(list: Signal[Postings]): JsDom.TypedTag[html.Body] = {
val articles = list.map { itemsToDisplay =>
itemsToDisplay.toList.map { emergentcy =>
article(
lang := "de",
if (emergentcy.img.isEmpty) frag()
else div(cls := "pic", style := s"background-image: url(${emergentcy.img});"),
div(
h1(stringFrag(emergentcy.title)),
stringFrag(emergentcy.desc)
)
)
}
}
body(
id := "index",
header(
cls := connected,
Icons.disconnected,
img(cls := "logo", src := "static/logo-small.svg"),
Icons.lamp
),
article(cls := "controls", textinput, imageinput, button("Post", addPost.data)),
main(articles.asModifierL),
article(
cls := "controls",
button("Reset", reset.data),
button("Fullscreen", onclick := { (_: UIEvent) => Fullscreen.toggleFullscreen() })
)
)
}
}
| guidosalva/REScala | Code/Examples/Ersir/web/src/main/scala/ersirjs/Index.scala | Scala | apache-2.0 | 3,028 |
package info.armado.ausleihe.database.converter
import java.sql.Timestamp
import java.time.LocalDateTime
import javax.persistence.{AttributeConverter, Converter}
/**
* A converter used to convert [[LocalDateTime]] instances to [[Timestamp]] instances, which can be persisted inside a database via JPA.
*
* @author Marc Arndt
* @since 24.06.17
*/
@Converter(autoApply = true)
class LocalDateTimeConverter extends AttributeConverter[LocalDateTime, Timestamp] {
override def convertToEntityAttribute(databaseValue: Timestamp): LocalDateTime =
Option(databaseValue).map(_.toLocalDateTime).orNull
override def convertToDatabaseColumn(entityValue: LocalDateTime): Timestamp =
Option(entityValue).map(Timestamp.valueOf(_)).orNull
}
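
// Usage sketch (hypothetical entity): because the converter is declared with
// autoApply = true, JPA applies it to every LocalDateTime attribute without an
// explicit @Convert annotation.
//
//   @Entity class Lending {
//     @Id var id: Long = _
//     var lentAt: LocalDateTime = _ // persisted as java.sql.Timestamp
//   }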
| Spielekreis-Darmstadt/lending | lending-database/src/main/scala/info/armado/ausleihe/database/converter/LocalDateTimeConverter.scala | Scala | apache-2.0 | 750 |
package de.sciss.fscape.tests
import de.sciss.file._
import de.sciss.fscape.gui.SimpleGUI
import de.sciss.fscape.{Graph, graph, stream}
import scala.swing.Swing
object TransposeTest extends App {
val width = 2048 // 1920 // 1024
val height = 1535 // 1080 // 768
val fIn = userHome / "Pictures" / "naya" / "18237862_10156159758633065_8364916541260985254_o.jpg"
val fOut = userHome / "Documents" / "test-rot.jpg"
val g = Graph {
import graph._
val in = ImageFileIn(file = fIn.toURI, numChannels = 3)
// val xSin = SinOsc(Seq[GE](0.5/width, 1.0/width, 1.5/width)).abs
// val ySin = SinOsc(0.5/(height * width))
// val in = xSin * ySin
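    // TransposeMatrix swaps the (height x width) pixel matrix and ReverseWindow
    // then reverses the whole frame -- together a flip about the anti-diagonal,
    // which is why the output spec below swaps width and height.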
val sig0 = TransposeMatrix(in = in, rows = height, columns = width)
val sig = ReverseWindow(sig0, size = height * width)
val spec = ImageFile.Spec(width = height, height = width, numChannels = 3,
fileType = ImageFile.Type.JPG, sampleFormat = ImageFile.SampleFormat.Int8,
quality = 100)
ImageFileOut(file = fOut.toURI, spec = spec, in = sig)
}
val config = stream.Control.Config()
config.blockSize = 1024 // exposes problem
val ctrl = stream.Control(config)
ctrl.run(g)
Swing.onEDT {
SimpleGUI(ctrl)
}
println("Running.")
}
| Sciss/FScape-next | core/jvm/src/test/scala/de/sciss/fscape/tests/TransposeTest.scala | Scala | agpl-3.0 | 1,265 |
package ohnosequences.db.rna16s
import com.amazonaws.services.s3.AmazonS3ClientBuilder
import ohnosequences.s3.{S3Object, request}
import java.io.File
/**
* Helpers:
* - Partial applications of functions from `s3`, using a standard S3Client
* built here, [[helpers.s3Client]], and with a default part size,
* [[helpers.partSize5MiB]]. All functions here map their Errors to an
* object of type [[Error.S3Error]].
* - Method to check whether all the files for a version exist in S3
*/
private[rna16s] case object helpers {
lazy val s3Client = AmazonS3ClientBuilder.standard().build()
val partSize5MiB = 5 * 1024 * 1024
def getCheckedFile(s3Obj: S3Object, file: File) =
request.getCheckedFile(s3Client)(s3Obj, file).left.map(Error.S3Error)
def paranoidPutFile(file: File, s3Obj: S3Object) =
request
.paranoidPutFile(s3Client)(file, s3Obj, partSize5MiB)(
data.hashingFunction
)
.left
.map(Error.S3Error)
def getCheckedFileIfDifferent(s3Obj: S3Object, file: File) =
request
.getCheckedFileIfDifferent(s3Client)(s3Obj, file)
.left
.map(Error.S3Error)
  /** Returns true when the object exists; also defaults to true when
    * communication with S3 cannot be established */
def objectExists(s3Obj: S3Object) =
request
.objectExists(s3Client)(s3Obj)
.fold(
err => true,
identity
)
/**
* Finds any object under [[data.s3Prefix(version)]] that could be overwritten
* by [[mirrorNewVersion]].
*
* @param version is the version that specifies the S3 folder
*
* @return Some(object) with the first object found under
* [[data.s3Prefix(version)]] if any, None otherwise.
*/
def findVersionInS3(version: Version): Option[S3Object] =
data
.everything(version)
.find(
obj => objectExists(obj)
)
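
  // Usage sketch (assumed call site): abort a mirroring operation early when
  // the S3 folder for `version` already holds objects.
  //
  //   findVersionInS3(version).foreach { obj =>
  //     sys.error(s"S3 folder for $version already populated, found $obj")
  //   }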
}
| ohnosequences/db.rna16s | src/main/scala/helpers.scala | Scala | agpl-3.0 | 1,869 |
package spgui.widgets.itemexplorer
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
import scalacss.ScalaCssReact._
import spgui.SPWidget
import spgui.components.Icon
import java.util.UUID
import scalajs.js
import js.Dynamic.{ literal => l }
import js.JSON
// TODO refactor this and TreeView into diode-like one-source-of-truth thinking
// RootDirectory should not be needed
// upon dragging an item onto another, a validation should be made somewhere and the notification
// should be sent to itemService/modelService which should in turn notify ItemExplorer to rerender itself
// should have more props and less state
case class OperationDirItem(name: String, id: String, content: String) extends DirectoryItem
case class SOPSpecDirItem(name: String, id: String, content: String) extends DirectoryItem
object SPItemsToRootDirectory {
def apply(spItems: Seq[IDAble]) = {
val dirItems = spItems.map{
case HierarchyRoot(name, children, attributes, id) =>
Directory(name, id.toString, children.map(_.item.toString))
case Operation(name, conditions, attributes, id) =>
OperationDirItem(name, id.toString, "OpDirItemContent")
case SOPSpec(name, sop, attributes, id) =>
SOPSpecDirItem(name, id.toString, "SOPSpecDirItemContent")
}
new RootDirectory(dirItems)
}
}
object GetItemIcon {
def apply(item: DirectoryItem): VdomNode = item match {
case d: Directory => Icon.folder
case op: OperationDirItem => Icon.arrowCircleRight
case ss: SOPSpecDirItem => Icon.sitemap
}
}
object RenderItemContent {
// pre-tag keeps the indentation and gives a nice frame
def contentToElement(content: js.Object) = <.pre(
Style.itemContent,
JSON.stringify(content, space = 2),
^.onClick --> Callback.log(
"Clicked button with content " + JSON.stringify(content) +
". TODO: make this open the json in itemeditor"
)
)
def apply(item: DirectoryItem): VdomNode = item match {
case item: OperationDirItem => contentToElement(l("stuff" -> item.content))
case item: SOPSpecDirItem => contentToElement(l("stuff" -> item.content))
}
}
object OnSaveButtonClick {
val printText = Callback.log("You clicked the save button, it does nothing for now, this callback has access to the directoryItems, however, see below:")
val printItems = (rootDir: RootDirectory) => Callback.log(rootDir.items.toString)
def apply(rootDirectory: RootDirectory) = printText >> printItems(rootDirectory)
}
object ItemExplorer {
def emptyDir() = Directory("New Directory", UUID.randomUUID().toString, List())
def apply() = SPWidget(spwb => TreeView(
SPItemsToRootDirectory(SampleSPItems()),
("Directory", () => emptyDir()) ::
Nil,
item => GetItemIcon(item),
item => RenderItemContent(item),
rootDir => OnSaveButtonClick(rootDir)
))
}
| kristoferB/SP | spgui/src/main/scala/spgui/widgets/itemexplorer/ItemExplorer.scala | Scala | mit | 3,036 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.util
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml._
import org.apache.spark.ml.evaluation.Evaluator
import org.apache.spark.ml.feature.{Instance, LabeledPoint}
import org.apache.spark.ml.linalg.{Vector, Vectors, VectorUDT}
import org.apache.spark.ml.param.ParamMap
import org.apache.spark.ml.param.shared.HasWeightCol
import org.apache.spark.ml.recommendation.{ALS, ALSModel}
import org.apache.spark.ml.tree.impl.TreeTests
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
object MLTestingUtils extends SparkFunSuite {
def checkCopyAndUids[T <: Estimator[_]](estimator: T, model: Model[_]): Unit = {
assert(estimator.uid === model.uid, "Model uid does not match parent estimator")
// copied model must have the same parent
val copied = model.copy(ParamMap.empty)
.asInstanceOf[Model[_]]
assert(copied.parent == model.parent)
assert(copied.parent.uid == model.parent.uid)
}
def checkNumericTypes[M <: Model[M], T <: Estimator[M]](
estimator: T,
spark: SparkSession,
isClassification: Boolean = true)(check: (M, M) => Unit): Unit = {
val dfs = if (isClassification) {
genClassifDFWithNumericLabelCol(spark)
} else {
genRegressionDFWithNumericLabelCol(spark)
}
val finalEstimator = estimator match {
case weighted: Estimator[M] with HasWeightCol =>
weighted.set(weighted.weightCol, "weight")
weighted
case _ => estimator
}
val expected = finalEstimator.fit(dfs(DoubleType))
val actuals = dfs.keys.filter(_ != DoubleType).map { t =>
finalEstimator.fit(dfs(t))
}
actuals.foreach(actual => check(expected, actual))
val dfWithStringLabels = spark.createDataFrame(Seq(
("0", 1, Vectors.dense(0, 2, 3), 0.0)
)).toDF("label", "weight", "features", "censor")
val thrown = intercept[IllegalArgumentException] {
estimator.fit(dfWithStringLabels)
}
assert(thrown.getMessage.contains(
"Column label must be of type numeric but was actually of type string"))
estimator match {
case weighted: Estimator[M] with HasWeightCol =>
val dfWithStringWeights = spark.createDataFrame(Seq(
(0, "1", Vectors.dense(0, 2, 3), 0.0)
)).toDF("label", "weight", "features", "censor")
weighted.set(weighted.weightCol, "weight")
val thrown = intercept[IllegalArgumentException] {
weighted.fit(dfWithStringWeights)
}
assert(thrown.getMessage.contains(
"Column weight must be of type numeric but was actually of type string"))
case _ =>
}
}
def checkNumericTypes[T <: Evaluator](evaluator: T, spark: SparkSession): Unit = {
val dfs = genEvaluatorDFWithNumericLabelCol(spark, "label", "prediction")
val expected = evaluator.evaluate(dfs(DoubleType))
val actuals = dfs.keys.filter(_ != DoubleType).map(t => evaluator.evaluate(dfs(t)))
actuals.foreach(actual => assert(expected === actual))
val dfWithStringLabels = spark.createDataFrame(Seq(
("0", 0d)
)).toDF("label", "prediction")
val thrown = intercept[IllegalArgumentException] {
evaluator.evaluate(dfWithStringLabels)
}
assert(thrown.getMessage.contains(
"Column label must be of type numeric but was actually of type string"))
}
def genClassifDFWithNumericLabelCol(
spark: SparkSession,
labelColName: String = "label",
featuresColName: String = "features",
weightColName: String = "weight"): Map[NumericType, DataFrame] = {
val df = spark.createDataFrame(Seq(
(0, Vectors.dense(0, 2, 3)),
(1, Vectors.dense(0, 3, 1)),
(0, Vectors.dense(0, 2, 2)),
(1, Vectors.dense(0, 3, 9)),
(0, Vectors.dense(0, 2, 6))
)).toDF(labelColName, featuresColName)
val types =
Seq(ShortType, LongType, IntegerType, FloatType, ByteType, DoubleType, DecimalType(10, 0))
types.map { t =>
val castDF = df.select(col(labelColName).cast(t), col(featuresColName))
t -> TreeTests.setMetadata(castDF, 2, labelColName, featuresColName)
.withColumn(weightColName, round(rand(seed = 42)).cast(t))
}.toMap
}
def genRegressionDFWithNumericLabelCol(
spark: SparkSession,
labelColName: String = "label",
weightColName: String = "weight",
featuresColName: String = "features",
censorColName: String = "censor"): Map[NumericType, DataFrame] = {
val df = spark.createDataFrame(Seq(
(1, Vectors.dense(1)),
(2, Vectors.dense(2)),
(3, Vectors.dense(3)),
(4, Vectors.dense(4))
)).toDF(labelColName, featuresColName)
val types =
Seq(ShortType, LongType, IntegerType, FloatType, ByteType, DoubleType, DecimalType(10, 0))
types.map { t =>
val castDF = df.select(col(labelColName).cast(t), col(featuresColName))
t -> TreeTests.setMetadata(castDF, 0, labelColName, featuresColName)
.withColumn(censorColName, lit(0.0))
.withColumn(weightColName, round(rand(seed = 42)).cast(t))
}.toMap
}
def genEvaluatorDFWithNumericLabelCol(
spark: SparkSession,
labelColName: String = "label",
predictionColName: String = "prediction"): Map[NumericType, DataFrame] = {
val df = spark.createDataFrame(Seq(
(0, 0d),
(1, 1d),
(2, 2d),
(3, 3d),
(4, 4d)
)).toDF(labelColName, predictionColName)
val types =
Seq(ShortType, LongType, IntegerType, FloatType, ByteType, DoubleType, DecimalType(10, 0))
types
.map(t => t -> df.select(col(labelColName).cast(t), col(predictionColName)))
.toMap
}
/**
* Given a DataFrame, generate two output DataFrames: one having the original rows oversampled
* an integer number of times, and one having the original rows but with a column of weights
* proportional to the number of oversampled instances in the oversampled DataFrames.
*/
def genEquivalentOversampledAndWeightedInstances(
data: Dataset[LabeledPoint],
seed: Long): (Dataset[Instance], Dataset[Instance]) = {
import data.sparkSession.implicits._
val rng = new scala.util.Random(seed)
val sample: () => Int = () => rng.nextInt(10) + 1
val sampleUDF = udf(sample)
val rawData = data.select("label", "features").withColumn("samples", sampleUDF())
val overSampledData = rawData.rdd.flatMap { case Row(label: Double, features: Vector, n: Int) =>
Iterator.fill(n)(Instance(label, 1.0, features))
}.toDS()
rng.setSeed(seed)
val weightedData = rawData.rdd.map { case Row(label: Double, features: Vector, n: Int) =>
Instance(label, n.toDouble, features)
}.toDS()
(overSampledData, weightedData)
}
/**
* Helper function for testing sample weights. Tests that oversampling each point is equivalent
* to assigning a sample weight proportional to the number of samples for each point.
*/
def testOversamplingVsWeighting[M <: Model[M], E <: Estimator[M]](
data: Dataset[LabeledPoint],
estimator: E with HasWeightCol,
modelEquals: (M, M) => Unit,
seed: Long): Unit = {
val (overSampledData, weightedData) = genEquivalentOversampledAndWeightedInstances(
data, seed)
val overSampledModel = estimator.set(estimator.weightCol, "").fit(overSampledData)
val weightedModel = estimator.set(estimator.weightCol, "weight").fit(weightedData)
modelEquals(weightedModel, overSampledModel)
}
/**
* Helper function for testing sample weights. Tests that injecting a large number of outliers
* with very small sample weights does not affect fitting. The predictor should learn the true
* model despite the outliers.
*/
def testOutliersWithSmallWeights[M <: Model[M], E <: Estimator[M]](
data: Dataset[LabeledPoint],
estimator: E with HasWeightCol,
numClasses: Int,
modelEquals: (M, M) => Unit,
outlierRatio: Int): Unit = {
import data.sqlContext.implicits._
val outlierDS = data.withColumn("weight", lit(1.0)).as[Instance].flatMap {
case Instance(l, w, f) =>
val outlierLabel = if (numClasses == 0) -l else numClasses - l - 1
List.fill(outlierRatio)(Instance(outlierLabel, 0.0001, f)) ++ List(Instance(l, w, f))
}
val trueModel = estimator.set(estimator.weightCol, "").fit(data)
val outlierModel = estimator.set(estimator.weightCol, "weight")
.fit(outlierDS)
modelEquals(trueModel, outlierModel)
}
/**
* Helper function for testing sample weights. Tests that giving constant weights to each data
* point yields the same model, regardless of the magnitude of the weight.
*/
def testArbitrarilyScaledWeights[M <: Model[M], E <: Estimator[M]](
data: Dataset[LabeledPoint],
estimator: E with HasWeightCol,
modelEquals: (M, M) => Unit): Unit = {
estimator.set(estimator.weightCol, "weight")
val models = Seq(0.01, 1.0, 1000.0).map { w =>
val df = data.withColumn("weight", lit(w))
estimator.fit(df)
}
models.sliding(2).foreach { case Seq(m1, m2) => modelEquals(m1, m2)}
}
/**
* Helper function for testing different input types for "features" column. Given a DataFrame,
* generate three output DataFrames: one having vector "features" column with float precision,
* one having double array "features" column with float precision, and one having float array
* "features" column.
*/
def generateArrayFeatureDataset(dataset: Dataset[_],
featuresColName: String = "features"): (Dataset[_], Dataset[_], Dataset[_]) = {
val toFloatVectorUDF = udf { (features: Vector) =>
Vectors.dense(features.toArray.map(_.toFloat.toDouble))}
val toDoubleArrayUDF = udf { (features: Vector) => features.toArray}
val toFloatArrayUDF = udf { (features: Vector) => features.toArray.map(_.toFloat)}
val newDataset = dataset.withColumn(featuresColName, toFloatVectorUDF(col(featuresColName)))
val newDatasetD = newDataset.withColumn(featuresColName, toDoubleArrayUDF(col(featuresColName)))
val newDatasetF = newDataset.withColumn(featuresColName, toFloatArrayUDF(col(featuresColName)))
assert(newDataset.schema(featuresColName).dataType.equals(new VectorUDT))
assert(newDatasetD.schema(featuresColName).dataType.equals(new ArrayType(DoubleType, false)))
assert(newDatasetF.schema(featuresColName).dataType.equals(new ArrayType(FloatType, false)))
(newDataset, newDatasetD, newDatasetF)
}
def modelPredictionEquals[M <: PredictionModel[_, M]](
data: DataFrame,
compareFunc: (Double, Double) => Boolean,
fractionInTol: Double)(
model1: M,
model2: M): Unit = {
val pred1 = model1.transform(data).select(model1.getPredictionCol).collect()
val pred2 = model2.transform(data).select(model2.getPredictionCol).collect()
val inTol = pred1.zip(pred2).count { case (p1, p2) =>
val x = p1.getDouble(0)
val y = p2.getDouble(0)
compareFunc(x, y)
}
assert(inTol / pred1.length.toDouble >= fractionInTol)
}
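
  // Usage sketch (assumed model/estimator types; `~==` with relTol comes from
  // MLlib's TestingUtils and is assumed to be in scope):
  //
  //   MLTestingUtils.testOversamplingVsWeighting[LinearRegressionModel, LinearRegression](
  //     dataset,                  // a Dataset[LabeledPoint]
  //     new LinearRegression(),   // weightCol is set by the helper itself
  //     (m1, m2) => assert(m1.coefficients ~== m2.coefficients relTol 0.01),
  //     seed = 42L)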
}
| WindCanDie/spark | mllib/src/test/scala/org/apache/spark/ml/util/MLTestingUtils.scala | Scala | apache-2.0 | 11,930 |
package org.jetbrains.plugins.scala
package findUsages
package setter
import com.intellij.psi._
import com.intellij.psi.search.searches.ReferencesSearch
import com.intellij.psi.search.{PsiSearchHelper, SearchScope, TextOccurenceProcessor, UsageSearchContext}
import com.intellij.util.{Processor, QueryExecutor}
import org.jetbrains.plugins.scala.extensions.{Parent, inReadAction}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScReferencePattern
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScAssignStmt
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScVariable}
import org.jetbrains.plugins.scala.lang.psi.fake.FakePsiMethod
import org.jetbrains.plugins.scala.lang.psi.light.PsiTypedDefinitionWrapper
class SetterMethodSearcher extends QueryExecutor[PsiReference, ReferencesSearch.SearchParameters] {
private val suffixScala = "_="
private val suffixJava = "_$eq"
def execute(queryParameters: ReferencesSearch.SearchParameters, cons: Processor[PsiReference]): Boolean = {
inReadAction {
implicit val scope = queryParameters.getEffectiveSearchScope
implicit val consumer = cons
val element = queryParameters.getElementToSearch
if (element.isValid) {
element match {
case fun: ScFunction if fun.name endsWith suffixScala =>
processAssignments(fun, fun.name)
processSimpleUsages(fun, fun.name)
case refPattern: ScReferencePattern if ScalaPsiUtil.nameContext(refPattern).isInstanceOf[ScVariable] =>
val name = refPattern.name
processAssignments(refPattern, name)
processSimpleUsages(refPattern, name + suffixScala)
processSimpleUsages(refPattern, name + suffixJava)
case _ =>
}
}
}
true
}
private def processAssignments(element: PsiElement, name: String)(implicit consumer: Processor[PsiReference], scope: SearchScope) = {
val processor = new TextOccurenceProcessor {
def execute(elem: PsiElement, offsetInElement: Int): Boolean = {
elem match {
case Parent(Parent(assign: ScAssignStmt)) => assign.resolveAssignment match {
case Some(res) if res.element.getNavigationElement == element =>
Option(assign.getLExpression).foreach {
case ref: ScReferenceElement => if (!consumer.process(ref)) return false
}
case _ =>
}
case _ =>
}
true
}
}
val helper: PsiSearchHelper = PsiSearchHelper.SERVICE.getInstance(element.getProject)
helper.processElementsWithWord(processor, scope, name.stripSuffix(suffixScala), UsageSearchContext.IN_CODE, true)
}
private def processSimpleUsages(element: PsiElement, name: String)(implicit consumer: Processor[PsiReference], scope: SearchScope) = {
val processor = new TextOccurenceProcessor {
def execute(elem: PsiElement, offsetInElement: Int): Boolean = {
elem match {
case ref: PsiReference => ref.resolve() match {
case fakeMethod: FakePsiMethod if fakeMethod.navElement == element =>
if (!consumer.process(ref)) return false
case wrapper: PsiTypedDefinitionWrapper if wrapper.typedDefinition == element =>
if (!consumer.process(ref)) return false
case _ =>
}
case _ =>
}
true
}
}
val helper: PsiSearchHelper = PsiSearchHelper.SERVICE.getInstance(element.getProject)
helper.processElementsWithWord(processor, scope, name, UsageSearchContext.IN_CODE, true)
}
}
| triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/findUsages/setter/SetterMethodSearcher.scala | Scala | apache-2.0 | 3,743 |
/* Copyright 2009-2021 EPFL, Lausanne */
package stainless.lang
import stainless.annotation._
/**
* @author Mikael
*/
object StrOps {
@ignore
def concat(a: String, b: String): String = {
a + b
}
@ignore
def bigLength(s: String): BigInt = {
BigInt(s.length)
}
@ignore
def bigSubstring(s: String, start: BigInt, end: BigInt): String = {
s.substring(start.toInt, end.toInt)
}
}
| epfl-lara/stainless | frontends/library/stainless/lang/StrOps.scala | Scala | apache-2.0 | 412 |
package com.tsmms.hackathon.choices.miniwicket
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, PrintWriter}
import java.util.logging.Logger
import javax.servlet._
import javax.servlet.http.{HttpServletRequest, HttpServletResponse, HttpServletResponseWrapper}
import scala.collection.mutable
import scala.xml._
/**
* A servletfilter that simulates some behaviour of apache wicket. Just done since it is much easier use for now.
* @author <a href="http://www.stoerr.net/">Hans-Peter Stoerr</a>
* @since 02.03.2015
*/
class MiniWicketServletFilter extends Filter {
val logger = Logger.getLogger(getClass.toString)
var ignoredPaths: Array[String] = Array()
override def init(filterConfig: FilterConfig): Unit = {
ignoredPaths = filterConfig.getInitParameter("ignorePaths").split(",")
}
override def destroy(): Unit = {}
override def doFilter(request: ServletRequest, response: ServletResponse, chain: FilterChain): Unit = {
val httpRequest = request.asInstanceOf[HttpServletRequest]
    if (ignoredPaths.exists(path => httpRequest.getRequestURI.contains(path)))
chain.doFilter(request, response)
else {
val responseWrapper = new OutputCapturingServletResponseWrapper(response.asInstanceOf[HttpServletResponse])
try {
chain.doFilter(request, responseWrapper)
} finally {
responseWrapper.transformOutput(xhtmlTransformer(httpRequest))
}
}
}
def xhtmlTransformer(request: HttpServletRequest)(in: Array[Byte], out: PrintWriter): Unit = {
if (in.length > 0) {
val xml = XML.load(new ByteArrayInputStream(in))
MiniWicketProcessor.wicketyTransformer(xml)(request) foreach (XML.write(out, _, "UTF-8", false, null))
}
}
}
class OutputCapturingServletResponseWrapper(response: HttpServletResponse) extends HttpServletResponseWrapper(response) {
val capturedOutput = new ByteArrayOutputStream()
override def getWriter: PrintWriter = new PrintWriter(getOutputStream)
override def getOutputStream: ServletOutputStream = new ServletOutputStream {
override def write(b: Int): Unit = capturedOutput.write(b)
}
def transformOutput(transformer: (Array[Byte], PrintWriter) => Unit): Unit = {
val writer = super.getWriter
transformer(capturedOutput.toByteArray, writer)
writer.close()
flushBuffer()
capturedOutput.reset()
}
}
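
// Deployment sketch (illustrative web.xml; paths assumed): the filter reads a
// comma separated "ignorePaths" init parameter and skips transformation for any
// request URI containing one of those fragments.
//
//   <filter>
//     <filter-name>miniWicket</filter-name>
//     <filter-class>com.tsmms.hackathon.choices.miniwicket.MiniWicketServletFilter</filter-class>
//     <init-param>
//       <param-name>ignorePaths</param-name>
//       <param-value>/static,/resources</param-value>
//     </init-param>
//   </filter>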
| stoerr/Groupchoices | src/main/scala/com/tsmms/hackathon/choices/miniwicket/MiniWicketServletFilter.scala | Scala | apache-2.0 | 2,381 |
/*
Copyright 2013 Ilya Lakhin (Илья Александрович Лахин)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package name.lakhin.eliah.projects
package papacarlo.lexis
final class Token(val kind: String,
val value: String,
private var skipped: Boolean = false,
private var mutable: Boolean = false,
private var indentation: Boolean = false) {
private val originallySkipped = skipped
private val originallyMutable = mutable
private[lexis] var context = Context.Base
private[lexis] var seam: SeamType = RegularSeam
def isSkippable = skipped
def isMutable = mutable
def getContext = context
private[lexis] def applySkipLevel(level: SkipLevel): Unit = {
level match {
case ForceSkip => skipped = true
case ForceUse => skipped = false
case OriginalSkipping => skipped = originallySkipped
}
}
private[lexis] def revertMutability(): Unit = {
mutable = originallyMutable
}
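
  // Two tokens are interchangeable when their raw values match, or when they
  // share a kind and at least one side is mutable (its value may legitimately
  // vary). Note && binds tighter than ||, so the kind test pairs with the
  // mutability test.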
private[lexis] def sameAs(another: Token) = {
value == another.value || kind == another.kind &&
(mutable || another.mutable)
}
}
object Token {
val LineBreakKind = "lineBreak"
val UnknownKind = "unknown"
def unknown(value: String) =
new Token(
kind = UnknownKind,
value = value,
skipped = false,
mutable = false
)
def terminal(value: String) =
new Token(
kind = value,
value = value,
skipped = false,
mutable = false
)
def lineBreak =
new Token(
kind = LineBreakKind,
value = "\n",
skipped = true,
mutable = false
)
}
| Eliah-Lakhin/papa-carlo | src/main/scala/name.lakhin.eliah.projects/papacarlo/lexis/Token.scala | Scala | apache-2.0 | 2,180 |
/**
* This file is part of SensApp [ http://sensapp.modelbased.net ]
*
* Copyright (C) 2011- SINTEF ICT
* Contact: SINTEF ICT <[email protected]>
*
* Module: net.modelbased.sensapp
*
* SensApp is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* SensApp is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with SensApp. If not, see
* <http://www.gnu.org/licenses/>.
*/
package net.modelbased.sensapp.rrd.services
import net.modelbased.sensapp.restful._
import net.modelbased.sensapp.rrd.datamodel._
import akka.http._
import javax.ws.rs.core.MediaType
import java.io.{BufferedReader, InputStreamReader}
import org.specs2.internal.scalaz.Validation
import com.sun.xml.internal.ws.wsdl.writer.document.soap12.Body
import java.lang.StringBuilder
import org.rrd4j.core.RrdDefTemplate
/**
* The service that exposes a set of XML RRD templates as a RESTful artefact
*
* The XML format used for the templates is the RRD4J format as described here:
* http://rrd4j.googlecode.com/svn/trunk/javadoc/reference/org/rrd4j/core/RrdDefTemplate.html
*
* @author Sebastien Mosser
* @author Franck Fleurey
*/
class RRDTemplateRegistryService(p: URIPattern, r: String) extends ResourceHandler(p,r) {
// the internal registry
private val _registry = new RRDTemplateRegistry()
// The bindings expected as a ResourceHandler
override val _bindings = Map("GET" -> { getTemplate(_) },
"PUT" -> { addTemplate(_) })
/**
* Retrieve an XML RRD template from the registry
*
* <strong>Remark</strong>: A 404 status is returned if there is no template available
*
* @param req the received request
*/
private def getTemplate(req: RequestMethod) : Boolean = {
val identifier = _params("id")
req.response.setContentType(MediaType.TEXT_PLAIN)
_registry pull ("id", identifier) match {
case Some(rrdtemplate) => req OK rrdtemplate.template
case None => req NotFound ("RRD Template ["+identifier+"] not found.")
}
}
/**
* Add a RRD template into the registry, provided as an XML document.
* The XML format is standard RRD4J XML format as described at
* http://rrd4j.googlecode.com/svn/trunk/javadoc/reference/org/rrd4j/core/RrdDefTemplate.html
* <strong>Remark</strong>:
* <ul>
* <li>The template is described using XML</li>
* <li> A conflict (409) is returned if the description ID does not match the URL one
* <li> An error is returned if the XML document cannot be parsed properly
* </ul>
*/
private def addTemplate(req: RequestMethod) : Boolean = {
val id = _params("id")
// Read the body of the request
val br = new BufferedReader(new InputStreamReader(req.request.getInputStream))
val body = new StringBuilder
    while (br.ready()) body.append(br.readLine() + "\n")
br.close();
val xml = body.toString
val rrdTemplate = new RRDTemplate(id, xml)
req.response.setContentType(MediaType.TEXT_PLAIN)
try {
      new RrdDefTemplate(xml) // parsing validates the template; the instance itself is not needed
_registry push rrdTemplate
req OK "OK"
}
catch {
case e => req Error "Invalid Template: " + e.getMessage
}
}
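
  // Template sketch (illustrative values; element names follow the RRD4J
  // RrdDefTemplate format referenced in the class docs):
  //
  //   <rrd_def>
  //     <path>sensor.rrd</path>
  //     <step>300</step>
  //     <datasource>
  //       <name>value</name><type>GAUGE</type><heartbeat>600</heartbeat>
  //       <min>U</min><max>U</max>
  //     </datasource>
  //     <archive>
  //       <cf>AVERAGE</cf><xff>0.5</xff><steps>1</steps><rows>600</rows>
  //     </archive>
  //   </rrd_def>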
}
| SINTEF-9012/sensapp | _attic/net.modelbased.sensapp.rrd/src/main/scala/net/modelbased/sensapp/rrd/services/RRDTemplateRegistryService.scala | Scala | lgpl-3.0 | 3,614 |
package com.example.test
import android.os.Build.VERSION_CODES.LOLLIPOP
import com.example.R
import org.robolectric.RuntimeEnvironment
import org.robolectric.annotation.Config
import org.scalatest.{Matchers, FlatSpec, FeatureSpec, RobolectricSuite}
@Config( sdk = Array( LOLLIPOP ) )
class Test
extends FlatSpec
with Matchers
with RobolectricSuite {
"Resources" should "be accessible via R" in {
RuntimeEnvironment.application.getString( R.string.name ) shouldBe "Hello Scala!"
}
} | Taig/Scala-on-Android | src/main/page/testing/robolectric/usage.scala | Scala | mit | 507 |
package com.sksamuel.elastic4s
import com.sksamuel.elastic4s.ElasticDsl._
import org.scalatest.{Matchers, WordSpec}
import scala.concurrent.duration._
/** @author Stephen Samuel */
class ValidateTest extends WordSpec with ElasticSugar with Matchers {
implicit val duration: Duration = 10.seconds
client.execute {
index into "food/pasta" fields(
"name" -> "maccaroni",
"color" -> "yellow"
)
}.await
blockUntilCount(1, "food")
"a validate query" should {
"return valid when the query is valid for a string query" in {
val resp = client.execute {
validate in "food/pasta" query "maccaroni"
}.await
resp.isValid shouldBe true
}
"return valid when the query is valid for a dsl query" in {
val resp = client.execute {
validate in "food/pasta" query {
bool {
should {
termQuery("name", "maccaroni")
}
}
}
}.await
resp.isValid shouldBe true
}
}
}
| alexander-svendsen/elastic4s | elastic4s-core/src/test/scala/com/sksamuel/elastic4s/ValidateTest.scala | Scala | apache-2.0 | 1,014 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.containerpool.logging
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.client.RequestBuilding.Post
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.model.FormData
import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.HttpResponse
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.model.headers.Authorization
import akka.http.scaladsl.model.headers.BasicHttpCredentials
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer
import akka.stream.OverflowStrategy
import akka.stream.QueueOfferResult
import akka.stream.scaladsl.Flow
import akka.stream.scaladsl.Keep
import akka.stream.scaladsl.Sink
import akka.stream.scaladsl.Source
import com.typesafe.sslconfig.akka.AkkaSSLConfig
import pureconfig._
import scala.concurrent.Future
import scala.concurrent.Promise
import scala.util.Failure
import scala.util.Success
import scala.util.Try
import spray.json._
import org.apache.openwhisk.common.AkkaLogging
import org.apache.openwhisk.core.ConfigKeys
import org.apache.openwhisk.core.entity.ActivationLogs
import org.apache.openwhisk.core.entity.WhiskActivation
import org.apache.openwhisk.core.database.UserContext
case class SplunkLogStoreConfig(host: String,
port: Int,
username: String,
password: String,
index: String,
logTimestampField: String,
logStreamField: String,
logMessageField: String,
activationIdField: String,
queryConstraints: String,
queryTimestampOffsetSeconds: Int,
disableSNI: Boolean)
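
// Configuration sketch (illustrative HOCON; the exact path comes from
// ConfigKeys.splunk and the key casing from pureconfig's naming convention,
// both assumed here):
//
//   whisk.logstore.splunk {
//     host = "splunk.example.com"
//     port = 8089
//     username = "wsk"
//     password = "secret"
//     index = "wsklogs"
//     log-timestamp-field = "time"
//     log-stream-field = "stream"
//     log-message-field = "log_message"
//     activation-id-field = "activation_id"
//     query-constraints = ""
//     query-timestamp-offset-seconds = 5
//     disable-sni = false
//   }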
case class SplunkResponse(results: Vector[JsObject])
object SplunkResponseJsonProtocol extends DefaultJsonProtocol {
implicit val orderFormat = jsonFormat1(SplunkResponse)
}
/**
* A Splunk based impl of LogDriverLogStore. Logs are routed to splunk via docker log driver, and retrieved via Splunk REST API
*
* @param actorSystem
* @param httpFlow Optional Flow to use for HttpRequest handling (to enable stream based tests)
*/
class SplunkLogStore(
actorSystem: ActorSystem,
httpFlow: Option[Flow[(HttpRequest, Promise[HttpResponse]), (Try[HttpResponse], Promise[HttpResponse]), Any]] = None,
splunkConfig: SplunkLogStoreConfig = loadConfigOrThrow[SplunkLogStoreConfig](ConfigKeys.splunk))
extends LogDriverLogStore(actorSystem) {
implicit val as = actorSystem
implicit val ec = as.dispatcher
implicit val materializer = ActorMaterializer()
private val logging = new AkkaLogging(actorSystem.log)
private val splunkApi = Path / "services" / "search" / "jobs" //see http://docs.splunk.com/Documentation/Splunk/6.6.3/RESTREF/RESTsearch#search.2Fjobs
import SplunkResponseJsonProtocol._
val maxPendingRequests = 500
val defaultHttpFlow = Http().cachedHostConnectionPoolHttps[Promise[HttpResponse]](
host = splunkConfig.host,
port = splunkConfig.port,
connectionContext =
if (splunkConfig.disableSNI)
Http().createClientHttpsContext(AkkaSSLConfig().mapSettings(s => s.withLoose(s.loose.withDisableSNI(true))))
else Http().defaultClientHttpsContext)
override def fetchLogs(activation: WhiskActivation, context: UserContext): Future[ActivationLogs] = {
//example curl request:
// curl -u username:password -k https://splunkhost:port/services/search/jobs -d exec_mode=oneshot -d output_mode=json -d "search=search index=\"someindex\" | spath=log_message | search activation_id=a930e5ae4ad4455c8f2505d665aad282 | table log_message" -d "earliest_time=2017-08-29T12:00:00" -d "latest_time=2017-10-29T12:00:00"
//example response:
// {"preview":false,"init_offset":0,"messages":[],"fields":[{"name":"log_message"}],"results":[{"log_message":"some log message"}], "highlighted":{}}
//note: splunk returns results in reverse-chronological order, therefore we include "| reverse" to cause results to arrive in chronological order
val search =
s"""search index="${splunkConfig.index}"| spath ${splunkConfig.logMessageField}| search ${splunkConfig.queryConstraints} ${splunkConfig.activationIdField}=${activation.activationId.toString}| table ${splunkConfig.logTimestampField}, ${splunkConfig.logStreamField}, ${splunkConfig.logMessageField}| reverse"""
val entity = FormData(
Map(
"exec_mode" -> "oneshot",
"search" -> search,
"output_mode" -> "json",
"earliest_time" -> activation.start
.minusSeconds(splunkConfig.queryTimestampOffsetSeconds)
.toString, //assume that activation start/end are UTC zone, and splunk events are the same
"latest_time" -> activation.end
.plusSeconds(splunkConfig.queryTimestampOffsetSeconds) //add 5s to avoid a timerange of 0 on short-lived activations
.toString)).toEntity
logging.debug(this, "sending request")
queueRequest(
Post(Uri(path = splunkApi))
.withEntity(entity)
.withHeaders(List(Authorization(BasicHttpCredentials(splunkConfig.username, splunkConfig.password)))))
.flatMap(response => {
logging.debug(this, s"splunk API response ${response}")
Unmarshal(response.entity)
.to[SplunkResponse]
.map(
r =>
ActivationLogs(
r.results
.map(js => Try(toLogLine(js)))
.map {
case Success(s) => s
case Failure(t) =>
logging.debug(
this,
s"The log message might have been too large " +
s"for '${splunkConfig.index}' Splunk index and can't be retrieved, ${t.getMessage}")
s"The log message can't be retrieved, ${t.getMessage}"
}))
})
}
private def toLogLine(l: JsObject) = //format same as org.apache.openwhisk.core.containerpool.logging.LogLine.toFormattedString
f"${l.fields(splunkConfig.logTimestampField).convertTo[String]}%-30s ${l
.fields(splunkConfig.logStreamField)
.convertTo[String]}: ${l.fields(splunkConfig.logMessageField).convertTo[String].trim}"
//based on http://doc.akka.io/docs/akka-http/10.0.6/scala/http/client-side/host-level.html
val queue =
Source
.queue[(HttpRequest, Promise[HttpResponse])](maxPendingRequests, OverflowStrategy.dropNew)
.via(httpFlow.getOrElse(defaultHttpFlow))
.toMat(Sink.foreach({
case ((Success(resp), p)) => p.success(resp)
case ((Failure(e), p)) => p.failure(e)
}))(Keep.left)
.run()
def queueRequest(request: HttpRequest): Future[HttpResponse] = {
val responsePromise = Promise[HttpResponse]()
queue.offer(request -> responsePromise).flatMap {
case QueueOfferResult.Enqueued => responsePromise.future
case QueueOfferResult.Dropped =>
Future.failed(new RuntimeException("Splunk API Client Queue overflowed. Try again later."))
case QueueOfferResult.Failure(ex) => Future.failed(ex)
case QueueOfferResult.QueueClosed =>
Future.failed(
new RuntimeException(
"Splunk API Client Queue was closed (pool shut down) while running the request. Try again later."))
}
}
}
object SplunkLogStoreProvider extends LogStoreProvider {
override def instance(actorSystem: ActorSystem) = new SplunkLogStore(actorSystem)
}
| openwhisk/openwhisk | common/scala/src/main/scala/org/apache/openwhisk/core/containerpool/logging/SplunkLogStore.scala | Scala | apache-2.0 | 8,552 |
/**
* Copyright (C) 2016 DANS - Data Archiving and Networked Services ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.multideposit.actions
import better.files.{ Dispose, _ }
import cats.syntax.either._
import nl.knaw.dans.easy.multideposit.PathExplorer.OutputPathExplorer
import nl.knaw.dans.easy.multideposit.actions.ReportDatasets._
import nl.knaw.dans.easy.multideposit.model.Deposit
import nl.knaw.dans.easy.multideposit.{ ActionError, FailFast, encoding }
import org.apache.commons.csv.{ CSVFormat, CSVPrinter }
class ReportDatasets {
def report(deposits: Seq[Deposit])(implicit output: OutputPathExplorer): FailFast[Unit] = {
Either.catchNonFatal {
for (printer <- csvPrinter(output.reportFile);
deposit <- deposits)
printRecord(deposit, printer)
}.leftMap(e => ActionError("Could not write the dataset report", e))
}
private def csvPrinter(file: File): Dispose[CSVPrinter] = {
file.bufferedWriter(charset = encoding)
.flatMap[CSVPrinter, Dispose](writer => new Dispose(csvFormat.print(writer)))
}
private def printRecord(deposit: Deposit, printer: CSVPrinter): Unit = {
printer.printRecord(
deposit.depositId,
deposit.bagId,
deposit.baseUUID.getOrElse(deposit.bagId)
)
}
}
object ReportDatasets {
private val csvFormat = CSVFormat.RFC4180.withHeader("DATASET", "UUID", "BASE-REVISION")
}
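
// Example report contents (illustrative identifiers): one row per deposit, with
// BASE-REVISION falling back to the deposit's own bag id when no base UUID is set.
//
//   DATASET,UUID,BASE-REVISION
//   deposit-1,5e0f...,5e0f...
//   deposit-2,9a31...,0b42...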
| DANS-KNAW/easy-split-multi-deposit | src/main/scala/nl.knaw.dans.easy.multideposit/actions/ReportDatasets.scala | Scala | apache-2.0 | 1,941 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.carbondata.bucketing
import org.apache.spark.sql.common.util.QueryTest
import org.apache.spark.sql.execution.command.LoadTable
import org.apache.spark.sql.execution.exchange.ShuffleExchange
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.metadata.CarbonMetadata
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll {
override def beforeAll {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
sqlContext.setConf("spark.sql.autoBroadcastJoinThreshold", "-1")
sql("DROP TABLE IF EXISTS t3")
sql("DROP TABLE IF EXISTS t4")
sql("DROP TABLE IF EXISTS t5")
sql("DROP TABLE IF EXISTS t6")
sql("DROP TABLE IF EXISTS t7")
sql("DROP TABLE IF EXISTS t8")
sql("DROP TABLE IF EXISTS t9")
}
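  // A small helper that the shuffle assertions in the tests below could share;
  // behaviorally equivalent to the inline collect-and-flag pattern each test repeats.
  private def hasShuffle(plan: org.apache.spark.sql.execution.SparkPlan): Boolean =
    plan.collect { case s: ShuffleExchange => s }.nonEmpty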
test("test create table with buckets") {
sql(
"""
CREATE TABLE t4
(ID Int, date Timestamp, country String,
name String, phonetype String, serialname String, salary Int)
USING org.apache.spark.sql.CarbonSource
OPTIONS("bucketnumber"="4", "bucketcolumns"="name", "tableName"="t4")
""")
LoadTable(Some("default"), "t4", s"$resourcesPath/source.csv", Nil,
Map(("use_kettle", "false"))).run(sqlContext.sparkSession)
val table: CarbonTable = CarbonMetadata.getInstance().getCarbonTable("default_t4")
if (table != null && table.getBucketingInfo("t4") != null) {
assert(true)
} else {
assert(false, "Bucketing info does not exist")
}
}
test("must be unable to create if number of buckets is in negative number") {
try {
sql(
"""
CREATE TABLE t9
(ID Int, date Timestamp, country String,
name String, phonetype String, serialname String, salary Int)
USING org.apache.spark.sql.CarbonSource
OPTIONS("bucketnumber"="-1", "bucketcolumns"="name", "tableName"="t9")
""")
assert(false)
}
catch {
      case _: MalformedCarbonCommandException => assert(true)
}
}
test("test create table with no bucket join of carbon tables") {
sql(
"""
CREATE TABLE t5
(ID Int, date Timestamp, country String,
name String, phonetype String, serialname String, salary Int)
USING org.apache.spark.sql.CarbonSource
OPTIONS("tableName"="t5")
""")
LoadTable(Some("default"), "t5", s"$resourcesPath/source.csv", Nil,
Map(("use_kettle", "false"))).run(sqlContext.sparkSession)
val plan = sql(
"""
|select t1.*, t2.*
|from t5 t1, t5 t2
|where t1.name = t2.name
""".stripMargin).queryExecution.executedPlan
var shuffleExists = false
plan.collect {
case s: ShuffleExchange => shuffleExists = true
}
assert(shuffleExists, "shuffle should exist on non bucket tables")
}
test("test create table with bucket join of carbon tables") {
sql(
"""
CREATE TABLE t6
(ID Int, date Timestamp, country String,
name String, phonetype String, serialname String, salary Int)
USING org.apache.spark.sql.CarbonSource
OPTIONS("bucketnumber"="4", "bucketcolumns"="name", "tableName"="t6")
""")
LoadTable(Some("default"), "t6", s"$resourcesPath/source.csv", Nil,
Map(("use_kettle", "false"))).run(sqlContext.sparkSession)
val plan = sql(
"""
|select t1.*, t2.*
|from t6 t1, t6 t2
|where t1.name = t2.name
""".stripMargin).queryExecution.executedPlan
var shuffleExists = false
plan.collect {
case s: ShuffleExchange => shuffleExists = true
}
assert(!shuffleExists, "shuffle should not exist on bucket tables")
}
test("test create table with bucket join of carbon table and parquet table") {
sql(
"""
CREATE TABLE t7
(ID Int, date Timestamp, country String,
name String, phonetype String, serialname String, salary Int)
USING org.apache.spark.sql.CarbonSource
OPTIONS("bucketnumber"="4", "bucketcolumns"="name", "tableName"="t7")
""")
LoadTable(Some("default"), "t7", s"$resourcesPath/source.csv", Nil,
Map(("use_kettle", "false"))).run(sqlContext.sparkSession)
sql("DROP TABLE IF EXISTS bucketed_parquet_table")
sql("select * from t7").write
.format("parquet")
.bucketBy(4, "name")
.saveAsTable("bucketed_parquet_table")
val plan = sql(
"""
|select t1.*, t2.*
|from t7 t1, bucketed_parquet_table t2
|where t1.name = t2.name
""".stripMargin).queryExecution.executedPlan
var shuffleExists = false
plan.collect {
case s: ShuffleExchange => shuffleExists = true
}
assert(!shuffleExists, "shuffle should not exist on bucket tables")
}
test("test create table with bucket join of carbon table and non bucket parquet table") {
sql(
"""
CREATE TABLE t8
(ID Int, date Timestamp, country String,
name String, phonetype String, serialname String, salary Int)
USING org.apache.spark.sql.CarbonSource
OPTIONS("bucketnumber"="4", "bucketcolumns"="name", "tableName"="t8")
""")
LoadTable(Some("default"), "t8", s"$resourcesPath/source.csv", Nil,
Map(("use_kettle", "false"))).run(sqlContext.sparkSession)
sql("DROP TABLE IF EXISTS parquet_table")
sql("select * from t8").write
.format("parquet")
.saveAsTable("parquet_table")
val plan = sql(
"""
|select t1.*, t2.*
|from t8 t1, parquet_table t2
|where t1.name = t2.name
""".stripMargin).queryExecution.executedPlan
var shuffleExists = false
plan.collect {
case s: ShuffleExchange => shuffleExists = true
}
assert(shuffleExists, "shuffle should exist on non bucket tables")
}
  override def afterAll {
    sql("DROP TABLE IF EXISTS t3")
    sql("DROP TABLE IF EXISTS t4")
    sql("DROP TABLE IF EXISTS t5")
    sql("DROP TABLE IF EXISTS t6")
    sql("DROP TABLE IF EXISTS t7")
    sql("DROP TABLE IF EXISTS t8")
    sql("DROP TABLE IF EXISTS t9")
    sql("DROP TABLE IF EXISTS parquet_table")
    sql("DROP TABLE IF EXISTS bucketed_parquet_table")
  }
}
| mohammadshahidkhan/incubator-carbondata | integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala | Scala | apache-2.0 | 7,297 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.data.stats
import org.geotools.data.DataStoreFinder
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.TestWithDataStore
import org.locationtech.geomesa.accumulo.data.AccumuloDataStore
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.opengis.filter.Filter
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class StatsCombinerTest extends TestWithDataStore {
import scala.collection.JavaConverters._
sequential
override val spec = "name:String,dtg:Date,*geom:Point:srid=4326"
step {
// add two batches so that we have multiple rows to combine in the stats iter
addFeatures(Seq(ScalaSimpleFeature.create(sft, "0", "name0", "2017-01-01T00:00:00.000Z", "POINT (40 55)")))
addFeatures(Seq(ScalaSimpleFeature.create(sft, "1", "name1", "2017-01-01T01:00:00.000Z", "POINT (41 55)")))
}
// gets a new data store so that we don't read any cached values
def statCount(): Option[Long] = {
val ds = DataStoreFinder.getDataStore(dsParams.asJava).asInstanceOf[AccumuloDataStore]
try { ds.stats.getCount(sft, Filter.INCLUDE, exact = false) } finally { ds.dispose() }
}
"StatsCombiner" should {
"add/remove configured combiners" in {
statCount() must beSome(2L)
ds.stats.removeStatCombiner(ds.connector, sft)
// the exact behavior here doesn't matter, it's just to verify that the combiner is not enabled
// in this case, it will just return the first row
statCount() must beSome(1L)
ds.stats.configureStatCombiner(ds.connector, sft)
statCount() must beSome(2L)
}
}
}
| elahrvivaz/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/data/stats/StatsCombinerTest.scala | Scala | apache-2.0 | 2,118 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import kafka.api.{FetchRequestBuilder, FetchResponsePartitionData, OffsetRequest, Request}
import kafka.cluster.BrokerEndPoint
import kafka.message.ByteBufferMessageSet
import kafka.server.{AbstractFetcherThread, PartitionFetchState}
import AbstractFetcherThread.ResultWithPartitions
import kafka.common.{ErrorMapping, TopicAndPartition}
import scala.collection.Map
import ConsumerFetcherThread._
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.protocol.Errors
import org.apache.kafka.common.record.MemoryRecords
import org.apache.kafka.common.requests.EpochEndOffset
@deprecated("This class has been deprecated and will be removed in a future release. " +
"Please use org.apache.kafka.clients.consumer.internals.Fetcher instead.", "0.11.0.0")
class ConsumerFetcherThread(name: String,
val config: ConsumerConfig,
sourceBroker: BrokerEndPoint,
partitionMap: Map[TopicPartition, PartitionTopicInfo],
val consumerFetcherManager: ConsumerFetcherManager)
extends AbstractFetcherThread(name = name,
clientId = config.clientId,
sourceBroker = sourceBroker,
fetchBackOffMs = config.refreshLeaderBackoffMs,
isInterruptible = true,
includeLogTruncation = false) {
type REQ = FetchRequest
type PD = PartitionData
private val clientId = config.clientId
private val fetchSize = config.fetchMessageMaxBytes
private val simpleConsumer = new SimpleConsumer(sourceBroker.host, sourceBroker.port, config.socketTimeoutMs,
config.socketReceiveBufferBytes, config.clientId)
private val fetchRequestBuilder = new FetchRequestBuilder().
clientId(clientId).
replicaId(Request.OrdinaryConsumerId).
maxWait(config.fetchWaitMaxMs).
minBytes(config.fetchMinBytes).
requestVersion(3) // for now, the old consumer is pinned to the old message format through the fetch request
override def initiateShutdown(): Boolean = {
val justShutdown = super.initiateShutdown()
if (justShutdown && isInterruptible)
simpleConsumer.disconnectToHandleJavaIOBug()
justShutdown
}
override def shutdown(): Unit = {
super.shutdown()
simpleConsumer.close()
}
// process fetched data
  def processPartitionData(topicPartition: TopicPartition, fetchOffset: Long, partitionData: PartitionData): Unit = {
val pti = partitionMap(topicPartition)
if (pti.getFetchOffset != fetchOffset)
throw new RuntimeException("Offset doesn't match for partition [%s,%d] pti offset: %d fetch offset: %d"
.format(topicPartition.topic, topicPartition.partition, pti.getFetchOffset, fetchOffset))
pti.enqueue(partitionData.underlying.messages.asInstanceOf[ByteBufferMessageSet])
}
// handle a partition whose offset is out of range and return a new fetch offset
def handleOffsetOutOfRange(topicPartition: TopicPartition): Long = {
val startTimestamp = config.autoOffsetReset match {
case OffsetRequest.SmallestTimeString => OffsetRequest.EarliestTime
case _ => OffsetRequest.LatestTime
}
val topicAndPartition = TopicAndPartition(topicPartition.topic, topicPartition.partition)
val newOffset = simpleConsumer.earliestOrLatestOffset(topicAndPartition, startTimestamp, Request.OrdinaryConsumerId)
val pti = partitionMap(topicPartition)
pti.resetFetchOffset(newOffset)
pti.resetConsumeOffset(newOffset)
newOffset
}
// any logic for partitions whose leader has changed
  def handlePartitionsWithErrors(partitions: Iterable[TopicPartition]): Unit = {
if (partitions.nonEmpty) {
removePartitions(partitions.toSet)
consumerFetcherManager.addPartitionsWithError(partitions)
}
}
protected def buildFetchRequest(partitionMap: collection.Seq[(TopicPartition, PartitionFetchState)]): ResultWithPartitions[FetchRequest] = {
    partitionMap.foreach { case (topicPartition, partitionFetchState) =>
if (partitionFetchState.isReadyForFetch)
fetchRequestBuilder.addFetch(topicPartition.topic, topicPartition.partition, partitionFetchState.fetchOffset, fetchSize)
}
ResultWithPartitions(new FetchRequest(fetchRequestBuilder.build()), Set())
}
protected def fetch(fetchRequest: FetchRequest): Seq[(TopicPartition, PartitionData)] =
simpleConsumer.fetch(fetchRequest.underlying).data.map { case (TopicAndPartition(t, p), value) =>
new TopicPartition(t, p) -> new PartitionData(value)
}
override def buildLeaderEpochRequest(allPartitions: Seq[(TopicPartition, PartitionFetchState)]): ResultWithPartitions[Map[TopicPartition, Int]] = {
ResultWithPartitions(Map(), Set())
}
override def fetchEpochsFromLeader(partitions: Map[TopicPartition, Int]): Map[TopicPartition, EpochEndOffset] = { Map() }
override def maybeTruncate(fetchedEpochs: Map[TopicPartition, EpochEndOffset]): ResultWithPartitions[Map[TopicPartition, Long]] = {
ResultWithPartitions(Map(), Set())
}
}
@deprecated("This object has been deprecated and will be removed in a future release. " +
"Please use org.apache.kafka.clients.consumer.internals.Fetcher instead.", "0.11.0.0")
object ConsumerFetcherThread {
class FetchRequest(val underlying: kafka.api.FetchRequest) extends AbstractFetcherThread.FetchRequest {
private lazy val tpToOffset: Map[TopicPartition, Long] = underlying.requestInfo.map { case (tp, fetchInfo) =>
new TopicPartition(tp.topic, tp.partition) -> fetchInfo.offset
}.toMap
def isEmpty: Boolean = underlying.requestInfo.isEmpty
def offset(topicPartition: TopicPartition): Long = tpToOffset(topicPartition)
override def toString = underlying.toString
}
class PartitionData(val underlying: FetchResponsePartitionData) extends AbstractFetcherThread.PartitionData {
def error = underlying.error
def toRecords: MemoryRecords = underlying.messages.asInstanceOf[ByteBufferMessageSet].asRecords
def highWatermark: Long = underlying.hw
def exception: Option[Throwable] =
if (error == Errors.NONE) None else Some(ErrorMapping.exceptionFor(error.code))
override def toString = underlying.toString
}
}
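// The deprecation notices above point at the new consumer API; a minimal sketch of the
// replacement path. Broker address, group id, and topic name are assumptions.
object NewConsumerSketch {
  import java.util.{Collections, Properties}
  import org.apache.kafka.clients.consumer.KafkaConsumer
  import scala.collection.JavaConverters._
  def main(args: Array[String]): Unit = {
    val props = new Properties()
    props.put("bootstrap.servers", "localhost:9092")
    props.put("group.id", "sketch-group")
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    val consumer = new KafkaConsumer[String, String](props)
    try {
      consumer.subscribe(Collections.singletonList("sketch-topic"))
      val records = consumer.poll(1000L) // poll(Duration) in newer client versions
      for (record <- records.asScala)
        println(s"${record.offset}: ${record.value}")
    } finally consumer.close()
  }
}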
| themarkypantz/kafka | core/src/main/scala/kafka/consumer/ConsumerFetcherThread.scala | Scala | apache-2.0 | 7,194 |
package sio.regions
import sio.core.{IORef, IO}
/**
 * A finalizer action paired with the number of live references to it. In a
 * region-based resource scheme, `run` is executed once `refCount` drops to zero.
 */
final case class RefCountedFinalizer(run: IO[Unit], refCount: IORef[Int])
| alexknvl/sio | regions/src/main/scala/sio/regions/RefCountedFinalizer.scala | Scala | mit | 164 |