code: string (5-1M chars) | repo_name: string (5-109 chars) | path: string (6-208 chars) | language: string (1 class) | license: string (15 classes) | size: int64 (5-1M)
---|---|---|---|---|---
/***
* Copyright 2016 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker.step
import javax.servlet.FilterChain
import com.rackspace.com.papi.components.checker.servlet._
import com.rackspace.com.papi.components.checker.step.base.{ConnectedStep, Step, StepContext}
import com.rackspace.com.papi.components.checker.util.HeaderUtil._
class SetHeaderAlways(id : String, label : String, val name : String, val value : String,
next : Array[Step]) extends ConnectedStep(id, label, next) {
override def checkStep(req : CheckerServletRequest, resp : CheckerServletResponse, chain : FilterChain, context : StepContext) : Option[StepContext] = {
//
// Always set a header with the given value when this step
// executes. If the header is already present, the value is
// added to it rather than replacing the existing one.
//
Some(context.copy(requestHeaders = context.requestHeaders.addHeader(name, value)))
}
}
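// Illustrative usage sketch, not from the original project: it constructs the
// step using only the constructor shown above. The id, label, header
// name/value and the empty array of next steps are hypothetical placeholders
// rather than values from a real checker configuration.
object SetHeaderAlwaysSketch {
val setTracingHeader = new SetHeaderAlways("S1", "set tracing header", "X-TRACE-REQUEST", "true", Array.empty[Step])
}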
| rackerlabs/api-checker | core/src/main/scala/com/rackspace/com/papi/components/checker/step/SetHeaderAlways.scala | Scala | apache-2.0 | 1,516 |
package ee.cone.c4gate.deep_session
import ee.cone.c4actor.Types.SrcId
import ee.cone.c4actor._
import ee.cone.c4di.{CreateTypeKey, c4, c4multi}
import ee.cone.c4gate.{CurrentSessionKey, KeyGenerator, SessionAttr, SessionAttrAccessFactory, SessionAttrLens}
import ee.cone.c4gate.SessionDataProtocol.{N_RawDataNode, U_RawSessionData}
import ee.cone.c4gate.deep_session.DeepSessionDataProtocol.{U_RawRoleData, U_RawUserData}
import ee.cone.c4proto.ToByteString
import okio.ByteString
trait DeepSessionAttrAccessFactoryUtils {
def qAdapterRegistry: QAdapterRegistry
def origMetaRegistry: OrigMetaRegistry
lazy val sessionMeta: OrigMeta[U_RawSessionData] = origMetaRegistry.getByCl(classOf[U_RawSessionData])
lazy val userMeta: OrigMeta[U_RawUserData] = origMetaRegistry.getByCl(classOf[U_RawUserData])
lazy val roleMeta: OrigMeta[U_RawRoleData] = origMetaRegistry.getByCl(classOf[U_RawRoleData])
def getMeta[P <: Product](attr: SessionAttr[P]): OrigMeta[P] =
origMetaRegistry.byName(attr.className).asInstanceOf[OrigMeta[P]]
def sessionLens[P <: Product](attr: SessionAttr[P])(of: U_RawSessionData => P, set: P => U_RawSessionData => U_RawSessionData): ProdLens[U_RawSessionData, P] =
SessionAttrLens[U_RawSessionData, P](attr.metaList, sessionMeta, getMeta(attr))(of, set)
def userLens[P <: Product](attr: SessionAttr[P])(of: U_RawUserData => P, set: P => U_RawUserData => U_RawUserData): ProdLens[U_RawUserData, P] =
SessionAttrLens[U_RawUserData, P](attr.metaList, userMeta, getMeta(attr))(of, set)
def roleLens[P <: Product](attr: SessionAttr[P])(of: U_RawRoleData => P, set: P => U_RawRoleData => U_RawRoleData): ProdLens[U_RawRoleData, P] =
SessionAttrLens[U_RawRoleData, P](attr.metaList, roleMeta, getMeta(attr))(of, set)
def deepLens[P <: Product](attr: SessionAttr[P]): ProdLens[DeepRawSessionData[P], P] = {
val attrMeta = getMeta(attr)
val rawSessionDataKey = CreateTypeKey(classOf[DeepRawSessionData[P]], "DeepRawSessionData", attrMeta.typeKey :: Nil)
ProdLensStrict[DeepRawSessionData[P], P](attr.metaList, classOf[DeepRawSessionData[P]], attrMeta.cl, rawSessionDataKey, attrMeta.typeKey)(
_.of(qAdapterRegistry),
value => deepData => deepData.set(qAdapterRegistry)(value)(deepData)
)
}
}
@c4("DeepSessionAttrFactoryImplApp") final class DeepSessionAttrAccessFactoryImpl(
val qAdapterRegistry: QAdapterRegistry,
modelFactory: ModelFactory,
modelAccessFactory: RModelAccessFactory,
val idGenUtil: IdGenUtil,
sessionAttrAccessFactory: SessionAttrAccessFactory,
txDeepRawDataLensFactory: TxDeepRawDataLensFactory,
val origMetaRegistry: OrigMetaRegistry,
roleByPK: GetByPK[U_RawRoleData],
sessionByPK: GetByPK[U_RawSessionData],
userByPK: GetByPK[U_RawUserData],
) extends DeepSessionAttrAccessFactory with SessionAttrAccessFactory with KeyGenerator with DeepSessionAttrAccessFactoryUtils {
lazy val rawDataAdapter = qAdapterRegistry.byName(classOf[U_RawSessionData].getName)
lazy val rawUserAdapter = qAdapterRegistry.byName(classOf[U_RawUserData].getName)
lazy val rawRoleAdapter = qAdapterRegistry.byName(classOf[U_RawRoleData].getName)
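// Attributes without the UserLevelAttr marker are handled as plain session
// attributes; marked ones are resolved through the session/user/role chain.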
def to[P <: Product](attr: SessionAttr[P]): Context => Option[Access[P]] =
if (attr.metaList.collectFirst { case UserLevelAttr => "" }.isEmpty) {
sessionAttrAccessFactory.to(attr)
} else {
toUser(attr)
}
def toUser[P <: Product](attr: SessionAttr[P]): Context => Option[Access[P]] = local => {
val dataNode = N_RawDataNode(
domainSrcId = attr.pk,
fieldId = attr.id,
valueTypeId = 0,
value = ByteString.EMPTY
)
// Session
val contextKey = CurrentSessionKey.of(local)
val stubRawData: U_RawSessionData = U_RawSessionData(
srcId = "",
sessionKey = contextKey,
dataNode = Option(dataNode)
)
val rawDataPK = genPK(stubRawData, rawDataAdapter)
val rawDataOpt: Option[U_RawSessionData] = sessionByPK.ofA(local).get(rawDataPK)
// User
val userKey = CurrentUserIdKey.of(local)
val stubRawUserData: U_RawUserData = U_RawUserData(
srcId = "",
userId = userKey,
dataNode = Option(dataNode)
)
val rawUserDataPK = genPK(stubRawUserData, rawUserAdapter)
val rawUserDataOpt: Option[U_RawUserData] = userByPK.ofA(local).get(rawUserDataPK)
// Role
val roleKey = CurrentRoleIdKey.of(local)
val stubRawRoleData: U_RawRoleData = U_RawRoleData(
srcId = "",
roleId = roleKey,
dataNode = Option(dataNode)
)
val rawRoleDataPK = genPK(stubRawRoleData, rawRoleAdapter)
val rawRoleDataOpt: Option[U_RawRoleData] = roleByPK.ofA(local).get(rawRoleDataPK)
// Rest
val attrMeta: OrigMeta[P] = getMeta(attr)
val lensRaw = sessionLens(attr)(
rawSessionData => qAdapterRegistry.byId(rawSessionData.dataNode.get.valueTypeId).decode(rawSessionData.dataNode.get.value).asInstanceOf[P],
value => rawRoleData => {
val valueAdapter = qAdapterRegistry.byName(attr.className)
val byteString = ToByteString(valueAdapter.encode(value))
val newDataNode = rawRoleData.dataNode.get.copy(valueTypeId = valueAdapter.id, value = byteString)
rawRoleData.copy(dataNode = Option(newDataNode))
}
)
val lensRawUser = userLens(attr)(
rawRoleData => qAdapterRegistry.byId(rawRoleData.dataNode.get.valueTypeId).decode(rawRoleData.dataNode.get.value).asInstanceOf[P],
value => rawRoleData => {
val valueAdapter = qAdapterRegistry.byName(attr.className)
val byteString = ToByteString(valueAdapter.encode(value))
val newDataNode = rawRoleData.dataNode.get.copy(valueTypeId = valueAdapter.id, value = byteString)
rawRoleData.copy(dataNode = Option(newDataNode))
}
)
val defaultModel: SrcId => P = srcId => modelFactory.create[P](attr.className)(srcId)
val defaultRawData = lensRaw.set(defaultModel(rawDataPK))(stubRawData.copy(srcId = rawDataPK))
val defaultRawUserData = lensRawUser.set(defaultModel(rawUserDataPK))(stubRawUserData.copy(srcId = rawUserDataPK))
val data = DeepRawSessionData[P](rawDataOpt, rawUserDataOpt, rawRoleDataOpt, (defaultRawData, defaultRawUserData), (rawDataPK, rawUserDataPK, rawRoleDataPK))
val lens = deepLens(attr)
val access: AccessImpl[DeepRawSessionData[P]] = AccessImpl(data, Option(txDeepRawDataLensFactory.create(data)), NameMetaAttr("DeepRawSessionData") :: Nil)
Option(access.to(lens))
}
def toRole[P <: Product](attr: SessionAttr[P]): Context => Option[Access[P]] = {
val dataNode = N_RawDataNode(
domainSrcId = attr.pk,
fieldId = attr.id,
valueTypeId = 0,
value = ByteString.EMPTY
)
val lens = roleLens(attr)(
rawRoleData => qAdapterRegistry.byId(rawRoleData.dataNode.get.valueTypeId).decode(rawRoleData.dataNode.get.value).asInstanceOf[P],
value => rawRoleData => {
val valueAdapter = qAdapterRegistry.byName(attr.className)
val byteString = ToByteString(valueAdapter.encode(value))
val newDataNode = rawRoleData.dataNode.get.copy(valueTypeId = valueAdapter.id, value = byteString)
rawRoleData.copy(dataNode = Option(newDataNode))
}
)
local => {
val roleKey = CurrentRoleIdKey.of(local)
val stubRawRoleData = U_RawRoleData(
srcId = "",
roleId = roleKey,
dataNode = Option(dataNode)
)
val pk = genPK(stubRawRoleData, rawRoleAdapter)
val value = roleByPK.ofA(local).getOrElse(pk, {
val model = modelFactory.create[P](attr.className)(pk)
lens.set(model)(stubRawRoleData.copy(srcId = pk))
}
)
modelAccessFactory.to(roleByPK, value).map(_.to(lens))
}
}
}
case class DeepRawSessionData[P <: Product](
sessionData: Option[U_RawSessionData],
userData: Option[U_RawUserData],
roleData: Option[U_RawRoleData],
default: (U_RawSessionData, U_RawUserData),
srcIds: (SrcId, SrcId, SrcId)
) {
override def toString: SrcId = s"$productPrefix(\n\t$sessionData\n\t$userData\n\t$roleData\n\t$default\n\t$srcIds\n)"
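// Resolution order: session-level data first, then user-level, then
// role-level, falling back to the defaults captured at construction time.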
def get: (Long, okio.ByteString) = {
if (sessionData.isDefined)
sessionData.get.dataNode.get match {
case p => (p.valueTypeId, p.value)
}
else if (userData.isDefined)
userData.get.dataNode.get match {
case p => (p.valueTypeId, p.value)
}
else if (roleData.isDefined)
roleData.get.dataNode.get match {
case p => (p.valueTypeId, p.value)
}
else
default match {
case (p, _) => (p.dataNode.get.valueTypeId, p.dataNode.get.value)
}
}
def of: QAdapterRegistry => P = registry => {
val (id, value) = get
registry.byId(id).decode(value).asInstanceOf[P]
}
def set: QAdapterRegistry => P => DeepRawSessionData[P] => DeepRawSessionData[P] = registry => model => old => {
val adapter = registry.byName(model.getClass.getName)
val byteString = ToByteString(adapter.encode(model))
val (defaultRaw, defaultUser) = old.default
val newDataNode = defaultRaw.dataNode.get.copy(valueTypeId = adapter.id, value = byteString)
old.copy(sessionData = Option(defaultRaw.copy(dataNode = Option(newDataNode))), userData = Option(defaultUser.copy(dataNode = Option(newDataNode))))
}
}
@c4multi("TxDeepRawDataLensApp") final case class TxDeepRawDataLens[P <: Product](initialValue: DeepRawSessionData[P])(
dataByPK: GetByPK[U_RawSessionData],
userByPK: GetByPK[U_RawUserData],
roleByPK: GetByPK[U_RawRoleData],
txAdd: LTxAdd,
) extends AbstractLens[Context, DeepRawSessionData[P]] {
def of: Context => DeepRawSessionData[P] = local => {
val (rawId, userId, roleId) = initialValue.srcIds
val rawOpt = dataByPK.ofA(local).get(rawId)
val userOpt = userByPK.ofA(local).get(userId)
val roleOpt = roleByPK.ofA(local).get(roleId)
initialValue.copy(sessionData = rawOpt, userData = userOpt, roleData = roleOpt)
}
def set: DeepRawSessionData[P] => Context => Context = value => local => {
if (initialValue != of(local)) throw new Exception(s"'$initialValue' != '${of(local)}'")
val DeepRawSessionData(raw, user, _, _, _) = value
val rawEvent = raw.map(LEvent.update).toList.flatten
val userEvent = user.map(LEvent.update).toList.flatten
txAdd.add(rawEvent ++ userEvent)(local)
}
}
| conecenter/c4proto | extra_lib/src/main/scala/ee/cone/c4gate/deep_session/DeepSessionAttrImpl.scala | Scala | apache-2.0 | 10,314 |
package models.account
import java.util.UUID
import org.joda.time.DateTime
import services.MailToken
import scala.concurrent.Future
case class MailTokenUser(id: String, email: String, expirationTime: DateTime, isSignUp: Boolean) extends MailToken
object MailTokenUser {
def apply(email: String, isSignUp: Boolean): MailTokenUser =
MailTokenUser(UUID.randomUUID().toString, email, (new DateTime()).plusHours(24), isSignUp)
// TODO persist these tokens
val tokens = scala.collection.mutable.HashMap[String, MailTokenUser]()
def findById(id: String): Future[Option[MailTokenUser]] = {
Future.successful(tokens.get(id))
}
def save(token: MailTokenUser): Future[MailTokenUser] = {
tokens += (token.id -> token)
Future.successful(token)
}
def delete(id: String): Unit = {
tokens.remove(id)
}
}
| asciiu/halo | arbiter/app/models/account/MailTokenUser.scala | Scala | mit | 832 |
package org.thinkmeta.smp.playground
import org.thinkmeta.smp.core.{HostedLisp, ASTShow}
/**
* @author Hossam Karim
*/
object MacrosPlayground extends App {
import language.reflectiveCalls
val m1 = ASTShow.showModule {
"""
module A {
using scala::math::_
module B {
using A::*
(defun f [x:int] (plus x 1))
}
}
"""
}
println(m1)
val HelloWorld = HostedLisp.module {
"""
module HelloWorld {
(defun hello "Hello World from Lisp from Scala from JVM")
}
"""
}
println(HelloWorld.hello)
val compiled = HostedLisp.module {
"""
module HostedLispInScala {
(defun f:int [i:int j:int] (+ i j))
(defun g:int [i:int j:int] (f i j))
(defun h "hello")
}
"""
}
println {
s"""
${compiled.f(1,2)}
${compiled.g(1,2)}
${compiled.h()}
"""
}
val mA = HostedLisp.module {
"""
module A {
using scala::math::_
(defun hello "Hello from A")
(defun calc [a:double] (sin a))
module B {
(defun f [x:int] (+ x 1))
(defun hello "Hello from B")
}
}
"""
}
println {
s"""
${mA.calc(0)}
${mA.B.f(1)}
"""
}
val fact = HostedLisp.function {
"""
(defun factorial:int [n:int]
(if (le n 1)
1
(* n (factorial (- n 1)))))
"""
}
println(s"${fact.factorial(10)}")
val lambdas = HostedLisp.module {
"""
module ds {
using Lists::_
(val x (new StringBuilder "5"))
(val option (Some 1))
(defun showOption (option.map {o ⇒ (+ o 1)}))
(val l (List 1 2 3 4 5))
(defun show0 (map { x:int ⇒ (+ x 1) } l))
(defun show1 (l.reduce {x:int y:int ⇒ (+ x y)}))
(defun show2 l.length.toString)
}
"""
}
println(s"${lambdas.showOption}")
println(s"${lambdas.show0}")
println(s"${lambdas.show1}")
println(s"${lambdas.show2}")
val chaining = HostedLisp.module {
"""
module chaining {
(defun show1
(. (List 1 2 3)
(map {x ⇒ (+ x 1)})
(reduce {x y ⇒ (+ x y)})
(defun show2
(val sb (new StringBuilder))
(. sb (append "a")
(append "b")
(append "c")))
(defun show3
(. "1" toInt))
(defun show4
(val fn {x:int ⇒ (* x x)})
(val l (. 1 (to 10)))
(. l (map fn)))
(defun show5
(val fn {x:int ⇒ (* x x)})
(val l (. 1 (to 10)))
(l.map fn))
(defun run
(println show1)
(println show2)
(println show3)
(println show4)
(println show5))
}
"""
}
chaining.run
val patterns = HostedLisp.module {
"""
module patterns {
(defun show1
(val t (Tuple3 1 "a" 2))
(match t
((Tuple3 1 b c) if (gt c 3) ⇒ "first")
((Tuple3 1 b c) ⇒ "second")
((Tuple3 a b c) ⇒ "third")
)
)
}
"""
}
println(s"${patterns.show1}")
val par = HostedLisp.module {
"""
module par {
using scala::concurrent::_
using ExecutionContext::Implicits::global
using scala::util::Failure
using scala::util::Success
using scala::concurrent::duration::_
(defun run
(val futureList
(future
(.
(Range 1 100)
(map { x ⇒ Math.random } ))))
(val result ( futureList.map { x -> x.sum } ))
(result.onComplete {
x ⇒
(match x
((Success v) ⇒ (println v))
((Failure m) ⇒ (println m)))
})
(Await.result result (. 4 seconds))
)
}
"""
}
par.run
}
| hkarim/macros-playground | playground/src/main/scala/org/thinkmeta/smp/playground/MacrosPlayground.scala | Scala | apache-2.0 | 3,758 |
package com.twitter.finagle.thrift
import com.twitter.finagle.{Service, SimpleFilter, TransportException}
import com.twitter.util.{Time, Future, Try, Return, Throw}
import scala.util.Random
/**
* Indicates that a Thrift response did not have the correct sequence
* ID according to that assigned by [[com.twitter.finagle.thrift.SeqIdFilter]]
* on the corresponding request.
*/
case class SeqMismatchException(id: Int, expected: Int) extends TransportException {
override def toString = "SeqMismatchException: got %d, expected %d".format(id, expected)
}
object SeqIdFilter {
val VersionMask = 0xffff0000
val Version1 = 0x80010000
}
/**
* A `Filter` that overrides Thrift request sequence IDs,
* replacing them with our own randomly-assigned i32s. Upon response receipt,
* this filter ensures that responses have the correct corresponding sequence ID,
* failing any requests that do not.
*
* @note This only works when using BinaryProtocol.
*/
class SeqIdFilter extends SimpleFilter[ThriftClientRequest, Array[Byte]] {
import SeqIdFilter._
// Why random? Since the underlying codec currently does serial
// dispatching, it doesn't make any difference, but technically we
// need to ensure that we pick IDs from a free pool.
private[this] val rng = new Random(Time.now.inMilliseconds)
private[this] def get32(buf: Array[Byte], off: Int) =
((buf(off+0) & 0xff) << 24) |
((buf(off+1) & 0xff) << 16) |
((buf(off+2) & 0xff) << 8) |
(buf(off+3) & 0xff)
private[this] def put32(buf: Array[Byte], off: Int, x: Int) {
buf(off) = (x>>24 & 0xff).toByte
buf(off+1) = (x>>16 & 0xff).toByte
buf(off+2) = (x>>8 & 0xff).toByte
buf(off+3) = (x & 0xff).toByte
}
private[this] def badMsg(why: String) = Throw(new IllegalArgumentException(why))
private[this] def getAndSetId(buf: Array[Byte], newId: Int): Try[Int] = {
if (buf.length < 4) return badMsg("short header")
val header = get32(buf, 0)
val off = if (header < 0) {
// [4]header
// [4]n
// [n]string
// [4]seqid
if ((header&VersionMask) != Version1)
return badMsg("bad version %d".format(header&VersionMask))
if (buf.length < 8) return badMsg("short name size")
4+4+get32(buf, 4)
} else {
// [4]n
// [n]name
// [1]type
// [4]seqid
4+header+1
}
if (buf.length < off+4) return badMsg("short buffer")
val currentId = get32(buf, off)
put32(buf, off, newId)
Return(currentId)
}
def apply(req: ThriftClientRequest, service: Service[ThriftClientRequest, Array[Byte]]): Future[Array[Byte]] =
if (req.oneway) service(req) else {
val reqBuf = req.message.clone()
val id = rng.nextInt()
val givenId = getAndSetId(reqBuf, id) match {
case Return(id) => id
case Throw(exc) => return Future.exception(exc)
}
val newReq = new ThriftClientRequest(reqBuf, req.oneway)
service(newReq) flatMap { resBuf =>
// We know it's safe to mutate the response buffer since the
// codec never touches it again.
getAndSetId(resBuf, givenId) match {
case Return(`id`) => Future.value(resBuf)
case Return(badId) => Future.exception(SeqMismatchException(badId, id))
case Throw(exc) => Future.exception(exc)
}
}
}
}
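// Illustrative usage sketch, not from the original project: the filter is
// meant to sit in front of a raw Thrift client service so that every outgoing
// request gets a fresh, randomly assigned sequence ID and every response is
// checked against it. Only Filter#andThen composition is assumed here;
// `rawService` is a placeholder for a real Thrift transport service.
object SeqIdFilterSketch {
def withSeqIds(rawService: Service[ThriftClientRequest, Array[Byte]]): Service[ThriftClientRequest, Array[Byte]] =
(new SeqIdFilter).andThen(rawService)
}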
| koshelev/finagle | finagle-thrift/src/main/scala/com/twitter/finagle/thrift/SeqIdFilter.scala | Scala | apache-2.0 | 3,346 |
/**
* Copyright (C) 2013 Carnegie Mellon University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tdb.examples
import java.lang.management.ManagementFactory
import scala.collection.mutable.{Buffer, Map}
import tdb.{Adjustable, Mutator}
import tdb.Debug._
import tdb.list.ListConf
import tdb.master.{MasterConf, MasterConnector}
import tdb.worker.WorkerConf
abstract class Algorithm[Output](val conf: AlgorithmConf) {
val repeatedRuns =
for (run <- conf.runs; i <- 0 until conf.updateRepeat)
yield run
val connector =
if (conf.master != "") {
MasterConnector(conf.master)
} else {
val args = Array("--cacheSize", conf.cacheSize.toString,
"--envHomePath", conf.envHomePath)
val masterConf = new MasterConf(
Array("--port", Experiment.port.toString, "--log", conf.logLevel))
MasterConnector(
workerArgs = args,
masterConf = masterConf)
}
val mutator = new Mutator(connector)
var output: Output = null.asInstanceOf[Output]
def adjust: Adjustable[Output]
var mapCount = 0
var reduceCount = 0
var updateSize = 0
var naiveLoadElapsed: Long = 0
val results = Map[String, Double]()
val actualRuns = Buffer[String]()
protected def generateNaive()
protected def runNaive(): Any
protected def loadInitial()
protected def hasUpdates(): Boolean
protected def loadUpdate(): Int
protected def checkOutput(output: Output): Boolean
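// Overall experiment flow: generate the input, optionally time a naive
// (non-incremental) run, time the initial incremental run, then repeatedly
// load updates and time change propagation, recording load and GC-adjusted
// timings for each phase in `results`.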
def run(): Map[String, Double] = {
System.gc()
if (Experiment.verbosity > 1) {
println("Generate")
}
val beforeLoad = System.currentTimeMillis()
generateNaive()
naiveLoadElapsed = System.currentTimeMillis() - beforeLoad
if (conf.naive) {
actualRuns += "naive"
results("naive-load") = naiveLoadElapsed
if (Experiment.verbosity > 1) {
println("Naive run.")
}
val gcBefore = getGCTime()
val before = System.currentTimeMillis()
runNaive()
results("naive-gc") = getGCTime() - gcBefore
results("naive") = System.currentTimeMillis() - before -
results("naive-gc")
}
// Initial run.
actualRuns += "initial"
System.gc()
initial()
if (Experiment.conf.prompts()) {
prompt
}
if (Experiment.dots) {
mutator.printDDGDots("pagerank.dot")
}
if (Experiment.verbosity > 1) {
if (mapCount != 0) {
println("map count = " + mapCount)
mapCount = 0
}
if (reduceCount != 0) {
println("reduce count = " + reduceCount)
reduceCount = 0
}
println("starting prop")
}
var r = 1
while (hasUpdates()) {
System.gc()
update()
if (Experiment.conf.prompts()) {
prompt
}
}
Experiment.confs("runs") = actualRuns.toList
if (Experiment.verbosity > 1) {
if (mapCount != 0)
println("map count = " + mapCount)
if (reduceCount != 0)
println("reduce count = " + reduceCount)
}
mutator.shutdown()
connector.shutdown()
results
}
def initial() {
if (Experiment.verbosity > 1) {
println("Initial load.")
}
val beforeLoad = System.currentTimeMillis()
loadInitial()
val loadElapsed = System.currentTimeMillis() - beforeLoad
if (Experiment.verbosity > 1) {
println("Initial run.")
}
val gcBefore = getGCTime()
val before = System.currentTimeMillis()
output = mutator.run[Output](adjust)
val elapsed = System.currentTimeMillis() - before
val gcElapsed = getGCTime() - gcBefore
if (Experiment.check) {
assert(checkOutput(output))
}
results("initial") = elapsed - gcElapsed
results("initial-load") = loadElapsed
results("initial-gc") = gcElapsed
}
def update() {
if (Experiment.verbosity > 1) {
println("Updating")
}
val beforeLoad = System.currentTimeMillis()
updateSize = loadUpdate()
val loadElapsed = System.currentTimeMillis() - beforeLoad
if (Experiment.verbosity > 1) {
println("Running change propagation.")
}
val gcBefore = getGCTime()
val before = System.currentTimeMillis()
mutator.propagate()
val elapsed = System.currentTimeMillis() - before
val gcElapsed = getGCTime() - gcBefore
if (Experiment.check) {
assert(checkOutput(output))
}
if (actualRuns.contains(updateSize + "")) {
val oldCount = results(updateSize + "-count")
/*def averageIn(oldAverage: Double, newValue: Double) =
(oldAverage * oldCount + newValue) / (oldCount + 1)
results(updateSize + "") =
averageIn(results(updateSize + ""), elapsed - gcElapsed)
results(updateSize + "-load") =
averageIn(results(updateSize + "-load"), loadElapsed)
results(updateSize + "-gc") =
averageIn(results(updateSize + "-gc"), gcElapsed)
results(updateSize + "-count") = oldCount + 1*/
results(updateSize + "-" + oldCount) = elapsed - gcElapsed
results(updateSize + "-" + oldCount + "-load") = loadElapsed
results(updateSize + "-" + oldCount + "-gc") = gcElapsed
results(updateSize + "-count") = oldCount + 1
actualRuns += updateSize + "-" + oldCount
} else {
results(updateSize + "") = elapsed - gcElapsed
results(updateSize + "-load") = loadElapsed
results(updateSize + "-gc") = gcElapsed
results(updateSize + "-count") = 1
actualRuns += updateSize + ""
}
}
private def getGCTime(): Long = {
var garbageCollectionTime: Long = 0
val iter = ManagementFactory.getGarbageCollectorMXBeans().iterator()
while (iter.hasNext()) {
val gc = iter.next()
val time = gc.getCollectionTime()
if(time >= 0) {
garbageCollectionTime += time
}
}
garbageCollectionTime
}
}
| twmarshall/tdb | core/src/main/scala/tdb/examples/Algorithm.scala | Scala | apache-2.0 | 6,356 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.scala
import org.apache.flink.annotation.{Internal, Public, PublicEvolving}
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.functions.KeySelector
import org.apache.flink.streaming.api.datastream.{ConnectedStreams => JavaCStream, DataStream => JavaStream}
import org.apache.flink.streaming.api.functions.co._
import org.apache.flink.streaming.api.operators.{TwoInputStreamOperator, TwoInputStreamOperatorFactory}
import org.apache.flink.util.Collector
/**
* [[ConnectedStreams]] represents two connected streams of (possibly) different data types.
* Connected streams are useful for cases where operations on one stream directly
* affect the operations on the other stream, usually via shared state between the streams.
*
* An example for the use of connected streams would be to apply rules that change over time
* onto another stream. One of the connected streams has the rules, the other stream the
* elements to apply the rules to. The operation on the connected stream maintains the
* current set of rules in the state. It may receive either a rule update and update the state
* or a data element and apply the rules in the state to the element.
*
* The connected stream can be conceptually viewed as a union stream of an Either type, that
* holds either the first stream's type or the second stream's type.
*/
@Public
class ConnectedStreams[IN1, IN2](javaStream: JavaCStream[IN1, IN2]) {
// ------------------------------------------------------
// Transformations
// ------------------------------------------------------
/**
* Applies a CoMap transformation on the connected streams.
*
* The transformation consists of two separate functions, where
* the first one is called for each element of the first connected stream,
* and the second one is called for each element of the second connected stream.
*
* @param fun1 Function called per element of the first input.
* @param fun2 Function called per element of the second input.
* @return The resulting data stream.
*/
def map[R: TypeInformation](fun1: IN1 => R, fun2: IN2 => R):
DataStream[R] = {
if (fun1 == null || fun2 == null) {
throw new NullPointerException("Map function must not be null.")
}
val cleanFun1 = clean(fun1)
val cleanFun2 = clean(fun2)
val comapper = new CoMapFunction[IN1, IN2, R] {
def map1(in1: IN1): R = cleanFun1(in1)
def map2(in2: IN2): R = cleanFun2(in2)
}
map(comapper)
}
/**
* Applies a CoMap transformation on these connected streams.
*
* The transformation calls [[CoMapFunction#map1]] for each element
* in the first stream and [[CoMapFunction#map2]] for each element
* of the second stream.
*
* One can pass a subclass of [[org.apache.flink.streaming.api.functions.co.RichCoMapFunction]]
* to gain access to the [[org.apache.flink.api.common.functions.RuntimeContext]]
* and to additional life cycle methods.
*
* @param coMapper
* The CoMapFunction used to transform the two connected streams
* @return
* The resulting data stream
*/
def map[R: TypeInformation](coMapper: CoMapFunction[IN1, IN2, R]): DataStream[R] = {
if (coMapper == null) {
throw new NullPointerException("Map function must not be null.")
}
val outType : TypeInformation[R] = implicitly[TypeInformation[R]]
asScalaStream(javaStream.map(coMapper, outType).asInstanceOf[JavaStream[R]])
}
/**
* Applies the given [[CoProcessFunction]] on the connected input streams,
* thereby creating a transformed output stream.
*
* The function will be called for every element in the input streams and can produce zero
* or more output elements. Contrary to the [[flatMap(CoFlatMapFunction)]] function,
* this function can also query the time and set timers. When reacting to the firing of set
* timers the function can directly emit elements and/or register yet more timers.
*
* @param coProcessFunction The [[CoProcessFunction]] that is called for each element
* in the stream.
* @return The transformed [[DataStream]].
*/
@PublicEvolving
def process[R: TypeInformation](
coProcessFunction: CoProcessFunction[IN1, IN2, R]) : DataStream[R] = {
if (coProcessFunction == null) {
throw new NullPointerException("CoProcessFunction function must not be null.")
}
val outType : TypeInformation[R] = implicitly[TypeInformation[R]]
asScalaStream(javaStream.process(coProcessFunction, outType))
}
/**
* Applies the given [[KeyedCoProcessFunction]] on the connected input keyed streams,
* thereby creating a transformed output stream.
*
* The function will be called for every element in the input keyed streams and can produce
* zero or more output elements. Contrary to the [[flatMap(CoFlatMapFunction)]] function, this
* function can also query the time and set timers. When reacting to the firing of set timers
* the function can directly emit elements and/or register yet more timers.
*
* @param keyedCoProcessFunction The [[KeyedCoProcessFunction]] that is called for each element
* in the stream.
* @return The transformed [[DataStream]].
*/
@PublicEvolving
def process[K, R: TypeInformation](
keyedCoProcessFunction: KeyedCoProcessFunction[K, IN1, IN2, R]) : DataStream[R] = {
if (keyedCoProcessFunction == null) {
throw new NullPointerException("KeyedCoProcessFunction function must not be null.")
}
val outType : TypeInformation[R] = implicitly[TypeInformation[R]]
asScalaStream(javaStream.process(keyedCoProcessFunction, outType))
}
/**
* Applies a CoFlatMap transformation on these connected streams.
*
* The transformation calls [[CoFlatMapFunction#flatMap1]] for each element
* in the first stream and [[CoFlatMapFunction#flatMap2]] for each element
* of the second stream.
*
* One can pass a subclass of [[org.apache.flink.streaming.api.functions.co.RichCoFlatMapFunction]]
* to gain access to the [[org.apache.flink.api.common.functions.RuntimeContext]]
* and to additional life cycle methods.
*
* @param coFlatMapper
* The CoFlatMapFunction used to transform the two connected streams
* @return
* The resulting data stream.
*/
def flatMap[R: TypeInformation](coFlatMapper: CoFlatMapFunction[IN1, IN2, R]):
DataStream[R] = {
if (coFlatMapper == null) {
throw new NullPointerException("FlatMap function must not be null.")
}
val outType : TypeInformation[R] = implicitly[TypeInformation[R]]
asScalaStream(javaStream.flatMap(coFlatMapper, outType).asInstanceOf[JavaStream[R]])
}
/**
* Applies a CoFlatMap transformation on the connected streams.
*
* The transformation consists of two separate functions, where
* the first one is called for each element of the first connected stream,
* and the second one is called for each element of the second connected stream.
*
* @param fun1 Function called per element of the first input.
* @param fun2 Function called per element of the second input.
* @return The resulting data stream.
*/
def flatMap[R: TypeInformation](
fun1: (IN1, Collector[R]) => Unit,
fun2: (IN2, Collector[R]) => Unit): DataStream[R] = {
if (fun1 == null || fun2 == null) {
throw new NullPointerException("FlatMap functions must not be null.")
}
val cleanFun1 = clean(fun1)
val cleanFun2 = clean(fun2)
val flatMapper = new CoFlatMapFunction[IN1, IN2, R] {
def flatMap1(value: IN1, out: Collector[R]): Unit = cleanFun1(value, out)
def flatMap2(value: IN2, out: Collector[R]): Unit = cleanFun2(value, out)
}
flatMap(flatMapper)
}
/**
* Applies a CoFlatMap transformation on the connected streams.
*
* The transformation consists of two separate functions, where
* the first one is called for each element of the first connected stream,
* and the second one is called for each element of the second connected stream.
*
* @param fun1 Function called per element of the first input.
* @param fun2 Function called per element of the second input.
* @return The resulting data stream.
*/
def flatMap[R: TypeInformation](
fun1: IN1 => TraversableOnce[R],
fun2: IN2 => TraversableOnce[R]): DataStream[R] = {
if (fun1 == null || fun2 == null) {
throw new NullPointerException("FlatMap functions must not be null.")
}
val cleanFun1 = clean(fun1)
val cleanFun2 = clean(fun2)
val flatMapper = new CoFlatMapFunction[IN1, IN2, R] {
def flatMap1(value: IN1, out: Collector[R]) = { cleanFun1(value) foreach out.collect }
def flatMap2(value: IN2, out: Collector[R]) = { cleanFun2(value) foreach out.collect }
}
flatMap(flatMapper)
}
// ------------------------------------------------------
// grouping and partitioning
// ------------------------------------------------------
/**
* Keys the two connected streams together. After this operation, all
* elements with the same key from both streams will be sent to the
* same parallel instance of the transformation functions.
*
* @param keyPosition1 The first stream's key field
* @param keyPosition2 The second stream's key field
* @return The key-grouped connected streams
*/
def keyBy(keyPosition1: Int, keyPosition2: Int): ConnectedStreams[IN1, IN2] = {
asScalaStream(javaStream.keyBy(keyPosition1, keyPosition2))
}
/**
* Keys the two connected streams together. After this operation, all
* elements with the same key from both streams will be sent to the
* same parallel instance of the transformation functions.
*
* @param keyPositions1 The first stream's key fields
* @param keyPositions2 The second stream's key fields
* @return The key-grouped connected streams
*/
def keyBy(keyPositions1: Array[Int], keyPositions2: Array[Int]): ConnectedStreams[IN1, IN2] = {
asScalaStream(javaStream.keyBy(keyPositions1, keyPositions2))
}
/**
* Keys the two connected streams together. After this operation, all
* elements with the same key from both streams will be sent to the
* same parallel instance of the transformation functions.
*
* @param field1 The first stream's key expression
* @param field2 The second stream's key expression
* @return The key-grouped connected streams
*/
def keyBy(field1: String, field2: String): ConnectedStreams[IN1, IN2] = {
asScalaStream(javaStream.keyBy(field1, field2))
}
/**
* Keys the two connected streams together. After this operation, all
* elements with the same key from both streams will be sent to the
* same parallel instance of the transformation functions.
*
* @param fields1 The first stream's key expressions
* @param fields2 The second stream's key expressions
* @return The key-grouped connected streams
*/
def keyBy(fields1: Array[String], fields2: Array[String]): ConnectedStreams[IN1, IN2] = {
asScalaStream(javaStream.keyBy(fields1, fields2))
}
/**
* Keys the two connected streams together. After this operation, all
* elements with the same key from both streams will be sent to the
* same parallel instance of the transformation functions.
*
* @param fun1 The first stream's key function
* @param fun2 The second stream's key function
* @return The key-grouped connected streams
*/
def keyBy[KEY: TypeInformation](fun1: IN1 => KEY, fun2: IN2 => KEY):
ConnectedStreams[IN1, IN2] = {
val keyType = implicitly[TypeInformation[KEY]]
val cleanFun1 = clean(fun1)
val cleanFun2 = clean(fun2)
val keyExtractor1 = new JavaKeySelector[IN1, KEY](cleanFun1)
val keyExtractor2 = new JavaKeySelector[IN2, KEY](cleanFun2)
asScalaStream(javaStream.keyBy(keyExtractor1, keyExtractor2, keyType))
}
/**
* Returns a "closure-cleaned" version of the given function. Cleans only if closure cleaning
* is not disabled in the [[org.apache.flink.api.common.ExecutionConfig]]
*/
private[flink] def clean[F <: AnyRef](f: F): F = {
new StreamExecutionEnvironment(javaStream.getExecutionEnvironment).scalaClean(f)
}
@PublicEvolving
def transform[R: TypeInformation](
functionName: String,
operator: TwoInputStreamOperator[IN1, IN2, R]): DataStream[R] = {
asScalaStream(javaStream.transform(functionName, implicitly[TypeInformation[R]], operator))
}
@PublicEvolving
def transform[R: TypeInformation](
functionName: String,
factory: TwoInputStreamOperatorFactory[IN1, IN2, R]): DataStream[R] = {
asScalaStream(javaStream.transform(functionName, implicitly[TypeInformation[R]], factory))
}
}
@Internal
class JavaKeySelector[IN, K](private[this] val fun: IN => K) extends KeySelector[IN, K] {
override def getKey(value: IN): K = fun(value)
}
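// Illustrative usage sketch, not from the original project: a minimal co-map
// over two connected streams. It assumes the usual entry points of the Scala
// DataStream API from this package (StreamExecutionEnvironment and the
// implicit TypeInformation provided by the package object); the element
// values are placeholders.
object ConnectedStreamsSketch {
def example(): DataStream[String] = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
val rules: DataStream[String] = env.fromElements("rule-a", "rule-b")
val events: DataStream[Int] = env.fromElements(1, 2, 3)
// One function per input side; both sides must produce the common output type.
rules.connect(events).map(rule => s"rule:$rule", n => s"event:$n")
}
}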
| apache/flink | flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/ConnectedStreams.scala | Scala | apache-2.0 | 13,889 |
package me.hawkweisman.util.tests
import me.hawkweisman.math.linear.{Linear, SequentialAlgebra, ParallelAlgebra}
import org.scalameter.api._
import scala.util.Random
/**
* Performance tests comparing the parallel and sequential linear algebra
* implementations.
*
* Created by hawk on 9/14/15.
*/
abstract class LinearBenchmark(val name: String)
extends PerformanceTest.OfflineRegressionReport
with Linear {
val sizes = Gen.range("size")(10,100,10)
def intMatrix(n: Int): Matrix[Int]
= Array.ofDim[Int](n * n)
.map(_ ⇒ Random.nextInt())
.grouped(n)
.toArray
def intVector(n: Int): Vector[Int]
= Array.ofDim[Int](n)
.map(_ ⇒ Random.nextInt())
val intMatrices: Gen[(Matrix[Int], Matrix[Int])]
= for { n ← sizes }
yield (intMatrix(n), intMatrix(n))
val intVectors: Gen[(Vector[Int], Vector[Int])]
= for { n ← sizes }
yield (intVector(n), intVector(n))
performance of s"$name vector algebra" in {
measure method "vector-vector addition" in {
using(intVectors) in { case ((u, v)) ⇒ u + v }
}
measure method "vector-vector subtraction" in {
using(intVectors) in { case ((u, v)) ⇒ u - v }
}
measure method "vector-vector multiplication" in {
using(intVectors) in { case ((u, v)) ⇒ u * v }
}
}
performance of s"$name matrix algebra" in {
measure method "matrix-matrix addition" in {
using(intMatrices) in { case ((m, n)) ⇒ m + n }
}
measure method "matrix-matrix subtraction" in {
using(intMatrices) in { case ((m, n)) ⇒ m - n }
}
measure method "matrix-matrix multiplication" in {
using(intMatrices) in { case ((m, n)) ⇒ m * n }
}
}
}
object SeqLinearBench
extends LinearBenchmark("sequential")
with SequentialAlgebra
object ParLinearBench
extends LinearBenchmark("parallel")
with ParallelAlgebra
| hawkw/scala-common | src/bench/scala/me/hawkweisman/bench/LinearBenchmarks.scala | Scala | mit | 1,914 |
package net.manub.embeddedkafka.schemaregistry
import net.manub.embeddedkafka.EmbeddedKafkaSpecSupport
class EmbeddedKafkaWithSchemaRegistryTraitSpec
extends EmbeddedKafkaSpecSupport
with EmbeddedKafkaWithSchemaRegistry {
"the withRunningKafka method" should {
"start a Schema Registry server on a specified port" in {
implicit val config: EmbeddedKafkaConfigWithSchemaRegistry =
EmbeddedKafkaConfigWithSchemaRegistry(schemaRegistryPort = 12345)
withRunningKafka {
schemaRegistryIsAvailable(12345)
}
}
}
"the withRunningKafkaOnFoundPort method" should {
"start and stop Kafka, Zookeeper, and Schema Registry successfully on non-zero ports" in {
val userDefinedConfig =
EmbeddedKafkaConfigWithSchemaRegistry(kafkaPort = 12345,
zooKeeperPort = 12346,
schemaRegistryPort = 12347)
val actualConfig = withRunningKafkaOnFoundPort(userDefinedConfig) {
actualConfig =>
actualConfig shouldBe userDefinedConfig
everyServerIsAvailable(actualConfig)
actualConfig
}
noServerIsAvailable(actualConfig)
}
}
private def everyServerIsAvailable(
config: EmbeddedKafkaConfigWithSchemaRegistry): Unit = {
kafkaIsAvailable(config.kafkaPort)
schemaRegistryIsAvailable(config.schemaRegistryPort)
zookeeperIsAvailable(config.zooKeeperPort)
}
private def noServerIsAvailable(
config: EmbeddedKafkaConfigWithSchemaRegistry): Unit = {
kafkaIsNotAvailable(config.kafkaPort)
schemaRegistryIsNotAvailable(config.schemaRegistryPort)
zookeeperIsNotAvailable(config.zooKeeperPort)
}
}
| manub/scalatest-embedded-kafka | schema-registry/src/test/scala/net/manub/embeddedkafka/schemaregistry/EmbeddedKafkaWithSchemaRegistryTraitSpec.scala | Scala | mit | 1,730 |
/*
* OpenVC, an open source VHDL compiler/simulator
* Copyright (C) 2010 Christian Reisinger
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package at.jku.ssw.tests
import java.io.{File, PrintWriter}
import at.jku.ssw.openvc.util.SourceFile
import at.jku.ssw.openvs.Simulator
import at.jku.ssw.openvc.{CompilationUnit, VHDLCompiler}
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.{BeforeAndAfter, FunSuite}
trait GenericTest extends FunSuite with ShouldMatchers with BeforeAndAfter {
val library = "testLibrary"
val configuration = new CompilationUnit.Configuration(
enableAMS = false,
enableVhdl2008 = true,
noWarn = false,
encoding = None,
outputDirectory = "output/",
designLibrary = library,
libraryDirectory = "vhdlLibs\\",
XrunOnlyToPhase = None,
XdebugCompiler = false,
XdebugCodeGenerator = false
)
val directory = new File(configuration.libraryOutputDirectory)
after {
if (directory.exists) directory.listFiles.foreach {
file =>
if (file.isDirectory)
file.listFiles.foreach(_.delete())
file.delete()
}
}
def compile(source: String) {
val unit = VHDLCompiler.compile(new CompilationUnit(SourceFile.fromString(source, "testFile"), configuration))
unit.printMessages(new PrintWriter(System.out))
unit.hasErrors should equal(false)
}
def compileAndLoad(text: String)(source: String) {
test(text) {
compile(source)
Simulator.loadFiles(this.getClass.getClassLoader, configuration.outputDirectory, directory.listFiles.filter(_.getName.endsWith(".class")).map(configuration.designLibrary + "." + _.getName.split('.').head), List("std.jar"))
//Simulator.loadFiles(this.getClass.getClassLoader, listFiles(new File(configuration.libraryOutputDirectory), classFilter, true).map(file => file.getPath.substring(file.getPath.indexOf('\\') + 1).split('.').head.replace('\\', '.')), List("std.jar", "ieee.jar"))
}
}
def compileAndRun(text: String, packageName: String, procedure: String)(source: String) {
test(text) {
compile(source)
Simulator.runClass(this.getClass.getClassLoader, configuration.outputDirectory, configuration.designLibrary + "." + packageName + "_body", procedure, List("std.jar", "ieee.jar"))
}
}
def compileCodeInPackageAndLoad(text: String)(source: String) {
compileAndLoad(text) {
"""
package Dummy is
""" +
source +
"""
end Dummy ;
"""
}
}
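// Wraps the given snippet in a minimal `dummy` package body before compiling
// it and invoking the requested procedure of the generated class.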
def compileCodeInPackageAndRun(text: String, packageName: String = "dummy", procedure: String = "main$-1404437944")(source: String) {
compileAndRun(text, packageName, procedure) {
"""
package dummy is
end package dummy;
package body Dummy is
""" +
source +
"""
end Dummy ;
"""
}
}
}
| chrreisinger/OpenVC | src/test/scala/at/jku/ssw/tests/GenericTest.scala | Scala | gpl-3.0 | 3,525 |
/*
* Ported from https://github.com/junit-team/junit
*/
package org.junit
import java.lang.annotation._
import scala.scalajs.js.annotation.JSExport
@Retention(RetentionPolicy.RUNTIME)
@Target(Array(ElementType.METHOD))
@JSExport
case class Test(expected: Class[_ <: Throwable] = classOf[Test.None],
timeout: Long = 0L) extends Annotation {
def annotationType (): Class[Annotation] =
classOf[Test].asInstanceOf[Class[Annotation]]
}
object Test {
@SerialVersionUID(1L)
final class None private() extends Throwable
}
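// Illustrative usage sketch, not from the original project: the annotation is
// a plain case class, so its defaults can be inspected directly. The values
// below only demonstrate the two parameters (expected exception type and
// timeout in milliseconds, following JUnit 4 semantics).
object TestSketch {
val default = Test() // expected = classOf[Test.None], timeout = 0L
val expectsFailure = Test(expected = classOf[IllegalStateException])
val timed = Test(timeout = 500L)
}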
| nicolasstucki/scala-js-junit | runtime/src/main/scala/org/junit/Test.scala | Scala | bsd-3-clause | 534 |
package com.github.diegopacheco.scala3.playground.features
// Same effect as having trait and object implementation - but with much less typing.
object Logarithms:
  opaque type Logarithm = Double
  object Logarithm:
    def apply(d: Double): Logarithm = math.log(d)
  extension (x: Logarithm)
    def toDouble: Double = math.exp(x)
    def + (y: Logarithm): Logarithm = Logarithm(math.exp(x) + math.exp(y))
    def * (y: Logarithm): Logarithm = x + y

object OpaqueTypeMain extends App {
  import Logarithms._
  val l2 = Logarithm(2.0)
  val l3 = Logarithm(3.0)
  println((l2 * l3).toDouble) // prints 6.0
  println((l2 + l3).toDouble) // prints 4.999...
  // val d: Double = l2 // ERROR: Found Logarithm required Double
}
| diegopacheco/scala-playground | scala-3-playground/scala-3-playground/src/main/scala/com/github/diegopacheco/scala3/playground/features/OpaqueTypeMain.scala | Scala | unlicense | 730 |
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.docgen
import org.neo4j.graphdb.index.Index
import org.junit.{Before, After}
import scala.collection.JavaConverters._
import java.io.{PrintWriter, File}
import org.neo4j.graphdb._
import factory.{GraphDatabaseSetting, GraphDatabaseSettings}
import java.io.ByteArrayOutputStream
import org.neo4j.visualization.graphviz.{AsciiDocStyle, GraphvizWriter, GraphStyle}
import org.neo4j.walk.Walker
import org.neo4j.visualization.asciidoc.AsciidocHelper
import org.neo4j.cypher.CuteGraphDatabaseService.gds2cuteGds
import org.neo4j.cypher.javacompat.GraphImpl
import org.neo4j.cypher.{CypherParser, ExecutionResult, ExecutionEngine}
import org.neo4j.test.{ImpermanentGraphDatabase, TestGraphDatabaseFactory, GraphDescription}
import org.neo4j.test.GeoffService
import org.scalatest.Assertions
import org.neo4j.test.AsciiDocGenerator
trait DocumentationHelper {
def generateConsole:Boolean
def db: GraphDatabaseService
def nicefy(in: String): String = in.toLowerCase.replace(" ", "-")
def simpleName: String = this.getClass.getSimpleName.replaceAll("Test", "").toLowerCase
def createWriter(title: String, folder: String): (File, PrintWriter) = {
val dir = new File(path + nicefy(folder))
if (!dir.exists()) {
dir.mkdirs()
}
val writer = new PrintWriter(new File(dir, nicefy(title) + ".asciidoc"), "UTF-8")
(dir, writer)
}
val path: String = "target/docs/dev/ql/"
val graphvizFileName = "cypher-" + simpleName + "-graph.asciidoc"
def dumpGraphViz(dir: File, graphVizOptions:String) {
val graphViz = new PrintWriter(new File(dir, graphvizFileName), "UTF-8")
val foo = emitGraphviz(graphvizFileName, graphVizOptions)
graphViz.write(foo)
graphViz.flush()
graphViz.close()
}
private def emitGraphviz(fileName:String, graphVizOptions:String): String = {
val out = new ByteArrayOutputStream()
val writer = new GraphvizWriter(getGraphvizStyle)
writer.emit(out, Walker.fullGraph(db))
return """
.Graph
["dot", "%s.svg", "neoviz", "%s"]
----
%s
----
""".format(fileName, graphVizOptions, out)
}
protected def getGraphvizStyle: GraphStyle = AsciiDocStyle.withAutomaticRelationshipTypeColors()
}
abstract class DocumentingTestBase extends Assertions with DocumentationHelper {
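// Runs the documented query against the example graph, applies the supplied
// assertions, and dumps the query, its textual result and a GraphViz
// rendering of the graph as AsciiDoc snippets for the manual.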
def testQuery(title: String, text: String, queryText: String, returns: String, assertions: (ExecutionResult => Unit)*) {
val r = testWithoutDocs(queryText, assertions:_*)
val result: ExecutionResult = r._1
var query: String = r._2
val (dir: File, writer: PrintWriter) = createWriter(title, section)
dumpToFile(dir, writer, title, query, returns, text, result)
dumpGraphViz(dir, graphvizOptions)
}
var db: GraphDatabaseService = null
val parser: CypherParser = new CypherParser
var engine: ExecutionEngine = null
var nodes: Map[String, Long] = null
var nodeIndex: Index[Node] = null
var relIndex: Index[Relationship] = null
val properties: Map[String, Map[String, Any]] = Map()
var generateConsole: Boolean = true
var generateInitialGraphForConsole: Boolean = true
val graphvizOptions: String = ""
val noTitle: Boolean = false;
def section: String
def graphDescription: List[String]
def indexProps: List[String] = List()
def dumpToFile(dir: File, writer: PrintWriter, title: String, query: String, returns: String, text: String, result: ExecutionResult) {
val testId = nicefy(section + " " + title)
writer.println("[[" + testId + "]]")
if (!noTitle) writer.println("== " + title + " ==")
writer.println(text)
writer.println()
runQuery(dir, writer, testId, query, returns, result)
writer.flush()
writer.close()
}
def executeQuery(queryText: String): ExecutionResult = {
var query = queryText
nodes.keySet.foreach((key) => query = query.replace("%" + key + "%", node(key).getId.toString))
engine.execute(query)
}
def testWithoutDocs(queryText: String, assertions: (ExecutionResult => Unit)*): (ExecutionResult, String) = {
var query = queryText
nodes.keySet.foreach((key) => query = query.replace("%" + key + "%", node(key).getId.toString))
val result = engine.execute(query)
assertions.foreach(_.apply(result))
(result, query)
}
def indexProperties[T <: PropertyContainer](n: T, index: Index[T]) {
indexProps.foreach((property) => {
if (n.hasProperty(property)) {
val value = n.getProperty(property)
index.add(n, property, value)
}
})
}
def node(name: String): Node = db.getNodeById(nodes.getOrElse(name, throw new NotFoundException(name)))
def rel(id: Long): Relationship = db.getRelationshipById(id)
@After
def teardown() {
if (db != null) db.shutdown()
}
@Before
def init() {
db = new TestGraphDatabaseFactory().newImpermanentDatabaseBuilder().
setConfig( GraphDatabaseSettings.node_keys_indexable, "name" ).
setConfig( GraphDatabaseSettings.node_auto_indexing, GraphDatabaseSetting.TRUE ).
newGraphDatabase()
engine = new ExecutionEngine(db)
db.asInstanceOf[ImpermanentGraphDatabase].cleanContent(false)
db.inTx(() => {
nodeIndex = db.index().forNodes("nodes")
relIndex = db.index().forRelationships("rels")
val g = new GraphImpl(graphDescription.toArray[String])
val description = GraphDescription.create(g)
nodes = description.create(db).asScala.map {
case (name, node) => name -> node.getId
}.toMap
db.getAllNodes.asScala.foreach((n) => {
indexProperties(n, nodeIndex)
n.getRelationships(Direction.OUTGOING).asScala.foreach(indexProperties(_, relIndex))
})
properties.foreach((n) => {
val nod = node(n._1)
n._2.foreach((kv) => nod.setProperty(kv._1, kv._2))
})
})
}
def runQuery(dir: File, writer: PrintWriter, testId: String, query: String, returns: String, result: ExecutionResult) {
val output = new StringBuilder(2048)
output.append(".Query\n")
output.append(AsciidocHelper.createCypherSnippet(query))
writer.println(AsciiDocGenerator.dumpToSeparateFile(dir, testId + ".query", output.toString))
writer.println
writer.println(returns)
writer.println
val resultText = result.dumpToString()
output.clear
output.append(".Result\n")
output.append(AsciidocHelper.createQueryResultSnippet(resultText))
output.append('\n')
writer.println(AsciiDocGenerator.dumpToSeparateFile(dir, testId + ".result", output.toString))
if (generateConsole) {
output.clear
output.append(".Try this query live\n")
output.append("[console]\n")
output.append("----\n")
output.append(if (generateInitialGraphForConsole) new GeoffService(db).toGeoff else "start n=node(*) match n-[r?]->() delete n, r;")
output.append("\n\n")
output.append(query)
output.append("\n----")
writer.println(AsciiDocGenerator.dumpToSeparateFile(dir, testId + ".console", output.toString))
}
}
}
| dksaputra/community | cypher/src/test/scala/org/neo4j/cypher/docgen/DocumentingTestBase.scala | Scala | gpl-3.0 | 7,809 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spot.proxy
import org.apache.log4j.{Level, LogManager}
import org.apache.spot.SuspiciousConnectsArgumentParser.SuspiciousConnectsConfig
import org.apache.spot.proxy.ProxySchema.Word
import org.apache.spot.testutils.TestingSparkContextFlatSpec
import org.scalatest.Matchers
class ProxyWordCreationTest extends TestingSparkContextFlatSpec with Matchers {
val testConfigProxy = SuspiciousConnectsConfig(analysis = "proxy",
inputPath = "",
feedbackFile = "",
duplicationFactor = 1,
topicCount = 20,
hdfsScoredConnect = "",
threshold = 1.0d,
maxResults = 1000,
outputDelimiter = "\t",
ldaPRGSeed = None,
ldaMaxiterations = 20,
ldaAlpha = 1.02,
ldaBeta = 1.001)
"proxy word creation" should "return the correct word given the set of rules to form the word" in {
val logger = LogManager.getLogger("ProxyWordCreation")
logger.setLevel(Level.WARN)
// case class ProxyInput has the form:
// 1 p_date:String,
// 2 p_time:String, <- currently used for feature creation
// 3 clientip:String,
// 4 host:String, <- currently used for feature creation
// 5 reqmethod:String, <- currently used for feature creation
// 6 useragent:String, <- currently used for feature creation
// 7 resconttype:String, <- currently used for feature creation
// 8 duration:Int,
// 9 username:String,
// 10 webcat:String,
// 11 referer:String,
// 12 respcode:String, <- currently used for feature creation
// 13 uriport:Int,
// 14 uripath:String,
// 15 uriquery:String,
// 16 serverip:String,
// 17 scbytes:Int,
// 18 csbytes:Int,
// 19 fulluri:String) <- currently used for feature creation
val noAlexaPutLoEntroTextRareAgentShortUri202 = ProxyInput("2016-10-03", "00:57:36", "127.0.0.1", "intel.com", "PUT",
"Mozilla/5.0", "text/plain", 230, "-", "Technology/Internet", "http://www.spoonflower.com/tags/color", "202", 80,
"/sites/c37i4q22szvir8ga3m8mtxaft7gwnm5fio8hfxo35mu81absi1/carts/4b3a313d-50f6-4117-8ffd-4e804fd354ef/fiddle",
"-", "127.0.0.1", 338, 647, "ab")
val AlexaPutMidEntroImagePopularAgentShortUri202 = ProxyInput("2016-10-03", "01:57:36", "127.0.0.1", "maw.bronto.com",
"PUT", "Safari/537.36", "image", 230, "-", "Technology/Internet", "http://www.spoonflower.com/tags/color", "202", 80,
"/sites/c37i4q22szvir8ga3m8mtxaft7gwnm5fio8hfxo35mu81absi1/carts/4b3a313d-50f6-4117-8ffd-4e804fd354ef/fiddle",
"-", "127.0.0.1", 338, 647, "abc")
val AlexaPutMidEntroImagePopularAgentShortUri304 = ProxyInput("2016-10-03", "02:57:36", "127.0.0.1", "maw.bronto.com",
"PUT", "Safari/537.36", "image", 230, "-", "Technology/Internet", "http://www.spoonflower.com/tags/color", "304", 80,
"/sites/c37i4q22szvir8ga3m8mtxaft7gwnm5fio8hfxo35mu81absi1/carts/4b3a313d-50f6-4117-8ffd-4e804fd354ef/fiddle",
"-", "127.0.0.1", 338, 647, "abcd")
val AlexaPutMidEntroBinaryPopularAgentShortUri304 = ProxyInput("2016-10-03", "03:57:36", "127.0.0.1", "maw.bronto.com",
"PUT", "Safari/537.36", "binary", 230, "-", "Technology/Internet", "http://www.spoonflower.com/tags/color", "304", 80,
"/sites/c37i4q22szvir8ga3m8mtxaft7gwnm5fio8hfxo35mu81absi1/carts/4b3a313d-50f6-4117-8ffd-4e804fd354ef/fiddle",
"-", "127.0.0.1", 338, 647, "abcde")
val AlexaPutMidEntroBinaryPopularAgentShortUri206 = ProxyInput("2016-10-03", "10:57:36", "127.0.0.1", "maw.bronto.com",
"PUT", "Safari/537.36", "binary", 230, "-", "Technology/Internet", "http://www.spoonflower.com/tags/color", "206", 80,
"/sites/c37i4q22szvir8ga3m8mtxaft7gwnm5fio8hfxo35mu81absi1/carts/4b3a313d-50f6-4117-8ffd-4e804fd354ef/fiddle",
"-", "127.0.0.1", 338, 647, "abcdef")
val AlexaGetHiEntroBinaryPopularAgentShortUri206 = ProxyInput("2016-10-03", "11:57:36", "127.0.0.1", "maw.bronto.com",
"GET", "Safari/537.36", "binary", 230, "-", "Technology/Internet", "http://www.spoonflower.com/tags/color", "206", 80,
"/sites/c37i4q22szvir8ga3m8mtxaft7gwnm5fio8hfxo35mu81absi1/carts/4b3a313d-50f6-4117-8ffd-4e804fd354ef/fiddle",
"-", "127.0.0.1", 338, 647, "abcdefghijklmnopqrstuvwxyz")
val AlexaGetZeroEntroTextPopularAgentShortUri200 = ProxyInput("2016-10-03", "13:57:36", "127.0.0.1", "maw.bronto.com",
"GET", "Safari/537.36", "text/plain", 230, "-", "Technology/Internet", "http://www.spoonflower.com/tags/color", "200", 80,
"/sites/c37i4q22szvir8ga3m8mtxaft7gwnm5fio8hfxo35mu81absi1/carts/4b3a313d-50f6-4117-8ffd-4e804fd354ef/fiddle",
"-", "127.0.0.1", 338, 647, "aaa")
val AlexaGetLoEntroTextPopularAgentShortUri200 = ProxyInput("2016-10-03", "14:57:36", "127.0.0.1", "maw.bronto.com",
"GET", "Safari/537.36", "text/plain", 230, "-", "Technology/Internet", "http://www.spoonflower.com/tags/color", "200", 80,
"/sites/c37i4q22szvir8ga3m8mtxaft7gwnm5fio8hfxo35mu81absi1/carts/4b3a313d-50f6-4117-8ffd-4e804fd354ef/fiddle",
"-", "127.0.0.1", 338, 647, "aaabbb")
val AlexaGetMidEntroTextPopularAgentMidUri302 = ProxyInput("2016-10-03", "22:57:36", "127.0.0.1", "maw.bronto.com",
"GET", "Safari/537.36", "text/plain", 230, "-", "Technology/Internet", "http://www.spoonflower.com/tags/color", "302", 80,
"/sites/c37i4q22szvir8ga3m8mtxaft7gwnm5fio8hfxo35mu81absi1/carts/4b3a313d-50f6-4117-8ffd-4e804fd354ef/fiddle",
"-", "127.0.0.1", 338, 647, "aaaaaaaaaaabbbbbbbbbbbccccccccccc")
val AlexaGetHiEntroTextPopularAgentLargeUri302 = ProxyInput("2016-10-03", "23:57:36", "127.0.0.1", "maw.bronto.com",
"GET", "Safari/537.36", "text/plain", 230, "-", "Technology/Internet", "http://www.spoonflower.com/tags/color", "302",
80, "/sites/c37i4q22szvir8ga3m8mtxaft7gwnm5fio8hfxo35mu81absi1/carts/4b3a313d-50f6-4117-8ffd-4e804fd354ef/fiddle",
"-", "127.0.0.1", 338, 647, "maw.bronto.com/sites/c37i4q22szvir8ga3m8mtxaft7gwnm5fio8hfxo35mu81absi1/carts" +
"/4b3a313d-50f6-4117-8ffd-4e804fd354ef/fiddle")
val data = sqlContext.createDataFrame(Seq(noAlexaPutLoEntroTextRareAgentShortUri202,
AlexaPutMidEntroImagePopularAgentShortUri202,
AlexaPutMidEntroImagePopularAgentShortUri304,
AlexaPutMidEntroBinaryPopularAgentShortUri304,
AlexaPutMidEntroBinaryPopularAgentShortUri206,
AlexaGetHiEntroBinaryPopularAgentShortUri206,
AlexaGetZeroEntroTextPopularAgentShortUri200,
AlexaGetLoEntroTextPopularAgentShortUri200,
AlexaGetMidEntroTextPopularAgentMidUri302,
AlexaGetHiEntroTextPopularAgentLargeUri302))
val model = ProxySuspiciousConnectsModel.trainModel(sparkContext, sqlContext, logger, testConfigProxy, data)
val scoredData = model.score(sparkContext, data)
val words = scoredData.collect().map(_.getAs[String](Word))
words(0) shouldBe "2_0_PUT_4_text_0_1_202"
words(1) shouldBe "1_1_PUT_6_image_3_1_202"
words(2) shouldBe "1_2_PUT_7_image_3_2_304"
words(3) shouldBe "1_3_PUT_8_binary_3_2_304"
words(4) shouldBe "1_10_PUT_9_binary_3_2_206"
words(5) shouldBe "1_11_GET_16_binary_3_4_206"
words(6) shouldBe "1_13_GET_0_text_3_1_200"
words(7) shouldBe "1_14_GET_4_text_3_2_200"
words(8) shouldBe "1_22_GET_6_text_3_5_302"
words(9) shouldBe "1_23_GET_17_text_3_6_302"
}
}
| NathanSegerlind/incubator-spot | spot-ml/src/test/scala/org/apache/spot/proxy/ProxyWordCreationTest.scala | Scala | apache-2.0 | 8,100 |
/*
* Copyright (c) 2015-16 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
package test
import scala.language.experimental.macros
import scala.reflect.macros.blackbox
class TypeTrace[T]
object TypeTrace {
implicit def apply[T]: TypeTrace[T] = macro TypeTraceMacros.applyImpl[T]
}
class TypeTraceMacros(val c: blackbox.Context) {
import c.universe._
def applyImpl[T](implicit tTag: WeakTypeTag[T]): Tree = {
val tTpe = weakTypeOf[T]
c.info(
c.enclosingPosition,
s"Trace: $tTpe ${tTpe.dealias} ${tTpe.getClass.getName} ${tTpe.dealias.getClass.getName}",
force = true
)
q"""new _root_.shapeless.test.TypeTrace[$tTpe]"""
}
}
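// A minimal usage sketch (illustrative only; `Probe` and `traced` are hypothetical
// names, not part of shapeless). Materializing the implicit invokes the macro above,
// which emits a compile-time info message describing the traced type:
//
//   import shapeless.test.TypeTrace
//
//   object Probe {
//     def traced[T](implicit tt: TypeTrace[T]): TypeTrace[T] = tt
//     traced[List[Int]]   // compiler info: "Trace: List[Int] List[Int] ..."
//   }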
| isaka/shapeless | core/src/main/scala/shapeless/test/typetrace.scala | Scala | apache-2.0 | 1,216 |
package view
import service.RequestCache
import twirl.api.Html
import util.StringUtil
trait AvatarImageProvider { self: RequestCache =>
/**
   * Returns an <img> tag which displays the avatar icon.
   * Falls back to Gravatar if an avatar icon has not been configured in the user settings.
*/
protected def getAvatarImageHtml(userName: String, size: Int,
mailAddress: String = "", tooltip: Boolean = false)(implicit context: app.Context): Html = {
val src = if(mailAddress.isEmpty){
// by user name
getAccountByUserName(userName).map { account =>
if(account.image.isEmpty && context.settings.gravatar){
s"""https://www.gravatar.com/avatar/${StringUtil.md5(account.mailAddress.toLowerCase)}?s=${size}"""
} else {
s"""${context.path}/${account.userName}/_avatar"""
}
} getOrElse {
s"""${context.path}/_unknown/_avatar"""
}
} else {
// by mail address
getAccountByMailAddress(mailAddress).map { account =>
if(account.image.isEmpty && context.settings.gravatar){
s"""https://www.gravatar.com/avatar/${StringUtil.md5(account.mailAddress.toLowerCase)}?s=${size}"""
} else {
s"""${context.path}/${account.userName}/_avatar"""
}
} getOrElse {
if(context.settings.gravatar){
s"""https://www.gravatar.com/avatar/${StringUtil.md5(mailAddress.toLowerCase)}?s=${size}"""
} else {
s"""${context.path}/_unknown/_avatar"""
}
}
}
if(tooltip){
Html(s"""<img src="${src}" class="avatar" style="width: ${size}px; height: ${size}px;" data-toggle="tooltip" title="${userName}"/>""")
} else {
Html(s"""<img src="${src}" class="avatar" style="width: ${size}px; height: ${size}px;" />""")
}
}
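  // A usage sketch (hypothetical caller; actual templates may differ): a Twirl template
  // mixing in this trait could emit
  //   getAvatarImageHtml("alice", 20, tooltip = true)
  // which renders an <img class="avatar"> sized 20x20 pointing at "alice"'s avatar
  // endpoint (or Gravatar when enabled), with a tooltip titled "alice".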
} | emag/gitbucket | src/main/scala/view/AvatarImageProvider.scala | Scala | apache-2.0 | 1,801 |
package com.heaton.maven
import java.io.File
import java.net.URI
import java.nio.charset.Charset
import scala.io.Source
object Converter {
case class Dependency(groupId: String, name: String, version: String) {
private def makeXmlPattern:String = s"""<groupId>.*?</groupId>(\\\\s*)<artifactId>$name-$version</artifactId>(\\\\s*)<version>\\\\$$\\\\{.*?\\\\}</version>"""
private def makeStandardXml: String = s"""<groupId>${groupId}</groupId>$$1<artifactId>${name}</artifactId>$$2<version>${version}</version>"""
def replaceAll(src:String): String = src.replaceAll(makeXmlPattern, makeStandardXml)
}
val dependencyPattern = "(.*?):(.*?):(?:jar:)?(.*)".r
def fixPom(publicJarsFile: String, pomFile: String): String = {
val deps = getPublicJarsFromFile(publicJarsFile)
val pom = readFile(pomFile).mkString
deps.foldLeft(pom)(replaceAll)
}
private def replaceAll(pom:String, dependency: Dependency) = dependency.replaceAll(pom)
def parse(input: String): Option[Dependency] = for {
dependencyPattern(groupId, name, version) <- dependencyPattern.findFirstIn(input)
} yield Dependency(groupId, name, version)
def getPublicJarsFromFile(file: String): List[Dependency] = (for {
line <- readFile(file).getLines
dependency <- parse(line)
} yield dependency).toList
def saveFile(content: String, file: String): File = {
import java.nio.file._
val path: Path = Paths.get(file)
Files.write(path, content.getBytes("UTF-8"))
path.toFile
}
private def readFile(path: String) = Source fromFile uri(path)
private def uri(path: String): URI = this.getClass.getResource(path).toURI
}
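// A minimal usage sketch (the resource and output paths below are hypothetical;
// fixPom resolves both inputs as classpath resources, as readFile/uri above do):
//
//   object ConverterExample extends App {
//     // Rewrite <artifactId>name-version</artifactId> style entries in the POM into
//     // standard groupId/artifactId/version elements, then persist the result.
//     val fixed = Converter.fixPom("/public-jars.txt", "/pom.xml")
//     Converter.saveFile(fixed, "target/pom-fixed.xml")
//   }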
| heaton/pom-convertor | src/main/scala/com/heaton/maven/Converter.scala | Scala | mit | 1,649 |
/**
* Copyright (C) 2014 VanillaSource
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.vanillasource.wicket.gatling
import io.gatling.core.session.Expression
import io.gatling.core.validation._
/**
* Import all methods from this object to use all wicket-gatling functions.
*
* Usage:
* {{{
* import com.vanillasource.wicket.gatling.Predef._
* }}}
*/
object Predef {
/**
* Get all the wicket URIs that match the given parameters. See [[WicketTargets.getUris]].
*/
def wicketUris(targetType: TargetType, pathSpec: Expression[String]*): Expression[List[String]] = session => {
if (!session.contains("wicketTargets")) {
"no wicket targets available in session, probably not enabled through enableWicketTargets(), or page was not loaded properly".failure
} else {
switchSeqAndExpression(pathSpec.toList).apply(session).map(pathSpecStrings => {
session("wicketTargets").as[WicketTargets].getUris(targetType, pathSpecStrings:_*)
})
}
}
/**
* Turn the expression and sequence inside out. This is done so it can be
* easily mapped once, and any potential errors in any of the expressions
* immediately lead to failure.
*/
private def switchSeqAndExpression[T](expressions: List[Expression[T]]): Expression[List[T]] = session => {
if (expressions.isEmpty) {
Nil.success
} else {
for (
current <- expressions.head.apply(session);
rest <- switchSeqAndExpression(expressions.tail).apply(session)
) yield (current :: rest)
}
}
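   // Illustration (not part of the original source): given expressions e1 and e2 that
   // evaluate to "a".success and "b".success for a session, the combined expression
   // yields List("a", "b").success; if any element fails, the whole result is that failure.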
/**
    * Makes it possible to select a single URI from possibly multiple ones using
* a supplied logic.
*
* Intended usage is on any method here that returns multiple URIs, for
* example selecting a link randomly:
* {{{
* wicketLinks("link-id").selectUri(links -> links(rnd.nextInt(links.size)))
* }}}
*/
implicit class SelectableUriList(uris: Expression[List[String]]) {
def selectUri(mapper: List[String] => String) = uris.andThen(_.map(mapper))
}
/**
    * Can be used in conditional executions like doIf() calls, in the following way:
* {{{
* .doIf(wicketUriExists("next-page")) {
* exec(http("...") ...)
* }
* }}}
*/
def wicketUriExists(pathSpec: Expression[String]*): Expression[Boolean] =
wicketUris(TargetType.Any, pathSpec:_*).map(!_.isEmpty)
/**
* Expects and returns exactly one URI. If either no URI can be found
* for the given specification, or more than one URI is found, then a failure
* is returned.
*/
def wicketUri(targetType: TargetType, pathSpec: Expression[String]*) = wicketUris(targetType, pathSpec:_*).andThen(_.flatMap(uriList =>
if (uriList.isEmpty) {
s"on the given path '$pathSpec' is no URI to be found".failure
} else if (uriList.size > 1) {
s"on thet given path '$pathSpec' there are multiple URIs to be found".failure
} else {
uriList.head.success
}
))
/**
* Returns all links found under the given path.
*/
def wicketLinksUnder(pathSpec: Expression[String]*) = wicketUris(TargetType.Link, pathSpec:_*)
/**
* Returns all forms found under the given path.
*/
def wicketFormsUnder(pathSpec: Expression[String]*) = wicketUris(TargetType.Form, pathSpec:_*)
/**
* Returns exactly one link found under the given path. Expected
* to be used with the get() method:
* {{{
* http("Go to next page")
* .get(wicketLinkUnder("next-page-link"))
* }}}
*/
def wicketLinkUnder(pathSpec: Expression[String]*) = wicketUri(TargetType.Link, pathSpec:_*)
/**
* Returns exactly one form found under the given path. Expected
* to be used with the post() method:
* {{{
* http("Submit User Form")
* .post(wicketFormUnder("user-form"))
* }}}
*/
def wicketFormUnder(pathSpec: Expression[String]*) = wicketUri(TargetType.Form, pathSpec:_*)
}
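// A minimal end-to-end sketch (assumes the standard Gatling HTTP DSL and that wicket
// target recording is enabled via enableWicketTargets(), as referenced in the failure
// message above; the request names and the "next-page" path are illustrative):
//
//   import com.vanillasource.wicket.gatling.Predef._
//   import io.gatling.core.Predef._
//   import io.gatling.http.Predef._
//
//   val scn = scenario("Wicket navigation")
//     .exec(http("Home").get("/"))
//     .doIf(wicketUriExists("next-page")) {
//       exec(http("Next page").get(wicketLinkUnder("next-page")))
//     }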
| vanillasource/wicket-gatling | src/main/scala/com/vanillasource/wicket/gatling/Predef.scala | Scala | lgpl-3.0 | 4,796 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.net.{URI, URISyntaxException}
import java.text.{BreakIterator, DecimalFormat, DecimalFormatSymbols}
import java.util.{HashMap, Locale, Map => JMap}
import java.util.regex.Pattern
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.{ByteArray, UTF8String}
////////////////////////////////////////////////////////////////////////////////////////////////////
// This file defines expressions for string operations.
////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* An expression that concatenates multiple input strings into a single string.
* If any input is null, concat returns null.
*/
@ExpressionDescription(
usage = "_FUNC_(str1, str2, ..., strN) - Returns the concatenation of str1, str2, ..., strN.",
extended = """
Examples:
> SELECT _FUNC_('Spark', 'SQL');
SparkSQL
""")
case class Concat(children: Seq[Expression]) extends Expression with ImplicitCastInputTypes {
override def inputTypes: Seq[AbstractDataType] = Seq.fill(children.size)(StringType)
override def dataType: DataType = StringType
override def nullable: Boolean = children.exists(_.nullable)
override def foldable: Boolean = children.forall(_.foldable)
override def eval(input: InternalRow): Any = {
val inputs = children.map(_.eval(input).asInstanceOf[UTF8String])
UTF8String.concat(inputs : _*)
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val evals = children.map(_.genCode(ctx))
val inputs = evals.map { eval =>
s"${eval.isNull} ? null : ${eval.value}"
}.mkString(", ")
ev.copy(evals.map(_.code).mkString("\\n") + s"""
boolean ${ev.isNull} = false;
UTF8String ${ev.value} = UTF8String.concat($inputs);
if (${ev.value} == null) {
${ev.isNull} = true;
}
""")
}
}
/**
* An expression that concatenates multiple input strings or array of strings into a single string,
* using a given separator (the first child).
*
* Returns null if the separator is null. Otherwise, concat_ws skips all null values.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(sep, [str | array(str)]+) - Returns the concatenation of the strings separated by `sep`.",
extended = """
Examples:
> SELECT _FUNC_(' ', 'Spark', 'SQL');
Spark SQL
""")
// scalastyle:on line.size.limit
case class ConcatWs(children: Seq[Expression])
extends Expression with ImplicitCastInputTypes {
require(children.nonEmpty, s"$prettyName requires at least one argument.")
override def prettyName: String = "concat_ws"
  /** The 1st child (separator) is str, and the rest are either str or array of str. */
override def inputTypes: Seq[AbstractDataType] = {
val arrayOrStr = TypeCollection(ArrayType(StringType), StringType)
StringType +: Seq.fill(children.size - 1)(arrayOrStr)
}
override def dataType: DataType = StringType
override def nullable: Boolean = children.head.nullable
override def foldable: Boolean = children.forall(_.foldable)
override def eval(input: InternalRow): Any = {
val flatInputs = children.flatMap { child =>
child.eval(input) match {
case s: UTF8String => Iterator(s)
case arr: ArrayData => arr.toArray[UTF8String](StringType)
case null => Iterator(null.asInstanceOf[UTF8String])
}
}
UTF8String.concatWs(flatInputs.head, flatInputs.tail : _*)
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
if (children.forall(_.dataType == StringType)) {
// All children are strings. In that case we can construct a fixed size array.
val evals = children.map(_.genCode(ctx))
val inputs = evals.map { eval =>
s"${eval.isNull} ? (UTF8String) null : ${eval.value}"
}.mkString(", ")
ev.copy(evals.map(_.code).mkString("\\n") + s"""
UTF8String ${ev.value} = UTF8String.concatWs($inputs);
boolean ${ev.isNull} = ${ev.value} == null;
""")
} else {
val array = ctx.freshName("array")
val varargNum = ctx.freshName("varargNum")
val idxInVararg = ctx.freshName("idxInVararg")
val evals = children.map(_.genCode(ctx))
val (varargCount, varargBuild) = children.tail.zip(evals.tail).map { case (child, eval) =>
child.dataType match {
case StringType =>
("", // we count all the StringType arguments num at once below.
s"$array[$idxInVararg ++] = ${eval.isNull} ? (UTF8String) null : ${eval.value};")
case _: ArrayType =>
val size = ctx.freshName("n")
(s"""
if (!${eval.isNull}) {
$varargNum += ${eval.value}.numElements();
}
""",
s"""
if (!${eval.isNull}) {
final int $size = ${eval.value}.numElements();
for (int j = 0; j < $size; j ++) {
$array[$idxInVararg ++] = ${ctx.getValue(eval.value, StringType, "j")};
}
}
""")
}
}.unzip
ev.copy(evals.map(_.code).mkString("\\n") +
s"""
int $varargNum = ${children.count(_.dataType == StringType) - 1};
int $idxInVararg = 0;
${varargCount.mkString("\\n")}
UTF8String[] $array = new UTF8String[$varargNum];
${varargBuild.mkString("\\n")}
UTF8String ${ev.value} = UTF8String.concatWs(${evals.head.value}, $array);
boolean ${ev.isNull} = ${ev.value} == null;
""")
}
}
}
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(n, str1, str2, ...) - Returns the `n`-th string, e.g., returns `str2` when `n` is 2.",
extended = """
Examples:
> SELECT _FUNC_(1, 'scala', 'java');
scala
""")
// scalastyle:on line.size.limit
case class Elt(children: Seq[Expression])
extends Expression with ImplicitCastInputTypes {
private lazy val indexExpr = children.head
private lazy val stringExprs = children.tail.toArray
/** This expression is always nullable because it returns null if index is out of range. */
override def nullable: Boolean = true
override def dataType: DataType = StringType
override def inputTypes: Seq[DataType] = IntegerType +: Seq.fill(children.size - 1)(StringType)
override def checkInputDataTypes(): TypeCheckResult = {
if (children.size < 2) {
TypeCheckResult.TypeCheckFailure("elt function requires at least two arguments")
} else {
super[ImplicitCastInputTypes].checkInputDataTypes()
}
}
override def eval(input: InternalRow): Any = {
val indexObj = indexExpr.eval(input)
if (indexObj == null) {
null
} else {
val index = indexObj.asInstanceOf[Int]
if (index <= 0 || index > stringExprs.length) {
null
} else {
stringExprs(index - 1).eval(input)
}
}
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val index = indexExpr.genCode(ctx)
val strings = stringExprs.map(_.genCode(ctx))
val assignStringValue = strings.zipWithIndex.map { case (eval, index) =>
s"""
case ${index + 1}:
${ev.value} = ${eval.isNull} ? null : ${eval.value};
break;
"""
}.mkString("\\n")
val indexVal = ctx.freshName("index")
val stringArray = ctx.freshName("strings");
ev.copy(index.code + "\\n" + strings.map(_.code).mkString("\\n") + s"""
final int $indexVal = ${index.value};
UTF8String ${ev.value} = null;
switch ($indexVal) {
$assignStringValue
}
final boolean ${ev.isNull} = ${ev.value} == null;
""")
}
}
trait String2StringExpression extends ImplicitCastInputTypes {
self: UnaryExpression =>
def convert(v: UTF8String): UTF8String
override def dataType: DataType = StringType
override def inputTypes: Seq[DataType] = Seq(StringType)
protected override def nullSafeEval(input: Any): Any =
convert(input.asInstanceOf[UTF8String])
}
/**
* A function that converts the characters of a string to uppercase.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Returns `str` with all characters changed to uppercase.",
extended = """
Examples:
> SELECT _FUNC_('SparkSql');
SPARKSQL
""")
case class Upper(child: Expression)
extends UnaryExpression with String2StringExpression {
override def convert(v: UTF8String): UTF8String = v.toUpperCase
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, c => s"($c).toUpperCase()")
}
}
/**
* A function that converts the characters of a string to lowercase.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Returns `str` with all characters changed to lowercase.",
extended = """
Examples:
> SELECT _FUNC_('SparkSql');
sparksql
""")
case class Lower(child: Expression) extends UnaryExpression with String2StringExpression {
override def convert(v: UTF8String): UTF8String = v.toLowerCase
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, c => s"($c).toLowerCase()")
}
}
/** A base trait for functions that compare two strings, returning a boolean. */
abstract class StringPredicate extends BinaryExpression
with Predicate with ImplicitCastInputTypes with NullIntolerant {
def compare(l: UTF8String, r: UTF8String): Boolean
override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
protected override def nullSafeEval(input1: Any, input2: Any): Any =
compare(input1.asInstanceOf[UTF8String], input2.asInstanceOf[UTF8String])
override def toString: String = s"$nodeName($left, $right)"
}
/**
* A function that returns true if the string `left` contains the string `right`.
*/
case class Contains(left: Expression, right: Expression) extends StringPredicate {
override def compare(l: UTF8String, r: UTF8String): Boolean = l.contains(r)
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, (c1, c2) => s"($c1).contains($c2)")
}
}
/**
* A function that returns true if the string `left` starts with the string `right`.
*/
case class StartsWith(left: Expression, right: Expression) extends StringPredicate {
override def compare(l: UTF8String, r: UTF8String): Boolean = l.startsWith(r)
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, (c1, c2) => s"($c1).startsWith($c2)")
}
}
/**
* A function that returns true if the string `left` ends with the string `right`.
*/
case class EndsWith(left: Expression, right: Expression) extends StringPredicate {
override def compare(l: UTF8String, r: UTF8String): Boolean = l.endsWith(r)
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, (c1, c2) => s"($c1).endsWith($c2)")
}
}
object StringTranslate {
def buildDict(matchingString: UTF8String, replaceString: UTF8String)
: JMap[Character, Character] = {
val matching = matchingString.toString()
val replace = replaceString.toString()
val dict = new HashMap[Character, Character]()
var i = 0
while (i < matching.length()) {
val rep = if (i < replace.length()) replace.charAt(i) else '\\u0000'
if (null == dict.get(matching.charAt(i))) {
dict.put(matching.charAt(i), rep)
}
i += 1
}
dict
}
}
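// Worked illustration of buildDict (not part of the original source): for
// buildDict("abc", "12") the resulting map is {a -> '1', b -> '2', c -> '\\u0000'};
// matching characters beyond the length of the replace string map to '\\u0000',
// which UTF8String.translate effectively treats as removing that character.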
/**
 * A function that translates any character in `srcExpr` that matches a character in
 * `matchingExpr` into the corresponding character in `replaceExpr`. The characters in
 * `replaceExpr` correspond positionally to the characters in `matchingExpr`.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(input, from, to) - Translates the `input` string by replacing the characters present in the `from` string with the corresponding characters in the `to` string.",
extended = """
Examples:
> SELECT _FUNC_('AaBbCc', 'abc', '123');
A1B2C3
""")
// scalastyle:on line.size.limit
case class StringTranslate(srcExpr: Expression, matchingExpr: Expression, replaceExpr: Expression)
extends TernaryExpression with ImplicitCastInputTypes {
@transient private var lastMatching: UTF8String = _
@transient private var lastReplace: UTF8String = _
@transient private var dict: JMap[Character, Character] = _
override def nullSafeEval(srcEval: Any, matchingEval: Any, replaceEval: Any): Any = {
if (matchingEval != lastMatching || replaceEval != lastReplace) {
lastMatching = matchingEval.asInstanceOf[UTF8String].clone()
lastReplace = replaceEval.asInstanceOf[UTF8String].clone()
dict = StringTranslate.buildDict(lastMatching, lastReplace)
}
srcEval.asInstanceOf[UTF8String].translate(dict)
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val termLastMatching = ctx.freshName("lastMatching")
val termLastReplace = ctx.freshName("lastReplace")
val termDict = ctx.freshName("dict")
val classNameDict = classOf[JMap[Character, Character]].getCanonicalName
ctx.addMutableState("UTF8String", termLastMatching, s"$termLastMatching = null;")
ctx.addMutableState("UTF8String", termLastReplace, s"$termLastReplace = null;")
ctx.addMutableState(classNameDict, termDict, s"$termDict = null;")
nullSafeCodeGen(ctx, ev, (src, matching, replace) => {
val check = if (matchingExpr.foldable && replaceExpr.foldable) {
s"$termDict == null"
} else {
s"!$matching.equals($termLastMatching) || !$replace.equals($termLastReplace)"
}
s"""if ($check) {
          // Not all of them are literals, or the matching/replace value has changed
$termLastMatching = $matching.clone();
$termLastReplace = $replace.clone();
$termDict = org.apache.spark.sql.catalyst.expressions.StringTranslate
.buildDict($termLastMatching, $termLastReplace);
}
${ev.value} = $src.translate($termDict);
"""
})
}
override def dataType: DataType = StringType
override def inputTypes: Seq[DataType] = Seq(StringType, StringType, StringType)
override def children: Seq[Expression] = srcExpr :: matchingExpr :: replaceExpr :: Nil
override def prettyName: String = "translate"
}
/**
* A function that returns the index (1-based) of the given string (left) in the comma-
 * delimited list (right). Returns 0 if the string was not found or if the given
* string (left) contains a comma.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = """
_FUNC_(str, str_array) - Returns the index (1-based) of the given string (`str`) in the comma-delimited list (`str_array`).
Returns 0, if the string was not found or if the given string (`str`) contains a comma.
""",
extended = """
Examples:
> SELECT _FUNC_('ab','abc,b,ab,c,def');
3
""")
// scalastyle:on
case class FindInSet(left: Expression, right: Expression) extends BinaryExpression
with ImplicitCastInputTypes {
override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType)
override protected def nullSafeEval(word: Any, set: Any): Any =
set.asInstanceOf[UTF8String].findInSet(word.asInstanceOf[UTF8String])
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
nullSafeCodeGen(ctx, ev, (word, set) =>
s"${ev.value} = $set.findInSet($word);"
)
}
override def dataType: DataType = IntegerType
override def prettyName: String = "find_in_set"
}
/**
 * A function that trims the spaces from both ends of the specified string.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Removes the leading and trailing space characters from `str`.",
extended = """
Examples:
> SELECT _FUNC_(' SparkSQL ');
SparkSQL
""")
case class StringTrim(child: Expression)
extends UnaryExpression with String2StringExpression {
def convert(v: UTF8String): UTF8String = v.trim()
override def prettyName: String = "trim"
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, c => s"($c).trim()")
}
}
/**
 * A function that trims the spaces from the left end of the given string.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Removes the leading and trailing space characters from `str`.",
extended = """
Examples:
> SELECT _FUNC_(' SparkSQL');
SparkSQL
""")
case class StringTrimLeft(child: Expression)
extends UnaryExpression with String2StringExpression {
def convert(v: UTF8String): UTF8String = v.trimLeft()
override def prettyName: String = "ltrim"
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, c => s"($c).trimLeft()")
}
}
/**
 * A function that trims the spaces from the right end of the given string.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Removes the trailing space characters from `str`.",
extended = """
Examples:
> SELECT _FUNC_(' SparkSQL ');
SparkSQL
""")
case class StringTrimRight(child: Expression)
extends UnaryExpression with String2StringExpression {
def convert(v: UTF8String): UTF8String = v.trimRight()
override def prettyName: String = "rtrim"
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, c => s"($c).trimRight()")
}
}
/**
* A function that returns the position of the first occurrence of substr in the given string.
* Returns null if either of the arguments are null and
* returns 0 if substr could not be found in str.
*
 * NOTE: this is not a zero-based, but a 1-based index. The first character in str has index 1.
*/
@ExpressionDescription(
usage = "_FUNC_(str, substr) - Returns the (1-based) index of the first occurrence of `substr` in `str`.",
extended = """
Examples:
> SELECT _FUNC_('SparkSQL', 'SQL');
6
""")
case class StringInstr(str: Expression, substr: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
override def left: Expression = str
override def right: Expression = substr
override def dataType: DataType = IntegerType
override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
override def nullSafeEval(string: Any, sub: Any): Any = {
string.asInstanceOf[UTF8String].indexOf(sub.asInstanceOf[UTF8String], 0) + 1
}
override def prettyName: String = "instr"
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, (l, r) =>
s"($l).indexOf($r, 0) + 1")
}
}
/**
 * Returns the substring from string str before count occurrences of the delimiter delim.
 * If count is positive, everything to the left of the final delimiter (counting from the left)
 * is returned. If count is negative, everything to the right of the final delimiter (counting
 * from the right) is returned. substring_index performs a case-sensitive match when searching for delim.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = """
_FUNC_(str, delim, count) - Returns the substring from `str` before `count` occurrences of the delimiter `delim`.
If `count` is positive, everything to the left of the final delimiter (counting from the
left) is returned. If `count` is negative, everything to the right of the final delimiter
(counting from the right) is returned. The function substring_index performs a case-sensitive match
when searching for `delim`.
""",
extended = """
Examples:
> SELECT _FUNC_('www.apache.org', '.', 2);
www.apache
""")
// scalastyle:on line.size.limit
case class SubstringIndex(strExpr: Expression, delimExpr: Expression, countExpr: Expression)
extends TernaryExpression with ImplicitCastInputTypes {
override def dataType: DataType = StringType
override def inputTypes: Seq[DataType] = Seq(StringType, StringType, IntegerType)
override def children: Seq[Expression] = Seq(strExpr, delimExpr, countExpr)
override def prettyName: String = "substring_index"
override def nullSafeEval(str: Any, delim: Any, count: Any): Any = {
str.asInstanceOf[UTF8String].subStringIndex(
delim.asInstanceOf[UTF8String],
count.asInstanceOf[Int])
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, (str, delim, count) => s"$str.subStringIndex($delim, $count)")
}
}
/**
* A function that returns the position of the first occurrence of substr
* in given string after position pos.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = """
_FUNC_(substr, str[, pos]) - Returns the position of the first occurrence of `substr` in `str` after position `pos`.
The given `pos` and return value are 1-based.
""",
extended = """
Examples:
> SELECT _FUNC_('bar', 'foobarbar', 5);
7
""")
// scalastyle:on line.size.limit
case class StringLocate(substr: Expression, str: Expression, start: Expression)
extends TernaryExpression with ImplicitCastInputTypes {
def this(substr: Expression, str: Expression) = {
this(substr, str, Literal(1))
}
override def children: Seq[Expression] = substr :: str :: start :: Nil
override def nullable: Boolean = substr.nullable || str.nullable
override def dataType: DataType = IntegerType
override def inputTypes: Seq[DataType] = Seq(StringType, StringType, IntegerType)
override def eval(input: InternalRow): Any = {
val s = start.eval(input)
if (s == null) {
      // if the start position is null, we return 0 (to conform to Hive)
0
} else {
val r = substr.eval(input)
if (r == null) {
null
} else {
val l = str.eval(input)
if (l == null) {
null
} else {
val sVal = s.asInstanceOf[Int]
if (sVal < 1) {
0
} else {
l.asInstanceOf[UTF8String].indexOf(
r.asInstanceOf[UTF8String],
s.asInstanceOf[Int] - 1) + 1
}
}
}
}
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val substrGen = substr.genCode(ctx)
val strGen = str.genCode(ctx)
val startGen = start.genCode(ctx)
ev.copy(code = s"""
int ${ev.value} = 0;
boolean ${ev.isNull} = false;
${startGen.code}
if (!${startGen.isNull}) {
${substrGen.code}
if (!${substrGen.isNull}) {
${strGen.code}
if (!${strGen.isNull}) {
if (${startGen.value} > 0) {
${ev.value} = ${strGen.value}.indexOf(${substrGen.value},
${startGen.value} - 1) + 1;
}
} else {
${ev.isNull} = true;
}
} else {
${ev.isNull} = true;
}
}
""")
}
override def prettyName: String = "locate"
}
/**
* Returns str, left-padded with pad to a length of len.
*/
@ExpressionDescription(
usage = """
_FUNC_(str, len, pad) - Returns `str`, left-padded with `pad` to a length of `len`.
If `str` is longer than `len`, the return value is shortened to `len` characters.
""",
extended = """
Examples:
> SELECT _FUNC_('hi', 5, '??');
???hi
> SELECT _FUNC_('hi', 1, '??');
h
""")
case class StringLPad(str: Expression, len: Expression, pad: Expression)
extends TernaryExpression with ImplicitCastInputTypes {
override def children: Seq[Expression] = str :: len :: pad :: Nil
override def dataType: DataType = StringType
override def inputTypes: Seq[DataType] = Seq(StringType, IntegerType, StringType)
override def nullSafeEval(str: Any, len: Any, pad: Any): Any = {
str.asInstanceOf[UTF8String].lpad(len.asInstanceOf[Int], pad.asInstanceOf[UTF8String])
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, (str, len, pad) => s"$str.lpad($len, $pad)")
}
override def prettyName: String = "lpad"
}
/**
* Returns str, right-padded with pad to a length of len.
*/
@ExpressionDescription(
usage = """
_FUNC_(str, len, pad) - Returns `str`, right-padded with `pad` to a length of `len`.
If `str` is longer than `len`, the return value is shortened to `len` characters.
""",
extended = """
Examples:
> SELECT _FUNC_('hi', 5, '??');
hi???
> SELECT _FUNC_('hi', 1, '??');
h
""")
case class StringRPad(str: Expression, len: Expression, pad: Expression)
extends TernaryExpression with ImplicitCastInputTypes {
override def children: Seq[Expression] = str :: len :: pad :: Nil
override def dataType: DataType = StringType
override def inputTypes: Seq[DataType] = Seq(StringType, IntegerType, StringType)
override def nullSafeEval(str: Any, len: Any, pad: Any): Any = {
str.asInstanceOf[UTF8String].rpad(len.asInstanceOf[Int], pad.asInstanceOf[UTF8String])
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, (str, len, pad) => s"$str.rpad($len, $pad)")
}
override def prettyName: String = "rpad"
}
object ParseUrl {
private val HOST = UTF8String.fromString("HOST")
private val PATH = UTF8String.fromString("PATH")
private val QUERY = UTF8String.fromString("QUERY")
private val REF = UTF8String.fromString("REF")
private val PROTOCOL = UTF8String.fromString("PROTOCOL")
private val FILE = UTF8String.fromString("FILE")
private val AUTHORITY = UTF8String.fromString("AUTHORITY")
private val USERINFO = UTF8String.fromString("USERINFO")
private val REGEXPREFIX = "(&|^)"
private val REGEXSUBFIX = "=([^&]*)"
}
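// Illustration (not part of the original source): for a key such as "query" the
// pattern built by getPattern below is "(&|^)query=([^&]*)"; extractValueFromQuery
// then returns capture group 2, i.e. the text following "query=" up to the next '&'.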
/**
* Extracts a part from a URL
*/
@ExpressionDescription(
usage = "_FUNC_(url, partToExtract[, key]) - Extracts a part from a URL.",
extended = """
Examples:
> SELECT _FUNC_('http://spark.apache.org/path?query=1', 'HOST')
spark.apache.org
> SELECT _FUNC_('http://spark.apache.org/path?query=1', 'QUERY')
query=1
> SELECT _FUNC_('http://spark.apache.org/path?query=1', 'QUERY', 'query')
1
""")
case class ParseUrl(children: Seq[Expression])
extends Expression with ExpectsInputTypes with CodegenFallback {
override def nullable: Boolean = true
override def inputTypes: Seq[DataType] = Seq.fill(children.size)(StringType)
override def dataType: DataType = StringType
override def prettyName: String = "parse_url"
// If the url is a constant, cache the URL object so that we don't need to convert url
// from UTF8String to String to URL for every row.
@transient private lazy val cachedUrl = children(0) match {
case Literal(url: UTF8String, _) if url ne null => getUrl(url)
case _ => null
}
// If the key is a constant, cache the Pattern object so that we don't need to convert key
// from UTF8String to String to StringBuilder to String to Pattern for every row.
@transient private lazy val cachedPattern = children(2) match {
case Literal(key: UTF8String, _) if key ne null => getPattern(key)
case _ => null
}
// If the partToExtract is a constant, cache the Extract part function so that we don't need
// to check the partToExtract for every row.
@transient private lazy val cachedExtractPartFunc = children(1) match {
case Literal(part: UTF8String, _) => getExtractPartFunc(part)
case _ => null
}
import ParseUrl._
override def checkInputDataTypes(): TypeCheckResult = {
if (children.size > 3 || children.size < 2) {
TypeCheckResult.TypeCheckFailure(s"$prettyName function requires two or three arguments")
} else {
super[ExpectsInputTypes].checkInputDataTypes()
}
}
private def getPattern(key: UTF8String): Pattern = {
Pattern.compile(REGEXPREFIX + key.toString + REGEXSUBFIX)
}
private def getUrl(url: UTF8String): URI = {
try {
new URI(url.toString)
} catch {
case e: URISyntaxException => null
}
}
private def getExtractPartFunc(partToExtract: UTF8String): URI => String = {
// partToExtract match {
// case HOST => _.toURL().getHost
// case PATH => _.toURL().getPath
// case QUERY => _.toURL().getQuery
// case REF => _.toURL().getRef
// case PROTOCOL => _.toURL().getProtocol
// case FILE => _.toURL().getFile
// case AUTHORITY => _.toURL().getAuthority
// case USERINFO => _.toURL().getUserInfo
// case _ => (url: URI) => null
// }
partToExtract match {
case HOST => _.getHost
case PATH => _.getRawPath
case QUERY => _.getRawQuery
case REF => _.getRawFragment
case PROTOCOL => _.getScheme
case FILE =>
(url: URI) =>
if (url.getRawQuery ne null) {
url.getRawPath + "?" + url.getRawQuery
} else {
url.getRawPath
}
case AUTHORITY => _.getRawAuthority
case USERINFO => _.getRawUserInfo
case _ => (url: URI) => null
}
}
private def extractValueFromQuery(query: UTF8String, pattern: Pattern): UTF8String = {
val m = pattern.matcher(query.toString)
if (m.find()) {
UTF8String.fromString(m.group(2))
} else {
null
}
}
private def extractFromUrl(url: URI, partToExtract: UTF8String): UTF8String = {
if (cachedExtractPartFunc ne null) {
UTF8String.fromString(cachedExtractPartFunc.apply(url))
} else {
UTF8String.fromString(getExtractPartFunc(partToExtract).apply(url))
}
}
private def parseUrlWithoutKey(url: UTF8String, partToExtract: UTF8String): UTF8String = {
if (cachedUrl ne null) {
extractFromUrl(cachedUrl, partToExtract)
} else {
val currentUrl = getUrl(url)
if (currentUrl ne null) {
extractFromUrl(currentUrl, partToExtract)
} else {
null
}
}
}
override def eval(input: InternalRow): Any = {
val evaluated = children.map{e => e.eval(input).asInstanceOf[UTF8String]}
if (evaluated.contains(null)) return null
if (evaluated.size == 2) {
parseUrlWithoutKey(evaluated(0), evaluated(1))
} else {
// 3-arg, i.e. QUERY with key
assert(evaluated.size == 3)
if (evaluated(1) != QUERY) {
return null
}
val query = parseUrlWithoutKey(evaluated(0), evaluated(1))
if (query eq null) {
return null
}
if (cachedPattern ne null) {
extractValueFromQuery(query, cachedPattern)
} else {
extractValueFromQuery(query, getPattern(evaluated(2)))
}
}
}
}
/**
 * Returns the input formatted according to printf-style format strings
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(strfmt, obj, ...) - Returns a formatted string from printf-style format strings.",
extended = """
Examples:
> SELECT _FUNC_("Hello World %d %s", 100, "days");
Hello World 100 days
""")
// scalastyle:on line.size.limit
case class FormatString(children: Expression*) extends Expression with ImplicitCastInputTypes {
require(children.nonEmpty, "format_string() should take at least 1 argument")
override def foldable: Boolean = children.forall(_.foldable)
override def nullable: Boolean = children(0).nullable
override def dataType: DataType = StringType
override def inputTypes: Seq[AbstractDataType] =
StringType :: List.fill(children.size - 1)(AnyDataType)
override def eval(input: InternalRow): Any = {
val pattern = children(0).eval(input)
if (pattern == null) {
null
} else {
val sb = new StringBuffer()
val formatter = new java.util.Formatter(sb, Locale.US)
val arglist = children.tail.map(_.eval(input).asInstanceOf[AnyRef])
formatter.format(pattern.asInstanceOf[UTF8String].toString, arglist: _*)
UTF8String.fromString(sb.toString)
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val pattern = children.head.genCode(ctx)
val argListGen = children.tail.map(x => (x.dataType, x.genCode(ctx)))
val argListCode = argListGen.map(_._2.code + "\\n")
val argListString = argListGen.foldLeft("")((s, v) => {
val nullSafeString =
if (ctx.boxedType(v._1) != ctx.javaType(v._1)) {
// Java primitives get boxed in order to allow null values.
s"(${v._2.isNull}) ? (${ctx.boxedType(v._1)}) null : " +
s"new ${ctx.boxedType(v._1)}(${v._2.value})"
} else {
s"(${v._2.isNull}) ? null : ${v._2.value}"
}
s + "," + nullSafeString
})
val form = ctx.freshName("formatter")
val formatter = classOf[java.util.Formatter].getName
val sb = ctx.freshName("sb")
val stringBuffer = classOf[StringBuffer].getName
ev.copy(code = s"""
${pattern.code}
boolean ${ev.isNull} = ${pattern.isNull};
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
${argListCode.mkString}
$stringBuffer $sb = new $stringBuffer();
$formatter $form = new $formatter($sb, ${classOf[Locale].getName}.US);
$form.format(${pattern.value}.toString() $argListString);
${ev.value} = UTF8String.fromString($sb.toString());
}""")
}
override def prettyName: String = "format_string"
}
/**
 * Returns the given string with the first letter of each word in uppercase and all other letters in lowercase.
* Words are delimited by whitespace.
*/
@ExpressionDescription(
usage = """
_FUNC_(str) - Returns `str` with the first letter of each word in uppercase.
All other letters are in lowercase. Words are delimited by white space.
""",
extended = """
Examples:
    > SELECT _FUNC_('sPark sql');
Spark Sql
""")
case class InitCap(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
override def inputTypes: Seq[DataType] = Seq(StringType)
override def dataType: DataType = StringType
override def nullSafeEval(string: Any): Any = {
string.asInstanceOf[UTF8String].toLowerCase.toTitleCase
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, str => s"$str.toLowerCase().toTitleCase()")
}
}
/**
 * Returns the string which repeats the given string value n times.
*/
@ExpressionDescription(
usage = "_FUNC_(str, n) - Returns the string which repeats the given string value n times.",
extended = """
Examples:
> SELECT _FUNC_('123', 2);
123123
""")
case class StringRepeat(str: Expression, times: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
override def left: Expression = str
override def right: Expression = times
override def dataType: DataType = StringType
override def inputTypes: Seq[DataType] = Seq(StringType, IntegerType)
override def nullSafeEval(string: Any, n: Any): Any = {
string.asInstanceOf[UTF8String].repeat(n.asInstanceOf[Integer])
}
override def prettyName: String = "repeat"
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, (l, r) => s"($l).repeat($r)")
}
}
/**
 * Returns the given string reversed.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Returns the reversed given string.",
extended = """
Examples:
> SELECT _FUNC_('Spark SQL');
LQS krapS
""")
case class StringReverse(child: Expression) extends UnaryExpression with String2StringExpression {
override def convert(v: UTF8String): UTF8String = v.reverse()
override def prettyName: String = "reverse"
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, c => s"($c).reverse()")
}
}
/**
* Returns a string consisting of n spaces.
*/
@ExpressionDescription(
usage = "_FUNC_(n) - Returns a string consisting of `n` spaces.",
extended = """
Examples:
> SELECT concat(_FUNC_(2), '1');
1
""")
case class StringSpace(child: Expression)
extends UnaryExpression with ImplicitCastInputTypes {
override def dataType: DataType = StringType
override def inputTypes: Seq[DataType] = Seq(IntegerType)
override def nullSafeEval(s: Any): Any = {
val length = s.asInstanceOf[Int]
UTF8String.blankString(if (length < 0) 0 else length)
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
nullSafeCodeGen(ctx, ev, (length) =>
s"""${ev.value} = UTF8String.blankString(($length < 0) ? 0 : $length);""")
}
override def prettyName: String = "space"
}
/**
* A function that takes a substring of its first argument starting at a given position.
* Defined for String and Binary types.
*
 * NOTE: this is not a zero-based, but a 1-based index. The first character in str has index 1.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(str, pos[, len]) - Returns the substring of `str` that starts at `pos` and is of length `len`, or the slice of byte array that starts at `pos` and is of length `len`.",
extended = """
Examples:
> SELECT _FUNC_('Spark SQL', 5);
k SQL
> SELECT _FUNC_('Spark SQL', -3);
SQL
> SELECT _FUNC_('Spark SQL', 5, 1);
k
""")
// scalastyle:on line.size.limit
case class Substring(str: Expression, pos: Expression, len: Expression)
extends TernaryExpression with ImplicitCastInputTypes with NullIntolerant {
def this(str: Expression, pos: Expression) = {
this(str, pos, Literal(Integer.MAX_VALUE))
}
override def dataType: DataType = str.dataType
override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(StringType, BinaryType), IntegerType, IntegerType)
override def children: Seq[Expression] = str :: pos :: len :: Nil
override def nullSafeEval(string: Any, pos: Any, len: Any): Any = {
str.dataType match {
case StringType => string.asInstanceOf[UTF8String]
.substringSQL(pos.asInstanceOf[Int], len.asInstanceOf[Int])
case BinaryType => ByteArray.subStringSQL(string.asInstanceOf[Array[Byte]],
pos.asInstanceOf[Int], len.asInstanceOf[Int])
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, (string, pos, len) => {
str.dataType match {
case StringType => s"$string.substringSQL($pos, $len)"
case BinaryType => s"${classOf[ByteArray].getName}.subStringSQL($string, $pos, $len)"
}
})
}
}
/**
 * A function that returns the length of the given string or binary expression.
*/
@ExpressionDescription(
usage = "_FUNC_(expr) - Returns the length of `expr` or number of bytes in binary data.",
extended = """
Examples:
> SELECT _FUNC_('Spark SQL');
9
""")
case class Length(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
override def dataType: DataType = IntegerType
override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection(StringType, BinaryType))
protected override def nullSafeEval(value: Any): Any = child.dataType match {
case StringType => value.asInstanceOf[UTF8String].numChars
case BinaryType => value.asInstanceOf[Array[Byte]].length
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
child.dataType match {
case StringType => defineCodeGen(ctx, ev, c => s"($c).numChars()")
case BinaryType => defineCodeGen(ctx, ev, c => s"($c).length")
}
}
}
/**
 * A function that returns the Levenshtein distance between the two given strings.
*/
@ExpressionDescription(
usage = "_FUNC_(str1, str2) - Returns the Levenshtein distance between the two given strings.",
extended = """
Examples:
> SELECT _FUNC_('kitten', 'sitting');
3
""")
case class Levenshtein(left: Expression, right: Expression) extends BinaryExpression
with ImplicitCastInputTypes {
override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType)
override def dataType: DataType = IntegerType
protected override def nullSafeEval(leftValue: Any, rightValue: Any): Any =
leftValue.asInstanceOf[UTF8String].levenshteinDistance(rightValue.asInstanceOf[UTF8String])
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
nullSafeCodeGen(ctx, ev, (left, right) =>
s"${ev.value} = $left.levenshteinDistance($right);")
}
}
/**
 * A function that returns the Soundex code of the given string expression.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Returns Soundex code of the string.",
extended = """
Examples:
> SELECT _FUNC_('Miller');
M460
""")
case class SoundEx(child: Expression) extends UnaryExpression with ExpectsInputTypes {
override def dataType: DataType = StringType
override def inputTypes: Seq[DataType] = Seq(StringType)
override def nullSafeEval(input: Any): Any = input.asInstanceOf[UTF8String].soundex()
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, c => s"$c.soundex()")
}
}
/**
* Returns the numeric value of the first character of str.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Returns the numeric value of the first character of `str`.",
extended = """
Examples:
> SELECT _FUNC_('222');
50
> SELECT _FUNC_(2);
50
""")
case class Ascii(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
override def dataType: DataType = IntegerType
override def inputTypes: Seq[DataType] = Seq(StringType)
protected override def nullSafeEval(string: Any): Any = {
val bytes = string.asInstanceOf[UTF8String].getBytes
if (bytes.length > 0) {
bytes(0).asInstanceOf[Int]
} else {
0
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
nullSafeCodeGen(ctx, ev, (child) => {
val bytes = ctx.freshName("bytes")
s"""
byte[] $bytes = $child.getBytes();
if ($bytes.length > 0) {
${ev.value} = (int) $bytes[0];
} else {
${ev.value} = 0;
}
"""})
}
}
/**
* Converts the argument from binary to a base 64 string.
*/
@ExpressionDescription(
usage = "_FUNC_(bin) - Converts the argument from a binary `bin` to a base 64 string.",
extended = """
Examples:
> SELECT _FUNC_('Spark SQL');
U3BhcmsgU1FM
""")
case class Base64(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
override def dataType: DataType = StringType
override def inputTypes: Seq[DataType] = Seq(BinaryType)
protected override def nullSafeEval(bytes: Any): Any = {
UTF8String.fromBytes(
org.apache.commons.codec.binary.Base64.encodeBase64(
bytes.asInstanceOf[Array[Byte]]))
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
nullSafeCodeGen(ctx, ev, (child) => {
s"""${ev.value} = UTF8String.fromBytes(
org.apache.commons.codec.binary.Base64.encodeBase64($child));
"""})
}
}
/**
* Converts the argument from a base 64 string to BINARY.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Converts the argument from a base 64 string `str` to a binary.",
extended = """
Examples:
> SELECT _FUNC_('U3BhcmsgU1FM');
Spark SQL
""")
case class UnBase64(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
override def dataType: DataType = BinaryType
override def inputTypes: Seq[DataType] = Seq(StringType)
protected override def nullSafeEval(string: Any): Any =
org.apache.commons.codec.binary.Base64.decodeBase64(string.asInstanceOf[UTF8String].toString)
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
nullSafeCodeGen(ctx, ev, (child) => {
s"""
${ev.value} = org.apache.commons.codec.binary.Base64.decodeBase64($child.toString());
"""})
}
}
/**
* Decodes the first argument into a String using the provided character set
* (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16').
* If either argument is null, the result will also be null.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(bin, charset) - Decodes the first argument using the second argument character set.",
extended = """
Examples:
> SELECT _FUNC_(encode('abc', 'utf-8'), 'utf-8');
abc
""")
// scalastyle:on line.size.limit
case class Decode(bin: Expression, charset: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
override def left: Expression = bin
override def right: Expression = charset
override def dataType: DataType = StringType
override def inputTypes: Seq[DataType] = Seq(BinaryType, StringType)
protected override def nullSafeEval(input1: Any, input2: Any): Any = {
val fromCharset = input2.asInstanceOf[UTF8String].toString
UTF8String.fromString(new String(input1.asInstanceOf[Array[Byte]], fromCharset))
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
nullSafeCodeGen(ctx, ev, (bytes, charset) =>
s"""
try {
${ev.value} = UTF8String.fromString(new String($bytes, $charset.toString()));
} catch (java.io.UnsupportedEncodingException e) {
org.apache.spark.unsafe.Platform.throwException(e);
}
""")
}
}
/**
* Encodes the first argument into a BINARY using the provided character set
* (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16').
* If either argument is null, the result will also be null.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(str, charset) - Encodes the first argument using the second argument character set.",
extended = """
Examples:
> SELECT _FUNC_('abc', 'utf-8');
abc
""")
// scalastyle:on line.size.limit
case class Encode(value: Expression, charset: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
override def left: Expression = value
override def right: Expression = charset
override def dataType: DataType = BinaryType
override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
protected override def nullSafeEval(input1: Any, input2: Any): Any = {
val toCharset = input2.asInstanceOf[UTF8String].toString
input1.asInstanceOf[UTF8String].toString.getBytes(toCharset)
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
nullSafeCodeGen(ctx, ev, (string, charset) =>
s"""
try {
${ev.value} = $string.toString().getBytes($charset.toString());
} catch (java.io.UnsupportedEncodingException e) {
org.apache.spark.unsafe.Platform.throwException(e);
}""")
}
}
/**
* Formats the number X to a format like '#,###,###.##', rounded to D decimal places,
* and returns the result as a string. If D is 0, the result has no decimal point or
* fractional part.
*/
@ExpressionDescription(
usage = """
_FUNC_(expr1, expr2) - Formats the number `expr1` like '#,###,###.##', rounded to `expr2`
decimal places. If `expr2` is 0, the result has no decimal point or fractional part.
This is supposed to function like MySQL's FORMAT.
""",
extended = """
Examples:
> SELECT _FUNC_(12332.123456, 4);
12,332.1235
""")
case class FormatNumber(x: Expression, d: Expression)
extends BinaryExpression with ExpectsInputTypes {
override def left: Expression = x
override def right: Expression = d
override def dataType: DataType = StringType
override def nullable: Boolean = true
override def inputTypes: Seq[AbstractDataType] = Seq(NumericType, IntegerType)
  // The last d value associated with the pattern; we rebuild the pattern (DecimalFormat)
  // whenever the incoming d value differs from the last one.
// This is an Option to distinguish between 0 (numberFormat is valid) and uninitialized after
// serialization (numberFormat has not been updated for dValue = 0).
@transient
private var lastDValue: Option[Int] = None
  // A cached DecimalFormat; for performance, we change it
  // only if the d value changes.
@transient
private lazy val pattern: StringBuffer = new StringBuffer()
// SPARK-13515: US Locale configures the DecimalFormat object to use a dot ('.')
// as a decimal separator.
@transient
private lazy val numberFormat = new DecimalFormat("", new DecimalFormatSymbols(Locale.US))
override protected def nullSafeEval(xObject: Any, dObject: Any): Any = {
val dValue = dObject.asInstanceOf[Int]
if (dValue < 0) {
return null
}
lastDValue match {
case Some(last) if last == dValue =>
// use the current pattern
case _ =>
// construct a new DecimalFormat only if a new dValue
pattern.delete(0, pattern.length)
pattern.append("#,###,###,###,###,###,##0")
// decimal place
if (dValue > 0) {
pattern.append(".")
var i = 0
while (i < dValue) {
i += 1
pattern.append("0")
}
}
lastDValue = Some(dValue)
numberFormat.applyLocalizedPattern(pattern.toString)
}
x.dataType match {
case ByteType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Byte]))
case ShortType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Short]))
case FloatType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Float]))
case IntegerType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Int]))
case LongType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Long]))
case DoubleType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Double]))
case _: DecimalType =>
UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Decimal].toJavaBigDecimal))
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
nullSafeCodeGen(ctx, ev, (num, d) => {
def typeHelper(p: String): String = {
x.dataType match {
case _ : DecimalType => s"""$p.toJavaBigDecimal()"""
case _ => s"$p"
}
}
val sb = classOf[StringBuffer].getName
val df = classOf[DecimalFormat].getName
val dfs = classOf[DecimalFormatSymbols].getName
val l = classOf[Locale].getName
// SPARK-13515: US Locale configures the DecimalFormat object to use a dot ('.')
// as a decimal separator.
val usLocale = "US"
val lastDValue = ctx.freshName("lastDValue")
val pattern = ctx.freshName("pattern")
val numberFormat = ctx.freshName("numberFormat")
val i = ctx.freshName("i")
val dFormat = ctx.freshName("dFormat")
ctx.addMutableState("int", lastDValue, s"$lastDValue = -100;")
ctx.addMutableState(sb, pattern, s"$pattern = new $sb();")
ctx.addMutableState(df, numberFormat,
s"""$numberFormat = new $df("", new $dfs($l.$usLocale));""")
s"""
if ($d >= 0) {
$pattern.delete(0, $pattern.length());
if ($d != $lastDValue) {
$pattern.append("#,###,###,###,###,###,##0");
if ($d > 0) {
$pattern.append(".");
for (int $i = 0; $i < $d; $i++) {
$pattern.append("0");
}
}
$lastDValue = $d;
$numberFormat.applyLocalizedPattern($pattern.toString());
}
${ev.value} = UTF8String.fromString($numberFormat.format(${typeHelper(num)}));
} else {
${ev.value} = null;
${ev.isNull} = true;
}
"""
})
}
override def prettyName: String = "format_number"
}
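/*
 * A rough standalone sketch of the pattern-caching idea used above, assuming only the JDK
 * (the names below are illustrative, not part of Spark):
 *
 *   import java.text.{DecimalFormat, DecimalFormatSymbols}
 *   import java.util.Locale
 *
 *   object FormatNumberSketch {
 *     private var lastD = -1
 *     private val nf = new DecimalFormat("", new DecimalFormatSymbols(Locale.US))
 *     def format(x: Double, d: Int): String = {
 *       require(d >= 0)
 *       if (d != lastD) {                 // rebuild the pattern only when d changes
 *         nf.applyLocalizedPattern("#,###,##0" + (if (d > 0) "." + "0" * d else ""))
 *         lastD = d
 *       }
 *       nf.format(x)                      // e.g. format(12332.123456, 4) == "12,332.1235"
 *     }
 *   }
 */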
/**
* Splits a string into arrays of sentences, where each sentence is an array of words.
* The 'lang' and 'country' arguments are optional, and if omitted, the default locale is used.
*/
@ExpressionDescription(
usage = "_FUNC_(str[, lang, country]) - Splits `str` into an array of array of words.",
extended = """
Examples:
> SELECT _FUNC_('Hi there! Good morning.');
[["Hi","there"],["Good","morning"]]
""")
case class Sentences(
str: Expression,
language: Expression = Literal(""),
country: Expression = Literal(""))
extends Expression with ImplicitCastInputTypes with CodegenFallback {
def this(str: Expression) = this(str, Literal(""), Literal(""))
def this(str: Expression, language: Expression) = this(str, language, Literal(""))
override def nullable: Boolean = true
override def dataType: DataType =
ArrayType(ArrayType(StringType, containsNull = false), containsNull = false)
override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType, StringType)
override def children: Seq[Expression] = str :: language :: country :: Nil
override def eval(input: InternalRow): Any = {
val string = str.eval(input)
if (string == null) {
null
} else {
val languageStr = language.eval(input).asInstanceOf[UTF8String]
val countryStr = country.eval(input).asInstanceOf[UTF8String]
val locale = if (languageStr != null && countryStr != null) {
new Locale(languageStr.toString, countryStr.toString)
} else {
Locale.US
}
getSentences(string.asInstanceOf[UTF8String].toString, locale)
}
}
private def getSentences(sentences: String, locale: Locale) = {
val bi = BreakIterator.getSentenceInstance(locale)
bi.setText(sentences)
var idx = 0
val result = new ArrayBuffer[GenericArrayData]
while (bi.next != BreakIterator.DONE) {
val sentence = sentences.substring(idx, bi.current)
idx = bi.current
val wi = BreakIterator.getWordInstance(locale)
var widx = 0
wi.setText(sentence)
val words = new ArrayBuffer[UTF8String]
while (wi.next != BreakIterator.DONE) {
val word = sentence.substring(widx, wi.current)
widx = wi.current
if (Character.isLetterOrDigit(word.charAt(0))) words += UTF8String.fromString(word)
}
result += new GenericArrayData(words)
}
new GenericArrayData(result)
}
}
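/*
 * A minimal JDK-only sketch of the BreakIterator logic used above (names are illustrative):
 *
 *   import java.text.BreakIterator
 *   import java.util.Locale
 *
 *   def sentencesToWords(text: String, locale: Locale = Locale.US): Seq[Seq[String]] = {
 *     val si = BreakIterator.getSentenceInstance(locale)
 *     si.setText(text)
 *     var start = 0
 *     val out = Seq.newBuilder[Seq[String]]
 *     while (si.next() != BreakIterator.DONE) {
 *       val sentence = text.substring(start, si.current)
 *       start = si.current
 *       val wi = BreakIterator.getWordInstance(locale)
 *       wi.setText(sentence)
 *       var wStart = 0
 *       val words = Seq.newBuilder[String]
 *       while (wi.next() != BreakIterator.DONE) {
 *         val w = sentence.substring(wStart, wi.current)
 *         wStart = wi.current
 *         if (w.nonEmpty && Character.isLetterOrDigit(w.charAt(0))) words += w
 *       }
 *       out += words.result()
 *     }
 *     out.result()
 *   }
 *
 *   // sentencesToWords("Hi there! Good morning.") == Seq(Seq("Hi", "there"), Seq("Good", "morning"))
 */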
| MLnick/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala | Scala | apache-2.0 | 55,347 |
/*******************************************************************************/
/* */
/* Copyright (C) 2017 by Max Lv <[email protected]> */
/* Copyright (C) 2017 by Mygod Studio <[email protected]> */
/* */
/* This program is free software: you can redistribute it and/or modify */
/* it under the terms of the GNU General Public License as published by */
/* the Free Software Foundation, either version 3 of the License, or */
/* (at your option) any later version. */
/* */
/* This program is distributed in the hope that it will be useful, */
/* but WITHOUT ANY WARRANTY; without even the implied warranty of */
/* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */
/* GNU General Public License for more details. */
/* */
/* You should have received a copy of the GNU General Public License */
/* along with this program. If not, see <http://www.gnu.org/licenses/>. */
/* */
/*******************************************************************************/
package com.github.shadowsocks
import java.util.GregorianCalendar
import android.app.Activity
import android.content._
import android.net.Uri
import android.os.Bundle
import android.support.design.widget.Snackbar
import android.support.v7.widget.RecyclerView.ViewHolder
import android.support.v7.widget._
import android.support.v7.widget.helper.ItemTouchHelper
import android.support.v7.widget.helper.ItemTouchHelper.SimpleCallback
import android.text.TextUtils
import android.view.View.OnLongClickListener
import android.view._
import android.widget.{LinearLayout, PopupMenu, TextView, Toast}
import com.github.shadowsocks.ShadowsocksApplication.app
import com.github.shadowsocks.database.Profile
import com.github.shadowsocks.plugin.PluginConfiguration
import com.github.shadowsocks.utils._
import com.github.shadowsocks.widget.UndoSnackbarManager
import com.google.android.gms.ads.{AdRequest, AdSize, NativeExpressAdView}
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
import scala.util.Random
object ProfilesFragment {
var instance: ProfilesFragment = _ // used for callback from ProfileManager and stateChanged from MainActivity
private final val REQUEST_SCAN_QR_CODE = 0
}
final class ProfilesFragment extends ToolbarFragment with Toolbar.OnMenuItemClickListener {
import ProfilesFragment._
private val cardButtonLongClickListener: OnLongClickListener = view => {
Utils.positionToast(Toast.makeText(getActivity, view.getContentDescription, Toast.LENGTH_SHORT), view,
getActivity.getWindow, 0, getResources.getDimensionPixelOffset(R.dimen.margin_small)).show()
true
}
/**
* Is ProfilesFragment editable at all.
*/
private def isEnabled = getActivity.asInstanceOf[MainActivity].state match {
case State.CONNECTED | State.STOPPED => true
case _ => false
}
private def isProfileEditable(id: => Int) = getActivity.asInstanceOf[MainActivity].state match {
case State.CONNECTED => id != app.profileId
case State.STOPPED => true
case _ => false
}
final class ProfileViewHolder(view: View) extends RecyclerView.ViewHolder(view)
with View.OnClickListener with PopupMenu.OnMenuItemClickListener {
var item: Profile = _
private val text1 = itemView.findViewById(android.R.id.text1).asInstanceOf[TextView]
private val text2 = itemView.findViewById(android.R.id.text2).asInstanceOf[TextView]
private val traffic = itemView.findViewById(R.id.traffic).asInstanceOf[TextView]
private val edit = itemView.findViewById(R.id.edit)
edit.setOnClickListener(_ => startConfig(item.id))
edit.setOnLongClickListener(cardButtonLongClickListener)
itemView.setOnClickListener(this)
// it will not take effect unless set in code
itemView.findViewById(R.id.indicator).setBackgroundResource(R.drawable.background_profile)
private var adView: NativeExpressAdView = _
{
val share = itemView.findViewById(R.id.share)
share.setOnClickListener(_ => {
val popup = new PopupMenu(getActivity, share)
popup.getMenuInflater.inflate(R.menu.profile_share_popup, popup.getMenu)
popup.setOnMenuItemClickListener(this)
popup.show()
})
share.setOnLongClickListener(cardButtonLongClickListener)
}
def bind(item: Profile) {
this.item = item
val editable = isProfileEditable(item.id)
edit.setEnabled(editable)
edit.setAlpha(if (editable) 1 else 0.5F)
var tx = item.tx
var rx = item.rx
if (item.id == bandwidthProfile) {
tx += txTotal
rx += rxTotal
}
text1.setText(item.getName)
val t2 = new ListBuffer[String]
if (!item.nameIsEmpty) t2 += item.formattedAddress
new PluginConfiguration(item.plugin).selected match {
case "" =>
case id => t2 += app.getString(R.string.profile_plugin, id)
}
if (t2.isEmpty) text2.setVisibility(View.GONE) else {
text2.setVisibility(View.VISIBLE)
        text2.setText(t2.mkString("\n"))
}
if (tx <= 0 && rx <= 0) traffic.setVisibility(View.GONE) else {
traffic.setVisibility(View.VISIBLE)
traffic.setText(getString(R.string.stat_profiles,
TrafficMonitor.formatTraffic(tx), TrafficMonitor.formatTraffic(rx)))
}
if (item.id == app.profileId) {
itemView.setSelected(true)
selectedItem = this
} else {
itemView.setSelected(false)
if (selectedItem eq this) selectedItem = null
}
if (item.host == "198.199.101.152") {
if (adView == null) {
val params =
new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)
params.gravity = Gravity.CENTER_HORIZONTAL
params.setMargins(0, getResources.getDimensionPixelOffset(R.dimen.margin_small), 0, 0)
adView = new NativeExpressAdView(getActivity)
adView.setLayoutParams(params)
adView.setAdUnitId("ca-app-pub-9097031975646651/5224027521")
adView.setAdSize(new AdSize(328, 132))
itemView.findViewById(R.id.content).asInstanceOf[LinearLayout].addView(adView)
// Demographics
val random = new Random()
val adBuilder = new AdRequest.Builder()
adBuilder.setGender(AdRequest.GENDER_MALE)
val year = 1975 + random.nextInt(40)
val month = 1 + random.nextInt(12)
val day = random.nextInt(28)
adBuilder.setBirthday(new GregorianCalendar(year, month, day).getTime)
// Load Ad
adView.loadAd(adBuilder.build())
} else adView.setVisibility(View.VISIBLE)
} else if (adView != null) adView.setVisibility(View.GONE)
}
def onClick(v: View): Unit = if (isEnabled) {
val activity = getActivity.asInstanceOf[MainActivity]
val old = app.profileId
app.switchProfile(item.id)
profilesAdapter.refreshId(old)
itemView.setSelected(true)
if (activity.state == State.CONNECTED) activity.bgService.use(item.id) // reconnect to new profile
}
override def onMenuItemClick(menu: MenuItem): Boolean = menu.getItemId match {
case R.id.action_qr_code_nfc =>
getFragmentManager.beginTransaction().add(new QRCodeDialog(item.toString), "").commitAllowingStateLoss()
true
case R.id.action_export =>
clipboard.setPrimaryClip(ClipData.newPlainText(null, item.toString))
true
case _ => false
}
}
final class ProfilesAdapter extends RecyclerView.Adapter[ProfileViewHolder] {
var profiles = new ArrayBuffer[Profile]
profiles ++= app.profileManager.getAllProfiles.getOrElse(List.empty[Profile])
def getItemCount: Int = profiles.length
def onBindViewHolder(vh: ProfileViewHolder, i: Int): Unit = vh.bind(profiles(i))
def onCreateViewHolder(vg: ViewGroup, i: Int) =
new ProfileViewHolder(LayoutInflater.from(vg.getContext).inflate(R.layout.layout_profile, vg, false))
setHasStableIds(true) // Reason: http://stackoverflow.com/a/32488059/2245107
override def getItemId(position: Int): Long = profiles(position).id
def add(item: Profile) {
undoManager.flush()
val pos = getItemCount
profiles += item
notifyItemInserted(pos)
}
def move(from: Int, to: Int) {
undoManager.flush()
val step = if (from < to) 1 else -1
val first = profiles(from)
var previousOrder = profiles(from).userOrder
for (i <- from until to by step) {
val next = profiles(i + step)
val order = next.userOrder
next.userOrder = previousOrder
previousOrder = order
profiles(i) = next
app.profileManager.updateProfile(next)
}
first.userOrder = previousOrder
profiles(to) = first
app.profileManager.updateProfile(first)
notifyItemMoved(from, to)
}
def remove(pos: Int) {
profiles.remove(pos)
notifyItemRemoved(pos)
}
def undo(actions: Iterator[(Int, Profile)]): Unit = for ((index, item) <- actions) {
profiles.insert(index, item)
notifyItemInserted(index)
}
def commit(actions: Iterator[(Int, Profile)]): Unit =
for ((_, item) <- actions) app.profileManager.delProfile(item.id)
def refreshId(id: Int) {
val index = profiles.indexWhere(_.id == id)
if (index >= 0) notifyItemChanged(index)
}
def deepRefreshId(id: Int) {
val index = profiles.indexWhere(_.id == id)
if (index >= 0) {
profiles(index) = app.profileManager.getProfile(id).get
notifyItemChanged(index)
}
}
def removeId(id: Int) {
val index = profiles.indexWhere(_.id == id)
if (index >= 0) {
profiles.remove(index)
notifyItemRemoved(index)
if (id == app.profileId) app.profileId(0) // switch to null profile
}
}
}
private var selectedItem: ProfileViewHolder = _
lazy val profilesAdapter = new ProfilesAdapter
private var undoManager: UndoSnackbarManager[Profile] = _
private var bandwidthProfile: Int = _
private var txTotal: Long = _
private var rxTotal: Long = _
private lazy val clipboard = getActivity.getSystemService(Context.CLIPBOARD_SERVICE).asInstanceOf[ClipboardManager]
private def startConfig(id: Int) = startActivity(new Intent(getActivity, classOf[ProfileConfigActivity])
.putExtra(Action.EXTRA_PROFILE_ID, id))
override def onCreateView(inflater: LayoutInflater, container: ViewGroup, savedInstanceState: Bundle): View =
inflater.inflate(R.layout.layout_list, container, false)
override def onViewCreated(view: View, savedInstanceState: Bundle) {
super.onViewCreated(view, savedInstanceState)
toolbar.setTitle(R.string.profiles)
toolbar.inflateMenu(R.menu.profile_manager_menu)
toolbar.setOnMenuItemClickListener(this)
if (app.profileManager.getFirstProfile.isEmpty) app.profileId(app.profileManager.createProfile().id)
val profilesList = view.findViewById(R.id.list).asInstanceOf[RecyclerView]
val layoutManager = new LinearLayoutManager(getActivity, LinearLayoutManager.VERTICAL, false)
profilesList.setLayoutManager(layoutManager)
layoutManager.scrollToPosition(profilesAdapter.profiles.zipWithIndex.collectFirst {
case (profile, i) if profile.id == app.profileId => i
}.getOrElse(-1))
val animator = new DefaultItemAnimator()
animator.setSupportsChangeAnimations(false) // prevent fading-in/out when rebinding
profilesList.setItemAnimator(animator)
profilesList.setAdapter(profilesAdapter)
instance = this
undoManager = new UndoSnackbarManager[Profile](getActivity.findViewById(R.id.snackbar),
profilesAdapter.undo, profilesAdapter.commit)
new ItemTouchHelper(new SimpleCallback(ItemTouchHelper.UP | ItemTouchHelper.DOWN,
ItemTouchHelper.START | ItemTouchHelper.END) {
override def getSwipeDirs(recyclerView: RecyclerView, viewHolder: ViewHolder): Int =
if (isProfileEditable(viewHolder.asInstanceOf[ProfileViewHolder].item.id))
super.getSwipeDirs(recyclerView, viewHolder) else 0
override def getDragDirs(recyclerView: RecyclerView, viewHolder: ViewHolder): Int =
if (isEnabled) super.getDragDirs(recyclerView, viewHolder) else 0
def onSwiped(viewHolder: ViewHolder, direction: Int) {
val index = viewHolder.getAdapterPosition
profilesAdapter.remove(index)
undoManager.remove((index, viewHolder.asInstanceOf[ProfileViewHolder].item))
}
def onMove(recyclerView: RecyclerView, viewHolder: ViewHolder, target: ViewHolder): Boolean = {
profilesAdapter.move(viewHolder.getAdapterPosition, target.getAdapterPosition)
true
}
}).attachToRecyclerView(profilesList)
}
override def onTrafficUpdated(profileId: Int, txRate: Long, rxRate: Long, txTotal: Long, rxTotal: Long): Unit =
if (profileId != -1) { // ignore resets from MainActivity
if (bandwidthProfile != profileId) {
onTrafficPersisted(bandwidthProfile)
bandwidthProfile = profileId
}
this.txTotal = txTotal
this.rxTotal = rxTotal
profilesAdapter.refreshId(profileId)
}
def onTrafficPersisted(profileId: Int) {
txTotal = 0
rxTotal = 0
if (bandwidthProfile != profileId) {
onTrafficPersisted(bandwidthProfile)
bandwidthProfile = profileId
}
profilesAdapter.deepRefreshId(profileId)
}
override def onDetach() {
undoManager.flush()
super.onDetach()
}
override def onDestroy() {
instance = null
super.onDestroy()
}
override def onActivityResult(requestCode: Int, resultCode: Int, data: Intent): Unit = requestCode match {
case REQUEST_SCAN_QR_CODE => if (resultCode == Activity.RESULT_OK) {
val contents = data.getStringExtra("SCAN_RESULT")
if (!TextUtils.isEmpty(contents)) Parser.findAll(contents).foreach(app.profileManager.createProfile)
}
    case _ => super.onActivityResult(requestCode, resultCode, data)
}
def onMenuItemClick(item: MenuItem): Boolean = item.getItemId match {
case R.id.action_scan_qr_code =>
try startActivityForResult(new Intent("com.google.zxing.client.android.SCAN")
.addCategory(Intent.CATEGORY_DEFAULT)
.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_DOCUMENT),
REQUEST_SCAN_QR_CODE) catch {
case _: ActivityNotFoundException =>
startActivity(new Intent(getActivity, classOf[ScannerActivity]))
case e: SecurityException =>
e.printStackTrace()
app.track(e)
startActivity(new Intent(getActivity, classOf[ScannerActivity]))
}
true
case R.id.action_import =>
try {
val profiles = Parser.findAll(clipboard.getPrimaryClip.getItemAt(0).getText)
if (profiles.nonEmpty) {
profiles.foreach(app.profileManager.createProfile)
Toast.makeText(getActivity, R.string.action_import_msg, Toast.LENGTH_SHORT).show()
return true
}
} catch {
case _: Exception =>
}
Snackbar.make(getActivity.findViewById(R.id.snackbar), R.string.action_import_err, Snackbar.LENGTH_LONG).show()
true
case R.id.action_manual_settings =>
startConfig(app.profileManager.createProfile().id)
true
case R.id.action_export =>
app.profileManager.getAllProfiles match {
case Some(profiles) =>
          clipboard.setPrimaryClip(ClipData.newPlainText(null, profiles.mkString("\n")))
Toast.makeText(getActivity, R.string.action_export_msg, Toast.LENGTH_SHORT).show()
case _ => Toast.makeText(getActivity, R.string.action_export_err, Toast.LENGTH_SHORT).show()
}
true
case _ => false
}
}
| hangim/shadowsocks-android | mobile/src/main/scala/com/github/shadowsocks/ProfilesFragment.scala | Scala | gpl-3.0 | 16,310 |
package skinny.orm.feature
import skinny.orm._
import skinny.orm.feature.associations._
import skinny.orm.feature.includes.IncludesQueryRepository
import scalikejdbc._
/**
* Provides #joins APIs.
*
* NOTE: CRUDFeature has copy implementation from this trait.
*/
trait NoIdJoinsFeature[Entity] extends SkinnyMapperBase[Entity] with AssociationsFeature[Entity] {
/**
* Appends join definition on runtime.
*
* @param associations associations
* @return self
*/
def joins(
associations: Association[_]*
): NoIdJoinsFeature[Entity] with NoIdFinderFeature[Entity] with NoIdQueryingFeature[Entity] = {
val _self = this
val _associations = associations
new NoIdJoinsFeature[Entity] with NoIdFinderFeature[Entity] with NoIdQueryingFeature[Entity] {
override protected val underlying = _self
override def defaultAlias = _self.defaultAlias
override def tableName = _self.tableName
override def columnNames = _self.columnNames
override def primaryKeyField = _self.primaryKeyField
override def primaryKeyFieldName = _self.primaryKeyFieldName
override val associations = _self.associations ++ _associations
override val defaultJoinDefinitions = _self.defaultJoinDefinitions
override val defaultBelongsToExtractors = _self.defaultBelongsToExtractors
override val defaultHasOneExtractors = _self.defaultHasOneExtractors
override val defaultOneToManyExtractors = _self.defaultOneToManyExtractors
override def autoSession = underlying.autoSession
override def connectionPoolName = underlying.connectionPoolName
override def connectionPool = underlying.connectionPool
override def defaultScope(alias: Alias[Entity]) = _self.defaultScope(alias)
//override def singleSelectQuery = _self.singleSelectQuery
def extract(rs: WrappedResultSet, n: ResultName[Entity]) = underlying.extract(rs, n)
}
}
override def extract(sql: SQL[Entity, NoExtractor])(
implicit includesRepository: IncludesQueryRepository[Entity]
): SQL[Entity, HasExtractor] = {
extractWithAssociations(
sql,
belongsToAssociations,
hasOneAssociations,
hasManyAssociations
)
}
}
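/*
 * Hedged usage sketch (the `Company` mapper and `membersRef` association below are
 * illustrative, not part of this trait):
 *
 *   // val companyWithMembers = Company.joins(Company.membersRef)
 *   // companyWithMembers carries the extra association; the original Company mapper is untouched.
 */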
| seratch/skinny-framework | orm/src/main/scala/skinny/orm/feature/NoIdJoinsFeature.scala | Scala | mit | 2,276 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.kudu.data
import java.util.Date
import org.locationtech.geomesa.curve.BinnedTime.TimeToBinnedTime
import org.locationtech.geomesa.index.api.{KeyValue, WritableFeature}
import org.locationtech.geomesa.kudu.KuduValue
import org.locationtech.geomesa.kudu.schema.KuduSimpleFeatureSchema
import org.locationtech.geomesa.security.SecurityUtils
import org.opengis.feature.simple.SimpleFeature
class KuduWritableFeature(delegate: WritableFeature,
schema: KuduSimpleFeatureSchema,
dtgIndex: Option[Int],
toBin: TimeToBinnedTime) extends WritableFeature {
override val feature: SimpleFeature = delegate.feature
override val values: Seq[KeyValue] = delegate.values
override val id: Array[Byte] = delegate.id
val kuduValues: Seq[KuduValue[_]] = schema.serialize(feature)
val vis: String = SecurityUtils.getVisibility(feature)
val bin: Short = dtgIndex match {
case None => 0
case Some(i) =>
val dtg = feature.getAttribute(i).asInstanceOf[Date]
if (dtg == null) { 0 } else {
toBin(dtg.getTime).bin
}
}
}
| elahrvivaz/geomesa | geomesa-kudu/geomesa-kudu-datastore/src/main/scala/org/locationtech/geomesa/kudu/data/KuduWritableFeature.scala | Scala | apache-2.0 | 1,621 |
package utils
import scala.util.{Try, Success, Failure}
import scala.collection.JavaConversions._
import scalax.io.Resource
import scalax.io.JavaConverters._
import resource._
import java.io.InputStream
import java.util.zip.GZIPInputStream
import org.apache.commons.net.ftp._
import org.joda.time.DateTime
import org.joda.time.DateTimeZone.UTC
import models.IsdStation
import models.IsdWeather
class IsdFtp {
private val ftp = new FTPClient()
private val dataPath = "/pub/data/noaa/isd-lite"
/**
* Connection
*/
def connect(): Unit = {
ftp.connect("ftp.ncdc.noaa.gov")
val login = ftp.login("anonymous", "anonymous")
val reply = ftp.getReplyCode()
if (login && FTPReply.isPositiveCompletion(reply)) {
// Disable buffering
ftp.setBufferSize(0)
// Transfer as binary
ftp.setFileType(FTP.BINARY_FILE_TYPE)
// Set timeout to 5 minutes
ftp.setControlKeepAliveTimeout(300)
// Enable passive mode
ftp.enterLocalPassiveMode()
} else {
disconnect()
}
}
def disconnect(): Unit = {
ftp.disconnect()
}
def requireConnection(): Unit = {
if (!ftp.isConnected())
connect()
}
/**
* NOAA Identifiers
*/
private def checkUsaf(usaf: String): Unit = {
require(usaf.length == 6)
val usafNum = usaf.toInt
require(0 <= usafNum && usafNum <= 999999)
}
private def checkWban(wban: String): Unit = {
require(wban.length == 5)
val wbanNum = wban.toInt
require(0 <= wbanNum && wbanNum <= 99999)
}
private def checkYear(year: Int): Unit = {
require(1901 <= year && year < 3000)
}
private def makeFileName(usaf: String, wban: String, year: Int): String = {
checkYear(year)
checkUsaf(usaf)
checkWban(wban)
s"${usaf}-${wban}-${year}.gz"
}
private def makeFilePath(usaf: String, wban: String, year: Int): String = {
val name = makeFileName(usaf, wban, year)
s"${dataPath}/${year}/${name}"
}
/**
* FTP
*/
def listFiles(path: String): Seq[FTPFile] = {
requireConnection()
ftp.listFiles(path).toSeq
}
def listNames(path: String): Seq[String] = {
requireConnection()
ftp.listNames(path).toSeq
}
def listYear(year: Int): Seq[String] = {
checkYear(year)
val path = s"${dataPath}/${year}"
listNames(path)
}
def downloadFile(infile: String, outfile: String): Unit = {
requireConnection()
for {
out <- managed(new java.io.FileOutputStream(outfile))
} {
ftp.retrieveFile(infile, out)
}
}
def downloadStream(filepath: String): Option[InputStream] = {
requireConnection()
Option(ftp.retrieveFileStream(filepath))
}
def decompressStream(stream: InputStream): java.io.BufferedInputStream = {
val gzip = new GZIPInputStream(stream)
val buff = new java.io.BufferedInputStream(gzip)
buff
}
/*
* Weather
*/
def getWeatherAsLines(usaf: String, wban: String, year: Int): Option[Seq[String]] = {
val filepath = makeFilePath(usaf, wban, year)
for {
stream <- downloadStream(filepath)
} yield {
val buff = decompressStream(stream)
val input = buff.asInput
val lines = input.lines().toSeq
ftp.completePendingCommand()
lines
}
}
def getWeatherAsMap(usaf: String, wban: String, year: Int):
Option[Seq[Map[String, String]]] = {
for {
lines <- getWeatherAsLines(usaf, wban, year)
} yield {
lines.map(parseLine)
}
}
def getWeather(usaf: String, wban: String, year: Int):
Option[Seq[IsdWeather]] = {
for {
lines <- getWeatherAsMap(usaf, wban, year)
} yield {
lines.map { m =>
IsdWeather(
usaf = usaf,
wban = wban,
datetime = new DateTime(
m("year").toInt, m("month").toInt, m("day").toInt,
m("hour").toInt, 0, 0, 0, UTC),
tempDry = if (m("air temp") == "") 0.0d else m("air temp").toDouble,
tempDew = if (m("dew temp") == "") 0.0d else m("dew temp").toDouble
)
}
}
}
/**
* Parsing
*/
def parseLine(line: String): Map[String, String] = {
case class Field(
val start: Int,
val end: Int,
val length: Int,
val name: String,
val description: String
)
val fields = Seq(
Field( 1, 4, 4, "year", "Year"),
Field( 6, 7, 2, "month", "Month"),
Field( 9, 11, 2, "day", "Day"),
Field(12, 13, 2, "hour", "Hour"),
Field(14, 19, 6, "air temp", "Air Temperature (C)"),
Field(20, 25, 6, "dew temp", "Dew Point Temperature (C)"),
Field(26, 31, 6, "pressure", "Sea Level Pressure (hPa)"),
Field(32, 37, 6, "wind dir", "Wind Direction (Degrees)"),
Field(38, 43, 6, "wind speed", "Wind Speed Rate (m/s)"),
Field(44, 49, 6, "sky cond", "Sky Condition"),
Field(50, 55, 6, "precip 1h", "Precipitation 1-Hour (mm)"),
Field(56, 61, 6, "precip 6h", "Precipitation 6-Hour (mm)")
)
val empty = Map[String, String]().withDefaultValue("")
if (line.length >= 61) {
fields.foldLeft(empty){ (m,f) =>
val value = line.substring(f.start-1, f.end).trim
val recoded = if (value == "-9999") "" else value
m.updated(f.name, recoded)
}
} else {
empty
}
}
}
/*
object NoaaIsdFtpExample {
val usaf = "010230"
val wban = "99999"
val year = 1960
  val noaa = new IsdFtp()
for {
data <- noaa.getWeatherAsMap(usaf, wban, year)
} {
println(data.take(10))
}
}
*/
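/*
 * Hedged end-to-end sketch (station identifiers reuse the ones from the example above):
 *
 *   val ftp = new IsdFtp()
 *   try {
 *     ftp.connect()
 *     for (weather <- ftp.getWeather(usaf = "010230", wban = "99999", year = 1960))
 *       weather.take(5).foreach(println)
 *   } finally ftp.disconnect()
 */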
| tanimoto/weather | app/utils/ISD.scala | Scala | mit | 5,581 |
package org.dsa.iot.ignition.core
import org.dsa.iot.dslink.link.Requester
import org.dsa.iot.dslink.node.value.Value
import org.dsa.iot.rx.AbstractRxBlock
import org.dsa.iot.scala.{ DSAHelper, Having }
/**
* Reads values from a DSA node.
*/
class DSAInput(implicit requester: Requester) extends AbstractRxBlock[Value] {
def path(str: String): DSAInput = this having (path <~ str)
val path = Port[String]("path")
protected def compute = path.in flatMap (p => DSAHelper.watch(p).map(_.getValue))
}
/**
* Factory for [[DSAInput]] instances.
*/
object DSAInput {
/**
* Creates a new DSAInput instance.
*/
def apply()(implicit requester: Requester) = new DSAInput
/**
* Creates a new DSAInput instance for the specified path.
*/
def apply(path: String)(implicit requester: Requester) = {
val block = new DSAInput
block.path <~ path
block
}
} | IOT-DSA/dslink-scala-ignition | src/main/scala/org/dsa/iot/ignition/core/DSAInput.scala | Scala | apache-2.0 | 893 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.mllib
import org.apache.log4j.{Level, Logger}
import org.apache.spark.mllib.recommendation.{ALS, MatrixFactorizationModel, Rating}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import scopt.OptionParser
import scala.collection.mutable
/**
* An example app for ALS on MovieLens data (http://grouplens.org/datasets/movielens/).
* Run with
* {{{
* bin/run-example org.apache.spark.examples.mllib.MovieLensALS
* }}}
* A synthetic dataset in MovieLens format can be found at `data/mllib/sample_movielens_data.txt`.
* If you use it as a template to create your own app, please use `spark-submit` to submit your app.
*/
object MovieLensALS {
case class Params(
input: String = null,
kryo: Boolean = false,
numIterations: Int = 20,
lambda: Double = 1.0,
rank: Int = 10,
numUserBlocks: Int = -1,
numProductBlocks: Int = -1,
implicitPrefs: Boolean = false) extends AbstractParams[Params]
def main(args: Array[String]) {
val defaultParams = Params()
val parser = new OptionParser[Params]("MovieLensALS") {
head("MovieLensALS: an example app for ALS on MovieLens data.")
opt[Int]("rank")
.text(s"rank, default: ${defaultParams.rank}")
.action((x, c) => c.copy(rank = x))
opt[Int]("numIterations")
.text(s"number of iterations, default: ${defaultParams.numIterations}")
.action((x, c) => c.copy(numIterations = x))
opt[Double]("lambda")
.text(s"lambda (smoothing constant), default: ${defaultParams.lambda}")
.action((x, c) => c.copy(lambda = x))
opt[Unit]("kryo")
.text("use Kryo serialization")
.action((_, c) => c.copy(kryo = true))
opt[Int]("numUserBlocks")
.text(s"number of user blocks, default: ${defaultParams.numUserBlocks} (auto)")
.action((x, c) => c.copy(numUserBlocks = x))
opt[Int]("numProductBlocks")
.text(s"number of product blocks, default: ${defaultParams.numProductBlocks} (auto)")
.action((x, c) => c.copy(numProductBlocks = x))
opt[Unit]("implicitPrefs")
.text("use implicit preference")
.action((_, c) => c.copy(implicitPrefs = true))
arg[String]("<input>")
.required()
.text("input paths to a MovieLens dataset of ratings")
.action((x, c) => c.copy(input = x))
note(
"""
|For example, the following command runs this app on a synthetic dataset:
|
| bin/spark-submit --class org.apache.spark.examples.mllib.MovieLensALS \
| examples/target/scala-*/spark-examples-*.jar \
| --rank 5 --numIterations 20 --lambda 1.0 --kryo \
| data/mllib/sample_movielens_data.txt
""".stripMargin)
}
parser.parse(args, defaultParams) match {
case Some(params) => run(params)
case _ => sys.exit(1)
}
}
def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"MovieLensALS with $params")
if (params.kryo) {
conf.registerKryoClasses(Array(classOf[mutable.BitSet], classOf[Rating]))
.set("spark.kryoserializer.buffer", "8m")
}
val sc = new SparkContext(conf)
Logger.getRootLogger.setLevel(Level.WARN)
val implicitPrefs = params.implicitPrefs
val ratings = sc.textFile(params.input).map { line =>
val fields = line.split("::")
if (implicitPrefs) {
/*
* MovieLens ratings are on a scale of 1-5:
* 5: Must see
* 4: Will enjoy
* 3: It's okay
* 2: Fairly bad
* 1: Awful
* So we should not recommend a movie if the predicted rating is less than 3.
* To map ratings to confidence scores, we use
         * 5 -> 2.5, 4 -> 1.5, 3 -> 0.5, 2 -> -0.5, 1 -> -1.5. This mapping means unobserved
* entries are generally between It's okay and Fairly bad.
* The semantics of 0 in this expanded world of non-positive weights
* are "the same as never having interacted at all".
*/
Rating(fields(0).toInt, fields(1).toInt, fields(2).toDouble - 2.5)
} else {
Rating(fields(0).toInt, fields(1).toInt, fields(2).toDouble)
}
}.cache()
val numRatings = ratings.count()
val numUsers = ratings.map(_.user).distinct().count()
val numMovies = ratings.map(_.product).distinct().count()
println(s"Got $numRatings ratings from $numUsers users on $numMovies movies.")
val splits = ratings.randomSplit(Array(0.8, 0.2))
val training = splits(0).cache()
val test = if (params.implicitPrefs) {
/*
* 0 means "don't know" and positive values mean "confident that the prediction should be 1".
       * Negative values mean "confident that the prediction should be 0".
* We have in this case used some kind of weighted RMSE. The weight is the absolute value of
* the confidence. The error is the difference between prediction and either 1 or 0,
* depending on whether r is positive or negative.
*/
splits(1).map(x => Rating(x.user, x.product, if (x.rating > 0) 1.0 else 0.0))
} else {
splits(1)
}.cache()
val numTraining = training.count()
val numTest = test.count()
println(s"Training: $numTraining, test: $numTest.")
ratings.unpersist(blocking = false)
val model = new ALS()
.setRank(params.rank)
.setIterations(params.numIterations)
.setLambda(params.lambda)
.setImplicitPrefs(params.implicitPrefs)
.setUserBlocks(params.numUserBlocks)
.setProductBlocks(params.numProductBlocks)
.run(training)
val rmse = computeRmse(model, test, params.implicitPrefs)
println(s"Test RMSE = $rmse.")
sc.stop()
}
/** Compute RMSE (Root Mean Squared Error). */
def computeRmse(model: MatrixFactorizationModel, data: RDD[Rating], implicitPrefs: Boolean)
: Double = {
def mapPredictedRating(r: Double): Double = {
if (implicitPrefs) math.max(math.min(r, 1.0), 0.0) else r
}
val predictions: RDD[Rating] = model.predict(data.map(x => (x.user, x.product)))
val predictionsAndRatings = predictions.map { x =>
((x.user, x.product), mapPredictedRating(x.rating))
}.join(data.map(x => ((x.user, x.product), x.rating))).values
math.sqrt(predictionsAndRatings.map(x => (x._1 - x._2) * (x._1 - x._2)).mean())
}
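  // A tiny worked instance of the same formula on in-memory pairs (illustrative only):
  // predictions (1.0, 3.0) against ratings (2.0, 5.0) give
  //   sqrt(((1.0 - 2.0)^2 + (3.0 - 5.0)^2) / 2) = sqrt(2.5) ~= 1.58
  //
  //   val pairs = Seq((1.0, 2.0), (3.0, 5.0))
  //   val rmse = math.sqrt(pairs.map { case (p, r) => (p - r) * (p - r) }.sum / pairs.size)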
}
// scalastyle:on println
| chgm1006/spark-app | src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala | Scala | apache-2.0 | 8,148 |
package trivial.rest
import scala.reflect.ClassTag
// TODO - CAS - 23/04/15 - Introduces boilerplate. Looking for ways to remove it.
trait Resource[T <: Resource[T]] {
def id: Option[String]
// To return T, this has to be a self-recursive type. We can now call aCurrency.withId("x") and get a Currency back, not just a Resource.
// Self-recursive types are not loved by all: http://logji.blogspot.se/2012/11/f-bounded-type-polymorphism-give-up-now.html
// However, an overridden type-alias would have introduced yet more boilerplate.
def withId(newId: Option[String]): T
}
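// A minimal sketch of a concrete resource using the self-recursive bound (the Currency shape
// below is illustrative, not part of this library):
//
//   case class Currency(id: Option[String], isoName: String) extends Resource[Currency] {
//     override def withId(newId: Option[String]): Currency = copy(id = newId)
//   }
//
//   // Currency(None, "GBP").withId(Some("x")) is statically a Currency, not just a Resource.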
object Resource {
def name[T : ClassTag]: String = Classy.name[T].toLowerCase
}
// TODO - CAS - 22/06/15 - Make trying to POST one of these a compile failure
trait HardCoded {
def withId(newId: Option[String]): this.type = this
} | agmenc/trivial-rest | src/main/scala/trivial/rest/Resource.scala | Scala | mit | 821 |
package operation
import model.{ Article, Tag }
import scalikejdbc.DBSession
import view_model._
/**
* The operation for search articles.
*/
sealed trait SearchOperation {
val PAGE_SIZE: Int = 20
def search(q: String, pageNo: Int, pageSize: Int = PAGE_SIZE)(implicit s: DBSession = Tag.autoSession): SearchResult
}
class SearchOperationImpl extends SearchOperation {
override def search(q: String, pageNo: Int, pageSize: Int = PAGE_SIZE)(implicit s: DBSession = Tag.autoSession): SearchResult = {
SearchResult(
q,
Tag.findByName(q),
{
val totalCount = Article.countByTitleOrBody(q)
val totalPages = (totalCount / pageSize).toInt + (if (totalCount % pageSize == 0) 0 else 1)
val articles = Article.searchByTitleOrBody(q, pageNo, pageSize)
Pagination(pageNo, totalPages, totalCount, articles)
}
)
}
} | atware/sharedocs | src/main/scala/operation/SearchOperation.scala | Scala | mit | 879 |
package com.sksamuel.elastic4s.reindex
import com.sksamuel.elastic4s.{Index, Indexes}
import com.sksamuel.exts.OptionImplicits._
trait ReindexApi {
def reindex(source: String, target: Index): ReindexRequest = reindex(Indexes(source), target)
def reindex(source: Indexes, target: Index): ReindexRequest = ReindexRequest(source, target)
def reindex(source: Index, target: Index): ReindexRequest = ReindexRequest(source.toIndexes, target)
@deprecated("use reindex(from, to)", "6.0.0")
def reindex(sourceIndexes: Indexes): ReindexExpectsTarget = new ReindexExpectsTarget(sourceIndexes)
class ReindexExpectsTarget(sourceIndexes: Indexes) {
def into(index: String): ReindexRequest = ReindexRequest(sourceIndexes, index)
def into(index: String, `type`: String): ReindexRequest = ReindexRequest(sourceIndexes, index, `type`.some)
}
}
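// Hedged usage sketch for a client mixing in this trait (index names are illustrative;
// `Index(...)` construction is assumed from the imports above):
//
//   val request: ReindexRequest = reindex("articles-v1", Index("articles-v2"))
//   // all documents of articles-v1 are copied into articles-v2 when the request is executed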
| Tecsisa/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/reindex/ReindexApi.scala | Scala | apache-2.0 | 871 |
package geotrellis.transit.loader.osm
import geotrellis.network._
trait WayInfo {
val wayId:String
def isWalkable:Boolean
def walkSpeed:Double
val isBikable:Boolean
val bikeSpeed:Double
private var _direction:WayDirection = BothWays
def direction = _direction
val tags:Map[String,String]
}
abstract sealed class WayDirection
case object OneWay extends WayDirection
case object BothWays extends WayDirection
case object OneWayReverse extends WayDirection
case object Impassable extends WayInfo {
val wayId = "IMPASSABLE"
val isWalkable = false
val isBikable = false
val walkSpeed = 0.0
val bikeSpeed = 0.0
val tags = Map[String,String]()
}
trait Walkable {
val isWalkable = true
val walkSpeed = Speeds.walking
}
trait Bikable {
val isBikable = true
val bikeSpeed = Speeds.biking
}
case class WalkOrBike(wayId:String,tags:Map[String,String]) extends WayInfo
with Walkable
with Bikable
case class WalkOnly(wayId:String,tags:Map[String,String]) extends WayInfo
with Walkable {
val isBikable = false
val bikeSpeed = 0.0
}
case class BikeOnly(wayId:String,tags:Map[String,String]) extends WayInfo
with Bikable {
val isWalkable = false
val walkSpeed = 0.0
}
object WayInfo {
// http://wiki.openstreetmap.org/wiki/Key:oneway
private val oneWayTrueValues = Set("yes","true","1")
private val oneWayReverseValues = Set("-1","reverse")
def fromTags(wayId:String,tags:Map[String,String]):WayInfo = {
var info:WayInfo = null
if(tags.contains("highway")) {
info = forHighwayType(wayId,tags)
}
if(info == null) {
if(tags.contains("public_transport")) {
if(tags("public_transport") == "platform") {
info = WalkOnly(wayId,tags)
}
}
}
if(info == null) {
if(tags.contains("railway")) {
if(tags("railway") == "platform") {
info = WalkOnly(wayId,tags)
}
}
}
info match {
case null => Impassable
case Impassable => Impassable
case _ =>
// Check for one-way
if(tags.contains("oneway")) {
val oneway = tags("oneway")
info._direction =
if(oneWayTrueValues.contains(oneway)) {
OneWay
} else if (oneWayReverseValues.contains(oneway)) {
OneWayReverse
} else {
BothWays
}
}
info
}
}
// http://wiki.openstreetmap.org/wiki/Map_Features#Highway
def forHighwayType(wayId:String,tags:Map[String,String]):WayInfo =
tags("highway") match {
case "motorway" => Impassable
case "motorway_link" => Impassable
case "trunk" => Impassable
case "trunk_link" => Impassable
case "primary" => WalkOrBike(wayId,tags)
case "primary_link" => WalkOrBike(wayId,tags)
case "secondary" => WalkOrBike(wayId,tags)
case "secondary_link" => WalkOrBike(wayId,tags)
case "tertiary" => WalkOrBike(wayId,tags)
case "tertiary_link" => WalkOrBike(wayId,tags)
case "living_street" => WalkOrBike(wayId,tags)
case "pedestrian" => WalkOrBike(wayId,tags)
case "residential" => WalkOrBike(wayId,tags)
case "unclassified" => WalkOrBike(wayId,tags)
case "service" => WalkOrBike(wayId,tags)
case "track" => WalkOrBike(wayId,tags)
case "bus_guideway" => Impassable
case "raceway" => Impassable
case "road" => WalkOrBike(wayId,tags)
case "path" => WalkOrBike(wayId,tags)
case "footway" => WalkOrBike(wayId,tags)
case "cycleway" => WalkOrBike(wayId,tags)
case "bridleway" => WalkOrBike(wayId,tags)
case "steps" => WalkOnly(wayId,tags)
case "proposed" => Impassable
case "construction" => Impassable
case "bus_stop" => WalkOnly(wayId,tags)
case "crossing" => WalkOrBike(wayId,tags)
case "emergency_access_point" => Impassable
case "escape" => Impassable
case "give_way" => Impassable
case "mini_roundabout" => WalkOrBike(wayId,tags)
case "motorway_junction" => Impassable
case "parking" => WalkOnly(wayId,tags)
case _ => Impassable
}
}
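/*
 * Hedged usage sketch (the tag maps are illustrative OSM tags):
 *
 *   val footpath = WayInfo.fromTags("42", Map("highway" -> "footway", "oneway" -> "yes"))
 *   // footpath is WalkOrBike("42", ...) and footpath.direction == OneWay
 *
 *   val motorway = WayInfo.fromTags("43", Map("highway" -> "motorway"))
 *   // motorway == Impassable
 */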
| flibbertigibbet/open-transit-indicators | scala/geotrellis-transit/src/main/scala/geotrellis/transit/loader/osm/OsmWayInfo.scala | Scala | gpl-3.0 | 4,239 |
package org.scalatra
import org.scalatra.test.scalatest.ScalatraFunSuite
class ErrorHandlerTest extends ScalatraFunSuite {
trait TestException extends RuntimeException
case class Exception1() extends TestException
case class Exception2() extends TestException
case class Exception3() extends TestException
class BaseServlet extends ScalatraServlet {
get("/1") {
status = 418
throw new Exception1
}
get("/uncaught") { throw new RuntimeException }
error { case e: TestException => "base" }
}
class ChildServlet extends BaseServlet {
get("/2") { throw new Exception2 }
error { case e: Exception2 => "child" }
}
class HaltServlet extends BaseServlet {
get("/3") { throw new Exception3 }
error { case e: Exception3 => halt(413, "no more") }
}
addServlet(new BaseServlet, "/base/*")
addServlet(new ChildServlet, "/child/*")
addServlet(new HaltServlet, "/halt/*")
test("result of error handler should be rendered") {
get("/base/1") {
body should equal("base")
}
}
test("error handlers are composable") {
get("/child/2") {
body should equal("child")
}
get("/child/1") {
body should equal("base")
}
}
test("response status should not be set on error") {
get("/base/1") {
status should equal(418)
}
}
test("rethrows uncaught exceptions") {
get("/base/uncaught") {
status should equal(500)
}
}
test("halt() can be used from error handler") {
get("/halt/3") {
status should equal(413)
body should equal("no more")
}
}
}
| lightvector/scalatra | core/src/test/scala/org/scalatra/ErrorHandlerTest.scala | Scala | bsd-2-clause | 1,600 |
package infrastructure.akka
import akka.actor.ActorSystem
import scala.reflect.ClassTag
import akka.serialization.SerializationExtension
trait SerializationSupport {
implicit protected def system: ActorSystem
def serialize(serializable: AnyRef)(implicit system: ActorSystem) = {
val serialization = SerializationExtension(system)
val serializer = serialization.findSerializerFor(serializable)
serializer.toBinary(serializable)
}
def deserialize[T](bytes: Array[Byte], serializedClass: Option[Class[_]] = None)(implicit system: ActorSystem, classTag: ClassTag[T]): T = {
val serialization = SerializationExtension(system)
val serializer = serialization.serializerFor(serializedClass.getOrElse(classTag.runtimeClass))
serializer.fromBinary(bytes, None).asInstanceOf[T]
}
}
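// Hedged usage sketch from a class that mixes in this trait (the event type is illustrative;
// the implicit `system` member above satisfies the implicit parameters):
//
//   case class OrderPlaced(orderId: String)
//   val bytes: Array[Byte] = serialize(OrderPlaced("order-1"))
//   val event: OrderPlaced = deserialize[OrderPlaced](bytes)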
| pawelkaczor/ddd-leaven-akka | src/main/scala/infrastructure/akka/SerializationSupport.scala | Scala | mit | 810 |
package io.bartholomews.spotify4s.client
import io.bartholomews.fsclient.core.FsClient
import io.bartholomews.fsclient.core.config.UserAgent
import io.bartholomews.fsclient.core.oauth.v2.OAuthV2.{AccessToken, RefreshToken}
import io.bartholomews.fsclient.core.oauth.v2.{ClientId, ClientPassword, ClientSecret}
import io.bartholomews.fsclient.core.oauth.{
AccessTokenSigner,
ClientPasswordAuthentication,
NonRefreshableTokenSigner,
Scope
}
import io.bartholomews.spotify4s.core.SpotifyClient
import io.bartholomews.spotify4s.core.entities.{SpotifyId, SpotifyUserId}
import sttp.client.{HttpURLConnectionBackend, Identity}
object ClientData {
val sampleSpotifyId: SpotifyId = SpotifyId("SAMPLE_SPOTIFY_ID")
val sampleSpotifyUserId: SpotifyUserId = SpotifyUserId("SAMPLE_SPOTIFY_USER_ID")
val sampleUserAgent: UserAgent = UserAgent(
appName = "SAMPLE_APP_NAME",
appVersion = Some("SAMPLE_APP_VERSION"),
appUrl = Some("https://bartholomews.io/sample-app-url")
)
val sampleClientId: ClientId = ClientId("SAMPLE_CLIENT_ID")
val sampleClientSecret: ClientSecret = ClientSecret("SAMPLE_CLIENT_SECRET")
val sampleClientPassword: ClientPassword = ClientPassword(sampleClientId, sampleClientSecret)
def sampleFsClient: FsClient[Identity, ClientPasswordAuthentication] = FsClient(
sampleUserAgent,
ClientPasswordAuthentication(sampleClientPassword),
HttpURLConnectionBackend()
)
val sampleAccessTokenKey: AccessToken = AccessToken(
"00000000000-0000000000000000000-0000000-0000000000000000000000000000000000000000001"
)
val sampleRefreshToken: RefreshToken = RefreshToken("SAMPLE_REFRESH_TOKEN")
val sampleAuthorizationCode: AccessTokenSigner = AccessTokenSigner(
generatedAt = 21312L,
accessToken = sampleAccessTokenKey,
tokenType = "bearer",
expiresIn = 1000L,
refreshToken = Some(sampleRefreshToken),
scope = Scope(List.empty)
)
val sampleNonRefreshableToken: NonRefreshableTokenSigner = NonRefreshableTokenSigner(
generatedAt = 21312L,
accessToken = sampleAccessTokenKey,
tokenType = "bearer",
expiresIn = 1000L,
scope = Scope(List.empty)
)
val sampleClient: SpotifyClient[Identity] = new SpotifyClient(sampleFsClient)
}
| bartholomews/spotify-scala-client | modules/circe/src/test/scala/io/bartholomews/spotify4s/client/ClientData.scala | Scala | mit | 2,237 |
package scala
package reflect
package runtime
import scala.collection.mutable
import scala.reflect.internal.Flags._
private[reflect] trait SymbolLoaders { self: SymbolTable =>
/** The standard completer for top-level classes
* @param clazz The top-level class
* @param module The companion object of `clazz`
* Calling `complete` on this type will assign the infos of `clazz` and `module`
* by unpickling information from the corresponding Java class. If no Java class
* is found, a package is created instead.
*/
class TopClassCompleter(clazz: ClassSymbol, module: ModuleSymbol) extends SymLoader with FlagAssigningCompleter {
markFlagsCompleted(clazz, module)(mask = ~TopLevelPickledFlags)
override def complete(sym: Symbol) = {
debugInfo("completing "+sym+"/"+clazz.fullName)
assert(sym == clazz || sym == module || sym == module.moduleClass)
slowButSafeEnteringPhaseNotLaterThan(picklerPhase) {
val loadingMirror = mirrorThatLoaded(sym)
val javaClass = loadingMirror.javaClass(clazz.javaClassName)
loadingMirror.unpickleClass(clazz, module, javaClass)
// NOTE: can't mark as thread-safe here, because unpickleClass might decide to delegate to FromJavaClassCompleter
// if (!isCompilerUniverse) markAllCompleted(clazz, module)
}
}
override def load(sym: Symbol) = complete(sym)
}
/** Create a class and a companion object, enter in enclosing scope,
* and initialize with a lazy type completer.
* @param owner The owner of the newly created class and object
* @param name The simple name of the newly created class
* @param completer The completer to be used to set the info of the class and the module
*/
protected def initAndEnterClassAndModule(owner: Symbol, name: TypeName, completer: (ClassSymbol, ModuleSymbol) => LazyType) = {
assert(!(name.toString endsWith "[]"), name)
val clazz = owner.newClass(name)
val module = owner.newModule(name.toTermName)
// without this check test/files/run/t5256g and test/files/run/t5256h will crash
// todo. reflection meeting verdict: need to enter the symbols into the first symbol in the owner chain that has a non-empty scope
if (owner.info.decls != EmptyScope) {
owner.info.decls enter clazz
owner.info.decls enter module
}
initClassAndModule(clazz, module, completer(clazz, module))
(clazz, module)
}
protected def setAllInfos(clazz: Symbol, module: Symbol, info: Type) = {
List(clazz, module, module.moduleClass) foreach (_ setInfo info)
}
protected def initClassAndModule(clazz: Symbol, module: Symbol, completer: LazyType) =
setAllInfos(clazz, module, completer)
/** The type completer for packages.
*/
class LazyPackageType extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) {
assert(sym.isPackageClass)
// Time travel to a phase before refchecks avoids an initialization issue. `openPackageModule`
      // creates a module symbol and invokes `companionModule` while the `infos` field is
// still null. This calls `isModuleNotMethod`, which forces the `info` if run after refchecks.
slowButSafeEnteringPhaseNotLaterThan(picklerPhase) {
sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym)
// override def safeToString = pkgClass.toString
openPackageModule(sym)
markAllCompleted(sym)
}
}
}
// Since runtime reflection doesn't have a luxury of enumerating all classes
// on the classpath, it has to materialize symbols for top-level definitions
// (packages, classes, objects) on demand.
//
// Someone asks us for a class named `foo.Bar`? Easy. Let's speculatively create
// a package named `foo` and then look up `newTypeName("bar")` in its decls.
  // This lookup, implemented in `SymbolLoaders.PackageScope`, tests the waters by
  // trying to call `Class.forName("foo.Bar")` and then creates a ClassSymbol upon
// success (the whole story is a bit longer, but the rest is irrelevant here).
//
// That's all neat, but these non-deterministic mutations of the global symbol
// table give a lot of trouble in multi-threaded setting. One of the popular
// reflection crashes happens when multiple threads happen to trigger symbol
// materialization multiple times for the same symbol, making subsequent
// reflective operations stumble upon outrageous stuff like overloaded packages.
//
// Short of significantly changing SymbolLoaders I see no other way than just
// to slap a global lock on materialization in runtime reflection.
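  //
  // For example (illustrative only), a reflective lookup such as
  //
  //   val mirror = scala.reflect.runtime.universe.runtimeMirror(getClass.getClassLoader)
  //   val barSym = mirror.staticClass("foo.Bar")
  //
  // is what ultimately drives PackageScope.lookupEntry below: the package `foo` is created
  // speculatively and `Bar` is materialized on first lookup.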
class PackageScope(pkgClass: Symbol) extends Scope
with SynchronizedScope {
assert(pkgClass.isType)
// materializing multiple copies of the same symbol in PackageScope is a very popular bug
// this override does its best to guard against it
override def enter[T <: Symbol](sym: T): T = {
// workaround for SI-7728
if (isCompilerUniverse) super.enter(sym)
else {
val existing = super.lookupEntry(sym.name)
def eitherIsMethod(sym1: Symbol, sym2: Symbol) = sym1.isMethod || sym2.isMethod
assert(existing == null || eitherIsMethod(existing.sym, sym), s"pkgClass = $pkgClass, sym = $sym, existing = $existing")
super.enter(sym)
}
}
override def enterIfNew[T <: Symbol](sym: T): T = {
val existing = super.lookupEntry(sym.name)
if (existing == null) enter(sym)
else existing.sym.asInstanceOf[T]
}
// package scopes need to synchronize on the GIL
// because lookupEntry might cause changes to the global symbol table
override def syncLockSynchronized[T](body: => T): T = gilSynchronized(body)
private val negatives = new mutable.HashSet[Name]
override def lookupEntry(name: Name): ScopeEntry = syncLockSynchronized {
val e = super.lookupEntry(name)
if (e != null)
e
else if (negatives contains name)
null
else {
val path =
if (pkgClass.isEmptyPackageClass) name.toString
else pkgClass.fullName + "." + name
val currentMirror = mirrorThatLoaded(pkgClass)
currentMirror.tryJavaClass(path) match {
case Some(cls) =>
val loadingMirror = currentMirror.mirrorDefining(cls)
val (_, module) =
if (loadingMirror eq currentMirror) {
initAndEnterClassAndModule(pkgClass, name.toTypeName, new TopClassCompleter(_, _))
} else {
val origOwner = loadingMirror.packageNameToScala(pkgClass.fullName)
val clazz = origOwner.info decl name.toTypeName
val module = origOwner.info decl name.toTermName
assert(clazz != NoSymbol)
assert(module != NoSymbol)
// currentMirror.mirrorDefining(cls) might side effect by entering symbols into pkgClass.info.decls
// therefore, even though in the beginning of this method, super.lookupEntry(name) returned null
// entering clazz/module now will result in a double-enter assertion in PackageScope.enter
// here's how it might happen
// 1) we are the rootMirror
// 2) cls.getClassLoader is different from our classloader
// 3) mirrorDefining(cls) looks up a mirror corresponding to that classloader and cannot find it
// 4) mirrorDefining creates a new mirror
// 5) that triggers Mirror.init() of the new mirror
// 6) that triggers definitions.syntheticCoreClasses
// 7) that might materialize symbols and enter them into our scope (because syntheticCoreClasses live in rootMirror)
// 8) now we come back here and try to enter one of the now entered symbols => BAM!
// therefore we use enterIfNew rather than just enter
enterIfNew(clazz)
enterIfNew(module)
(clazz, module)
}
debugInfo(s"created $module/${module.moduleClass} in $pkgClass")
lookupEntry(name)
case none =>
debugInfo("*** not found : "+path)
negatives += name
null
}
}
}
}
/** Assert that packages have package scopes */
override def validateClassInfo(tp: ClassInfoType) {
assert(!tp.typeSymbol.isPackageClass || tp.decls.isInstanceOf[PackageScope])
}
override def newPackageScope(pkgClass: Symbol) = new PackageScope(pkgClass)
override def scopeTransform(owner: Symbol)(op: => Scope): Scope =
if (owner.isPackageClass) owner.info.decls else op
}
| felixmulder/scala | src/reflect/scala/reflect/runtime/SymbolLoaders.scala | Scala | bsd-3-clause | 8,732 |
/*
* Copyright (c) 2015 Elder Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eri.viz.gui.jfx.monadic
/**
* Binding tests for monadic
*
* @author <a href="mailto:[email protected]">Simeon H.K. Fitch</a>
* @since 10/3/15
*/
class MonadSyntaxTests extends MonadicJFXTestSpec {
describe("ObservableObjectValue operators") {
new Fixture {
val p = propA
val foo = for {
a โ p
d โ a.propD
s โ d.propB
} yield a
assert(foo.get === null)
}
}
}
| ElderResearch/monadic-jfx | src/test/scala/eri/viz/gui/jfx/monadic/MonadSyntaxTests.scala | Scala | apache-2.0 | 1,057 |
package me.flygare.utils
import _root_.me.flygare.models._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import spray.json.DefaultJsonProtocol
object JsonSupport extends DefaultJsonProtocol with SprayJsonSupport {
implicit val personFormat = jsonFormat2(Person)
implicit val personDB = jsonFormat3(PersonDB)
}
| flygare/Minopt | PersonService/src/main/scala/me/flygare/utils/JsonSupport.scala | Scala | mit | 335 |
package skuber.ext
import skuber.ResourceSpecification.{Names, Scope}
import skuber.{LabelSelector, NonCoreResourceSpecification, ObjectMeta, ObjectResource, Pod, ResourceDefinition}
/**
* @author Cory Klein
*/
case class DaemonSet(val kind: String ="DaemonSet",
override val apiVersion: String = extensionsAPIVersion,
val metadata: ObjectMeta,
spec: Option[DaemonSet.Spec] = None,
status: Option[DaemonSet.Status] = None)
extends ObjectResource {
lazy val copySpec = this.spec.getOrElse(new DaemonSet.Spec)
def withTemplate(template: Pod.Template.Spec) = this.copy(spec=Some(copySpec.copy(template=Some(template))))
def withLabelSelector(sel: LabelSelector) = this.copy(spec=Some(copySpec.copy(selector=Some(sel))))
}
object DaemonSet {
val specification=NonCoreResourceSpecification (
apiGroup="extensions",
version="v1beta1",
scope = Scope.Namespaced,
names=Names(
plural = "daemonsets",
singular = "daemonset",
kind = "DaemonSet",
shortNames = List("ds")
)
)
implicit val dsDef = new ResourceDefinition[DaemonSet] { def spec=specification }
implicit val dsListDef = new ResourceDefinition[DaemonSetList] { def spec=specification }
def apply(name: String) = new DaemonSet(metadata=ObjectMeta(name=name))
case class Spec(
minReadySeconds: Int = 0,
selector: Option[LabelSelector] = None,
template: Option[Pod.Template.Spec] = None,
updateStrategy: Option[UpdateStrategy] = None,
revisionHistoryLimit: Option[Int] = None
)
case class UpdateStrategy(`type`: Option[String] = Some("OnDelete"), rollingUpdate: Option[RollingUpdate]=None)
case class RollingUpdate(maxUnavailable: Int = 1)
case class Status(
currentNumberScheduled: Int,
numberMisscheduled: Int,
desiredNumberScheduled: Int,
numberReady: Int,
observedGeneration: Option[Long],
updatedNumberScheduled: Option[Int],
numberAvailable: Option[Int],
numberUnavailable:Option[Int],
collisionCount:Option[Long]
)
}
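// A minimal usage sketch (hypothetical, not part of the original file): the object
// and parameter names below are invented; only the DaemonSet helpers defined above
// and the skuber types imported at the top of this file are assumed.
object DaemonSetUsageSketch {
  def fluentdDaemonSet(template: Pod.Template.Spec, selector: LabelSelector): DaemonSet =
    DaemonSet("fluentd")
      .withLabelSelector(selector)
      .withTemplate(template)
}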
| doriordan/skuber | client/src/main/scala/skuber/ext/DaemonSet.scala | Scala | apache-2.0 | 2,099 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.examples.classification
import org.apache.predictionio.controller.OptionAverageMetric
import org.apache.predictionio.controller.EmptyEvaluationInfo
import org.apache.predictionio.controller.Evaluation
case class Precision(label: Double)
extends OptionAverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
override def header: String = s"Precision(label = $label)"
override
def calculate(query: Query, predicted: PredictedResult, actual: ActualResult)
: Option[Double] = {
if (predicted.label == label) {
if (predicted.label == actual.label) {
Some(1.0) // True positive
} else {
Some(0.0) // False positive
}
} else {
None // Unrelated case for calculating precision
}
}
}
object PrecisionEvaluation extends Evaluation {
engineMetric = (ClassificationEngine(), Precision(label = 1.0))
}
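// Illustrative sketch (hypothetical, not part of the original example): averaging the
// per-query Option scores produced above is exactly precision for the chosen label.
// The names below are invented and independent of PredictionIO's Query/Result classes.
object PrecisionByHandSketch {
  /** `outcomes` are (predicted label, actual label) pairs. */
  def precision(label: Double, outcomes: Seq[(Double, Double)]): Option[Double] = {
    val scores = outcomes.collect {
      case (predicted, actual) if predicted == label =>
        if (predicted == actual) 1.0 else 0.0 // true positive -> 1.0, false positive -> 0.0
    }
    if (scores.isEmpty) None else Some(scores.sum / scores.size)
  }
  // precision(1.0, Seq((1.0, 1.0), (1.0, 0.0), (0.0, 0.0))) == Some(0.5)
}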
| PredictionIO/PredictionIO | examples/scala-parallel-classification/reading-custom-properties/src/main/scala/PrecisionEvaluation.scala | Scala | apache-2.0 | 1,715 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.jdk
import java.util.{Optional, OptionalDouble, OptionalInt, OptionalLong}
/** This object provides extension methods that convert between Scala `Option` and Java `Optional`
* types.
*
* When writing Java code, use the explicit conversion methods defined in
* [[javaapi.OptionConverters]] instead.
*
* Scala `Option` is extended with a `toJava` method that creates a corresponding `Optional`, and
* a `toJavaPrimitive` method that creates a specialized variant (e.g., `OptionalInt`) if
* applicable.
*
* Java `Optional` is extended with a `toScala` method and a `toJavaPrimitive` method.
*
* Finally, specialized `Optional` types are extended with `toScala` and `toJavaGeneric` methods.
*
* Example usage:
*
* {{{
* import scala.jdk.OptionConverters._
* val a = Option("example").toJava // Creates java.util.Optional[String] containing "example"
* val b = (None: Option[String]).toJava // Creates an empty java.util.Optional[String]
* val c = a.toScala // Back to Option("example")
* val d = b.toScala // Back to None typed as Option[String]
* val e = Option(2.7).toJava // java.util.Optional[Double] containing boxed 2.7
* val f = Option(2.7).toJavaPrimitive // java.util.OptionalDouble containing 2.7 (not boxed)
* val g = f.toScala // Back to Option(2.7)
* val h = f.toJavaGeneric // Same as e
* val i = e.toJavaPrimitive // Same as f
* }}}
*/
object OptionConverters {
/** Provides conversions from Java `Optional` to Scala `Option` and specialized `Optional` types */
implicit class RichOptional[A](private val o: java.util.Optional[A]) extends AnyVal {
/** Convert a Java `Optional` to a Scala `Option` */
def toScala: Option[A] = if (o.isPresent) Some(o.get) else None
/** Convert a Java `Optional` to a Scala `Option` */
@deprecated("Use `toScala` instead", "2.13.0")
def asScala: Option[A] = if (o.isPresent) Some(o.get) else None
/** Convert a generic Java `Optional` to a specialized variant */
def toJavaPrimitive[O](implicit shape: OptionShape[A, O]): O = shape.fromJava(o)
}
/** Provides conversions from Scala `Option` to Java `Optional` types */
implicit class RichOption[A](private val o: Option[A]) extends AnyVal {
/** Convert a Scala `Option` to a generic Java `Optional` */
def toJava: Optional[A] = o match { case Some(a) => Optional.ofNullable(a); case _ => Optional.empty[A] }
/** Convert a Scala `Option` to a generic Java `Optional` */
@deprecated("Use `toJava` instead", "2.13.0")
def asJava: Optional[A] = o match { case Some(a) => Optional.ofNullable(a); case _ => Optional.empty[A] }
/** Convert a Scala `Option` to a specialized Java `Optional` */
def toJavaPrimitive[O](implicit shape: OptionShape[A, O]): O = shape.fromScala(o)
}
/** Provides conversions from `OptionalDouble` to Scala `Option` and the generic `Optional` */
implicit class RichOptionalDouble(private val o: OptionalDouble) extends AnyVal {
/** Convert a Java `OptionalDouble` to a Scala `Option` */
def toScala: Option[Double] = if (o.isPresent) Some(o.getAsDouble) else None
/** Convert a Java `OptionalDouble` to a Scala `Option` */
@deprecated("Use `toScala` instead", "2.13.0")
def asScala: Option[Double] = if (o.isPresent) Some(o.getAsDouble) else None
/** Convert a Java `OptionalDouble` to a generic Java `Optional` */
def toJavaGeneric: Optional[Double] = if (o.isPresent) Optional.of(o.getAsDouble) else Optional.empty[Double]
}
/** Provides conversions from `OptionalInt` to Scala `Option` and the generic `Optional` */
implicit class RichOptionalInt(private val o: OptionalInt) extends AnyVal {
/** Convert a Java `OptionalInt` to a Scala `Option` */
def toScala: Option[Int] = if (o.isPresent) Some(o.getAsInt) else None
/** Convert a Java `OptionalInt` to a Scala `Option` */
@deprecated("Use `toScala` instead", "2.13.0")
def asScala: Option[Int] = if (o.isPresent) Some(o.getAsInt) else None
/** Convert a Java `OptionalInt` to a generic Java `Optional` */
def toJavaGeneric: Optional[Int] = if (o.isPresent) Optional.of(o.getAsInt) else Optional.empty[Int]
}
/** Provides conversions from `OptionalLong` to Scala `Option` and the generic `Optional` */
implicit class RichOptionalLong(private val o: OptionalLong) extends AnyVal {
/** Convert a Java `OptionalLong` to a Scala `Option` */
def toScala: Option[Long] = if (o.isPresent) Some(o.getAsLong) else None
/** Convert a Java `OptionalLong` to a Scala `Option` */
@deprecated("Use `toScala` instead", "2.13.0")
def asScala: Option[Long] = if (o.isPresent) Some(o.getAsLong) else None
/** Convert a Java `OptionalLong` to a generic Java `Optional` */
def toJavaGeneric: Optional[Long] = if (o.isPresent) Optional.of(o.getAsLong) else Optional.empty[Long]
}
}
| scala/scala | src/library/scala/jdk/OptionConverters.scala | Scala | apache-2.0 | 5,298 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.allqueries
import java.io.File
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.sql.SaveMode
import org.apache.spark.sql.test.util.QueryTest
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
/**
  * Test cases for queries on a table containing only measure (numeric) columns,
  * validated against results from an equivalent Hive table.
  */
class MeasureOnlyTableTestCases extends QueryTest with BeforeAndAfterAll {
val rootPath = new File(this.getClass.getResource("/").getPath
+ "../../../..").getCanonicalPath
val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
override def beforeAll {
clean
sql(s"""
| CREATE TABLE carbon_table(
| shortField SMALLINT,
| intField INT,
| bigintField BIGINT,
| doubleField DOUBLE,
| floatField FLOAT,
| decimalField DECIMAL(18,2)
| )
| STORED BY 'carbondata'
""".stripMargin)
val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
sql(
s"""
| LOAD DATA LOCAL INPATH '$path'
| INTO TABLE carbon_table
| OPTIONS('FILEHEADER'='shortField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData',
| 'COMPLEX_DELIMITER_LEVEL_1'='#')
""".stripMargin)
sql("create table if not exists carbon_table_hive (shortField SMALLINT,intField INT," +
"bigintField BIGINT,doubleField DOUBLE,stringField STRING,timestampField TIMESTAMP," +
"decimalField DECIMAL(18,2),dateField DATE,charField CHAR(5),floatField FLOAT,complexData ARRAY<STRING>)row format delimited fields terminated by ','")
sql(s"""LOAD DATA LOCAL INPATH '$path' INTO table carbon_table_hive""")
}
def clean {
sql("drop table if exists carbon_table")
sql("drop table if exists carbon_table_hive")
}
override def afterAll {
clean
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
}
test("SELECT sum(intField) FROM carbon_table where intField > 10") {
checkAnswer(
sql("SELECT sum(intField) FROM carbon_table where intField > 10"),
sql("SELECT sum(intField) FROM carbon_table_hive where intField > 10")
)
}
test("SELECT sum(intField),sum(doubleField) FROM carbon_table where intField > 10 OR doubleField > 10") {
checkAnswer(
sql("SELECT sum(intField),sum(doubleField) FROM carbon_table where intField > 10 OR " +
"doubleField > 10"),
sql("SELECT sum(intField),sum(doubleField) FROM carbon_table_hive where intField > 10 OR " +
"doubleField > 10")
)
}
test("SELECT sum(decimalField) FROM carbon_table") {
checkAnswer(
sql("SELECT sum(decimalField) FROM carbon_table"),
sql("SELECT sum(decimalField) FROM carbon_table_hive")
)
}
test("SELECT count(*), sum(intField) FROM carbon_table where intField > 10") {
checkAnswer(
sql("SELECT count(*), sum(intField) FROM carbon_table where intField > 10"),
sql("SELECT count(*), sum(intField) FROM carbon_table_hive where intField > 10")
)
}
test("SELECT count(*), sum(decimalField) b FROM carbon_table order by b") {
checkAnswer(
sql("SELECT count(*), sum(decimalField) b FROM carbon_table order by b"),
sql("SELECT count(*), sum(decimalField) b FROM carbon_table_hive order by b")
)
}
test("SELECT intField, sum(floatField) total FROM carbon_table group by intField order by " +
"total") {
checkAnswer(
sql("SELECT intField, sum(floatField) total FROM carbon_table group by intField order by " +
"total"),
sql("SELECT intField, sum(floatField) total FROM carbon_table_hive group by intField order " +
"by total")
)
}
test("select shortField, avg(intField+ 10) as a from carbon_table group by shortField") {
checkAnswer(
sql("select shortField, avg(intField+ 10) as a from carbon_table group by shortField"),
sql("select shortField, avg(intField+ 10) as a from carbon_table_hive group by shortField")
)
}
test("select shortField, avg(intField+ 10) as a from carbon_table group by shortField order by " +
"a") {
checkAnswer(
sql("select shortField, avg(intField+ 10) as a from carbon_table group by shortField order " +
"by a"),
sql("select shortField, avg(intField+ 10) as a from carbon_table_hive group by shortField " +
"order by a")
)
}
test("select shortField, avg(intField+ intField) as a from carbon_table group by shortField " +
"order by a") {
checkAnswer(
sql("select shortField, avg(intField+ intField) as a from carbon_table group by shortField order " +
"by a"),
sql("select shortField, avg(intField+ intField) as a from carbon_table_hive group by " +
"shortField order by a")
)
}
test("select shortField, count(intField+ 10) as a from carbon_table group by shortField order " +
"by a") {
checkAnswer(
sql("select shortField, count(intField+ 10) as a from carbon_table group by shortField " +
"order by a"),
sql("select shortField, count(intField+ 10) as a from carbon_table_hive group by shortField" +
" order by a")
)
}
test("select shortField, min(intField+ 10) as a from carbon_table group by shortField order " +
"by a") {
checkAnswer(
sql("select shortField, min(intField+ 10) as a from carbon_table group by shortField " +
"order by a"),
sql("select shortField, min(intField+ 10) as a from carbon_table_hive group by shortField " +
"order by a")
)
}
test("select shortField, max(intField+ 10) as a from carbon_table group by shortField order " +
"by a") {
checkAnswer(
sql("select shortField, count(intField+ 10) as a from carbon_table group by shortField " +
"order by a"),
sql("select shortField, count(intField+ 10) as a from carbon_table_hive group by shortField" +
" order by a")
)
}
test("select shortField, sum(distinct intField) + 10 as a from carbon_table group by shortField" +
"order by a") {
checkAnswer(
sql("select shortField, sum(distinct intField) + 10 as a from carbon_table group by " +
"shortField order by a"),
sql("select shortField, sum(distinct intField) + 10 as a from carbon_table_hive group by " +
"shortField order by a")
)
}
test("select sum(doubleField) + 7.28 as a, intField from carbon_table group by intField") {
checkAnswer(
sql("select sum(doubleField) + 7.28 as a, intField from carbon_table group by intField"),
sql("select sum(doubleField) + 7.28 as a, intField from carbon_table_hive group by intField")
)
}
test("select count(floatField) + 7.28 a, intField from carbon_table group by intField") {
checkAnswer(
sql("select count(floatField) + 7.28 a, intField from carbon_table group by intField"),
sql("select count(floatField) + 7.28 a, intField from carbon_table_hive group by intField")
)
}
test("select count(distinct floatField) + 7.28 a, intField from carbon_table group by " +
"intField") {
checkAnswer(
sql("select count(distinct floatField) + 7.28 a, intField from carbon_table group by intField"),
sql("select count(distinct floatField) + 7.28 a, intField from carbon_table_hive group" +
" by intField")
)
}
test("select count (if(doubleField>100,NULL,doubleField)) a from carbon_table") {
checkAnswer(
sql("select count (if(doubleField>100,NULL,doubleField)) a from carbon_table"),
sql("select count (if(doubleField>100,NULL,doubleField)) a from carbon_table_hive")
)
}
test("select count (if(decimalField>100,NULL,decimalField)) a from carbon_table") {
checkAnswer(
sql("select count (if(decimalField>100,NULL,decimalField)) a from carbon_table"),
sql("select count (if(decimalField>100,NULL,decimalField)) a from carbon_table_hive")
)
}
test("select avg (if(floatField>100,NULL,floatField)) a from carbon_table") {
checkAnswer(
sql("select avg (if(floatField>100,NULL,floatField)) a from carbon_table"),
sql("select avg (if(floatField>100,NULL,floatField)) a from carbon_table_hive")
)
}
test("select min (if(intField>100,NULL,intField)) a from carbon_table") {
checkAnswer(
sql("select min (if(intField>3,NULL,intField)) a from carbon_table"),
sql("select min (if(intField>3,NULL,intField)) a from carbon_table_hive")
)
}
test("select max (if(intField>5,NULL,intField)) a from carbon_table")({
checkAnswer(
sql("select max (if(intField>5,NULL,intField)) a from carbon_table"),
sql("select max (if(intField>5,NULL,intField)) a from carbon_table_hive")
)
})
test("select variance(doubleField) as a from carbon_table")({
checkAnswer(
sql("select variance(doubleField) as a from carbon_table"),
sql("select variance(doubleField) as a from carbon_table_hive")
)
})
test("select var_samp(doubleField) as a from carbon_table")({
checkAnswer(
sql("select var_samp(doubleField) as a from carbon_table"),
sql("select var_samp(doubleField) as a from carbon_table_hive")
)
})
test("select stddev_pop(doubleField) as a from carbon_table")({
checkAnswer(
sql("select stddev_pop(doubleField) as a from carbon_table"),
sql("select stddev_pop(doubleField) as a from carbon_table_hive")
)
})
//TC_106
test("select stddev_samp(doubleField) as a from carbon_table")({
checkAnswer(
sql("select stddev_samp(doubleField) as a from carbon_table"),
sql("select stddev_samp(doubleField) as a from carbon_table_hive")
)
})
test("select covar_pop(doubleField,doubleField) as a from carbon_table")({
checkAnswer(
sql("select covar_pop(doubleField,doubleField) as a from carbon_table"),
sql("select covar_pop(doubleField,doubleField) as a from carbon_table_hive")
)
})
test("select covar_samp(doubleField,doubleField) as a from carbon_table")({
checkAnswer(
sql("select covar_samp(doubleField,doubleField) as a from carbon_table"),
sql("select covar_samp(doubleField,doubleField) as a from carbon_table_hive")
)
})
test("select corr(doubleField,doubleField) as a from carbon_table")({
checkAnswer(
sql("select corr(doubleField,doubleField) as a from carbon_table"),
sql("select corr(doubleField,doubleField) as a from carbon_table_hive")
)
})
test("select percentile(bigintField,0.2) as a from carbon_table")({
checkAnswer(
sql("select percentile(bigintField,0.2) as a from carbon_table"),
sql("select percentile(bigintField,0.2) as a from carbon_table_hive"))
})
test("select last(doubleField) a from carbon_table")({
checkAnswer(
sql("select last(doubleField) a from carbon_table"),
sql("select last(doubleField) a from carbon_table_hive")
)
})
test("select intField from carbon_table where carbon_table.intField IN (3,2)")({
checkAnswer(
sql("select intField from carbon_table where carbon_table.intField IN (3,2)"),
sql("select intField from carbon_table_hive where carbon_table_hive.intField IN (3,2)")
)
})
test("select intField from carbon_table where carbon_table.intField NOT IN (3,2)")({
checkAnswer(
sql("select intField from carbon_table where carbon_table.intField NOT IN (3,2)"),
sql("select intField from carbon_table_hive where carbon_table_hive.intField NOT IN (3,2)")
)
})
test("select intField,sum(floatField) a from carbon_table group by intField order by a " +
"desc")({
checkAnswer(
sql("select intField,sum(floatField) a from carbon_table group by intField order by " +
"a desc"),
sql("select intField,sum(floatField) a from carbon_table_hive group by intField order by " +
"a desc")
)
})
test("select intField,sum(floatField) a from carbon_table group by intField order by a" +
" asc")({
checkAnswer(
sql("select intField,sum(floatField) a from carbon_table group by intField order by " +
"a asc"),
sql("select intField,sum(floatField) a from carbon_table_hive group by intField order by " +
"a asc")
)
})
test("select doubleField from carbon_table where doubleField NOT BETWEEN intField AND floatField")({
checkAnswer(
sql("select doubleField from carbon_table where doubleField NOT BETWEEN intField AND floatField"),
sql("select doubleField from carbon_table_hive where doubleField NOT BETWEEN intField AND " +
"floatField")
)
})
test("select cast(doubleField as int) as a from carbon_table limit 10")({
checkAnswer(
sql("select cast(doubleField as int) as a from carbon_table limit 10"),
sql("select cast(doubleField as int) as a from carbon_table_hive limit 10")
)
})
test("select percentile_approx(1, 0.5 ,5000) from carbon_table")({
checkAnswer(
sql("select percentile_approx(1, 0.5 ,5000) from carbon_table"),
sql("select percentile_approx(1, 0.5 ,5000) from carbon_table_hive")
)
})
test("CARBONDATA-60-union-defect")({
sql("drop table if exists carbonunion")
import sqlContext.implicits._
val df = sqlContext.sparkContext.parallelize(1 to 1000).map(x => (x, (x+100))).toDF("c1", "c2")
df.createOrReplaceTempView("sparkunion")
df.write
.format("carbondata")
.mode(SaveMode.Overwrite)
.option("tableName", "carbonunion")
.save()
checkAnswer(
sql("select c1,count(c1) from (select c1 as c1,c2 as c2 from carbonunion union all select c2 as c1,c1 as c2 from carbonunion)t where c1=200 group by c1"),
sql("select c1,count(c1) from (select c1 as c1,c2 as c2 from sparkunion union all select c2 as c1,c1 as c2 from sparkunion)t where c1=200 group by c1"))
sql("drop table if exists carbonunion")
})
test("select b.intField from carbon_table a join carbon_table b on a.intField=b.intField")({
checkAnswer(
sql("select b.intField from carbon_table a join carbon_table b on a.intField=b.intField"),
sql("select b.intField from carbon_table_hive a join carbon_table_hive b on a.intField=b.intField"))
})
}
| jatin9896/incubator-carbondata | integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala | Scala | apache-2.0 | 15,279 |
package strawman.collections
import Predef.{augmentString as _, wrapString as _, *}
import scala.reflect.ClassTag
import annotation.unchecked.uncheckedVariance
import annotation.tailrec
/** A strawman architecture for new collections. It contains some
* example collection classes and methods with the intent to expose
* some key issues. It would be good to compare this to other
* implementations of the same functionality, to get an idea of the
* strengths and weaknesses of different collection architectures.
*
* For a test file, see tests/run/CollectionTests.scala.
*
* Strawman5 is like strawman4, but using inheritance through ...Like traits
* instead of decorators.
*
* Advantage: Much easier to specialize. See partition for strict (buildable) collections
* or drop for Lists.
*
* Disadvantage: More "weird" types in base traits; some awkwardness with
* @uncheckedVariance.
*/
object CollectionStrawMan5 {
/* ------------ Base Traits -------------------------------- */
/** Iterator can be used only once */
trait IterableOnce[+A] {
def iterator: Iterator[A]
}
/** Base trait for instances that can construct a collection from an iterable */
trait FromIterable[+C[X] <: Iterable[X]] {
def fromIterable[B](it: Iterable[B]): C[B]
}
/** Base trait for companion objects of collections */
trait IterableFactory[+C[X] <: Iterable[X]] extends FromIterable[C] {
def empty[X]: C[X] = fromIterable(View.Empty)
def apply[A](xs: A*): C[A] = fromIterable(View.Elems(xs*))
}
/** Base trait for generic collections */
trait Iterable[+A] extends IterableOnce[A] with IterableLike[A, Iterable] {
protected def coll: Iterable[A] = this
def knownLength: Int = -1
}
/** Base trait for sequence collections */
trait Seq[+A] extends Iterable[A] with SeqLike[A, Seq] {
def apply(i: Int): A
def length: Int
}
/** Base trait for strict collections */
trait Buildable[+A, +To <: Iterable[A]] extends Iterable[A] {
protected[this] def newBuilder: Builder[A @uncheckedVariance, To]
override def partition(p: A => Boolean): (To, To) = {
val l, r = newBuilder
iterator.foreach(x => (if (p(x)) l else r) += x)
(l.result, r.result)
}
// one might also override other transforms here to avoid generating
// iterators if it helps efficiency.
}
/** Base trait for collection builders */
trait Builder[-A, +To] {
def +=(x: A): this.type
def result: To
def ++=(xs: IterableOnce[A]): this.type = {
xs.iterator.foreach(+=)
this
}
}
/* ------------ Operations ----------------------------------- */
/** Base trait for Iterable operations
*
* VarianceNote
* ============
*
* We require that for all child classes of Iterable the variance of
* the child class and the variance of the `C` parameter passed to `IterableLike`
* are the same. We cannot express this since we lack variance polymorphism. That's
* why we have to resort at some places to write `C[A @uncheckedVariance]`.
*
*/
trait IterableLike[+A, +C[X] <: Iterable[X]]
extends FromIterable[C]
with IterableOps[A]
with IterableMonoTransforms[A, C[A @uncheckedVariance]] // sound bcs of VarianceNote
with IterablePolyTransforms[A, C] {
protected[this] def fromLikeIterable(coll: Iterable[A @uncheckedVariance]): C[A @uncheckedVariance] = fromIterable(coll)
}
/** Base trait for Seq operations */
trait SeqLike[+A, +C[X] <: Seq[X]]
extends IterableLike[A, C] with SeqMonoTransforms[A, C[A @uncheckedVariance]] // sound bcs of VarianceNote
trait IterableOps[+A] extends Any {
def iterator: Iterator[A]
def foreach(f: A => Unit): Unit = iterator.foreach(f)
def foldLeft[B](z: B)(op: (B, A) => B): B = iterator.foldLeft(z)(op)
def foldRight[B](z: B)(op: (A, B) => B): B = iterator.foldRight(z)(op)
def indexWhere(p: A => Boolean): Int = iterator.indexWhere(p)
def isEmpty: Boolean = !iterator.hasNext
def head: A = iterator.next()
def view: View[A] = View.fromIterator(iterator)
}
trait IterableMonoTransforms[+A, +Repr] extends Any {
protected def coll: Iterable[A]
protected[this] def fromLikeIterable(coll: Iterable[A] @scala.annotation.unchecked.uncheckedVariance): Repr
def filter(p: A => Boolean): Repr = fromLikeIterable(View.Filter(coll, p))
def partition(p: A => Boolean): (Repr, Repr) = {
val pn = View.Partition(coll, p)
(fromLikeIterable(pn.left), fromLikeIterable(pn.right))
}
def drop(n: Int): Repr = fromLikeIterable(View.Drop(coll, n))
def to[C[X] <: Iterable[X]](fi: FromIterable[C]): C[A @uncheckedVariance] =
// variance seems sound because `to` could just as well have been added
// as a decorator. We should investigate this further to be sure.
fi.fromIterable(coll)
}
trait IterablePolyTransforms[+A, +C[A]] extends Any {
protected def coll: Iterable[A]
def fromIterable[B](coll: Iterable[B]): C[B]
def map[B](f: A => B): C[B] = fromIterable(View.Map(coll, f))
def flatMap[B](f: A => IterableOnce[B]): C[B] = fromIterable(View.FlatMap(coll, f))
def ++[B >: A](xs: IterableOnce[B]): C[B] = fromIterable(View.Concat(coll, xs))
def zip[B](xs: IterableOnce[B]): C[(A @uncheckedVariance, B)] = fromIterable(View.Zip(coll, xs))
// sound bcs of VarianceNote
}
trait SeqMonoTransforms[+A, +Repr] extends Any with IterableMonoTransforms[A, Repr] {
def reverse: Repr = {
var xs: List[A] = Nil
var it = coll.iterator
while (it.hasNext) xs = new Cons(it.next(), xs)
fromLikeIterable(xs)
}
}
/* --------- Concrete collection types ------------------------------- */
/** Concrete collection type: List */
sealed trait List[+A] extends Seq[A] with SeqLike[A, List] with Buildable[A, List[A]] { self =>
def isEmpty: Boolean
def head: A
def tail: List[A]
def iterator = new Iterator[A] {
private[this] var current = self
def hasNext = !current.isEmpty
def next() = { val r = current.head; current = current.tail; r }
}
def fromIterable[B](c: Iterable[B]): List[B] = List.fromIterable(c)
def apply(i: Int): A = {
require(!isEmpty)
if (i == 0) head else tail.apply(i - 1)
}
def length: Int =
if (isEmpty) 0 else 1 + tail.length
protected[this] def newBuilder = new ListBuffer[A @uncheckedVariance]
def ++:[B >: A](prefix: List[B]): List[B] =
if (prefix.isEmpty) this
else Cons(prefix.head, prefix.tail ++: this)
override def ++[B >: A](xs: IterableOnce[B]): List[B] = xs match {
case xs: List[B] => this ++: xs
case _ => super.++(xs)
}
@tailrec final override def drop(n: Int) =
if (n > 0) tail.drop(n - 1) else this
}
case class Cons[+A](x: A, private[collections] var next: List[A @uncheckedVariance]) // sound because `next` is used only locally
extends List[A] {
override def isEmpty = false
override def head = x
def tail = next
}
case object Nil extends List[Nothing] {
override def isEmpty = true
override def head = ???
def tail = ???
}
object List extends IterableFactory[List] {
def fromIterable[B](coll: Iterable[B]): List[B] = coll match {
case coll: List[B] => coll
case _ => ListBuffer.fromIterable(coll).result
}
}
/** Concrete collection type: ListBuffer */
class ListBuffer[A] extends Seq[A] with SeqLike[A, ListBuffer] with Builder[A, List[A]] {
private var first, last: List[A] = Nil
private var aliased = false
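    // `aliased` records that `result` has handed out the internal list; the next
    // `+=` first copies the elements (see `copyElems`) so a previously returned
    // result is never mutated in place.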
def iterator = first.iterator
def fromIterable[B](coll: Iterable[B]) = ListBuffer.fromIterable(coll)
def apply(i: Int) = first.apply(i)
def length = first.length
private def copyElems(): Unit = {
val buf = ListBuffer.fromIterable(result)
first = buf.first
last = buf.last
aliased = false
}
def result = {
aliased = true
first
}
def +=(elem: A) = {
if (aliased) copyElems()
val last1 = Cons(elem, Nil)
last match {
case last: Cons[A] => last.next = last1
case _ => first = last1
}
last = last1
this
}
override def toString: String =
if (first.isEmpty) "ListBuffer()"
else {
val b = new StringBuilder("ListBuffer(").append(first.head)
first.tail.foldLeft(b)(_.append(", ").append(_)).append(")").toString
}
}
object ListBuffer extends IterableFactory[ListBuffer] {
def fromIterable[B](coll: Iterable[B]): ListBuffer[B] = new ListBuffer[B] ++= coll
}
/** Concrete collection type: ArrayBuffer */
class ArrayBuffer[A] private (initElems: Array[AnyRef], initLength: Int)
extends Seq[A] with SeqLike[A, ArrayBuffer] with Builder[A, ArrayBuffer[A]] {
def this() = this(new Array[AnyRef](16), 0)
private var elems: Array[AnyRef] = initElems
private var start = 0
private var end = initLength
def apply(n: Int) = elems(start + n).asInstanceOf[A]
def length = end - start
override def knownLength = length
override def view = new ArrayBufferView(elems, start, end)
def iterator = view.iterator
def fromIterable[B](it: Iterable[B]): ArrayBuffer[B] =
ArrayBuffer.fromIterable(it)
def +=(elem: A): this.type = {
if (end == elems.length) {
if (start > 0) {
Array.copy(elems, start, elems, 0, length)
end -= start
start = 0
}
else {
val newelems = new Array[AnyRef](end * 2)
Array.copy(elems, 0, newelems, 0, end)
elems = newelems
}
}
elems(end) = elem.asInstanceOf[AnyRef]
end += 1
this
}
def result = this
def trimStart(n: Int): Unit = start += (n max 0)
override def ++[B >: A](xs: IterableOnce[B]): ArrayBuffer[B] = xs match {
case xs: ArrayBuffer[B] =>
val elems = new Array[AnyRef](length + xs.length)
Array.copy(this.elems, this.start, elems, 0, this.length)
Array.copy(xs.elems, xs.start, elems, this.length, xs.length)
new ArrayBuffer(elems, elems.length)
case _ => super.++(xs)
}
override def toString = s"ArrayBuffer(${elems.slice(start, end).mkString(", ")})"
}
object ArrayBuffer extends IterableFactory[ArrayBuffer] {
def fromIterable[B](coll: Iterable[B]): ArrayBuffer[B] =
if (coll.knownLength >= 0) {
val elems = new Array[AnyRef](coll.knownLength)
val it = coll.iterator
for (i <- 0 until elems.length) elems(i) = it.next().asInstanceOf[AnyRef]
new ArrayBuffer[B](elems, elems.length)
}
else {
val buf = new ArrayBuffer[B]
val it = coll.iterator
while (it.hasNext) buf += it.next()
buf
}
}
class ArrayBufferView[A](val elems: Array[AnyRef], val start: Int, val end: Int) extends RandomAccessView[A] {
def apply(n: Int) = elems(start + n).asInstanceOf[A]
}
/** Concrete collection type: String */
implicit class StringOps(val s: String)
extends AnyVal with IterableOps[Char]
with SeqMonoTransforms[Char, String]
with IterablePolyTransforms[Char, List] {
protected def coll = new StringView(s)
def iterator = coll.iterator
protected def fromLikeIterable(coll: Iterable[Char]): String = {
val sb = new StringBuilder
for (ch <- coll) sb.append(ch)
sb.toString
}
def fromIterable[B](coll: Iterable[B]): List[B] = List.fromIterable(coll)
def map(f: Char => Char): String = {
val sb = new StringBuilder
for (ch <- s) sb.append(f(ch))
sb.toString
}
def flatMap(f: Char => String): String = {
val sb = new StringBuilder
for (ch <- s) sb.append(f(ch))
sb.toString
}
def ++(xs: IterableOnce[Char]): String = {
val sb = new StringBuilder(s)
for (ch <- xs.iterator) sb.append(ch)
sb.toString
}
def ++(xs: String): String = s + xs
}
case class StringView(s: String) extends RandomAccessView[Char] {
val start = 0
val end = s.length
def apply(n: Int) = s.charAt(n)
}
/* ---------- Views -------------------------------------------------------*/
/** Concrete collection type: View */
trait View[+A] extends Iterable[A] with IterableLike[A, View] {
override def view = this
override def fromIterable[B](c: Iterable[B]): View[B] = c match {
case c: View[B] => c
case _ => View.fromIterator(c.iterator)
}
}
/** View defined in terms of indexing a range */
trait RandomAccessView[+A] extends View[A] {
def start: Int
def end: Int
def apply(i: Int): A
def iterator: Iterator[A] = new Iterator[A] {
private var current = start
def hasNext = current < end
def next(): A = {
val r = apply(current)
current += 1
r
}
}
override def knownLength = end - start max 0
}
object View {
def fromIterator[A](it: => Iterator[A]): View[A] = new View[A] {
def iterator = it
}
case object Empty extends View[Nothing] {
def iterator = Iterator.empty
override def knownLength = 0
}
case class Elems[A](xs: A*) extends View[A] {
def iterator = Iterator(xs*)
override def knownLength = xs.length
}
case class Filter[A](val underlying: Iterable[A], p: A => Boolean) extends View[A] {
def iterator = underlying.iterator.filter(p)
}
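    // Partition is deliberately not a View itself: it pairs two lazy Partitioned
    // views that share one predicate and re-filter the underlying iterable on
    // demand (used by IterableMonoTransforms.partition above).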
case class Partition[A](val underlying: Iterable[A], p: A => Boolean) {
val left = Partitioned(this, true)
val right = Partitioned(this, false)
}
case class Partitioned[A](partition: Partition[A], cond: Boolean) extends View[A] {
def iterator = partition.underlying.iterator.filter(x => partition.p(x) == cond)
}
case class Drop[A](underlying: Iterable[A], n: Int) extends View[A] {
def iterator = underlying.iterator.drop(n)
override def knownLength =
if (underlying.knownLength >= 0) underlying.knownLength - n max 0 else -1
}
case class Map[A, B](underlying: Iterable[A], f: A => B) extends View[B] {
def iterator = underlying.iterator.map(f)
override def knownLength = underlying.knownLength
}
case class FlatMap[A, B](underlying: Iterable[A], f: A => IterableOnce[B]) extends View[B] {
def iterator = underlying.iterator.flatMap(f)
}
case class Concat[A](underlying: Iterable[A], other: IterableOnce[A]) extends View[A] {
def iterator = underlying.iterator ++ other
override def knownLength = other match {
case other: Iterable[_] if underlying.knownLength >= 0 && other.knownLength >= 0 =>
underlying.knownLength + other.knownLength
case _ =>
-1
}
}
case class Zip[A, B](underlying: Iterable[A], other: IterableOnce[B]) extends View[(A, B)] {
def iterator = underlying.iterator.zip(other)
override def knownLength = other match {
case other: Iterable[_] if underlying.knownLength >= 0 && other.knownLength >= 0 =>
underlying.knownLength min other.knownLength
case _ =>
-1
}
}
}
/* ---------- Iterators ---------------------------------------------------*/
/** A core Iterator class */
trait Iterator[+A] extends IterableOnce[A] { self =>
def hasNext: Boolean
def next(): A
def iterator = this
def foldLeft[B](z: B)(op: (B, A) => B): B =
if (hasNext) foldLeft(op(z, next()))(op) else z
def foldRight[B](z: B)(op: (A, B) => B): B =
if (hasNext) op(next(), foldRight(z)(op)) else z
def foreach(f: A => Unit): Unit =
while (hasNext) f(next())
def indexWhere(p: A => Boolean): Int = {
var i = 0
while (hasNext) {
if (p(next())) return i
i += 1
}
-1
}
def filter(p: A => Boolean): Iterator[A] = new Iterator[A] {
private var hd: A = compiletime.uninitialized
private var hdDefined: Boolean = false
def hasNext: Boolean = hdDefined || {
while ({
if (!self.hasNext) return false
hd = self.next()
!p(hd)
}) ()
hdDefined = true
true
}
def next() =
if (hasNext) {
hdDefined = false
hd
}
else Iterator.empty.next()
}
def map[B](f: A => B): Iterator[B] = new Iterator[B] {
def hasNext = self.hasNext
def next() = f(self.next())
}
def flatMap[B](f: A => IterableOnce[B]): Iterator[B] = new Iterator[B] {
private var myCurrent: Iterator[B] = Iterator.empty
private def current = {
while (!myCurrent.hasNext && self.hasNext)
myCurrent = f(self.next()).iterator
myCurrent
}
def hasNext = current.hasNext
def next() = current.next()
}
def ++[B >: A](xs: IterableOnce[B]): Iterator[B] = new Iterator[B] {
private var myCurrent: Iterator[B] = self
private var first = true
private def current = {
if (!myCurrent.hasNext && first) {
myCurrent = xs.iterator
first = false
}
myCurrent
}
def hasNext = current.hasNext
def next() = current.next()
}
def drop(n: Int): Iterator[A] = {
var i = 0
while (i < n && hasNext) {
next()
i += 1
}
this
}
def zip[B](that: IterableOnce[B]): Iterator[(A, B)] = new Iterator[(A, B)] {
val thatIterator = that.iterator
def hasNext = self.hasNext && thatIterator.hasNext
def next() = (self.next(), thatIterator.next())
}
}
object Iterator {
val empty: Iterator[Nothing] = new Iterator[Nothing] {
def hasNext = false
def next() = throw new NoSuchElementException("next on empty iterator")
}
def apply[A](xs: A*): Iterator[A] = new RandomAccessView[A] {
val start = 0
val end = xs.length
def apply(n: Int) = xs(n)
}.iterator
}
}
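/** A small usage sketch for the strawman API above (hypothetical, not part of the
 * original file); it relies only on the factories and transforms defined in
 * CollectionStrawMan5.
 */
object CollectionStrawManUsageSketch {
  import CollectionStrawMan5._
  def demo(): (List[Int], List[Int]) = {
    val xs = List(1, 2, 3, 4, 5)
    val (evens, odds) = xs.partition(_ % 2 == 0) // strict partition via the ListBuffer builder
    val doubled = xs.view.map(_ * 2).to(List)    // lazy view, forced back into a List
    (evens ++ doubled, odds)
  }
}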
| dotty-staging/dotty | tests/pos-special/strawman-collections/CollectionStrawMan5.scala | Scala | apache-2.0 | 17,972 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ${package}
import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.walkthrough.common.table._
object SpendReport {
def main(args: Array[String]): Unit = {
val env = ExecutionEnvironment.getExecutionEnvironment
val tEnv = BatchTableEnvironment.create(env)
tEnv.registerTableSource("transactions", new BoundedTransactionTableSource)
tEnv.registerTableSink("spend_report", new SpendReportTableSink)
val truncateDateToHour = new TruncateDateToHour
tEnv
.scan("transactions")
.insertInto("spend_report")
env.execute("Spend Report")
}
}
| fhueske/flink | flink-walkthroughs/flink-walkthrough-table-scala/src/main/resources/archetype-resources/src/main/scala/SpendReport.scala | Scala | apache-2.0 | 1,442 |
package org.unisonweb.util
import org.unisonweb.EasyTest._
object TextTests {
lazy val tests = suite("Text")(
test("Buffer.apply") { implicit T =>
(0 until 100) foreach { n =>
val buf = Text.emptyBlock
val codepoints = replicate(n)(codepoint)
(0 until n) foreach { i =>
buf :+ (i, codepoints(i))
expect1 (buf(i) == codepoints(i))
}
}
ok
},
test("round trip (ascii)") { implicit T =>
(0 until 100) foreach { n =>
val s = alphas123(n)
expect1 { Text.toString(Text.fromString(s)) == s }
}
ok
},
test("round trip (unicode)") { implicit T =>
(0 until 100) foreach { n =>
val s = string(n)
val txt = Text.fromString(s)
val ch = codepoint
equal1 ((txt :+ ch)(txt.size), ch)
equal1 ((ch +: txt)(0) , ch)
equal1 (Text.toString(Text.fromString(s)), s)
}
ok
},
test("++") { implicit T =>
(0 until 100) foreach { n =>
val t1 = textOf(intIn(0, n * 1)).run
val t2 = textOf(intIn(0, n * 1)).run
val t3 = textOf(intIn(0, n * 1)).run
equal1 (t1 ++ (t2 ++ t3), (t1 ++ t2) ++ t3)
equal1 (t1 ++ (t3 ++ t2), (t1 ++ t3) ++ t2)
}
ok
},
test("take/drop/reverse") { implicit T =>
(0 until 100) foreach { n =>
val txt = textOf(n * 100).run
val txtList = txt.toList
val m = intIn(-3, txt.size.toInt + 3)
if (m >= 0 && m < txt.size) equal1 (txt(m), txtList(m))
equal1 (txt.take(m).toList, txtList.take(m))
equal1 (txt.drop(m).toList, txtList.drop(m))
equal1 (txt.take(m) ++ txt.drop(m), txt)
equal1 (txt.reverse.toList, txtList.reverse)
}
ok
}
)
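  // Generator for random Text values of roughly the requested size: it mixes
  // repeated appends, repeated prepends, and concatenations of unevenly sized
  // pieces so that differently shaped trees are exercised by the tests above.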
def textOf(size: Int): Test[Text.Text] = test { implicit T =>
if (size <= 0) Text.empty
else intIn(0,5) match {
case 0 => (0 until size).foldLeft(Text.empty)((buf,i) => buf :+ codepoint)
case 1 => (0 until size).foldLeft(Text.empty)((buf,i) => codepoint +: buf)
case 2 => textOf(size/4).run ++ textOf(size/4).run ++ textOf(size/4).run ++ textOf(size/4).run
case 3 =>
textOf(size/2).run ++ textOf(size/4).run ++ textOf(size/8).run ++ textOf(size/16).run
case 4 =>
textOf(size/16).run ++ textOf(size/4).run ++ textOf(size/8).run ++ textOf(size/2).run
}
}
}
| paulp/unison | runtime-jvm/main/src/test/scala/util/TextTests.scala | Scala | mit | 2,400 |
package com.thecookiezen
import akka.NotUsed
import akka.actor.{ActorSystem, Props}
import akka.http.scaladsl.Http
import akka.pattern.ask
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import akka.util.{ByteString, Timeout}
import com.thecookiezen.business.containers.control.{Cluster, Host}
import com.thecookiezen.integration.docker.DockerClusterEngine
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
object Bootstrap extends App {
implicit val timeout = Timeout(5 seconds)
implicit val system = ActorSystem("clusterEngine")
implicit val materializer = ActorMaterializer()
// val clusterActor = system.actorOf(Props(classOf[Cluster], "testing_cluster", 10), "clusterActor")
//
// clusterActor ! Cluster.StartCluster
//
// val future1 = clusterActor ? Cluster.SizeOfCluster()
// println(Await.result(future1, 5 seconds))
//
// val future2 = clusterActor ? Host.ListContainers("test_label")
// println(Await.result(future2, 5 seconds))
//
// val future3 = clusterActor ? Cluster.ListHosts()
// println(Await.result(future3, 5 seconds))
//
// clusterActor ! Cluster.AddNewHost("1.32","http://localhost:2375")
val http = Http(system)
val testing = new DockerClusterEngine("1.32","localhost:2375", http.singleRequest(_))
private val eventualInitialized: Future[Host.Initialized] = testing.getRunningContainers("test")
private val initialized: Host.Initialized = Await.result(eventualInitialized, 5 seconds)
println(initialized)
private val future: Future[Source[ByteString, NotUsed]] = testing.log("462cc24ae03f")
future.map(source => source.runForeach(byteString => println(byteString.utf8String)))
}
| nikom1337/ClusterActor | src/main/scala/com/thecookiezen/Bootstrap.scala | Scala | apache-2.0 | 1,757 |
package ee.cone.c4actor
import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor.ProtoConflict.D_ConflictOrig
import ee.cone.c4actor.QProtocol.S_Firstborn
import ee.cone.c4actor.Types.SrcId
import ee.cone.c4assemble.Types.{Each, Values}
import ee.cone.c4assemble.{Assemble, Single, assemble}
import ee.cone.c4di.{c4, c4app}
import ee.cone.c4proto.{Id, protocol}
// C4STATE_TOPIC_PREFIX=ee.cone.c4actor.ConflictOrigTestApp sbt ~'c4actor-extra-examples/runMain -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 ee.cone.c4actor.ServerMain'
trait ProtoConflictAppBase
@protocol("ProtoConflictApp") object ProtoConflict {
@Id(0x103) case class D_ConflictOrig(
@Id(0x104) srcId: String,
@Id(0x105) value: Int
)
}
case class ConflictRich(conflict: D_ConflictOrig)
@assemble class AssembleConflictBase(produce: D_ConflictOrig) {
def Produce(
modelId: SrcId,
fb: Each[S_Firstborn]
): Values[(SrcId, D_ConflictOrig)] =
WithPK(produce) :: Nil
def ProduceRich(
modelId: SrcId,
origs: Values[D_ConflictOrig]
): Values[(SrcId, ConflictRich)] =
for {
origg <- Single.option(origs).toList
} yield
WithPK(ConflictRich(origg))
}
@c4("D_ConflictOrigTestApp") final class ConflictingOrigTest(
execution: Execution, toUpdate: ToUpdate, contextFactory: ContextFactory,
getConflictRich: GetByPK[ConflictRich],
) extends Executable with LazyLogging {
def run(): Unit = {
import LEvent.update
println("DEBUG ME NOWWWW")
Thread.sleep(5000)
val emptyLocal = contextFactory.updated(Nil)
logger.info("============Empty local===================")
println(getConflictRich.ofA(emptyLocal).values.toList)
val worldUpdate: Seq[LEvent[Product]] = List(D_ConflictOrig("main", 2)).flatMap(update)
val updates: List[QProtocol.N_Update] = worldUpdate.map(rec => toUpdate.toUpdate(rec)).toList
val nonEmptyLocal = contextFactory.updated(updates)
logger.info("============Non empty local===================")
println(getConflictRich.ofA(nonEmptyLocal).values.toList)
//logger.info(s"${nGlobal.assembled}")
execution.complete()
}
}
@c4app trait D_ConflictOrigTestAppBase extends TestVMRichDataApp
with ExecutableApp
with VMExecutionApp
with ProtoConflictApp
{
override def assembles: List[Assemble] = new AssembleConflict(D_ConflictOrig("main", 0)) :: super.assembles
lazy val assembleProfiler = ConsoleAssembleProfiler //ValueAssembleProfiler
}
| conecenter/c4proto | extra_examples/src/main/scala/ee/cone/c4actor/OrigConflictTest.scala | Scala | apache-2.0 | 2,481 |
//
// SyncChannelType.scala -- Scala object SyncChannelType
// Project OrcScala
//
// $Id: SyncChannelType.scala 2933 2011-12-15 16:26:02Z jthywissen $
//
// Created by dkitchin on Dec 1, 2010.
//
// Copyright (c) 2011 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.lib.state.types
import orc.types._
import orc.error.compiletime.typing._
import orc.lib.builtin.structured.ListType
/**
*
* @author dkitchin
*/
object SyncChannelType extends SimpleTypeConstructor("SyncChannel", Invariant) {
def getBuilder: Type = {
val X = new TypeVariable()
FunctionType(List(X), Nil, this(X))
}
override def instance(ts: List[Type]) = {
val List(t) = ts
new RecordType(
"get" -> SimpleFunctionType(t),
"put" -> SimpleFunctionType(t, SignalType))
}
}
| laurenyew/cOrcS | src/orc/lib/state/types/SyncChannelType.scala | Scala | bsd-3-clause | 1,019 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.{createTempDirectory, thisLineNumber}
import enablers.Writability
import Matchers._
import exceptions.TestFailedException
class ShouldBeWritableExplicitSpec extends Spec {
trait Thing {
def canRead: Boolean
}
val book = new Thing {
val canRead = true
}
val stone = new Thing {
val canRead = false
}
val writability =
new Writability[Thing] {
def isWritable(thing: Thing): Boolean = thing.canRead
}
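  // Explicit Writability[Thing] instance: every assertion below passes it
  // explicitly as a second parameter list, e.g. (book should be (writable)) (writability),
  // instead of relying on implicit resolution.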
val fileName: String = "ShouldBeWritableExplicitSpec.scala"
def wasNotWritable(left: Any): String =
FailureMessages.wasNotWritable(left)
def wasWritable(left: Any): String =
FailureMessages.wasWritable(left)
def `book should be writable, stone should not be writable` {
assert(book.canRead === true)
assert(stone.canRead === false)
}
def allError(left: Any, message: String, lineNumber: Int): String = {
val messageWithIndex = UnquotedString(" " + FailureMessages.forAssertionsGenTraversableMessageWithStackDepth(0, UnquotedString(message), UnquotedString(fileName + ":" + lineNumber)))
FailureMessages.allShorthandFailed(messageWithIndex, left)
}
object `Writable matcher` {
object `when work with 'file should be (writable)'` {
def `should do nothing when file is writable` {
(book should be (writable)) (writability)
}
def `should throw TestFailedException with correct stack depth when file is not writable` {
val caught1 = intercept[TestFailedException] {
(stone should be (writable)) (writability)
}
assert(caught1.message === Some(wasNotWritable(stone)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
object `when work with 'file should not be writable'` {
def `should do nothing when file is not writable` {
(stone should not be writable) (writability)
}
def `should throw TestFailedException with correct stack depth when file is writable` {
val caught1 = intercept[TestFailedException] {
(book should not be writable) (writability)
}
assert(caught1.message === Some(wasWritable(book)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
object `when work with 'file shouldBe writable'` {
def `should do nothing when file is writable` {
(book shouldBe writable) (writability)
}
def `should throw TestFailedException with correct stack depth when file is not writable` {
val caught1 = intercept[TestFailedException] {
(stone shouldBe writable) (writability)
}
assert(caught1.message === Some(wasNotWritable(stone)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
object `when work with 'file shouldNot be (writable)'` {
def `should do nothing when file is not writable` {
(stone shouldNot be (writable)) (writability)
}
def `should throw TestFailedException with correct stack depth when file is writable` {
val caught1 = intercept[TestFailedException] {
(book shouldNot be (writable)) (writability)
}
assert(caught1.message === Some(wasWritable(book)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
object `when work with 'all(xs) should be (writable)'` {
def `should do nothing when all(xs) is writable` {
(all(List(book)) should be (writable)) (writability)
}
def `should throw TestFailedException with correct stack depth when all(xs) is not writable` {
val left1 = List(stone)
val caught1 = intercept[TestFailedException] {
(all(left1) should be (writable)) (writability)
}
assert(caught1.message === Some(allError(left1, wasNotWritable(stone), thisLineNumber - 2)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
object `when work with 'all(xs) should not be writable'` {
def `should do nothing when all(xs) is not writable` {
(all(List(stone)) should not be writable) (writability)
}
def `should throw TestFailedException with correct stack depth when all(xs) is writable` {
val left1 = List(book)
val caught1 = intercept[TestFailedException] {
(all(left1) should not be writable) (writability)
}
assert(caught1.message === Some(allError(left1, wasWritable(book), thisLineNumber - 2)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
object `when work with 'all(xs) shouldBe writable'` {
def `should do nothing when all(xs) is writable` {
(all(List(book)) shouldBe writable) (writability)
}
def `should throw TestFailedException with correct stack depth when all(xs) is not writable` {
val left1 = List(stone)
val caught1 = intercept[TestFailedException] {
(all(left1) shouldBe writable) (writability)
}
assert(caught1.message === Some(allError(left1, wasNotWritable(stone), thisLineNumber - 2)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
object `when work with 'all(xs) shouldNot be (writable)'` {
def `should do nothing when all(xs) is not writable` {
(all(List(stone)) shouldNot be (writable)) (writability)
}
def `should throw TestFailedException with correct stack depth when all(xs) is writable` {
val left1 = List(book)
val caught1 = intercept[TestFailedException] {
(all(left1) shouldNot be (writable)) (writability)
}
assert(caught1.message === Some(allError(left1, wasWritable(book), thisLineNumber - 2)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
}
}
| SRGOM/scalatest | scalatest-test/src/test/scala/org/scalatest/ShouldBeWritableExplicitSpec.scala | Scala | apache-2.0 | 7,213 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.linalg
import java.lang.{Double => JavaDouble, Integer => JavaInteger, Iterable => JavaIterable}
import java.util
import scala.annotation.varargs
import scala.collection.JavaConverters._
import scala.collection.mutable
import breeze.linalg.{DenseVector => BDV, SparseVector => BSV, Vector => BV}
import org.apache.spark.annotation.Since
/**
* Represents a numeric vector, whose index type is Int and value type is Double.
*
* @note Users should not implement this interface.
*/
@Since("2.0.0")
sealed trait Vector extends Serializable {
/**
* Size of the vector.
*/
@Since("2.0.0")
def size: Int
/**
* Converts the instance to a double array.
*/
@Since("2.0.0")
def toArray: Array[Double]
override def equals(other: Any): Boolean = {
other match {
case v2: Vector =>
if (this.size != v2.size) return false
(this, v2) match {
case (s1: SparseVector, s2: SparseVector) =>
Vectors.equals(s1.indices, s1.values, s2.indices, s2.values)
case (s1: SparseVector, d1: DenseVector) =>
Vectors.equals(s1.indices, s1.values, 0 until d1.size, d1.values)
case (d1: DenseVector, s1: SparseVector) =>
Vectors.equals(0 until d1.size, d1.values, s1.indices, s1.values)
case (_, _) => util.Arrays.equals(this.toArray, v2.toArray)
}
case _ => false
}
}
/**
* Returns a hash code value for the vector. The hash code is based on its size and its first 128
* nonzero entries, using a hash algorithm similar to `java.util.Arrays.hashCode`.
*/
override def hashCode(): Int = {
// This is a reference implementation. It calls return in foreachActive, which is slow.
// Subclasses should override it with optimized implementation.
var result: Int = 31 + size
var nnz = 0
this.foreachActive { (index, value) =>
if (nnz < Vectors.MAX_HASH_NNZ) {
// ignore explicit 0 for comparison between sparse and dense
if (value != 0) {
result = 31 * result + index
val bits = java.lang.Double.doubleToLongBits(value)
result = 31 * result + (bits ^ (bits >>> 32)).toInt
nnz += 1
}
} else {
return result
}
}
result
}
/**
* Converts the instance to a breeze vector.
*/
private[spark] def asBreeze: BV[Double]
/**
* Gets the value of the ith element.
* @param i index
*/
@Since("2.0.0")
def apply(i: Int): Double = asBreeze(i)
/**
* Makes a deep copy of this vector.
*/
@Since("2.0.0")
def copy: Vector = {
throw new UnsupportedOperationException(s"copy is not implemented for ${this.getClass}.")
}
/**
* Applies a function `f` to all the elements of dense and sparse vector.
*
* @param f the function takes two parameters where the first parameter is the index of
* the vector with type `Int`, and the second parameter is the corresponding value
* with type `Double`.
*/
private[spark] def foreach(f: (Int, Double) => Unit): Unit =
iterator.foreach { case (i, v) => f(i, v) }
/**
* Applies a function `f` to all the active elements of dense and sparse vector.
*
* @param f the function takes two parameters where the first parameter is the index of
* the vector with type `Int`, and the second parameter is the corresponding value
* with type `Double`.
*/
@Since("2.0.0")
def foreachActive(f: (Int, Double) => Unit): Unit =
activeIterator.foreach { case (i, v) => f(i, v) }
/**
* Applies a function `f` to all the non-zero elements of dense and sparse vector.
*
* @param f the function takes two parameters where the first parameter is the index of
* the vector with type `Int`, and the second parameter is the corresponding value
* with type `Double`.
*/
private[spark] def foreachNonZero(f: (Int, Double) => Unit): Unit =
nonZeroIterator.foreach { case (i, v) => f(i, v) }
/**
* Number of active entries. An "active entry" is an element which is explicitly stored,
* regardless of its value. Note that inactive entries have value 0.
*/
@Since("2.0.0")
def numActives: Int
/**
* Number of nonzero elements. This scans all active values and count nonzeros.
*/
@Since("2.0.0")
def numNonzeros: Int
/**
* Converts this vector to a sparse vector with all explicit zeros removed.
*/
@Since("2.0.0")
def toSparse: SparseVector = toSparseWithSize(numNonzeros)
/**
* Converts this vector to a sparse vector with all explicit zeros removed when the size is known.
* This method is used to avoid re-computing the number of non-zero elements when it is
* already known. This method should only be called after computing the number of non-zero
* elements via [[numNonzeros]]. e.g.
* {{{
* val nnz = numNonzeros
   *   val sv = toSparseWithSize(nnz)
* }}}
*
* If `nnz` is under-specified, a [[java.lang.ArrayIndexOutOfBoundsException]] is thrown.
*/
private[linalg] def toSparseWithSize(nnz: Int): SparseVector
/**
* Converts this vector to a dense vector.
*/
@Since("2.0.0")
def toDense: DenseVector = new DenseVector(this.toArray)
/**
* Returns a vector in either dense or sparse format, whichever uses less storage.
*/
@Since("2.0.0")
def compressed: Vector = {
val nnz = numNonzeros
// A dense vector needs 8 * size + 8 bytes, while a sparse vector needs 12 * nnz + 20 bytes.
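    // Sparse is smaller iff 12 * nnz + 20 < 8 * size + 8, which simplifies to 1.5 * (nnz + 1.0) < size.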
if (1.5 * (nnz + 1.0) < size) {
toSparseWithSize(nnz)
} else {
toDense
}
}
/**
* Find the index of a maximal element. Returns the first maximal element in case of a tie.
* Returns -1 if vector has length 0.
*/
@Since("2.0.0")
def argmax: Int
/**
* Calculate the dot product of this vector with another.
*
* If `size` does not match an [[IllegalArgumentException]] is thrown.
*/
@Since("3.0.0")
def dot(v: Vector): Double = BLAS.dot(this, v)
/**
* Returns an iterator over all the elements of this vector.
*/
private[spark] def iterator: Iterator[(Int, Double)] =
Iterator.tabulate(size)(i => (i, apply(i)))
/**
* Returns an iterator over all the active elements of this vector.
*/
private[spark] def activeIterator: Iterator[(Int, Double)]
/**
* Returns an iterator over all the non-zero elements of this vector.
*/
private[spark] def nonZeroIterator: Iterator[(Int, Double)] =
activeIterator.filter(_._2 != 0)
}
/**
* Factory methods for [[org.apache.spark.ml.linalg.Vector]].
* We don't use the name `Vector` because Scala imports
* `scala.collection.immutable.Vector` by default.
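 *
 * For example, a dense vector and an equivalent sparse vector can be created with the
 * factories defined below:
 * {{{
 *   val dv = Vectors.dense(1.0, 0.0, 3.0)
 *   val sv = Vectors.sparse(3, Array(0, 2), Array(1.0, 3.0))
 * }}}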
*/
@Since("2.0.0")
object Vectors {
/**
* Creates a dense vector from its values.
*/
@varargs
@Since("2.0.0")
def dense(firstValue: Double, otherValues: Double*): Vector =
new DenseVector((firstValue +: otherValues).toArray)
// A dummy implicit is used to avoid signature collision with the one generated by @varargs.
/**
* Creates a dense vector from a double array.
*/
@Since("2.0.0")
def dense(values: Array[Double]): Vector = new DenseVector(values)
/**
* Creates a sparse vector providing its index array and value array.
*
* @param size vector size.
* @param indices index array, must be strictly increasing.
* @param values value array, must have the same length as indices.
*/
@Since("2.0.0")
def sparse(size: Int, indices: Array[Int], values: Array[Double]): Vector =
new SparseVector(size, indices, values)
/**
* Creates a sparse vector using unordered (index, value) pairs.
*
* @param size vector size.
* @param elements vector elements in (index, value) pairs.
*/
@Since("2.0.0")
def sparse(size: Int, elements: Seq[(Int, Double)]): Vector = {
val (indices, values) = elements.sortBy(_._1).unzip
new SparseVector(size, indices.toArray, values.toArray)
}
/**
* Creates a sparse vector using unordered (index, value) pairs in a Java friendly way.
*
* @param size vector size.
* @param elements vector elements in (index, value) pairs.
*/
@Since("2.0.0")
def sparse(size: Int, elements: JavaIterable[(JavaInteger, JavaDouble)]): Vector = {
sparse(size, elements.asScala.map { case (i, x) =>
(i.intValue(), x.doubleValue())
}.toSeq)
}
/**
* Creates a vector of all zeros.
*
* @param size vector size
* @return a zero vector
*/
@Since("2.0.0")
def zeros(size: Int): Vector = {
new DenseVector(new Array[Double](size))
}
/**
* Creates a vector instance from a breeze vector.
*/
private[spark] def fromBreeze(breezeVector: BV[Double]): Vector = {
breezeVector match {
case v: BDV[Double] =>
if (v.offset == 0 && v.stride == 1 && v.length == v.data.length) {
new DenseVector(v.data)
} else {
new DenseVector(v.toArray) // Can't use underlying array directly, so make a new one
}
case v: BSV[Double] =>
if (v.index.length == v.used) {
new SparseVector(v.length, v.index, v.data)
} else {
new SparseVector(v.length, v.index.slice(0, v.used), v.data.slice(0, v.used))
}
case v: BV[_] =>
sys.error("Unsupported Breeze vector type: " + v.getClass.getName)
}
}
/**
* Returns the p-norm of this vector.
* @param vector input vector.
   * @param p norm order (must be greater than or equal to 1.0).
* @return norm in L^p^ space.
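   *
   * For example, `norm(Vectors.dense(3.0, 4.0), 2.0)` is 5.0, `norm(Vectors.dense(3.0, 4.0), 1.0)`
   * is 7.0, and `norm(Vectors.dense(3.0, 4.0), Double.PositiveInfinity)` is 4.0.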
*/
@Since("2.0.0")
def norm(vector: Vector, p: Double): Double = {
require(p >= 1.0, "To compute the p-norm of the vector, we require that you specify a p>=1. " +
s"You specified p=$p.")
val values = vector match {
case DenseVector(vs) => vs
case SparseVector(n, ids, vs) => vs
case v => throw new IllegalArgumentException("Do not support vector type " + v.getClass)
}
val size = values.length
if (p == 1) {
var sum = 0.0
var i = 0
while (i < size) {
sum += math.abs(values(i))
i += 1
}
sum
} else if (p == 2) {
var sum = 0.0
var i = 0
while (i < size) {
sum += values(i) * values(i)
i += 1
}
math.sqrt(sum)
} else if (p == Double.PositiveInfinity) {
var max = 0.0
var i = 0
while (i < size) {
val value = math.abs(values(i))
if (value > max) max = value
i += 1
}
max
} else {
var sum = 0.0
var i = 0
while (i < size) {
sum += math.pow(math.abs(values(i)), p)
i += 1
}
math.pow(sum, 1.0 / p)
}
}
/**
* Returns the squared distance between two Vectors.
* @param v1 first Vector.
* @param v2 second Vector.
* @return squared distance between two Vectors.
*/
@Since("2.0.0")
def sqdist(v1: Vector, v2: Vector): Double = {
require(v1.size == v2.size, s"Vector dimensions do not match: Dim(v1)=${v1.size} and Dim(v2)" +
s"=${v2.size}.")
var squaredDistance = 0.0
(v1, v2) match {
case (v1: SparseVector, v2: SparseVector) =>
val v1Values = v1.values
val v1Indices = v1.indices
val v2Values = v2.values
val v2Indices = v2.indices
val nnzv1 = v1Indices.length
val nnzv2 = v2Indices.length
var kv1 = 0
var kv2 = 0
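        // Walk both sorted index arrays in a merge: take the value from the side with the smaller
        // index (the other side is implicitly 0), or the difference when the indices match.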
while (kv1 < nnzv1 || kv2 < nnzv2) {
var score = 0.0
if (kv2 >= nnzv2 || (kv1 < nnzv1 && v1Indices(kv1) < v2Indices(kv2))) {
score = v1Values(kv1)
kv1 += 1
} else if (kv1 >= nnzv1 || (kv2 < nnzv2 && v2Indices(kv2) < v1Indices(kv1))) {
score = v2Values(kv2)
kv2 += 1
} else {
score = v1Values(kv1) - v2Values(kv2)
kv1 += 1
kv2 += 1
}
squaredDistance += score * score
}
case (v1: SparseVector, v2: DenseVector) =>
squaredDistance = sqdist(v1, v2)
case (v1: DenseVector, v2: SparseVector) =>
squaredDistance = sqdist(v2, v1)
case (DenseVector(vv1), DenseVector(vv2)) =>
var kv = 0
val sz = vv1.length
while (kv < sz) {
val score = vv1(kv) - vv2(kv)
squaredDistance += score * score
kv += 1
}
case _ =>
throw new IllegalArgumentException("Do not support vector type " + v1.getClass +
" and " + v2.getClass)
}
squaredDistance
}
/**
* Returns the squared distance between DenseVector and SparseVector.
*/
private[ml] def sqdist(v1: SparseVector, v2: DenseVector): Double = {
var kv1 = 0
var kv2 = 0
val indices = v1.indices
var squaredDistance = 0.0
val nnzv1 = indices.length
val nnzv2 = v2.size
var iv1 = if (nnzv1 > 0) indices(kv1) else -1
while (kv2 < nnzv2) {
var score = 0.0
if (kv2 != iv1) {
score = v2(kv2)
} else {
score = v1.values(kv1) - v2(kv2)
if (kv1 < nnzv1 - 1) {
kv1 += 1
iv1 = indices(kv1)
}
}
squaredDistance += score * score
kv2 += 1
}
squaredDistance
}
/**
* Check equality between sparse/dense vectors
*/
private[ml] def equals(
v1Indices: IndexedSeq[Int],
v1Values: Array[Double],
v2Indices: IndexedSeq[Int],
v2Values: Array[Double]): Boolean = {
val v1Size = v1Values.length
val v2Size = v2Values.length
var k1 = 0
var k2 = 0
var allEqual = true
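    // Skip explicit zeros on both sides; the remaining entries must then agree in both index and
    // value. The loop exits early when either side runs out of nonzero entries.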
while (allEqual) {
while (k1 < v1Size && v1Values(k1) == 0) k1 += 1
while (k2 < v2Size && v2Values(k2) == 0) k2 += 1
if (k1 >= v1Size || k2 >= v2Size) {
return k1 >= v1Size && k2 >= v2Size // check end alignment
}
allEqual = v1Indices(k1) == v2Indices(k2) && v1Values(k1) == v2Values(k2)
k1 += 1
k2 += 1
}
allEqual
}
/** Max number of nonzero entries used in computing hash code. */
private[linalg] val MAX_HASH_NNZ = 128
}
/**
* A dense vector represented by a value array.
*/
@Since("2.0.0")
class DenseVector @Since("2.0.0") ( @Since("2.0.0") val values: Array[Double]) extends Vector {
override def size: Int = values.length
override def toString: String = values.mkString("[", ",", "]")
override def toArray: Array[Double] = values
private[spark] override def asBreeze: BV[Double] = new BDV[Double](values)
override def apply(i: Int): Double = values(i)
override def copy: DenseVector = {
new DenseVector(values.clone())
}
override def equals(other: Any): Boolean = super.equals(other)
override def hashCode(): Int = {
var result: Int = 31 + size
var i = 0
val end = values.length
var nnz = 0
while (i < end && nnz < Vectors.MAX_HASH_NNZ) {
val v = values(i)
if (v != 0.0) {
result = 31 * result + i
val bits = java.lang.Double.doubleToLongBits(values(i))
result = 31 * result + (bits ^ (bits >>> 32)).toInt
nnz += 1
}
i += 1
}
result
}
override def numActives: Int = size
override def numNonzeros: Int = {
// same as values.count(_ != 0.0) but faster
var nnz = 0
values.foreach { v =>
if (v != 0.0) {
nnz += 1
}
}
nnz
}
private[linalg] override def toSparseWithSize(nnz: Int): SparseVector = {
val ii = new Array[Int](nnz)
val vv = new Array[Double](nnz)
var k = 0
foreachNonZero { (i, v) =>
ii(k) = i
vv(k) = v
k += 1
}
new SparseVector(size, ii, vv)
}
override def argmax: Int = {
if (size == 0) {
-1
} else {
var maxIdx = 0
var maxValue = values(0)
var i = 1
while (i < size) {
if (values(i) > maxValue) {
maxIdx = i
maxValue = values(i)
}
i += 1
}
maxIdx
}
}
private[spark] override def iterator: Iterator[(Int, Double)] = {
val localValues = values
Iterator.tabulate(size)(i => (i, localValues(i)))
}
private[spark] override def activeIterator: Iterator[(Int, Double)] =
iterator
}
@Since("2.0.0")
object DenseVector {
/** Extracts the value array from a dense vector. */
@Since("2.0.0")
def unapply(dv: DenseVector): Option[Array[Double]] = Some(dv.values)
}
/**
* A sparse vector represented by an index array and a value array.
*
* @param size size of the vector.
 * @param indices index array, assumed to be strictly increasing.
* @param values value array, must have the same length as the index array.
*/
@Since("2.0.0")
class SparseVector @Since("2.0.0") (
override val size: Int,
@Since("2.0.0") val indices: Array[Int],
@Since("2.0.0") val values: Array[Double]) extends Vector {
// validate the data
{
require(size >= 0, "The size of the requested sparse vector must be no less than 0.")
require(indices.length == values.length, "Sparse vectors require that the dimension of the" +
s" indices match the dimension of the values. You provided ${indices.length} indices and " +
s" ${values.length} values.")
require(indices.length <= size, s"You provided ${indices.length} indices and values, " +
s"which exceeds the specified vector size ${size}.")
if (indices.nonEmpty) {
require(indices(0) >= 0, s"Found negative index: ${indices(0)}.")
}
var prev = -1
indices.foreach { i =>
require(prev < i, s"Index $i follows $prev and is not strictly increasing")
prev = i
}
require(prev < size, s"Index $prev out of bounds for vector of size $size")
}
override def toString: String =
s"($size,${indices.mkString("[", ",", "]")},${values.mkString("[", ",", "]")})"
override def toArray: Array[Double] = {
val data = new Array[Double](size)
var i = 0
val nnz = indices.length
while (i < nnz) {
data(indices(i)) = values(i)
i += 1
}
data
}
override def copy: SparseVector = {
new SparseVector(size, indices.clone(), values.clone())
}
private[spark] override def asBreeze: BV[Double] = new BSV[Double](indices, values, size)
override def apply(i: Int): Double = {
if (i < 0 || i >= size) {
throw new IndexOutOfBoundsException(s"Index $i out of bounds [0, $size)")
}
val j = util.Arrays.binarySearch(indices, i)
if (j < 0) 0.0 else values(j)
}
override def equals(other: Any): Boolean = super.equals(other)
override def hashCode(): Int = {
var result: Int = 31 + size
val end = values.length
var k = 0
var nnz = 0
while (k < end && nnz < Vectors.MAX_HASH_NNZ) {
val v = values(k)
if (v != 0.0) {
val i = indices(k)
result = 31 * result + i
val bits = java.lang.Double.doubleToLongBits(v)
result = 31 * result + (bits ^ (bits >>> 32)).toInt
nnz += 1
}
k += 1
}
result
}
override def numActives: Int = values.length
override def numNonzeros: Int = {
var nnz = 0
values.foreach { v =>
if (v != 0.0) {
nnz += 1
}
}
nnz
}
private[linalg] override def toSparseWithSize(nnz: Int): SparseVector = {
if (nnz == numActives) {
this
} else {
val ii = new Array[Int](nnz)
val vv = new Array[Double](nnz)
var k = 0
foreachNonZero { (i, v) =>
ii(k) = i
vv(k) = v
k += 1
}
new SparseVector(size, ii, vv)
}
}
override def argmax: Int = {
if (size == 0) {
-1
} else if (numActives == 0) {
0
} else {
// Find the max active entry.
var maxIdx = indices(0)
var maxValue = values(0)
var maxJ = 0
var j = 1
val na = numActives
while (j < na) {
val v = values(j)
if (v > maxValue) {
maxValue = v
maxIdx = indices(j)
maxJ = j
}
j += 1
}
      // If the max active entry is nonpositive and there exist inactive ones, find the first zero.
if (maxValue <= 0.0 && na < size) {
if (maxValue == 0.0) {
// If there exists an inactive entry before maxIdx, find it and return its index.
if (maxJ < maxIdx) {
var k = 0
while (k < maxJ && indices(k) == k) {
k += 1
}
maxIdx = k
}
} else {
// If the max active value is negative, find and return the first inactive index.
var k = 0
while (k < na && indices(k) == k) {
k += 1
}
maxIdx = k
}
}
maxIdx
}
}
/**
* Create a slice of this vector based on the given indices.
* @param selectedIndices Unsorted list of indices into the vector.
* This does NOT do bound checking.
* @param sorted Whether the input indices are already sorted.
* This does NOT do ordering checking.
* @return New SparseVector with values in the order specified by the given indices.
*
* NOTE: The API needs to be discussed before making this public.
*/
private[spark] def slice(selectedIndices: Array[Int], sorted: Boolean = false): SparseVector = {
val localIndices = indices
val localValues = values
val ns = selectedIndices.length
val indexBuff = mutable.ArrayBuilder.make[Int]
val valueBuff = mutable.ArrayBuilder.make[Double]
if (sorted) {
val nk = localIndices.length
var k = 0
var s = 0
while (k < nk && s < ns) {
val i = localIndices(k)
val v = localValues(k)
if (v != 0) {
while (s < ns && selectedIndices(s) < i) { s += 1 }
if (s < ns && selectedIndices(s) == i) {
indexBuff += s
valueBuff += v
s += 1
}
}
k += 1
}
} else {
var s = 0
while (s < ns) {
val j = java.util.Arrays.binarySearch(localIndices, selectedIndices(s))
if (j >= 0) {
val v = localValues(j)
if (v != 0) {
indexBuff += s
valueBuff += v
}
}
s += 1
}
}
new SparseVector(ns, indexBuff.result, valueBuff.result)
}
private[spark] override def iterator: Iterator[(Int, Double)] = {
val localSize = size
val localNumActives = numActives
val localIndices = indices
val localValues = values
new Iterator[(Int, Double)]() {
private var i = 0
private var j = 0
private var k = localIndices.headOption.getOrElse(-1)
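      // i walks every position in [0, size); j counts consumed active entries; k holds the index
      // of the next active entry (-1 when exhausted), so inactive positions yield 0.0.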
override def hasNext: Boolean = i < localSize
override def next(): (Int, Double) = {
val v = if (i == k) {
j += 1
k = if (j < localNumActives) localIndices(j) else -1
localValues(j - 1)
} else 0.0
i += 1
(i - 1, v)
}
}
}
private[spark] override def activeIterator: Iterator[(Int, Double)] = {
val localIndices = indices
val localValues = values
Iterator.tabulate(numActives)(j => (localIndices(j), localValues(j)))
}
}
@Since("2.0.0")
object SparseVector {
@Since("2.0.0")
def unapply(sv: SparseVector): Option[(Int, Array[Int], Array[Double])] =
Some((sv.size, sv.indices, sv.values))
}
| maropu/spark | mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala | Scala | apache-2.0 | 24,086 |
/*
* #%L
* Maven plugin for ooxoo to generate sources from modeling
* %%
* Copyright (C) 2006 - 2017 Open Design Flow
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package com.idyria.osi.ooxoo.maven
import java.util
import org.apache.maven.plugin.AbstractMojo
import org.apache.maven.plugin.MojoExecutionException
import org.apache.maven.project.MavenProject
import scala.beans.BeanProperty
/*
import org.apache.maven.reporting.MavenReport
import org.apache.maven.reporting.MavenReportException*/
import org.apache.maven.plugins.annotations._
import com.idyria.osi.ooxoo.model._
import com.idyria.osi.ooxoo.model.out.scala._
import com.idyria.osi.ooxoo.model.writers._
import java.io._
import scala.io.Source
import org.scala_tools.maven.mojo.annotations._
import org.odfi.tea.file.DirectoryUtilities
/**
 * Generates sources from the model, and also copies the model to the output.
*
*/
@goal("generate")
@phase("generate-sources")
@requiresProject(true)
class GenerateSourcesMojo extends AbstractMojo /*with MavenReport*/ {
// @Parameter(property = "project", defaultValue = "${project}")
@required
@readOnly
@parameter
@defaultValue("${project}")
var project: MavenProject = _
/*@parameter
@alias("ooxoo.force")
@defaultValue("false")*/
//@Parameter(property = "ooxoo.force", defaultValue = "true")
@parameter
@alias("ooxoo.force")
@expression("ooxoo.force")
@defaultValue("false")
var ooxooForce: Boolean = false
@Parameter(property = "ooxoo.cleanOutputs", defaultValue = "true")
var cleanOutputs: Boolean = true
@required
@readOnly
@parameter
@defaultValue("${project.build.sourceDirectory}")
var sourceFolder = new File("src/main/scala")
//var sourceFolder = new File("src/main/scala")
@Parameter(defaultValue = "${project.build.testSourceDirectory}")
var testSourceFolder = new File("src/test/scala")
var modelsFolder = new File("src/main/xmodels")
@required
@readOnly
@parameter
@defaultValue("${project.build.directory}/generated-sources/")
var outputBaseFolder = new File("target/generated-sources/")
@required
@readOnly
@parameter
@defaultValue("${project.build.directory}/maven-status/maven-ooxoo-plugin/")
var statusFolder = new File("target/maven-status/maven-ooxoo-plugin")
@throws(classOf[MojoExecutionException])
override def execute() = {
getLog().info("Forcing: " + ooxooForce);
getLog().info("Looking for xmodels to generate with project: " + project);
getLog().info("Source Folders: " + project.getCompileSourceRoots)
project.getCompileSourceRoots.forEach {
sourceFolderStr =>
val sourceFolder = new File(sourceFolderStr).getCanonicalFile
getLog().info("Processing Source Folder: " + sourceFolder);
sourceFolder.exists() match {
case true =>
statusFolder.mkdirs()
/*Thread.currentThread.getContextClassLoader match {
case urlCl : java.net.URLClassLoader =>
urlCl.getURLs.foreach {
u => println("Available in classLoader: "+u)
}
case _ =>
}*/
var xModelFiles = List[File]()
//-- Map to store instances of producers, for reuse purpose
var producers = Map[Class[_ <: ModelProducer], ModelProducer]()
// Search in models folder
//---------------
if (modelsFolder.exists) {
//-- Search the xmodels
//---------------------------------
var xmodelsFiles = modelsFolder.listFiles(new FilenameFilter() {
def accept(dir: File, name: String): Boolean = {
              name.matches(".*\\.xmodel")
}
})
xModelFiles = xModelFiles ::: xmodelsFiles.toList
}
//-- Search in source package
//--------------
val fileVisitor =
new java.nio.file.SimpleFileVisitor[java.nio.file.Path] {
override def visitFile(
file: java.nio.file.Path,
attributes: java.nio.file.attribute.BasicFileAttributes
) = {
// Only Retain files ending with .xmodel.scala
file.toString.endsWith(".xmodel.scala") match {
case true =>
xModelFiles = xModelFiles :+ file.toFile
case false =>
}
java.nio.file.FileVisitResult.CONTINUE
}
}
java.nio.file.Files.walkFileTree(sourceFolder.toPath, fileVisitor)
if (testSourceFolder.exists) {
java.nio.file.Files.walkFileTree(testSourceFolder.toPath, fileVisitor)
//getLog().info("Test Folder: " + testSourceFolder.toPath);
}
//-- Process all models
          //-- - First filter out the ones that don't have to be regenerated
//-- - Then Produce
//------------------
ooxooForce match {
case true => getLog().info("Forcing regeneration of models");
case false =>
}
xModelFiles
.filter { f =>
ooxooForce match {
case true => true
case false =>
// Get or set a timestamp file to detect if model file changed since last run
//-------------------
//-- Set timestamps. If modified is greater than the last timestamp -> regenerate
var lastTimeStamp: Long = 0
var lastModified = f.lastModified
statusFolder.mkdirs
var timestampFile = new File(statusFolder, s"${f.getName}.ts")
timestampFile.exists match {
case true =>
lastTimeStamp = java.lang.Long.parseLong(
Source.fromFile(timestampFile).mkString
)
case false =>
}
// Write Actual timestamp
//-------------
//java.nio.file.Files.write(timestampFile.toPath,new String(s"${System.currentTimeMillis}").getBytes)
lastModified > lastTimeStamp
}
}
.foreach { f =>
getLog().info("(Re)generating model: " + f);
// Get Model as String
//--------------------------
var source = Source.fromFile(f)
var content = source.mkString
// Compile to get annotated producers
//---------------------
var modelInfos = ModelCompiler.compile(f)
// Produce for all defined producers
//---------------
if (modelInfos.producers != null && modelInfos.producers
.value() != null) {
modelInfos.producers.value().foreach { producerAnnotation =>
// Get Producer
//---------
var producer = producers.get(producerAnnotation.value) match {
case Some(producer) =>
producer
case None =>
var producer = producerAnnotation.value
.getDeclaredConstructor()
.newInstance()
producers = producers + (producerAnnotation.value -> producer)
producer
}
// Produce or produce later
//----------
producer.outputType match {
// Report, so save and generate when reports are generated
case outputType if (outputType.startsWith("report.")) =>
// Produce now as sources
case _ =>
// Prepare Output
//--------------
var outputFolder =
new File(outputBaseFolder, producer.outputType)
/*if (cleanOutputs) {
println("Cleaning: "+outputFolder)
DirectoryUtilities.deleteDirectoryContent(outputFolder)
}*/
outputFolder.mkdirs()
var out = new FileWriters(outputFolder)
ModelCompiler.produce(modelInfos, producer, out)
// Add Target Folder to compile source if existing
//-----------------
if (outputFolder.exists) {
//this.project.addCompileSourceRoot(outputFolder.getAbsolutePath);
}
}
}
// EOF Foreach producers
}
// EOF Something to produce
// Write Actual timestamp
//-------------
var timestampFile = new File(statusFolder, s"${f.getName}.ts")
java.nio.file.Files.write(
timestampFile.toPath,
new String(s"${System.currentTimeMillis}").getBytes
)
}
// EOF Xfiles loop
// Add All Target Folder generated sources as compile unit
//-------------------
if (outputBaseFolder.exists) {
//outputBaseFolder.listFiles.filter(_.isDirectory).foreach(f => this.project.addCompileSourceRoot(f.getAbsolutePath))
}
case false =>
getLog().info("Source Folder does not exist: " + sourceFolder);
}
}
}
// Reporting
//---------------------------
var defferedReporting = List[(ModelInfos, ModelProducer)]()
var reportingOutputDirectory: java.io.File = null
def canGenerateReport(): Boolean = {
defferedReporting.size > 0
}
/* def generate(sink: org.codehaus.doxia.sink.Sink, locale: java.util.Locale): Unit = {
}*/
def getCategoryName(): String = {
"OOXOO"
}
def getDescription(x$1: java.util.Locale): String = {
"OOXOO Reports"
}
def getName(locale: java.util.Locale): String = {
"OOXOO"
}
def getOutputName(): String = {
"OOXOO"
}
def getReportOutputDirectory(): java.io.File = {
this.reportingOutputDirectory match {
case dir if (dir == null) =>
new File(
project.getBasedir,
project.getBuild.getOutputDirectory + "/OOXOO"
).getCanonicalFile
case dir => dir
}
}
def isExternalReport(): Boolean = {
true
}
def setReportOutputDirectory(dir: java.io.File): Unit = {
this.reportingOutputDirectory = dir
}
override def getPluginContext: java.util.Map[_, _] = super.getPluginContext
}
| richnou/ooxoo-core | maven-ooxoo-plugin/src/main/scala/com/idyria/ooxoo/maven/GenerateSourcesMojo.scala | Scala | agpl-3.0 | 12,068 |
package kalmanb.akka.push
import scala.util.Failure
import scala.util.Success
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.actor.ActorRef
import akka.pattern._
import akka.util.Timeout
object WebServer {
case class Request(url: String)
}
class WebServer(db: ActorRef, processor: ActorRef) extends Actor with ActorLogging {
import WebServer._
import scala.concurrent.duration._
implicit val timeout = Timeout(5 minutes)
// We're also going to need somewhere to execute the future
// We'll just use this Actors ExecutionContext
implicit val exec = context.dispatcher
def receive = {
    case Request(url) ⇒ {
      log.info(s"Processing $url")
      // We need to read, process and write to DB, then return updated
      val result = for {
        read ← db ? Db.Read(s"select * from $url")
        processed ← processor ? read
        updated ← db ? Db.Write(processed.asInstanceOf[String]) // Oh no, Actor messages aren't typed
      } yield updated.asInstanceOf[String]
      // Now return to user, success or failure
      result.onComplete {
        case Success(result) ⇒ {
          // Return 200 OK to user with results
          log.info(s"done - $result")
        }
        case Failure(failure) ⇒ log.error(s"Oh no!, $failure")
      }
}
}
}
}
| kalmanb/akka-examples | src/main/scala/kalmanb/akka/push/WebServer.scala | Scala | apache-2.0 | 1,313 |
package com.nulabinc.backlog.r2b.exporter.convert
import javax.inject.Inject
import com.nulabinc.backlog.migration.common.convert.Writes
import com.nulabinc.backlog.migration.common.domain.BacklogIssueCategory
import com.taskadapter.redmineapi.bean.IssueCategory
/**
* @author
* uchida
*/
private[exporter] class IssueCategoriesWrites @Inject() ()
extends Writes[Seq[IssueCategory], Seq[BacklogIssueCategory]] {
override def writes(
categories: Seq[IssueCategory]
): Seq[BacklogIssueCategory] = {
categories.map(toBacklog)
}
private[this] def toBacklog(category: IssueCategory) = {
BacklogIssueCategory(
optId = Some(category.getId.intValue()),
name = category.getName,
delete = false
)
}
}
| nulab/BacklogMigration-Redmine | src/main/scala/com/nulabinc/backlog/r2b/exporter/convert/IssueCategoriesWrites.scala | Scala | mit | 752 |
package org.qcri.rheem.api.util
import org.qcri.rheem.core.types.DataSetType
/**
 * This class waits for a [[org.qcri.rheem.core.types.DataSetType]] to be set and verifies that
 * no two conflicting types are set.
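 *
 * Illustrative usage (where `someType` stands for any concrete [[DataSetType]] instance):
 * {{{
 *   val trap = new TypeTrap
 *   trap.dataSetType = someType // the first assignment is stored
 *   trap.dataSetType = someType // assigning the same type again is accepted
 *   // assigning a different type would throw an IllegalArgumentException
 * }}}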
*/
class TypeTrap {
/**
* Stores the [[DataSetType]].
*/
private var _dataSetType: DataSetType[_] = _
/**
* Set the [[DataSetType]] for this instance.
*
* @throws IllegalArgumentException if a different [[DataSetType]] has been set before
* @param dst the [[DataSetType]]
*/
def dataSetType_=(dst: DataSetType[_]): Unit = {
_dataSetType match {
case null => _dataSetType = dst
case `dst` =>
case other => throw new IllegalArgumentException(s"Conflicting types ${_dataSetType} and ${dst}.")
}
}
/**
* Return the previously set [[DataSetType]].
*
* @return the previously set [[DataSetType]] or, if none has been set, [[DataSetType.none()]]
*/
def dataSetType = _dataSetType match {
case null => DataSetType.none()
case other => other
}
/**
* Return the [[Class]] of the previously set [[DataSetType]].
*
* @return the [[Class]] of the previously set [[DataSetType]] or, if none has been set, that of [[DataSetType.none()]]
*/
def typeClass = dataSetType.getDataUnitType.toBasicDataUnitType.getTypeClass
}
| jonasrk/rheem | rheem-api/src/main/scala/org/qcri/rheem/api/util/TypeTrap.scala | Scala | apache-2.0 | 1,341 |
/**
* Copyright (C) 2012-2014 Kaj Magnus Lindberg (born 1979)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.debiki.core
import java.{util => ju}
import collection.{immutable => imm, mutable => mut}
import com.debiki.core.{PostActionPayload => PAP}
import scala.collection.immutable
import Prelude._
import PageParts._
import FlagType.FlagType
/** The "interesting" part of a PostAction, for example, if the action is to edit
* a post, the payload would be the diff, and if the action is to create a new
* post, the payload is the text of the new post.
*/
sealed abstract class PostActionPayload {
def textLengthUtf8: Int = 0
}
object PostActionPayload {
/** Creates a page title, a page body, a comment, or a page config post.
*
* @param approval Defined iff the post was approved on creation, and clarifies why it was.
* @param where If defined, this is an inline comment and the value specifies where
* in the parent post it is to be placed. COULD move to separate Meta post?
* Benefits: Editing and versioning of `where', without affecting this Post.text.
* Benefit 2: There could be > 1 meta-Where for each post, so you could make e.g. a
* generic comment that results in ?? arrows to e.g. 3 other comments ??
*/
case class CreatePost(
parentPostId: Option[PostId],
text: String,
approval: Option[Approval],
multireplyPostIds: immutable.Set[PostId] = immutable.Set.empty,
where: Option[String] = None) extends PostActionPayload {
dieIf(multireplyPostIds.size == 1, "DwE70FN1")
override def textLengthUtf8: Int = text.getBytes("UTF-8").length
}
/** Edits the text of a post.
*
* @param text A diff from the current post text to the new. (Should rename to .diff?)
* @param autoApplied If this edit was applied automatically on creation, e.g. because
* someone edited his/her own comment.
* Currently not in use (yes it is!?? or?) And I'd have to
   *    refactor page-actions-smart.scala fairly much for `autoApplied` to work,
* since currently all appl info is instead handled via EditApp:s.
* - Perhaps easier to remove this field, and construct
* an additional EditApp action when reading an Edit from database,
* if the db says it was auto approved? But I might need this field
* anyway, when *saving* an edit, so the db knows it should mark it as
* auto applied.
* @param approval If the related post is to be automatically approved, when this
* edit is auto applied. (Example: a moderator edits a comment
* that is already approved, then the edit would be
* auto applied, and the related post would be approved implicitly,
* (since it was already approved, and a *moderator* did the edit.))
*/
case class EditPost(
text: String, // (Should rename to `diff`?)
autoApplied: Boolean,
approval: Option[Approval]) extends PostActionPayload {
override def textLengthUtf8: Int = text.getBytes("UTF-8").length
// An edit that hasn't been applied cannot have been approved.
// (It might have been applied, but not approved, however, if a
// user edits his/her own comment, and the changes are then pending
// moderator review.)
require(approval.isEmpty || autoApplied)
}
/** Edit applications (i.e. when edits are applied).
* COULD rename to ApplyEdit
*/
case class EditApp(editId: ActionId, approval: Option[Approval]) extends PostActionPayload
/** Approves comments and edits.
*/
case class ApprovePost(approval: Approval) extends PostActionPayload
val PrelApprovePost = ApprovePost(Approval.Preliminary)
val WellBehavedApprovePost = ApprovePost(Approval.WellBehavedUser)
/** Rejects all edits that have been applied since the last time the post
* was approved.
*/
case class RejectEdits(deleteEdits: Boolean) extends PostActionPayload {
if (deleteEdits)
unimplemented("RejectEdits(deleteEdits = true) not implemented")
}
class Vote extends PostActionPayload
/** The user liked the post, e.g. because it's funny or informative. */
case object VoteLike extends Vote
/** The user e.g. thinks the comment has factual errors, or disagrees with it. */
case object VoteWrong extends Vote
case object VoteOffTopic extends Vote
/** Pins a post at e.g. position 3. This pushes any other posts already pinned
* at e.g. positions 3, 4, and 5 one step to the right, to positions 4, 5 and 6.
* So after a while, the effective position of a pinned post might have changed
* from X to X + Y where Y is the number of new posts pinned at or before X.
* The effective position of a post is computed lazily when the page is rendered.
*
* @param position 1 means place first, 2 means place first but one, and so on.
* -1 means place last, -2 means last but one, and so on.
*/
case class PinPostAtPosition(position: Int) extends PostActionPayload {
illArgIf(position == 0, "DwE25FK8")
}
/** Gives extra votes to a post. A negative value means downvotes. Can be used
* to promote or demote things the admin / moderator likes or dislikes.
* However, a pushpin icon shows that the post has been pinned. So one
* cannot use this functionality to fool other people into believing a post is
   * more (or less) popular than what it actually is.
* Concerning pinning downvotes, if you think that's unfair, because the
* post will be moved away and fewer people will see it and read it (and notice it's
* pinned): well, the moderator can *delete* it as well. It's more "fair" and
* more honest to pin visible downvotes, than to outright delete the whole
* comment/thread?
*//*
case class PinVotesToPost(extraVotes: Int) extends PostActionPayload {
illArgIf(extraVotes == 0, "DwE71Fb0")
}*/
///case object UnpinPost extends PostActionPayload
class CollapseSomething extends PostActionPayload
case object CollapsePost extends CollapseSomething
  /** Collapses a thread and perhaps tucks it away under a Collapsed Threads
* section (which would be far away to the right?, if the thread is laid out horizontally).
*
* Use on old obsolete threads, e.g. a comment about a spelling mistake
* that has since been fixed. Or on uninteresting off-topic threads.
*/
case object CollapseTree extends CollapseSomething
/** Closes a thread. It'll be tucked away under a Closed Threads section,
* and perhaps not shown when rendering page.
*/
case object CloseTree extends PostActionPayload
/** Deletes a single comment.
*/
case class DeletePost(clearFlags: Boolean) extends PostActionPayload
/** Deletes a comment and all replies, recursively.
*/
case object DeleteTree extends PostActionPayload
  /** Deletes things like an edit suggestion or a flag. (But not a post; use DeletePost
* and DeleteTree instead.)
*/
case class Delete(targetActionId: ActionId) extends PostActionPayload
/** Hides a post, e.g. because it was flagged as spam, and clears any flags.
*/
case object HidePostClearFlags extends PostActionPayload
/** Flags a post as e.g. spam, or inappropriate (offensive, illegal, whatever).
*/
case class Flag(tyype: FlagType, reason: String) extends PostActionPayload
/** Deletes all flags for the relevant post.
*/
case object ClearFlags extends PostActionPayload
}
object FlagType extends Enumeration {
type FlagType = Value
val Spam, Inapt, Other = Value
}
| debiki/debiki-server-old | modules/debiki-core/src/main/scala/com/debiki/core/PostActionPayload.scala | Scala | agpl-3.0 | 8,176 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming
import java.io.File
import org.apache.spark.SparkConf
import org.apache.spark.sql.catalyst.util.stringToFile
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
class OffsetSeqLogSuite extends SharedSparkSession {
override def sparkConf: SparkConf =
super.sparkConf
.setAppName("test")
.set("spark.sql.parquet.columnarReaderBatchSize", "4096")
.set("spark.sql.sources.useV1SourceList", "avro")
.set("spark.sql.extensions", "com.intel.oap.ColumnarPlugin")
.set("spark.sql.execution.arrow.maxRecordsPerBatch", "4096")
//.set("spark.shuffle.manager", "org.apache.spark.shuffle.sort.ColumnarShuffleManager")
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "false")
.set("spark.sql.columnar.window", "false")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
/** test string offset type */
case class StringOffset(override val json: String) extends Offset
test("OffsetSeqMetadata - deserialization") {
val key = SQLConf.SHUFFLE_PARTITIONS.key
def getConfWith(shufflePartitions: Int): Map[String, String] = {
Map(key -> shufflePartitions.toString)
}
// None set
assert(new OffsetSeqMetadata(0, 0, Map.empty) === OffsetSeqMetadata("""{}"""))
// One set
assert(new OffsetSeqMetadata(1, 0, Map.empty) ===
OffsetSeqMetadata("""{"batchWatermarkMs":1}"""))
assert(new OffsetSeqMetadata(0, 2, Map.empty) ===
OffsetSeqMetadata("""{"batchTimestampMs":2}"""))
assert(OffsetSeqMetadata(0, 0, getConfWith(shufflePartitions = 2)) ===
OffsetSeqMetadata(s"""{"conf": {"$key":2}}"""))
// Two set
assert(new OffsetSeqMetadata(1, 2, Map.empty) ===
OffsetSeqMetadata("""{"batchWatermarkMs":1,"batchTimestampMs":2}"""))
assert(OffsetSeqMetadata(1, 0, getConfWith(shufflePartitions = 3)) ===
OffsetSeqMetadata(s"""{"batchWatermarkMs":1,"conf": {"$key":3}}"""))
assert(OffsetSeqMetadata(0, 2, getConfWith(shufflePartitions = 3)) ===
OffsetSeqMetadata(s"""{"batchTimestampMs":2,"conf": {"$key":3}}"""))
// All set
assert(OffsetSeqMetadata(1, 2, getConfWith(shufflePartitions = 3)) ===
OffsetSeqMetadata(s"""{"batchWatermarkMs":1,"batchTimestampMs":2,"conf": {"$key":3}}"""))
// Drop unknown fields
assert(OffsetSeqMetadata(1, 2, getConfWith(shufflePartitions = 3)) ===
OffsetSeqMetadata(
s"""{"batchWatermarkMs":1,"batchTimestampMs":2,"conf": {"$key":3}},"unknown":1"""))
}
test("OffsetSeqLog - serialization - deserialization") {
withTempDir { temp =>
      val dir = new File(temp, "dir") // use non-existent directory to test whether the log creates the dir
val metadataLog = new OffsetSeqLog(spark, dir.getAbsolutePath)
val batch0 = OffsetSeq.fill(LongOffset(0), LongOffset(1), LongOffset(2))
val batch1 = OffsetSeq.fill(StringOffset("one"), StringOffset("two"), StringOffset("three"))
val batch0Serialized = OffsetSeq.fill(batch0.offsets.flatMap(_.map(o =>
SerializedOffset(o.json))): _*)
val batch1Serialized = OffsetSeq.fill(batch1.offsets.flatMap(_.map(o =>
SerializedOffset(o.json))): _*)
assert(metadataLog.add(0, batch0))
assert(metadataLog.getLatest() === Some(0 -> batch0Serialized))
assert(metadataLog.get(0) === Some(batch0Serialized))
assert(metadataLog.add(1, batch1))
assert(metadataLog.get(0) === Some(batch0Serialized))
assert(metadataLog.get(1) === Some(batch1Serialized))
assert(metadataLog.getLatest() === Some(1 -> batch1Serialized))
assert(metadataLog.get(None, Some(1)) ===
Array(0 -> batch0Serialized, 1 -> batch1Serialized))
// Adding the same batch does nothing
metadataLog.add(1, OffsetSeq.fill(LongOffset(3)))
assert(metadataLog.get(0) === Some(batch0Serialized))
assert(metadataLog.get(1) === Some(batch1Serialized))
assert(metadataLog.getLatest() === Some(1 -> batch1Serialized))
assert(metadataLog.get(None, Some(1)) ===
Array(0 -> batch0Serialized, 1 -> batch1Serialized))
}
}
test("deserialization log written by future version") {
withTempDir { dir =>
stringToFile(new File(dir, "0"), "v99999")
val log = new OffsetSeqLog(spark, dir.getCanonicalPath)
val e = intercept[IllegalStateException] {
log.get(0)
}
Seq(
s"maximum supported log version is v${OffsetSeqLog.VERSION}, but encountered v99999",
"produced by a newer version of Spark and cannot be read by this version"
).foreach { message =>
assert(e.getMessage.contains(message))
}
}
}
ignore("read Spark 2.1.0 log format") {
val (batchId, offsetSeq) = readFromResource("offset-log-version-2.1.0")
assert(batchId === 0)
assert(offsetSeq.offsets === Seq(
Some(SerializedOffset("""{"logOffset":345}""")),
Some(SerializedOffset("""{"topic-0":{"0":1}}"""))
))
assert(offsetSeq.metadata === Some(OffsetSeqMetadata(0L, 1480981499528L)))
}
private def readFromResource(dir: String): (Long, OffsetSeq) = {
val input = getClass.getResource(s"/structured-streaming/$dir")
val log = new OffsetSeqLog(spark, input.toString)
log.getLatest().get
}
}
| Intel-bigdata/OAP | oap-native-sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/OffsetSeqLogSuite.scala | Scala | apache-2.0 | 6,511 |
package com.metl.data
import org.scalatest._
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.{ShouldMatchers, HavePropertyMatcher, HavePropertyMatchResult}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.OptionValues._
import net.liftweb.util.Helpers._
import net.liftweb.common._
import scala.xml._
import com.metl.data._
import Privacy._
class MeTLTextExtractorSuite extends FunSuite with GeneratorDrivenPropertyChecks with BeforeAndAfter with ShouldMatchers with QueryXml with MeTLTextMatchers with MeTLDataGenerators {
var xmlSerializer: GenericXmlSerializer = _
before {
xmlSerializer = new GenericXmlSerializer(EmptyBackendAdaptor)
}
test("convert metl text to xml") {
forAll (genText) { (genText: MeTLText) =>
implicit val xml = xmlSerializer.fromMeTLText(genText)
genText should have (
server (ServerConfiguration.empty),
author (queryXml[String]("author")),
target (queryXml[String]("target")),
privacy (queryXml[Privacy]("privacy")),
slide (queryXml[String]("slide")),
identity (queryXml[String]("identity")),
tag (queryXml[String]("tag")),
caret (queryXml[Int]("caret")),
text (queryXml[String]("text")),
style (queryXml[String]("style")),
family (queryXml[String]("family")),
weight (queryXml[String]("weight")),
size (queryXml[Double]("size")),
decoration (queryXml[String]("decoration")),
color (queryXml[Color]("color")),
width (queryXml[Double]("width")),
height (queryXml[Double]("height")),
x (queryXml[Double]("x")),
y (queryXml[Double]("y"))
)
}
}
test("extract metl text from xml") {
val content = <message>
<textbox>
<author>eecrole</author>
<target>test</target>
<privacy>private</privacy>
<slide>4</slide>
<identity>eecrole:223445834582</identity>
<tag>eecrole:223445834582</tag>
<caret>0</caret>
<text>Hello World!</text>
<style>Underline</style>
<family>Helvetica</family>
<weight>Bold</weight>
<size>12.0</size>
<decoration>Italics</decoration>
<color>#ffff0000</color>
<width>200</width>
<height>100</height>
<x>120</x>
<y>300</y>
</textbox>
</message>
val result = xmlSerializer.toMeTLData(content)
assert(result === MeTLText(ServerConfiguration.empty, "eecrole", -1L, "Hello World!", 100.0, 200.0, 0, 120.0, 300.0, "eecrole:223445834582",
"Underline", "Helvetica", "Bold", 12.0, "Italics", "eecrole:223445834582", "test", Privacy.PRIVATE, "4", Color(255, 255, 0, 0)))
}
test("extract metl dirty text from xml") {
val content = <message>
<dirtyText>
<author>eecrole</author>
<target>test</target>
<privacy>public</privacy>
<slide>4</slide>
<identity>metlDirtyText</identity>
</dirtyText>
</message>
val result = xmlSerializer.toMeTLData(content).asInstanceOf[MeTLDirtyText]
result should have (
server (ServerConfiguration.empty),
author ("eecrole"),
timestamp (-1L),
target ("test"),
privacy (Privacy.PUBLIC),
slide ("4"),
identity ("metlDirtyText")
)
}
test("serialize metl dirty text to xml") {
forAll (genDirtyText) { (genDirtyText: MeTLDirtyText) =>
implicit val xml = xmlSerializer.fromMeTLDirtyText(genDirtyText)
genDirtyText should have (
server (ServerConfiguration.empty),
author (queryXml[String]("author")),
target (queryXml[String]("target")),
privacy (queryXml[Privacy]("privacy")),
slide (queryXml[String]("slide")),
identity (queryXml[String]("identity"))
)
}
}
}
| StackableRegiments/analyticalmetlx | src/test/scala/com/metl/data/MeTLTextSuite.scala | Scala | apache-2.0 | 3,793 |
package libs
import java.io._
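/** Minimal file logger: each message is prefixed with the seconds elapsed since construction,
  * optionally together with an iteration index `i`. */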
class Logger(filepath: String) {
val startTime = System.currentTimeMillis()
val logfile = new PrintWriter(new File(filepath))
def log(message: String, i: Int = -1) {
val elapsedTime = 1F * (System.currentTimeMillis() - startTime) / 1000
if (i == -1) {
logfile.write(elapsedTime.toString + ": " + message + "\n")
} else {
logfile.write(elapsedTime.toString + ", i = " + i.toString + ": "+ message + "\n")
}
logfile.flush()
}
}
| amplab/SparkNet | src/main/scala/libs/Logger.scala | Scala | mit | 500 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package expressions
import base.Import
import com.intellij.lang.PsiBuilder
import com.intellij.psi.tree.TokenSet
import lexer.ScalaTokenTypes
import top.TmplDef
import com.intellij.lang.PsiBuilder.Marker
import statements.{EmptyDcl, Dcl, Def}
import builder.ScalaPsiBuilder
import parser.util.ParserPatcher
/**
* @author Alexander Podkhalyuzin
* Date: 06.03.2008
*/
/*
* BlockStat ::= Import
* | ['implicit'] Def
* | {LocalModifier} TmplDef
* | Expr1
*/
object BlockStat {
def parse(builder: ScalaPsiBuilder): Boolean = {
val tokenType = builder.getTokenType
val patcher = ParserPatcher.getSuitablePatcher(builder)
tokenType match {
case ScalaTokenTypes.kIMPORT => {
Import parse builder
return true
}
case ScalaTokenTypes.tSEMICOLON => {
builder.advanceLexer()
return true
}
case ScalaTokenTypes.kDEF | ScalaTokenTypes.kVAL | ScalaTokenTypes.kVAR | ScalaTokenTypes.kTYPE => {
if (!Def.parse(builder, false, true)) {
if (Dcl.parse(builder)) {
builder error ErrMsg("wrong.declaration.in.block")
return true
} else {
EmptyDcl.parse(builder)
builder error ErrMsg("wrong.declaration.in.block")
return true
}
}
}
case ScalaTokenTypes.kCLASS | ScalaTokenTypes.kTRAIT | ScalaTokenTypes.kOBJECT => {
return TmplDef.parse(builder)
}
case _ if patcher.parse(builder) => parse(builder)
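      // Fallback: try an expression first, then a definition, then a template definition, and
      // finally (empty) declarations, which are reported as misplaced inside a block.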
case _ => {
if (!Expr1.parse(builder)) {
if (!Def.parse(builder, false, true)) {
if (!TmplDef.parse(builder)) {
if (Dcl.parse(builder)) {
builder error ErrMsg("wrong.declaration.in.block")
return true
}
else {
if (EmptyDcl.parse(builder)) {
builder error ErrMsg("wrong.declaration.in.block")
return true
} else
return false
}
}
}
}
}
}
true
}
} | consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/parser/parsing/expressions/BlockStat.scala | Scala | apache-2.0 | 2,224 |
package com.gmail.at.pukanito.model.graph
import scala.collection.SeqLike
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.Builder
/**
* Representation of a relative or absolute path through GraphItems.
*
* A path is a sequence of NodeKeys.
*/
abstract class Path extends Seq[NodeKey] with SeqLike[NodeKey, Path] {
override protected[this] def newBuilder: Builder[NodeKey, Path] = Path.newBuilder
/**
* Appends another path to this path.
*
* @param p the other path to append.
* @return a new path consisting of this path with p appended.
*/
// scalastyle:off method.name spaces.after.plus
def +(p: Path): Path
// scalastyle:on method.name spaces.after.plus
}
/**
* Implementation for the empty path.
*/
case class EmptyPath private[graph] () extends Path {
override def apply(idx: Int): NodeKey = throw new NoSuchElementException
override def iterator: Iterator[NodeKey] = Iterator.empty
override def length: Int = 0
// scalastyle:off method.name spaces.after.plus public.methods.have.type
override def +(p: Path) = p
// scalastyle:on method.name spaces.after.plus public.methods.have.type
}
/**
* Implementation for the non-empty path.
*
* @param path the keys representing the path.
*/
case class NonEmptyPath private[graph] (
path: NodeKey*
) extends Path {
val pathElements = Seq[NodeKey](path:_*)
override def apply(idx: Int): NodeKey = pathElements(idx)
override def iterator: Iterator[NodeKey] = pathElements.iterator
override def length: Int = pathElements.length
// scalastyle:off method.name spaces.after.plus public.methods.have.type
override def +(p: Path) = p match {
// scalastyle:on method.name spaces.after.plus public.methods.have.type
case _: EmptyPath => this
case that: NonEmptyPath => new NonEmptyPath((this.pathElements ++ that.pathElements):_*)
}
}
/**
* Types and helper methods for Path.
*
* Creating a graph path from simple keys:
* {{{
* val path = Path("key1" -> val1 [ , "key2" -> val2, ... ] )
* }}}
*
* instead of:
*
* {{{
* val path = Path(NodeKey("key1" -> val1) [ , NodeKey("key2" -> val2), ... ] )
* }}}
*
* With compound keys it is necessary to use NodeKey(...).
*/
object Path {
import scala.language.implicitConversions
/**
* Helper to convert a NodeKey to a Path (for implicit conversion).
*
* @param key graph item key to convert.
* @return a Path
*/
implicit def nodeKey2Path(key: NodeKey): Path = Path(key)
/**
* Helper to create a Path from keys.
*
* @param keys maps containing the keys of each level in the path to be created.
* @return a Path with the specified keys.
*/
def apply(keys: NodeKey*): Path = keys.size match {
case 0 => new EmptyPath
case _ => new NonEmptyPath(keys:_*)
}
/**
* Helper for match ... case extraction.
*/
def unapplySeq(x: Seq[NodeKey]): Option[Seq[NodeKey]] = Some(x)
def fromSeq(buf: Seq[NodeKey]): Path = apply(buf:_*)
/**
* Helper for creating new instances.
*/
def newBuilder: Builder[NodeKey, Path] = new ArrayBuffer mapResult fromSeq
}
| pukanito/bigdatanalysis | src/main/scala/com/gmail/at/pukanito/model/graph/Path.scala | Scala | gpl-3.0 | 3,126 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.amaterasu.common.dataobjects
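/** Describes a single export of a dataset, naming the dataset and the format it is written in. */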
case class Export(dataset: String, format: String)
| eyalbenivri/amaterasu | common/src/main/scala/org/apache/ameterasu/common/dataobjects/Export.scala | Scala | apache-2.0 | 901 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.math.{BigDecimal => JavaBigDecimal}
import org.apache.spark.SparkException
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
import org.apache.spark.unsafe.types.UTF8String.{IntWrapper, LongWrapper}
object Cast {
/**
* Returns true iff we can cast `from` type to `to` type.
*/
def canCast(from: DataType, to: DataType): Boolean = (from, to) match {
case (fromType, toType) if fromType == toType => true
case (NullType, _) => true
case (_, StringType) => true
case (StringType, BinaryType) => true
case (StringType, BooleanType) => true
case (DateType, BooleanType) => true
case (TimestampType, BooleanType) => true
case (_: NumericType, BooleanType) => true
case (StringType, TimestampType) => true
case (BooleanType, TimestampType) => true
case (DateType, TimestampType) => true
case (_: NumericType, TimestampType) => true
case (StringType, DateType) => true
case (TimestampType, DateType) => true
case (StringType, CalendarIntervalType) => true
case (StringType, _: NumericType) => true
case (BooleanType, _: NumericType) => true
case (DateType, _: NumericType) => true
case (TimestampType, _: NumericType) => true
case (_: NumericType, _: NumericType) => true
case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
canCast(fromType, toType) &&
resolvableNullability(fn || forceNullable(fromType, toType), tn)
case (MapType(fromKey, fromValue, fn), MapType(toKey, toValue, tn)) =>
canCast(fromKey, toKey) &&
(!forceNullable(fromKey, toKey)) &&
canCast(fromValue, toValue) &&
resolvableNullability(fn || forceNullable(fromValue, toValue), tn)
case (StructType(fromFields), StructType(toFields)) =>
fromFields.length == toFields.length &&
fromFields.zip(toFields).forall {
case (fromField, toField) =>
canCast(fromField.dataType, toField.dataType) &&
resolvableNullability(
fromField.nullable || forceNullable(fromField.dataType, toField.dataType),
toField.nullable)
}
case (udt1: UserDefinedType[_], udt2: UserDefinedType[_]) if udt1.userClass == udt2.userClass =>
true
case _ => false
}
/**
   * Return true if we need to use the `timeZone` information when casting `from` type to `to` type.
* The patterns matched reflect the current implementation in the Cast node.
* c.f. usage of `timeZone` in:
* * Cast.castToString
* * Cast.castToDate
* * Cast.castToTimestamp
*/
def needsTimeZone(from: DataType, to: DataType): Boolean = (from, to) match {
case (StringType, TimestampType) => true
case (DateType, TimestampType) => true
case (TimestampType, StringType) => true
case (TimestampType, DateType) => true
case (ArrayType(fromType, _), ArrayType(toType, _)) => needsTimeZone(fromType, toType)
case (MapType(fromKey, fromValue, _), MapType(toKey, toValue, _)) =>
needsTimeZone(fromKey, toKey) || needsTimeZone(fromValue, toValue)
case (StructType(fromFields), StructType(toFields)) =>
fromFields.length == toFields.length &&
fromFields.zip(toFields).exists {
case (fromField, toField) =>
needsTimeZone(fromField.dataType, toField.dataType)
}
case _ => false
}
/**
* Return true iff we may truncate during casting `from` type to `to` type. e.g. long -> int,
* timestamp -> date.
*/
def mayTruncate(from: DataType, to: DataType): Boolean = (from, to) match {
case (from: NumericType, to: DecimalType) if !to.isWiderThan(from) => true
case (from: DecimalType, to: NumericType) if !from.isTighterThan(to) => true
case (from, to) if illegalNumericPrecedence(from, to) => true
case (TimestampType, DateType) => true
case (StringType, to: NumericType) => true
case _ => false
}
private def illegalNumericPrecedence(from: DataType, to: DataType): Boolean = {
val fromPrecedence = TypeCoercion.numericPrecedence.indexOf(from)
val toPrecedence = TypeCoercion.numericPrecedence.indexOf(to)
toPrecedence > 0 && fromPrecedence > toPrecedence
}
def forceNullable(from: DataType, to: DataType): Boolean = (from, to) match {
case (NullType, _) => true
case (_, _) if from == to => false
case (StringType, BinaryType) => false
case (StringType, _) => true
case (_, StringType) => false
case (FloatType | DoubleType, TimestampType) => true
case (TimestampType, DateType) => false
case (_, DateType) => true
case (DateType, TimestampType) => false
case (DateType, _) => true
case (_, CalendarIntervalType) => true
case (_, _: DecimalType) => true // overflow
case (_: FractionalType, _: IntegralType) => true // NaN, infinity
case _ => false
}
private def resolvableNullability(from: Boolean, to: Boolean) = !from || to
}
/**
* Cast the child expression to the target data type.
*
* When cast from/to timezone related types, we need timeZoneId, which will be resolved with
* session local timezone by an analyzer [[ResolveTimeZone]].
*/
@ExpressionDescription(
usage = "_FUNC_(expr AS type) - Casts the value `expr` to the target data type `type`.",
examples = """
Examples:
> SELECT _FUNC_('10' as int);
10
""")
case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String] = None)
extends UnaryExpression with TimeZoneAwareExpression with NullIntolerant {
def this(child: Expression, dataType: DataType) = this(child, dataType, None)
override def toString: String = s"cast($child as ${dataType.simpleString})"
override def checkInputDataTypes(): TypeCheckResult = {
if (Cast.canCast(child.dataType, dataType)) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure(
s"cannot cast ${child.dataType} to $dataType")
}
}
override def nullable: Boolean = Cast.forceNullable(child.dataType, dataType) || child.nullable
override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
copy(timeZoneId = Option(timeZoneId))
// When this cast involves TimeZone, it's only resolved if the timeZoneId is set;
// Otherwise behave like Expression.resolved.
override lazy val resolved: Boolean =
childrenResolved && checkInputDataTypes().isSuccess && (!needsTimeZone || timeZoneId.isDefined)
private[this] def needsTimeZone: Boolean = Cast.needsTimeZone(child.dataType, dataType)
// [[func]] assumes the input is no longer null because eval already does the null check.
@inline private[this] def buildCast[T](a: Any, func: T => Any): Any = func(a.asInstanceOf[T])
// UDFToString
private[this] def castToString(from: DataType): Any => Any = from match {
case BinaryType => buildCast[Array[Byte]](_, UTF8String.fromBytes)
case DateType => buildCast[Int](_, d => UTF8String.fromString(DateTimeUtils.dateToString(d)))
case TimestampType => buildCast[Long](_,
t => UTF8String.fromString(DateTimeUtils.timestampToString(t, timeZone)))
case _ => buildCast[Any](_, o => UTF8String.fromString(o.toString))
}
// BinaryConverter
private[this] def castToBinary(from: DataType): Any => Any = from match {
case StringType => buildCast[UTF8String](_, _.getBytes)
}
// UDFToBoolean
private[this] def castToBoolean(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => {
if (StringUtils.isTrueString(s)) {
true
} else if (StringUtils.isFalseString(s)) {
false
} else {
null
}
})
case TimestampType =>
buildCast[Long](_, t => t != 0)
case DateType =>
// Hive would return null when cast from date to boolean
buildCast[Int](_, d => null)
case LongType =>
buildCast[Long](_, _ != 0)
case IntegerType =>
buildCast[Int](_, _ != 0)
case ShortType =>
buildCast[Short](_, _ != 0)
case ByteType =>
buildCast[Byte](_, _ != 0)
case DecimalType() =>
buildCast[Decimal](_, !_.isZero)
case DoubleType =>
buildCast[Double](_, _ != 0)
case FloatType =>
buildCast[Float](_, _ != 0)
}
// TimestampConverter
private[this] def castToTimestamp(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, utfs => DateTimeUtils.stringToTimestamp(utfs, timeZone).orNull)
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1L else 0)
case LongType =>
buildCast[Long](_, l => longToTimestamp(l))
case IntegerType =>
buildCast[Int](_, i => longToTimestamp(i.toLong))
case ShortType =>
buildCast[Short](_, s => longToTimestamp(s.toLong))
case ByteType =>
buildCast[Byte](_, b => longToTimestamp(b.toLong))
case DateType =>
buildCast[Int](_, d => DateTimeUtils.daysToMillis(d, timeZone) * 1000)
// TimestampWritable.decimalToTimestamp
case DecimalType() =>
buildCast[Decimal](_, d => decimalToTimestamp(d))
// TimestampWritable.doubleToTimestamp
case DoubleType =>
buildCast[Double](_, d => doubleToTimestamp(d))
// TimestampWritable.floatToTimestamp
case FloatType =>
buildCast[Float](_, f => doubleToTimestamp(f.toDouble))
}
private[this] def decimalToTimestamp(d: Decimal): Long = {
(d.toBigDecimal * 1000000L).longValue()
}
private[this] def doubleToTimestamp(d: Double): Any = {
if (d.isNaN || d.isInfinite) null else (d * 1000000L).toLong
}
// converting seconds to us
private[this] def longToTimestamp(t: Long): Long = t * 1000000L
// converting us to seconds
private[this] def timestampToLong(ts: Long): Long = math.floor(ts.toDouble / 1000000L).toLong
// converting us to seconds in double
private[this] def timestampToDouble(ts: Long): Double = {
ts / 1000000.0
}
// DateConverter
private[this] def castToDate(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => DateTimeUtils.stringToDate(s).orNull)
case TimestampType =>
      // Discard precision finer than seconds, following Hive.
// Timestamp.nanos is in 0 to 999,999,999, no more than a second.
buildCast[Long](_, t => DateTimeUtils.millisToDays(t / 1000L, timeZone))
}
// IntervalConverter
private[this] def castToInterval(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => CalendarInterval.fromString(s.toString))
}
// LongConverter
private[this] def castToLong(from: DataType): Any => Any = from match {
case StringType =>
val result = new LongWrapper()
buildCast[UTF8String](_, s => if (s.toLong(result)) result.value else null)
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1L else 0L)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t))
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toLong(b)
}
// IntConverter
private[this] def castToInt(from: DataType): Any => Any = from match {
case StringType =>
val result = new IntWrapper()
buildCast[UTF8String](_, s => if (s.toInt(result)) result.value else null)
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1 else 0)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t).toInt)
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b)
}
// ShortConverter
private[this] def castToShort(from: DataType): Any => Any = from match {
case StringType =>
val result = new IntWrapper()
buildCast[UTF8String](_, s => if (s.toShort(result)) {
result.value.toShort
} else {
null
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1.toShort else 0.toShort)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t).toShort)
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toShort
}
// ByteConverter
private[this] def castToByte(from: DataType): Any => Any = from match {
case StringType =>
val result = new IntWrapper()
buildCast[UTF8String](_, s => if (s.toByte(result)) {
result.value.toByte
} else {
null
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1.toByte else 0.toByte)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t).toByte)
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toByte
}
/**
* Change the precision / scale in a given decimal to those set in `decimalType` (if any),
* returning null if it overflows or modifying `value` in-place and returning it if successful.
*
* NOTE: this modifies `value` in-place, so don't call it on external data.
*/
private[this] def changePrecision(value: Decimal, decimalType: DecimalType): Decimal = {
if (value.changePrecision(decimalType.precision, decimalType.scale)) value else null
}
/**
* Create new `Decimal` with precision and scale given in `decimalType` (if any),
* returning null if it overflows or creating a new `value` and returning it if successful.
*/
private[this] def toPrecision(value: Decimal, decimalType: DecimalType): Decimal =
value.toPrecision(decimalType.precision, decimalType.scale)
private[this] def castToDecimal(from: DataType, target: DecimalType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => try {
changePrecision(Decimal(new JavaBigDecimal(s.toString)), target)
} catch {
case _: NumberFormatException => null
})
case BooleanType =>
buildCast[Boolean](_, b => toPrecision(if (b) Decimal.ONE else Decimal.ZERO, target))
case DateType =>
buildCast[Int](_, d => null) // date can't cast to decimal in Hive
case TimestampType =>
// Note that we lose precision here.
buildCast[Long](_, t => changePrecision(Decimal(timestampToDouble(t)), target))
case dt: DecimalType =>
b => toPrecision(b.asInstanceOf[Decimal], target)
case t: IntegralType =>
b => changePrecision(Decimal(t.integral.asInstanceOf[Integral[Any]].toLong(b)), target)
case x: FractionalType =>
b => try {
changePrecision(Decimal(x.fractional.asInstanceOf[Fractional[Any]].toDouble(b)), target)
} catch {
case _: NumberFormatException => null
}
}
// DoubleConverter
private[this] def castToDouble(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => try s.toString.toDouble catch {
case _: NumberFormatException => null
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1d else 0d)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToDouble(t))
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toDouble(b)
}
// FloatConverter
private[this] def castToFloat(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => try s.toString.toFloat catch {
case _: NumberFormatException => null
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1f else 0f)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToDouble(t).toFloat)
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toFloat(b)
}
private[this] def castArray(fromType: DataType, toType: DataType): Any => Any = {
val elementCast = cast(fromType, toType)
// TODO: Could be faster?
buildCast[ArrayData](_, array => {
val values = new Array[Any](array.numElements())
array.foreach(fromType, (i, e) => {
if (e == null) {
values(i) = null
} else {
values(i) = elementCast(e)
}
})
new GenericArrayData(values)
})
}
private[this] def castMap(from: MapType, to: MapType): Any => Any = {
val keyCast = castArray(from.keyType, to.keyType)
val valueCast = castArray(from.valueType, to.valueType)
buildCast[MapData](_, map => {
val keys = keyCast(map.keyArray()).asInstanceOf[ArrayData]
val values = valueCast(map.valueArray()).asInstanceOf[ArrayData]
new ArrayBasedMapData(keys, values)
})
}
private[this] def castStruct(from: StructType, to: StructType): Any => Any = {
val castFuncs: Array[(Any) => Any] = from.fields.zip(to.fields).map {
case (fromField, toField) => cast(fromField.dataType, toField.dataType)
}
// TODO: Could be faster?
buildCast[InternalRow](_, row => {
val newRow = new GenericInternalRow(from.fields.length)
var i = 0
while (i < row.numFields) {
newRow.update(i,
if (row.isNullAt(i)) null else castFuncs(i)(row.get(i, from.apply(i).dataType)))
i += 1
}
newRow
})
}
private[this] def cast(from: DataType, to: DataType): Any => Any = {
// If the cast does not change the structure, then we don't really need to cast anything.
// We can return what the children return. Same thing should happen in the codegen path.
if (DataType.equalsStructurally(from, to)) {
identity
} else {
to match {
case dt if dt == from => identity[Any]
case StringType => castToString(from)
case BinaryType => castToBinary(from)
case DateType => castToDate(from)
case decimal: DecimalType => castToDecimal(from, decimal)
case TimestampType => castToTimestamp(from)
case CalendarIntervalType => castToInterval(from)
case BooleanType => castToBoolean(from)
case ByteType => castToByte(from)
case ShortType => castToShort(from)
case IntegerType => castToInt(from)
case FloatType => castToFloat(from)
case LongType => castToLong(from)
case DoubleType => castToDouble(from)
case array: ArrayType =>
castArray(from.asInstanceOf[ArrayType].elementType, array.elementType)
case map: MapType => castMap(from.asInstanceOf[MapType], map)
case struct: StructType => castStruct(from.asInstanceOf[StructType], struct)
case udt: UserDefinedType[_]
if udt.userClass == from.asInstanceOf[UserDefinedType[_]].userClass =>
identity[Any]
case _: UserDefinedType[_] =>
throw new SparkException(s"Cannot cast $from to $to.")
}
}
}
private[this] lazy val cast: Any => Any = cast(child.dataType, dataType)
protected override def nullSafeEval(input: Any): Any = cast(input)
override def genCode(ctx: CodegenContext): ExprCode = {
// If the cast does not change the structure, then we don't really need to cast anything.
// We can return what the children return. Same thing should happen in the interpreted path.
if (DataType.equalsStructurally(child.dataType, dataType)) {
child.genCode(ctx)
} else {
super.genCode(ctx)
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval = child.genCode(ctx)
val nullSafeCast = nullSafeCastFunction(child.dataType, dataType, ctx)
ev.copy(code = eval.code +
castCode(ctx, eval.value, eval.isNull, ev.value, ev.isNull, dataType, nullSafeCast))
}
// three function arguments are: child.primitive, result.primitive and result.isNull
// it returns the code snippets to be put in null safe evaluation region
private[this] type CastFunction = (String, String, String) => String
private[this] def nullSafeCastFunction(
from: DataType,
to: DataType,
ctx: CodegenContext): CastFunction = to match {
case _ if from == NullType => (c, evPrim, evNull) => s"$evNull = true;"
case _ if to == from => (c, evPrim, evNull) => s"$evPrim = $c;"
case StringType => castToStringCode(from, ctx)
case BinaryType => castToBinaryCode(from)
case DateType => castToDateCode(from, ctx)
case decimal: DecimalType => castToDecimalCode(from, decimal, ctx)
case TimestampType => castToTimestampCode(from, ctx)
case CalendarIntervalType => castToIntervalCode(from)
case BooleanType => castToBooleanCode(from)
case ByteType => castToByteCode(from, ctx)
case ShortType => castToShortCode(from, ctx)
case IntegerType => castToIntCode(from, ctx)
case FloatType => castToFloatCode(from)
case LongType => castToLongCode(from, ctx)
case DoubleType => castToDoubleCode(from)
case array: ArrayType =>
castArrayCode(from.asInstanceOf[ArrayType].elementType, array.elementType, ctx)
case map: MapType => castMapCode(from.asInstanceOf[MapType], map, ctx)
case struct: StructType => castStructCode(from.asInstanceOf[StructType], struct, ctx)
case udt: UserDefinedType[_]
if udt.userClass == from.asInstanceOf[UserDefinedType[_]].userClass =>
(c, evPrim, evNull) => s"$evPrim = $c;"
case _: UserDefinedType[_] =>
throw new SparkException(s"Cannot cast $from to $to.")
}
  // Since we need to cast child expressions recursively inside complex types, such as a Map's
  // key and value or a Struct's fields, we need to name all the variables involved in a cast.
private[this] def castCode(ctx: CodegenContext, childPrim: String, childNull: String,
resultPrim: String, resultNull: String, resultType: DataType, cast: CastFunction): String = {
s"""
boolean $resultNull = $childNull;
${ctx.javaType(resultType)} $resultPrim = ${ctx.defaultValue(resultType)};
if (!$childNull) {
${cast(childPrim, resultPrim, resultNull)}
}
"""
}
private[this] def castToStringCode(from: DataType, ctx: CodegenContext): CastFunction = {
from match {
case BinaryType =>
(c, evPrim, evNull) => s"$evPrim = UTF8String.fromBytes($c);"
case DateType =>
(c, evPrim, evNull) => s"""$evPrim = UTF8String.fromString(
org.apache.spark.sql.catalyst.util.DateTimeUtils.dateToString($c));"""
case TimestampType =>
val tz = ctx.addReferenceMinorObj(timeZone)
(c, evPrim, evNull) => s"""$evPrim = UTF8String.fromString(
org.apache.spark.sql.catalyst.util.DateTimeUtils.timestampToString($c, $tz));"""
case _ =>
(c, evPrim, evNull) => s"$evPrim = UTF8String.fromString(String.valueOf($c));"
}
}
private[this] def castToBinaryCode(from: DataType): CastFunction = from match {
case StringType =>
(c, evPrim, evNull) => s"$evPrim = $c.getBytes();"
}
private[this] def castToDateCode(
from: DataType,
ctx: CodegenContext): CastFunction = from match {
case StringType =>
val intOpt = ctx.freshName("intOpt")
(c, evPrim, evNull) => s"""
scala.Option<Integer> $intOpt =
org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToDate($c);
if ($intOpt.isDefined()) {
$evPrim = ((Integer) $intOpt.get()).intValue();
} else {
$evNull = true;
}
"""
case TimestampType =>
val tz = ctx.addReferenceMinorObj(timeZone)
(c, evPrim, evNull) =>
s"$evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.millisToDays($c / 1000L, $tz);"
case _ =>
(c, evPrim, evNull) => s"$evNull = true;"
}
private[this] def changePrecision(d: String, decimalType: DecimalType,
evPrim: String, evNull: String): String =
s"""
if ($d.changePrecision(${decimalType.precision}, ${decimalType.scale})) {
$evPrim = $d;
} else {
$evNull = true;
}
"""
private[this] def castToDecimalCode(
from: DataType,
target: DecimalType,
ctx: CodegenContext): CastFunction = {
val tmp = ctx.freshName("tmpDecimal")
from match {
case StringType =>
(c, evPrim, evNull) =>
s"""
try {
Decimal $tmp = Decimal.apply(new java.math.BigDecimal($c.toString()));
${changePrecision(tmp, target, evPrim, evNull)}
} catch (java.lang.NumberFormatException e) {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) =>
s"""
Decimal $tmp = $c ? Decimal.apply(1) : Decimal.apply(0);
${changePrecision(tmp, target, evPrim, evNull)}
"""
case DateType =>
// date can't cast to decimal in Hive
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
// Note that we lose precision here.
(c, evPrim, evNull) =>
s"""
Decimal $tmp = Decimal.apply(
scala.math.BigDecimal.valueOf(${timestampToDoubleCode(c)}));
${changePrecision(tmp, target, evPrim, evNull)}
"""
case DecimalType() =>
(c, evPrim, evNull) =>
s"""
Decimal $tmp = $c.clone();
${changePrecision(tmp, target, evPrim, evNull)}
"""
case x: IntegralType =>
(c, evPrim, evNull) =>
s"""
Decimal $tmp = Decimal.apply((long) $c);
${changePrecision(tmp, target, evPrim, evNull)}
"""
case x: FractionalType =>
// All other numeric types can be represented precisely as Doubles
(c, evPrim, evNull) =>
s"""
try {
Decimal $tmp = Decimal.apply(scala.math.BigDecimal.valueOf((double) $c));
${changePrecision(tmp, target, evPrim, evNull)}
} catch (java.lang.NumberFormatException e) {
$evNull = true;
}
"""
}
}
private[this] def castToTimestampCode(
from: DataType,
ctx: CodegenContext): CastFunction = from match {
case StringType =>
val tz = ctx.addReferenceMinorObj(timeZone)
val longOpt = ctx.freshName("longOpt")
(c, evPrim, evNull) =>
s"""
scala.Option<Long> $longOpt =
org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToTimestamp($c, $tz);
if ($longOpt.isDefined()) {
$evPrim = ((Long) $longOpt.get()).longValue();
} else {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? 1L : 0L;"
case _: IntegralType =>
(c, evPrim, evNull) => s"$evPrim = ${longToTimeStampCode(c)};"
case DateType =>
val tz = ctx.addReferenceMinorObj(timeZone)
(c, evPrim, evNull) =>
s"$evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.daysToMillis($c, $tz) * 1000;"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = ${decimalToTimestampCode(c)};"
case DoubleType =>
(c, evPrim, evNull) =>
s"""
if (Double.isNaN($c) || Double.isInfinite($c)) {
$evNull = true;
} else {
$evPrim = (long)($c * 1000000L);
}
"""
case FloatType =>
(c, evPrim, evNull) =>
s"""
if (Float.isNaN($c) || Float.isInfinite($c)) {
$evNull = true;
} else {
$evPrim = (long)($c * 1000000L);
}
"""
}
private[this] def castToIntervalCode(from: DataType): CastFunction = from match {
case StringType =>
(c, evPrim, evNull) =>
s"""$evPrim = CalendarInterval.fromString($c.toString());
if(${evPrim} == null) {
${evNull} = true;
}
""".stripMargin
}
private[this] def decimalToTimestampCode(d: String): String =
s"($d.toBigDecimal().bigDecimal().multiply(new java.math.BigDecimal(1000000L))).longValue()"
private[this] def longToTimeStampCode(l: String): String = s"$l * 1000000L"
private[this] def timestampToIntegerCode(ts: String): String =
s"java.lang.Math.floor((double) $ts / 1000000L)"
private[this] def timestampToDoubleCode(ts: String): String = s"$ts / 1000000.0"
private[this] def castToBooleanCode(from: DataType): CastFunction = from match {
case StringType =>
val stringUtils = StringUtils.getClass.getName.stripSuffix("$")
(c, evPrim, evNull) =>
s"""
if ($stringUtils.isTrueString($c)) {
$evPrim = true;
} else if ($stringUtils.isFalseString($c)) {
$evPrim = false;
} else {
$evNull = true;
}
"""
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = $c != 0;"
case DateType =>
// Hive would return null when cast from date to boolean
(c, evPrim, evNull) => s"$evNull = true;"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = !$c.isZero();"
case n: NumericType =>
(c, evPrim, evNull) => s"$evPrim = $c != 0;"
}
private[this] def castToByteCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
case StringType =>
val wrapper = ctx.freshName("wrapper")
ctx.addMutableState("UTF8String.IntWrapper", wrapper,
s"$wrapper = new UTF8String.IntWrapper();")
(c, evPrim, evNull) =>
s"""
if ($c.toByte($wrapper)) {
$evPrim = (byte) $wrapper.value;
} else {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? (byte) 1 : (byte) 0;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = (byte) ${timestampToIntegerCode(c)};"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toByte();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (byte) $c;"
}
private[this] def castToShortCode(
from: DataType,
ctx: CodegenContext): CastFunction = from match {
case StringType =>
val wrapper = ctx.freshName("wrapper")
ctx.addMutableState("UTF8String.IntWrapper", wrapper,
s"$wrapper = new UTF8String.IntWrapper();")
(c, evPrim, evNull) =>
s"""
if ($c.toShort($wrapper)) {
$evPrim = (short) $wrapper.value;
} else {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? (short) 1 : (short) 0;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = (short) ${timestampToIntegerCode(c)};"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toShort();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (short) $c;"
}
private[this] def castToIntCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
case StringType =>
val wrapper = ctx.freshName("wrapper")
ctx.addMutableState("UTF8String.IntWrapper", wrapper,
s"$wrapper = new UTF8String.IntWrapper();")
(c, evPrim, evNull) =>
s"""
if ($c.toInt($wrapper)) {
$evPrim = $wrapper.value;
} else {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? 1 : 0;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = (int) ${timestampToIntegerCode(c)};"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toInt();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (int) $c;"
}
private[this] def castToLongCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
case StringType =>
val wrapper = ctx.freshName("wrapper")
ctx.addMutableState("UTF8String.LongWrapper", wrapper,
s"$wrapper = new UTF8String.LongWrapper();")
(c, evPrim, evNull) =>
s"""
if ($c.toLong($wrapper)) {
$evPrim = $wrapper.value;
} else {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? 1L : 0L;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = (long) ${timestampToIntegerCode(c)};"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toLong();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (long) $c;"
}
private[this] def castToFloatCode(from: DataType): CastFunction = from match {
case StringType =>
(c, evPrim, evNull) =>
s"""
try {
$evPrim = Float.valueOf($c.toString());
} catch (java.lang.NumberFormatException e) {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? 1.0f : 0.0f;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = (float) (${timestampToDoubleCode(c)});"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toFloat();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (float) $c;"
}
private[this] def castToDoubleCode(from: DataType): CastFunction = from match {
case StringType =>
(c, evPrim, evNull) =>
s"""
try {
$evPrim = Double.valueOf($c.toString());
} catch (java.lang.NumberFormatException e) {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? 1.0d : 0.0d;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = ${timestampToDoubleCode(c)};"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toDouble();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (double) $c;"
}
private[this] def castArrayCode(
fromType: DataType, toType: DataType, ctx: CodegenContext): CastFunction = {
val elementCast = nullSafeCastFunction(fromType, toType, ctx)
val arrayClass = classOf[GenericArrayData].getName
val fromElementNull = ctx.freshName("feNull")
val fromElementPrim = ctx.freshName("fePrim")
val toElementNull = ctx.freshName("teNull")
val toElementPrim = ctx.freshName("tePrim")
val size = ctx.freshName("n")
val j = ctx.freshName("j")
val values = ctx.freshName("values")
(c, evPrim, evNull) =>
s"""
final int $size = $c.numElements();
final Object[] $values = new Object[$size];
for (int $j = 0; $j < $size; $j ++) {
if ($c.isNullAt($j)) {
$values[$j] = null;
} else {
boolean $fromElementNull = false;
${ctx.javaType(fromType)} $fromElementPrim =
${ctx.getValue(c, fromType, j)};
${castCode(ctx, fromElementPrim,
fromElementNull, toElementPrim, toElementNull, toType, elementCast)}
if ($toElementNull) {
$values[$j] = null;
} else {
$values[$j] = $toElementPrim;
}
}
}
$evPrim = new $arrayClass($values);
"""
}
private[this] def castMapCode(from: MapType, to: MapType, ctx: CodegenContext): CastFunction = {
val keysCast = castArrayCode(from.keyType, to.keyType, ctx)
val valuesCast = castArrayCode(from.valueType, to.valueType, ctx)
val mapClass = classOf[ArrayBasedMapData].getName
val keys = ctx.freshName("keys")
val convertedKeys = ctx.freshName("convertedKeys")
val convertedKeysNull = ctx.freshName("convertedKeysNull")
val values = ctx.freshName("values")
val convertedValues = ctx.freshName("convertedValues")
val convertedValuesNull = ctx.freshName("convertedValuesNull")
(c, evPrim, evNull) =>
s"""
final ArrayData $keys = $c.keyArray();
final ArrayData $values = $c.valueArray();
${castCode(ctx, keys, "false",
convertedKeys, convertedKeysNull, ArrayType(to.keyType), keysCast)}
${castCode(ctx, values, "false",
convertedValues, convertedValuesNull, ArrayType(to.valueType), valuesCast)}
$evPrim = new $mapClass($convertedKeys, $convertedValues);
"""
}
private[this] def castStructCode(
from: StructType, to: StructType, ctx: CodegenContext): CastFunction = {
val fieldsCasts = from.fields.zip(to.fields).map {
case (fromField, toField) => nullSafeCastFunction(fromField.dataType, toField.dataType, ctx)
}
val rowClass = classOf[GenericInternalRow].getName
val result = ctx.freshName("result")
val tmpRow = ctx.freshName("tmpRow")
val fieldsEvalCode = fieldsCasts.zipWithIndex.map { case (cast, i) =>
val fromFieldPrim = ctx.freshName("ffp")
val fromFieldNull = ctx.freshName("ffn")
val toFieldPrim = ctx.freshName("tfp")
val toFieldNull = ctx.freshName("tfn")
val fromType = ctx.javaType(from.fields(i).dataType)
s"""
boolean $fromFieldNull = $tmpRow.isNullAt($i);
if ($fromFieldNull) {
$result.setNullAt($i);
} else {
$fromType $fromFieldPrim =
${ctx.getValue(tmpRow, from.fields(i).dataType, i.toString)};
${castCode(ctx, fromFieldPrim,
fromFieldNull, toFieldPrim, toFieldNull, to.fields(i).dataType, cast)}
if ($toFieldNull) {
$result.setNullAt($i);
} else {
${ctx.setColumn(result, to.fields(i).dataType, i, toFieldPrim)};
}
}
"""
}
val fieldsEvalCodes = if (ctx.INPUT_ROW != null && ctx.currentVars == null) {
ctx.splitExpressions(
expressions = fieldsEvalCode,
funcName = "castStruct",
arguments = ("InternalRow", tmpRow) :: (rowClass, result) :: Nil)
} else {
fieldsEvalCode.mkString("\n")
}
(c, evPrim, evNull) =>
s"""
final $rowClass $result = new $rowClass(${fieldsCasts.length});
final InternalRow $tmpRow = $c;
$fieldsEvalCodes
$evPrim = $result;
"""
}
override def sql: String = dataType match {
// HiveQL doesn't allow casting to complex types. For logical plans translated from HiveQL, this
// type of casting can only be introduced by the analyzer, and can be omitted when converting
// back to SQL query string.
case _: ArrayType | _: MapType | _: StructType => child.sql
case _ => s"CAST(${child.sql} AS ${dataType.sql})"
}
}
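// For instance (illustrative): Cast(Literal("10"), IntegerType).eval() yields 10, while
// Cast(Literal("abc"), IntegerType).eval() yields null rather than throwing.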
/**
 * Cast the child expression to the target data type, but will throw an error if the cast might
 * truncate, e.g. long -> int, timestamp -> date.
*/
case class UpCast(child: Expression, dataType: DataType, walkedTypePath: Seq[String])
extends UnaryExpression with Unevaluable {
override lazy val resolved = false
}
| adrian-ionescu/apache-spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala | Scala | apache-2.0 | 40,282 |
/**
* Copyright 2009 Latterfrosken Software Development Limited
*
* This file is part of Lafros GUI-Alerts.
*
* Lafros GUI-Alerts is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* Lafros GUI-Alerts is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with Lafros GUI-Alerts. If not, see <http://www.gnu.org/licenses/>. */
package com.lafros.gui.alerts
import Alert._
import scala.swing.{BorderPanel, CheckBox, Component, Publisher, Separator}
import scala.swing.event.ButtonClicked
import BorderPanel.Position._
import java.awt.BorderLayout
import javax.swing.{JComponent, JPanel, SwingUtilities}
/**
 * toggle panel to which a concealable component may be added. When
* adding a <tt>MonField</tt> to the <tt>concealableComponent</tt>,
* <tt>togPanel.listenTo(monField)</tt> will cause <tt>togPanel</tt> to open
* automatically whenever <tt>monField</tt>'s <tt>alert</tt> property is anything
* other than <tt>NoAlert</tt>.
*
* @author Rob Dickens */
class TogPanel extends BorderPanel {
override lazy val peer = new JPanel(new BorderLayout) with SuperMixin {
override def updateUI() {
super.updateUI()
if (!open) updateConcealableUi = true
}
// def setFocusable(b: Boolean) {
// super.setFocusable(b)
// if (togButton != null)
// togButton.setFocusable(b)
// }
}
private val alwaysVisibleContainer = new BorderPanel
//
private var togButton: CheckBox = _
private var separator: Separator = _
private var _concealableComponent, _alwaysVisibleComponent: Option[Component] = None
private var _proppedOpen, goingFromOrToZeroAlerts, updateConcealableUi: Boolean = _
private var alertCount: Int = _
layout(alwaysVisibleContainer) = North
reactions += {
case AlertChanged(monField, previousValue) =>
//println("TogPanel reacting to AlertChanged, from "+ previousValue +" to "+ monField.alert)
monField.alert match {
case NoAlert => decAlerts()
case _ => if (previousValue == NoAlert) incAlerts()
}
}
/**
* looks out for <tt>MonField</tt>s with alerts. */
override def listenTo(ps: Publisher*) = xTo(super.listenTo(ps: _*), incAlerts, ps: _*)
/**
* looks out for <tt>MonField</tt>s with alerts. */
override def deafTo(ps: Publisher*) = xTo(super.deafTo(ps: _*), decAlerts, ps: _*)
private def xTo(f1: => Unit, f2: => Unit, ps: Publisher*) {
f1
for (p <- ps) p match {
case monField: MonField => monField.alert match {
case NonIntrusive | Intrusive => f2
case NoAlert =>
}
case _ =>
}
}
/**
* component beneath which the concealable one is displayed. */
def alwaysVisibleComponent = _alwaysVisibleComponent
/**
* sets this property. */
def alwaysVisibleComponent_=(arg: Option[Component]) {
arg match {
case Some(c) => alwaysVisibleContainer.layout(c) = West
case None => _alwaysVisibleComponent match {
case Some(c) => alwaysVisibleContainer.layout -= c
case None => return
}
}
redraw(alwaysVisibleContainer.peer)
_alwaysVisibleComponent = arg
}
/**
* supplying this component causes a check-box to appear (after the
* <tt>alwaysVisibleComponent</tt>), selection of which causes the
* component to be displayed underneath. */
def concealableComponent = _concealableComponent
/**
* sets this property. */
def concealableComponent_=(arg: Option[Component]) {
arg match {
case Some(c) =>
_concealableComponent match {
case Some(c) =>
if (togButton.selected) {
layout(c) = Center
redraw(peer)
}
case None =>
togButton = new CheckBox("...") {
reactions += {
case ButtonClicked(_) =>
if (!goingFromOrToZeroAlerts) _proppedOpen = !_proppedOpen
if (togButton.selected) {
separator = new Separator
alwaysVisibleContainer.layout(separator) = South
layout(c) = Center
if (updateConcealableUi) {
SwingUtilities.updateComponentTreeUI(c.peer)
updateConcealableUi = false
}
}
else {
alwaysVisibleContainer.layout -= separator
separator = null
layout -= c
}
redraw(TogPanel.this.peer)
}
}
//togButton.setFocusable(isFocusable())
alwaysVisibleContainer.layout(togButton) = Center
redraw(alwaysVisibleContainer.peer)
if (alertCount > 0) {
goingFromOrToZeroAlerts = true
togButton.doClick()
goingFromOrToZeroAlerts = false
}
}
case None =>
_concealableComponent match {
case Some(c) =>
// remove existing one together with checkbox
alwaysVisibleContainer.layout -= togButton
if (togButton.selected) {
layout -= c
alwaysVisibleContainer.layout -= separator
redraw(peer)
separator = null
}
else
redraw(alwaysVisibleContainer.peer)
togButton = null
case None => return
}
}
_concealableComponent = arg
}
/**
* determines whether or not the <tt>TogPanel</tt> remains <tt>open</tt> when
* the <tt>alert</tt> property of all <tt>MonField</tt> descendants of the
* <tt>concealableComponent</tt> has the value, <tt>Alert.NoAlert</tt>.
* Explicitly opening or closing the <tt>TogPanel</tt> sets this property to
* <tt>true</tt> or <tt>false</tt>, respectively. */
def proppedOpen = _proppedOpen
/**
* sets this property. */
def proppedOpen_=(b: Boolean) = if (b != _proppedOpen) {
// if alerts then just change flag
if (alertCount > 0) _proppedOpen = b
else togButton.doClick() // changes flag as well
}
/**
* returns <tt>true</tt> if the <tt>concealableComponent</tt> is displayed. */
def open = if (togButton == null) false else togButton.selected
//
// called by MonLabel upon changing TO alert-mode
private def incAlerts() {
alertCount += 1
if (alertCount == 1 &&
togButton != null) {
if (!togButton.selected) {
goingFromOrToZeroAlerts = true
togButton.doClick()
goingFromOrToZeroAlerts = false
}
togButton.enabled = false
}
}
//
// called by MonLabel upon changing FROM alert-mode
private def decAlerts() {
alertCount -= 1
if (alertCount == 0 &&
togButton != null) {
togButton.enabled = true
if (togButton.selected &&
!_proppedOpen) {
goingFromOrToZeroAlerts = true
togButton.doClick()
goingFromOrToZeroAlerts = false
}
}
}
private def redraw(c: JComponent) {
if (c.isDisplayable()) {
c.validate()
c.revalidate()
c.repaint()
}
}
}
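// A minimal usage sketch (`statusLabel`, `detailsPanel` and `monField` are assumed to exist,
// with `monField` added somewhere inside `detailsPanel`):
//
//   val togPanel = new TogPanel
//   togPanel.alwaysVisibleComponent = Some(statusLabel)
//   togPanel.concealableComponent = Some(detailsPanel)
//   togPanel.listenTo(monField) // opens automatically while monField.alert != NoAlert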
| robcd/lafros-gui | lafros-gui-alerts/src/main/scala/com/lafros/gui/alerts/TogPanel.scala | Scala | gpl-3.0 | 7,115 |
package com.github.mdr.mash.editor
import com.github.mdr.mash.editor.SyntaxSelection.expandSelection
import org.scalatest.{ FlatSpec, Matchers }
import com.github.mdr.mash.repl.LineBufferTestHelper.lineBuffer
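// In the fixtures below, ▶ marks the cursor position and ▷ the opposite end of the
// expected selection, as parsed by LineBufferTestHelper.lineBuffer.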
class SyntaxSelectionTest extends FlatSpec with Matchers {
  "iden▶tifier" ==> "▷identifier▶"
  "foo + ba▶r" ==> "foo + ▷bar▶"
  "foo + ▶bar" ==> "foo + ▷bar▶"
  "foo + bar▶" ==> "▷foo + bar▶"
  "foo.ba▶r" ==> "foo.▷bar▶"
  "foo --ba▶r=baz" ==> "foo ▷--bar▶=baz"
  "1 + 2▶ * 3" ==> "1 + ▷2 * 3▶"
  "1 + 2▶ * 3 * 4" ==> "1 + ▷2 * 3▶ * 4"
  "1 + ▷2 * 3▶ * 4" ==> "1 + ▷2 * 3 * 4▶"
  "1 + ▷2 * 3 * 4▶" ==> "▷1 + 2 * 3 * 4▶"
  "def foo = 4▶2" ==> "def foo = ▷42▶"
  "def foo = 42 # comm▶ent" ==> "def foo = 42 ▷# comment▶"
  "def foo = 42 ▷# comment▶" ==> "▷def foo = 42 # comment▶"
  " ▶" ==> "▷ ▶"
  "▶" ==> "▶"
  """{
    | # Comment
    | ▶def foo▷ = 42
    |}""" ==>
    """{
    | ▷# Comment
    | def foo = 42▶
    |}"""
  """{
    | # Comment
    | ▶class Foo▷
    |}""" ==>
    """{
    | ▷# Comment
    | class Foo▶
    |}"""
implicit class RichString(s: String) {
def ==>(expectedStr: String) {
"Expanding selection using AST" should s"expand $s into $expectedStr" in {
val expected = lineBuffer(expectedStr.stripMargin)
val input = lineBuffer(s.stripMargin)
        val actual = expandSelection(input, mish = false).map(selection ⇒ input.withSelection(selection)).getOrElse(input)
actual shouldEqual expected
}
}
}
}
| mdr/mash | src/test/scala/com/github/mdr/mash/editor/SyntaxSelectionTest.scala | Scala | mit | 1,613 |
package dao.helper
import database.UniqueTable
import models.UniqueDbEntity
import org.joda.time.DateTime
import slick.jdbc.PostgresProfile.api._
import utils.date.DateTimeOps.DateTimeConverter
import scala.concurrent.Future
trait Updated[T <: Table[DbModel] with UniqueTable, DbModel <: UniqueDbEntity] {
self: Core
with Expandable[DbModel]
with Accessible[T, DbModel]
with Retrieved[T, DbModel, _] =>
protected def shouldUpdate(existing: DbModel, toUpdate: DbModel): Boolean
final def update(entity: DbModel): Future[DbModel] = db.run(updateQuery(entity))
final def updateMany(entities: List[DbModel]): Future[List[DbModel]] = {
val query = entities.map(updateQuery)
db.run(DBIO.sequence(query))
}
final def updateQuery(entity: DbModel) = {
val query = filterValidOnly(_.id === entity.id)
val singleQuery = query.exactlyOne { existing =>
if (existing.id == entity.id && shouldUpdate(existing, entity))
(for {
u1 <- query.update(entity)
u2 <- query.map(_.lastModified).update(DateTime.now.timestamp)
} yield u1 + u2).transactionally.map(_ => entity)
else
DBIO.failed(ModelAlreadyExists(entity, existing))
}
databaseExpander.fold {
singleQuery
} { expander =>
(for {
q <- singleQuery
e <- expander.expandUpdateOf(q)
} yield e).transactionally
}
}
}
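// A minimal sketch of a concrete DAO wiring this helper in (RoomTable/RoomDb/Room are
// assumed example types):
//
//   trait RoomDao extends Core with Expandable[RoomDb] with Accessible[RoomTable, RoomDb]
//       with Retrieved[RoomTable, RoomDb, Room] with Updated[RoomTable, RoomDb] {
//     override protected def shouldUpdate(existing: RoomDb, toUpdate: RoomDb): Boolean =
//       existing.label != toUpdate.label
//   }
//
//   // dao.update(room) then bumps lastModified on success and fails with
//   // ModelAlreadyExists when shouldUpdate returns false.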
| THK-ADV/lwm-reloaded | app/dao/helper/Updated.scala | Scala | mit | 1,406 |
package models
import reactivemongo.bson._
import play.api.libs.json._
import play.api.libs.json.Json._
import play.api.libs.json.Writes._
object Formats {
val objectIDRegExFormat = "^[0-9a-fA-F]{24}$".r
def isObjectIDValid(input: String): Boolean = (objectIDRegExFormat findFirstIn input).nonEmpty
implicit object ObjectIdReads extends Format[BSONObjectID] {
def reads(json: JsValue): JsResult[BSONObjectID] = json.asOpt[JsObject] map { oid =>
      (oid \ "$oid").asOpt[String] map { str =>
if (isObjectIDValid(str))
JsSuccess(BSONObjectID(str))
else
JsError("Invalid ObjectId %s".format(str))
} getOrElse (JsError("Value is not an ObjectId"))
} getOrElse (JsError("Value is not an ObjectId"))
def writes(oid: BSONObjectID): JsValue = Json.obj("$oid" -> JsString(oid.stringify))
}
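  // Illustrative round-trip, assuming `import Formats._` is in scope:
  //   Json.toJson(someObjectId)        // {"$oid": "507f1f77bcf86cd799439011"}
  //   json.validate[BSONObjectID]      // JsError unless "$oid" holds a 24-character hex string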
}
| luanlv/website | app/models/Formats.scala | Scala | apache-2.0 | 856 |
package at.forsyte.apalache.tla.bmcmt.rules.aux
import at.forsyte.apalache.tla.bmcmt.smt.SolverContext
import at.forsyte.apalache.tla.bmcmt.{SymbState, SymbStateDecoder, SymbStateRewriter}
import at.forsyte.apalache.tla.lir.TlaEx
/**
* An abstract version of an oracle that is used e.g. in CherryPick.
*
* @author Igor Konnov
*/
trait Oracle extends Serializable {
/**
   * Produce an expression stating that the oracle value equals the given integer position.
* The actual implementation may be different from an integer comparison.
*
* @param state a symbolic state
* @param position a position the oracle should be equal to
*/
def whenEqualTo(state: SymbState, position: Int): TlaEx
/**
* Produce a ground expression that contains assertions for the possible oracle values.
*
* @param state a symbolic state
* @param assertions a sequence of assertions, one per oracle value
* @return an expression ite(oracle = 0, ite(oracle = 1, ...))
*/
def caseAssertions(state: SymbState, assertions: Seq[TlaEx]): TlaEx
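  // For example (illustrative), with assertions Seq(a0, a1, a2) the result is equivalent to
  // IF oracle = 0 THEN a0 ELSE IF oracle = 1 THEN a1 ELSE a2; concrete oracles may encode
  // the comparison differently.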
/**
* Get a symbolic state and decode the value of the oracle variable into an integer.
* This method assumes that the solver context has produced an SMT model.
*
* @param solverContext a solver context
* @param state a symbolic state
* @return an integer value of the oracle
*/
def evalPosition(solverContext: SolverContext, state: SymbState): Int
}
| konnov/dach | tla-bmcmt/src/main/scala/at/forsyte/apalache/tla/bmcmt/rules/aux/Oracle.scala | Scala | apache-2.0 | 1,467 |
///*
// * DARWIN Genetic Algorithms Framework Project.
// * Copyright (c) 2003, 2005, 2007, 2009, 2011, 2016, 2017. Phasmid Software
// *
// * Originally, developed in Java by Rubecula Software, LLC and hosted by SourceForge.
// * Converted to Scala by Phasmid Software and hosted by github at https://github.com/rchillyard/Darwin
// *
// * This file is part of Darwin.
// *
// * Darwin is free software: you can redistribute it and/or modify
// * it under the terms of the GNU General Public License as published by
// * the Free Software Foundation, either version 3 of the License, or
// * (at your option) any later version.
// *
// * This program is distributed in the hope that it will be useful,
// * but WITHOUT ANY WARRANTY; without even the implied warranty of
// * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// * GNU General Public License for more details.
// *
// * You should have received a copy of the GNU General Public License
// * along with this program. If not, see <http://www.gnu.org/licenses/>.
// */
//
//package com.phasmid.darwin.evolution
//
//import com.phasmid.darwin.Ecological
//import com.phasmid.darwin.base.{CaseIdentifiable, SelfAuditing}
//import com.phasmid.darwin.eco._
//import com.phasmid.darwin.genetics._
//import com.phasmid.darwin.visualization.{Visualization}
//import com.phasmid.laScala.values.Incrementable
//import com.phasmid.laScala.{Prefix, Version}
//
//import scala.annotation.tailrec
//
///**
// * Created by scalaprof on 7/27/16.
// *
// * @param organisms a collection of organisms, each of type Z
// * @param generation a version representing this generation
// * @param env the Environment in which this Colony flourishes
// * @tparam V the generation type (defined to be Incrementable)
// * @tparam X the underlying type of the xs
// * @tparam Z the Organism type
// * @tparam Repr the Representation type for this Colony
// */
//abstract class AbstractColony[V: Incrementable, X, Z : Fit : Reproduction : Viable : Visualization : Ordering, Repr](organisms: Iterable[Z], generation: Version[V], env: Environment[T, X]) extends BaseEvolvable[V, Z, Repr](organisms, generation) with Ecological[T, X] with Theocratic[Repr] {
//
//// /**
//// * Method to create an Organism of type Z from a Nucleus
//// *
//// * @param nucleus the Nucleus from which to create an Organism
//// * @return an instance of Z
//// */
//// def createOrganism(nucleus: Nucleus[B]): Z
//
// /**
// * Method to signal a change in status for the individual referenced.
// *
// * @param z the individual organism
// * @param kill true if this organism is to be killed off
// */
// def updateOrganism(z: Z, kill: Boolean): Unit = viable.updateOrganism(z, kill)
//
// /**
// * Evaluate the fitness of a member of this Evolvable
// *
// * @param x the member
// * @return true if x is fit enough to survive this generation
// * @throws Exception if the logic to evaluate the fitness of x fails in some unexpected way
// */
// def evaluateFitness(x: Z): Boolean = fit.isFit(x,env)
//
// /**
// * This method yields a new Evolvable by reproduction.
// * If the ploidy of X is haploid, then reproduction will be asexual, otherwise mating must occur between male/female pairs.
// *
// * CONSIDER returning Iterable rather than Iterator
// *
// * @return a new Evolvable
// */
// override def offspring: Iterable[Z] =
// if (reproduction.isSexual)
// organisms filter (fit.isFit(_,env)) // FIXME implement me
// else
// organisms filter (fit.isFit(_,env)) // FIXME implement me
//
//
// def cullMembers(): Repr = (for (v <- generation.next()) yield build(Nil)(v)).get
//
//// def seedMembers(size: Int, genome: Genome[B, G, P], p_ : Int, random: RNG[B]): Repr = {
//// @tailrec def inner(bns: Seq[Nucleus[B]], br: Random[B], n: Int): (Seq[Nucleus[B]], Random[B]) =
//// if (n == 0) (bns, br)
//// else {
//// val (bn, br_) = genome.recombine(random)
//// inner(bns :+ bn, br_, n - 1)
//// }
////
//// val (bns, _) = inner(Nil, random, size)
//// build(bns map createOrganism, generation)
//// }
//
// private val fit = implicitly[Fit[Z]]
// private val reproduction = implicitly[Reproduction[Z]]
// private val viable = implicitly[Viable[Z]]
// private val visualization = implicitly[Visualization[Z]]
//
// private def neonate[B](bs: Sequence[B]): Z = reproduction.neonate(bs)
//
// /**
//   * Method to seed this
//   * {@link Evolvable} with a certain number of
//   * members.
// */
// override def seedMembers[B](size: Int, bss: Stream[Sequence[B]]): Repr = build(bss map neonate take size, generation)
//}
//
///**
// * Created by scalaprof on 7/27/16.
// *
// * @param name an identifier for this Colony
// * @param organisms a collection of organisms, each of type OrganismType
// * @param generation a version representing this generation
// * @param environment the Environment in which this Colony flourishes
// * @tparam T the Trait type
// * @tparam V the generation type (defined to be Incrementable)
// * @tparam X the Eco-type
// * @tparam Y the underlying type of the Colony, i.e. the type of the members
// */
//case class Colony[T, V: Incrementable, X, Y : Fit : Reproduction : Viable : Visualization](name: String, organisms: Iterable[Y], override val generation: Version[V], environment: Environment[T, X]) extends AbstractColony[T, V, X, Y, Colony[T, V, X, Y]](organisms, generation, environment) with SelfAuditing {
//
// import com.phasmid.darwin.evolution.Random.RandomizableLong
//
//// implicit val idStreamer: Streamer[Long] = Streamer(RNG[Long](0).toStream)
//
//// private val genome = species.genome
////
//// private val visualizer: Visualizer[T, X] = species.visualizer
//
//// def seedMembers(size: Int, random: RNG[B]): Colony[B, T, V, X, Y] = seedMembers(size, genome, 2, random)
//
// def build(xs: Iterable[Y], v: Version[V]): Colony[T, V, X, Y] = new Colony(name, xs, v, environment)
//
//// private val builder = implicitly[OrganismBuilder[Y]]
//
//// def createOrganism(nucleus: Nucleus[B]): Y = {
//// val result = builder.build(generation, species, nucleus, environment)
//// visualizer.createAvatar(result)
//// result
//// }
//
//// def updateOrganism(o: Y, kill: Boolean): Unit = {
//// val yv = implicitly[Visualization[Y]]
//// if (kill) yv.destroyAvatar(o)
//// else yv.updateAvatar(o)
//// }
//
// def apply(phenotype: Phenotype[T]): Adaptatype[X] = throw GeneticsException("apply not implemented") // FIXME implement me (??)
//
// override def render(indent: Int)(implicit tab: (Int) => Prefix): String = CaseIdentifiable.renderAsCaseClass(this.asInstanceOf[Colony[Any, Any, Any, Organism[Any, Any]]])(indent)
//
// override def toString: String = s"$name generation $generation with ${organisms.size} organisms"
//
// // CONSIDER removing the parameter tab from the invocation: it isn't really needed (in all defs of render)
// // override def render(indent: Int = 0)(implicit tab: (Int) => Prefix): String = RenderableCaseClass(this.asInstanceOf[Colony[Any, Any, Any, Any, Long, Any, SexualAdaptedOrganism[B,G,T,V,X]]]).render(indent)(tab)
//
//}
//
////trait ColonyBuilder[Y] {
////
//// def build[B, G, P, T, V, X, Z](name: String, generation: Version[V], species: Species[B, G, P, T, X], ecology: Ecology[T, X]): Y
////}
//
//object Colony {
//
// def apply[B, G, T, V: Incrementable, X, Y : Fit : Reproduction : Visualization : Viable](name: String, generation: Version[V], environment: Environment[T, X]): Colony[T, V, X, Y] = Colony(name, Nil, generation, environment)
//
// def apply[B, G, T, X, Y : Fit : Reproduction : Visualization : Viable](name: String, environment: Environment[T, X]): Colony[T, Long, X, Y] = apply(name, Version.longVersion("0"), environment)
//
//}
//
//trait Viable[Z] {
// def updateOrganism(z: Z, kill: Boolean): Unit
//
//}
| rchillyard/Darwin | src/main/scala/com/phasmid/darwin/evolution/OldColony.scala | Scala | gpl-3.0 | 8,049 |
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js Test Suite       **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013-2015, LAMP/EPFL  **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/      **
** /____/\___/_/ |_/____/_/ | |__/ /____/                              **
**                          |/____/                                    **
\*                                                                      */
package org.scalajs.testsuite.library
import scala.scalajs.js
import scala.scalajs.js.annotation._
import org.scalajs.jasminetest.JasmineTest
import org.scalajs.testsuite.Typechecking._
object UseAsTest extends JasmineTest {
describe("js.use(x).as[T] - success cases") {
it("should support basic typechecking") {
class A {
@JSExport
def m(a: Int, b: String): js.Object = ???
}
js.use(new A).as[JSBasic]
}
it("should support covariance in return types") {
class A {
@JSExport
def m(a: Int, b: String): js.Array[Int] = ???
}
js.use(new A).as[JSBasic]
}
it("should support contravariance in argument") {
class A {
@JSExport
def m(a: Int, b: Any): js.Object = ???
}
js.use(new A).as[JSBasic]
}
it("should support explicit names in JSExports") {
class A {
@JSExport("m")
def f(a: Int, b: String): js.Object = ???
}
js.use(new A).as[JSBasic]
}
it("should support JSName") {
class A {
@JSExport
def m(a: Int, b: String): js.Object = ???
}
class B {
@JSExport("m")
def bar(a: Int, b: String): js.Object = ???
}
js.use(new A).as[JSBasicJSName]
js.use(new B).as[JSBasicJSName]
}
it("should support JSExportAll") {
@JSExportAll
class A {
def m(a: Int, b: String): js.Object = ???
}
class B extends A
js.use(new A).as[JSBasic]
js.use(new B).as[JSBasic]
}
it("should support inherited exports") {
abstract class A {
@JSExport
def m(a: Int, b: String): js.Object
}
class B extends A {
def m(a: Int, b: String): js.Object = ???
}
js.use(new B).as[JSBasic]
}
it("should support JSExportAll on superclass") {
@JSExportAll
abstract class A {
def m(a: Int, b: String): js.Object
}
class B extends A {
def m(a: Int, b: String): js.Object = ???
}
js.use(new B).as[JSBasic]
}
it("should work with JSExportAll with an apply method") {
@JSExportAll
class A {
@JSExport("bar")
def apply(x: Int): Int = x * 2
}
val a = js.use(new A).as[JSNamedApply]
expect(a(2)).toEqual(4)
expect(a.bar(2)).toEqual(4)
}
it("should resolve generics in JSRaw types") {
class A {
@JSExport
def arr: js.Array[Int] = ???
}
js.use(new A).as[JSGeneric[Int]]
js.use(new A).as[JSGenericInt]
}
it("should resolve type members in JSRaw types") {
class A {
@JSExport
def foo(x: Int): Int = ???
}
js.use(new A).as[JSTypeMember { type R = Int }]
}
it("should resolve exports with class-level type parameter") {
class A[T] {
@JSExport
def arr: js.Array[T] = ???
}
class B extends A[Int]
js.use(new A[Int]).as[JSGeneric[Int]]
js.use(new B).as[JSGeneric[Int]]
}
it("should resolve exports with type member") {
class A {
type T
@JSExport
def arr: js.Array[T] = ???
}
class B extends A {
type T = Int
}
js.use(new B).as[JSGeneric[Int]]
}
it("should resolve overloading") {
@JSExportAll
class A {
def m(a: Int, b: String): js.Object = ???
def m(b: String): Int = ???
@JSExport("m")
def strangeName(a: Int): js.Object = ???
}
js.use(new A).as[JSOverload]
}
it("should support vals/getters") {
@JSExportAll
class A {
val a: Int = 1
def b: String = ???
// Test covariance as well
def c: js.Array[Int] = ???
}
js.use(new A).as[JSGetters]
}
it("should support setters") {
class A {
@JSExport("a")
def fooA_=(x: Int): Unit = ???
@JSExport
def b_=(x: String): Unit = ???
@JSExport("c_=")
def barC_=(x: js.Object): Unit = ???
}
js.use(new A).as[JSSetters]
}
it("should support vars") {
class A {
@JSExport
def a: Int = ???
@JSExport
def a_=(x: Int): Unit = ???
@JSExport("b")
var fooB: String = _
@JSExport
var c: js.Object = _
}
js.use(new A).as[JSVars]
}
it("should support basic default arguments") {
@JSExportAll
class A {
def sum4(a: Int, b: Int = 1, c: Int = 2, d: Int = 3): Int = a + b + c + d
def sum2(a: Int, b: Int = 1): Int = a + b
}
js.use(new A).as[JSDefaultArgs]
}
it("should allow additional default arguments at the end of the params") {
class A {
@JSExport
def m(a: Int, b: String, c: Int = ???, d: String = ???): js.Object = ???
}
js.use(new A).as[JSBasic]
}
it("should support repeated parameter lists") {
@JSExportAll
class A {
def rep(a: Int, b: String*): Unit = ???
def rep(a: Int*): Unit = ???
}
js.use(new A).as[JSRepeated]
}
it("should flatten multi parameter lists in raw JS type") {
@JSExportAll
class A {
def multi(a: Int, b: String): Int = ???
}
js.use(new A).as[JSMulti]
}
it("should flatten multi parameter lists in exported method") {
@JSExportAll
class B {
def m(a: Int)(b: String): js.Object = ???
}
js.use(new B).as[JSBasic]
}
it("should support anonymous types") {
js.use(new { @JSExport def m(a: Int, b: String): js.Object = ??? }).as[JSBasic]
}
it("should allow Nothing") {
if (false) {
js.use(???).as[JSBasic]
}
}
it("should allow Null") {
js.use(null).as[JSBasic]
}
}
describe("js.use(x).as[T] - failure cases") {
it("fails with polymorphic methods") {
typeErrorWithMsg(
"js.use(new Object).as[JSPolyMethod]",
"Polymorphic methods are currently not supported. Offending " +
"method: org.scalajs.testsuite.library.UseAsTest.JSPolyMethod.poly")
}
it("fails with non-type refinements") {
typeErrorWithMsg(
"js.use(???).as[JSBasic { def foo: Int }]",
"Refinement foo is not a type. Only types may be refined with as.")
}
it("fails with non trait") {
typeErrorWithMsg(
"js.use(???).as[js.Date]",
"Only traits can be used with as")
}
it("fails with apply in a raw JS type") {
typeErrorWithMsg(
"js.use(new Object).as[JSWithApply]",
"org.scalajs.testsuite.library.UseAsTest.JSWithApply defines an apply " +
"method. This cannot be implemented by any Scala exported type, " +
"since it would need to chain Function's prototype.")
}
it("fails with @JSBracketAccess in a raw JS type") {
typeErrorWithMsg(
"js.use(new Object).as[JSWithBracketAccess]",
"org.scalajs.testsuite.library.UseAsTest.JSWithBracketAccess " +
"defines a @JSMemberBracketAccess method. Existence of such a " +
"method cannot be statically checked for any Scala exported type.")
}
it("fails with @JSBracketCall in a raw JS type") {
typeErrorWithMsg(
"js.use(new Object).as[JSWithBracketCall]",
"org.scalajs.testsuite.library.UseAsTest.JSWithBracketCall defines " +
"a @JSMemberBracketCall method. Existence of such a method cannot " +
"be statically checked for any Scala exported type.")
}
it("fails with class parents") {
typeErrorWithMsg(
"js.use(???).as[JSNonClassParent]",
"Supertype scala.scalajs.js.Date of trait JSNonClassParent is a " +
"class. Cannot be used with as.")
}
it("fails with a missing method") {
class A {
@JSExport
def e(a: Int, b: String): js.Object = ???
}
typeErrorWithMsg(
"js.use(new A).as[JSBasic]",
"A does not export a member named m")
}
it("fails with a missing overload") {
class A {
@JSExport
def m(a: Int, b: String): js.Object = ???
}
typeErrorWithMsg(
"js.use(new A).as[JSOverload]",
"A does not export method m(Int): scala.scalajs.js.Object")
}
it("fails with wrong argument types") {
class A {
@JSExport
def m(a: String, b: Int): js.Object = ???
}
typeErrorWithMsg(
"js.use(new A).as[JSBasic]",
"A does not export method m(Int, String): scala.scalajs.js.Object")
}
it("fails with wrong return types") {
class A {
@JSExport
def m(a: Int, b: String): Any = ???
}
typeErrorWithMsg(
"js.use(new A).as[JSBasic]",
"A does not export method m(Int, String): scala.scalajs.js.Object")
}
it("fails with a missing default argument") {
@JSExportAll
class A {
def sum4(a: Int, b: Int = 1, c: Int = 2, d: Int = 3): Int = a + b + c + d
def sum2(a: Int, b: Int): Int = a + b // should have default
}
typeErrorWithMsg(
"js.use(new A).as[JSDefaultArgs]",
"A does not export method sum2(Int, Int = ???): Int")
}
it("fails with a mismatching repeated argument") {
@JSExportAll
class A {
def rep(a: Int, b: String): Unit = ??? // should be repeated
def rep(a: Int*): Unit = ???
}
typeErrorWithMsg(
"js.use(new A).as[JSRepeated]",
"A does not export method rep(Int, String*): Unit")
class B {
@JSExport
def m(a: Int, b: String*): js.Object = ??? // should not be repeated
}
typeErrorWithMsg(
"js.use(new B).as[JSBasic]",
"B does not export method m(Int, String): scala.scalajs.js.Object")
}
}
trait JSBasic extends js.Object {
def m(a: Int, b: String): js.Object = js.native
}
trait JSBasicJSName extends js.Object {
@JSName("m")
def foo(a: Int, b: String): js.Object = js.native
}
trait JSNamedApply extends js.Object {
@JSName("apply")
def apply(x: Int): Int = js.native
def bar(x: Int): Int = js.native
}
trait JSGeneric[T] extends js.Object {
def arr: js.Array[T] = js.native
}
trait JSGenericInt extends JSGeneric[Int]
trait JSTypeMember extends js.Object {
type R
def foo(x: R): Int = js.native
}
trait JSOverload extends JSBasic {
def m(b: String): Int = js.native
def m(a: Int): js.Object = js.native
}
trait JSGetters extends js.Object {
def a: Int = js.native
val b: String = js.native
def c: js.Object = js.native
}
trait JSSetters extends js.Object {
def a_=(x: Int): Unit = js.native
@JSName("b")
def fooJS_=(x: String): Unit = js.native
@JSName("c_=")
def barJS_=(x: js.Array[Int]): Unit = js.native
}
trait JSVars extends js.Object {
var a: Int = js.native
def b: String = js.native
def b_=(x: String): Unit = js.native
@JSName("c")
var fooJS: js.Object = js.native
}
trait JSDefaultArgs extends js.Object {
def sum4(a: Int, b: Int = ???, c: Int = ???, d: Int = ???): Int = js.native
def sum2(a: Int, b: Int = ???): Int = js.native
}
trait JSRepeated extends js.Object {
def rep(a: Int, b: String*): Unit = js.native
def rep(a: Int*): Unit = js.native
}
trait JSMulti extends js.Object {
def multi(a: Int)(b: String): Int = js.native
}
trait JSPolyMethod extends js.Object {
def poly[T](a: T): js.Array[T] = js.native
}
trait JSWithApply extends js.Object {
def apply(a: String): Int = js.native
}
trait JSWithBracketAccess extends js.Object {
@JSBracketAccess
def foo(a: String): Int = js.native
}
trait JSWithBracketCall extends js.Object {
@JSBracketCall
def foo(name: String, b: String): Int = js.native
}
trait JSNonClassParent extends js.Date
}
| doron123/scala-js | test-suite/src/test/scala/org/scalajs/testsuite/library/UseAsTest.scala | Scala | bsd-3-clause | 12,580 |
import sbt._
import Keys._
import Status.{isSnapshot, publishStatus}
import org.apache.ivy.util.url.CredentialsStore
object Release extends Build
{
lazy val publishRelease = TaskKey[Unit]("publish-release")
lazy val publishAllArtifacts = TaskKey[Unit]("publish-all-artifacts")
lazy val launcherRemotePath = SettingKey[String]("launcher-remote-path")
lazy val remoteBase = SettingKey[String]("remote-base")
lazy val remoteID = SettingKey[String]("remote-id")
lazy val publishLauncher = TaskKey[String]("publish-launcher")
lazy val fullRelease = TaskKey[Unit]("full-release")
lazy val prerelease = TaskKey[Unit]("prerelease")
lazy val wikiRepository = SettingKey[File]("wiki-repository")
lazy val pagesRepository = SettingKey[File]("pages-repository")
lazy val updatedPagesRepository = TaskKey[File]("updated-pages-repository")
lazy val updatedWikiRepository = TaskKey[File]("updated-wiki-repository")
lazy val copyAPIDoc = TaskKey[File]("copy-api-doc")
lazy val pushAPIDoc = TaskKey[Unit]("push-api-doc")
lazy val pushWiki = TaskKey[Unit]("push-wiki")
lazy val pushMain = TaskKey[Unit]("push-main")
lazy val sbtRemoteRepo = SettingKey[String]("sbt-remote-repo")
lazy val wikiRemoteRepo = SettingKey[String]("wiki-remote-repo")
def settings(nonRoots: => Seq[ProjectReference], launcher: ScopedTask[File]): Seq[Setting[_]] =
(if(CredentialsFile.exists) releaseSettings(nonRoots, launcher) else Nil) ++
(if(file(".release.sbt") exists) fullReleaseSettings else Nil)
def releaseSettings(nonRoots: => Seq[ProjectReference], launcher: ScopedTask[File]): Seq[Setting[_]] = Seq(
publishTo in ThisBuild <<= publishResolver,
remoteID <<= publishStatus("typesafe-ivy-" + _),
credentials in Global += Credentials(CredentialsFile),
remoteBase <<= publishStatus( "https://typesafe.artifactoryonline.com/typesafe/ivy-" + _ ),
publishAllArtifacts <<= Util.inAll(nonRoots, publish.task),
publishLauncher <<= deployLauncher(launcher),
publishRelease <<= Seq(publishLauncher, publishAllArtifacts).dependOn,
launcherRemotePath <<= (organization, version) { (org, v) => List(org, LaunchJarName, v, LaunchJarName + ".jar").mkString("/") }
)
def fullReleaseSettings: Seq[Setting[_]] = Seq(
pushAPIDoc <<= pushAPIDoc0,
copyAPIDoc <<= copyAPIDoc0,
pushWiki <<= pushWiki0,
pushMain <<= pushMain0,
prerelease := println(Prerelease),
fullRelease <<= fullRelease0,
sbtRemoteRepo := "[email protected]:harrah/xsbt.git",
wikiRemoteRepo := "[email protected]:harrah/xsbt.wiki.git",
updatedPagesRepository <<= updatedRepo(pagesRepository, sbtRemoteRepo, Some("gh-pages")),
updatedWikiRepository <<= updatedRepo(wikiRepository, wikiRemoteRepo, None)
)
def deployLauncher(launcher: ScopedTask[File]) =
(launcher, launcherRemotePath, credentials, remoteBase, streams) map { (launchJar, remotePath, creds, base, s) =>
val (uname, pwd) = getCredentials(creds, s.log)
val request = dispatch.url(base) / remotePath <<< (launchJar, BinaryType) as (uname, pwd)
val http = new dispatch.Http
try { http(request.as_str) } finally { http.shutdown() }
}
def getCredentials(cs: Seq[Credentials], log: Logger): (String, String) =
{
val Some(creds) = Credentials.forHost(cs, "typesafe.artifactoryonline.com")
(creds.userName, creds.passwd)
}
	def snapshotPattern(version: String) = Resolver.localBasePattern.replaceAll("""\[revision\]""", version)
def publishResolver: Project.Initialize[Option[Resolver]] = (remoteID, remoteBase) { (id, base) =>
Some( Resolver.url(id, url(base))(Resolver.ivyStylePatterns) )
}
def updatedRepo(repo: SettingKey[File], remote: SettingKey[String], branch: Option[String]) =
(repo, remote, streams) map { (local, uri, s) => updated(remote = uri, cwd = local, branch = branch, log = s.log); local }
def copyAPIDoc0 = (updatedPagesRepository, doc, Sxr.sxr, streams) map { (repo, newAPI, newSXR, s) =>
git("rm", "-r", "latest")(repo, s.log)
IO.copyDirectory(newAPI, repo / "latest" / "api")
IO.copyDirectory(newSXR, repo / "latest" / "sxr")
repo
}
def fullRelease0 = Seq(pushWiki, pushMain, pushAPIDoc, publishRelease).dependOn
def pushMain0 = (baseDirectory, version, streams) map { (repo, v, s) => commitAndPush(v, tag = Some("v" + v))(repo, s.log) }
def pushWiki0 = (wikiRepository, streams) map { (repo, s) => commitAndPush("updated for release")(repo, s.log) }
def pushAPIDoc0 = (copyAPIDoc, streams) map { (repo, s) => commitAndPush("updated api and sxr documentation")(repo, s.log) }
def commitAndPush(msg: String, tag: Option[String] = None)(repo: File, log: Logger)
{
git("add", ".")(repo, log)
git("commit", "-m", msg, "--allow-empty")(repo, log)
for(tagString <- tag) git("tag", tagString)(repo, log)
push(repo, log)
}
def push(cwd: File, log: Logger) = git("push", "--tags", "-n")(cwd, log)
def pull(cwd: File, log: Logger) = git("pull")(cwd, log)
def updated(remote: String, branch: Option[String], cwd: File, log: Logger): Unit =
if(cwd.exists)
pull(cwd, log)
else
branch match {
case None => git("clone", remote, ".")(cwd, log)
case Some(b) => git("clone", "-b", b, remote, ".")(cwd, log)
}
def git(args: String*)(cwd: File, log: Logger): Unit =
{
IO.createDirectory(cwd)
val full = "git" +: args
log.info(cwd + "$ " + full.mkString(" "))
val code = Process(full, cwd) ! log
if(code != 0) error("Nonzero exit code for git " + args.take(1).mkString + ": " + code)
}
final val BinaryType = "binary/octet-stream"
final val RemoteHost = "typesafe.artifactoryonline.com"
final val RemoteRealm = "Artifactory Realm"
final val LaunchJarName = "sbt-launch"
lazy val CredentialsFile: File = Path.userHome / ".ivy2" / ".typesafe-credentials"
def Prerelease = """
Before running full-release, the following should be done manually from the root 'xsbt' project:
1. Ensure all code is committed and the working directory is completely clean. 'git status' should show no untracked files.
2. 'test'
3. 'scripted'
4. Set the release version in README, build definition, and in src/main/conscript configurations.
5. Run 'show updated-wiki-repository'. Update versions, documentation for release in displayed directory.
6. Add notes/<version>.markdown (pending)
7. 'preview-notes' (pending)
"""
}
| ornicar/xsbt | project/Release.scala | Scala | bsd-3-clause | 6,244 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.plans.logical.statsEstimation
import org.apache.spark.sql.catalyst.expressions.AttributeMap
import org.apache.spark.sql.catalyst.plans.{LeftAnti, LeftSemi}
import org.apache.spark.sql.catalyst.plans.logical._
/**
* An [[LogicalPlanVisitor]] that computes a single dimension for plan stats: size in bytes.
*/
object SizeInBytesOnlyStatsPlanVisitor extends LogicalPlanVisitor[Statistics] {
/**
* A default, commonly used estimation for unary nodes. We assume the input row number is the
* same as the output row number, and compute sizes based on the column types.
*/
private def visitUnaryNode(p: UnaryNode): Statistics = {
    // There should be some overhead in a Row object; the size should not be zero when there
    // are no columns, which helps to prevent a divide-by-zero error.
val childRowSize = EstimationUtils.getSizePerRow(p.child.output)
val outputRowSize = EstimationUtils.getSizePerRow(p.output)
// Assume there will be the same number of rows as child has.
var sizeInBytes = (p.child.stats.sizeInBytes * outputRowSize) / childRowSize
if (sizeInBytes == 0) {
// sizeInBytes can't be zero, or sizeInBytes of BinaryNode will also be zero
// (product of children).
sizeInBytes = 1
}
// Don't propagate rowCount and attributeStats, since they are not estimated here.
Statistics(sizeInBytes = sizeInBytes)
}
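  // Illustrative sketch of the estimate above (hypothetical numbers): if the child reports
  // sizeInBytes = 1000 with 40-byte rows and the projected output rows are 8 bytes wide,
  // the result is (1000 * 8) / 40 = 200 bytes, clamped to 1 if the estimate comes out to 0.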
/**
   * For a leaf node, use its `computeStats`. For other nodes, we assume the size in bytes is
   * the product of all of the children's `computeStats`.
*/
override def default(p: LogicalPlan): Statistics = p match {
case p: LeafNode => p.computeStats()
case _: LogicalPlan =>
Statistics(sizeInBytes = p.children.map(_.stats.sizeInBytes).filter(_ > 0L).product)
}
override def visitAggregate(p: Aggregate): Statistics = {
if (p.groupingExpressions.isEmpty) {
Statistics(
sizeInBytes = EstimationUtils.getOutputSize(p.output, outputRowCount = 1),
rowCount = Some(1))
} else {
visitUnaryNode(p)
}
}
override def visitDistinct(p: Distinct): Statistics = visitUnaryNode(p)
override def visitExcept(p: Except): Statistics = p.left.stats.copy()
override def visitExpand(p: Expand): Statistics = {
val sizeInBytes = visitUnaryNode(p).sizeInBytes * p.projections.length
Statistics(sizeInBytes = sizeInBytes)
}
override def visitFilter(p: Filter): Statistics = visitUnaryNode(p)
override def visitGenerate(p: Generate): Statistics = default(p)
override def visitGlobalLimit(p: GlobalLimit): Statistics = {
val limit = p.limitExpr.eval().asInstanceOf[Int]
val childStats = p.child.stats
val rowCount: BigInt = childStats.rowCount.map(_.min(limit)).getOrElse(limit)
// Don't propagate column stats, because we don't know the distribution after limit
Statistics(
sizeInBytes = EstimationUtils.getOutputSize(p.output, rowCount, childStats.attributeStats),
rowCount = Some(rowCount))
}
override def visitIntersect(p: Intersect): Statistics = {
val leftSize = p.left.stats.sizeInBytes
val rightSize = p.right.stats.sizeInBytes
val sizeInBytes = if (leftSize < rightSize) leftSize else rightSize
Statistics(
sizeInBytes = sizeInBytes)
}
override def visitJoin(p: Join): Statistics = {
p.joinType match {
case LeftAnti | LeftSemi =>
// LeftSemi and LeftAnti won't ever be bigger than left
p.left.stats
case _ =>
default(p)
}
}
override def visitLocalLimit(p: LocalLimit): Statistics = {
val limit = p.limitExpr.eval().asInstanceOf[Int]
val childStats = p.child.stats
if (limit == 0) {
// sizeInBytes can't be zero, or sizeInBytes of BinaryNode will also be zero
// (product of children).
Statistics(sizeInBytes = 1, rowCount = Some(0))
} else {
// The output row count of LocalLimit should be the sum of row counts from each partition.
// However, since the number of partitions is not available here, we just use statistics of
// the child. Because the distribution after a limit operation is unknown, we do not propagate
// the column stats.
childStats.copy(attributeStats = AttributeMap(Nil))
}
}
override def visitPivot(p: Pivot): Statistics = default(p)
override def visitProject(p: Project): Statistics = visitUnaryNode(p)
override def visitRepartition(p: Repartition): Statistics = p.child.stats
override def visitRepartitionByExpr(p: RepartitionByExpression): Statistics = p.child.stats
override def visitRebalancePartitions(p: RebalancePartitions): Statistics = p.child.stats
override def visitSample(p: Sample): Statistics = {
val ratio = p.upperBound - p.lowerBound
var sizeInBytes = EstimationUtils.ceil(BigDecimal(p.child.stats.sizeInBytes) * ratio)
if (sizeInBytes == 0) {
sizeInBytes = 1
}
val sampleRows = p.child.stats.rowCount.map(c => EstimationUtils.ceil(BigDecimal(c) * ratio))
// Don't propagate column stats, because we don't know the distribution after a sample operation
Statistics(sizeInBytes, sampleRows)
}
override def visitScriptTransform(p: ScriptTransformation): Statistics = default(p)
override def visitUnion(p: Union): Statistics = {
Statistics(sizeInBytes = p.children.map(_.stats.sizeInBytes).sum)
}
override def visitWindow(p: Window): Statistics = visitUnaryNode(p)
override def visitSort(p: Sort): Statistics = default(p)
override def visitTail(p: Tail): Statistics = {
val limit = p.limitExpr.eval().asInstanceOf[Int]
val childStats = p.child.stats
val rowCount: BigInt = childStats.rowCount.map(_.min(limit)).getOrElse(limit)
Statistics(
sizeInBytes = EstimationUtils.getOutputSize(p.output, rowCount, childStats.attributeStats),
rowCount = Some(rowCount))
}
override def visitWithCTE(p: WithCTE): Statistics = p.plan.stats
}
| shaneknapp/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statsEstimation/SizeInBytesOnlyStatsPlanVisitor.scala | Scala | apache-2.0 | 6,762 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.workbench.lift.util
import de.fuberlin.wiwiss.silk.config.Prefixes
import de.fuberlin.wiwiss.silk.linkagerule.{Operator, LinkageRule}
import xml.Elem
import de.fuberlin.wiwiss.silk.linkagerule.similarity.{Comparison, Aggregation}
import de.fuberlin.wiwiss.silk.util.DPair
import de.fuberlin.wiwiss.silk.linkagerule.input.{PathInput, TransformInput}
/**
* Renders a linkage rule as a tree.
* jQuery.treeview.js and linkageRuleTree.css must be in the header.
*/
object LinkageRuleTree {
def render(rule: LinkageRule) = {
<ul class="details-tree">
{ for(aggregation <- rule.operator.toList) yield renderOperator(aggregation) }
</ul>
}
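  // Illustrative output for a rule whose root operator is a single comparison of two path
  // inputs (the metric id and the paths below are hypothetical):
  //
  //   <ul class="details-tree">
  //     <li>
  //       <span class="comparison">Comparison: levenshtein (0.0)</span>
  //       <ul>
  //         <li><span class="input">Input: ?a/rdfs:label</span></li>
  //         <li><span class="input">Input: ?b/rdfs:label</span></li>
  //       </ul>
  //     </li>
  //   </ul>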
/**
* Renders a linkage rule operator.
*/
private def renderOperator(op: Operator): Elem = op match {
case Aggregation(id, required, weight, aggregator, operators) => {
<li>
<span class="aggregation">Aggregation: {aggregator.pluginId}</span>
<ul>
{ operators.map(renderOperator) }
</ul>
</li>
}
case Comparison(id, required, weight, threshold, indexing, metric, DPair(input1, input2)) => {
<li>
<span class="comparison">Comparison: {metric.pluginId} ({threshold.toString})</span>
<ul>
{ renderOperator(input1) }
{ renderOperator(input2) }
</ul>
</li>
}
case TransformInput(id, transformer, inputs) => {
<li>
<span class="transformation">Transformation: {transformer.pluginId}</span>
<ul>
{ inputs.map(renderOperator) }
</ul>
</li>
}
case PathInput(id, path) => {
<li>
<span class="input">Input: {path.serialize}</span>
</li>
}
}
} | fusepoolP3/p3-silk | silk-workbench-outdated/src/main/scala/de/fuberlin/wiwiss/silk/workbench/lift/util/LinkageRuleTree.scala | Scala | apache-2.0 | 2,293 |
// Test: Cuttle timeseries with many jobs!
// This a very simple cuttle project using the time series scheduler
// to execute a lot of jobs to do load tests
package com.criteo.cuttle.examples
import com.criteo.cuttle._
import com.criteo.cuttle.timeseries._
import java.time.ZoneOffset.UTC
import java.time._
import scala.concurrent.Future
import scala.concurrent.duration._
object TestTimeSeriesWithManyJobs {
def main(args: Array[String]): Unit = {
val start: Instant = LocalDate.now.atStartOfDay.toInstant(UTC)
val jobs: Workflow = (1 to 1500).toArray
.map({ i =>
Job(s"hello-with-a-relatively-long-id-just-for-the-fun-to-break-things$i",
daily(UTC, start),
s"Hello $i",
tags = Set(Tag("hello"), Tag(s"hello-${i / 100}xx"))) { implicit e =>
val partitionToCompute = e.context.start + "-" + e.context.end
e.streams.info(s"Hello $i for $partitionToCompute")
Future.successful(Completed)
}
})
.foldLeft(Workflow.empty)(_ and _)
val world: Job[TimeSeries] = Job("world", daily(UTC, start), "World", tags = Set(Tag("world"))) { implicit e =>
e.streams.info("World!")
e.park(1.seconds).map(_ => Completed)
}
CuttleProject("Hello World", version = "123", env = ("dev", false)) {
world dependsOn jobs
}.start(logsRetention = Some(1.minute))
}
}
| criteo/cuttle | examples/src/test/scala/com/criteo/cuttle/examples/TestTimeSeriesWithManyJobs.scala | Scala | apache-2.0 | 1,394 |
package app
import skinny.test.SkinnyFlatSpec
class HelloSpec extends SkinnyFlatSpec {
addFilter(app.Hello, "/*")
addFilter(app.AsyncHello, "/*")
it should "work fine with GET Requests" in {
get("/hello") {
status should equal(200)
body should equal("Hello, Anonymous")
}
get("/hello?name=Martin") {
status should equal(200)
body should equal("Hello, Martin")
}
}
it should "work fine with POST Requests" in {
post("/hello", Map()) {
status should equal(200)
body should equal("Hello, Anonymous")
}
post("/hello", Map("name" -> "Martin")) {
status should equal(200)
body should equal("Hello, Martin")
}
}
it should "work fine with AsyncResult" in {
get("/hello/async") {
status should equal(200)
body should equal("Hello, Anonymous")
}
get("/hello/async?name=Martin") {
status should equal(200)
body should equal("Hello, Martin")
}
}
it should "return JSON response" in {
get("/hello/json") {
status should equal(200)
header("Content-Type") should equal("application/json; charset=utf-8")
body should equal("""{"message":"Hello, Anonymous"}""")
}
get("/hello/json/async?name=Martin") {
status should equal(200)
header("Content-Type") should equal("application/json; charset=utf-8")
body should equal("""{"message":"Hello, Martin"}""")
}
}
it should "return Scalate response" in {
get("/hello/scalate") {
header("Content-Type") should equal("text/html; charset=UTF-8")
status should equal(200)
}
}
}
| holycattle/skinny-framework | skinny-engine-blank-app/src/test/scala/app/HelloSpec.scala | Scala | mit | 1,622 |
package testhelpers
import commons.repositories.ActionRunner
import play.api.Application
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.routing._
import slick.dbio.DBIO
import slick.jdbc.JdbcBackend
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
object TestUtils {
val config = Map(
"play.evolutions.enabled" -> "true",
"play.evolutions.autoApply" -> "true",
"slick.dbs.default.profile" -> "slick.jdbc.H2Profile$",
"slick.dbs.default.db.driver" -> "org.h2.Driver",
"slick.dbs.default.db.url" -> "jdbc:h2:mem:play;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false",
"slick.dbs.default.db.user" -> "user",
"slick.dbs.default.db.password" -> ""
)
def appWithEmbeddedDb: Application = new GuiceApplicationBuilder()
.configure(config)
.build
def appWithEmbeddedDbWithFakeRoutes(router: Router): Application = new GuiceApplicationBuilder()
.configure(config)
.router(router)
.build
def runAndAwaitResult[T](action: DBIO[T])(implicit actionRunner: ActionRunner, duration: Duration): T = {
val future: Future[T] = actionRunner.runInTransaction(action)
Await.result(future, duration)
}
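  // Illustrative usage (assumes an implicit ActionRunner and Duration are in scope):
  //   runAndAwaitResult(DBIO.successful(42)) // runs the action in a transaction and returns 42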
}
| Dasiu/play-framework-test-project | test/testhelpers/TestUtils.scala | Scala | mit | 1,204 |
package net.badgerclaw.dropwizard.inject.option
import javax.inject.Singleton
import javax.ws.rs.ext.ParamConverterProvider
import org.glassfish.hk2.utilities.binding.AbstractBinder
class OptionParamBinder extends AbstractBinder {
override def configure(): Unit =
bind(classOf[OptionParamConverterProvider]).to(classOf[ParamConverterProvider]).in(classOf[Singleton])
}
| jostly/scala-dropwizard | src/main/scala/net/badgerclaw/dropwizard/inject/option/OptionParamBinder.scala | Scala | mit | 378 |
package ru.maizy.dev.heartbeat
/**
* Copyright (c) Nikita Kovaliov, maizy.ru, 2015
* See LICENSE.txt for details.
*/
object Roles extends Enumeration with utils.EnumerationMap {
type Role = Value
val No = Value("no")
val Stat = Value("stat")
val Frontend = Value("frontend")
}
object Modes extends Enumeration with utils.EnumerationMap {
type Mode = Value
val Production = Value("production")
val Emulator = Value("emulator")
}
case class Options(
mode: Modes.Mode = Modes.Production,
port: Int = 0,
host: String = "127.0.0.1",
// production mode
role: Roles.Value = Roles.No,
statsByNode: Int = 1,
// emulator mode
program: Option[EmulatorProgram.Value] = None
)
object OptionParser {
private val parser = new scopt.OptionParser[Options]("akka-cluster-heartbeat") {
override def showUsageOnError = true
private def inEnum(enum: utils.EnumerationMap, value: String) =
if (enum.valuesMap.contains(value)) {
success
} else {
val allowed = enum.valuesMap.keys
failure(s"Value '$value' not in allowed values list (${allowed.mkString(", ")})")
}
private def enumValues(enum: utils.EnumerationMap) = enum.valuesMap.keys.mkString("|")
head("akka-cluster-heartbeat", Version.toString)
help("help")
version("version")
opt[String]('h', "host") action { (value, c) => c.copy(host = value) }
(cmd("node")
action { (_, c) => c.copy(mode = Modes.Production) }
text { "production mode (add node to cluster)" }
children(
opt[Int]('p', "port")
        text { "port, or 0 to choose a random port" }
validate { v => if (v < 0 || v > 65535) failure("should be 0 to 65535") else success }
action { (value, c) => c.copy(port = value) },
opt[String]('r', "role")
required()
valueName enumValues(Roles)
validate { inEnum(Roles, _) }
action { (value, c) => c.copy(role = Roles.valuesMap.get(value).get) },
opt[Int]('s', "stats-by-node")
        validate { v => if (v < 0) failure("should be greater than 0") else success }
action { (value, c) => c.copy(statsByNode = value) }
)
)
(cmd("emulator")
action { (_, c) => c.copy(mode = Modes.Emulator) }
text { "emulator mode" }
children(
opt[Int]('p', "port")
action { (value, c) => c.copy(port = value) },
opt[String]('r', "program")
required()
valueName enumValues(EmulatorProgram)
validate { inEnum(EmulatorProgram, _) }
action { (value, c) => c.copy(program = EmulatorProgram.valuesMap.get(value)) }
)
)
}
def parse(args: Seq[String]): Option[Options] =
parser.parse(args, Options())
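  // Illustrative usage (hypothetical arguments):
  //   OptionParser.parse(Seq("node", "-r", "stat", "-p", "2551"))
  // yields Some(Options(mode = Modes.Production, port = 2551, role = Roles.Stat)),
  // while an unknown role such as "-r bogus" fails validation and yields None.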
}
| maizy/akka-cluster-heartbeat | src/main/scala/ru/maizy/dev/heartbeat/Options.scala | Scala | mit | 2,775 |
/*
 * Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.niflheim
import quasar.precog.common._
import org.specs2.mutable.Specification
import org.specs2._
import org.scalacheck._
import java.io.File
class V1CookedBlockFormatSpecs extends CookedBlockFormatSpecs {
val format = V1CookedBlockFormat
}
case class VersionedCookedBlockFormatSpecs() extends CookedBlockFormatSpecs {
val format = VersionedCookedBlockFormat(Map(1 -> V1CookedBlockFormat))
}
trait CookedBlockFormatSpecs extends Specification with ScalaCheck with SegmentFormatSupport {
def format: CookedBlockFormat
implicit val arbFile = Arbitrary(for {
parts <- Gen.listOfN(3, Gen.identifier map { part =>
part.substring(0, math.min(part.length, 5))
})
} yield new File(parts.mkString("/", "/", ".cooked")))
implicit val arbSegmentId = Arbitrary(genSegmentId)
"cooked block format" should {
"round trip empty segments" in {
surviveRoundTrip(format)(CookedBlockMetadata(999L, 0, new Array[(SegmentId, File)](0)))
}
"round trip simple segments" in {
surviveRoundTrip(format)(CookedBlockMetadata(999L, 1,
Array(SegmentId(1234L, CPath("a.b.c"), CLong) -> new File("/hello/there/abc.cooked"))
))
}
// this test seems to run forever?
/*"roundtrip arbitrary blocks" in {
forAll { files: List[(SegmentId, File)] =>
surviveRoundTrip(format)(CookedBlockMetadata(999L, files.length, files.toArray))
}.set(maxDiscardRatio = 20f)
}*/
}
//def surviveRoundTrip(format: CookedBlockFormat)(segments0: Array[(SegmentId, File)]) = {
def surviveRoundTrip(format: CookedBlockFormat)(segments0: CookedBlockMetadata) = {
val out = new InMemoryWritableByteChannel
format.writeCookedBlock(out, segments0) must beLike {
case _root_.scalaz.Success(_) =>
val in = new InMemoryReadableByteChannel(out.toArray)
format.readCookedBlock(in) must beLike {
case _root_.scalaz.Success(segments1) =>
segments1 must_== segments0
}
}
}
}
| drostron/quasar | niflheim/src/test/scala/quasar/niflheim/CookedBlockSpecs.scala | Scala | apache-2.0 | 2,611 |
package io.flow.build
import io.apibuilder.spec.v0.models.Service
trait Controller {
private[this] val internalErrors = scala.collection.mutable.Map[String, Seq[String]]()
private[this] val GlobalError = "Global"
protected[this] def addError(message: String): Unit = {
addError(GlobalError, message)
}
protected[this] def addError(key: String, error: String): Unit = {
internalErrors.get(key) match {
case None => {
internalErrors.put(key, Seq(error))
}
case Some(existing) => {
internalErrors.put(key, existing ++ Seq(error))
}
}
()
}
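  // e.g. (illustrative): calling addError("user.json", "missing field") twice with different
  // messages accumulates both, so errors() returns Map("user.json" -> Seq(msg1, msg2)).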
def name: String
def command: String
  /**
   * Run the build for the given services, recording any problems via `addError`
   * (they can be retrieved afterwards with `errors()`).
   */
def run(
buildType: BuildType,
downloadCache: DownloadCache,
services: Seq[Service]
) (
implicit ec: scala.concurrent.ExecutionContext
): Unit
def errors(): Map[String, Seq[String]] = internalErrors.toMap
}
| flowcommerce/api-lint | src/main/scala/io/flow/build/Controller.scala | Scala | mit | 946 |
/*
 * Copyright 2014–2020 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import slamdata.Predef._
import quasar.api.ColumnType
import quasar.common.{CPath, CPathField}
import org.specs2.execute.PendingUntilFixed._
import org.specs2.matcher.Matcher
import org.specs2.specification.core.SpecStructure
import scala.collection.immutable.{Map, Set}
import java.lang.String
abstract class ScalarStageSpec
extends ScalarStageSpec.IdsSpec
with ScalarStageSpec.WrapSpec
with ScalarStageSpec.ProjectSpec
with ScalarStageSpec.MaskSpec
with ScalarStageSpec.PivotSpec
with ScalarStageSpec.FocusedSpec
with ScalarStageSpec.CartesianSpec
with ScalarStageSpec.FullSpec
/*
* Test names must begin with the prefix specified in their
* `override def is` implementation followed by `-#*` in
* order for `pendingFragments` to come into effect.
*/
object ScalarStageSpec {
def parseNumber(prefix: String, name: String): Option[Int] = {
val regexPrefix = s"${prefix}-[1-9][0-9]*".r
val regexIdx = s"[1-9][0-9]*".r
regexPrefix.findPrefixOf(name)
.flatMap(regexIdx.findFirstIn)
.map(_.toInt)
}
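  // e.g. (illustrative): parseNumber("mask", "mask-23 disjunctively retain values in an array")
  // returns Some(23), whereas parseNumber("mask", "wrap-3 nest empty objects") returns None
  // because the description does not start with the "mask-" prefix.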
def pendingFragments(sis: SpecStructure, pendingExamples: Set[Int], prefix: String)
: SpecStructure =
sis.copy(lazyFragments = () => sis.fragments.map { f =>
parseNumber(prefix, f.description.show) match {
case Some(i) =>
if (pendingExamples.contains(i))
f.updateExecution(_.mapResult(_.pendingUntilFixed))
else
f
case None => f
}
})
/*
* Please note that this is currently *over*-specified.
* We don't technically need monotonic ids or even numerical
* ones, we just need *unique* identities. That assertion is
* quite hard to encode though. If we find we need such an
* implementation in the future, these assertions should be
* changed.
*/
trait IdsSpec extends JsonSpec {
import IdStatus.{ExcludeId, IdOnly, IncludeId}
val idsPendingExamples: Set[Int]
"ExcludeId" should {
"ids-1 emit scalar rows unmodified" in {
val input = ldjson("""
1
"hi"
true
""")
input must interpretIdsAs(ExcludeId, input)
}
"ids-2 emit vector rows unmodified" in {
val input = ldjson("""
[1, 2, 3]
{ "a": "hi", "b": { "c": null } }
[{ "d": {} }]
""")
input must interpretIdsAs(ExcludeId, input)
}
}
"IdOnly" should {
"ids-3 return monotonic integers for each scalar row" in {
val input = ldjson("""
1
"hi"
true
""")
val expected = ldjson("""
0
1
2
""")
input must interpretIdsAs(IdOnly, expected)
}
"ids-4 return monotonic integers for each vector row" in {
val input = ldjson("""
[1, 2, 3]
{ "a": "hi", "b": { "c": null } }
[{ "d": {} }]
""")
val expected = ldjson("""
0
1
2
""")
input must interpretIdsAs(IdOnly, expected)
}
}
"IncludeId" should {
"ids-5 wrap each scalar row in monotonic integers" in {
val input = ldjson("""
1
"hi"
true
""")
val expected = ldjson("""
[0, 1]
[1, "hi"]
[2, true]
""")
input must interpretIdsAs(IncludeId, expected)
}
"ids-6 wrap each vector row in monotonic integers" in {
val input = ldjson("""
[1, 2, 3]
{ "a": "hi", "b": { "c": null } }
[{ "d": {} }]
""")
val expected = ldjson("""
[0, [1, 2, 3]]
[1, { "a": "hi", "b": { "c": null } }]
[2, [{ "d": {} }]]
""")
input must interpretIdsAs(IncludeId, expected)
}
}
override def is: SpecStructure =
pendingFragments(super.is, idsPendingExamples, "ids")
def evalIds(idStatus: IdStatus, stream: JsonStream): JsonStream
def interpretIdsAs(idStatus: IdStatus, expected: JsonStream) : Matcher[JsonStream] =
bestSemanticEqual(expected) ^^ { str: JsonStream => evalIds(idStatus, str) }
}
trait WrapSpec extends JsonSpec {
protected final type Wrap = ScalarStage.Wrap
protected final val Wrap = ScalarStage.Wrap
val wrapPendingExamples: Set[Int]
"wrap" should {
"wrap-1 nest scalars" in {
val input = ldjson("""
1
"hi"
true
""")
val expected = ldjson("""
{ "foo": 1 }
{ "foo": "hi" }
{ "foo": true }
""")
input must wrapInto("foo")(expected)
}
"wrap-2 nest vectors" in {
val input = ldjson("""
[1, 2, 3]
{ "a": "hi", "b": { "c": null } }
[{ "d": {} }]
""")
val expected = ldjson("""
{ "bar": [1, 2, 3] }
{ "bar": { "a": "hi", "b": { "c": null } } }
{ "bar": [{ "d": {} }] }
""")
input must wrapInto("bar")(expected)
}
"wrap-3 nest empty objects" in {
val input = ldjson("""
"a"
{}
[]
1
""")
val expected = ldjson("""
{ "bar": "a" }
{ "bar": {} }
{ "bar": [] }
{ "bar": 1 }
""")
input must wrapInto("bar")(expected)
}
}
override def is: SpecStructure =
pendingFragments(super.is, wrapPendingExamples, "wrap")
def evalWrap(wrap: Wrap, stream: JsonStream): JsonStream
def wrapInto(name: String)(expected: JsonStream): Matcher[JsonStream] =
bestSemanticEqual(expected) ^^ { str: JsonStream => evalWrap(Wrap(name), str)}
}
trait ProjectSpec extends JsonSpec {
protected final type Project = ScalarStage.Project
protected final val Project = ScalarStage.Project
val projectPendingExamples: Set[Int]
"project" should {
"prj-1 passthrough at identity" in {
val input = ldjson("""
1
"two"
false
[1, 2, 3]
{ "a": 1, "b": "two" }
[]
{}
""")
input must projectInto(".")(input)
}
"prj-2 extract .a" in {
val input = ldjson("""
{ "a": 1, "b": "two" }
{ "a": "foo", "b": "two" }
{ "a": true, "b": "two" }
{ "a": [], "b": "two" }
{ "a": {}, "b": "two" }
{ "a": [1, 2], "b": "two" }
{ "a": { "c": 3 }, "b": "two" }
""")
val expected = ldjson("""
1
"foo"
true
[]
{}
[1, 2]
{ "c": 3 }
""")
input must projectInto(".a")(expected)
}
"prj-3 extract .a.b" in {
val input = ldjson("""
{ "a": { "b": 1 }, "b": "two" }
{ "a": { "b": "foo" }, "b": "two" }
{ "a": { "b": true }, "b": "two" }
{ "a": { "b": [] }, "b": "two" }
{ "a": { "b": {} }, "b": "two" }
{ "a": { "b": [1, 2] }, "b": "two" }
{ "a": { "b": { "c": 3 } }, "b": "two" }
""")
val expected = ldjson("""
1
"foo"
true
[]
{}
[1, 2]
{ "c": 3 }
""")
input must projectInto(".a.b")(expected)
}
"prj-4 extract .a[1]" in {
val input = ldjson("""
{ "a": [3, 1], "b": "two" }
{ "a": [3, "foo"], "b": "two" }
{ "a": [3, true], "b": "two" }
{ "a": [3, []], "b": "two" }
{ "a": [3, {}], "b": "two" }
{ "a": [3, [1, 2]], "b": "two" }
{ "a": [3, { "c": 3 }], "b": "two" }
""")
val expected = ldjson("""
1
"foo"
true
[]
{}
[1, 2]
{ "c": 3 }
""")
input must projectInto(".a[1]")(expected)
}
"prj-5 extract [1]" in {
val input = ldjson("""
[0, 1]
[0, "foo"]
[0, true]
[0, []]
[0, {}]
[0, [1, 2]]
[0, { "c": 3 }]
""")
val expected = ldjson("""
1
"foo"
true
[]
{}
[1, 2]
{ "c": 3 }
""")
input must projectInto("[1]")(expected)
}
"prj-6 extract [1][0]" in {
val input = ldjson("""
[0, [1]]
[0, ["foo"]]
[0, [true]]
[0, [[]]]
[0, [{}]]
[0, [[1, 2]]]
[0, [{ "c": 3 }]]
""")
val expected = ldjson("""
1
"foo"
true
[]
{}
[1, 2]
{ "c": 3 }
""")
input must projectInto("[1][0]")(expected)
}
"prj-7 extract [1].a" in {
val input = ldjson("""
[0, { "a": 1 }]
[false, { "a": "foo" }]
[1, { "a": true }]
[[], { "a": [] }]
["foo", { "a": {} }]
[{}, { "a": [1, 2] }]
[0, { "a": { "c": 3 } }]
""")
val expected = ldjson("""
1
"foo"
true
[]
{}
[1, 2]
{ "c": 3 }
""")
input must projectInto("[1].a")(expected)
}
"prj-8 elide rows not containing object path" in {
val input = ldjson("""
{ "x": 1 }
{ "x": 2, "y": 3 }
{ "y": 4, "z": 5 }
["a", "b"]
4
"seven"
{ "z": 4, "x": 8 }
false
{ "y": "nope", "x": {} }
{ "one": 1, "two": 2 }
{}
[]
""")
val expected = ldjson("""
1
2
8
{}
""")
input must projectInto(".x")(expected)
}
"prj-9 only extract paths starting from root" in {
val input = ldjson("""
{ "z": "b", "x": { "y": 4 } }
{ "x": 2, "y": { "x": 1 } }
{ "a": { "x": { "z": false, "y": true } }, "b": "five" }
{ "x": { "y": 1, "z": 2 } }
""")
val expected = ldjson("""
4
1
""")
input must projectInto(".x.y")(expected)
}
"prj-10 elide rows not containing array path" in {
val input = ldjson("""
[0, 1, 2, -1, -2]
[3]
[4, 5]
{ "y": 6, "z": 7 }
["a", "b", "c"]
["a", [8]]
["a", { "x": 9 }]
4.8
"seven"
false
null
{}
[]
""")
val expected = ldjson("""
1
5
"b"
[8]
{ "x": 9 }
""")
input must projectInto("[1]")(expected)
}
}
override def is: SpecStructure =
pendingFragments(super.is, projectPendingExamples, "prj")
def evalProject(project: Project, stream: JsonStream): JsonStream
def projectInto(path: String)(expected: JsonStream): Matcher[JsonStream] =
bestSemanticEqual(expected) ^^ { str: JsonStream =>
evalProject(Project(CPath.parse(path)), str)
}
}
trait MaskSpec extends JsonSpec {
import ColumnType._
protected final type Mask = ScalarStage.Mask
protected final val Mask = ScalarStage.Mask
val maskPendingExamples: Set[Int]
"masks" should {
"mask-1 drop everything when empty" in {
val input = ldjson("""
1
"hi"
[1, 2, 3]
{ "a": "hi", "b": { "c": null } }
true
[{ "d": {} }]
""")
val expected = ldjson("")
input must maskInto()(expected)
}
"mask-2 retain two scalar types at identity" in {
val input = ldjson("""
1
"hi"
[1, 2, 3]
{ "a": "hi", "b": { "c": null } }
true
[]
[{ "d": {} }]
""")
val expected = ldjson("""
1
true
""")
input must maskInto("." -> Set(Number, Boolean))(expected)
}
"mask-3 retain different sorts of numbers at identity" in {
val input = ldjson("""
42
3.14
null
27182e-4
"derp"
""")
val expected = ldjson("""
42
3.14
27182e-4
""")
input must maskInto("." -> Set(Number))(expected)
}
"mask-4 retain different sorts of objects at identity" in {
val input = ldjson("""
1
"hi"
[1, 2, 3]
{ "a": "hi", "b": { "c": null } }
true
{}
[{ "d": {} }]
{ "a": true }
""")
val expected = ldjson("""
{ "a": "hi", "b": { "c": null } }
{}
{ "a": true }
""")
input must maskInto("." -> Set(Object))(expected)
}
"mask-5 retain different sorts of arrays at identity" in {
val input = ldjson("""
1
"hi"
[1, 2, 3]
{ "a": "hi", "b": { "c": null } }
true
[]
[{ "d": {} }]
{ "a": true }
""")
val expected = ldjson("""
[1, 2, 3]
[]
[{ "d": {} }]
""")
input must maskInto("." -> Set(Array))(expected)
}
"mask-6 retain two scalar types at .a.b" in {
val input = ldjson("""
{ "a": { "b": 1 } }
null
{ "a": { "b": "hi" } }
{ "foo": true }
{ "a": { "b": [1, 2, 3] } }
[1, 2, 3]
{ "a": { "b": { "a": "hi", "b": { "c": null } } } }
{ "a": { "c": 42 } }
{ "a": { "b": true } }
{ "a": { "b": [] } }
{ "a": { "b": [{ "d": {} }] } }
""")
val expected = ldjson("""
{ "a": { "b": 1 } }
{ "a": { "b": true } }
""")
input must maskInto(".a.b" -> Set(Number, Boolean))(expected)
}
"mask-7 retain different sorts of numbers at .a.b" in {
val input = ldjson("""
{ "a": { "b": 42 } }
null
{ "foo": true }
{ "a": { "b": 3.14 } }
[1, 2, 3]
{ "a": { "b": null } }
{ "a": { "b": 27182e-4 } }
{ "a": { "b": "derp" } }
""")
val expected = ldjson("""
{ "a": { "b": 42 } }
{ "a": { "b": 3.14 } }
{ "a": { "b": 27182e-4 } }
""")
input must maskInto(".a.b" -> Set(Number))(expected)
}
"mask-8 retain different sorts of objects at .a.b" in {
val input = ldjson("""
{ "a": { "b": 1 } }
{ "a": { "b": "hi" } }
{ "a": { "b": [1, 2, 3] } }
{ "a": { "b": { "a": "hi", "b": { "c": null } } } }
{ "a": { "b": true } }
{ "a": { "b": {} } }
{ "a": { "b": [{ "d": {} }] } }
{ "a": { "b": { "a": true } } }
""")
val expected = ldjson("""
{ "a": { "b": { "a": "hi", "b": { "c": null } } } }
{ "a": { "b": {} } }
{ "a": { "b": { "a": true } } }
""")
input must maskInto(".a.b" -> Set(Object))(expected)
}
"mask-9 retain different sorts of arrays at .a.b" in {
val input = ldjson("""
{ "a": { "b": 1 } }
{ "a": { "b": "hi" } }
{ "a": { "b": [1, 2, 3] } }
{ "a": { "b": { "a": "hi", "b": { "c": null } } } }
{ "a": { "b": true } }
{ "a": { "b": [] } }
{ "a": { "b": [{ "d": {} }] } }
{ "a": { "b": { "a": true } } }
""")
val expected = ldjson("""
{ "a": { "b": [1, 2, 3] } }
{ "a": { "b": [] } }
{ "a": { "b": [{ "d": {} }] } }
""")
input must maskInto(".a.b" -> Set(Array))(expected)
}
"mask-10 discard unmasked structure" in {
val input = ldjson("""
{ "a": { "b": 42, "c": true }, "c": [] }
""")
val expected = ldjson("""
{ "a": { "c": true } }
""")
input must maskInto(".a.c" -> Set(Boolean))(expected)
}
"mask-11 compose disjunctively across paths" in {
val input = ldjson("""
{ "a": { "b": 42, "c": true }, "c": [] }
""")
val expected = ldjson("""
{ "a": { "c": true }, "c": [] }
""")
input must maskInto(".a.c" -> Set(Boolean), ".c" -> Set(Array))(expected)
}
"mask-12 compose disjunctively across suffix-overlapped paths" in {
val input = ldjson("""
{ "a": { "x": 42, "b": { "c": true } }, "b": { "c": [] }, "c": [1, 2] }
""")
val expected = ldjson("""
{ "a": { "b": { "c": true } }, "b": { "c": [] } }
""")
input must maskInto(".a.b.c" -> Set(Boolean), ".b.c" -> Set(Array))(expected)
}
"mask-13 compose disjunctively across paths where one side is false" in {
val input = ldjson("""
{ "a": { "b": 42, "c": true } }
""")
val expected = ldjson("""
{ "a": { "c": true } }
""")
input must maskInto(".a.c" -> Set(Boolean), ".a" -> Set(Array))(expected)
}
"mask-14 subsume inner by outer" in {
val input = ldjson("""
{ "a": { "b": 42, "c": true }, "c": [] }
""")
val expected = ldjson("""
{ "a": { "b": 42, "c": true } }
""")
input must maskInto(".a.b" -> Set(Boolean), ".a" -> Set(Object))(expected)
}
"mask-15 disallow the wrong sort of vector" in {
val input = ldjson("""
{ "a": true }
[1, 2, 3]
""")
val expected1 = ldjson("""
{ "a": true }
""")
val expected2 = ldjson("""
[1, 2, 3]
""")
input must maskInto("." -> Set(Object))(expected1)
input must maskInto("." -> Set(Array))(expected2)
}
"mask-16 compact surrounding array" in {
ldjson("[1, 2, 3]") must maskInto("[1]" -> Set(Number))(ldjson("[2]"))
}
"mask-17 compact surrounding array with multiple values retained" in {
val input = ldjson("""
[1, 2, 3, 4, 5]
""")
val expected = ldjson("""
[1, 3, 4]
""")
input must maskInto(
"[0]" -> Set(Number),
"[2]" -> Set(Number),
"[3]" -> Set(Number))(expected)
}
"mask-18 compact surrounding nested array with multiple values retained" in {
val input = ldjson("""
{ "a": { "b": [1, 2, 3, 4, 5], "c" : null } }
""")
val expected = ldjson("""
{ "a": { "b": [1, 3, 4] } }
""")
input must maskInto(
".a.b[0]" -> Set(Number),
".a.b[2]" -> Set(Number),
".a.b[3]" -> Set(Number))(expected)
}
"mask-19 compact array containing nested arrays with single nested value retained" in {
val input = ldjson("""
{ "a": [[[1, 3, 5], "k"], "foo", { "b": [5, 6, 7], "c": [] }], "d": "x" }
""")
val expected = ldjson("""
{ "a": [{"b": [5, 6, 7] }] }
""")
input must maskInto(".a[2].b" -> Set(Array))(expected)
}
"mask-20 remove object entirely when no values are retained" in {
ldjson("""{ "a": 42 }""") must maskInto(".a" -> Set(Boolean))(ldjson(""))
}
"mask-21 remove array entirely when no values are retained" in {
ldjson("[42]") must maskInto("[0]" -> Set(Boolean))(ldjson(""))
}
"mask-22 retain vector at depth and all recursive contents" in {
val input = ldjson("""{ "a": { "b": { "c": { "e": true }, "d": 42 } } }""")
input must maskInto(".a.b" -> Set(Object))(input)
}
// minimization of `multilevelFlatten.test`
"mask-23 disjunctively retain values in an array" in {
val input = ldjson("""
["a", 13]
["b", []]
["c", {}]
["d", [12]]
["e", { "z": 14}]
""")
val expected = ldjson("""
["a"]
["b"]
["c", {}]
["d"]
["e", { "z": 14}]
""")
input must maskInto("[0]" -> ColumnType.Top, "[1]" -> Set(ColumnType.Object))(expected)
}
"mask-24 disjunctively retain values in an array with compaction" in {
val input = ldjson("""
[13, "a"]
[[], "b"]
[{}, "c"]
[[12], "d"]
[{ "z": 14}, "e"]
""")
val expected = ldjson("""
["a"]
["b"]
[{}, "c"]
["d"]
[{ "z": 14}, "e"]
""")
input must maskInto("[0]" -> Set(ColumnType.Object), "[1]" -> ColumnType.Top)(expected)
}
"mask-25 disjunctively retain values in an object" in {
val input = ldjson("""
{ "v": "a", "w": 13 }
{ "v": "b", "w": [] }
{ "v": "c", "w": {} }
{ "v": "d", "w": [12] }
{ "v": "e", "w": { "z": 14} }
""")
val expected = ldjson("""
{ "v": "a" }
{ "v": "b" }
{ "v": "c", "w": {} }
{ "v": "d" }
{ "v": "e", "w": { "z": 14} }
""")
input must maskInto(".v" -> ColumnType.Top, ".w" -> Set(ColumnType.Object))(expected)
}
"mask all values at Top to themselves" >> {
// minimization of `arrayLengthHeterogeneous.test`
"mask-26 at identity path" in {
val input = ldjson("""
[[1], {"z":2}, [], {}, "foo", null, 42, 42.2, true]
{"a":[1], "b":{"z":2}, "c":[], "d":{}, "e":"foo", "f":null, "g":42, "h":42.2, "i":true}
[]
{}
"foo"
null
42
42.2
true
""")
input must maskInto("." -> ColumnType.Top)(input)
}
"mask-27 at object projected path" in {
val input = ldjson("""
{"y": [[1], {"z":2}, [], {}, "foo", null, 42, 42.2, true]}
{"y": {"a":[1], "b":{"z":2}, "c":[], "d":{}, "e":"foo", "f":null, "g":42, "h":42.2, "i":true}}
{"y": []}
{"y": {}}
{"y": "foo"}
{"y": null}
{"y": 42}
{"y": 42.2}
{"y": true}
""")
input must maskInto(".y" -> ColumnType.Top)(input)
}
"mask-28 at array projected path" in {
val input = ldjson("""
[[[1], {"z":2}, [], {}, "foo", null, 42, 42.2, true]]
[{"a":[1], "b":{"z":2}, "c":[], "d":{}, "e":"foo", "f":null, "g":42, "h":42.2, "i":true}]
[[]]
[{}]
["foo"]
[null]
[42]
[42.2]
[true]
""")
input must maskInto("[0]" -> ColumnType.Top)(input)
}
}
"retain each non-temporal scalar type at identity" >> {
val input = ldjson("""
1
2.2
27182e-4
"hi"
true
false
null
[]
{}
[1, 2, 3]
{ "a": "hi", "b": null }
""")
"mask-29 Null" in {
val expected = ldjson("""null""")
input must maskInto("." -> Set(Null))(expected)
}
"mask-30 Boolean" in {
val expected = ldjson("""
true
false
""")
input must maskInto("." -> Set(Boolean))(expected)
}
"mask-31 Number" in {
val expected = ldjson("""
1
2.2
27182e-4
""")
input must maskInto("." -> Set(Number))(expected)
}
"mask-32 String" in {
val expected = ldjson(""""hi"""")
input must maskInto("." -> Set(ColumnType.String))(expected)
}
}
"mask-33 mask multiple columns at Top" in {
val input = ldjson("""
{ "x": "hi", "y": null }
[4, 5]
""")
input must maskInto(
".x" -> ColumnType.Top,
".y" -> ColumnType.Top,
"[0]" -> ColumnType.Top,
"[1]" -> ColumnType.Top)(input)
}
}
override def is: SpecStructure =
pendingFragments(super.is, maskPendingExamples, "mask")
def evalMask(mask: Mask, stream: JsonStream): JsonStream
def maskInto(
masks: (String, Set[ColumnType])*)(
expected: JsonStream)
: Matcher[JsonStream] =
bestSemanticEqual(expected) ^^ { str: JsonStream =>
evalMask(Mask(Map(masks.map({ case (k, v) => CPath.parse(k) -> v }): _*)), str)
}
}
trait PivotSpec extends JsonSpec {
protected final type Pivot = ScalarStage.Pivot
protected final val Pivot = ScalarStage.Pivot
val pivotPendingExamples: Set[Int]
"pivot" should {
"shift an array" >> {
val input = ldjson("""
[1, 2, 3]
[4, 5, 6]
[7, 8, 9, 10]
[11]
[]
[12, 13]
""")
"pivot-1 ExcludeId" >> {
val expected = ldjson("""
1
2
3
4
5
6
7
8
9
10
11
12
13
""")
input must pivotInto(IdStatus.ExcludeId, ColumnType.Array)(expected)
}
"pivot-2 IdOnly" >> {
val expected = ldjson("""
0
1
2
0
1
2
0
1
2
3
0
0
1
""")
input must pivotInto(IdStatus.IdOnly, ColumnType.Array)(expected)
}
"pivot-3 IncludeId" >> {
val expected = ldjson("""
[0, 1]
[1, 2]
[2, 3]
[0, 4]
[1, 5]
[2, 6]
[0, 7]
[1, 8]
[2, 9]
[3, 10]
[0, 11]
[0, 12]
[1, 13]
""")
input must pivotInto(IdStatus.IncludeId, ColumnType.Array)(expected)
}
}
"shift an object" >> {
val input = ldjson("""
{ "a": 1, "b": 2, "c": 3 }
{ "d": 4, "e": 5, "f": 6 }
{ "g": 7, "h": 8, "i": 9, "j": 10 }
{ "k": 11 }
{}
{ "l": 12, "m": 13 }
""")
"pivot-4 ExcludeId" >> {
val expected = ldjson("""
1
2
3
4
5
6
7
8
9
10
11
12
13
""")
input must pivotInto(IdStatus.ExcludeId, ColumnType.Object)(expected)
}
"pivot-5 IdOnly" >> {
val expected = ldjson("""
"a"
"b"
"c"
"d"
"e"
"f"
"g"
"h"
"i"
"j"
"k"
"l"
"m"
""")
input must pivotInto(IdStatus.IdOnly, ColumnType.Object)(expected)
}
"pivot-6 IncludeId" >> {
val expected = ldjson("""
["a", 1]
["b", 2]
["c", 3]
["d", 4]
["e", 5]
["f", 6]
["g", 7]
["h", 8]
["i", 9]
["j", 10]
["k", 11]
["l", 12]
["m", 13]
""")
input must pivotInto(IdStatus.IncludeId, ColumnType.Object)(expected)
}
}
"omit undefined row in object pivot" >> {
val input = ldjson("""
{ "a": 1 }
12
{ "b": 2 }
""")
"pivot-12 ExcludeId" in {
val expected = ldjson("""
1
2
""")
input must pivotInto(IdStatus.ExcludeId, ColumnType.Object)(expected)
}
"pivot-13 IdOnly" in {
val expected = ldjson("""
"a"
"b"
""")
input must pivotInto(IdStatus.IdOnly, ColumnType.Object)(expected)
}
"pivot-14 IncludeId" in {
val expected = ldjson("""
["a", 1]
["b", 2]
""")
input must pivotInto(IdStatus.IncludeId, ColumnType.Object)(expected)
}
}
"omit undefined row in array pivot" >> {
val input = ldjson("""
[11]
12
[13]
""")
"pivot-15 ExcludeId" in {
val expected = ldjson("""
11
13
""")
input must pivotInto(IdStatus.ExcludeId, ColumnType.Array)(expected)
}
"pivot-16 IdOnly" in {
val expected = ldjson("""
0
0
""")
input must pivotInto(IdStatus.IdOnly, ColumnType.Array)(expected)
}
"pivot-17 IncludeId" in {
val expected = ldjson("""
[0, 11]
[0, 13]
""")
input must pivotInto(IdStatus.IncludeId, ColumnType.Array)(expected)
}
}
"preserve empty arrays as values of an array pivot" >> {
val input = ldjson("""
[ 1, "two", [] ]
[ [] ]
[ [], 3, "four" ]
""")
"pivot-18 ExludeId" in {
val expected = ldjson("""
1
"two"
[]
[]
[]
3
"four"
""")
input must pivotInto(IdStatus.ExcludeId, ColumnType.Array)(expected)
}
"pivot-19 IdOnly" in {
val expected = ldjson("""
0
1
2
0
0
1
2
""")
input must pivotInto(IdStatus.IdOnly, ColumnType.Array)(expected)
}
"pivot-20 IncludeId" in {
val expected = ldjson("""
[0, 1]
[1, "two"]
[2, []]
[0, []]
[0, []]
[1, 3]
[2, "four"]
""")
input must pivotInto(IdStatus.IncludeId, ColumnType.Array)(expected)
}
}
"preserve empty objects as values of an object pivot" >> {
val input = ldjson("""
{ "1": 1, "2": "two", "3": {} }
{ "4": {} }
{ "5": {}, "6": 3, "7": "four" }
""")
"pivot-21 ExcludeId" in {
val expected = ldjson("""
1
"two"
{}
{}
{}
3
"four"
""")
input must pivotInto(IdStatus.ExcludeId, ColumnType.Object)(expected)
}
"pivot-22 IdOnly" in {
val expected = ldjson("""
"1"
"2"
"3"
"4"
"5"
"6"
"7"
""")
input must pivotInto(IdStatus.IdOnly, ColumnType.Object)(expected)
}
"pivot-23 IncludeId" in {
val expected = ldjson("""
["1", 1]
["2", "two"]
["3", {}]
["4", {}]
["5", {}]
["6", 3]
["7", "four"]
""")
input must pivotInto(IdStatus.IncludeId, ColumnType.Object)(expected)
}
}
"omit results when object pivoting a value of a different kind" >> {
val input = ldjson("""
1
"three"
false
null
["x", true, {}, []]
{ "a": 1, "b": "two", "c": {}, "d": [] }
""")
"pivot-24 ExcludeId" in {
val expected = ldjson("""
1
"two"
{}
[]
""")
input must pivotInto(IdStatus.ExcludeId, ColumnType.Object)(expected)
}
"pivot-25 IdOnly" in {
val expected = ldjson("""
"a"
"b"
"c"
"d"
""")
input must pivotInto(IdStatus.IdOnly, ColumnType.Object)(expected)
}
"pivot-26 IncludeId" in {
val expected = ldjson("""
["a", 1]
["b", "two"]
["c", {}]
["d", []]
""")
input must pivotInto(IdStatus.IncludeId, ColumnType.Object)(expected)
}
}
"pivot-10 omit results when array pivoting a value of a different kind" >> {
val input = ldjson("""
1
"two"
false
null
["x", true, {}, []]
{ "a": 1, "b": "two", "c": {}, "d": [] }
""")
"pivot-27 ExcludeId" in {
val expected = ldjson("""
"x"
true
{}
[]
""")
input must pivotInto(IdStatus.ExcludeId, ColumnType.Array)(expected)
}
"pivot-28 IdOnly" in {
val expected = ldjson("""
0
1
2
3
""")
input must pivotInto(IdStatus.IdOnly, ColumnType.Array)(expected)
}
"pivot-29 IncludeId" in {
val expected = ldjson("""
[0, "x"]
[1, true]
[2, {}]
[3, []]
""")
input must pivotInto(IdStatus.IncludeId, ColumnType.Array)(expected)
}
}
"omit empty vector from pivot results" >> {
val input = ldjson("""
{}
[]
""")
"pivot-30 ExcludeId" in {
input must pivotInto(IdStatus.ExcludeId, ColumnType.Array)(ldjson(""))
input must pivotInto(IdStatus.ExcludeId, ColumnType.Object)(ldjson(""))
}
"pivot-31 IdOnly" in {
input must pivotInto(IdStatus.IdOnly, ColumnType.Array)(ldjson(""))
input must pivotInto(IdStatus.IdOnly, ColumnType.Object)(ldjson(""))
}
"pivot-32 IncludeId" in {
input must pivotInto(IdStatus.IncludeId, ColumnType.Array)(ldjson(""))
input must pivotInto(IdStatus.IncludeId, ColumnType.Object)(ldjson(""))
}
}
}
override def is: SpecStructure =
pendingFragments(super.is, pivotPendingExamples, "pivot")
def evalPivot(pivot: Pivot, stream: JsonStream): JsonStream
def pivotInto(
idStatus: IdStatus,
structure: ColumnType.Vector)(
expected: JsonStream)
: Matcher[JsonStream] =
bestSemanticEqual(expected) ^^ { str: JsonStream =>
evalPivot(Pivot(idStatus, structure), str)
}
}
trait FocusedSpec extends JsonSpec {
import ColumnType._
import IdStatus._
import ScalarStage.{Mask, Pivot, Project, Wrap}
val focusedPendingExamples: Set[Int]
"sequential focused stages" should {
"foc-5 Wrap . Pivot (no-op)" in {
val stages = List(
Wrap("foo"),
Pivot(ExcludeId, Object))
val input = ldjson("""
1
"two"
false
null
[1, 2, 3]
{ "a": 7, "b": "two" }
[]
{}
""")
input must interpretInto(stages)(input)
}
"foc-6 Wrap . Pivot (empty set)" in {
val stages = List(
Wrap("foo"),
Pivot(ExcludeId, Array))
val input = ldjson("""
1
"two"
false
null
[1, 2, 3]
{ "a": 7, "b": "two" }
[]
{}
""")
input must interpretInto(stages)(ldjson(""))
}
"foc-7 Wrap . Wrap" in {
val stages = List(
Wrap("foo"),
Wrap("bar"))
val input = ldjson("""
1
"two"
false
null
[1, 2, 3]
{ "a": 7, "b": "two" }
[]
{}
""")
val expected = ldjson("""
{ "bar": { "foo": 1 } }
{ "bar": { "foo": "two" } }
{ "bar": { "foo": false } }
{ "bar": { "foo": null } }
{ "bar": { "foo": [1, 2, 3] } }
{ "bar": { "foo": { "a": 7, "b": "two" } } }
{ "bar": { "foo": [] } }
{ "bar": { "foo": {} } }
""")
input must interpretInto(stages)(expected)
}
"foc-8 Wrap . Project (no-op)" in {
val stages = List(
Wrap("foo"),
project("foo"))
val input = ldjson("""
1
"two"
false
null
[1, 2, 3]
{ "a": 7, "b": "two" }
[]
{}
""")
input must interpretInto(stages)(input)
}
"foc-9 Wrap . Project (empty set)" in {
val stages = List(
Wrap("foo"),
project("bar"))
val input = ldjson("""
1
"two"
false
null
[1, 2, 3]
{ "a": 7, "b": "two" }
[]
{}
""")
input must interpretInto(stages)(ldjson(""))
}
"foc-10 Wrap . Mask (identity)" in {
val stages = List(
Wrap("foo"),
mask("." -> Set(Object)))
val input = ldjson("""
1
"two"
false
null
[1, 2, 3]
{ "a": 7, "b": "two" }
[]
{}
""")
val expected = ldjson("""
{ "foo": 1 }
{ "foo": "two" }
{ "foo": false }
{ "foo": null }
{ "foo": [1, 2, 3] }
{ "foo": { "a": 7, "b": "two" } }
{ "foo": [] }
{ "foo": {} }
""")
input must interpretInto(stages)(expected)
}
"foc-11 Wrap . Mask (empty set)" in {
val stages = List(
Wrap("foo"),
mask("." -> Set(Array)))
val input = ldjson("""
1
"two"
false
null
[1, 2, 3]
{ "a": 7, "b": "two" }
[]
{}
""")
input must interpretInto(stages)(ldjson(""))
}
"foc-12 Project . Pivot (object)" in {
val stages = List(
project("foo"),
Pivot(ExcludeId, Object))
val input = ldjson("""
{ "foo": 1 }
{ "foo": "two" }
{ "foo": false }
{ "foo": null }
{ "foo": [1, 2, 3] }
{ "foo": { "a": 7, "b": "two" } }
{ "foo": [] }
{ "foo": {} }
{ "bar": 2.2 }
true
""")
val expected = ldjson("""
7
"two"
""")
input must interpretInto(stages)(expected)
}
"foc-13 Project . Pivot (array)" in {
val stages = List(
project("foo"),
Pivot(ExcludeId, Array))
val input = ldjson("""
{ "foo": 1 }
{ "foo": "two" }
{ "foo": false }
{ "foo": null }
{ "foo": [1, 2, 3] }
{ "foo": { "a": 7, "b": "two" } }
{ "foo": [] }
{ "foo": {} }
{ "bar": 2.2 }
true
""")
val expected = ldjson("""
1
2
3
""")
input must interpretInto(stages)(expected)
}
"foc-14 Project . Wrap" in {
val stages = List(
project("foo"),
Wrap("foo"))
val input = ldjson("""
{ "foo": 1 }
{ "foo": "two", "bar": "a" }
{ "foo": false, "baz": -1, "ack": -2 }
{ "foo": null }
{ "foo": [1, 2, 3] }
{ "foo": { "a": 7, "b": "two" } }
{ "foo": [] }
{ "foo": {} }
{ "bar": 2.2 }
true
""")
val expected = ldjson("""
{ "foo": 1 }
{ "foo": "two" }
{ "foo": false }
{ "foo": null }
{ "foo": [1, 2, 3] }
{ "foo": { "a": 7, "b": "two" } }
{ "foo": [] }
{ "foo": {} }
""")
input must interpretInto(stages)(expected)
}
"foc-15 Project . Project" in {
val stages = List(
project("foo"),
project("a"))
val input = ldjson("""
{ "foo": 1 }
{ "foo": "two", "bar": "a" }
{ "foo": false, "baz": -1, "ack": -2 }
{ "foo": null }
{ "foo": [1, 2, 3] }
{ "foo": { "a": 7, "b": "two" } }
{ "foo": [] }
{ "foo": {} }
{ "a": 2.2 }
true
""")
val expected = ldjson("""
7
""")
input must interpretInto(stages)(expected)
}
"foc-16 Project . Mask" in {
val stages = List(
project("foo"),
mask("." -> Set(Object)))
val input = ldjson("""
{ "foo": 1 }
{ "foo": "two", "bar": "a" }
{ "foo": false, "baz": -1, "ack": -2 }
{ "foo": null }
{ "foo": [1, 2, 3] }
{ "foo": { "a": 7, "b": "two" } }
{ "foo": [] }
{ "foo": {} }
{ "a": 2.2 }
true
""")
val expected = ldjson("""
{ "a": 7, "b": "two" }
{}
""")
input must interpretInto(stages)(expected)
}
"foc-17 Mask . Pivot" in {
val stages = List(
mask("." -> Set(Object)),
Pivot(ExcludeId, Object))
val input = ldjson("""
1
"two"
false
null
[1, 2, 3]
{ "a": 1, "b": "two" }
[]
{}
""")
val expected = ldjson("""
1
"two"
""")
input must interpretInto(stages)(expected)
}
"foc-18 Mask . Wrap" in {
val stages = List(
mask("." -> Set(Object)),
Wrap("foo"))
val input = ldjson("""
1
"two"
false
null
[1, 2, 3]
[1, 2, 3]
{ "a": 1, "b": "two" }
[]
{}
""")
val expected = ldjson("""
{ "foo": { "a": 1, "b": "two" } }
{ "foo": {} }
""")
input must interpretInto(stages)(expected)
}
"foc-19 Mask . Project" in {
val stages = List(
mask("." -> Set(Object)),
project("b"))
val input = ldjson("""
1
"two"
false
null
[1, 2, 3]
[1, 2, 3]
{ "a": 1, "b": "two" }
[]
{}
""")
val expected = ldjson("""
"two"
""")
input must interpretInto(stages)(expected)
}
"foc-20 Mask . Mask" in {
val stages = List(
mask("." -> Set(Object)),
mask("." -> Set(Object)))
val input = ldjson("""
1
"two"
false
null
[1, 2, 3]
[1, 2, 3]
{ "a": 1, "b": "two" }
[]
{}
""")
val expected = ldjson("""
{ "a": 1, "b": "two" }
{}
""")
input must interpretInto(stages)(expected)
}
"Pivot . Wrap" >> {
val input = ldjson("""
1
"two"
false
null
[2, "foo"]
[3, [2.2]]
[4, { "p": true }]
{ "a": 5, "b": "bar" }
{ "a": 6, "b": [1.1] }
{ "a": 7, "b": { "z": false } }
[]
{}
""")
"Object" >> {
"foc-29 IncludeId" in {
val stages = List(Pivot(IncludeId, Object), Wrap("foo"))
val expected = ldjson("""
{ "foo": ["a", 5] }
{ "foo": ["b", "bar"] }
{ "foo": ["a", 6] }
{ "foo": ["b", [1.1]] }
{ "foo": ["a", 7] }
{ "foo": ["b", { "z": false }] }
""")
input must interpretInto(stages)(expected)
}
"foc-30 ExcludeId" in {
val stages = List(Pivot(ExcludeId, Object), Wrap("foo"))
val expected = ldjson("""
{ "foo": 5 }
{ "foo": "bar" }
{ "foo": 6 }
{ "foo": [1.1] }
{ "foo": 7 }
{ "foo": { "z": false } }
""")
input must interpretInto(stages)(expected)
}
"foc-31 IdOnly" in {
val stages = List(Pivot(IdOnly, Object), Wrap("foo"))
val expected = ldjson("""
{ "foo": "a" }
{ "foo": "b" }
{ "foo": "a" }
{ "foo": "b" }
{ "foo": "a" }
{ "foo": "b" }
""")
input must interpretInto(stages)(expected)
}
}
"Array" >> {
"foc-32 IncludeId" in {
val stages = List(Pivot(IncludeId, Array), Wrap("foo"))
val expected = ldjson("""
{ "foo": [0, 2] }
{ "foo": [1, "foo"] }
{ "foo": [0, 3] }
{ "foo": [1, [2.2]] }
{ "foo": [0, 4] }
{ "foo": [1, { "p": true }] }
""")
input must interpretInto(stages)(expected)
}
"foc-33 ExcludeId" in {
val stages = List(Pivot(ExcludeId, Array), Wrap("foo"))
val expected = ldjson("""
{ "foo": 2 }
{ "foo": "foo" }
{ "foo": 3 }
{ "foo": [2.2] }
{ "foo": 4 }
{ "foo": { "p": true } }
""")
input must interpretInto(stages)(expected)
}
"foc-34 IdOnly" in {
val stages = List(Pivot(IdOnly, Array), Wrap("foo"))
val expected = ldjson("""
{ "foo": 0 }
{ "foo": 1 }
{ "foo": 0 }
{ "foo": 1 }
{ "foo": 0 }
{ "foo": 1 }
""")
input must interpretInto(stages)(expected)
}
}
}
"Pivot . Mask" >> {
val input = ldjson("""
1
"two"
false
null
[2, "foo"]
[3, [2.2]]
[4, { "p": true }]
[5, {}]
{ "a": 6, "b": "bar" }
{ "a": 7, "b": [1.1] }
{ "a": 8, "b": { "z": false } }
{ "a": 9, "b": {} }
[]
{}
""")
"Object" >> {
"foc-35 IncludeId" in {
val stages = List(
Pivot(IncludeId, Object),
Mask(Map(
CPath.parse("[0]") -> ColumnType.Top,
CPath.parse("[1]") -> Set(ColumnType.Object))))
val expected = ldjson("""
["a"]
["b"]
["a"]
["b"]
["a"]
["b", { "z": false }]
["a"]
["b", {}]
""")
input must interpretInto(stages)(expected)
}
"foc-36 ExcludeId" in {
val stages = List(Pivot(ExcludeId, Object), mask("." -> Set(Object)))
val expected = ldjson("""
{ "z": false }
{}
""")
input must interpretInto(stages)(expected)
}
"foc-37 IdOnly" in {
val stages = List(Pivot(IdOnly, Object), mask("." -> Set(ColumnType.String)))
val expected = ldjson("""
"a"
"b"
"a"
"b"
"a"
"b"
"a"
"b"
""")
input must interpretInto(stages)(expected)
}
}
"Array" >> {
"foc-38 IncludeId" in {
val stages = List(
Pivot(IncludeId, Array),
Mask(Map(
CPath.parse("[0]") -> ColumnType.Top,
CPath.parse("[1]") -> Set(ColumnType.Object))))
val expected = ldjson("""
[0]
[1]
[0]
[1]
[0]
[1, { "p": true }]
[0]
[1, {}]
""")
input must interpretInto(stages)(expected)
}
"foc-39 ExcludeId" in {
val stages = List(Pivot(ExcludeId, Array), mask("." -> Set(Object)))
val expected = ldjson("""
{ "p": true }
{}
""")
input must interpretInto(stages)(expected)
}
"foc-40 IdOnly" in {
val stages = List(Pivot(IdOnly, Array), mask("." -> Set(ColumnType.Number)))
val expected = ldjson("""
0
1
0
1
0
1
0
1
""")
input must interpretInto(stages)(expected)
}
}
}
"Pivot . Project" >> {
val input = ldjson("""
1
"two"
false
null
[2, "foo"]
[3, [2.2]]
[4, { "p": 10 }]
[5, {}]
{ "a": 6, "b": "bar" }
{ "a": 7, "b": [1.1] }
{ "a": 8, "b": { "z": 11 } }
{ "a": 9, "b": {} }
[]
{}
""")
"Object" >> {
"foc-41 IncludeId" in {
val stages = List(Pivot(IncludeId, Object), project("z"))
val expected = ldjson("")
input must interpretInto(stages)(expected)
}
"foc-42 ExcludeId" in {
val stages = List(Pivot(ExcludeId, Object), project("z"))
val expected = ldjson("11")
input must interpretInto(stages)(expected)
}
"foc-43 IdOnly" in {
val stages = List(Pivot(IdOnly, Object), project("z"))
val expected = ldjson("")
input must interpretInto(stages)(expected)
}
}
"Array" >> {
"foc-44 IncludeId" in {
val stages = List(Pivot(IncludeId, Array), project("p"))
val expected = ldjson("")
input must interpretInto(stages)(expected)
}
"foc-45 ExcludeId" in {
val stages = List(Pivot(ExcludeId, Array), project("p"))
val expected = ldjson("10")
input must interpretInto(stages)(expected)
}
"foc-46 IdOnly" in {
val stages = List(Pivot(IdOnly, Array), project("p"))
val expected = ldjson("")
input must interpretInto(stages)(expected)
}
}
}
"Pivot . Pivot" >> {
val input = ldjson("""
1
"two"
false
null
[2, "foo"]
[3, [2.2]]
[4, { "p": 10 }]
[5, {}]
{ "a": 6, "b": "bar" }
{ "a": 7, "b": [1.1] }
{ "a": 8, "b": { "z": 11 } }
{ "a": 9, "b": {} }
[]
{}
""")
"Object Object" >> {
"foc-47 IncludeId IncludeId" in {
val stages = List(Pivot(IncludeId, Object), Pivot(IncludeId, Object))
input must interpretInto(stages)(ldjson(""))
}
"foc-48 IncludeId ExcludeId" in {
val stages = List(Pivot(IncludeId, Object), Pivot(ExcludeId, Object))
input must interpretInto(stages)(ldjson(""))
}
"foc-49 IncludeId IdOnly" in {
val stages = List(Pivot(IncludeId, Object), Pivot(IdOnly, Object))
input must interpretInto(stages)(ldjson(""))
}
"foc-50 ExcludeId IncludeId" in {
val stages = List(Pivot(ExcludeId, Object), Pivot(IncludeId, Object))
input must interpretInto(stages)(ldjson("""["z", 11]"""))
}
"foc-51 ExcludeId ExcludeId" in {
val stages = List(Pivot(ExcludeId, Object), Pivot(ExcludeId, Object))
input must interpretInto(stages)(ldjson("11"))
}
"foc-52 ExcludeId IdOnly" in {
val stages = List(Pivot(ExcludeId, Object), Pivot(IdOnly, Object))
input must interpretInto(stages)(ldjson(""""z""""))
}
"foc-53 IdOnly IncludeId" in {
val stages = List(Pivot(IdOnly, Object), Pivot(IncludeId, Object))
input must interpretInto(stages)(ldjson(""))
}
"foc-54 IdOnly ExcludeId" in {
val stages = List(Pivot(IdOnly, Object), Pivot(ExcludeId, Object))
input must interpretInto(stages)(ldjson(""))
}
"foc-55 IdOnly IdOnly" in {
val stages = List(Pivot(IdOnly, Object), Pivot(IdOnly, Object))
input must interpretInto(stages)(ldjson(""))
}
}
"Object Array" >> {
"foc-56 IncludeId IncludeId" in {
val stages = List(Pivot(IncludeId, Object), Pivot(IncludeId, Array))
val expected = ldjson("""
[0, "a"]
[1, 6]
[0, "b"]
[1, "bar"]
[0, "a"]
[1, 7]
[0, "b"]
[1, [1.1]]
[0, "a"]
[1, 8]
[0, "b"]
[1, { "z": 11 }]
[0, "a"]
[1, 9]
[0, "b"]
[1, {}]
""")
input must interpretInto(stages)(expected)
}
"foc-57 IncludeId ExcludeId" in {
val stages = List(Pivot(IncludeId, Object), Pivot(ExcludeId, Array))
val expected = ldjson("""
"a"
6
"b"
"bar"
"a"
7
"b"
[1.1]
"a"
8
"b"
{ "z": 11 }
"a"
9
"b"
{}
""")
input must interpretInto(stages)(expected)
}
"foc-58 IncludeId IdOnly" in {
val stages = List(Pivot(IncludeId, Object), Pivot(IdOnly, Array))
val expected = ldjson("""
0
1
0
1
0
1
0
1
0
1
0
1
0
1
0
1
""")
input must interpretInto(stages)(expected)
}
"foc-59 ExcludeId IncludeId" in {
val stages = List(Pivot(ExcludeId, Object), Pivot(IncludeId, Array))
input must interpretInto(stages)(ldjson("[0, 1.1]"))
}
"foc-60 ExcludeId ExcludeId" in {
val stages = List(Pivot(ExcludeId, Object), Pivot(ExcludeId, Array))
input must interpretInto(stages)(ldjson("1.1"))
}
"foc-61 ExcludeId IdOnly" in {
val stages = List(Pivot(ExcludeId, Object), Pivot(IdOnly, Array))
input must interpretInto(stages)(ldjson("0"))
}
"foc-62 IdOnly IncludeId" in {
val stages = List(Pivot(IdOnly, Object), Pivot(IncludeId, Array))
input must interpretInto(stages)(ldjson(""))
}
"foc-63 IdOnly ExcludeId" in {
val stages = List(Pivot(IdOnly, Object), Pivot(ExcludeId, Array))
input must interpretInto(stages)(ldjson(""))
}
"foc-64 IdOnly IdOnly" in {
val stages = List(Pivot(IdOnly, Object), Pivot(IdOnly, Array))
input must interpretInto(stages)(ldjson(""))
}
}
"Array Object" >> {
"foc-65 IncludeId IncludeId" in {
val stages = List(Pivot(IncludeId, Array), Pivot(IncludeId, Object))
input must interpretInto(stages)(ldjson(""))
}
"foc-66 IncludeId ExcludeId" in {
val stages = List(Pivot(IncludeId, Array), Pivot(ExcludeId, Object))
input must interpretInto(stages)(ldjson(""))
}
"foc-67 IncludeId IdOnly" in {
val stages = List(Pivot(IncludeId, Array), Pivot(IdOnly, Object))
input must interpretInto(stages)(ldjson(""))
}
"foc-68 ExcludeId IncludeId" in {
val stages = List(Pivot(ExcludeId, Array), Pivot(IncludeId, Object))
input must interpretInto(stages)(ldjson("""["p", 10]"""))
}
"foc-69 ExcludeId ExcludeId" in {
val stages = List(Pivot(ExcludeId, Array), Pivot(ExcludeId, Object))
input must interpretInto(stages)(ldjson("10"))
}
"foc-70 ExcludeId IdOnly" in {
val stages = List(Pivot(ExcludeId, Array), Pivot(IdOnly, Object))
input must interpretInto(stages)(ldjson(""""p""""))
}
"foc-71 IdOnly IncludeId" in {
val stages = List(Pivot(IdOnly, Array), Pivot(IncludeId, Object))
input must interpretInto(stages)(ldjson(""))
}
"foc-72 IdOnly ExcludeId" in {
val stages = List(Pivot(IdOnly, Array), Pivot(ExcludeId, Object))
input must interpretInto(stages)(ldjson(""))
}
"foc-73 IdOnly IdOnly" in {
val stages = List(Pivot(IdOnly, Array), Pivot(IdOnly, Object))
input must interpretInto(stages)(ldjson(""))
}
}
"Array Array" >> {
"foc-74 IncludeId IncludeId" in {
val stages = List(Pivot(IncludeId, Array), Pivot(IncludeId, Array))
val expected = ldjson("""
[0, 0]
[1, 2]
[0, 1]
[1, "foo"]
[0, 0]
[1, 3]
[0, 1]
[1, [2.2]]
[0, 0]
[1, 4]
[0, 1]
[1, { "p": 10 }]
[0, 0]
[1, 5]
[0, 1]
[1, {}]
""")
input must interpretInto(stages)(expected)
}
"foc-75 IncludeId ExcludeId" in {
val stages = List(Pivot(IncludeId, Array), Pivot(ExcludeId, Array))
val expected = ldjson("""
0
2
1
"foo"
0
3
1
[2.2]
0
4
1
{ "p": 10 }
0
5
1
{}
""")
input must interpretInto(stages)(expected)
}
"foc-76 IncludeId IdOnly" in {
val stages = List(Pivot(IncludeId, Array), Pivot(IdOnly, Array))
val expected = ldjson("""
0
1
0
1
0
1
0
1
0
1
0
1
0
1
0
1
""")
input must interpretInto(stages)(expected)
}
"foc-77 ExcludeId IncludeId" in {
val stages = List(Pivot(ExcludeId, Array), Pivot(IncludeId, Array))
input must interpretInto(stages)(ldjson("[0, 2.2]"))
}
"foc-78 ExcludeId ExcludeId" in {
val stages = List(Pivot(ExcludeId, Array), Pivot(ExcludeId, Array))
input must interpretInto(stages)(ldjson("2.2"))
}
"foc-79 ExcludeId IdOnly" in {
val stages = List(Pivot(ExcludeId, Array), Pivot(IdOnly, Array))
input must interpretInto(stages)(ldjson("0"))
}
"foc-80 IdOnly IncludeId" in {
val stages = List(Pivot(IdOnly, Array), Pivot(IncludeId, Array))
input must interpretInto(stages)(ldjson(""))
}
"foc-81 IdOnly ExcludeId" in {
val stages = List(Pivot(IdOnly, Array), Pivot(ExcludeId, Array))
input must interpretInto(stages)(ldjson(""))
}
"foc-82 IdOnly IdOnly" in {
val stages = List(Pivot(IdOnly, Array), Pivot(IdOnly, Array))
input must interpretInto(stages)(ldjson(""))
}
}
}
}
override def is: SpecStructure =
pendingFragments(super.is, focusedPendingExamples, "foc")
def evalFocused(stages: List[ScalarStage.Focused], stream: JsonStream): JsonStream
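  // Convenience constructors used throughout the examples above: parse a textual
  // CPath so specs can write mask("." -> Set(Object)) or project("foo") directly.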
def mask(masks: (String, Set[ColumnType])*): Mask =
Mask(Map(masks.map({ case (k, v) => CPath.parse(k) -> v }): _*))
def project(path: String): Project =
Project(CPath.parse(path))
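  // Builds a specs2 matcher: run the focused stages over the actual stream and
  // compare the result with `expected` up to semantic JSON equality.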
def interpretInto(
stages: List[ScalarStage.Focused])(
expected: JsonStream)
: Matcher[JsonStream] =
bestSemanticEqual(expected) ^^ { str: JsonStream =>
evalFocused(stages, str)
}
}
trait CartesianSpec extends JsonSpec {
protected final type Cartesian = ScalarStage.Cartesian
protected final val Cartesian = ScalarStage.Cartesian
val cartesianPendingExamples: Set[Int]
"cartesian" should {
// a0 as a1, b0 as b1, c0 as c1, d0 as d1
"cart-1 cross fields with no parse instructions" in {
val input = ldjson("""
{ "a0": "hi", "b0": null, "c0": { "x": 42 }, "d0": [1, 2, 3] }
""")
val expected = ldjson("""
{ "a1": "hi", "b1": null, "c1": { "x": 42 }, "d1": [1, 2, 3] }
""")
val targets = Map(
(CPathField("a1"), (CPathField("a0"), Nil)),
(CPathField("b1"), (CPathField("b0"), Nil)),
(CPathField("c1"), (CPathField("c0"), Nil)),
(CPathField("d1"), (CPathField("d0"), Nil)))
input must cartesianInto(targets)(expected)
}
// a0 as a1, b0 as b1
"cart-2 cross fields with no parse instructions ignoring extra fields" in {
val input = ldjson("""
{ "a0": "hi", "b0": null, "c0": 42 }
""")
val expected = ldjson("""
{ "a1": "hi", "b1": null }
""")
val targets = Map(
(CPathField("a1"), (CPathField("a0"), Nil)),
(CPathField("b1"), (CPathField("b0"), Nil)))
input must cartesianInto(targets)(expected)
}
// a0 as a1, b0 as b1, d0 as d1
"cart-3 cross fields with no parse instructions ignoring absent fields" in {
val input = ldjson("""
{ "a0": "hi", "b0": null }
""")
val expected = ldjson("""
{ "a1": "hi", "b1": null }
""")
val targets = Map(
(CPathField("a1"), (CPathField("a0"), Nil)),
(CPathField("b1"), (CPathField("b0"), Nil)),
(CPathField("d1"), (CPathField("d0"), Nil)))
input must cartesianInto(targets)(expected)
}
// a0[_] as a1, b0 as b1, c0{_} as c1
"cart-4 cross fields with single pivot" in {
import ScalarStage.Pivot
val input = ldjson("""
{ "a0": [1, 2, 3], "b0": null, "c0": { "x": 4, "y": 5 } }
""")
val expected = ldjson("""
{ "a1": 1, "b1": null, "c1": 4 }
{ "a1": 1, "b1": null, "c1": 5 }
{ "a1": 2, "b1": null, "c1": 4 }
{ "a1": 2, "b1": null, "c1": 5 }
{ "a1": 3, "b1": null, "c1": 4 }
{ "a1": 3, "b1": null, "c1": 5 }
""")
val targets = Map(
(CPathField("a1"),
(CPathField("a0"), List(Pivot(IdStatus.ExcludeId, ColumnType.Array)))),
(CPathField("b1"),
(CPathField("b0"), Nil)),
(CPathField("c1"),
(CPathField("c0"), List(Pivot(IdStatus.ExcludeId, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
// a[_].x0.y0{_} as y, a[_].x1[_] as z, b{_:} as b, c as c
"cart-5 cross fields with multiple nested pivots" in {
import ScalarStage.{Pivot, Project}
val input = ldjson("""
{
"a": [ { "x0": { "y0": { "f": "eff", "g": "gee" }, "y1": { "h": 42 } }, "x1": [ "0", 0, null ] } ],
"b": { "k1": null, "k2": null },
"c": true
}
""")
val expected = ldjson("""
{ "y": "eff", "z": "0" , "b": "k1", "c": true }
{ "y": "gee", "z": "0" , "b": "k1", "c": true }
{ "y": "eff", "z": 0 , "b": "k1", "c": true }
{ "y": "gee", "z": 0 , "b": "k1", "c": true }
{ "y": "eff", "z": null, "b": "k1", "c": true }
{ "y": "gee", "z": null, "b": "k1", "c": true }
{ "y": "eff", "z": "0" , "b": "k2", "c": true }
{ "y": "gee", "z": "0" , "b": "k2", "c": true }
{ "y": "eff", "z": 0 , "b": "k2", "c": true }
{ "y": "gee", "z": 0 , "b": "k2", "c": true }
{ "y": "eff", "z": null, "b": "k2", "c": true }
{ "y": "gee", "z": null, "b": "k2", "c": true }
""")
val targets = Map(
(CPathField("y"),
(CPathField("a"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Project(CPath.parse("x0")),
Project(CPath.parse("y0")),
Pivot(IdStatus.ExcludeId, ColumnType.Object)))),
(CPathField("z"),
(CPathField("a"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Project(CPath.parse("x1")),
Pivot(IdStatus.ExcludeId, ColumnType.Array)))),
(CPathField("b"),
(CPathField("b"), List(
Pivot(IdStatus.IdOnly, ColumnType.Object)))),
(CPathField("c"),
(CPathField("c"), Nil)))
input must cartesianInto(targets)(expected)
}
// a as a, b[_] as ba, b{_} as bm
"cart-6 emit defined fields when some are undefined" in {
import ScalarStage.{Mask, Pivot}
val input = ldjson("""
{ "a": 1, "b": [ "two", "three" ] }
{ "a": 2, "b": { "x": "four", "y": "five" } }
{ "a": 3, "b": 42 }
""")
val expected = ldjson("""
{ "a": 1, "ba": "two" }
{ "a": 1, "ba": "three" }
{ "a": 2, "bm": "four" }
{ "a": 2, "bm": "five" }
{ "a": 3 }
""")
val targets = Map(
(CPathField("a"), (CPathField("a"), Nil)),
(CPathField("ba"), (CPathField("b"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Array))),
Pivot(IdStatus.ExcludeId, ColumnType.Array)))),
(CPathField("bm"), (CPathField("b"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Object))),
Pivot(IdStatus.ExcludeId, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
// a[_] as a, b[_] as b
"cart-9 pivoting retains row alignment through undefineds" in {
import ScalarStage.{Mask, Pivot}
val input = ldjson("""
{ "a": [1], "b": [4, 5] }
{ "a": [2] }
{ "a": [3], "b": [6] }
""")
val expected = ldjson("""
{ "a": 1, "b": 4 }
{ "a": 1, "b": 5 }
{ "a": 2 }
{ "a": 3, "b": 6 }
""")
val targets = Map(
(CPathField("a"), (CPathField("a"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Array))),
Pivot(IdStatus.ExcludeId, ColumnType.Array)))),
(CPathField("b"), (CPathField("b"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Array))),
Pivot(IdStatus.ExcludeId, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
// a[_] as a, b[_] as b
"cart-10 pivoting retains row alignment through undefineds (pt II)" in {
import ScalarStage.{Mask, Pivot}
val input = ldjson("""
{ "a": [1], "b": [4, 5] }
{ "a": [2] }
{ "b": [6] }
{ "a": [3], "b": [7] }
""")
val expected = ldjson("""
{ "a": 1, "b": 4 }
{ "a": 1, "b": 5 }
{ "a": 2 }
{ "b": 6 }
{ "a": 3, "b": 7 }
""")
val targets = Map(
(CPathField("a"), (CPathField("a"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Array))),
Pivot(IdStatus.ExcludeId, ColumnType.Array)))),
(CPathField("b"), (CPathField("b"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Array))),
Pivot(IdStatus.ExcludeId, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
// a0 as a1, b0 as b1
"cart-11 cross fields when some are undefined" in {
val input = ldjson("""
{ "a0": 1 }
{ "a0": 2, "b0": "foo" }
{ "b0": "bar" }
{ "c": 12 }
""")
val expected = ldjson("""
{ "a1": 1 }
{ "a1": 2, "b1": "foo" }
{ "b1": "bar" }
""")
val targets = Map(
(CPathField("a1"), (CPathField("a0"), Nil)),
(CPathField("b1"), (CPathField("b0"), Nil)))
input must cartesianInto(targets)(expected)
}
// minimization of `multilevelFlatten.test`
// x[0] as x0, x[1] as x1
"cart-12 cross fields when some are undefined after array projection" in {
import ScalarStage.Project
val input = ldjson("""
{ "x": ["foo"] }
{ "x": ["bar", 42] }
""")
val expected = ldjson("""
{ "x0": "foo" }
{ "x0": "bar", "x1": 42 }
""")
val targets = Map(
(CPathField("x0"), (CPathField("x"), List(
Project(CPath.parse("[0]"))))),
(CPathField("x1"), (CPathField("x"), List(
Project(CPath.parse("[1]"))))))
input must cartesianInto(targets)(expected)
}
// x.a as xa, x.b as xb
"cart-13 cross fields when some are undefined after object projection" in {
import ScalarStage.Project
val input = ldjson("""
{ "x": { "a": "foo" } }
{ "x": { "a": "bar", "b": 42 } }
""")
val expected = ldjson("""
{ "xa": "foo" }
{ "xa": "bar", "xb": 42 }
""")
val targets = Map(
(CPathField("xa"), (CPathField("x"), List(
Project(CPath.parse(".a"))))),
(CPathField("xb"), (CPathField("x"), List(
Project(CPath.parse(".b"))))))
input must cartesianInto(targets)(expected)
}
// minimization of `flattenArrayValueAndIndexWithField.test`
// a as x0, b[_] as x1
"cart-14 cross fields when some are undefined after array pivot" in {
import ScalarStage.Pivot
val input = ldjson("""
{ "a": 1, "b": [true, true, true] }
{ "a": 2, "b": [false, false] }
{ "a": 3, "b": 42 }
{ "a": 4 }
""")
val expected = ldjson("""
{ "x0": 1, "x1": [0, true] }
{ "x0": 1, "x1": [1, true] }
{ "x0": 1, "x1": [2, true] }
{ "x0": 2, "x1": [0, false] }
{ "x0": 2, "x1": [1, false] }
{ "x0": 3 }
{ "x0": 4 }
""")
val targets = Map(
(CPathField("x0"), (CPathField("a"), Nil)),
(CPathField("x1"), (CPathField("b"), List(
Pivot(IdStatus.IncludeId, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
// a as x0, b{_} as x1
"cart-15 cross fields when some are undefined after object pivot" in {
import ScalarStage.Pivot
val input = ldjson("""
{ "a": 1, "b": {"x":true, "y":true, "z":true} }
{ "a": 2, "b": {"x":false, "y":false} }
{ "a": 3, "b": 42 }
{ "a": 4 }
""")
val expected = ldjson("""
{ "x0": 1, "x1": ["x", true] }
{ "x0": 1, "x1": ["y", true] }
{ "x0": 1, "x1": ["z", true] }
{ "x0": 2, "x1": ["x", false] }
{ "x0": 2, "x1": ["y", false] }
{ "x0": 3 }
{ "x0": 4 }
""")
val targets = Map(
(CPathField("x0"), (CPathField("a"), Nil)),
(CPathField("x1"), (CPathField("b"), List(
Pivot(IdStatus.IncludeId, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
// Nested pivots in a cartesian can emit multiple rows where the
// pivoted value is undefined. When these undefined rows are crossed
// in the cartesian, they must be preserved.
"nested pivoting in a cartouche preserves undefineds" >> {
"Pivot . Pivot" >> {
"Array Array " >> {
import ScalarStage.Pivot
val input = ldjson("""
{ "a": 1, "b": [["one", "two"], "three", ["four"]] }
{ "a": 2, "b": [{ "x": "five", "y": "six" }, { "z": "seven" }] }
{ "a": 3, "b": "eight" }
{ "a": 4, "c": "nine" }
""")
"cart-24 ExcludeId" in {
val expected = ldjson("""
{ "a0": 1, "b0": "one" }
{ "a0": 1, "b0": "two" }
{ "a0": 1 }
{ "a0": 1, "b0": "four" }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Pivot(IdStatus.ExcludeId, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
"cart-25 IdOnly" in {
val expected = ldjson("""
{ "a0": 1, "b0": 0 }
{ "a0": 1, "b0": 1 }
{ "a0": 1 }
{ "a0": 1, "b0": 0 }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Pivot(IdStatus.IdOnly, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
"cart-26 IncludeId" in {
val expected = ldjson("""
{ "a0": 1, "b0": [0, "one"] }
{ "a0": 1, "b0": [1, "two"] }
{ "a0": 1 }
{ "a0": 1, "b0": [0, "four"] }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Pivot(IdStatus.IncludeId, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
}
"Object Object" >> {
import ScalarStage.Pivot
val input = ldjson("""
{ "a": 1, "b": { "x": { "q": "one", "r": "two"}, "y": "three", "z": { "s": "four" } } }
{ "a": 2, "b": { "a": ["five", "six"], "b": "seven" } }
{ "a": 3, "b": "eight" }
{ "a": 4, "c": "nine" }
""")
"cart-27 ExcludeId" in {
val expected = ldjson("""
{ "a0": 1, "b0": "one" }
{ "a0": 1, "b0": "two" }
{ "a0": 1 }
{ "a0": 1, "b0": "four" }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Object),
Pivot(IdStatus.ExcludeId, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
"cart-28 IdOnly" in {
val expected = ldjson("""
{ "a0": 1, "b0": "q" }
{ "a0": 1, "b0": "r" }
{ "a0": 1 }
{ "a0": 1, "b0": "s" }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Object),
Pivot(IdStatus.IdOnly, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
"cart-29 IncludeId" in {
val expected = ldjson("""
{ "a0": 1, "b0": ["q", "one"] }
{ "a0": 1, "b0": ["r", "two"] }
{ "a0": 1 }
{ "a0": 1, "b0": ["s", "four"] }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Object),
Pivot(IdStatus.IncludeId, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
}
}
"Mask . Pivot . Mask . Pivot" >> {
"Array Array" >> {
import ScalarStage.{Mask, Pivot}
val input = ldjson("""
{ "a": 1, "b": [["one", "two"], "three", ["four"]] }
{ "a": 2, "b": [{ "x": "five", "y": "six" }, { "z": "seven" }] }
{ "a": 3, "b": "eight" }
{ "a": 4, "c": "nine" }
""")
"cart-30 ExcludeId" in {
val expected = ldjson("""
{ "a0": 1, "b0": "one" }
{ "a0": 1, "b0": "two" }
{ "a0": 1 }
{ "a0": 1, "b0": "four" }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Array))),
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Mask(Map(CPath.Identity -> Set(ColumnType.Array))),
Pivot(IdStatus.ExcludeId, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
"cart-31 IdOnly" in {
val expected = ldjson("""
{ "a0": 1, "b0": 0 }
{ "a0": 1, "b0": 1 }
{ "a0": 1 }
{ "a0": 1, "b0": 0 }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Array))),
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Mask(Map(CPath.Identity -> Set(ColumnType.Array))),
Pivot(IdStatus.IdOnly, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
"cart-32 IncludeId" in {
val expected = ldjson("""
{ "a0": 1, "b0": [0, "one"] }
{ "a0": 1, "b0": [1, "two"] }
{ "a0": 1 }
{ "a0": 1, "b0": [0, "four"] }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Array))),
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Mask(Map(CPath.Identity -> Set(ColumnType.Array))),
Pivot(IdStatus.IncludeId, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
}
"Object Object" >> {
import ScalarStage.{Mask, Pivot}
val input = ldjson("""
{ "a": 1, "b": { "x": { "q": "one", "r": "two"}, "y": "three", "z": { "s": "four" } } }
{ "a": 2, "b": { "a": ["five", "six"], "b": "seven" } }
{ "a": 3, "b": "eight" }
{ "a": 4, "c": "nine" }
""")
"cart-33 ExcludeId" in {
val expected = ldjson("""
{ "a0": 1, "b0": "one" }
{ "a0": 1, "b0": "two" }
{ "a0": 1 }
{ "a0": 1, "b0": "four" }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Object))),
Pivot(IdStatus.ExcludeId, ColumnType.Object),
Mask(Map(CPath.Identity -> Set(ColumnType.Object))),
Pivot(IdStatus.ExcludeId, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
"cart-34 IdOnly" in {
val expected = ldjson("""
{ "a0": 1, "b0": "q" }
{ "a0": 1, "b0": "r" }
{ "a0": 1 }
{ "a0": 1, "b0": "s" }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Object))),
Pivot(IdStatus.ExcludeId, ColumnType.Object),
Mask(Map(CPath.Identity -> Set(ColumnType.Object))),
Pivot(IdStatus.IdOnly, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
"cart-35 IncludeId" in {
val expected = ldjson("""
{ "a0": 1, "b0": ["q", "one"] }
{ "a0": 1, "b0": ["r", "two"] }
{ "a0": 1 }
{ "a0": 1, "b0": ["s", "four"] }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Mask(Map(CPath.Identity -> Set(ColumnType.Object))),
Pivot(IdStatus.ExcludeId, ColumnType.Object),
Mask(Map(CPath.Identity -> Set(ColumnType.Object))),
Pivot(IdStatus.IncludeId, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
}
}
"Project . Pivot . Project . Pivot" >> {
"Array Array" >> {
import ScalarStage.{Pivot, Project}
val input = ldjson("""
{ "a": 1, "b": { "x": ["one", "two"] } }
{ "a": 2, "b": { "x": [{ "q": ["three", "four"] }, { "p": "five" }, "six"] } }
{ "a": 3, "b": { "x": "seven" } }
{ "a": 4, "b": "eight" }
{ "a": 5, "c": "nine" }
""")
"cart-36 ExcludeId" in {
val expected = ldjson("""
{ "a0": 1 }
{ "a0": 1 }
{ "a0": 2, "b0": "three" }
{ "a0": 2, "b0": "four" }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
{ "a0": 5 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Project(CPath.parse(".x")),
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Project(CPath.parse(".q")),
Pivot(IdStatus.ExcludeId, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
"cart-37 IdOnly" in {
val expected = ldjson("""
{ "a0": 1 }
{ "a0": 1 }
{ "a0": 2, "b0": 0 }
{ "a0": 2, "b0": 1 }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
{ "a0": 5 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Project(CPath.parse(".x")),
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Project(CPath.parse(".q")),
Pivot(IdStatus.IdOnly, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
"cart-38 IncludeId" in {
val expected = ldjson("""
{ "a0": 1 }
{ "a0": 1 }
{ "a0": 2, "b0": [0, "three"] }
{ "a0": 2, "b0": [1, "four"] }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
{ "a0": 5 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Project(CPath.parse(".x")),
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Project(CPath.parse(".q")),
Pivot(IdStatus.IncludeId, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
}
"Object Object" >> {
import ScalarStage.{Pivot, Project}
val input = ldjson("""
{ "a": 1, "b": { "x": { "s": "one", "t": "two" } } }
{ "a": 2, "b": { "x": { "z": { "q": { "f": "three", "g": "four" } }, "y": { "p": "five" }, "r": "six" } } }
{ "a": 3, "b": { "x": "seven" } }
{ "a": 4, "b": "eight" }
{ "a": 5, "c": "nine" }
""")
"cart-39 ExcludeId" in {
val expected = ldjson("""
{ "a0": 1 }
{ "a0": 1 }
{ "a0": 2, "b0": "three" }
{ "a0": 2, "b0": "four" }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
{ "a0": 5 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Project(CPath.parse(".x")),
Pivot(IdStatus.ExcludeId, ColumnType.Object),
Project(CPath.parse(".q")),
Pivot(IdStatus.ExcludeId, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
"cart-40 IdOnly" in {
val expected = ldjson("""
{ "a0": 1 }
{ "a0": 1 }
{ "a0": 2, "b0": "f" }
{ "a0": 2, "b0": "g" }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
{ "a0": 5 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Project(CPath.parse(".x")),
Pivot(IdStatus.ExcludeId, ColumnType.Object),
Project(CPath.parse(".q")),
Pivot(IdStatus.IdOnly, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
"cart-41 IncludeId" in {
val expected = ldjson("""
{ "a0": 1 }
{ "a0": 1 }
{ "a0": 2, "b0": ["f", "three"] }
{ "a0": 2, "b0": ["g", "four"] }
{ "a0": 2 }
{ "a0": 2 }
{ "a0": 3 }
{ "a0": 4 }
{ "a0": 5 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Project(CPath.parse(".x")),
Pivot(IdStatus.ExcludeId, ColumnType.Object),
Project(CPath.parse(".q")),
Pivot(IdStatus.IncludeId, ColumnType.Object)))))
input must cartesianInto(targets)(expected)
}
}
}
"Pivot . Wrap" >> {
"cart-42 Array " in {
import ScalarStage.{Pivot, Wrap}
val input = ldjson("""
{ "a": 1, "b": [["one", "two"], "three", ["four"]] }
{ "a": 2, "b": [{ "x": "five", "y": "six" }, { "z": "seven" }] }
{ "a": 3, "b": "eight" }
{ "a": 4, "c": "nine" }
""")
val expected = ldjson("""
{ "a0": 1, "b0": { "q": ["one", "two"] } }
{ "a0": 1, "b0": { "q": "three" } }
{ "a0": 1, "b0": { "q": ["four"] } }
{ "a0": 2, "b0": { "q": { "x": "five", "y": "six" } } }
{ "a0": 2, "b0": { "q": { "z": "seven" } } }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Array),
Wrap("q")))))
input must cartesianInto(targets)(expected)
}
"cart-43 Object " in {
import ScalarStage.{Pivot, Wrap}
val input = ldjson("""
{ "a": 1, "b": { "x": { "t": "one", "r": "two"}, "y": "three", "z": { "s": "four" } } }
{ "a": 2, "b": { "a": ["five", "six"], "b": "seven" } }
{ "a": 3, "b": "eight" }
{ "a": 4, "c": "nine" }
""")
val expected = ldjson("""
{ "a0": 1, "b0": { "q": { "t": "one", "r": "two"} } }
{ "a0": 1, "b0": { "q": "three" } }
{ "a0": 1, "b0": { "q": { "s": "four" } } }
{ "a0": 2, "b0": { "q": ["five", "six"] } }
{ "a0": 2, "b0": { "q": "seven" } }
{ "a0": 3 }
{ "a0": 4 }
""")
val targets = Map(
(CPathField("a0"), (CPathField("a"), Nil)),
(CPathField("b0"), (CPathField("b"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Object),
Wrap("q")))))
input must cartesianInto(targets)(expected)
}
}
}
// a0[_] as a, b0[_] as b
"cart-44 cross with undefined values on both sides" >> {
import ScalarStage.Pivot
val input = ldjson("""
{ "a0": [1] }
{ "a0": [2], "b0": ["z"] }
{ "b0": ["y"] }
{ "a0": [3], "b0": "x" }
{ "a0": 4, "b0": ["w"] }
{ "a0": 5, "b0": "v" }
""")
val expected = ldjson("""
{ "a": 1 }
{ "a": 2, "b": "z" }
{ "b": "y" }
{ "a": 3 }
{ "b": "w" }
""")
val targets = Map(
(CPathField("a"), (CPathField("a0"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Array)))),
(CPathField("b"), (CPathField("b0"), List(
Pivot(IdStatus.ExcludeId, ColumnType.Array)))))
input must cartesianInto(targets)(expected)
}
}
override def is: SpecStructure =
pendingFragments(super.is, cartesianPendingExamples, "cart")
def evalCartesian(cartesian: Cartesian, stream: JsonStream): JsonStream
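  // Analogous to interpretInto above: evaluate the Cartesian built from the given
  // cartouches against the actual stream and compare semantically with `expected`.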
def cartesianInto(
cartouches: Map[CPathField, (CPathField, List[ScalarStage.Focused])])(
expected: JsonStream)
: Matcher[JsonStream] =
bestSemanticEqual(expected) ^^ { str: JsonStream =>
evalCartesian(Cartesian(cartouches), str)
}
}
trait FullSpec extends JsonSpec {
import ScalarStage.{Cartesian, Pivot, Project}
val fullPendingExamples: Set[Int]
"scalar stages" should {
"full-1 evaluate basic nested cartesians" in {
val targets = List(
Project(CPath.parse("a")),
Pivot(IdStatus.ExcludeId, ColumnType.Object),
Cartesian(Map(
(CPathField("b0"), (CPathField("b"), List(Pivot(IdStatus.ExcludeId, ColumnType.Object)))),
(CPathField("c0"), (CPathField("c"), Nil)))),
Cartesian(Map(
(CPathField("b1"), (CPathField("b0"), List(Pivot(IdStatus.ExcludeId, ColumnType.Array)))),
(CPathField("c1"), (CPathField("c0"), Nil)))))
val stages = ScalarStages(IdStatus.ExcludeId, targets)
val input = ldjson("""
{"a": {"x": {"b": {"k": [1, 2, 3], "j": 4}, "c": 5}, "y": {"b": {"k": [6, 7, 8], "j": 9}, "c": 10}}}
""")
val expected = ldjson("""
{"b1": 1, "c1": 5}
{"b1": 2, "c1": 5}
{"b1": 3, "c1": 5}
{"c1": 5}
{"b1": 6, "c1": 10}
{"b1": 7, "c1": 10}
{"b1": 8, "c1": 10}
{"c1": 10}
""")
input must interpretFullInto(stages)(expected)
}
}
override def is: SpecStructure =
pendingFragments(super.is, fullPendingExamples, "full")
def evalFull(stages: ScalarStages, stream: JsonStream): JsonStream
def interpretFullInto(
stages: ScalarStages)(
expected: JsonStream)
: Matcher[JsonStream] =
bestSemanticEqual(expected) ^^ { str: JsonStream =>
evalFull(stages, str)
}
}
}
| slamdata/quasar | frontend/src/test/scala/quasar/ScalarStageSpec.scala | Scala | apache-2.0 | 95,579 |
package poly.algebra
import poly.algebra.factory._
import poly.algebra.specgroup._
/**
* Represents an upper semilattice that has a specific top element.
*
* An instance of this typeclass should satisfy the following axioms:
* - $lawSupremumAssociativity
* - $lawSupremumCommutativity
* - $lawSupremumIdempotency
* - $lawTop
* @author Tongfei Chen
*/
trait BoundedUpperSemilattice[@sp(Boolean) X] extends UpperSemilattice[X] with HasTop[X]
object BoundedUpperSemilattice extends ImplicitGetter[BoundedUpperSemilattice]
trait BoundedUpperSemilatticeWithEq[@sp(Boolean) X] extends BoundedUpperSemilattice[X] with EqUpperSemilattice[X] { self =>
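  // Order-theoretic dual: reversing the order turns suprema into infima and the
  // top element into the bottom element.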
override def reverse: EqBoundedLowerSemilattice[X] = new EqBoundedLowerSemilattice[X] {
def le(x: X, y: X) = self.le(y, x)
def inf(x: X, y: X) = self.sup(x, y)
def bot = self.top
override def reverse = self
}
}
object BoundedUpperSemilatticeWithEq extends ImplicitGetter[BoundedUpperSemilatticeWithEq]
| ctongfei/poly-algebra | src/main/scala/poly/algebra/BoundedUpperSemilattice.scala | Scala | mit | 983 |
package com.sksamuel.elastic4s
import org.elasticsearch.action.search._
import org.elasticsearch.client.Client
import org.elasticsearch.common.unit.TimeValue
import org.elasticsearch.index.query.QueryBuilder
import org.elasticsearch.search.aggregations.AggregationBuilders
import org.elasticsearch.search.rescore.RescoreBuilder
import org.elasticsearch.search.sort.SortBuilder
import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration
import scala.language.implicitConversions
/** @author Stephen Samuel */
trait SearchDsl
extends QueryDsl
with FilterDsl
with FacetDsl
with HighlightDsl
with ScriptFieldDsl
with SuggestionDsl
with IndexesTypesDsl {
implicit def toRichResponse(resp: SearchResponse): RichSearchResponse = new RichSearchResponse(resp)
def select(indexes: String*): SearchDefinition = search(indexes: _*)
def search(indexes: String*): SearchDefinition = new SearchDefinition(IndexesTypes(indexes))
def rescore(query: QueryDefinition): RescoreDefinition = {
new RescoreDefinition(query)
}
def multi(searches: Iterable[SearchDefinition]): MultiSearchDefinition = new MultiSearchDefinition(searches)
def multi(searches: SearchDefinition*): MultiSearchDefinition = new MultiSearchDefinition(searches)
implicit object SearchDefinitionExecutable
extends Executable[SearchDefinition, SearchResponse] {
override def apply(c: Client, t: SearchDefinition): Future[SearchResponse] = {
injectFuture(c.search(t.build, _))
}
}
implicit object MultiSearchDefinitionExecutable
extends Executable[MultiSearchDefinition, MultiSearchResponse] {
override def apply(c: Client, t: MultiSearchDefinition): Future[MultiSearchResponse] = {
injectFuture(c.multiSearch(t.build, _))
}
}
}
class MultiSearchDefinition(searches: Iterable[SearchDefinition]) {
def build: MultiSearchRequest = {
val builder = new MultiSearchRequestBuilder(ProxyClients.client)
searches foreach (builder add _.build)
builder.request()
}
}
class RescoreDefinition(query: QueryDefinition) {
val builder = RescoreBuilder.queryRescorer(query.builder)
var windowSize = 50
def window(size: Int): RescoreDefinition = {
this.windowSize = size
this
}
def originalQueryWeight(weight: Double): RescoreDefinition = {
builder.setQueryWeight(weight.toFloat)
this
}
def rescoreQueryWeight(weight: Double): RescoreDefinition = {
builder.setRescoreQueryWeight(weight.toFloat)
this
}
def scoreMode(scoreMode: String): RescoreDefinition = {
builder.setScoreMode(scoreMode)
this
}
}
class SearchDefinition(indexesTypes: IndexesTypes) {
val _builder = {
new SearchRequestBuilder(ProxyClients.client)
.setIndices(indexesTypes.indexes: _*)
.setTypes(indexesTypes.types: _*)
}
def build = _builder.request()
private var includes: Array[String] = Array.empty
private var excludes: Array[String] = Array.empty
/** Adds a single string query to this search
*
* @param string the query string
*/
def query(string: String): SearchDefinition = query(new QueryStringQueryDefinition(string))
def query(block: => QueryDefinition): SearchDefinition = query2(block.builder)
def query2(block: => QueryBuilder): SearchDefinition = {
_builder.setQuery(block)
this
}
def bool(block: => BoolQueryDefinition): SearchDefinition = {
_builder.setQuery(block.builder)
this
}
def inner(inners: InnerHitDefinition*): this.type = inner(inners)
def inner(inners: Iterable[InnerHitDefinition]): this.type = {
for ( inner <- inners )
_builder.addInnerHit(inner.name, inner.inner)
this
}
@deprecated("Use postFilter as a direct replacement keyword. Filter was removed in elasticsearch.", "1.5")
def filter(filter: FilterDefinition): this.type = postFilter(filter)
def postFilter(block: => FilterDefinition): this.type = {
_builder.setPostFilter(block.builder)
this
}
def queryCache(queryCache: Boolean): this.type = {
_builder.setQueryCache(queryCache)
this
}
@deprecated("Facets are deprecated, use aggregations", "1.3.0")
def facets(iterable: Iterable[FacetDefinition]): SearchDefinition = {
iterable.foreach(facet => _builder.addFacet(facet.builder))
this
}
@deprecated("Facets are deprecated, use aggregations", "1.3.0")
def facets(f: FacetDefinition*): SearchDefinition = facets(f.toIterable)
def aggregations(iterable: Iterable[AbstractAggregationDefinition]): SearchDefinition = {
iterable.foreach(agg => _builder.addAggregation(agg.builder))
this
}
def aggregations(a: AbstractAggregationDefinition*): SearchDefinition = aggregations(a.toIterable)
def aggs(a: AbstractAggregationDefinition*): SearchDefinition = aggregations(a.toIterable)
def aggs(iterable: Iterable[AbstractAggregationDefinition]): SearchDefinition = aggregations(iterable)
def aggregations(json: String): this.type = {
_builder.setAggregations(json.getBytes("UTF-8"))
this
}
def sort(sorts: SortDefinition*): SearchDefinition = sort2(sorts.map(_.builder): _*)
def sort2(sorts: SortBuilder*): SearchDefinition = {
sorts.foreach(_builder.addSort)
this
}
/** This method introduces zero or more script field definitions into the search construction
*
* @param sfieldDefs zero or more [[ScriptFieldDefinition]] instances
* @return this, an instance of [[SearchDefinition]]
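   *
   * A minimal illustrative sketch (the index name and script body are hypothetical):
   * {{{
   *   search in "accounts" scriptfields (
   *     ScriptFieldDefinition("doubled", "doc['balance'].value * 2", None, None)
   *   )
   * }}}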
*/
def scriptfields(sfieldDefs: ScriptFieldDefinition*): this.type = {
import scala.collection.JavaConverters._
sfieldDefs.foreach {
case ScriptFieldDefinition(name, script, Some(lang), Some(params)) => _builder
.addScriptField(name, lang, script, params.asJava)
case ScriptFieldDefinition(name, script, Some(lang), None) => _builder
.addScriptField(name, lang, script, Map.empty[String, AnyRef].asJava)
case ScriptFieldDefinition(name, script, None, Some(params)) => _builder
.addScriptField(name, script, params.asJava)
case ScriptFieldDefinition(name, script, None, None) => _builder.addScriptField(name, script)
}
this
}
def suggestions(suggestions: SuggestionDefinition*): SearchDefinition = {
suggestions.foreach(_builder addSuggestion _.builder)
this
}
/** Adds a single prefix query to this search
*
* @param tuple - the field and prefix value
*
* @return this
*/
def prefix(tuple: (String, Any)) = {
val q = new PrefixQueryDefinition(tuple._1, tuple._2)
_builder.setQuery(q.builder.buildAsBytes)
this
}
/** Adds a single regex query to this search
*
* @param tuple - the field and regex value
*
* @return this
*/
def regex(tuple: (String, Any)) = {
val q = new RegexQueryDefinition(tuple._1, tuple._2)
_builder.setQuery(q.builder.buildAsBytes)
this
}
def term(tuple: (String, Any)) = {
val q = new TermQueryDefinition(tuple._1, tuple._2)
_builder.setQuery(q.builder.buildAsBytes)
this
}
def range(field: String) = {
val q = new RangeQueryDefinition(field)
_builder.setQuery(q.builder.buildAsBytes)
this
}
/** Expects a query in json format and sets the query of the search request.
* Query must be valid json beginning with '{' and ending with '}'.
* Field names must be double quoted.
*
* Example:
* {{{
* search in "*" types("users", "tweets") limit 5 rawQuery {
* """{ "prefix": { "bands": { "prefix": "coldplay", "boost": 5.0, "rewrite": "yes" } } }"""
* } searchType SearchType.Scan
* }}}
*/
def rawQuery(json: String): SearchDefinition = {
_builder.setQuery(json)
this
}
def highlighting(options: HighlightOptionsDefinition, highlights: HighlightDefinition*) = {
options._encoder.foreach(encoder => _builder.setHighlighterEncoder(encoder.elastic))
options._tagSchema.foreach(arg => _builder.setHighlighterTagsSchema(arg.elastic))
options._order.foreach(arg => _builder.setHighlighterOrder(arg.elastic))
_builder.setHighlighterPostTags(options._postTags: _*)
_builder.setHighlighterPreTags(options._preTags: _*)
_builder.setHighlighterRequireFieldMatch(options._requireFieldMatch)
highlights.foreach(highlight => _builder.addHighlightedField(highlight.builder))
this
}
def highlighting(highlights: HighlightDefinition*): SearchDefinition = {
highlights.foreach(highlight => _builder.addHighlightedField(highlight.builder))
this
}
def routing(r: String): SearchDefinition = {
_builder.setRouting(r)
this
}
def start(i: Int): SearchDefinition = from(i)
def from(i: Int): SearchDefinition = {
_builder.setFrom(i)
this
}
def limit(i: Int): SearchDefinition = size(i)
def size(i: Int): SearchDefinition = {
_builder.setSize(i)
this
}
def preference(pref: Preference): SearchDefinition = preference(pref.elastic)
def preference(pref: String): SearchDefinition = {
_builder.setPreference(pref)
this
}
def rescore(rescore: RescoreDefinition): SearchDefinition = {
_builder.setRescoreWindow(rescore.windowSize)
_builder.setRescorer(rescore.builder)
this
}
def scroll(keepAlive: String): SearchDefinition = {
_builder.setScroll(keepAlive)
this
}
def searchType(searchType: SearchType) = {
_builder.setSearchType(searchType.elasticType)
this
}
def version(enabled: Boolean): SearchDefinition = {
_builder.setVersion(enabled)
this
}
def indexBoost(map: Map[String, Double]): SearchDefinition = indexBoost(map.toList: _*)
def indexBoost(tuples: (String, Double)*): SearchDefinition = {
tuples.foreach(arg => _builder.addIndexBoost(arg._1, arg._2.toFloat))
this
}
def explain(enabled: Boolean): SearchDefinition = {
_builder.setExplain(enabled)
this
}
def minScore(score: Double): SearchDefinition = {
_builder.setMinScore(score.toFloat)
this
}
def timeout(duration: FiniteDuration): this.type = {
_builder.setTimeout(TimeValue.timeValueMillis(duration.toMillis))
this
}
def stats(groups: String*): this.type = {
_builder.setStats(groups: _*)
this
}
def trackScores(enabled: Boolean): SearchDefinition = {
_builder.setTrackScores(enabled)
this
}
def types(types: String*): SearchDefinition = {
_builder.setTypes(types: _*)
this
}
def fields(fields: String*): SearchDefinition = {
_builder.addFields(fields: _*)
this
}
def fetchSource(fetch: Boolean): SearchDefinition = {
_builder.setFetchSource(fetch)
this
}
def sourceInclude(includes: String*): this.type = {
this.includes = includes.toArray
_builder.setFetchSource(this.includes, this.excludes)
this
}
def sourceExclude(excludes: String*): this.type = {
this.excludes = excludes.toArray
_builder.setFetchSource(this.includes, this.excludes)
this
}
}
| l15k4/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/SearchDsl.scala | Scala | apache-2.0 | 10,918 |
package is.hail.types.virtual
import is.hail.annotations.ExtendedOrdering
import is.hail.types.physical.PVoid
case object TVoid extends Type {
override def _toPretty = "Void"
override def pyString(sb: StringBuilder): Unit = {
sb.append("void")
}
def mkOrdering(missingEqual: Boolean): ExtendedOrdering = null
override def scalaClassTag: scala.reflect.ClassTag[_ <: AnyRef] = throw new UnsupportedOperationException("No ClassTag for Void")
override def _typeCheck(a: Any): Boolean = throw new UnsupportedOperationException("No elements of Void")
override def isRealizable = false
}
| hail-is/hail | hail/src/main/scala/is/hail/types/virtual/TVoid.scala | Scala | mit | 606 |
package scala.meta
package internal
package prettyprinters
import scala.meta.internal.ast.{Quasi, Origin}
import scala.meta.dialects.{Scala211, QuasiquoteTerm}
import scala.meta.prettyprinters.Options.Lazy
object TreeToString {
def apply(tree: Tree) = {
val dialect = tree.origin match {
case Origin.Parsed(_, dialect, _) => dialect
case Origin.None if tree.isInstanceOf[Quasi] => QuasiquoteTerm(Scala211, multiline = true)
case Origin.None => Scala211 // this dialect is as good as any as a default
}
val prettyprinter = TreeSyntax[Tree](dialect, Lazy)
val code = prettyprinter(tree).toString
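    // Quasis are rendered verbatim; a primary constructor gets a "def this" prefix so
    // its printed parameter list reads as a constructor definition.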
tree match {
case _: Quasi => code
case Ctor.Primary(_, name, _) => s"def this$code"
case _ => code
}
}
}
| Dveim/scalameta | scalameta/trees/src/main/scala/scala/meta/internal/prettyprinters/TreeToString.scala | Scala | bsd-3-clause | 766 |
package santa.ping
import net.minecraft.client.Minecraft
import net.minecraft.util.text.ITextComponent
import net.minecraft.util.text.Style
import net.minecraftforge.client.event.ClientChatReceivedEvent
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent
class PingEventHandler {
@SubscribeEvent
def onChatMessage(event: ClientChatReceivedEvent): Unit = {
val mc = Minecraft.getMinecraft
val component: ITextComponent = event.getMessage
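    // Plays the configured ping sound and re-styles the matched chat message
    // (colour, bold, italic, strikethrough, underline) from the user's config.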
def playSoundSendMessage(): Unit = {
mc.getSoundHandler.playSound(Ping.sound)
val style = new Style
if (Ping.config.customColor.nonEmpty) style.setColor(Ping.config.customColor.get)
style.setBold(Ping.config.bold)
style.setItalic(Ping.config.italic)
style.setStrikethrough(Ping.config.strikethrough)
style.setUnderlined(Ping.config.underline)
component.setStyle(style)
}
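    // Strip the chat prefix (e.g. "<sender>") and check, case-insensitively, whether
    // the local player's name or any configured alias appears in the remaining text.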
val player = mc.thePlayer
val name = player.getName
val text = component.getUnformattedText.toLowerCase.replaceFirst("<.+>", "")
val names: Array[String] = if (Ping.config.customNames.isEmpty) Array(name) else Ping.config.customNames.get :+ name
if (names.map(n => n.toLowerCase).exists(text.contains)) playSoundSendMessage()
}
} | elifoster/Ping | src/main/scala/santa/ping/PingEventHandler.scala | Scala | mit | 1,245 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.data.model.pojo
/**
 * An entity that can be enabled or disabled.
*
* @author chaostone
*/
trait Enabled {
/**
   * Whether this entity is enabled.
   *
   * @return true if enabled, false if disabled
*/
var enabled: Boolean = _
}
| beangle/data | model/src/main/scala/org/beangle/data/model/pojo/Enabled.scala | Scala | lgpl-3.0 | 938 |
package uk.co.morleydev.zander.client.test.spec.update
import uk.co.morleydev.zander.client.test.spec.{ResponseCodes, SpecTest}
import uk.co.morleydev.zander.client.util.using
import uk.co.morleydev.zander.client.test.gen.GenNative
import java.io.File
class UpdateWithExistingUpToDateArtefactsAndSourcesInCacheAndInstalledArtefactsThatAreUpToDateTests extends SpecTest {
override def noBuildTestCase(compiler : String, mode: String) = {
describe("Given the project/compiler endpoint exists and the cache already contains the source but no artefacts") {
describe("When up-to-date artefacts are installed and update is carried out for %s.%s".format(compiler, mode)) {
using(this.start()) {
testHarness =>
val artefactVersion = GenNative.genAlphaNumericString(10, 100)
val gitUrl = "http://git_url/request/at_me"
val expectedFiles = Seq[String]("include/" + GenNative.genAlphaNumericString(1, 20),
"include/sub_dir/" + GenNative.genAlphaNumericString(1, 20) + ".h",
"lib/" + GenNative.genAlphaNumericString(1, 20) + ".a",
"lib/subdir/" + GenNative.genAlphaNumericString(1, 20) + ".a",
"lib/" + GenNative.genAlphaNumericString(1, 20) + ".dll",
"lib/" + GenNative.genAlphaNumericString(1, 20) + ".so",
"lib/" + GenNative.genAlphaNumericString(1, 20) + ".so.12.2",
"lib/subdir2/" + GenNative.genAlphaNumericString(1, 20) + ".so",
"lib/subdir2/" + GenNative.genAlphaNumericString(1, 20) + ".so.12.32",
"lib/subdir2/" + GenNative.genAlphaNumericString(1, 20) + ".dll",
"bin/" + GenNative.genAlphaNumericString(1, 20) + ".dll",
"bin/" + GenNative.genAlphaNumericString(1, 20) + ".so",
"bin/" + GenNative.genAlphaNumericString(1, 20) + ".so.12.25.a",
"bin/subdir/" + GenNative.genAlphaNumericString(1, 20) + ".dll",
"bin/subdir2/" + GenNative.genAlphaNumericString(1, 20) + ".so",
"bin/subdir/" + GenNative.genAlphaNumericString(1, 20) + ".so.12.25.a")
.map(s => new File(s).toString)
testHarness
.givenAServer()
.givenFullGitPipelineIsPossible(artefactVersion, isUpdate = true)
.whenUpdating(compiler = compiler, mode = mode)
.whenTheCacheAlreadyContainsTheSourceCode()
.whenTheCacheAlreadyContainsArtefacts(artefactVersion, expectedFiles)
.whenTheArtefactsAreLocallyInstalled(artefactVersion, expectedFiles)
.expectSuccessfulRequest(gitUrl)
.invokeMain()
.thenTheExpectedServerRequestsWereHandled()
.thenAGitUpdateWasInvoked()
.thenAGitCheckoutWasInvoked()
.thenTheGitVersionWasRetrieved()
.thenExpectedResponseCodeWasReturned(ResponseCodes.Success)
.thenTheLocalArtefactsWereTaggedWithTheExpectedVersion(artefactVersion)
.thenTheLocalArtefactsWereTaggedWithTheExpectedFiles(expectedFiles)
.thenTheExpectedFilesWereInstalledLocally(expectedFiles)
}
}
}
}
runSingleNoBuildCases()
}
| MorleyDev/zander.client | src/test/scala/uk/co/morleydev/zander/client/test/spec/update/UpdateWithExistingUpToDateArtefactsAndSourcesInCacheAndInstalledArtefactsThatAreUpToDateTests.scala | Scala | mit | 3,228 |
package io.vamp.common.json
import org.json4s.JsonAST.{JObject, JString}
import org.json4s._
object ThrowableSerializer {
def apply(message: Option[String]): SerializationFormat = new SerializationFormat {
override def customSerializers = super.customSerializers :+ new ThrowableSerializer(message)
}
}
class ThrowableSerializer(message: Option[String] = None) extends Serializer[Throwable] {
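  // Serialises a Throwable as { "message": ... }, preferring the explicit override
  // message when one is supplied; deserialisation is intentionally unsupported.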
override def serialize(implicit format: Formats): PartialFunction[Any, JValue] = {
case t: Throwable => new JObject(List(JField("message", JString(message.getOrElse(t.getMessage)))))
}
override def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), Throwable] = throw new UnsupportedOperationException()
}
| BanzaiMan/vamp-common | src/main/scala/io/vamp/common/json/ThrowableSerializer.scala | Scala | apache-2.0 | 748 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.api.r
import java.io.File
import java.util.Arrays
import org.apache.spark.{SparkEnv, SparkException}
import org.apache.spark.api.java.JavaSparkContext
import org.apache.spark.api.python.PythonUtils
private[spark] object RUtils {
// Local path where R binary packages built from R source code contained in the spark
// packages specified with "--packages" or "--jars" command line option reside.
var rPackages: Option[String] = None
/**
* Get the SparkR package path in the local spark distribution.
*/
def localSparkRPackagePath: Option[String] = {
val sparkHome = sys.env.get("SPARK_HOME").orElse(sys.props.get("spark.test.home"))
sparkHome.map(
Seq(_, "R", "lib").mkString(File.separator)
)
}
/**
* Check if SparkR is installed before running tests that use SparkR.
*/
def isSparkRInstalled: Boolean = {
localSparkRPackagePath.filter { pkgDir =>
new File(Seq(pkgDir, "SparkR").mkString(File.separator)).exists
}.isDefined
}
/**
* Get the list of paths for R packages in various deployment modes, of which the first
* path is for the SparkR package itself. The second path is for R packages built as
* part of Spark Packages, if any exist. Spark Packages can be provided through the
* "--packages" or "--jars" command line options.
*
* This assumes that Spark properties `spark.master` and `spark.submit.deployMode`
* and environment variable `SPARK_HOME` are set.
*/
def sparkRPackagePath(isDriver: Boolean): Seq[String] = {
val (master, deployMode) =
if (isDriver) {
(sys.props("spark.master"), sys.props("spark.submit.deployMode"))
} else {
val sparkConf = SparkEnv.get.conf
(sparkConf.get("spark.master"), sparkConf.get("spark.submit.deployMode", "client"))
}
val isYarnCluster = master != null && master.contains("yarn") && deployMode == "cluster"
val isYarnClient = master != null && master.contains("yarn") && deployMode == "client"
// In YARN mode, the SparkR package is distributed as an archive symbolically
// linked to the "sparkr" file in the current directory and additional R packages
// are distributed as an archive symbolically linked to the "rpkg" file in the
// current directory.
//
// Note that this does not apply to the driver in client mode because it is run
// outside of the cluster.
if (isYarnCluster || (isYarnClient && !isDriver)) {
val sparkRPkgPath = new File("sparkr").getAbsolutePath
val rPkgPath = new File("rpkg")
if (rPkgPath.exists()) {
Seq(sparkRPkgPath, rPkgPath.getAbsolutePath)
} else {
Seq(sparkRPkgPath)
}
} else {
// Otherwise, assume the package is local
val sparkRPkgPath = localSparkRPackagePath.getOrElse {
throw new SparkException("SPARK_HOME not set. Can't locate SparkR package.")
}
if (!rPackages.isEmpty) {
Seq(sparkRPkgPath, rPackages.get)
} else {
Seq(sparkRPkgPath)
}
}
}
/** Check if R is installed before running tests that use R commands. */
def isRInstalled: Boolean = {
try {
val builder = new ProcessBuilder(Arrays.asList("R", "--version"))
builder.start().waitFor() == 0
} catch {
case e: Exception => false
}
}
def getEncryptionEnabled(sc: JavaSparkContext): Boolean = PythonUtils.getEncryptionEnabled(sc)
}
| ahnqirage/spark | core/src/main/scala/org/apache/spark/api/r/RUtils.scala | Scala | apache-2.0 | 4,244 |
package io.github.mandar2812.dynaml.graphics.charts.repl
import unfiltered.request._
import unfiltered.response._
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Promise}
/**
* User: austin
* Date: 12/1/14
*
* An unfiltered web-app for displaying graphs
*/
class PlotServer extends UnfilteredWebApp[UnfilteredWebApp.Arguments] {
// this is fulfilled by the plot command, to allow a browser to wait for plot to reload
var p = Promise[Unit]()
private class WebApp extends unfiltered.filter.Plan {
def intent = {
// handle jsonp
case req @ GET(Path(Seg("check" :: Nil)) & Params(params)) =>
implicit val responder = req
val str = """[]"""
val response = params.get("callback") match {
case Some(v) =>
val callbackName = v.head
s"$callbackName($str)"
case _ => str
}
// block for plot command to fulfill promise, and release this result to trigger browser reload
Await.result(p.future, Duration.Inf)
JsonContent ~> ResponseString(response)
case _ => Pass
}
}
def parseArgs(args: Array[String]) = {
val parsed = new UnfilteredWebApp.Arguments{}
parsed.parse(args)
parsed
}
def setup(parsed: UnfilteredWebApp.Arguments): unfiltered.filter.Plan = {
new WebApp
}
def htmlRoot: String = "/"
}
| transcendent-ai-labs/DynaML | dynaml-core/src/main/scala/io/github/mandar2812/dynaml/graphics/charts/repl/PlotServer.scala | Scala | apache-2.0 | 1,382 |
package com.typesafe.sbt.dart
import sbt._
import com.typesafe.sbt.web.SbtWeb
import com.typesafe.sbt.web.SbtWeb._
import com.typesafe.sbt.web.pipeline.Pipeline
import sbt.Keys.baseDirectory
import sbt.Keys.unmanagedResourceDirectories
import sbt.Keys.resourceGenerators
import sbt.Keys.resourceDirectories
import sbt.Keys.state
import play.PlayRunHook
import java.net.InetSocketAddress
import play.Play.autoImport._
import PlayKeys._
import com.typesafe.sbt.web.Import._
import com.typesafe.sbt.packager.universal.Keys._
//import play.twirl.sbt.Import._
object Import {
object DartKeys {
val dart2js = TaskKey[Unit]("dart2js", "Build dart js applications.")
val dartWeb = SettingKey[File]("dart-web", "Dart web directory.")
}
}
object SbtDart extends Plugin with DartProcessor {
val autoImport = Import
import SbtWeb.autoImport._
import WebKeys._
import autoImport.DartKeys._
override def projectSettings: Seq[Setting[_]] = Seq(
dart2js := {
runPub(baseDirectory.value, (target in dart2js).value.absolutePath, List()).get.exitValue()
},
dist <<= dist dependsOn (dart2js),
stage <<= stage dependsOn (dart2js),
target in dart2js := webTarget.value / dart2js.key.label,
unmanagedResourceDirectories in Assets <+= (target in dart2js)(base => base / "web"),
playRunHooks <+= (baseDirectory, target in dart2js).map { (base, output) => Pub(base, output.absolutePath) },
resourceDirectories in Assets <+= baseDirectory / "web",
dartWeb in Assets := baseDirectory.value / "web",
resourceGenerators in Assets <+= dart2jsCompiler)
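  /** Pipeline stage that collects every file under the Dart web directory, mapped relative to that directory. */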
def dartSources: Def.Initialize[Task[Pipeline.Stage]] = Def.task {
mappings =>
println("Dart sources copy")
val build = (target in dart2js).value
val src = (dartWeb in dart2js).value
val watch = (base: File) => base ** "*"
watch(src).get.map(f => (f, f.relativeTo(src))).filter {
case (f, Some(r)) => true
case _ => false
} map (e => (e._1, e._2.get.toString()))
}
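  /** Pipeline stage that runs `pub build` into the dart2js target directory and maps the built files relative to its `web` subdirectory. */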
def dartbuiltFiles: Def.Initialize[Task[Pipeline.Stage]] = Def.task {
mappings =>
println("Dart2js ...")
val build = (target in dart2js).value
runCommand(pubExePath + " build --output " + build.absolutePath)
val watch = (base: File) => base ** "*"
val o = watch(build);
val oo = o.get
val ret = oo.map(f => (f, f.relativeTo(build / "web"))).filter {
case (f, Some(r)) => true
case _ => false
} map (e => (e._1, e._2.get.toString()))
ret
}
private def runPub(base: sbt.File, output: String, args: List[String]) = {
println(s"Will run: pub --output=$output $args in ${base.getPath}")
if (System.getProperty("os.name").startsWith("Windows")) {
val process: ProcessBuilder = Process("cmd" :: "/c" :: "pub.exe" :: "build" :: "--output=" + output :: args, base)
println(s"Will run: ${process.toString} in ${base.getPath}")
Some(process.run)
} else {
val process: ProcessBuilder = Process("pub" :: "build" :: "--output=" + output :: args, base)
println(s"Will run: ${process.toString} in ${base.getPath}")
Some(process.run)
}
}
object Pub {
def apply(base: File, output: String): PlayRunHook = {
object PubProcess extends PlayRunHook {
var process: Option[Process] = None
override def afterStarted(addr: InetSocketAddress): Unit = {
process = runPub(base, output, Nil)
}
override def afterStopped(): Unit = {
process.map(p => p.destroy())
process = None
}
}
PubProcess
}
}
def Dart2jsCompiler(name: String,
watch: File => PathFinder,
proc: DartProcessor): sbt.Def.Initialize[sbt.Task[Seq[java.io.File]]] = {
(state, baseDirectory, dartWeb in Assets, target in dart2js) map { (state, base, web, dst) =>
{
(watch(web) filter (f => f != web) x relativeTo(Seq(base))).flatMap {
case (f, n) =>
val target = dst / n
if (f.isDirectory())
List()
else {
IO.copyFile(f, target, true)
List(target)
}
}
}
}
}
val dart2jsCompiler = Dart2jsCompiler("dart" + "-js-compiler",
src => (src ** "*.dart"),
null)
}
| cheleb/sbt-dart | src/main/scala/com/typesafe/sbt/dart/SbtDart.scala | Scala | apache-2.0 | 4,324 |
package aia.state
import akka.agent.Agent
import akka.actor.ActorSystem
import concurrent.Await
import concurrent.duration._
import akka.util.Timeout
//import concurrent.ExecutionContext.Implicits.global
case class BookStatistics(val nameBook: String, nrSold: Int)
case class StateBookStatistics(val sequence: Long,
books: Map[String, BookStatistics])
class BookStatisticsMgr(system: ActorSystem) {
implicit val ex = system.dispatcher //todo: change chapter 2.2 =>2.3
val stateAgent = Agent(new StateBookStatistics(0, Map())) //todo: change chapter 2.2 =>2.3
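  /** Updates the statistics asynchronously: `send` dispatches the update to the agent without waiting for a result. */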
def addBooksSold(book: String, nrSold: Int): Unit = {
stateAgent send (oldState => {
val bookStat = oldState.books.get(book) match {
case Some(bookState) =>
bookState.copy(nrSold = bookState.nrSold + nrSold)
case None => new BookStatistics(book, nrSold)
}
oldState.copy(oldState.sequence + 1,
oldState.books + (book -> bookStat))
})
}
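  /** Updates the statistics with `alter`, which returns a Future of the new state, and blocks for up to 1 second to return it. */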
def addBooksSoldAndReturnNewState(book: String,
nrSold: Int): StateBookStatistics = {
val future = stateAgent alter (oldState => {
val bookStat = oldState.books.get(book) match {
case Some(bookState) =>
bookState.copy(nrSold = bookState.nrSold + nrSold)
case None => new BookStatistics(book, nrSold)
}
oldState.copy(oldState.sequence + 1,
oldState.books + (book -> bookStat))
})
Await.result(future, 1 second)
}
def getStateBookStatistics(): StateBookStatistics = {
stateAgent.get()
}
}
| RayRoestenburg/akka-in-action | chapter-state/src/main/scala/aia/state/Agent.scala | Scala | mit | 1,597 |
package debop4s.rediscala.spring
import java.util.concurrent.TimeUnit
import debop4s.core.concurrent._
import debop4s.rediscala.serializer.SnappyFstValueFormatter
import org.slf4j.LoggerFactory
import org.springframework.cache.Cache
import org.springframework.cache.Cache.ValueWrapper
import org.springframework.cache.support.SimpleValueWrapper
import redis.RedisClient
import redis.api.Limit
import scala.async.Async._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration
import scala.util.control.NonFatal
/**
 * A Spring Cache implementation that uses Redis as the cache server.
* Created by debop on 2014. 2. 22.
*/
class RedisCache(val name: String,
prefix: String,
val redis: RedisClient,
expiration: Long = 0) extends Cache {
private lazy val log = LoggerFactory.getLogger(getClass)
log.info(s"Create RedisCache name=$name, prefix=$prefix, expiration=$expiration, redis=$redis")
  // Cache values are compressed with Snappy and serialized with FST.
implicit val valueFormatter = new SnappyFstValueFormatter[Any]()
  // Default timeout for waiting on Redis responses.
implicit val timeout = FiniteDuration(30, TimeUnit.SECONDS)
  // Expirations of cache keys are managed separately in a sorted set.
val setName = s"spring:cache:keys:$name"
  // A hash is used to store the cache values.
val itemName = s"spring:cache:items:$name"
  // Lock key used while clearing the cache so that invalid data can be removed safely.
val cacheLockName = s"cache:lock:$name"
var waitTimeoutForLock = 5 // msec
override def getNativeCache: AnyRef = redis
override def getName: String = name
  /** Retrieves a cache entry stored in the Redis hash. */
def get(key: Any): ValueWrapper = {
val keyStr = computeKey(key)
    log.trace(s"Get cache entry. redis hashset=$itemName, field=$keyStr")
waitForLock(redis)
redis.hget[Any](itemName, keyStr).map(_.fold(null.asInstanceOf[ValueWrapper])(v => new SimpleValueWrapper(v))).await
// redis.hget[Any](itemName, keyStr).await
// .fold(null.asInstanceOf[ValueWrapper]) {
// value => new SimpleValueWrapper(value)
// }
}
  /** Retrieves a cache entry stored in the Redis hash. */
def get[@miniboxed T](key: Any, clazz: Class[T]): T = {
val keyStr = computeKey(key)
    log.trace(s"Get cache entry. redis hashset=$itemName, field=$keyStr, clazz=${ clazz.getSimpleName }")
waitForLock(redis)
redis.hget[Any](itemName, keyStr).map(_.orNull.asInstanceOf[T]).await
}
/**
   * Stores the cache value in the hash and, to manage expiration,
   * stores the expiration info separately in the sorted set.
*/
private def putAsync(key: Any, value: Any): Future[Boolean] = {
val keyStr = computeKey(key)
    log.trace(s"Storing Spring cache entry. redis hashset=$itemName, field=$keyStr, value=$value")
waitForLock(redis)
val f: Future[Boolean] = async {
      // store the cache value
val r = await(redis.hset(itemName, keyStr, value))
      // store the expiration of the cache value
if (expiration > 0) {
await(redis.zadd(setName, (System.currentTimeMillis() + expiration, keyStr)))
}
r
}
f onFailure { case e => log.error(s"cannot put $key", e) }
f
}
override def put(key: Any, value: Any): Unit = {
putAsync(key, value).stay
}
  /** If there is no cached value for the key, stores the given value; returns the existing cached value. */
override def putIfAbsent(key: scala.Any, value: scala.Any): ValueWrapper = {
val result = get(key)
if (result == null) {
putAsync(key, value)
}
result
}
/**
   * Evicts a cache entry.
*/
override def evict(key: Any) {
val keyStr = computeKey(key)
    log.trace(s"Evicting cache entry. redis hashset=$itemName, field=$keyStr")
async {
await(redis.hdel(itemName, keyStr))
await(redis.zrem(setName, keyStr))
}.stay
}
/**
   * Removes all cache entries.
*/
override def clear() {
try {
doClear()
} catch {
case NonFatal(e) =>
        log.warn(s"Failed to clear the Spring cache. name=$name, hashset=$itemName", e)
}
}
/**
   * Finds and deletes cache entries whose expiration time has passed.
*/
def deleteExpiredItems(): Unit = {
try {
val expireTime = System.currentTimeMillis()
async {
val keys = await(redis.zrangebyscore[String](setName, Limit(0.0), Limit(expireTime)))
if (keys.nonEmpty) {
          log.trace(s"Deleting expired cache keys. keys=$keys")
await { redis.hdel(itemName, keys: _*) }
await { redis.zrem(setName, keys: _*) }
}
}.stay
} catch {
case NonFatal(e) =>
        log.warn(s"Failed to delete expired cache items.", e)
}
}
/**
   * Deletes all cache data currently held for this cache.
*/
private def doClear() {
    log.trace(s"Clearing the whole Spring cache... name=$name, itemName=$itemName, setName=$setName")
    // If another lock is currently held, cancel the operation.
if (redis.exists(cacheLockName).await)
return
try {
      // Acquire the lock so that no other clear operation can run concurrently.
redis.set(cacheLockName, cacheLockName)
async {
await(redis.del(itemName))
await(redis.del(setName))
        log.debug(s"Cleared the whole Spring cache. name=$name, itemName=$itemName, setName=$setName")
}.stay
} finally {
      log.trace(s"Releasing the lock. lock=$cacheLockName")
redis.del(cacheLockName).await
}
}
def computeKey(key: Any): String = {
prefix + key.toString
}
/**
   * While cache entries are being cleared a lock is held and the cache cannot be used;
   * callers must wait until the lock is released.
*/
private def waitForLock(redis: RedisClient): Boolean = {
var retry = false
var foundLock = false
do {
retry = false
if (redis.exists(cacheLockName).await) {
foundLock = true
try {
Thread.sleep(waitTimeoutForLock)
} catch {
case ignored: InterruptedException =>
case NonFatal(e) =>
            throw new RuntimeException(s"An exception occurred while waiting for the Spring cache lock to be released. name=$name", e)
}
retry = true
}
} while (retry)
foundLock
}
}
object RedisCache {
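  // ActorSystem required by rediscala's RedisClient.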
implicit val akkaSystem = akka.actor.ActorSystem()
def apply(name: String, prefix: String, redis: RedisClient, expiration: Long): RedisCache =
new RedisCache(name, prefix, redis, expiration)
def apply(name: String,
prefix: String,
expiration: Long = 0,
            host: String = "localhost",
port: Int = 6379,
database: Option[Int] = Some(0)): RedisCache = {
val redis = RedisClient(host, port, db = database)
new RedisCache(name, prefix, redis, expiration)
}
}
| debop/debop4s | debop4s-rediscala/src/main/scala/debop4s/rediscala/spring/RedisCache.scala | Scala | apache-2.0 | 7,333 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.utils
import org.apache.flink.table.planner.plan.metadata.FlinkRelMetadataQuery
import org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram
import org.apache.calcite.rel.RelFieldCollation.Direction
import org.apache.calcite.rel.{RelCollation, RelNode}
import org.apache.calcite.sql.validate.SqlMonotonicity
import org.apache.calcite.util.ImmutableBitSet
import org.apache.flink.table.planner.plan.nodes.physical.stream.StreamPhysicalRel
import scala.collection.JavaConversions._
/**
* Base class of Strategy to choose different rank process function.
*/
sealed trait RankProcessStrategy
/**
* A placeholder strategy which will be inferred after [[FlinkChangelogModeInferenceProgram]]
*/
case object UndefinedStrategy extends RankProcessStrategy
/**
* A strategy which only works when input only contains insertion changes
*/
case object AppendFastStrategy extends RankProcessStrategy
/**
* A strategy which works when input contains update or deletion changes
*/
case object RetractStrategy extends RankProcessStrategy
/**
* A strategy which only works when input shouldn't contains deletion changes and input should
* have the given [[primaryKeys]] and should be monotonic on the order by field.
*/
case class UpdateFastStrategy(primaryKeys: Array[Int]) extends RankProcessStrategy {
override def toString: String = "UpdateFastStrategy" + primaryKeys.mkString("[", ",", "]")
}
object RankProcessStrategy {
/**
* Gets [[RankProcessStrategy]] based on input, partitionKey and orderKey.
*/
def analyzeRankProcessStrategies(
rank: StreamPhysicalRel,
partitionKey: ImmutableBitSet,
orderKey: RelCollation): Seq[RankProcessStrategy] = {
val mq = rank.getCluster.getMetadataQuery
val fieldCollations = orderKey.getFieldCollations
val isUpdateStream = !ChangelogPlanUtils.inputInsertOnly(rank)
val input = rank.getInput(0)
if (isUpdateStream) {
val uniqueKeys = mq.getUniqueKeys(input)
if (uniqueKeys == null || uniqueKeys.isEmpty
        // unique key should contain the partition key
|| !uniqueKeys.exists(k => k.contains(partitionKey))) {
// and we fall back to using retract rank
Seq(RetractStrategy)
} else {
val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
val monotonicity = fmq.getRelModifiedMonotonicity(input)
val isMonotonic = if (monotonicity == null) {
false
} else {
if (fieldCollations.isEmpty) {
false
} else {
fieldCollations.forall { collation =>
val fieldMonotonicity = monotonicity.fieldMonotonicities(collation.getFieldIndex)
val direction = collation.direction
if ((fieldMonotonicity == SqlMonotonicity.DECREASING
|| fieldMonotonicity == SqlMonotonicity.STRICTLY_DECREASING)
&& direction == Direction.ASCENDING) {
// sort field is ascending and its monotonicity is decreasing
true
} else if ((fieldMonotonicity == SqlMonotonicity.INCREASING
|| fieldMonotonicity == SqlMonotonicity.STRICTLY_INCREASING)
&& direction == Direction.DESCENDING) {
// sort field is descending and its monotonicity is increasing
true
} else if (fieldMonotonicity == SqlMonotonicity.CONSTANT) {
// sort key is a grouping key of upstream agg, it is monotonic
true
} else {
false
}
}
}
}
if (isMonotonic) {
//FIXME choose a set of primary key
Seq(UpdateFastStrategy(uniqueKeys.iterator().next().toArray), RetractStrategy)
} else {
Seq(RetractStrategy)
}
}
} else {
Seq(AppendFastStrategy)
}
}
}
| hequn8128/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/RankProcessStrategy.scala | Scala | apache-2.0 | 4,755 |
package amailp.intellij.robot.psi.manipulator
import com.intellij.psi.{PsiElement, AbstractElementManipulator}
import com.intellij.openapi.util.TextRange
import amailp.intellij.robot.psi
import amailp.intellij.robot.psi.utils.ExtRobotPsiUtils
class ResourceValue extends AbstractElementManipulator[psi.ResourceValue] {
override def handleContentChange(element: psi.ResourceValue, range: TextRange, newContent: String): psi.ResourceValue = {
val newElement = new ExtRobotPsiUtils {
def utilsPsiElement: PsiElement = element
}.createResourceValue(newContent)
element.getNode.getTreeParent.replaceChild(element.getNode, newElement.getNode)
element
}
}
| puhnastik/robot-plugin | src/main/scala/amailp/intellij/robot/psi/manipulator/ResourceValue.scala | Scala | gpl-3.0 | 677 |
package controllers
import javax.inject.Inject
import play.api.mvc.{ControllerComponents, AbstractController}
class ApplicationController @Inject() (cc: ControllerComponents) extends AbstractController(cc) {
def index = Action {
Ok("OK!")
}
}
| futurice/minimal-play2 | app/controllers/ApplicationController.scala | Scala | mit | 253 |
package com.rainysoft.decisiontree.tree
import scala.collection.immutable.Map
/** Singleton to classify a sample.
*
*/
object Classifier {
/** Classifies a sample given a decision tree.
*
*/
def classify[A,B](sample: Map[String, A], tree: Tree[A,B]): B = {
tree match {
case Leaf(value) =>
return value
case SubTree(attribute, branches) =>
val sampleAttributeValue = sample(attribute)
val subTree = branches(sampleAttributeValue)
return classify[A,B](sample, subTree)
}
}
}
| MikaelUmaN/DecisionTree | src/main/scala/com/rainysoft/decisiontree/tree/Classifier.scala | Scala | mit | 543 |
package offGridOrcs
object BlueprintLibrary {
val Headquarters = {
val o = None
val c = Some(Blueprint.Element.Clearing())
val s = Some(Blueprint.Element.Stockpile())
val Z = Some(Blueprint.Element.Building())
val D = Some(Blueprint.Element.Decal())
Blueprint.build("HQ", 6, 7)(
o, c, c, c, c, c, o,
c, c, Z, Z, Z, c, c,
c, Z, Z, Z, Z, Z, c,
c, Z, Z, D, Z, Z, c,
s, Z, Z, c, Z, Z, c,
c, c, Z, c, Z, c, c,
o, c, c, c, c, c, o)
}
val Home = {
val o = None
val c = Some(Blueprint.Element.Clearing())
val s = Some(Blueprint.Element.Stockpile())
val Z = Some(Blueprint.Element.Building())
val D = Some(Blueprint.Element.Decal())
Blueprint.build("HOME", 4, 7)(
o, c, c, c, c, c, o,
c, c, Z, Z, Z, c, c,
c, Z, Z, Z, Z, Z, c,
s, Z, Z, Z, Z, Z, c,
c, c, Z, D, Z, c, c,
o, c, c, c, c, c, o,
o, o, o, o, o, o, o)
}
}
| dcecile/off-grid-orcs | src/BlueprintLibrary.scala | Scala | mit | 948 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package dotty.tools
package dotc
import core.Contexts.Context
import reporting.Reporter
/* To do:
*/
object Main extends Driver {
def resident(compiler: Compiler): Reporter = unsupported("resident") /*loop { line =>
val command = new CompilerCommand(line split "\\s+" toList, new Settings(scalacError))
compiler.reporter.reset()
new compiler.Run() compile command.files
}*/
override def newCompiler(): Compiler = new Compiler
override def doCompile(compiler: Compiler, fileNames: List[String])(implicit ctx: Context): Reporter = {
if (new config.Settings.Setting.SettingDecorator[Boolean](ctx.base.settings.resident).value(ctx))
resident(compiler)
else
super.doCompile(compiler, fileNames)
}
}
| AlexSikia/dotty | src/dotty/tools/dotc/Main.scala | Scala | bsd-3-clause | 834 |
package japgolly.scalajs.react.internal
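/**
 * A profunctor: contravariant in its first type parameter (via `lmap`)
 * and covariant in its second (via `rmap`).
 */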
trait Profunctor[F[_, _]] {
def lmap[A, B, C](f: F[A, B])(m: C => A): F[C, B]
def rmap[A, B, C](f: F[A, B])(m: B => C): F[A, C]
def dimap[A, B, C, D](f: F[A, B])(l: C => A, r: B => D): F[C, D] =
rmap(lmap(f)(l))(r)
}
object Profunctor {
final class Ops[F[_, _], A, B](private val f: F[A, B]) extends AnyVal {
@inline def lmap[C](m: C => A)(implicit p: Profunctor[F]): F[C, B] =
p.lmap(f)(m)
@inline def rmap[C](m: B => C)(implicit p: Profunctor[F]): F[A, C] =
p.rmap(f)(m)
@inline def dimap[C, D](l: C => A, r: B => D)(implicit p: Profunctor[F]): F[C, D] =
p.dimap(f)(l, r)
}
object Ops {
@inline implicit def toProfunctorOps[F[_, _], A, B](f: F[A, B]): Ops[F, A, B] =
new Ops(f)
}
}
| japgolly/scalajs-react | coreGeneric/src/main/scala/japgolly/scalajs/react/internal/Profunctor.scala | Scala | apache-2.0 | 788 |
/*
* Copyright (c) 2012-2014 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
import sbt._
object Dependencies {
val resolutionRepos = Seq(
// Required for our Scalaz snapshot
"Sonatype Snapshots" at "https://oss.sonatype.org/content/repositories/releases/",
// For some misc Scalding and Twitter libs
"Concurrent Maven Repo" at "http://conjars.org/repo",
// For Snowplow libs
"Snowplow Analytics Maven repo" at "http://maven.snplow.com/releases/",
"Snowplow Analytics Maven snapshot repo" at "http://maven.snplow.com/snapshots/",
// For user-agent-utils
"user-agent-utils repo" at "https://raw.github.com/HaraldWalker/user-agent-utils/mvn-repo/"
)
object V {
// Java
val hadoop = "1.2.1"
// Scala
val scalding = "0.11.1"
val scalaz7 = "7.0.0"
val snowplowRawEvent = "0.1.0"
val commonEnrich = "0.5.0"
// Scala (test only)
val specs2 = "1.14"
val scalazSpecs2 = "0.1.2"
val commonsCodec = "1.5"
}
object Libraries {
// Java
val hadoopCore = "org.apache.hadoop" % "hadoop-core" % V.hadoop % "provided"
// Scala
val scaldingCore = "com.twitter" %% "scalding-core" % V.scalding
val scaldingArgs = "com.twitter" %% "scalding-args" % V.scalding
val scalaz7 = "org.scalaz" %% "scalaz-core" % V.scalaz7
val snowplowRawEvent = "com.snowplowanalytics" % "snowplow-thrift-raw-event" % V.snowplowRawEvent
val commonEnrich = "com.snowplowanalytics" % "snowplow-common-enrich" % V.commonEnrich
// Scala (test only)
val specs2 = "org.specs2" %% "specs2" % V.specs2 % "test"
val scalazSpecs2 = "org.typelevel" %% "scalaz-specs2" % V.scalazSpecs2 % "test"
val commonsCodec = "commons-codec" % "commons-codec" % V.commonsCodec % "test"
}
}
| 1974kpkpkp/snowplow | 3-enrich/scala-hadoop-enrich/project/Dependencies.scala | Scala | apache-2.0 | 2,712 |
package com.twitter.finagle.mysql.integration
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.client.DefaultPool
import com.twitter.util.{Await, Awaitable}
import org.scalatest.FunSuite
class AbortedClientTest extends FunSuite with IntegrationClient {
private def idleTime = 1.seconds
private[this] def await[T](t: Awaitable[T]): T = Await.result(t, 5.seconds)
override def configureClient(username: String, password: String, db: String) =
super
.configureClient(username, password, db)
// Configure the connection pool such that connections aren't kept around long.
.configured(
DefaultPool.Param(
// Don't keep any minimum of connections in the pool.
low = 0,
high = 100,
bufferSize = 0,
// Set idleTime to a short duration, so the connection pool will close old connections quickly.
idleTime = idleTime,
maxWaiters = 100
)
)
for (c <- client) {
test("MySql connections are closed cleanly, so MySql doesn't count them as aborted.") {
val abortedClientQuery = "SHOW GLOBAL STATUS LIKE '%Aborted_clients%'"
val initialAbortedValue: String =
await(c.select(abortedClientQuery)(row => row.stringOrNull("Value"))).head
val query = "SELECT '1' as ONE, '2' as TWO from information_schema.processlist;"
// Run a query so the mysql client gets used
await(c.select(query) { row =>
row("ONE").get
row("TWO").get
})
// Wait a bit longer than the idleTime so the connection used above is removed from the pool.
Thread.sleep((idleTime + 5.seconds).inMilliseconds)
await(c.select(abortedClientQuery) { row =>
val abortedValue = row.stringOrNull("Value")
assert(initialAbortedValue.toInt == abortedValue.toInt)
})
}
}
}
| luciferous/finagle | finagle-mysql/src/test/scala/com/twitter/finagle/mysql/integration/AbortedClientTest.scala | Scala | apache-2.0 | 1,871 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.carbondata.register
import java.io.{File, IOException}
import org.apache.commons.io.FileUtils
import org.apache.spark.sql.test.util.QueryTest
import org.apache.spark.sql.{AnalysisException, CarbonEnv, Row}
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.spark.exception.ProcessMetaDataException
/**
*
*/
class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
override def beforeAll {
sql("drop database if exists carbon cascade")
}
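  /** Restores a table's data directory from the "<dblocation>_back" backup location and removes the backup. */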
def restoreData(dblocation: String, tableName: String) = {
val destination = dblocation + CarbonCommonConstants.FILE_SEPARATOR + tableName
val source = dblocation+ "_back" + CarbonCommonConstants.FILE_SEPARATOR + tableName
try {
FileUtils.copyDirectory(new File(source), new File(destination))
FileUtils.deleteDirectory(new File(source))
} catch {
case e : Exception =>
throw new IOException("carbon table data restore failed.")
} finally {
}
}
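  /** Copies a table's data directory into the "<dblocation>_back" backup location. */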
def backUpData(dblocation: String, tableName: String) = {
val source = dblocation + CarbonCommonConstants.FILE_SEPARATOR + tableName
val destination = dblocation+ "_back" + CarbonCommonConstants.FILE_SEPARATOR + tableName
try {
FileUtils.copyDirectory(new File(source), new File(destination))
} catch {
case e : Exception =>
throw new IOException("carbon table data backup failed.")
}
}
test("register tables test") {
sql("drop database if exists carbon cascade")
sql(s"create database carbon location '$dblocation'")
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
backUpData(dblocation, "carbontable")
sql("drop table carbontable")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
restoreData(dblocation, "carbontable")
sql("refresh table carbontable")
checkAnswer(sql("select count(*) from carbontable"), Row(1))
checkAnswer(sql("select c1 from carbontable"), Seq(Row("a")))
}
}
test("register table test") {
sql("drop database if exists carbon cascade")
sql(s"create database carbon location '$dblocation'")
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
backUpData(dblocation, "carbontable")
sql("drop table carbontable")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
restoreData(dblocation, "carbontable")
sql("refresh table carbontable")
checkAnswer(sql("select count(*) from carbontable"), Row(1))
checkAnswer(sql("select c1 from carbontable"), Seq(Row("a")))
}
}
test("register pre aggregate tables test") {
sql("drop database if exists carbon cascade")
sql(s"create database carbon location '$dblocation'")
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'aa','aaa'")
sql("insert into carbontable select 'a',10,'aa','aaa'")
sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
backUpData(dblocation, "carbontable")
backUpData(dblocation, "carbontable_preagg1")
sql("drop table carbontable")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
restoreData(dblocation, "carbontable")
restoreData(dblocation, "carbontable_preagg1")
sql("refresh table carbontable")
checkAnswer(sql("select count(*) from carbontable"), Row(3))
checkAnswer(sql("select c1 from carbontable"), Seq(Row("a"), Row("b"), Row("a")))
checkAnswer(sql("select count(*) from carbontable_preagg1"), Row(2))
checkAnswer(sql("select carbontable_c1 from carbontable_preagg1"), Seq(Row("a"), Row("b")))
}
}
test("register pre aggregate table test") {
sql("drop database if exists carbon cascade")
sql(s"create database carbon location '$dblocation'")
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'aa','aaa'")
sql("insert into carbontable select 'a',10,'aa','aaa'")
sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
backUpData(dblocation, "carbontable")
backUpData(dblocation, "carbontable_preagg1")
sql("drop table carbontable")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
restoreData(dblocation, "carbontable")
restoreData(dblocation, "carbontable_preagg1")
sql("refresh table carbontable")
checkAnswer(sql("select count(*) from carbontable"), Row(3))
checkAnswer(sql("select c1 from carbontable"), Seq(Row("a"), Row("b"), Row("a")))
checkAnswer(sql("select count(*) from carbontable_preagg1"), Row(2))
checkAnswer(sql("select carbontable_c1 from carbontable_preagg1"), Seq(Row("a"), Row("b")))
}
}
test("register pre aggregate table should fail if the aggregate table not copied") {
sql("drop database if exists carbon cascade")
sql(s"create database carbon location '$dblocation'")
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'aa','aaa'")
sql("insert into carbontable select 'a',10,'aa','aaa'")
sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
backUpData(dblocation, "carbontable")
backUpData(dblocation, "carbontable_preagg1")
sql("drop table carbontable")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
restoreData(dblocation, "carbontable")
intercept[ProcessMetaDataException] {
sql("refresh table carbontable")
}
restoreData(dblocation, "carbontable_preagg1")
}
}
test("Update operation on carbon table should pass after registration or refresh") {
sql("drop database if exists carbon cascade")
sql("drop database if exists carbon1 cascade")
sql(s"create database carbon1 location '$dblocation'")
sql("use carbon1")
sql("drop table if exists carbontable")
sql("""create table carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
backUpData(dblocation, "carbontable")
sql("drop table carbontable")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
restoreData(dblocation, "carbontable")
sql("refresh table carbontable")
// update operation
sql("""update carbon1.carbontable d set (d.c2) = (d.c2 + 1) where d.c1 = 'a'""").show()
sql("""update carbon1.carbontable d set (d.c2) = (d.c2 + 1) where d.c1 = 'b'""").show()
checkAnswer(
sql("""select c1,c2,c3,c5 from carbon1.carbontable"""),
Seq(Row("a", 2, "aa", "aaa"), Row("b", 2, "bb", "bbb"))
)
}
}
test("Update operation on carbon table") {
sql("drop database if exists carbon1 cascade")
sql(s"create database carbon1 location '$dblocation'")
sql("use carbon1")
sql(
"""
CREATE TABLE automerge(id int, name string, city string, age int)
STORED BY 'org.apache.carbondata.format'
""")
val testData = s"$resourcesPath/sample.csv"
sql(s"LOAD DATA LOCAL INPATH '$testData' into table automerge")
backUpData(dblocation, "automerge")
sql("drop table automerge")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
restoreData(dblocation, "automerge")
sql("refresh table automerge")
// update operation
sql("""update carbon1.automerge d set (d.id) = (d.id + 1) where d.id > 2""").show()
checkAnswer(
sql("select count(*) from automerge"),
Seq(Row(6))
)
}
}
test("Delete operation on carbon table") {
sql("drop database if exists carbon cascade")
sql(s"create database carbon location '$dblocation'")
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
backUpData(dblocation, "carbontable")
sql("drop table carbontable")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
restoreData(dblocation, "carbontable")
sql("refresh table carbontable")
// delete operation
sql("""delete from carbontable where c3 = 'aa'""").show
checkAnswer(
sql("""select c1,c2,c3,c5 from carbon.carbontable"""),
Seq(Row("b", 1, "bb", "bbb"))
)
sql("drop table carbontable")
}
}
test("Alter table add column test") {
sql("drop database if exists carbon cascade")
sql(s"create database carbon location '$dblocation'")
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
backUpData(dblocation, "carbontable")
sql("drop table carbontable")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
restoreData(dblocation, "carbontable")
sql("refresh table carbontable")
sql("Alter table carbontable add columns(c4 string) " +
"TBLPROPERTIES('DICTIONARY_EXCLUDE'='c4', 'DEFAULT.VALUE.c4'='def')")
checkAnswer(
sql("""select c1,c2,c3,c5,c4 from carbon.carbontable"""),
Seq(Row("a", 1, "aa", "aaa", "def"), Row("b", 1, "bb", "bbb", "def"))
)
sql("drop table carbontable")
}
}
test("Alter table change column datatype test") {
sql("drop database if exists carbon cascade")
sql(s"create database carbon location '$dblocation'")
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
backUpData(dblocation, "carbontable")
sql("drop table carbontable")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
restoreData(dblocation, "carbontable")
sql("refresh table carbontable")
sql("Alter table carbontable change c2 c2 long")
checkAnswer(
sql("""select c1,c2,c3,c5 from carbon.carbontable"""),
Seq(Row("a", 1, "aa", "aaa"), Row("b", 1, "bb", "bbb"))
)
sql("drop table carbontable")
}
}
test("Alter table drop column test") {
sql("drop database if exists carbon cascade")
sql(s"create database carbon location '$dblocation'")
sql("use carbon")
sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
sql("insert into carbontable select 'a',1,'aa','aaa'")
sql("insert into carbontable select 'b',1,'bb','bbb'")
backUpData(dblocation, "carbontable")
sql("drop table carbontable")
if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
restoreData(dblocation, "carbontable")
sql("refresh table carbontable")
sql("Alter table carbontable drop columns(c2)")
checkAnswer(
sql("""select * from carbon.carbontable"""),
Seq(Row("a", "aa", "aaa"), Row("b", "bb", "bbb"))
)
sql("drop table carbontable")
}
}
override def afterAll {
sql("use default")
sql("drop database if exists carbon cascade")
}
}
| sgururajshetty/carbondata | integration/spark2/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala | Scala | apache-2.0 | 13,562 |