code | repo_name | path | language | license | size
stringlengths 5-1M | stringlengths 5-109 | stringlengths 6-208 | stringclasses 1 value | stringclasses 15 values | int64 5-1M
---|---|---|---|---|---
/* date: Dec 7, 2013
BUTTON CARD SET REMAP
'%ButtonCardSet' code group is identical to the '%CardSet' code set.
Both are children of Notecard and are linked by the sibling address.
'ButtonCardSetRemap' removes the '%ButtonCardSet' group from this
sibling link, and attaches it to '%CardSet''s buttonAddress
argument. For example:
Prior After
%CardSet %CardSet
address 2002 address 2002
child 2003 child 2003
sibling 2005 sibling 2008 // changed address
button 0 button 2005 // " "
%ButtonCardSet %ButtonCardSet
address 2005 address 2005
child 2006 child 2006
sibling 2008 sibling 0 // changed address
button 0 button 2002 // " "
%CardSet %CardSet
address 2008 address 2008
... ...
'%ButtonCardSet' has been removed as a child of '%Notecard'.
The 1st '%CardSet' is now the parent (via button 2005) of
'%ButtonCardSet'.
*/
package com.server
import io._
object ButtonCardSetRemap {
// Extract className and address
val regex = """([%a-zA-Z0-9]+)\s+(\d+)""".r
// Store all CardSet, NotecardTask, NextFile, and ButtonCardSet
// objects whose component addresses have been modified.
var buttonMap = Map[String, Array[String]]()
// Invoked by BuildStructure.
// First, convert 'struct' to List[List[String]] containing
// %<classname>parameter-pairs<%%>
def buttonCardSetRemap(struct:collection.mutable.ArrayBuffer[String])
: List[Array[String]]={
// collapse '%<classname> to %%', inclusive of parameters
// to List[Array[String]].
val listList=collectPercentToDoublePercent(struct)
modifyAddressesOfButtonCardSetGroup(listList)
val l=insertAddressChangeObjectIntoList(listList).reverse
l
}
def fetchAddress(line:String):(String,String)={
line match {
case regex(className, address)=> (className,address)
case _=>
//println("InputScript2: unknown line="+line)
(null,null)
}
}
def switchAddress(targetName: String, csline: String) = {
val (className, address) = fetchAddress(csline)
// println("ButtonCardSetRemap: switchAddress() className="+className+" addr="+address+" targetName="+targetName)
targetName + "\t" + address
}
def modifyAddressesOfButtonCardSetGroup(percentList:List[List[String]]) = {
var cs=Array[String]()
var bcs= Array[String]()
var nextCs=Array[String]()
// store one or more successive ButtonCardSet objects
var multiButtonList=List[Array[String]]()
val className=0
val addressIndex=1
val siblingIndex= 3
val buttonIndex= 4
// 'set' is (<%classname>parameter-pairs<%%>)
for( set <- percentList) {
val ss=set.toArray
ss(0) match { // ss(0) is <%classname>
case "%CardSet" if (bcs.isEmpty)=> //
cs=ss
case "%ButtonCardSet" if ( !cs.isEmpty)=> // prior loop was <%CardSet>
bcs=ss
//println("Button..Remap: case:BSC=> cs(1)="+cs(1)+" bcs(0)="+bcs(1) )
// One or more %ButtonCardSet may be children of %CardSet
multiButtonList = bcs :: multiButtonList
// prior loop(s) saw one or more <%ButtonCardSet>. The current
// <%classname> terminates the scope of <%ButtonCardSet>, so
// implement the address changes.
// (Note, potential problem because %LoadDictionary not represented)
case "%CardSet" | "%NotecardTask" | "%NextFile" if ( ! bcs.isEmpty)=>
nextCs=ss
// Sibling's address added to '%ButtonCardSet' button
// address.
cs(buttonIndex)=switchAddress("button", cs(siblingIndex) )
// 1st Notecard child following the '%ButtonCardSet' object
// is added to the child's sibling address.
//println("Button..Remap: cs(siblingIndex)="+cs(siblingIndex)+" cs-addr="+cs(1))
cs(siblingIndex)=switchAddress("sibling",nextCs(addressIndex))
// 'oneButton' are individual ButtonCardSet instances
for( oneButton <- multiButtonList) {
// ButtonCardSet has address of CardSet parent to
// allow return.
//println("ButtonCardSetRemap: oneButton addr="+oneButton(1)+" cs(1)="+cs(1))
oneButton(buttonIndex)=switchAddress("button", cs(addressIndex) )
}
// last ButtonCardSet object has a "null" sibling
// address to indicate the end of the list. (note: List
// is reversed).
multiButtonList.head(siblingIndex)="sibling\t0"
// Map all objects whose addresses are changed.
mapObjectsWhoseAddressesAreModified(cs, multiButtonList,nextCs)
//displayCardSetAndButtonCardSet(cs, multiButtonList, nextCs)
// reset to begin next loop
// cs=Array[String]()
bcs=Array[String]()
nextCs=Array[String]()
// this CardSet may be a parent of ButtonCardSet
// if(ss(0)=="%CardSet" )
// cs=ss
case _=> // all <%classnames> not %CardSet,%NotecardTask,%NextFile,%ButtonCardSet
}
}
// 'percentList' ended without a '%CardSet', '%NotecardTask', or '%NextFile'
// sibling, therefore 'siblings' are '0'.
if( ! bcs.isEmpty) {
//println("loop ended")
cs(siblingIndex)="sibling\t0"
// bcs(siblingIndex)="sibling\t0"
mapObjectsWhoseAddressesAreModified(cs, multiButtonList,nextCs)
//displayCardSetAndButtonCardSet(cs, multiButtonList, nextCs)
}
}
// Store all CardSet, NotecardTask, NextFile, and ButtonCardSet objects whose
// component addresses have been changed as values in 'buttonMap'. The
// map key is the unique symbolic address of the object.
def mapObjectsWhoseAddressesAreModified(cs:Array[String],
multiButtonList:List[Array[String]],
nextCs:Array[String]) {
buttonMap += cs(1)-> cs
for(oneButton <- multiButtonList)
buttonMap += oneButton(1)-> oneButton
buttonMap += nextCs(1)-> nextCs
}
def insertAddressChangeObjectIntoList(percentList:List[List[String]]) ={
var listArray= List[Array[String]] ()
for( list <- percentList) {
var array=list.toArray
var address=array(1)
buttonMap.get(address) match {
case Some(a)=>
listArray = a :: listArray
case None=>
listArray = array :: listArray
}
}
listArray
}
def displayCardSetAndButtonCardSet(cs:Array[String],
multiButtonList:List[Array[String]],
nextCs:Array[String]) {
cs.foreach(x => println("\t" + x))
for(oneButton <- multiButtonList)
oneButton.foreach(x => println("\t\t" + x))
nextCs.foreach(x => println("\t\t\t" + x))
}
// gather <%classname><parameters><%% delimiter> into a List and add the List
// to a super List (List[List[String]]).
def collectPercentToDoublePercent(struct:collection.mutable.ArrayBuffer[String])
:List[List[String]]= {
var percentList=List[List[String]]()
var l=List[String]()
for(s <- struct) {
if(s.nonEmpty) { // skip empty lines
s match {
case a:String if(a(0)=='%' && a(1)!='%') =>
// println("%="+a)
l = a :: l
case a:String if(a.take(2)=="%%") =>
// println("%%="+a)
l = a :: l
percentList= l.reverse :: percentList
l=List[String]()
case a:String =>
// println("_=> ="+a)
l =a :: l
}
}
}
//println("percentList.size="+percentList.size)
percentList.reverse
}
}
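/* Illustrative sketch (hypothetical, not part of the original source): a minimal
 * driver showing how 'buttonCardSetRemap' could be fed the flattened
 * "%<classname> / parameter-pair / %%" line format described in the header
 * comment. The addresses mirror the Prior/After example above; the object name
 * and input values are invented for illustration only. */
object ButtonCardSetRemapExample extends App {
  import scala.collection.mutable.ArrayBuffer

  val struct = ArrayBuffer(
    "%CardSet", "address\t2002", "child\t2003", "sibling\t2005", "button\t0", "%%",
    "%ButtonCardSet", "address\t2005", "child\t2006", "sibling\t2008", "button\t0", "%%",
    "%CardSet", "address\t2008", "child\t2009", "sibling\t0", "button\t0", "%%")

  // After the remap, the first %CardSet's sibling should point to 2008 and its
  // button to 2005, while the %ButtonCardSet's sibling becomes 0 and its button 2002.
  ButtonCardSetRemap.buttonCardSetRemap(struct).foreach(set => println(set.mkString(" | ")))
}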
| hangle/Script | src/ButtonCardSetRemap.scala | Scala | apache-2.0 | 7,230 |
package com.wordnik.client.model
import com.wordnik.client.model.VariableCategory
case class Inline_response_200_23 (
data: List[VariableCategory],
success: Boolean
)
| QuantiModo/QuantiModo-SDK-Scalatra | src/main/scala/com/wordnik/client/model/Inline_response_200_23.scala | Scala | gpl-2.0 | 174 |
package troy
package cql
package object ast {
type Identifier = String
}
| schemasafe/troy | cql-ast/src/main/scala/troy/cql/ast/package.scala | Scala | apache-2.0 | 76 |
package name.ryanboder.maestroid
import android.graphics.Color
import org.scaloid.common._
class MainActivity extends SActivity with TagUtil {
lazy val accelerometer = new Accelerometer(this)
lazy val recorder = new AccelerometerRecorder()
lazy val detector = new GestureDetector(this)
lazy val player = new MusicPlayer(this)
private var accelerometerViews: Array[STextView] = null
private var tempoView: STextView = null
private var volumeView: STextView = null
private var recordButton: SButton = null
onCreate {
info("onCreate")
contentView = new SVerticalLayout {
STextView("Welcome to Maestroid")
accelerometerViews = Array(STextView(), STextView(), STextView())
tempoView = STextView("Tempo: 0.0")
volumeView = STextView("Volume: 0.0")
recordButton = SButton("Record Accelerometer", toggleAccelerometerRecording).backgroundColor(Color.DKGRAY)
}
}
onDestroy {
info("onDestroy")
}
override def onResume() {
super.onResume()
info("onResume")
accelerometer.activate((data: AccelerometerData) => {
recorder(data)
updateAccelerometerViews(data)
for (gesture <- detector(data)) {
info(gesture.toString)
player(gesture)
gesture match {
case t: Tempo => tempoView.setText("Tempo: " + t.beatsPerSecond)
case v: Volume => volumeView.setText("Volume: " + v.volume)
case g: Gesture => error("Unexpected gesture " + g)
}
}
})
}
override def onPause() {
info("onPause")
accelerometer.deactivate()
super.onPause()
}
def updateAccelerometerViews(data: AccelerometerData): Unit = {
if (data.timestamp - lastLoggedAccelerometerTime > 1000000000) {
accelerometerViews(0).setText("X: " + data.acceleration.x)
accelerometerViews(1).setText("Y: " + data.acceleration.y)
accelerometerViews(2).setText("Z: " + data.acceleration.z)
lastLoggedAccelerometerTime = data.timestamp
}
}
private var lastLoggedAccelerometerTime = 0L
private def toggleAccelerometerRecording(): Unit = {
if (recorder.isRecording) {
recorder.stop()
recordButton.setBackgroundColor(Color.DKGRAY)
toast("Finished recording")
} else {
recorder.start()
recordButton.setBackgroundColor(Color.RED)
toast("Starting to record")
}
}
}
| ryan-boder/maestroid | src/main/scala/name/ryanboder/maestroid/MainActivity.scala | Scala | gpl-3.0 | 2,372 |
package com.twitter.finagle.http.filter
import com.twitter.finagle.http.{Fields, Method, Request, Response, Status}
import com.twitter.finagle.stats.InMemoryStatsReceiver
import com.twitter.finagle.{Status => _, _}
import com.twitter.io.Buf
import com.twitter.util.{Await, Awaitable, Closable, Duration, Future}
import java.net.InetSocketAddress
import java.util.concurrent.atomic.AtomicBoolean
import org.scalatest.funsuite.AnyFunSuite
class ClientNackFilterTest extends AnyFunSuite {
private def await[T](t: Awaitable[T]): T =
Await.result(t, Duration.fromSeconds(15))
test("converts nacked requests into failures with the right flags") {
def withHeaderService(key: String, value: String) =
(new ClientNackFilter).andThen(Service.mk[Request, Response] { _ =>
val resp = Response(Status.ServiceUnavailable)
resp.headerMap.set(key, value)
Future.value(resp)
})
def req = Request("/foo")
{ // Retry-After: 0 nack
val retryAfterSvc = withHeaderService(Fields.RetryAfter, "0")
val retryAfterFailure = intercept[Failure] { await(retryAfterSvc(req)) }
assert(retryAfterFailure.isFlagged(FailureFlags.Retryable))
assert(retryAfterFailure.isFlagged(FailureFlags.Rejected))
}
{ // finagle retryable nack
val nackSvc = withHeaderService(HttpNackFilter.RetryableNackHeader, "true")
val retryableFailure = intercept[Failure] { await(nackSvc(req)) }
assert(retryableFailure.isFlagged(FailureFlags.Retryable))
assert(retryableFailure.isFlagged(FailureFlags.Rejected))
}
{ // finagle non-retryable nack
val nackSvc = withHeaderService(HttpNackFilter.NonRetryableNackHeader, "true")
val nonRetryableFailure = intercept[Failure] { await(nackSvc(req)) }
assert(!nonRetryableFailure.isFlagged(FailureFlags.Retryable))
assert(nonRetryableFailure.isFlagged(FailureFlags.Rejected))
}
}
test("Lets a regular request through") {
new NackCtx(withStreaming = false) {
def nackBody: Buf = Buf.Utf8("Non-chunked nack body")
assert(await(client(Request("/foo"))).status == http.Status.Ok)
assert(serverSr.counters(Seq("myservice", "nacks")) == 0)
assert(clientSr.counters(Seq("http", "retries", "requeues")) == 0)
// reuse connections
assert(await(client(Request("/bar"))).status == http.Status.Ok)
assert(clientSr.counters(Seq("http", "connects")) == 1)
assert(serverSr.counters(Seq("myservice", "nacks")) == 0)
closeCtx()
}
}
test("Converts an aggregated Nack response") {
new NackCtx(withStreaming = false) {
def nackBody: Buf = Buf.Utf8("Non-chunked nack body")
assert(await(client(request)).status == http.Status.Ok)
assert(serverSr.counters(Seq("myservice", "nacks")) == 1)
assert(clientSr.counters(Seq("http", "retries", "requeues")) == 1)
// reuse connections
assert(await(client(request)).status == http.Status.Ok)
assert(clientSr.counters(Seq("http", "connects")) == 1)
assert(serverSr.counters(Seq("myservice", "nacks")) == 1)
closeCtx()
}
}
test("swallows the body of a HttpNack if it happens to come as a chunked response") {
new NackCtx(withStreaming = true) {
def nackBody: Buf = Buf.Utf8("Chunked nack body")
assert(await(client(request)).status == http.Status.Ok)
assert(serverSr.counters(Seq("myservice", "nacks")) == 1)
assert(clientSr.counters(Seq("http", "retries", "requeues")) == 1)
// reuse connections
assert(await(client(request)).status == http.Status.Ok)
assert(clientSr.counters(Seq("http", "connects")) == 1)
assert(serverSr.counters(Seq("myservice", "nacks")) == 1)
closeCtx()
}
}
test("fails on excessively large nack response") {
new NackCtx(withStreaming = true) {
def nackBody: Buf = Buf.Utf8("Very large" * 1024)
assert(await(client(request)).status == http.Status.Ok)
// h2 sessions should remain at 1 and the connection should not sever
assert(clientSr.counters(Seq("http", "connects")) == 1)
assert(clientSr.gauges(Seq("http", "h2pool-sessions"))() == 1.0f)
assert(clientSr.counters(Seq("http", "failures", "rejected")) == 1)
assert(serverSr.counters(Seq("myservice", "nacks")) == 1)
closeCtx()
}
}
test("always marks streaming requests as non-retryable") {
new NackCtx(withStreaming = false) {
def nackBody: Buf = Buf.Utf8("whatever")
val f = intercept[Failure] {
request.method(Method.Post)
request.setChunked(true)
request.writer.close() // don't really write anything, but the dispatcher can't know that...
await(client(request))
}
assert(f.isFlagged(FailureFlags.Rejected))
assert(!f.isFlagged(FailureFlags.Retryable))
assert(clientSr.counters(Seq("http", "requests")) == 1)
assert(serverSr.counters(Seq("myservice", "nacks")) == 1)
closeCtx()
}
}
test("adds the magic header to requests that aren't chunked") {
val markerHeader = "finagle-had-retryable-request-header"
val service = new ClientNackFilter().andThen(Service.mk { req: Request =>
val resp = Response()
if (req.headerMap.contains(HttpNackFilter.RetryableRequestHeader)) {
resp.headerMap.add(markerHeader, "")
}
Future.value(resp)
})
val nonChunkedRequest = Request(method = Method.Post, uri = "/")
nonChunkedRequest.contentString = "static"
val nonChunkedResponse = await(service(nonChunkedRequest))
assert(nonChunkedResponse.headerMap.contains(markerHeader))
val chunkedRequest = Request(method = Method.Post, uri = "/")
chunkedRequest.setChunked(true)
val chunkedResponse = await(service(chunkedRequest))
assert(!chunkedResponse.headerMap.contains(markerHeader))
}
test("multiple nack headers are not added if the request is retried") {
val first = new AtomicBoolean(false)
val service = new ClientNackFilter().andThen(Service.mk { req: Request =>
if (first.compareAndSet(false, true)) {
// Nack the first one
Future.exception(Failure.rejected)
} else {
val resp = Response()
resp.contentString =
req.headerMap.getAll(HttpNackFilter.RetryableRequestHeader).length.toString
Future.value(resp)
}
})
val request = Request(method = Method.Post, uri = "/")
request.contentString = "post"
request.contentString
val ex = intercept[FailureFlags[_]] { await(service(request)) }
assert(ex.isFlagged(FailureFlags.Retryable))
assert(request.headerMap.getAll(HttpNackFilter.RetryableRequestHeader).length == 1)
// Do it again.
val response = await(service(request))
assert(request.headerMap.getAll(HttpNackFilter.RetryableRequestHeader).length == 1)
assert(response.contentString == "1")
}
// Scaffold for checking nack behavior
private abstract class NackCtx(withStreaming: Boolean) {
private val ChunkedNack = "/chunkednack"
private val StdNack = "/stdnack"
def nackBody: Buf
val serverSr = new InMemoryStatsReceiver
val clientSr = new InMemoryStatsReceiver
@volatile var needsNack: Boolean = true
val service: Service[Request, Response] = Service.mk { req: Request =>
req.path match {
case ChunkedNack if needsNack =>
Future.value {
needsNack = false
// simulate a nack response with a chunked body by just sending a chunked body
serverSr.scope("myservice").counter("nacks").incr()
val resp = Response(status = HttpNackFilter.ResponseStatus)
resp.headerMap.set(HttpNackFilter.RetryableNackHeader, "true")
resp.setChunked(true)
resp.writer
.write(nackBody)
.before(resp.writer.close())
resp
}
case StdNack if needsNack =>
Future.value {
needsNack = false
// simulate a standard nack response with a non-chunked body
serverSr.scope("myservice").counter("nacks").incr()
val resp = Response(status = HttpNackFilter.ResponseStatus)
resp.headerMap.set(HttpNackFilter.RetryableNackHeader, "true")
resp.content = nackBody
resp
}
case _ =>
Future.value {
val resp = Response()
resp.contentString = "the body"
resp
}
}
}
val server =
Http.server
.withStatsReceiver(serverSr)
.withLabel("myservice")
.withStreaming(true)
.serve(new InetSocketAddress(0), service)
val client =
Http.client
.withStatsReceiver(clientSr)
.withStreaming(true)
.newService(
Name.bound(Address(server.boundAddress.asInstanceOf[InetSocketAddress])),
"http"
)
val request = Request(if (withStreaming) ChunkedNack else StdNack)
// Close both the client and the server
def closeCtx(): Unit = await(Closable.all(client, server).close())
}
}
| twitter/finagle | finagle-http/src/test/scala/com/twitter/finagle/http/filter/ClientNackFilterTest.scala | Scala | apache-2.0 | 9,133 |
package com.avast.metrics.scalaapi.impl
import java.util.function.Supplier
import com.avast.metrics.api.{Naming, Monitor => JMonitor}
import com.avast.metrics.scalaapi._
private[scalaapi] class MonitorImpl(monitor: JMonitor, naming: Naming) extends Monitor {
override def named(name: String): Monitor = new MonitorImpl(monitor.named(name), naming)
override def named(name: String, name2: String, names: String*): Monitor = new MonitorImpl(monitor.named(name, name2, names: _*), naming)
override def getName: String = monitor.getName
override def meter(name: String): Meter = new MeterImpl(monitor.newMeter(name))
override def counter(name: String): Counter = new CounterImpl(monitor.newCounter(name))
override def timer(name: String): Timer = new TimerImpl(monitor.newTimer(name))
override def timerPair(name: String): TimerPair = new TimerPairImpl(
timer(naming.successTimerName(name)),
timer(naming.failureTimerName(name))
)
override def gauge[A](name: String)(value: () => A): Gauge[A] = gauge(name, replaceExisting = false)(value)
override def gauge[A](name: String, replaceExisting: Boolean)(value: () => A): Gauge[A] =
new GaugeImpl[A](
monitor.newGauge(
name,
replaceExisting,
new Supplier[A] {
override def get(): A = value()
}
)
)
override def histogram(name: String): Histogram = new HistogramImpl(monitor.newHistogram(name))
override def close(): Unit = monitor.close()
override def asJava: JMonitor = monitor
}
| avast/metrics | scala-api/src/main/scala/com/avast/metrics/scalaapi/impl/MonitorImpl.scala | Scala | mit | 1,535 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming
import java.sql.Date
import java.util.concurrent.ConcurrentHashMap
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.SparkException
import org.apache.spark.api.java.function.FlatMapGroupsWithStateFunction
import org.apache.spark.sql.Encoder
import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, UnsafeProjection, UnsafeRow}
import org.apache.spark.sql.catalyst.plans.logical.FlatMapGroupsWithState
import org.apache.spark.sql.catalyst.plans.physical.UnknownPartitioning
import org.apache.spark.sql.catalyst.streaming.InternalOutputModes._
import org.apache.spark.sql.execution.RDDScanExec
import org.apache.spark.sql.execution.streaming.{FlatMapGroupsWithStateExec, GroupStateImpl, MemoryStream}
import org.apache.spark.sql.execution.streaming.state.{StateStore, StateStoreId, StoreUpdate}
import org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite.MemoryStateStore
import org.apache.spark.sql.streaming.util.StreamManualClock
import org.apache.spark.sql.types.{DataType, IntegerType}
/** Class to check custom state types */
case class RunningCount(count: Long)
case class Result(key: Long, count: Int)
class FlatMapGroupsWithStateSuite extends StateStoreMetricsTest with BeforeAndAfterAll {
import testImplicits._
import GroupStateImpl._
import GroupStateTimeout._
override def afterAll(): Unit = {
super.afterAll()
StateStore.stop()
}
test("GroupState - get, exists, update, remove") {
var state: GroupStateImpl[String] = null
def testState(
expectedData: Option[String],
shouldBeUpdated: Boolean = false,
shouldBeRemoved: Boolean = false): Unit = {
if (expectedData.isDefined) {
assert(state.exists)
assert(state.get === expectedData.get)
} else {
assert(!state.exists)
intercept[NoSuchElementException] {
state.get
}
}
assert(state.getOption === expectedData)
assert(state.hasUpdated === shouldBeUpdated)
assert(state.hasRemoved === shouldBeRemoved)
}
// Updating empty state
state = new GroupStateImpl[String](None)
testState(None)
state.update("")
testState(Some(""), shouldBeUpdated = true)
// Updating exiting state
state = new GroupStateImpl[String](Some("2"))
testState(Some("2"))
state.update("3")
testState(Some("3"), shouldBeUpdated = true)
// Removing state
state.remove()
testState(None, shouldBeRemoved = true, shouldBeUpdated = false)
state.remove() // should be still callable
state.update("4")
testState(Some("4"), shouldBeRemoved = false, shouldBeUpdated = true)
// Updating by null throw exception
intercept[IllegalArgumentException] {
state.update(null)
}
}
test("GroupState - setTimeout**** with NoTimeout") {
for (initState <- Seq(None, Some(5))) {
// for different initial state
implicit val state = new GroupStateImpl(initState, 1000, 1000, NoTimeout, hasTimedOut = false)
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
}
}
test("GroupState - setTimeout**** with ProcessingTimeTimeout") {
implicit var state: GroupStateImpl[Int] = null
state = new GroupStateImpl[Int](None, 1000, 1000, ProcessingTimeTimeout, hasTimedOut = false)
assert(state.getTimeoutTimestamp === NO_TIMESTAMP)
testTimeoutDurationNotAllowed[IllegalStateException](state)
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
state.update(5)
assert(state.getTimeoutTimestamp === NO_TIMESTAMP)
state.setTimeoutDuration(1000)
assert(state.getTimeoutTimestamp === 2000)
state.setTimeoutDuration("2 second")
assert(state.getTimeoutTimestamp === 3000)
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
state.remove()
assert(state.getTimeoutTimestamp === NO_TIMESTAMP)
testTimeoutDurationNotAllowed[IllegalStateException](state)
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
}
test("GroupState - setTimeout**** with EventTimeTimeout") {
implicit val state = new GroupStateImpl[Int](
None, 1000, 1000, EventTimeTimeout, hasTimedOut = false)
assert(state.getTimeoutTimestamp === NO_TIMESTAMP)
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
testTimeoutTimestampNotAllowed[IllegalStateException](state)
state.update(5)
state.setTimeoutTimestamp(10000)
assert(state.getTimeoutTimestamp === 10000)
state.setTimeoutTimestamp(new Date(20000))
assert(state.getTimeoutTimestamp === 20000)
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
state.remove()
assert(state.getTimeoutTimestamp === NO_TIMESTAMP)
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
testTimeoutTimestampNotAllowed[IllegalStateException](state)
}
test("GroupState - illegal params to setTimeout****") {
var state: GroupStateImpl[Int] = null
// Test setTimeout****() with illegal values
def testIllegalTimeout(body: => Unit): Unit = {
intercept[IllegalArgumentException] { body }
assert(state.getTimeoutTimestamp === NO_TIMESTAMP)
}
state = new GroupStateImpl(Some(5), 1000, 1000, ProcessingTimeTimeout, hasTimedOut = false)
testIllegalTimeout { state.setTimeoutDuration(-1000) }
testIllegalTimeout { state.setTimeoutDuration(0) }
testIllegalTimeout { state.setTimeoutDuration("-2 second") }
testIllegalTimeout { state.setTimeoutDuration("-1 month") }
testIllegalTimeout { state.setTimeoutDuration("1 month -1 day") }
state = new GroupStateImpl(Some(5), 1000, 1000, EventTimeTimeout, hasTimedOut = false)
testIllegalTimeout { state.setTimeoutTimestamp(-10000) }
testIllegalTimeout { state.setTimeoutTimestamp(10000, "-3 second") }
testIllegalTimeout { state.setTimeoutTimestamp(10000, "-1 month") }
testIllegalTimeout { state.setTimeoutTimestamp(10000, "1 month -1 day") }
testIllegalTimeout { state.setTimeoutTimestamp(new Date(-10000)) }
testIllegalTimeout { state.setTimeoutTimestamp(new Date(-10000), "-3 second") }
testIllegalTimeout { state.setTimeoutTimestamp(new Date(-10000), "-1 month") }
testIllegalTimeout { state.setTimeoutTimestamp(new Date(-10000), "1 month -1 day") }
}
test("GroupState - hasTimedOut") {
for (timeoutConf <- Seq(NoTimeout, ProcessingTimeTimeout, EventTimeTimeout)) {
for (initState <- Seq(None, Some(5))) {
val state1 = new GroupStateImpl(initState, 1000, 1000, timeoutConf, hasTimedOut = false)
assert(state1.hasTimedOut === false)
val state2 = new GroupStateImpl(initState, 1000, 1000, timeoutConf, hasTimedOut = true)
assert(state2.hasTimedOut === true)
}
}
}
test("GroupState - primitive type") {
var intState = new GroupStateImpl[Int](None)
intercept[NoSuchElementException] {
intState.get
}
assert(intState.getOption === None)
intState = new GroupStateImpl[Int](Some(10))
assert(intState.get == 10)
intState.update(0)
assert(intState.get == 0)
intState.remove()
intercept[NoSuchElementException] {
intState.get
}
}
// Values used for testing StateStoreUpdater
val currentBatchTimestamp = 1000
val currentBatchWatermark = 1000
val beforeTimeoutThreshold = 999
val afterTimeoutThreshold = 1001
// Tests for StateStoreUpdater.updateStateForKeysWithData() when timeout = NoTimeout
for (priorState <- Seq(None, Some(0))) {
val priorStateStr = if (priorState.nonEmpty) "prior state set" else "no prior state"
val testName = s"NoTimeout - $priorStateStr - "
testStateUpdateWithData(
testName + "no update",
stateUpdates = state => { /* do nothing */ },
timeoutConf = GroupStateTimeout.NoTimeout,
priorState = priorState,
expectedState = priorState) // should not change
testStateUpdateWithData(
testName + "state updated",
stateUpdates = state => { state.update(5) },
timeoutConf = GroupStateTimeout.NoTimeout,
priorState = priorState,
expectedState = Some(5)) // should change
testStateUpdateWithData(
testName + "state removed",
stateUpdates = state => { state.remove() },
timeoutConf = GroupStateTimeout.NoTimeout,
priorState = priorState,
expectedState = None) // should be removed
}
// Tests for StateStoreUpdater.updateStateForKeysWithData() when timeout != NoTimeout
for (priorState <- Seq(None, Some(0))) {
for (priorTimeoutTimestamp <- Seq(NO_TIMESTAMP, 1000)) {
var testName = ""
if (priorState.nonEmpty) {
testName += "prior state set, "
if (priorTimeoutTimestamp == 1000) {
testName += "prior timeout set"
} else {
testName += "no prior timeout"
}
} else {
testName += "no prior state"
}
for (timeoutConf <- Seq(ProcessingTimeTimeout, EventTimeTimeout)) {
testStateUpdateWithData(
s"$timeoutConf - $testName - no update",
stateUpdates = state => { /* do nothing */ },
timeoutConf = timeoutConf,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = priorState, // state should not change
expectedTimeoutTimestamp = NO_TIMESTAMP) // timestamp should be reset
testStateUpdateWithData(
s"$timeoutConf - $testName - state updated",
stateUpdates = state => { state.update(5) },
timeoutConf = timeoutConf,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = NO_TIMESTAMP) // timestamp should be reset
testStateUpdateWithData(
s"$timeoutConf - $testName - state removed",
stateUpdates = state => { state.remove() },
timeoutConf = timeoutConf,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = None) // state should be removed
}
testStateUpdateWithData(
s"ProcessingTimeTimeout - $testName - state and timeout duration updated",
stateUpdates =
(state: GroupState[Int]) => { state.update(5); state.setTimeoutDuration(5000) },
timeoutConf = ProcessingTimeTimeout,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = currentBatchTimestamp + 5000) // timestamp should change
testStateUpdateWithData(
s"EventTimeTimeout - $testName - state and timeout timestamp updated",
stateUpdates =
(state: GroupState[Int]) => { state.update(5); state.setTimeoutTimestamp(5000) },
timeoutConf = EventTimeTimeout,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = 5000) // timestamp should change
testStateUpdateWithData(
s"EventTimeTimeout - $testName - timeout timestamp updated to before watermark",
stateUpdates =
(state: GroupState[Int]) => {
state.update(5)
intercept[IllegalArgumentException] {
state.setTimeoutTimestamp(currentBatchWatermark - 1) // try to set to < watermark
}
},
timeoutConf = EventTimeTimeout,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = NO_TIMESTAMP) // timestamp should not update
}
}
// Tests for StateStoreUpdater.updateStateForTimedOutKeys()
val preTimeoutState = Some(5)
for (timeoutConf <- Seq(ProcessingTimeTimeout, EventTimeTimeout)) {
testStateUpdateWithTimeout(
s"$timeoutConf - should not timeout",
stateUpdates = state => { assert(false, "function called without timeout") },
timeoutConf = timeoutConf,
priorTimeoutTimestamp = afterTimeoutThreshold,
expectedState = preTimeoutState, // state should not change
expectedTimeoutTimestamp = afterTimeoutThreshold) // timestamp should not change
testStateUpdateWithTimeout(
s"$timeoutConf - should timeout - no update/remove",
stateUpdates = state => { /* do nothing */ },
timeoutConf = timeoutConf,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = preTimeoutState, // state should not change
expectedTimeoutTimestamp = NO_TIMESTAMP) // timestamp should be reset
testStateUpdateWithTimeout(
s"$timeoutConf - should timeout - update state",
stateUpdates = state => { state.update(5) },
timeoutConf = timeoutConf,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = NO_TIMESTAMP) // timestamp should be reset
testStateUpdateWithTimeout(
s"$timeoutConf - should timeout - remove state",
stateUpdates = state => { state.remove() },
timeoutConf = timeoutConf,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = None, // state should be removed
expectedTimeoutTimestamp = NO_TIMESTAMP)
}
testStateUpdateWithTimeout(
"ProcessingTimeTimeout - should timeout - timeout duration updated",
stateUpdates = state => { state.setTimeoutDuration(2000) },
timeoutConf = ProcessingTimeTimeout,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = preTimeoutState, // state should not change
expectedTimeoutTimestamp = currentBatchTimestamp + 2000) // timestamp should change
testStateUpdateWithTimeout(
"ProcessingTimeTimeout - should timeout - timeout duration and state updated",
stateUpdates = state => { state.update(5); state.setTimeoutDuration(2000) },
timeoutConf = ProcessingTimeTimeout,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = currentBatchTimestamp + 2000) // timestamp should change
testStateUpdateWithTimeout(
"EventTimeTimeout - should timeout - timeout timestamp updated",
stateUpdates = state => { state.setTimeoutTimestamp(5000) },
timeoutConf = EventTimeTimeout,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = preTimeoutState, // state should not change
expectedTimeoutTimestamp = 5000) // timestamp should change
testStateUpdateWithTimeout(
"EventTimeTimeout - should timeout - timeout and state updated",
stateUpdates = state => { state.update(5); state.setTimeoutTimestamp(5000) },
timeoutConf = EventTimeTimeout,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = 5000) // timestamp should change
test("StateStoreUpdater - rows are cloned before writing to StateStore") {
// function for running count
val func = (key: Int, values: Iterator[Int], state: GroupState[Int]) => {
state.update(state.getOption.getOrElse(0) + values.size)
Iterator.empty
}
val store = newStateStore()
val plan = newFlatMapGroupsWithStateExec(func)
val updater = new plan.StateStoreUpdater(store)
val data = Seq(1, 1, 2)
val returnIter = updater.updateStateForKeysWithData(data.iterator.map(intToRow))
returnIter.size // consume the iterator to force store updates
val storeData = store.iterator.map { case (k, v) => (rowToInt(k), rowToInt(v)) }.toSet
assert(storeData === Set((1, 2), (2, 1)))
}
test("flatMapGroupsWithState - streaming") {
// Function to maintain running count up to 2, and then remove the count
// Returns the data and the count if state is defined, otherwise does not return anything
val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
val count = state.getOption.map(_.count).getOrElse(0L) + values.size
if (count == 3) {
state.remove()
Iterator.empty
} else {
state.update(RunningCount(count))
Iterator((key, count.toString))
}
}
val inputData = MemoryStream[String]
val result =
inputData.toDS()
.groupByKey(x => x)
.flatMapGroupsWithState(Update, GroupStateTimeout.NoTimeout)(stateFunc)
testStream(result, Update)(
AddData(inputData, "a"),
CheckLastBatch(("a", "1")),
assertNumStateRows(total = 1, updated = 1),
AddData(inputData, "a", "b"),
CheckLastBatch(("a", "2"), ("b", "1")),
assertNumStateRows(total = 2, updated = 2),
StopStream,
StartStream(),
AddData(inputData, "a", "b"), // should remove state for "a" and not return anything for a
CheckLastBatch(("b", "2")),
assertNumStateRows(total = 1, updated = 2),
StopStream,
StartStream(),
AddData(inputData, "a", "c"), // should recreate state for "a" and return count as 1 and
CheckLastBatch(("a", "1"), ("c", "1")),
assertNumStateRows(total = 3, updated = 2)
)
}
test("flatMapGroupsWithState - streaming + func returns iterator that updates state lazily") {
// Function to maintain running count up to 2, and then remove the count
// Returns the data and the count if state is defined, otherwise does not return anything
// Additionally, it updates state lazily as the returned iterator get consumed
val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
values.flatMap { _ =>
val count = state.getOption.map(_.count).getOrElse(0L) + 1
if (count == 3) {
state.remove()
None
} else {
state.update(RunningCount(count))
Some((key, count.toString))
}
}
}
val inputData = MemoryStream[String]
val result =
inputData.toDS()
.groupByKey(x => x)
.flatMapGroupsWithState(Update, GroupStateTimeout.NoTimeout)(stateFunc)
testStream(result, Update)(
AddData(inputData, "a", "a", "b"),
CheckLastBatch(("a", "1"), ("a", "2"), ("b", "1")),
StopStream,
StartStream(),
AddData(inputData, "a", "b"), // should remove state for "a" and not return anything for a
CheckLastBatch(("b", "2")),
StopStream,
StartStream(),
AddData(inputData, "a", "c"), // should recreate state for "a" and return count as 1 and
CheckLastBatch(("a", "1"), ("c", "1"))
)
}
test("flatMapGroupsWithState - streaming + aggregation") {
// Function to maintain running count up to 2, and then remove the count
// Returns the data and the count (-1 if count reached beyond 2 and state was just removed)
val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
val count = state.getOption.map(_.count).getOrElse(0L) + values.size
if (count == 3) {
state.remove()
Iterator(key -> "-1")
} else {
state.update(RunningCount(count))
Iterator(key -> count.toString)
}
}
val inputData = MemoryStream[String]
val result =
inputData.toDS()
.groupByKey(x => x)
.flatMapGroupsWithState(Append, GroupStateTimeout.NoTimeout)(stateFunc)
.groupByKey(_._1)
.count()
testStream(result, Complete)(
AddData(inputData, "a"),
CheckLastBatch(("a", 1)),
AddData(inputData, "a", "b"),
// mapGroups generates ("a", "2"), ("b", "1"); so increases counts of a and b by 1
CheckLastBatch(("a", 2), ("b", 1)),
StopStream,
StartStream(),
AddData(inputData, "a", "b"),
// mapGroups should remove state for "a" and generate ("a", "-1"), ("b", "2") ;
// so increment a and b by 1
CheckLastBatch(("a", 3), ("b", 2)),
StopStream,
StartStream(),
AddData(inputData, "a", "c"),
// mapGroups should recreate state for "a" and generate ("a", "1"), ("c", "1") ;
// so increment a and c by 1
CheckLastBatch(("a", 4), ("b", 2), ("c", 1))
)
}
test("flatMapGroupsWithState - batch") {
// Function that asserts no prior state exists (batch queries have no state) and returns the count of values
val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
if (state.exists) throw new IllegalArgumentException("state.exists should be false")
Iterator((key, values.size))
}
val df = Seq("a", "a", "b").toDS
.groupByKey(x => x)
.flatMapGroupsWithState(Update, GroupStateTimeout.NoTimeout)(stateFunc).toDF
checkAnswer(df, Seq(("a", 2), ("b", 1)).toDF)
}
test("flatMapGroupsWithState - streaming with processing time timeout") {
// Function to maintain running count up to 2, and then remove the count
// Returns the data and the count (-1 if count reached beyond 2 and state was just removed)
val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
if (state.hasTimedOut) {
state.remove()
Iterator((key, "-1"))
} else {
val count = state.getOption.map(_.count).getOrElse(0L) + values.size
state.update(RunningCount(count))
state.setTimeoutDuration("10 seconds")
Iterator((key, count.toString))
}
}
val clock = new StreamManualClock
val inputData = MemoryStream[String]
val result =
inputData.toDS()
.groupByKey(x => x)
.flatMapGroupsWithState(Update, ProcessingTimeTimeout)(stateFunc)
testStream(result, Update)(
StartStream(ProcessingTime("1 second"), triggerClock = clock),
AddData(inputData, "a"),
AdvanceManualClock(1 * 1000),
CheckLastBatch(("a", "1")),
assertNumStateRows(total = 1, updated = 1),
AddData(inputData, "b"),
AdvanceManualClock(1 * 1000),
CheckLastBatch(("b", "1")),
assertNumStateRows(total = 2, updated = 1),
AddData(inputData, "b"),
AdvanceManualClock(10 * 1000),
CheckLastBatch(("a", "-1"), ("b", "2")),
assertNumStateRows(total = 1, updated = 2),
StopStream,
StartStream(Trigger.ProcessingTime("1 second"), triggerClock = clock),
AddData(inputData, "c"),
AdvanceManualClock(11 * 1000),
CheckLastBatch(("b", "-1"), ("c", "1")),
assertNumStateRows(total = 1, updated = 2),
AddData(inputData, "c"),
AdvanceManualClock(20 * 1000),
CheckLastBatch(("c", "2")),
assertNumStateRows(total = 1, updated = 1)
)
}
test("flatMapGroupsWithState - streaming with event time timeout") {
// Function to maintain the max event time
// Returns the max event time in the state, or -1 if the state was removed by timeout
val stateFunc = (
key: String,
values: Iterator[(String, Long)],
state: GroupState[Long]) => {
val timeoutDelay = 5
if (key != "a") {
Iterator.empty
} else {
if (state.hasTimedOut) {
state.remove()
Iterator((key, -1))
} else {
val valuesSeq = values.toSeq
val maxEventTime = math.max(valuesSeq.map(_._2).max, state.getOption.getOrElse(0L))
val timeoutTimestampMs = maxEventTime + timeoutDelay
state.update(maxEventTime)
state.setTimeoutTimestamp(timeoutTimestampMs * 1000)
Iterator((key, maxEventTime.toInt))
}
}
}
val inputData = MemoryStream[(String, Int)]
val result =
inputData.toDS
.select($"_1".as("key"), $"_2".cast("timestamp").as("eventTime"))
.withWatermark("eventTime", "10 seconds")
.as[(String, Long)]
.groupByKey(_._1)
.flatMapGroupsWithState(Update, EventTimeTimeout)(stateFunc)
testStream(result, Update)(
StartStream(ProcessingTime("1 second")),
AddData(inputData, ("a", 11), ("a", 13), ("a", 15)), // Set timeout timestamp of ...
CheckLastBatch(("a", 15)), // "a" to 15 + 5 = 20s, watermark to 5s
AddData(inputData, ("a", 4)), // Add data older than watermark for "a"
CheckLastBatch(), // No output as data should get filtered by watermark
AddData(inputData, ("dummy", 35)), // Set watermark = 35 - 10 = 25s
CheckLastBatch(), // No output as no data for "a"
AddData(inputData, ("a", 24)), // Add data older than watermark, should be ignored
CheckLastBatch(("a", -1)) // State for "a" should timeout and emit -1
)
}
test("mapGroupsWithState - streaming") {
// Function to maintain running count up to 2, and then remove the count
// Returns the data and the count (-1 if count reached beyond 2 and state was just removed)
val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
val count = state.getOption.map(_.count).getOrElse(0L) + values.size
if (count == 3) {
state.remove()
(key, "-1")
} else {
state.update(RunningCount(count))
(key, count.toString)
}
}
val inputData = MemoryStream[String]
val result =
inputData.toDS()
.groupByKey(x => x)
.mapGroupsWithState(stateFunc) // Types = State: MyState, Out: (Str, Str)
testStream(result, Update)(
AddData(inputData, "a"),
CheckLastBatch(("a", "1")),
assertNumStateRows(total = 1, updated = 1),
AddData(inputData, "a", "b"),
CheckLastBatch(("a", "2"), ("b", "1")),
assertNumStateRows(total = 2, updated = 2),
StopStream,
StartStream(),
AddData(inputData, "a", "b"), // should remove state for "a" and return count as -1
CheckLastBatch(("a", "-1"), ("b", "2")),
assertNumStateRows(total = 1, updated = 2),
StopStream,
StartStream(),
AddData(inputData, "a", "c"), // should recreate state for "a" and return count as 1
CheckLastBatch(("a", "1"), ("c", "1")),
assertNumStateRows(total = 3, updated = 2)
)
}
test("mapGroupsWithState - batch") {
val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
if (state.exists) throw new IllegalArgumentException("state.exists should be false")
(key, values.size)
}
checkAnswer(
spark.createDataset(Seq("a", "a", "b"))
.groupByKey(x => x)
.mapGroupsWithState(stateFunc)
.toDF,
spark.createDataset(Seq(("a", 2), ("b", 1))).toDF)
}
testQuietly("StateStore.abort on task failure handling") {
val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
if (FlatMapGroupsWithStateSuite.failInTask) throw new Exception("expected failure")
val count = state.getOption.map(_.count).getOrElse(0L) + values.size
state.update(RunningCount(count))
(key, count)
}
val inputData = MemoryStream[String]
val result =
inputData.toDS()
.groupByKey(x => x)
.mapGroupsWithState(stateFunc) // Types = State: MyState, Out: (Str, Str)
def setFailInTask(value: Boolean): AssertOnQuery = AssertOnQuery { q =>
FlatMapGroupsWithStateSuite.failInTask = value
true
}
testStream(result, Update)(
setFailInTask(false),
AddData(inputData, "a"),
CheckLastBatch(("a", 1L)),
AddData(inputData, "a"),
CheckLastBatch(("a", 2L)),
setFailInTask(true),
AddData(inputData, "a"),
ExpectFailure[SparkException](), // task should fail but should not increment count
setFailInTask(false),
StartStream(),
CheckLastBatch(("a", 3L)) // task should not fail, and should show correct count
)
}
test("output partitioning is unknown") {
val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => key
val inputData = MemoryStream[String]
val result = inputData.toDS.groupByKey(x => x).mapGroupsWithState(stateFunc)
testStream(result, Update)(
AddData(inputData, "a"),
CheckLastBatch("a"),
AssertOnQuery(_.lastExecution.executedPlan.outputPartitioning === UnknownPartitioning(0))
)
}
test("disallow complete mode") {
val stateFunc = (key: String, values: Iterator[String], state: GroupState[Int]) => {
Iterator[String]()
}
var e = intercept[IllegalArgumentException] {
MemoryStream[String].toDS().groupByKey(x => x).flatMapGroupsWithState(
OutputMode.Complete, GroupStateTimeout.NoTimeout)(stateFunc)
}
assert(e.getMessage === "The output mode of function should be append or update")
val javaStateFunc = new FlatMapGroupsWithStateFunction[String, String, Int, String] {
import java.util.{Iterator => JIterator}
override def call(
key: String,
values: JIterator[String],
state: GroupState[Int]): JIterator[String] = { null }
}
e = intercept[IllegalArgumentException] {
MemoryStream[String].toDS().groupByKey(x => x).flatMapGroupsWithState(
javaStateFunc, OutputMode.Complete,
implicitly[Encoder[Int]], implicitly[Encoder[String]], GroupStateTimeout.NoTimeout)
}
assert(e.getMessage === "The output mode of function should be append or update")
}
def testStateUpdateWithData(
testName: String,
stateUpdates: GroupState[Int] => Unit,
timeoutConf: GroupStateTimeout,
priorState: Option[Int],
priorTimeoutTimestamp: Long = NO_TIMESTAMP,
expectedState: Option[Int] = None,
expectedTimeoutTimestamp: Long = NO_TIMESTAMP): Unit = {
if (priorState.isEmpty && priorTimeoutTimestamp != NO_TIMESTAMP) {
return // there can be no prior timestamp when there is no prior state
}
test(s"StateStoreUpdater - updates with data - $testName") {
val mapGroupsFunc = (key: Int, values: Iterator[Int], state: GroupState[Int]) => {
assert(state.hasTimedOut === false, "hasTimedOut not false")
assert(values.nonEmpty, "Some value is expected")
stateUpdates(state)
Iterator.empty
}
testStateUpdate(
testTimeoutUpdates = false, mapGroupsFunc, timeoutConf,
priorState, priorTimeoutTimestamp, expectedState, expectedTimeoutTimestamp)
}
}
def testStateUpdateWithTimeout(
testName: String,
stateUpdates: GroupState[Int] => Unit,
timeoutConf: GroupStateTimeout,
priorTimeoutTimestamp: Long,
expectedState: Option[Int],
expectedTimeoutTimestamp: Long = NO_TIMESTAMP): Unit = {
test(s"StateStoreUpdater - updates for timeout - $testName") {
val mapGroupsFunc = (key: Int, values: Iterator[Int], state: GroupState[Int]) => {
assert(state.hasTimedOut === true, "hasTimedOut not true")
assert(values.isEmpty, "values not empty")
stateUpdates(state)
Iterator.empty
}
testStateUpdate(
testTimeoutUpdates = true, mapGroupsFunc, timeoutConf = timeoutConf,
preTimeoutState, priorTimeoutTimestamp, expectedState, expectedTimeoutTimestamp)
}
}
def testStateUpdate(
testTimeoutUpdates: Boolean,
mapGroupsFunc: (Int, Iterator[Int], GroupState[Int]) => Iterator[Int],
timeoutConf: GroupStateTimeout,
priorState: Option[Int],
priorTimeoutTimestamp: Long,
expectedState: Option[Int],
expectedTimeoutTimestamp: Long): Unit = {
val store = newStateStore()
val mapGroupsSparkPlan = newFlatMapGroupsWithStateExec(
mapGroupsFunc, timeoutConf, currentBatchTimestamp)
val updater = new mapGroupsSparkPlan.StateStoreUpdater(store)
val key = intToRow(0)
// Prepare store with prior state configs
if (priorState.nonEmpty) {
val row = updater.getStateRow(priorState.get)
updater.setTimeoutTimestamp(row, priorTimeoutTimestamp)
store.put(key.copy(), row.copy())
}
// Call updating function to update state store
val returnedIter = if (testTimeoutUpdates) {
updater.updateStateForTimedOutKeys()
} else {
updater.updateStateForKeysWithData(Iterator(key))
}
returnedIter.size // consume the iterator to force state updates
// Verify updated state in store
val updatedStateRow = store.get(key)
assert(
updater.getStateObj(updatedStateRow).map(_.toString.toInt) === expectedState,
"final state not as expected")
if (updatedStateRow.nonEmpty) {
assert(
updater.getTimeoutTimestamp(updatedStateRow.get) === expectedTimeoutTimestamp,
"final timeout timestamp not as expected")
}
}
def newFlatMapGroupsWithStateExec(
func: (Int, Iterator[Int], GroupState[Int]) => Iterator[Int],
timeoutType: GroupStateTimeout = GroupStateTimeout.NoTimeout,
batchTimestampMs: Long = NO_TIMESTAMP): FlatMapGroupsWithStateExec = {
MemoryStream[Int]
.toDS
.groupByKey(x => x)
.flatMapGroupsWithState[Int, Int](Append, timeoutConf = timeoutType)(func)
.logicalPlan.collectFirst {
case FlatMapGroupsWithState(f, k, v, g, d, o, s, m, _, t, _) =>
FlatMapGroupsWithStateExec(
f, k, v, g, d, o, None, s, m, t,
Some(currentBatchTimestamp), Some(currentBatchWatermark), RDDScanExec(g, null, "rdd"))
}.get
}
def testTimeoutDurationNotAllowed[T <: Exception: Manifest](state: GroupStateImpl[_]): Unit = {
val prevTimestamp = state.getTimeoutTimestamp
intercept[T] { state.setTimeoutDuration(1000) }
assert(state.getTimeoutTimestamp === prevTimestamp)
intercept[T] { state.setTimeoutDuration("2 second") }
assert(state.getTimeoutTimestamp === prevTimestamp)
}
def testTimeoutTimestampNotAllowed[T <: Exception: Manifest](state: GroupStateImpl[_]): Unit = {
val prevTimestamp = state.getTimeoutTimestamp
intercept[T] { state.setTimeoutTimestamp(2000) }
assert(state.getTimeoutTimestamp === prevTimestamp)
intercept[T] { state.setTimeoutTimestamp(2000, "1 second") }
assert(state.getTimeoutTimestamp === prevTimestamp)
intercept[T] { state.setTimeoutTimestamp(new Date(2000)) }
assert(state.getTimeoutTimestamp === prevTimestamp)
intercept[T] { state.setTimeoutTimestamp(new Date(2000), "1 second") }
assert(state.getTimeoutTimestamp === prevTimestamp)
}
def newStateStore(): StateStore = new MemoryStateStore()
val intProj = UnsafeProjection.create(Array[DataType](IntegerType))
def intToRow(i: Int): UnsafeRow = {
intProj.apply(new GenericInternalRow(Array[Any](i))).copy()
}
def rowToInt(row: UnsafeRow): Int = row.getInt(0)
}
object FlatMapGroupsWithStateSuite {
var failInTask = true
class MemoryStateStore extends StateStore() {
import scala.collection.JavaConverters._
private val map = new ConcurrentHashMap[UnsafeRow, UnsafeRow]
override def iterator(): Iterator[(UnsafeRow, UnsafeRow)] = {
map.entrySet.iterator.asScala.map { case e => (e.getKey, e.getValue) }
}
override def filter(c: (UnsafeRow, UnsafeRow) => Boolean): Iterator[(UnsafeRow, UnsafeRow)] = {
iterator.filter { case (k, v) => c(k, v) }
}
override def get(key: UnsafeRow): Option[UnsafeRow] = Option(map.get(key))
override def put(key: UnsafeRow, newValue: UnsafeRow): Unit = map.put(key, newValue)
override def remove(key: UnsafeRow): Unit = { map.remove(key) }
override def remove(condition: (UnsafeRow) => Boolean): Unit = {
iterator.map(_._1).filter(condition).foreach(map.remove)
}
override def commit(): Long = version + 1
override def abort(): Unit = { }
override def id: StateStoreId = null
override def version: Long = 0
override def updates(): Iterator[StoreUpdate] = { throw new UnsupportedOperationException }
override def numKeys(): Long = map.size
override def hasCommitted: Boolean = true
}
}
| MLnick/spark | sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala | Scala | apache-2.0 | 37,479 |
package engine
import com.typesafe.scalalogging.LazyLogging
import engine.Task.{TaskContext, TaskState, TreeNode}
object Task {
case class TaskContext(engine: Engine, workflow: Workflow, task: Task)
object TaskState extends Enumeration {
val New, Done, Running = Value
}
trait TreeNode[T] {
private var _parent: Option[TreeNode[T]] = None
private var _children: List[TreeNode[T]] = List()
def parent = _parent
def children = _children
def addChild(node: TreeNode[T]): Unit = {
require(node.parent.isEmpty)
node._parent = Some(this)
_children = node :: _children
}
def value: T
override def toString = {
val childrenStr = if (children.isEmpty) "" else " [\n" + children.map(_.toString).mkString("\n") + "\n]"
"(" + value.toString + childrenStr + ")"
}
}
}
final class Task(val id: Int, val taskDef: TaskDefinition, wf: Workflow) extends TreeNode[Task] with Cache with LazyLogging {
private var _state: TaskState.Value = TaskState.New
def state = _state
def workflow: Workflow = wf
def value: Task = this
logger.debug("Created task \\"%s\\" with id %d".format(this, id))
def execute(implicit engine: Engine): Option[ActionResult] = {
_state = TaskState.Running
val context = TaskContext(engine, workflow, this)
val actionResult: Option[ActionResult] = taskDef.action(context)
if (actionResult.isDefined) {
logger.debug("Executed task \\"%s\\" with id %d".format(taskDef.name, id))
_state = TaskState.Done
}
actionResult
}
def isExecuted: Boolean = Set(TaskState.Done) contains _state
override def toString = taskDef.name
}
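/* Illustrative sketch (hypothetical, not part of the original file): a minimal
 * TreeNode implementation showing how addChild wires up the parent/children
 * links and how toString renders the tree. The LabelNode class and the demo
 * values are invented for illustration only. */
object TreeNodeExample extends App {
  import Task.TreeNode

  // A trivial node type; 'value' just returns the label.
  final class LabelNode(label: String) extends TreeNode[String] {
    def value: String = label
  }

  val root = new LabelNode("root")
  val left = new LabelNode("left")
  val right = new LabelNode("right")
  root.addChild(left)
  root.addChild(right)

  assert(left.parent.contains(root) && root.children.size == 2)
  println(root) // children print in reverse insertion order: (root [ (right) (left) ])
}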
| mpod/scala-workflow | backend/src/main/scala/engine/Task.scala | Scala | gpl-3.0 | 1,673 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.projection
import java.net.URLEncoder
import akka.actor.ActorRef
import akka.actor.Props
import akka.cluster.ddata.DistributedData
import akka.actor.Terminated
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.annotation.ApiMayChange
import akka.cluster.ddata.LWWMap
import akka.cluster.ddata.LWWMapKey
import akka.cluster.ddata.Replicator.Changed
import akka.cluster.ddata.Replicator.Subscribe
import akka.cluster.ddata.Replicator.Update
import akka.cluster.ddata.Replicator.UpdateFailure
import akka.cluster.ddata.Replicator.UpdateSuccess
import akka.cluster.ddata.Replicator.WriteConsistency
import akka.cluster.ddata.Replicator.WriteMajority
import akka.cluster.ddata.SelfUniqueAddress
import com.lightbend.lagom.projection.Started
import com.lightbend.lagom.projection.State
import com.lightbend.lagom.projection.Status
import com.lightbend.lagom.projection.Stopped
import scala.concurrent.duration._
import com.lightbend.lagom.projection.ProjectionSerializable
@ApiMayChange
object ProjectionRegistryActor {
def props = Props(new ProjectionRegistryActor)
type WorkerKey = String // a WorkerKey is a unique String representing WorkerCoordinates
type ProjectionName = String
case class WorkerRequestCommand(coordinates: WorkerCoordinates, requestedStatus: Status)
case class ProjectionRequestCommand(projectionName: ProjectionName, requestedStatus: Status)
case class RegisterProjection(projectionName: ProjectionName, tagNames: Set[String])
case class ReportForDuty(coordinates: WorkerCoordinates)
case class WorkerCoordinates(projectionName: ProjectionName, tagName: String) extends ProjectionSerializable {
val asKey: WorkerKey = s"$projectionName-$tagName"
val workerActorName: String = URLEncoder.encode(asKey, "utf-8")
val supervisingActorName: String = URLEncoder.encode(s"backoff-$asKey", "utf-8")
}
// Read-Only command. Returns `State` representing the state of
// the projection workers as currently seen in this node. It contains both the
// requested and the observed status for each worker (both are eventually consistent
// values since both may have been edited concurrently in other nodes).
case object GetState
}
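/* Illustrative sketch (hypothetical, not part of the original file): shows how a
 * WorkerCoordinates value derives its registry key and its URL-encoded actor
 * names. The projection and tag names below are invented for illustration only. */
private object WorkerCoordinatesExample extends App {
  import ProjectionRegistryActor.WorkerCoordinates

  val coordinates = WorkerCoordinates("shopping-cart-view", "tag-1")
  println(coordinates.asKey)                // "shopping-cart-view-tag-1"
  println(coordinates.workerActorName)      // URL-encoded key, safe to use as an actor name
  println(coordinates.supervisingActorName) // URL-encoded "backoff-shopping-cart-view-tag-1"
}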
class ProjectionRegistryActor extends Actor with ActorLogging {
import ProjectionRegistryActor._
val replicator: ActorRef = DistributedData(context.system).replicator
implicit val node: SelfUniqueAddress = DistributedData(context.system).selfUniqueAddress
// All usages of `data` in this actor are unaffected by `UpdateTimeout` (see
// https://github.com/lagom/lagom/pull/2208). In general use, WriteMajority(5 sec) could be an issue
// in big clusters.
// TODO (nice to have): make the 5 second timeout configurable.
val writeConsistency: WriteConsistency = WriteMajority(timeout = 5.seconds)
// (a) Replicator contains data of all workers (requested and observed status, plus a name index)
private val RequestedStatusDataKey: LWWMapKey[WorkerCoordinates, Status] =
LWWMapKey[WorkerCoordinates, Status]("projection-registry-requested-status")
private val ObservedStatusDataKey: LWWMapKey[WorkerCoordinates, Status] =
LWWMapKey[WorkerCoordinates, Status]("projection-registry-observed-status")
replicator ! Subscribe(RequestedStatusDataKey, self)
replicator ! Subscribe(ObservedStatusDataKey, self)
// (b) Keep a local copy to simplify the implementation of some ops
var requestedStatusLocalCopy: Map[WorkerCoordinates, Status] = Map.empty[WorkerCoordinates, Status]
var observedStatusLocalCopy: Map[WorkerCoordinates, Status] = Map.empty[WorkerCoordinates, Status]
// (c) Actor indices contain only data of workers running locally
var actorIndex: Map[WorkerCoordinates, ActorRef] = Map.empty[WorkerCoordinates, ActorRef]
// required to handle Terminate(deadActor)
var reversedActorIndex: Map[ActorRef, WorkerCoordinates] = Map.empty[ActorRef, WorkerCoordinates]
// (d) this index helps locate worker coordinates given a projection name. It is not a CRDT, assuming
// all nodes know all projections and use the same tag names. This is filled when projection
// drivers register the projection (which happens even before ClusterDistribution is started in
// the local node).
var nameIndex: Map[ProjectionName, Set[WorkerCoordinates]] = Map.empty[ProjectionName, Set[WorkerCoordinates]]
// (e) Users may request a status before the projection was registered, in that case, we stash
// the request in this map.
var unknownProjections: Map[ProjectionName, Status] = Map.empty[ProjectionName, Status]
val DefaultRequestedStatus: Status = {
val autoStartEnabled = context.system.settings.config.getBoolean("lagom.projection.auto-start.enabled")
if (autoStartEnabled) Started
else Stopped
}
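// A sketch of the corresponding setting (assumed to live in the service's application.conf;
// the key matches the one read above):
//
//   lagom.projection.auto-start.enabled = true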
override def receive: Receive = {
case ReportForDuty(coordinates) =>
log.debug(s"Registering worker $coordinates to [${sender().path.toString}]")
// keep track
actorIndex = actorIndex.updated(coordinates, sender())
reversedActorIndex = reversedActorIndex.updated(sender, coordinates)
// when a worker registers, we must reply with the requested status (if it's been set already, or DefaultRequestedStatus if not).
val initialStatus = requestedStatusLocalCopy.getOrElse(coordinates, DefaultRequestedStatus)
log.debug(s"Setting initial status [$initialStatus] on worker $coordinates [${sender().path.toString}]")
sender ! initialStatus
// watch
context.watch(sender)
case GetState =>
sender ! State.fromReplicatedData(
nameIndex,
requestedStatusLocalCopy,
observedStatusLocalCopy,
DefaultRequestedStatus,
Stopped // unless observed somewhere (and replicated), we consider a worker stopped.
)
case RegisterProjection(projectionName, tagNames) =>
log.debug(s"Registering projection $projectionName for tags $tagNames.")
nameIndex += (projectionName -> tagNames.map {
WorkerCoordinates(projectionName, _)
})
// If we have stashed requests for this projection name, unstash them:
unknownProjections.get(projectionName).foreach { requestedStatus =>
self ! ProjectionRequestCommand(projectionName, requestedStatus)
unknownProjections -= projectionName
}
// XyzRequestCommands come from `ProjectionRegistry` and contain a requested Status
case command: WorkerRequestCommand =>
log.debug(s"Propagating request $command.")
updateStateChangeRequests(command.coordinates, command.requestedStatus)
case command: ProjectionRequestCommand =>
log.debug(s"Propagating request $command.")
val projectionWorkers: Option[Set[WorkerCoordinates]] = nameIndex.get(command.projectionName)
projectionWorkers match {
case Some(workerSet) =>
workerSet.foreach(coordinates => updateStateChangeRequests(coordinates, command.requestedStatus))
case None => unknownProjections += (command.projectionName -> command.requestedStatus)
}
// A bare Status comes from a worker and contains an observed Status
case observedStatus: Status =>
log.debug(s"Observed [${sender().path.toString}] as $observedStatus.")
reversedActorIndex.get(sender()) match {
case Some(workerName) => updateObservedStates(workerName, observedStatus)
case None => log.error(s"Unknown actor [${sender().path.toString}] reports status $observedStatus.")
}
case UpdateSuccess(_, _) => //noop: the update op worked nicely, nothing to see here
// There are three types of UpdateFailure and 3 target CRDTs, totalling 9 possible cases, of
// which only UpdateTimeout(_,_) is relevant.
// case UpdateTimeout(ObservedStatusDataKey, _) =>
// the observed status changes very rarely, but when it changes it may change multiple times in a short
// period. The fast/often changes probably happen on a cluster rollup, up/down-scale, etc... In any case,
// data will eventually become stable (unchanging), in which case it is eventually gossiped and the last writer wins.
// case UpdateTimeout(RequestedStatusDataKey, _) =>
// the request status changes very rarely. It is safe to ignore timeouts when using WriteMajority because
// data is eventually gossiped.
// case UpdateTimeout(NameIndexDataKey, _) =>
// the data in the nameIndex only grows until reaching a full hardcoded representation. It is safe to
// ignore timeouts when using WriteMajority because data is eventually gossiped.
// In any other UpdateFailure cases, noop:
// - ModifyFailure: using LWWMap with `put` as the modify operation will never fail, latest wins.
// - StoreFailure: doesn't apply because we don't use durable CRDTs
case _: UpdateFailure[_] =>
// Changed is not sent for every single change but, instead, it is batched on the replicator. This means
// multiple changes will be notified at once. This is especially relevant when joining a cluster where,
// instead of getting an avalanche of Changed messages with all the history of the CRDT, only a single
// message with the latest state is received.
case changed @ Changed(RequestedStatusDataKey) => {
val replicatedEntries = changed.get(RequestedStatusDataKey).entries
val diffs: Set[(WorkerCoordinates, Status)] = replicatedEntries.toSet.diff(requestedStatusLocalCopy.toSet)
// when the requested status changes, we must forward the new value to the appropriate actor
// if it's one of the workers in the local actorIndex
diffs
.foreach {
case (workerName, requestedStatus) =>
log.debug(s"Remotely requested worker [$workerName] as [$requestedStatus].")
actorIndex.get(workerName).foreach { workerRef =>
log.debug(
s"Setting requested status [$requestedStatus] on worker $workerName [${workerRef.path.toString}]"
)
workerRef ! requestedStatus
}
}
requestedStatusLocalCopy = replicatedEntries
}
case changed @ Changed(ObservedStatusDataKey) =>
observedStatusLocalCopy = changed.get(ObservedStatusDataKey).entries
case Terminated(deadActor) =>
log.debug(s"Worker ${deadActor.path.name} died. Marking it as Stopped.")
// when a watched actor dies, we mark it as stopped. It will eventually
// respawn (thanks to EnsureActive) and come back to its requested status.
reversedActorIndex.get(deadActor).foreach { coordinates =>
updateObservedStates(coordinates, Stopped)
}
// ... and then update indices and stop watching
actorIndex = actorIndex - reversedActorIndex(deadActor)
reversedActorIndex = reversedActorIndex - deadActor
}
private def updateStateChangeRequests(coordinates: WorkerCoordinates, requested: Status): Unit = {
replicator ! Update(RequestedStatusDataKey, LWWMap.empty[WorkerCoordinates, Status], writeConsistency)(
_.:+(coordinates -> requested)
)
}
private def updateObservedStates(coordinates: WorkerCoordinates, status: Status): Unit = {
replicator ! Update(ObservedStatusDataKey, LWWMap.empty[WorkerCoordinates, Status], writeConsistency)(
_.:+(coordinates -> status)
)
}
}
| rcavalcanti/lagom | projection/core/src/main/scala/com/lightbend/lagom/internal/projection/ProjectionRegistryActor.scala | Scala | apache-2.0 | 11,502 |
/*
* Copyright (C) 2012 Pavel Fatin <http://pavelfatin.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.pavelfatin.fs
package manager
package action
import swing.{Frame, Component, Dialog}
import java.io.{OutputStream, InputStream}
import java.util.Calendar
abstract class AbstractAction(context: ActionContext) extends Function0[Unit] {
protected val frame: Frame = context.frame
protected val parent: Component = Component.wrap(frame.peer.getRootPane)
protected def active: FolderPanel = if (context.left.active) context.left else context.right
protected def inactive: FolderPanel = if (context.left.active) context.right else context.left
protected def kindOf(entry: FileSystemEntry): String = entry match {
case _: File => "file"
case _: Directory => "folder"
}
protected def describe(entry: FileSystemEntry): String =
s"""${kindOf(entry)} "${entry.name}""""
protected def lengthOf(entry: FileSystemEntry): Long = entry match {
case file: File => file.length
case directory: Directory =>
val (directories, files) = directory.entries
(files ++ directories).map(lengthOf).sum
}
protected def afterConfirmation[T](message: String, title: String)(block: => T): Option[T] =
if (confirmed(message, title)) Some(block) else None
protected def confirmed(message: String, title: String): Boolean =
Dialog.showConfirmation(parent, message, title) == Dialog.Result.Yes
protected def afterEditing(text: String, message: String, title: String)(block: String => Unit) {
Dialog.showInput(parent, message, title, initial = text).foreach(block)
}
protected def afterCheckingClashes(name: String, directory: Directory, path: String)(block: => Unit) {
val (directories, files) = directory.entries
val clashes = (directories ++ files).exists(_.name == name)
if (clashes) {
val message = s"""Entry with name "$name" already exists in $path"""
Dialog.showMessage(parent, message, "Unable to proceed", Dialog.Message.Warning)
} else {
block
}
}
protected def afterCheckingFreeSpace(entry: FileSystemEntry, free: Long, path: String)(block: => Unit) {
if (lengthOf(entry) > free) {
val message = s"""Not enough space for ${describe(entry)} in $path"""
Dialog.showMessage(parent, message, "Unable to proceed", Dialog.Message.Warning)
} else {
block
}
}
protected def transferWithProgress(total: Long, in: InputStream, out: OutputStream, message: String) {
withProgress(message, total) { callback =>
val numbers = Iterator.iterate(1L)(_ + 1)
Iterator.continually(in.read()).takeWhile(_ != -1).zip(numbers).foreach { case (b, i) =>
out.write(b)
callback(i)
}
}
}
protected def withProgress(message: String, total: Long)(block: (Long => Unit) => Unit) {
ProgressDialog.withProgress(context.frame, message, total)(block)
}
protected def delete(directory: Directory) {
val (directories, files) = directory.entries
files.foreach(_.delete())
directories.foreach(delete)
directory.delete()
}
protected def copy(source: File, destination: Directory, path: String, action: String)
(transform: Calendar => Calendar) {
val date = transform(source.date)
val file = destination.createFile(source.name, date)
file.hidden = source.hidden
source.readIn { in =>
file.writeIn { out =>
transferWithProgress(source.length, in, out, s"$action ${source.name} to $path")
}
}
file.date = date // by-pass date changes performed by OS
}
protected def copy(source: Directory, destination: Directory, path: String, action: String)
(transform: Calendar => Calendar) {
val date = transform(source.date)
val directory = destination.createDirectory(source.name, date)
directory.hidden = source.hidden
val (directories, files) = source.entries
val directoryPath = path + s"${directory.name}/"
files.foreach(copy(_, directory, directoryPath, action)(transform))
directories.foreach(copy(_, directory, directoryPath, action)(transform))
directory.date = date // by-pass date changes performed by OS
}
}
| pavelfatin/toyfs | src/main/scala/com/pavelfatin/fs/manager/action/AbstractAction.scala | Scala | gpl-3.0 | 4,829 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa
import org.geotools.factory.CommonFactoryFinder
import org.geotools.filter.text.ecql.ECQL
import org.locationtech.geomesa.filter.expression.AttributeExpression
import org.locationtech.geomesa.filter.expression.AttributeExpression.{FunctionLiteral, PropertyLiteral}
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter._
import org.opengis.filter.expression.{Expression, Function, Literal, PropertyName}
import org.opengis.filter.spatial._
import org.opengis.filter.temporal._
import scala.collection.JavaConversions._
import scala.util.Try
package object filter {
// Claim: FilterFactory implementations seem to be thread-safe away from
// 'namespace' and 'function' calls.
// As such, we can get away with using a shared Filter Factory.
implicit val ff: FilterFactory2 = CommonFactoryFinder.getFilterFactory2
def filterToString(filter: Filter): String = Try(ECQL.toCQL(filter)).getOrElse(filter.toString)
def filterToString(filter: Option[Filter]): String = filter.map(filterToString).getOrElse("None")
def filtersToString(filters: Seq[Filter]): String = filters.map(filterToString).mkString(", ")
/**
* This function rewrites an org.opengis.filter.Filter in terms of a top-level OR with child filters which
* 1) do not contain further ORs, (i.e., ORs bubble up)
* 2) only contain at most one AND which is at the top of their 'tree'
*
* Note that this further implies that NOTs have been 'pushed down' and do not have ANDs or ORs as children.
*
* In boolean logic, this form is called disjunctive normal form (DNF).
*
* @param filter An arbitrary filter.
* @return A filter in DNF (described above).
*/
def rewriteFilterInDNF(filter: Filter)(implicit ff: FilterFactory): Filter = {
val ll = logicDistributionDNF(filter)
if (ll.size == 1) {
if (ll.head.size == 1) {
ll.head.head
} else {
ff.and(ll.head)
}
} else {
val children = ll.map { l =>
l.size match {
case 1 => l.head
case _ => ff.and(l)
}
}
ff.or(children)
}
}
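// For illustration, a hypothetical rewrite (ECQL is used only to keep the example short;
// the attribute names are made up):
//
//   val f = ECQL.toFilter("(A = 1 OR B = 2) AND C = 3")
//   rewriteFilterInDNF(f)
//   // equivalent to: (A = 1 AND C = 3) OR (B = 2 AND C = 3)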
/**
*
* @param x: An arbitrary @org.opengis.filter.Filter
* @return A List[ List[Filter] ] where the inner List of Filters are to be joined by
* Ands and the outer list combined by Ors.
*/
private[filter] def logicDistributionDNF(x: Filter): List[List[Filter]] = x match {
case or: Or => or.getChildren.toList.flatMap(logicDistributionDNF)
case and: And => and.getChildren.foldRight (List(List.empty[Filter])) {
(f, dnf) => for {
a <- logicDistributionDNF (f)
b <- dnf
} yield a ++ b
}
case not: Not =>
not.getFilter match {
case and: And => logicDistributionDNF(deMorgan(and))
case or: Or => logicDistributionDNF(deMorgan(or))
case _: Filter => List(List(not))
}
case f: Filter => List(List(f))
}
/**
* This function rewrites an org.opengis.filter.Filter in terms of a top-level AND with child filters which
* 1) do not contain further ANDs, (i.e., ANDs bubble up)
* 2) only contain at most one OR which is at the top of their 'tree'
*
* Note that this further implies that NOTs have been 'pushed down' and do not have ANDs or ORs as children.
*
* In boolean logic, this form is called conjunctive normal form (CNF).
*
* The main use case for this function is to aid in splitting filters between a combination of a
* GeoMesa data store and some other data store. This is done with the AndSplittingFilter class.
* In the examples below, anything with "XAttr" is assumed to be a filter that CANNOT be answered
* through GeoMesa. In having a filter split on the AND, the portion of the filter that GeoMesa
* CAN answer will be applied in GeoMesa, returning a result set, and then the portion that GeoMesa CANNOT
* answer will be applied on that result set.
*
* Examples:
* 1. (
* (GmAttr ILIKE 'test')
* OR
* (date BETWEEN '2014-01-01T10:30:00.000Z' AND '2014-01-02T10:30:00.000Z')
* )
* AND
* (XAttr ILIKE = 'example')
*
* Converting to CNF will allow easily splitting the filter on the AND into two children
* - one child is the "GmAttr" and "date" filters that can be answered with GeoMesa
* - one child is the "XAttr" filter that cannot be answered by GeoMesa
*
* In this case, the GeoMesa child filter will be processed first, and then the "XAttr" filter will
* be processed on the GeoMesa result set to return a subset of the GeoMesa results.
*
* 2. (GmAttr ILIKE 'test')
* AND
* (
* (date BETWEEN '2014-01-01T10:30:00.000Z' AND '2014-01-02T10:30:00.000Z')
* OR
* (XAttr1 ILIKE = 'example1')
* )
* AND
* (XAttr2 ILIKE = 'example2')
*
* Converting to CNF still allows easily splitting the filter on the AND into three children
* - one child is the "GmAttr" filter
* - one child is the "date" OR "XAttr1" filter
* - one child is the "XAttr2" filter
*
* In this case, the "GmAttr" child will be processed first, returning a result set from GeoMesa
* called RS1. Then, RS1 will be further filtered with the "date" predicate that can be handled
* by GeoMesa, returning a subset of RS1 called SS1. The additional filter which cannot be answered
* by GeoMesa, "XAttr1," will be applied to RS1 and return subset SS2. Finally, the final child,
* the "XAttr2" filter, which cannot be answered by GeoMesa, will be applied to both SS1 and SS2 to
* return SS3, a JOIN of SS1+SS2 filtered with "XAttr2."
*
* 3. (GmAttr ILIKE 'test')
* OR
* (XAttr ILIKE = 'example')
*
* This is the worst-case-scenario for a query that is answered through two data stores, both
* GeoMesa and some other store.
*
* CNF converts this to:
* - one child of "GmAttr" OR "XAttr"
*
* In this case, the "GmAttr" will return a result set, RS1. The reason this is the
* worst-case-scenario is because, to answer the "XAttr" portion of the query (which cannot be
* answered by GeoMesa), a "Filter.INCLUDE" A.K.A a full table scan (on Accumulo) A.K.A. every
* record in GeoMesa is necessary to find the results that satisfy the "XAttr" portion of the
* query. This will produce result set RS2. The returned results will be a JOIN of RS1+RS2.
*
*
* @param filter An arbitrary filter.
* @return A filter in CNF (described above).
*/
def rewriteFilterInCNF(filter: Filter)(implicit ff: FilterFactory): Filter = {
val ll = logicDistributionCNF(FilterHelper.simplify(filter))
if (ll.size == 1) {
if (ll.head.size == 1) ll.head.head else ff.or(ll.head)
} else {
val children = ll.map { l =>
l.size match {
case 1 => l.head
case _ => ff.or(l)
}
}
ff.and(children)
}
}
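// Analogous illustration for CNF (hypothetical attribute names):
//
//   val f = ECQL.toFilter("(A = 1 AND B = 2) OR C = 3")
//   rewriteFilterInCNF(f)
//   // equivalent to: (A = 1 OR C = 3) AND (B = 2 OR C = 3)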
/**
*
* @param x: An arbitrary @org.opengis.filter.Filter
* @return A List[ List[Filter] ] where the inner List of Filters are to be joined by
* Ors and the outer list combined by Ands.
*/
def logicDistributionCNF(x: Filter): List[List[Filter]] = x match {
case and: And => and.getChildren.toList.flatMap(logicDistributionCNF)
case or: Or => or.getChildren.foldRight (List(List.empty[Filter])) {
(f, cnf) => for {
a <- logicDistributionCNF(f)
b <- cnf
} yield a ++ b
}
case not: Not =>
not.getFilter match {
case and: And => logicDistributionCNF(deMorgan(and))
case or: Or => logicDistributionCNF(deMorgan(or))
case _: Filter => List(List(not))
}
case f: Filter => List(List(f))
}
/**
* The input is a filter which had a Not applied to it.
* This function uses deMorgan's law to 'push the Not down'
* as well as cancel adjacent Nots.
*/
private[filter] def deMorgan(f: Filter)(implicit ff: FilterFactory): Filter = f match {
case and: And => ff.or(and.getChildren.map(a => ff.not(a)))
case or: Or => ff.and(or.getChildren.map(a => ff.not(a)))
case not: Not => not.getFilter
}
type PartionedFilter = (Seq[Filter], Seq[Filter])
// Takes a filter and returns a Seq of Geometric/Topological filters under it.
// As a note, currently only 'good' filters are considered.
// The list of acceptable filters is defined by 'spatialFilters'.
// The notion of 'good' here means *good* to hand to the STII.
// Of particular note, we should not give negations to the STII.
def partitionSubFilters(filter: Filter, filterFilter: Filter => Boolean): PartionedFilter = {
filter match {
case a: And => decomposeAnd(a).partition(filterFilter)
case _ => Seq(filter).partition(filterFilter)
}
}
def partitionPrimarySpatials(filter: Filter, sft: SimpleFeatureType): PartionedFilter =
partitionSubFilters(filter, isPrimarySpatialFilter(_, sft))
def partitionPrimarySpatials(filters: Seq[Filter], sft: SimpleFeatureType): PartionedFilter =
filters.partition(isPrimarySpatialFilter(_, sft))
def partitionPrimaryTemporals(filters: Seq[Filter], sft: SimpleFeatureType): PartionedFilter = {
val isTemporal = isPrimaryTemporalFilter(_: Filter, sft)
filters.partition(isTemporal)
}
def partitionIndexedAttributes(filters: Seq[Filter], sft: SimpleFeatureType): PartionedFilter =
filters.partition(isIndexedAttributeFilter(_, sft))
def partitionID(filter: Filter): (Seq[Filter], Seq[Filter]) = partitionSubFilters(filter, isIdFilter)
def isIdFilter(f: Filter): Boolean = f.isInstanceOf[Id]
def isPrimarySpatialFilter(filter: Filter, sft: SimpleFeatureType): Boolean = {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
val geom = sft.getGeomField
val primary = filter match {
case f: BinarySpatialOperator =>
checkOrder(f.getExpression1, f.getExpression2)
.exists(p => p.name == null || p.name.isEmpty || p.name == geom)
case _ => false
}
primary && isSpatialFilter(filter)
}
// Defines the topological predicates we like for use in the STII.
def isSpatialFilter(f: Filter): Boolean = {
f match {
case _: BBOX => true
case _: DWithin => true
case _: Contains => true
case _: Crosses => true
case _: Intersects => true
case _: Overlaps => true
case _: Within => true
case _ => false // Beyond, Disjoint, Equals, Touches
}
}
// noinspection ExistsEquals
def isTemporalFilter(f: Filter, dtg: String): Boolean = getAttributeProperty(f).exists(_ == dtg)
def isPrimaryTemporalFilter(f: Filter, sft: SimpleFeatureType): Boolean = {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
sft.getDtgField.exists(isTemporalFilter(f, _))
}
def attrIndexed(name: String, sft: SimpleFeatureType): Boolean = {
import org.locationtech.geomesa.utils.geotools.RichAttributeDescriptors.RichAttributeDescriptor
Option(sft.getDescriptor(name)).exists(_.isIndexed)
}
def isIndexedAttributeFilter(f: Filter, sft: SimpleFeatureType): Boolean =
getAttributeProperty(f).exists(attrIndexed(_, sft))
private def getAttributeProperty(f: Filter): Option[String] = {
f match {
// equals checks
case f: PropertyIsEqualTo => checkOrder(f.getExpression1, f.getExpression2).map(_.name)
case f: TEquals => checkOrder(f.getExpression1, f.getExpression2).map(_.name)
// like checks
case f: PropertyIsLike =>
if (likeEligible(f)) {
val prop = f.getExpression.asInstanceOf[PropertyName].getPropertyName
Some(prop)
} else {
None
}
// range checks
case f: PropertyIsGreaterThan => checkOrder(f.getExpression1, f.getExpression2).map(_.name)
case f: PropertyIsGreaterThanOrEqualTo => checkOrder(f.getExpression1, f.getExpression2).map(_.name)
case f: PropertyIsLessThan => checkOrder(f.getExpression1, f.getExpression2).map(_.name)
case f: PropertyIsLessThanOrEqualTo => checkOrder(f.getExpression1, f.getExpression2).map(_.name)
case f: PropertyIsBetween =>
val prop = f.getExpression.asInstanceOf[PropertyName].getPropertyName
val (left, right) = (f.getLowerBoundary, f.getUpperBoundary) match {
case (l: Literal, r: Literal) => (l, r)
case _ => (null, null)
}
if (left != null && right != null) {
Some(prop)
} else {
None
}
// date range checks
case f: Before => checkOrder(f.getExpression1, f.getExpression2).map(_.name)
case f: After => checkOrder(f.getExpression1, f.getExpression2).map(_.name)
case f: During => checkOrder(f.getExpression1, f.getExpression2).map(_.name)
// not check - we only support 'not null' for an indexed attribute
case n: Not =>
Option(n.getFilter).collect { case f: PropertyIsNull => f }.map { f =>
f.getExpression.asInstanceOf[PropertyName].getPropertyName
}
case _ => None
}
}
// Currently pulling the wildcard values from the filter
// leads to inconsistent results...so use % as wildcard
// TODO try to use wildcard values from the Filter itself (https://geomesa.atlassian.net/browse/GEOMESA-309)
val MULTICHAR_WILDCARD = "%"
val SINGLE_CHAR_WILDCARD = "_"
val WILDCARD_SUFFIX = "\uffff\uffff\uffff"
/* Like queries that can be handled by current reverse index */
def likeEligible(filter: PropertyIsLike): Boolean = containsNoSingles(filter) && trailingOnlyWildcard(filter)
/* contains no single character wildcards */
private def containsNoSingles(filter: PropertyIsLike) =
!filter.getLiteral.replace("\\\\", "").replace(s"\\$SINGLE_CHAR_WILDCARD", "").contains(SINGLE_CHAR_WILDCARD)
private def trailingOnlyWildcard(filter: PropertyIsLike) =
(filter.getLiteral.endsWith(MULTICHAR_WILDCARD) &&
filter.getLiteral.indexOf(MULTICHAR_WILDCARD) == filter.getLiteral.length - MULTICHAR_WILDCARD.length) ||
filter.getLiteral.indexOf(MULTICHAR_WILDCARD) == -1
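// Examples of what the two checks above accept (hypothetical literals):
//   "foo%"  -> eligible (single trailing multi-char wildcard)
//   "foo"   -> eligible (no wildcard at all)
//   "%foo"  -> not eligible (the wildcard is not trailing)
//   "f_o%"  -> not eligible (contains a single-char wildcard)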
def decomposeBinary(f: Filter): Seq[Filter] =
f match {
case b: BinaryLogicOperator => b.getChildren.toSeq.flatMap(decomposeBinary)
case f: Filter => Seq(f)
}
def decomposeAnd(f: Filter): Seq[Filter] =
f match {
case b: And => b.getChildren.toSeq.flatMap(decomposeAnd)
case f: Filter => Seq(f)
}
def decomposeOr(f: Filter): Seq[Filter] =
f match {
case b: Or => b.getChildren.toSeq.flatMap(decomposeOr)
case f: Filter => Seq(f)
}
def orFilters(filters: Seq[Filter])(implicit ff: FilterFactory): Filter =
if (filters.size == 1) { filters.head } else { ff.or(filters) }
def andFilters(filters: Seq[Filter])(implicit ff: FilterFactory): Filter =
if (filters.size == 1) { filters.head } else { ff.and(filters) }
def orOption(filters: Seq[Filter])(implicit ff: FilterFactory): Option[Filter] =
if (filters.size < 2) { filters.headOption } else { Some(ff.or(filters)) }
def andOption(filters: Seq[Filter])(implicit ff: FilterFactory): Option[Filter] =
if (filters.size < 2) { filters.headOption } else { Some(ff.and(filters)) }
def mergeFilters(f1: Filter, f2: Filter): Filter = {
if (f1 == Filter.INCLUDE) {
f2
} else if (f2 == Filter.INCLUDE) {
f1
} else if (f1 == f2) {
f1
} else {
ff.and(f1, f2)
}
}
/**
* Checks the order of properties and literals in the expression
*
* @param one first expression
* @param two second expression
* @return (prop, literal, whether the order was flipped)
*/
def checkOrder(one: Expression, two: Expression): Option[AttributeExpression] = {
(one, two) match {
case (p: PropertyName, l: Literal) => Some(PropertyLiteral(p.getPropertyName, l, flipped = false))
case (l: Literal, p: PropertyName) => Some(PropertyLiteral(p.getPropertyName, l, flipped = true))
case (f: Function, l: Literal) => attribute(f).map(FunctionLiteral(_, f, l, flipped = false))
case (l: Literal, f: Function) => attribute(f).map(FunctionLiteral(_, f, l, flipped = true))
case (p: PropertyName, f: Function) if attribute(f).isEmpty =>
Some(PropertyLiteral(p.getPropertyName, ff.literal(f.evaluate(null)), flipped = false))
case (f: Function, p: PropertyName) if attribute(f).isEmpty =>
Some(PropertyLiteral(p.getPropertyName, ff.literal(f.evaluate(null)), flipped = true))
case (f1: Function, f2: Function) =>
(attribute(f1), attribute(f2)) match {
case (Some(a), None) => Some(FunctionLiteral(a, f1, ff.literal(f2.evaluate(null))))
case (None, Some(a)) => Some(FunctionLiteral(a, f2, ff.literal(f1.evaluate(null))))
case _ => None
}
case _ => None
}
}
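// A usage sketch (hypothetical property name and value):
//
//   checkOrder(ff.property("name"), ff.literal(5))
//   // => Some(PropertyLiteral("name", <literal 5>, flipped = false))
//   checkOrder(ff.literal(5), ff.property("name"))
//   // => Some(PropertyLiteral("name", <literal 5>, flipped = true))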
/**
* Checks the order of properties and literals in the expression - if the expression does not contain
* a property and a literal, throws an exception.
*
* @param one first expression
* @param two second expression
* @return (prop, literal, whether the order was flipped)
*/
def checkOrderUnsafe(one: Expression, two: Expression): AttributeExpression = {
checkOrder(one, two).getOrElse {
throw new RuntimeException(s"Expressions did not contain valid property and literal: $one, $two")
}
}
private def attribute(f: Function): Option[String] =
f.getParameters.collect { case p: PropertyName => p.getPropertyName }.headOption
}
| boundlessgeo/geomesa | geomesa-filter/src/main/scala/org/locationtech/geomesa/filter/package.scala | Scala | apache-2.0 | 18,314 |
package proofpeer.metis
import ClauseInstances._
import LiteralInstances._
import proofpeer.metis.util.Fun._
import proofpeer.metis.util._
import scala.language.higherKinds
import scala.language.implicitConversions
import scalaz._
import Scalaz._
/** A logical kernel for trusted resolution certificates.
*
* @tparam V The alphabet from which variable names are drawn
* @tparam F The alphabet from which functor names are drawn
* @tparam P The alphabet from which predicate names are drawn
*/
sealed class Kernel[V:Order,F:Order,P:Order] {
sealed abstract class Inference {
def depth: Int =
1 + (this match {
case RemoveSym(thm) => thm.rule.depth
case Irreflexive(thm) => thm.rule.depth
case InfSubst(_,thm) => thm.rule.depth
case Resolve(_,thm1,thm2) => thm1.rule.depth.max(thm2.rule.depth)
case _ => 0
})
def size: Int =
1 + (this match {
case RemoveSym(thm) => thm.rule.size
case Irreflexive(thm) => thm.rule.size
case InfSubst(_,thm) => thm.rule.size
case Resolve(_,thm1,thm2) => thm1.rule.size + thm2.rule.size
case _ => 0
})
}
case class Axiom() extends Inference
case class Assume() extends Inference
case class Refl() extends Inference
case class Equality(
p: Literal.TermCursor[V,F,P],
tm: Term[V,F]) extends Inference
case class RemoveSym(thm: Thm) extends Inference
case class Irreflexive(thm: Thm) extends Inference
case class InfSubst(θ: Subst[V,Term[V,F]], thm: Thm) extends Inference
case class Resolve(atom: Atom[V,F,P], pos: Thm, neg: Thm) extends Inference
case class Thm private[Kernel](clause: Clause[V,F,P], rule: Inference) {
def isTautology = clause.isTautology
def isContradiction = clause.isContradiction
/** this
* -------- subst θ
* this[θ]
*/
def subst(θ: Subst[V,Term[V,F]]): Thm = {
val newCl = clause.subst(θ)
if (newCl == clause)
this
else Thm(newCl,InfSubst(θ,this))
}
/** C ∨ ~(x = x)
* ------------------- removeIrrefl
* C
*/
def removeIrrefl: Thm = {
val newCl = clause.filter {
case IrreflLit(_) => false
case _ => true
}
if (newCl == clause.lits) {
this
}
else new Thm(Clause(newCl), Irreflexive(this))
}
/** (x = y) ∨ (y = x) ∨ C
* ----------------------- removeSym
* C
*/
def removeSym: Thm = {
val newCl = ISetExtra.distinctBy(clause) {
case (Literal(p1,Eql(x1,y1)),Literal(p2,Eql(x2,y2))) =>
x1 == y2 && x2 == y1 && p1 == p2
case _ => false
}
if (newCl == clause.lits) {
this
}
else new Thm(Clause(newCl), RemoveSym(this))
}
/** Cursors to the subterms of the largest literals in a theorem. */
def largestSubterms(litOrder: LiteralOrdering[V,F,P]) =
this.clause.largestSubterms(litOrder).map(TermCursor(this,_))
}
/**
* ------------------- axiom C
* C
*/
def axiom(cl: Clause[V,F,P]): Thm = Thm(cl, Axiom())
/**
* ------------------- assume p
* p ∨ ¬p
*/
def assume(lit: Literal[V,F,P]) =
Thm(Clause(ISet.fromList(List(lit,lit.negate))), Assume())
/**
* ------------------- refl x
* x = x
*/
def refl(tm: Term[V,F]) =
Thm(Clause(ISet.singleton(Literal(true,Eql(tm,tm)))),Refl())
/** L ∨ C M ∨ D
* ---------------- resolve L, where M is the negation of L.
* C ∨ D
*/
def resolve(lit: Literal[V,F,P], thm1: Thm, thm2: Thm): Option[Thm] = {
val negLit = lit.negate
if (thm1.clause.contains(lit) && thm2.clause.contains(negLit)) {
val cl = (thm1.clause delete lit) |+| (thm2.clause delete negLit)
Some(Thm(Clause(cl),
if (lit.isPositive) Resolve(lit.atom,thm1,thm2)
else Resolve(negLit.atom,thm2,thm1)))
}
else None
}
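// A small worked instance of the rule (hypothetical propositional literals p, q, r):
//   thm1 proves p ∨ q, thm2 proves ¬p ∨ r
//   resolve(p, thm1, thm2) yields Some(thm) with thm proving q ∨ r,
//   and None when either premise does not contain the pivot literal.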
/**
* ------------------ equality L t
* ~(s = t) ∨ ~L ∨ L'
* Where L' is the result of replacing the subterm s under L with t.
*/
def equality(tmC: Literal.TermCursor[V,F,P], t: Term[V,F]):
(Literal[V,F,P], Literal.TermCursor[V,F,P], Thm) = {
val s = tmC.get
val oldLit = tmC.top
val tmC_ = tmC.replaceWith(t)
val neq = Literal(false,Eql[V,F,P](s,t))
val newLit = tmC_.top
val newThm =
Thm(Clause(ISet.fromList(List(neq, oldLit.negate, newLit))),Equality(tmC,t))
(oldLit,tmC_,newThm)
}
// Derived rules
// =============
import proofpeer.metis.util.RichCollectionInstances._
/** ---------------------- sym x y
* ¬(x = y) ∨ (y = x)
*/
def sym(x: Term[V,F], y: Term[V,F]) = {
val xx = refl(x);
val xxLhs = (xx.clause.lits.getSingleton >>= {
lit => lit.topLeft
}).getOrBug(
"Refl should produce an equality")
val (_,_,yx) = equality(xxLhs,y)
resolve(xxLhs.top,xx,yx).getOrBug("Sym")
}
type ST[A] = State[Thm,A]
type M[A] = OptionT[ST,A]
import OptionT._
import syntax._
implicit class toTryM[A](x: M[A]) {
// Try to perform a computation, keeping the original value on failure.
def andMaybe(f: A => M[A]): M[A] =
x >>= (y => f(y).orElse(y.point[M]))
def getSuccess: M[Option[A]] =
State[Thm,Option[A]](s => x.run(s)).liftM
def orElse(y: M[A]) =
State[Thm,Option[A]](s => {
val (sx,x2) = x.run(s)
x2 match {
case None => y.run(sx)
case _ => (sx,x2)
}
})
}
implicit class toOptM[A](x: Option[A]) {
def liftOpt: M[A] = {
x.map(_.point[M]).getOrElse(none)
}
}
type TC = Literal.TermCursor[V,F,P]
def convRule(conv: Term[V,F] => Option[(Term[V,F],Thm)]): TC => M[TC] =
tmC => {
for (
oldThm <- get[Thm].liftM;
conved <- conv(tmC.get).liftOpt;
(newTm,eql) = conved;
if tmC.get != newTm;
eqLit = Literal(true,Eql[V,F,P](tmC.get,newTm));
(oldLit,newTmC,eqlThm) = equality(tmC,newTm);
thm = resolve(eqLit,eql,eqlThm).getOrBug(
"Invalid conversion");
// In case we try to convert the lhs of x = y with that same equation.
thm_ = if (oldLit == eqLit) eqlThm else thm;
newThm = resolve(oldLit,oldThm,thm_).getOrBug(
"Should be able to resolve on new literal");
() <- put[Thm](newThm).liftM)
yield newTmC
}
// Exhaustively convert this term and all its descendents.
def termConv(conv: Term[V,F] => Option[(Term[V,F],Thm)]):
Literal.TermCursor[V,F,P] => M[TC] = tmC => {
for (
// Go down and try to convert
downTmC <- (tmC.down.liftOpt >>= (termConv(conv)(_))).getSuccess;
// Go back up
nextTry = downTmC.map(_.up.getOrBug(
"Moved down. Must be able to move back up.")) getOrElse tmC;
// Convert this term and maybe loop. If that fails, convert right.
nextTmC <- ((convRule(conv)(nextTry) andMaybe (termConv(conv(_))))
orElse (nextTry.right.liftOpt >>= (termConv(conv)(_))))
.getSuccess;
// Conversion is successful if either of the previous steps were.
nextTmC2 <- (nextTmC orElse downTmC).liftOpt
)
yield nextTmC2
}
/**
* -------------, repeatTopDownConv P conv
* ~P v P' v C
*
* where P' is the result of repeatedly traversing P, applying the
* conversion conv to every subterm until the conversion fails.
* Returns None if no conversion took place.
*/
def repeatTopDownConvRule(
lit: Literal[V,F,P],
conv: Term[V,F] => Option[(Term[V,F], Thm)]) = {
for (
tmC <- lit.topLeft;
(thm,newTmC) = termConv(conv)(tmC).run(assume(lit));
newLit <- newTmC.map(_.top))
yield (thm,newLit)
}
/** Wrap a clause cursor to a subterm. */
case class TermCursor private[Kernel](
top: Thm,
clauseCursor: Clause.TermCursor[V,F,P]) {
def get = clauseCursor.get
def literal = clauseCursor.literal
def subst(θ: Subst[V,Term[V,F]]) = {
val cursor_ = clauseCursor.subst(θ)
TermCursor(Thm(cursor_.top,InfSubst(θ,top)),cursor_)
}
}
case class UnitThm(lit:Literal[V,F,P], thm: Thm)
/** Destruct a clause of exactly one literal. */
object UnitThm {
def getUnit(thm: Thm): Option[UnitThm] =
thm.clause.lits.getSingleton.map { UnitThm(_,thm) }
}
}
| proofpeer/proofpeer-metis | shared/src/main/scala/proofpeer/metis/Kernel.scala | Scala | mit | 8,571 |
package org.nisshiee.crowd4s
import org.specs2._, matcher.DataTables
import scalaz._, Scalaz._
class AuthorizeSpec extends Specification with DataTables { def is =
"Authorize" ^
"checkActive" ^
"if the group is active" ! e1^
"if the group is inactive" ! e2^
"if the group doesn't exist" ! e3^
p^
"searchAuthorizedGroup" ^
"if the user belongs to authorized group" ! e4^
"if the user belongs to authorized group but the group is inactive" ! e5^
"if the user doesn't belong to authorized group" ! e6^
p^
"checkAuthenticationResult" ^
"if the authentication failure" ! e7^
"if the authentication success" ! e8^
p^
"authorize" ^
"if authentication failure" ! e9^
"authorization success case" ! e10^
"authorization failure case" ! e11^
"if connection error" ! e12^
end
import Authorize._
def e1 = {
import NormalTestEnv._
implicit val c = case01
checkActive("group01").toEither must beRight.like {
case Some(Group("group01", _, _)) => ok
case _ => ko
}
}
def e2 = {
import NormalTestEnv._
implicit val c = case01
checkActive("group90").toEither must beRight.like {
case None => ok
case _ => ko
}
}
def e3 = {
import NormalTestEnv._
implicit val c = case01
checkActive("groupZZ").toEither must beLeft.like {
case NotFound(_, _) => ok
case _ => ko
}
}
val user01 = User("user01", "01", "User", "User01", "[email protected]", true)
val belonged = Seq("group01", "group02", "group90")
val group01 = Group("group01", "The first group", true)
val group02 = Group("group02", "The second group", true)
def e4 =
"authorized" | "resultGroup" |
Seq("group01") ! group01 |
Seq("group01", "group02") ! group01 |
Seq("group01", "group03") ! group01 |
Seq("group01", "group04") ! group01 |
Seq("group04", "group01") ! group01 |
Seq("group02", "group01") ! group02 |
Seq("group01", "group90") ! group01 |
Seq("group90", "group01") ! group01 |> { (authorized, resultGroup) =>
import NormalTestEnv._
implicit val c = case01
searchAuthorizedGroup(AuthorizedGroup(authorized), belonged)(user01) must equalTo {
AuthorizationResult.Success(user01, resultGroup).success
}
}
def e5 = {
import NormalTestEnv._
implicit val c = case01
searchAuthorizedGroup(AuthorizedGroup(Seq("group90")), belonged)(user01) must equalTo {
AuthorizationResult.AuthorizationFailure(user01).success
}
}
def e6 = {
import NormalTestEnv._
implicit val c = case01
searchAuthorizedGroup(AuthorizedGroup(Seq("group04")), belonged)(user01) must equalTo {
AuthorizationResult.AuthorizationFailure(user01).success
}
}
def e7 = {
import NormalTestEnv._
implicit val c = case01
val authorized = AuthorizedGroup(Seq("group01"))
checkAuthenticationResult(authorized)(AuthenticationResult.Failure("reason", "message")) must equalTo {
AuthorizationResult.AuthenticationFailure(AuthenticationResult.Failure("reason", "message")).success
}
}
def e8 = {
import NormalTestEnv._
implicit val c = case01
val authorized = AuthorizedGroup(Seq("group01"))
checkAuthenticationResult(authorized)(AuthenticationResult.Success(user01)) must equalTo {
AuthorizationResult.Success(user01, group01).success
}
}
def e9 =
"username" || "password" |
"user01" !! "passZZ" |
"userZZ" !! "passZZ" |> { (username, password) =>
import NormalTestEnv._
implicit val c = case01
implicit val authorized = Seq("group01") |> AuthorizedGroup.apply
Crowd.authorize(username, password).toEither must beRight.like {
case AuthorizationResult.AuthenticationFailure(_) => ok
case _ => ko
}
}
def e10 =
"authorized" | "resultGroup" |
Seq("group01") ! group01 |
Seq("group01", "group02") ! group01 |
Seq("group01", "group03") ! group01 |
Seq("group01", "group04") ! group01 |
Seq("group04", "group01") ! group01 |
Seq("group02", "group01") ! group02 |
Seq("group01", "group90") ! group01 |
Seq("group90", "group01") ! group01 |> { (authorized, resultGroup) =>
import NormalTestEnv._
implicit val c = case01
implicit val a = authorized |> AuthorizedGroup.apply
Crowd.authorize("user01", "pass01").toEither must beRight.like {
case AuthorizationResult.Success(user, group) =>
(user must equalTo(user01)) and (group must equalTo(resultGroup))
case _ => ko
}
}
def e11 =
"authorized" | "resultUser" |
Seq("group04") ! user01 |
Seq("group90") ! user01 |> { (authorized, resultUser) =>
import NormalTestEnv._
implicit val c = case01
implicit val a = authorized |> AuthorizedGroup.apply
Crowd.authorize("user01", "pass01").toEither must beRight.like {
case AuthorizationResult.AuthorizationFailure(user) => user must equalTo(resultUser)
case _ => ko
}
}
def e12 = {
import IrregularTestEnv._
"when" | "error" |
Authenticate ! ConnectionError |
GetGroupList ! ConnectionError |
GetGroup ! ConnectionError |> { (when, error) =>
implicit val c: Case = Set[Func](when)
implicit val authorized = Seq("group01") |> AuthorizedGroup.apply
Crowd.authorize("user01", "pass01").toEither must beLeft.like {
case ConnectionError => ok
case _ => ko
}
}
}
}
| nisshiee/crowd4s | src/test/scala/functions/AuthorizeSpec.scala | Scala | mit | 6,834 |
/*
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package play.api.libs.json
import scala.annotation.implicitNotFound
import scala.collection._
import scala.reflect.ClassTag
import com.fasterxml.jackson.databind.JsonNode
import com.fasterxml.jackson.databind.node.{ ArrayNode, ObjectNode }
import Json._
/**
* Json serializer: write an implicit to define a serializer for any type
*/
@implicitNotFound(
"No Json serializer found for type ${A}. Try to implement an implicit Writes or Format for this type."
)
trait Writes[-A] {
/**
* Convert the object into a JsValue
*/
def writes(o: A): JsValue
/**
* transforms the resulting JsValue using transformer function
*/
def transform(transformer: JsValue => JsValue): Writes[A] = Writes[A] { a => transformer(this.writes(a)) }
/**
* transforms resulting JsValue using Writes[JsValue]
*/
def transform(transformer: Writes[JsValue]): Writes[A] = Writes[A] { a => transformer.writes(this.writes(a)) }
}
@implicitNotFound(
"No Json serializer as JsObject found for type ${A}. Try to implement an implicit OWrites or OFormat for this type."
)
trait OWrites[-A] extends Writes[A] {
def writes(o: A): JsObject
}
object OWrites extends PathWrites with ConstraintWrites {
import play.api.libs.functional._
implicit val functionalCanBuildOWrites: FunctionalCanBuild[OWrites] = new FunctionalCanBuild[OWrites] {
def apply[A, B](wa: OWrites[A], wb: OWrites[B]): OWrites[A ~ B] = OWrites[A ~ B] { case a ~ b => wa.writes(a).deepMerge(wb.writes(b)) }
}
implicit val contravariantfunctorOWrites: ContravariantFunctor[OWrites] = new ContravariantFunctor[OWrites] {
def contramap[A, B](wa: OWrites[A], f: B => A): OWrites[B] = OWrites[B](b => wa.writes(f(b)))
}
def apply[A](f: A => JsObject): OWrites[A] = new OWrites[A] {
def writes(a: A): JsObject = f(a)
}
}
/**
* Default Serializers.
*/
object Writes extends PathWrites with ConstraintWrites with DefaultWrites {
val constraints: ConstraintWrites = this
val path: PathWrites = this
/*implicit val contravariantfunctorWrites:ContravariantFunctor[Writes] = new ContravariantFunctor[Writes] {
def contramap[A,B](wa:Writes[A], f: B => A):Writes[B] = Writes[B]( b => wa.writes(f(b)) )
}*/
def apply[A](f: A => JsValue): Writes[A] = new Writes[A] {
def writes(a: A): JsValue = f(a)
}
}
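// A minimal usage sketch (hypothetical case class, not part of this file):
//
//   case class Person(name: String, age: Int)
//   implicit val personWrites: Writes[Person] = Writes[Person] { p =>
//     Json.obj("name" -> p.name, "age" -> p.age)
//   }
//   Json.toJson(Person("Ada", 36))   // {"name":"Ada","age":36}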
/**
* Default Serializers.
*/
trait DefaultWrites {
/**
* Serializer for Int types.
*/
implicit object IntWrites extends Writes[Int] {
def writes(o: Int) = JsNumber(o)
}
/**
* Serializer for Short types.
*/
implicit object ShortWrites extends Writes[Short] {
def writes(o: Short) = JsNumber(o)
}
/**
* Serializer for Byte types.
*/
implicit object ByteWrites extends Writes[Byte] {
def writes(o: Byte) = JsNumber(o)
}
/**
* Serializer for Long types.
*/
implicit object LongWrites extends Writes[Long] {
def writes(o: Long) = JsNumber(o)
}
/**
* Serializer for Float types.
*/
implicit object FloatWrites extends Writes[Float] {
def writes(o: Float) = JsNumber(o)
}
/**
* Serializer for Double types.
*/
implicit object DoubleWrites extends Writes[Double] {
def writes(o: Double) = JsNumber(o)
}
/**
* Serializer for BigDecimal types.
*/
implicit object BigDecimalWrites extends Writes[BigDecimal] {
def writes(o: BigDecimal) = JsNumber(o)
}
/**
* Serializer for Boolean types.
*/
implicit object BooleanWrites extends Writes[Boolean] {
def writes(o: Boolean) = JsBoolean(o)
}
/**
* Serializer for String types.
*/
implicit object StringWrites extends Writes[String] {
def writes(o: String) = JsString(o)
}
/**
* Serializer for Jackson JsonNode
*/
implicit object JsonNodeWrites extends Writes[JsonNode] {
def writes(o: JsonNode): JsValue = JacksonJson.jsonNodeToJsValue(o)
}
/**
* Serializer for Array[T] types.
*/
implicit def arrayWrites[T: ClassTag](implicit fmt: Writes[T]): Writes[Array[T]] = new Writes[Array[T]] {
def writes(ts: Array[T]) = JsArray((ts.map(t => toJson(t)(fmt))).toList)
}
/**
* Serializer for Map[String,V] types.
*/
implicit def mapWrites[V](implicit fmtv: Writes[V]): OWrites[collection.immutable.Map[String, V]] = OWrites[collection.immutable.Map[String, V]] { ts =>
JsObject(ts.map { case (k, v) => (k, toJson(v)(fmtv)) }.toList)
}
/**
* Serializer for Traversables types.
*/
implicit def traversableWrites[A: Writes] = new Writes[Traversable[A]] {
def writes(as: Traversable[A]) = JsArray(as.map(toJson(_)).toSeq)
}
/**
* Serializer for JsValues.
*/
implicit object JsValueWrites extends Writes[JsValue] {
def writes(o: JsValue) = o
}
/**
* Serializer for Option.
*/
implicit def OptionWrites[T](implicit fmt: Writes[T]): Writes[Option[T]] = new Writes[Option[T]] {
def writes(o: Option[T]) = o match {
case Some(value) => fmt.writes(value)
case None => JsNull
}
}
/**
* Serializer for java.util.Date
* @param pattern the pattern used by SimpleDateFormat
*/
def dateWrites(pattern: String): Writes[java.util.Date] = new Writes[java.util.Date] {
def writes(d: java.util.Date): JsValue = JsString(new java.text.SimpleDateFormat(pattern).format(d))
}
/**
* Default Serializer java.util.Date -> JsNumber(d.getTime (nb of ms))
*/
implicit object DefaultDateWrites extends Writes[java.util.Date] {
def writes(d: java.util.Date): JsValue = JsNumber(d.getTime)
}
/**
* Serializer for org.joda.time.DateTime
* @param pattern the pattern used by SimpleDateFormat
*/
def jodaDateWrites(pattern: String): Writes[org.joda.time.DateTime] = new Writes[org.joda.time.DateTime] {
val df = org.joda.time.format.DateTimeFormat.forPattern(pattern)
def writes(d: org.joda.time.DateTime): JsValue = JsString(d.toString(df))
}
/**
* Default Serializer org.joda.time.DateTime -> JsNumber(d.getMillis (nb of ms))
*/
implicit object DefaultJodaDateWrites extends Writes[org.joda.time.DateTime] {
def writes(d: org.joda.time.DateTime): JsValue = JsNumber(d.getMillis)
}
/**
* Serializer for org.joda.time.LocalDate
* @param pattern the pattern used by org.joda.time.format.DateTimeFormat
*/
def jodaLocalDateWrites(pattern: String): Writes[org.joda.time.LocalDate] = new Writes[org.joda.time.LocalDate] {
val df = org.joda.time.format.DateTimeFormat.forPattern(pattern)
def writes(d: org.joda.time.LocalDate): JsValue = JsString(d.toString(df))
}
/**
* Default Serializer org.joda.time.LocalDate -> JsString(ISO8601 format (yyyy-MM-dd))
*/
implicit object DefaultJodaLocalDateWrites extends Writes[org.joda.time.LocalDate] {
def writes(d: org.joda.time.LocalDate): JsValue = JsString(d.toString)
}
/**
* Serializer for org.joda.time.LocalTime
* @param pattern the pattern used by org.joda.time.format.DateTimeFormat
*/
def jodaLocalTimeWrites(pattern: String): Writes[org.joda.time.LocalTime] = new Writes[org.joda.time.LocalTime] {
def writes(d: org.joda.time.LocalTime): JsValue = JsString(d.toString(pattern))
}
/**
* Default Serializer org.joda.time.LocalTime -> JsString(ISO8601 format (HH:mm:ss.SSS))
*/
implicit object DefaultJodaLocalTimeWrites extends Writes[org.joda.time.LocalTime] {
def writes(d: org.joda.time.LocalTime): JsValue = JsString(d.toString)
}
/**
* Serializer for java.sql.Date
* @param pattern the pattern used by SimpleDateFormat
*/
def sqlDateWrites(pattern: String): Writes[java.sql.Date] = new Writes[java.sql.Date] {
def writes(d: java.sql.Date): JsValue = JsString(new java.text.SimpleDateFormat(pattern).format(d))
}
/**
* Serializer for java.util.UUID
*/
implicit object UuidWrites extends Writes[java.util.UUID] {
def writes(u: java.util.UUID) = JsString(u.toString())
}
/**
* Serializer for scala.Enumeration by name.
*/
implicit def enumNameWrites[E <: Enumeration]: Writes[E#Value] = new Writes[E#Value] {
def writes(value: E#Value): JsValue = JsString(value.toString)
}
}
| jyotikamboj/container | pf-framework/src/play-json/src/main/scala/play/api/libs/json/Writes.scala | Scala | mit | 8,200 |
package controllers
import controllers.GalleryController.getQuery
import db.scalikejdbc.rewrite.ImageDbNew.Limit
import db.scalikejdbc.{MonumentJdbc, Round, SelectionJdbc, User}
import org.intracer.wmua._
import play.api.mvc.{Controller, EssentialAction, Request, Result}
import play.api.i18n.Messages.Implicits._
object LargeViewController extends Controller with Secured {
import play.api.Play.current
import play.api.libs.json._
implicit val selectionWrites = Json.writes[Selection]
implicit val imageWrites = Json.writes[Image]
implicit val iwrWrites = Json.writes[ImageWithRating]
def large(asUserId: Long, pageId: Long, region: String = "all", roundId: Long, rate: Option[Int], module: String) = withAuth() {
user =>
implicit request =>
show(pageId, user, asUserId, rate, region, roundId, module)
}
def largeCurrentUser(pageId: Long, region: String = "all", rate: Option[Int], module: String) = withAuth() {
user =>
implicit request =>
show(pageId, user, user.getId, rate, region, 0, module)
}
def rateByPageId(roundId: Long, pageId: Long, select: Int, region: String = "all",
rate: Option[Int] = None, module: String, criteria: Option[Int] = None): EssentialAction = withAuth() {
user =>
implicit request =>
val roundOption = Round.findById(roundId).filter(_.active)
roundOption.fold(Redirect(routes.GalleryController.list(user.getId, 1, region, roundId, rate))) { round =>
val result = checkLargeIndex(user, rate, pageId, region, round, module)
if (criteria.isEmpty) {
SelectionJdbc.rate(pageId = pageId, juryId = user.getId, roundId = round.getId, rate = select)
} else {
val selection = SelectionJdbc.findBy(pageId, user.getId, roundId).get
CriteriaRate.updateRate(selection.getId, criteria.get, select)
}
result
}
}
def removeImage(pageId: Long, roundId: Long, region: String = "all", rate: Option[Int], module: String) =
withAuth(roundPermission(User.ADMIN_ROLES, roundId)) {
user =>
implicit request =>
SelectionJdbc.removeImage(pageId, roundId)
val round = Round.findById(roundId).get
checkLargeIndex(user, rate, pageId, region, round, module)
}
def checkLargeIndex(asUser: User,
rate: Option[Int],
pageId: Long,
region: String,
round: Round,
module: String): Result = {
val subRegions = round.specialNomination.contains("Віа Регіа")
val query = getQuery(asUser.getId, rate, round.id, regions = Set(region).filter(_ != "all"), subRegions = subRegions)
val rank = query.imageRank(pageId)
val offset = Math.max(0, rank - 3)
val files = query.copy(limit = Some(Limit(Some(5), Some(offset)))).list()
val index = files.indexWhere(_.pageId == pageId)
val newIndex = if (!round.hasCriteria) {
if (index >= files.size - 1)
files.size - 2
else index + 1
} else {
index
}
val newPageId = if (newIndex < 0)
files.lastOption.fold(-1L)(_.pageId)
else files(newIndex).pageId
val roundId = round.getId
if (newIndex >= 0) {
Redirect(routes.LargeViewController.large(asUser.getId, newPageId, region, roundId, rate, module))
} else {
if (module == "gallery") {
Redirect(routes.GalleryController.list(asUser.getId, 1, region, roundId, rate))
} else {
Redirect(routes.GalleryController.byRate(asUser.getId, 1, region, roundId))
}
}
}
def show(pageId: Long,
user: User,
asUserId: Long,
rate: Option[Int],
region: String,
roundId: Long,
module: String)(implicit request: Request[Any]): Result = {
val maybeRound = if (roundId == 0) Round.activeRounds(user).headOption else Round.findById(roundId)
val round = maybeRound.get
val subRegions = round.specialNomination.contains("Віа Регіа")
val query = getQuery(asUserId, rate, round.id, regions = Set(region).filter(_ != "all"), subRegions = subRegions)
val rank = query.imageRank(pageId)
val offset = Math.max(0, rank - 3)
val files = query.copy(limit = Some(Limit(Some(5), Some(offset)))).list()
val index = files.indexWhere(_.pageId == pageId)
if (index < 0) {
return Redirect(if (files.nonEmpty) {
routes.LargeViewController.large(asUserId, files.head.pageId, region, round.getId, rate, module)
} else {
routes.GalleryController.list(asUserId, 1, region, round.getId, rate)
})
}
val selection = if (user.canViewOrgInfo(round)) {
SelectionJdbc.byRoundAndImageWithJury(round.getId, pageId)
} else Seq.empty
val page = index / (Pager.pageSize + 1) + 1
val byCriteria = if (round.hasCriteria && asUserId != 0) {
val criteria = {
val selection = SelectionJdbc.findBy(pageId, asUserId, roundId).get
CriteriaRate.getRates(selection.getId)
}
criteria.map { c => c.criteria.toInt -> c.rate }.toMap
} else Map.empty[Int, Int]
show2(index, files, user, asUserId, rate, page, round, region, module, selection, byCriteria)
}
def show2(index: Int,
files: Seq[ImageWithRating],
user: User,
asUserId: Long,
rate: Option[Int],
page: Int,
round: Round,
region: String,
module: String,
selection: Seq[(Selection, User)],
byCriteria: Map[Int, Int] = Map.empty)
(implicit request: Request[Any]): Result = {
val extraRight = if (index - 2 < 0) 2 - index else 0
val extraLeft = if (files.size < index + 3) index + 3 - files.size else 0
val left = Math.max(0, index - 2)
val right = Math.min(index + 3, files.size)
val start = Math.max(0, left - extraLeft)
val end = Math.min(files.size, right + extraRight)
val monument = files(index).image.monumentId.flatMap(id => if (id.trim.nonEmpty) MonumentJdbc.find(id) else None)
val comments = CommentJdbc.findBySubjectAndContest(files(index).pageId, round.contestId)
render {
case Accepts.Html() => Ok(
views.html.large.large(
user, asUserId,
files, index, start, end, page, rate,
region, round, monument, module, comments, selection,
byCriteria
)
)
case Accepts.Json() => Ok(Json.toJson(
files.map(file => file.copy(selection = file.selection.map(_.copy(createdAt = None))))
))
}
}
} | intracer/wlxjury | app/controllers/LargeViewController.scala | Scala | apache-2.0 | 6,666 |
package uk.org.openeyes.jsonschema.core
import java.net.URI
import org.json4s.JsonAST._
object JsonRef {
def unapply(jv: JValue): Option[URI] = jv \ "$ref" match {
case JString(ref) => Some(new URI(ref))
case _ => None
}
}
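// A usage sketch (hypothetical value, built directly from the AST):
//
//   val jv: JValue = JObject(List("$ref" -> JString("#/definitions/address")))
//   jv match {
//     case JsonRef(uri) => uri.getFragment   // "/definitions/address"
//     case _            => ()                // not a JSON reference
//   }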
object DraftV4Schema {
def parentSchemaRefs(jv: JValue): Seq[URI] = jv \ "allOf" match {
case JArray(jvs) => for (JsonRef(uri) <- jvs) yield uri
case _ => Seq()
}
def allowedTypes(jv: JValue): Set[Type] = {
jv \\ "type" match {
case JArray(values) => (for (JString(s) <- values) yield Type(s)).toSet
case JString(str) => Set(Type(str))
case _ => Set(TArray, TBoolean, TNumber, TNull, TObject, TString)
}
}
def maxItems(jv: JValue): Option[BigInt] = jv \ "maxItems" match {
case JInt(i) => Some(i)
case _ => None
}
def minItems(jv: JValue): BigInt = jv \ "minItems" match {
case JInt(i) => i
case _ => 0
}
def properties(jv: JValue): Map[String, JValue] = jv \ "properties" match {
case JObject(props) => props.toMap
case _ => Map()
}
}
sealed abstract class Type
case object TArray extends Type { override def toString = "array" }
case object TBoolean extends Type { override def toString = "boolean" }
case object TInteger extends Type { override def toString = "integer" }
case object TNumber extends Type { override def toString = "number" }
case object TNull extends Type { override def toString = "null" }
case object TObject extends Type { override def toString = "object" }
case object TString extends Type { override def toString = "string" }
object Type {
def apply(str: String) = str match {
case "array" => TArray
case "boolean" => TBoolean
case "integer" => TInteger
case "number" => TNumber
case "null" => TNull
case "object" => TObject
case "string" => TString
case _ => throw new Exception("Unrecognised type: '" + str + "'")
}
}
| openeyes/oe-json-schema | core/src/main/scala/Schema.scala | Scala | gpl-3.0 | 1,908 |
package sbt
import java.io.File
import org.specs2._
import mutable.Specification
import IO.{createDirectory, delete, touch, withTemporaryDirectory}
import org.apache.ivy.util.ChecksumHelper
import IfMissing.Fail
object ComponentManagerTest extends Specification
{
val TestID = "manager-test"
"Component manager" should {
"throw an exception if 'file' is called for a non-existing component" in {
withManager { _.file(TestID)(Fail) must throwA[InvalidComponent] }
}
"throw an exception if 'file' is called for an empty component" in {
withManager { manager =>
manager.define(TestID, Nil)
( manager.file(TestID)(Fail) ) must throwA[InvalidComponent]
}
}
"return the file for a single-file component" in {
withManager { manager =>
val hash = defineFile(manager, TestID, "a")
checksum(manager.file(TestID)(Fail)) must beEqualTo(hash)
}
}
"throw an exception if 'file' is called for multi-file component" in {
withManager { manager =>
defineFiles(manager, TestID, "a", "b")
( manager.file(TestID)(Fail) ) must throwA[InvalidComponent]
}
}
"return the files for a multi-file component" in {
withManager { manager =>
val hashes = defineFiles(manager, TestID, "a", "b")
checksum(manager.files(TestID)(Fail)) must haveTheSameElementsAs(hashes)
}
}
"return the files for a single-file component" in {
withManager { manager =>
val hashes = defineFiles(manager, TestID, "a")
checksum(manager.files(TestID)(Fail)) must haveTheSameElementsAs(hashes)
}
}
"throw an exception if 'files' is called for a non-existing component" in {
withManager { _.files(TestID)(Fail) must throwA[InvalidComponent] }
}
"properly cache a file and then retrieve it to an unresolved component" in {
withManager { definingManager =>
val hash = defineFile(definingManager, TestID, "a")
try
{
definingManager.cache(TestID)
withManager { usingManager =>
checksum(usingManager.file(TestID)(Fail)) must beEqualTo(hash)
}
}
finally { definingManager.clearCache(TestID) }
}
}
}
private def checksum(files: Iterable[File]): Seq[String] = files.map(checksum).toSeq
private def checksum(file: File): String = if(file.exists) ChecksumHelper.computeAsString(file, "sha1") else ""
private def defineFile(manager: ComponentManager, id: String, name: String): String = createFile(manager, id, name)(checksum)
private def defineFiles(manager: ComponentManager, id: String, names: String*): Seq[String] = createFiles(manager, id, names : _*)(checksum)
private def createFile[T](manager: ComponentManager, id: String, name: String)(f: File => T): T = createFiles(manager, id, name)(files => f(files.toList.head))
private def createFiles[T](manager: ComponentManager, id: String, names: String*)(f: Seq[File] => T): T =
withTemporaryDirectory { dir =>
val files = names.map(name => new File(dir, name) )
files.foreach(writeRandomContent)
manager.define(id, files)
f(files)
}
private def writeRandomContent(file: File) = IO.write(file, randomString)
private def randomString = "asdf"
private def withManager[T](f: ComponentManager => T): T =
TestLogger( logger => withTemporaryDirectory { temp => f(new ComponentManager(xsbt.boot.Locks, new xsbt.boot.ComponentProvider(temp, true), None, logger)) } )
}
| harrah/xsbt | ivy/src/test/scala/ComponentManagerTest.scala | Scala | bsd-3-clause | 3,338 |
package app
import service._
import util.{UsersAuthenticator, Keys}
import util.Implicits._
class DashboardController extends DashboardControllerBase
with IssuesService with PullRequestService with RepositoryService with AccountService
with UsersAuthenticator
trait DashboardControllerBase extends ControllerBase {
self: IssuesService with PullRequestService with RepositoryService with UsersAuthenticator =>
get("/dashboard/issues/repos")(usersOnly {
searchIssues("all")
})
get("/dashboard/issues/assigned")(usersOnly {
searchIssues("assigned")
})
get("/dashboard/issues/created_by")(usersOnly {
searchIssues("created_by")
})
get("/dashboard/pulls")(usersOnly {
searchPullRequests("created_by", None)
})
get("/dashboard/pulls/owned")(usersOnly {
searchPullRequests("created_by", None)
})
get("/dashboard/pulls/public")(usersOnly {
searchPullRequests("not_created_by", None)
})
get("/dashboard/pulls/for/:owner/:repository")(usersOnly {
searchPullRequests("all", Some(params("owner") + "/" + params("repository")))
})
private def searchIssues(filter: String) = {
import IssuesService._
// condition
val condition = session.putAndGet(Keys.Session.DashboardIssues,
if(request.hasQueryString) IssueSearchCondition(request)
else session.getAs[IssueSearchCondition](Keys.Session.DashboardIssues).getOrElse(IssueSearchCondition())
)
val userName = context.loginAccount.get.userName
val repositories = getUserRepositories(userName, context.baseUrl).map(repo => repo.owner -> repo.name)
val filterUser = Map(filter -> userName)
val page = IssueSearchCondition.page(request)
//
dashboard.html.issues(
issues.html.listparts(
searchIssue(condition, filterUser, false, (page - 1) * IssueLimit, IssueLimit, repositories: _*),
page,
countIssue(condition.copy(state = "open"), filterUser, false, repositories: _*),
countIssue(condition.copy(state = "closed"), filterUser, false, repositories: _*),
condition),
countIssue(condition, Map.empty, false, repositories: _*),
countIssue(condition, Map("assigned" -> userName), false, repositories: _*),
countIssue(condition, Map("created_by" -> userName), false, repositories: _*),
countIssueGroupByRepository(condition, filterUser, false, repositories: _*),
condition,
filter)
}
private def searchPullRequests(filter: String, repository: Option[String]) = {
import IssuesService._
import PullRequestService._
// condition
val condition = session.putAndGet(Keys.Session.DashboardPulls, {
if(request.hasQueryString) IssueSearchCondition(request)
else session.getAs[IssueSearchCondition](Keys.Session.DashboardPulls).getOrElse(IssueSearchCondition())
}.copy(repo = repository))
val userName = context.loginAccount.get.userName
val repositories = getUserRepositories(userName, context.baseUrl).map(repo => repo.owner -> repo.name)
val filterUser = Map(filter -> userName)
val page = IssueSearchCondition.page(request)
val counts = countIssueGroupByRepository(
IssueSearchCondition().copy(state = condition.state), Map.empty, true, repositories: _*)
dashboard.html.pulls(
pulls.html.listparts(
searchIssue(condition, filterUser, true, (page - 1) * PullRequestLimit, PullRequestLimit, repositories: _*),
page,
countIssue(condition.copy(state = "open"), filterUser, true, repositories: _*),
countIssue(condition.copy(state = "closed"), filterUser, true, repositories: _*),
condition,
None,
false),
getPullRequestCountGroupByUser(condition.state == "closed", userName, None),
getRepositoryNamesOfUser(userName).map { RepoName =>
(userName, RepoName, counts.collectFirst { case (_, RepoName, count) => count }.getOrElse(0))
}.sortBy(_._3).reverse,
condition,
filter)
}
}
| Muscipular/gitbucket | src/main/scala/app/DashboardController.scala | Scala | apache-2.0 | 4,131 |
package hammock
package asynchttpclient
import cats._
import cats.implicits._
import cats.data.Kleisli
import cats.effect._
import org.asynchttpclient._
import java.util.{concurrent => jc}
import scala.util._
import scala.jdk.CollectionConverters._
object AsyncHttpClientInterpreter {
def apply[F[_]](implicit F: InterpTrans[F]): InterpTrans[F] = F
implicit def instance[F[_]: Async](
implicit client: AsyncHttpClient = new DefaultAsyncHttpClient()
): InterpTrans[F] = new InterpTrans[F] {
override def trans: HttpF ~> F = transK andThen λ[Kleisli[F, AsyncHttpClient, *] ~> F](_.run(client))
}
def transK[F[_]: Async]: HttpF ~> Kleisli[F, AsyncHttpClient, *] = {
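    // Bridges a Java Future into F; note that future.get is a blocking call, made when the effect runs.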
def toF[A](future: jc.Future[A]): F[A] =
Async[F].async(_(Try(future.get) match {
case Failure(err) => Left(err)
case Success(a) => Right(a)
}))
λ[HttpF ~> Kleisli[F, AsyncHttpClient, *]] {
case reqF @ (Get(_) | Options(_) | Delete(_) | Head(_) | Options(_) | Trace(_) | Post(_) | Put(_) | Patch(_)) =>
Kleisli { implicit client =>
for {
req <- mapRequest[F](reqF)
ahcResponse <- toF(req.execute())
hammockResponse <- mapResponse[F](ahcResponse)
} yield hammockResponse
}
}
}
def mapRequest[F[_]: Async](reqF: HttpF[HttpResponse])(implicit client: AsyncHttpClient): F[BoundRequestBuilder] = {
def putHeaders(req: BoundRequestBuilder, headers: Map[String, String]): F[Unit] =
Async[F].delay {
req.setSingleHeaders(headers.map(kv => kv._1.asInstanceOf[CharSequence] -> kv._2).asJava)
} *> ().pure[F]
def getBuilder(reqF: HttpF[HttpResponse]): BoundRequestBuilder = reqF match {
case Get(_) => client.prepareGet(reqF.req.uri.show)
case Delete(_) => client.prepareDelete(reqF.req.uri.show)
case Head(_) => client.prepareHead(reqF.req.uri.show)
case Options(_) => client.prepareOptions(reqF.req.uri.show)
case Post(_) => client.preparePost(reqF.req.uri.show)
case Put(_) => client.preparePut(reqF.req.uri.show)
case Trace(_) => client.prepareTrace(reqF.req.uri.show)
case Patch(_) => client.preparePatch(reqF.req.uri.show)
}
for {
req <- getBuilder(reqF).pure[F]
_ <- putHeaders(req, reqF.req.headers)
_ = reqF.req.entity
.foreach(_.cata(str => req.setBody(str.content), bytes => req.setBody(bytes.content), Function.const(())))
} yield req
}
def mapResponse[F[_]: Applicative](ahcResponse: Response): F[HttpResponse] = {
def createEntity(r: Response): Entity = r.getContentType match {
case AsyncHttpClientContentType.`application/octet-stream` => Entity.ByteArrayEntity(r.getResponseBodyAsBytes)
case _ => Entity.StringEntity(r.getResponseBody)
}
HttpResponse(
Status.Statuses(ahcResponse.getStatusCode),
ahcResponse.getHeaders.names.asScala.map(name => (name, ahcResponse.getHeaders.get(name))).toMap,
createEntity(ahcResponse)
).pure[F]
}
}
| pepegar/hammock | hammock-asynchttpclient/src/main/scala/hammock/asynchttpclient/AsyncHttpClientInterpreter.scala | Scala | mit | 3,119 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.worker
import java.io._
import java.net.URI
import java.nio.charset.StandardCharsets
import scala.collection.JavaConverters._
import com.google.common.io.Files
import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.{DriverDescription, SparkHadoopUtil}
import org.apache.spark.deploy.DeployMessages.DriverStateChanged
import org.apache.spark.deploy.StandaloneResourceUtils.prepareResourcesFile
import org.apache.spark.deploy.master.DriverState
import org.apache.spark.deploy.master.DriverState.DriverState
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.{DRIVER_RESOURCES_FILE, SPARK_DRIVER_PREFIX}
import org.apache.spark.internal.config.UI.UI_REVERSE_PROXY
import org.apache.spark.internal.config.Worker.WORKER_DRIVER_TERMINATE_TIMEOUT
import org.apache.spark.resource.ResourceInformation
import org.apache.spark.rpc.RpcEndpointRef
import org.apache.spark.ui.UIUtils
import org.apache.spark.util.{Clock, ShutdownHookManager, SystemClock, Utils}
/**
* Manages the execution of one driver, including automatically restarting the driver on failure.
* This is currently only used in standalone cluster deploy mode.
*/
private[deploy] class DriverRunner(
conf: SparkConf,
val driverId: String,
val workDir: File,
val sparkHome: File,
val driverDesc: DriverDescription,
val worker: RpcEndpointRef,
val workerUrl: String,
val workerWebUiUrl: String,
val securityManager: SecurityManager,
val resources: Map[String, ResourceInformation] = Map.empty)
extends Logging {
@volatile private var process: Option[Process] = None
@volatile private var killed = false
// Populated once finished
@volatile private[worker] var finalState: Option[DriverState] = None
@volatile private[worker] var finalException: Option[Exception] = None
// Timeout to wait for when trying to terminate a driver.
private val driverTerminateTimeoutMs = conf.get(WORKER_DRIVER_TERMINATE_TIMEOUT)
// Decoupled for testing
def setClock(_clock: Clock): Unit = {
clock = _clock
}
def setSleeper(_sleeper: Sleeper): Unit = {
sleeper = _sleeper
}
private var clock: Clock = new SystemClock()
private var sleeper = new Sleeper {
def sleep(seconds: Int): Unit = (0 until seconds).takeWhile { _ =>
Thread.sleep(1000)
!killed
}
}
/** Starts a thread to run and manage the driver. */
private[worker] def start() = {
new Thread("DriverRunner for " + driverId) {
override def run(): Unit = {
var shutdownHook: AnyRef = null
try {
shutdownHook = ShutdownHookManager.addShutdownHook { () =>
logInfo(s"Worker shutting down, killing driver $driverId")
kill()
}
// prepare driver jars and run driver
val exitCode = prepareAndRunDriver()
// set final state depending on if forcibly killed and process exit code
finalState = if (exitCode == 0) {
Some(DriverState.FINISHED)
} else if (killed) {
Some(DriverState.KILLED)
} else {
Some(DriverState.FAILED)
}
} catch {
case e: Exception =>
kill()
finalState = Some(DriverState.ERROR)
finalException = Some(e)
} finally {
if (shutdownHook != null) {
ShutdownHookManager.removeShutdownHook(shutdownHook)
}
}
// notify worker of final driver state, possible exception
worker.send(DriverStateChanged(driverId, finalState.get, finalException))
}
}.start()
}
/** Terminate this driver (or prevent it from ever starting if not yet started) */
private[worker] def kill(): Unit = {
logInfo("Killing driver process!")
killed = true
synchronized {
process.foreach { p =>
val exitCode = Utils.terminateProcess(p, driverTerminateTimeoutMs)
if (exitCode.isEmpty) {
logWarning("Failed to terminate driver process: " + p +
". This process will likely be orphaned.")
}
}
}
}
/**
* Creates the working directory for this driver.
* Will throw an exception if there are errors preparing the directory.
*/
private def createWorkingDirectory(): File = {
val driverDir = new File(workDir, driverId)
if (!driverDir.exists() && !driverDir.mkdirs()) {
throw new IOException("Failed to create directory " + driverDir)
}
driverDir
}
/**
* Download the user jar into the supplied directory and return its local path.
* Will throw an exception if there are errors downloading the jar.
*/
private def downloadUserJar(driverDir: File): String = {
val jarFileName = new URI(driverDesc.jarUrl).getPath.split("/").last
val localJarFile = new File(driverDir, jarFileName)
if (!localJarFile.exists()) { // May already exist if running multiple workers on one node
logInfo(s"Copying user jar ${driverDesc.jarUrl} to $localJarFile")
Utils.fetchFile(
driverDesc.jarUrl,
driverDir,
conf,
SparkHadoopUtil.get.newConfiguration(conf),
System.currentTimeMillis(),
useCache = false)
if (!localJarFile.exists()) { // Verify copy succeeded
throw new IOException(
s"Can not find expected jar $jarFileName which should have been loaded in $driverDir")
}
}
localJarFile.getAbsolutePath
}
private[worker] def prepareAndRunDriver(): Int = {
val driverDir = createWorkingDirectory()
val localJarFilename = downloadUserJar(driverDir)
val resourceFileOpt = prepareResourcesFile(SPARK_DRIVER_PREFIX, resources, driverDir)
def substituteVariables(argument: String): String = argument match {
case "{{WORKER_URL}}" => workerUrl
case "{{USER_JAR}}" => localJarFilename
case other => other
}
// config resource file for driver, which would be used to load resources when driver starts up
val javaOpts = driverDesc.command.javaOpts ++ resourceFileOpt.map(f =>
Seq(s"-D${DRIVER_RESOURCES_FILE.key}=${f.getAbsolutePath}")).getOrElse(Seq.empty)
// TODO: If we add ability to submit multiple jars they should also be added here
val builder = CommandUtils.buildProcessBuilder(driverDesc.command.copy(javaOpts = javaOpts),
securityManager, driverDesc.mem, sparkHome.getAbsolutePath, substituteVariables)
// add WebUI driver log url to environment
val reverseProxy = conf.get(UI_REVERSE_PROXY)
val workerUrlRef = UIUtils.makeHref(reverseProxy, driverId, workerWebUiUrl)
builder.environment.put("SPARK_DRIVER_LOG_URL_STDOUT",
s"$workerUrlRef/logPage/?driverId=$driverId&logType=stdout")
builder.environment.put("SPARK_DRIVER_LOG_URL_STDERR",
s"$workerUrlRef/logPage/?driverId=$driverId&logType=stderr")
runDriver(builder, driverDir, driverDesc.supervise)
}
private def runDriver(builder: ProcessBuilder, baseDir: File, supervise: Boolean): Int = {
builder.directory(baseDir)
def initialize(process: Process): Unit = {
// Redirect stdout and stderr to files
val stdout = new File(baseDir, "stdout")
CommandUtils.redirectStream(process.getInputStream, stdout)
val stderr = new File(baseDir, "stderr")
val redactedCommand = Utils.redactCommandLineArgs(conf, builder.command.asScala.toSeq)
.mkString("\\"", "\\" \\"", "\\"")
val header = "Launch Command: %s\\n%s\\n\\n".format(redactedCommand, "=" * 40)
Files.append(header, stderr, StandardCharsets.UTF_8)
CommandUtils.redirectStream(process.getErrorStream, stderr)
}
runCommandWithRetry(ProcessBuilderLike(builder), initialize, supervise)
}
private[worker] def runCommandWithRetry(
command: ProcessBuilderLike, initialize: Process => Unit, supervise: Boolean): Int = {
var exitCode = -1
// Time to wait between submission retries.
var waitSeconds = 1
// A run of this many seconds resets the exponential back-off.
val successfulRunDuration = 5
var keepTrying = !killed
val redactedCommand = Utils.redactCommandLineArgs(conf, command.command)
.mkString("\\"", "\\" \\"", "\\"")
while (keepTrying) {
logInfo("Launch Command: " + redactedCommand)
synchronized {
if (killed) { return exitCode }
process = Some(command.start())
initialize(process.get)
}
val processStart = clock.getTimeMillis()
exitCode = process.get.waitFor()
// check if attempting another run
keepTrying = supervise && exitCode != 0 && !killed
if (keepTrying) {
if (clock.getTimeMillis() - processStart > successfulRunDuration * 1000L) {
waitSeconds = 1
}
logInfo(s"Command exited with status $exitCode, re-launching after $waitSeconds s.")
sleeper.sleep(waitSeconds)
waitSeconds = waitSeconds * 2 // exponential back-off
}
}
exitCode
}
}
private[deploy] trait Sleeper {
def sleep(seconds: Int): Unit
}
// Needed because ProcessBuilder is a final class and cannot be mocked
private[deploy] trait ProcessBuilderLike {
def start(): Process
def command: Seq[String]
}
private[deploy] object ProcessBuilderLike {
def apply(processBuilder: ProcessBuilder): ProcessBuilderLike = new ProcessBuilderLike {
override def start(): Process = processBuilder.start()
override def command: Seq[String] = processBuilder.command().asScala.toSeq
}
}
| ueshin/apache-spark | core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala | Scala | apache-2.0 | 10,323 |
package scala.tools.nsc
package backend.jvm
package opt
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.jdk.CollectionConverters._
import scala.collection.immutable.TreeMap
import scala.tools.asm.tree.ClassNode
import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, MethodInlineInfo}
import scala.tools.testkit.BytecodeTesting
import scala.tools.testkit.BytecodeTesting._
@RunWith(classOf[JUnit4])
class ScalaInlineInfoTest extends BytecodeTesting {
override def compilerArgs = "-opt:l:none"
import compiler._
def inlineInfo(c: ClassNode): InlineInfo = c.attrs.asScala.collect({ case a: InlineInfoAttribute => a.inlineInfo }).head
def mapDiff[A, B](a: collection.Map[A, B], b: collection.Map[A, B]) = {
val r = new StringBuilder
for ((a, av) <- a) {
if (!b.contains(a)) r.append(s"missing in b: $a\\n")
else if (av != b(a)) r.append(s"different for $a: $av != ${b(a)}\\n")
}
for (b <- b.keys.toList diff a.keys.toList) {
r.append(s"missing in a: $b\\n")
}
r.toString
}
def assertSameMethods(c: ClassNode, nameAndSigs: collection.Set[String]): Unit = {
val r = new StringBuilder
val inClass = c.methods.iterator.asScala.map(m => m.name + m.desc).toSet
for (m <- inClass.diff(nameAndSigs)) r.append(s"method in classfile found, but no inline info: $m")
for (m <- nameAndSigs.diff(inClass)) r.append(s"inline info found, but no method in classfile: $m")
assert(r.isEmpty, r.toString)
}
@Test
def traitMembersInlineInfo(): Unit = {
val code =
"""trait T {
| def f1 = 1 // concrete method
| private def f2 = 1 // default method only (not in subclass)
| def f3 = {
| def nest = 0 // nested method (does not end up in the interface)
| nest
| }
|
| @inline
| def f4 = super.toString // super accessor
|
| object O // module accessor (method is generated)
| final def f5 = {
| object L { val x = 0 } // nested module (just flattened out)
| L.x
| }
|
| @noinline
| def f6: Int // abstract method
|
| // fields
|
| val x1 = 0
| var y2 = 0
| var x3: Int
| lazy val x4 = 0
|
| final val x5 = 0
|}
|class C extends T {
| def f6 = 0
| var x3 = 0
|}
""".stripMargin
val cs @ List(c, t, tl, to) = compileClasses(code)
val infoT = inlineInfo(t)
val expectT = InlineInfo (
false, // final class
None, // not a sam
TreeMap(
(("O", "()LT$O$;"), MethodInlineInfo(false,false,false)),
(("T$$super$toString", "()Ljava/lang/String;"), MethodInlineInfo(true ,false,false)),
(("T$_setter_$x1_$eq", "(I)V"), MethodInlineInfo(false,false,false)),
(("f1", "()I"), MethodInlineInfo(false,false,false)),
(("f1$", "(LT;)I"), MethodInlineInfo(true ,false,false)),
(("f2", "()I"), MethodInlineInfo(true ,false,false)), // no static impl method for private method f2
(("f3", "()I"), MethodInlineInfo(false,false,false)),
(("f3$", "(LT;)I"), MethodInlineInfo(true ,false,false)),
(("f4", "()Ljava/lang/String;"), MethodInlineInfo(false,true, false)),
(("f4$", "(LT;)Ljava/lang/String;"), MethodInlineInfo(true ,true, false)),
(("f5", "()I"), MethodInlineInfo(true ,false,false)),
(("f5$", "(LT;)I"), MethodInlineInfo(true ,false,false)),
(("f6", "()I"), MethodInlineInfo(false,false,true )), // no static impl method for abstract method f6
(("x1", "()I"), MethodInlineInfo(false,false,false)),
(("y2", "()I"), MethodInlineInfo(false,false,false)),
(("y2_$eq", "(I)V"), MethodInlineInfo(false,false,false)),
(("x3", "()I"), MethodInlineInfo(false,false,false)),
(("x3_$eq", "(I)V"), MethodInlineInfo(false,false,false)),
(("x4", "()I"), MethodInlineInfo(false,false,false)),
(("x4$", "(LT;)I"), MethodInlineInfo(true ,false,false)),
(("x5", "()I"), MethodInlineInfo(true, false,false)),
(("x5$", "(LT;)I"), MethodInlineInfo(true ,false,false)),
(("L$2", "(Lscala/runtime/LazyRef;)LT$L$1$;"), MethodInlineInfo(true, false,false)),
(("nest$1", "()I"), MethodInlineInfo(true, false,false)),
(("$init$", "(LT;)V"), MethodInlineInfo(true,false,false)),
(("L$lzycompute$1", "(Lscala/runtime/LazyRef;)LT$L$1$;"), MethodInlineInfo(true,false,false))
),
None // warning
)
assert(infoT == expectT, mapDiff(expectT.methodInfos, infoT.methodInfos) + infoT)
assertSameMethods(t, expectT.methodInfos.keySet.map(x => x._1 + x._2))
val infoC = inlineInfo(c)
val expectC = InlineInfo(false, None, TreeMap(
("O", "()LT$O$;") -> MethodInlineInfo(true ,false,false),
("f1", "()I") -> MethodInlineInfo(false,false,false),
("f3", "()I") -> MethodInlineInfo(false,false,false),
("f4", "()Ljava/lang/String;") -> MethodInlineInfo(false,true,false),
("f5", "()I") -> MethodInlineInfo(true,false,false),
("f6", "()I") -> MethodInlineInfo(false,false,false),
("x1", "()I") -> MethodInlineInfo(false,false,false),
("T$_setter_$x1_$eq", "(I)V") -> MethodInlineInfo(false,false,false),
("y2", "()I") -> MethodInlineInfo(false,false,false),
("y2_$eq", "(I)V") -> MethodInlineInfo(false,false,false),
("x3", "()I") -> MethodInlineInfo(false,false,false),
("x3_$eq", "(I)V") -> MethodInlineInfo(false,false,false),
("x4$lzycompute", "()I") -> MethodInlineInfo(true ,false,false),
("x4", "()I") -> MethodInlineInfo(false,false,false),
("T$$super$toString", "()Ljava/lang/String;") -> MethodInlineInfo(true ,false,false),
("<init>", "()V") -> MethodInlineInfo(false,false,false),
("O$lzycompute$1", "()V") -> MethodInlineInfo(true,false,false)
),
None)
assert(infoC == expectC, mapDiff(expectC.methodInfos, infoC.methodInfos) + infoC)
assertSameMethods(c, expectC.methodInfos.keySet.map(x => x._1 + x._2))
}
@Test
def inlineInfoSam(): Unit = {
val code =
"""@FunctionalInterface trait C { // expected to be seen as sam: g(I)I
| def f = 0
| def g(x: Int): Int
| val foo = "hi"
|}
|@FunctionalInterface abstract class D { // not actually a functional interface, but scalac doesn't error
| val biz: Int
|}
|@FunctionalInterface trait T { // expected to be seen as sam: h(Ljava/lang/String;)I
| def h(a: String): Int
|}
|@FunctionalInterface trait E extends T { // expected to be seen as sam: h(Ljava/lang/String;)I
| def hihi(x: Int) = x
|}
|@FunctionalInterface class F extends T { // not actually a functional interface, but scalac doesn't error
| def h(a: String) = 0
|}
|@FunctionalInterface trait U {
| def conc() = 10
| def nullary: Int
|}
|trait V { // not annotated @FunctionalInterface, therefore not treated as SAM by the optimizer
| def h(a: String): Int
|}
""".stripMargin
val cs = compileClasses(code)
val sams = cs.map(c => (c.name, inlineInfo(c).sam))
assertEquals(sams,
List(
("C",Some("g(I)I")),
("D",None),
("E",Some("h(Ljava/lang/String;)I")),
("F",None),
("T",Some("h(Ljava/lang/String;)I")),
("U",None),
("V", None)))
}
@Test
def lzyComputeInlineInfo(): Unit = {
val code = "class C { object O }"
val List(c, om) = compileClasses(code)
val infoC = inlineInfo(c)
val expected = Map(
("<init>", "()V") -> MethodInlineInfo(false,false,false),
("O$lzycompute$1", "()V") -> MethodInlineInfo(true,false,false),
("O", "()LC$O$;") -> MethodInlineInfo(true,false,false))
assert(infoC.methodInfos == expected, mapDiff(infoC.methodInfos, expected))
assertSameMethods(c, expected.keySet.map(x => x._1 + x._2))
}
@Test
def looksLikeForwarderTest(): Unit = {
import global.genBCode.postProcessor.backendUtils._
val code =
"""trait T { def a = 0 }
|class C(x: Int, y: Long) extends T {
| def t1 = {
| val f = (b: Byte, i: Int) => i + b
| f(1, 2)
| }
|}
|object C {
| def make(x: Int, y: java.lang.Long) = new C(x, y)
| def foo(s: String) = {
| val k = s"$s-$s"
| println(k)
| }
|}
""".stripMargin
val List(c, cm, t) = compileClasses(code)
def tst(c: ClassNode, m: String, r: Int): Unit = assertEquals(looksLikeForwarderOrFactoryOrTrivial(getAsmMethod(c, m), c.name, allowPrivateCalls = true), r)
tst(c, "a", 4)
tst(c, "$anonfun$t1$1$adapted", 3)
tst(c, "$anonfun$t1$1", 1)
tst(c, "t1", -1)
tst(t, "a$", 4)
tst(t, "a", 1)
tst(cm, "make", 2)
tst(cm, "foo", -1)
}
}
| scala/scala | test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala | Scala | apache-2.0 | 10,777 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import org.scalatest.{ BeforeAndAfter, FunSuite }
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.JdbcRDD
import java.sql._
class JdbcRDDSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
before {
Class.forName("org.apache.derby.jdbc.EmbeddedDriver")
val conn = DriverManager.getConnection("jdbc:derby:target/JdbcRDDSuiteDb;create=true")
try {
val create = conn.createStatement
create.execute("""
CREATE TABLE FOO(
ID INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1),
DATA INTEGER
)""")
create.close
val insert = conn.prepareStatement("INSERT INTO FOO(DATA) VALUES(?)")
(1 to 100).foreach { i =>
insert.setInt(1, i * 2)
insert.executeUpdate
}
insert.close
} catch {
case e: SQLException if e.getSQLState == "X0Y32" =>
// table exists
} finally {
conn.close
}
}
test("basic functionality") {
sc = new SparkContext("local", "test")
val rdd = new JdbcRDD(
sc,
() => { DriverManager.getConnection("jdbc:derby:target/JdbcRDDSuiteDb") },
"SELECT DATA FROM FOO WHERE ? <= ID AND ID <= ?",
1, 100, 3,
(r: ResultSet) => { r.getInt(1) } ).cache
assert(rdd.count === 100)
assert(rdd.reduce(_+_) === 10100)
}
after {
try {
DriverManager.getConnection("jdbc:derby:;shutdown=true")
} catch {
case se: SQLException if se.getSQLState == "XJ015" =>
// normal shutdown
}
}
}
| cloudera/spark | core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala | Scala | apache-2.0 | 2,382 |
import android.Keys._
import sbt._
import sbt.Keys._
object Settings {
val common = Def.settings(
autoScalaLibrary := false,
javacOptions ++=
"-source" :: "1.7" ::
"-target" :: "1.7" ::
Nil,
minSdkVersion := "16",
organization := "com.liefery.android",
platformTarget := "android-27",
scalaVersion := "2.11.12",
targetSdkVersion := "27"
)
} | liefery/android-gallery | project/Settings.scala | Scala | mit | 441 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package examples
import org.apache.hadoop.hbase.client.HBaseAdmin
import org.apache.hadoop.hbase.{ HBaseConfiguration, HTableDescriptor, TableName }
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.KeyValue.Type
import org.apache.hadoop.hbase.HConstants
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.CellUtil
import org.apache.spark._
import scala.collection.JavaConverters._
import org.apache.hadoop.hbase.client.Scan
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos
import org.apache.hadoop.hbase.protobuf.ProtobufUtil
import org.apache.hadoop.hbase.util.Base64
import org.apache.hadoop.hbase.client.HTableInterface
import org.apache.hadoop.hbase.client.HTable
import org.apache.hadoop.hbase.client.Get
object HBaseInput {
def main(args: Array[String]) {
val sparkConf = new SparkConf().setAppName("HBaseTest")
val sc = new SparkContext(sparkConf)
val conf = HBaseConfiguration.create()
// Other options for hbase configuration are available, please check
// http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HConstants.html
conf.set(HConstants.ZOOKEEPER_QUORUM, args(0))
// Other options for configuring scan behavior are available. More information available at
// http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
conf.set(TableInputFormat.INPUT_TABLE, args(1))
    // Initialize the HBase table if necessary
val admin = new HBaseAdmin(conf)
if (!admin.isTableAvailable(args(1))) {
val tableDesc = new HTableDescriptor(TableName.valueOf(args(1)))
admin.createTable(tableDesc)
}
val scanner = new Scan
scanner.setReversed(true)
val start = args(2) + "_" + args(3)
val stop = args(2) + "_" + args(4)
scanner.setStartRow(Bytes.toBytes(start))
scanner.setStopRow(Bytes.toBytes(stop))
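    // TableInputFormat expects the Scan to be handed over through the Hadoop configuration as a
    // Base64-encoded, protobuf-serialised string, hence the conversion helper below.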
    def convertScanToString(scan: Scan): String = {
      val proto: ClientProtos.Scan = ProtobufUtil.toScan(scan)
      Base64.encodeBytes(proto.toByteArray())
    }
conf.set(TableInputFormat.SCAN, convertScanToString(scanner))
val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
classOf[org.apache.hadoop.hbase.client.Result])
val keyValue = hBaseRDD.map(x => x._2).map(x => x.getColumn(Bytes.toBytes("identity"), Bytes.toBytes("id")))
val outPut = keyValue.flatMap { x =>
x.asScala.map { cell =>
Bytes.toString(CellUtil.cloneValue(cell))
}
}
outPut.foreach(println)
sc.stop()
}
}
| nolimitid/spark-hbase | src/main/scala/examples/HBaseInput.scala | Scala | apache-2.0 | 3,503 |
/**
* License
* =======
*
* The MIT License (MIT)
*
*
* Copyright (c) 2017 Antoine DOERAENE @sherpal
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package plot
import complex.Complex
/**
 * RawShapes are created by mapping rectangular or disk shapes via functions.
*/
class RawShape(val plot: Plot, var colors: (Double, Double, Double),
val rawTriangles: List[(Complex, Complex, Complex)]) extends Shape {
private val initialTriangles = rawTriangles.map({
case (v1: Complex, v2: Complex, v3: Complex) => new Triangle(this, v1, v2, v3)
})
def drawTriangles: List[Triangle] = initialTriangles
draw()
}
| sherpal/holomorphic-maps | src/main/scala/plot/RawShape.scala | Scala | mit | 1,716 |
package statla
import spire.math.{Fractional, Numeric}
package object Util {
type CentralMoments[T] = (T, T, T, T)
def zero[T : Fractional] = Fractional[T].zero
def zeroMoments[T : Fractional] = (Fractional[T].zero, Fractional[T].zero, Fractional[T].zero, Fractional[T].zero)
def emptySample[T : Fractional] = new Sample[T](0, zeroMoments[T])
def emptyCorrelation[T : Fractional] = new Correlation[T](zero, emptySample, emptySample)
def numeric2Fractional[N : Numeric, F : Fractional](elem: N): F = {
Fractional[F].fromType[N](elem)
}
def fractional2Numeric[F : Fractional, N : Numeric](elem: F): N = {
Numeric[N].fromType[F](elem)
}
  // Be careful: parameter order matters; the first argument supplies the reference scale and the second is the value that gets rounded
def roundToLowerScale(reference: BigDecimal, toBeRounded: BigDecimal): BigDecimal = {
import scala.math.BigDecimal.RoundingMode._
toBeRounded.setScale(reference.scale, HALF_UP)
}
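  // e.g. roundToLowerScale(BigDecimal("1.23"), BigDecimal("2.71828")) == BigDecimal("2.72") (illustrative values; scale 2 comes from the reference)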
def compute[T : Fractional, V : Numeric](elems: Seq[V]): Sample[T] =
elems.foldLeft(emptySample[T])(_ + _)
def parCompute[T : Fractional, V : Numeric](elems: Seq[V]): Sample[T] =
elems.par.foldLeft(emptySample[T])(_ + _)
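  // Assuming `cv` is the unbiased (N - 1 denominator) covariance, this rescales it to the biased, N-denominator estimate.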
def biasedCovariance(cv: BigDecimal, N: Int): BigDecimal =
cv * (N - 1) / N
//def autocorrelationCoefficient[V : Numeric](elems: Seq[V], mean: BigDecimal, stdev: BigDecimal): BigDecimal =
// biasedCovariance(rawCovariance(elems.init, elems.tail, mean, mean), elems.length - 1) / (stdev * stdev)
// temporary function
/*def rawCovariance[T : Fractional, V : Numeric](elems1: Seq[V], elems2: Seq[V], uMean: T, vMean: T): T =
elems1.zip(elems2).map((es: (V, V)) => (numeric2Fractional[V, T](es._1) - uMean) * (numeric2Fractional[V, T](es._2) - vMean) / (elems1.length - 1)).sum
*/
def computeCorrelation[T : Fractional, V : Numeric](elems1: Seq[V], elems2: Seq[V]): Correlation[T] =
elems1.zip(elems2).foldLeft(emptyCorrelation[T])(_ + _)
}
| jvican/statla | src/main/scala/statla/Util.scala | Scala | mit | 1,966 |
package @portlet.java.package.name@
import com.liferay.util.bridges.mvc.MVCPortlet
class @portlet.java.class.name@Portlet extends MVCPortlet
| rivetlogic/liferay-voice-command | tools/portlet_scala_tmpl/docroot/WEB-INF/src/ScalaPortlet.scala | Scala | gpl-3.0 | 143 |
package process
import org.scalatest._
class OpticalCharRecognizerTest extends FlatSpec with Matchers {
"The OCR parse method" should "return Some string if id is not dividable by 7" in {
OpticalCharRecognizer.parse(1, "Test") should be (Some("Test"))
}
it should "return None if id is dividable by 7" in {
OpticalCharRecognizer.parse(7, "Not") should be (None)
}
}
| jvorhauer/akka-workshop | exercises/speedcam/src/test/scala/process/OpticalCharRecognizerTest.scala | Scala | apache-2.0 | 386 |
package controllers
import play.api.mvc.{Action, Controller}
object Application extends Controller {
def index = Action {
Ok(views.html.index("Hello Play Framework"))
}
} | tomaszym/izabela | front/app/controllers/Application.scala | Scala | gpl-2.0 | 182 |
package com.sksamuel.elastic4s.requests.searches.aggs
import com.sksamuel.elastic4s.requests.common.RefreshPolicy
import com.sksamuel.elastic4s.requests.searches.DateHistogramInterval
import com.sksamuel.elastic4s.requests.searches.aggs.responses.bucket.{DateHistogram, DateHistogramBucket}
import com.sksamuel.elastic4s.testkit.DockerTests
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
import scala.util.Try
class DateHistogramAggregationHttpTest extends AnyFreeSpec with DockerTests with Matchers {
Try {
client.execute {
deleteIndex("datehistaggs")
}.await
}
client.execute {
createIndex("datehistaggs") mapping {
properties(
textField("name").fielddata(true),
dateField("premiere_date").format("dd/MM/yyyy")
)
}
}.await
client.execute(
bulk(
indexInto("datehistaggs") fields("name" -> "Breaking Bad", "premiere_date" -> "20/01/2008"),
indexInto("datehistaggs") fields("name" -> "Better Call Saul", "premiere_date" -> "15/01/2008"),
indexInto("datehistaggs") fields("name" -> "Star Trek Discovery", "premiere_date" -> "27/06/2008"),
indexInto("datehistaggs") fields("name" -> "Game of Thrones", "premiere_date" -> "01/06/2008"),
indexInto("datehistaggs") fields("name" -> "Designated Survivor", "premiere_date" -> "12/03/2008"),
indexInto("datehistaggs") fields("name" -> "Walking Dead", "premiere_date" -> "19/01/2008")
).refresh(RefreshPolicy.Immediate)
).await
"date histogram agg" - {
"should return docs grouped by histogram interval" in {
val resp = client.execute {
search("datehistaggs").matchAllQuery().aggs {
dateHistogramAgg("agg1", "premiere_date").interval(DateHistogramInterval.Month)
.extendedBounds(ExtendedBounds("01/12/2007", "01/07/2008"))
}
}.await.result
resp.totalHits shouldBe 6
val agg = resp.aggs.result[DateHistogram]("agg1")
agg.buckets.map(_.copy(data = Map.empty)) shouldBe Seq(
DateHistogramBucket("01/12/2007", 1196467200000L, 0, Map.empty),
DateHistogramBucket("01/01/2008", 1199145600000L, 3, Map.empty),
DateHistogramBucket("01/02/2008", 1201824000000L, 0, Map.empty),
DateHistogramBucket("01/03/2008", 1204329600000L, 1, Map.empty),
DateHistogramBucket("01/04/2008", 1207008000000L, 0, Map.empty),
DateHistogramBucket("01/05/2008", 1209600000000L, 0, Map.empty),
DateHistogramBucket("01/06/2008", 1212278400000L, 2, Map.empty),
DateHistogramBucket("01/07/2008", 1214870400000L, 0, Map.empty)
)
}
"should return keyed docs grouped by histogram interval" in {
val resp = client.execute {
search("datehistaggs").matchAllQuery().aggs {
dateHistogramAgg("agg1", "premiere_date").interval(DateHistogramInterval.Month)
.extendedBounds(ExtendedBounds("01/12/2007", "01/07/2008"))
.keyed(true)
}
}.await.result
resp.totalHits shouldBe 6
val agg = resp.aggs.result[DateHistogram]("agg1")
agg.buckets.map(_.copy(data = Map.empty)).sortBy(_.timestamp) shouldBe Seq(
DateHistogramBucket("01/12/2007", 1196467200000L, 0, Map.empty),
DateHistogramBucket("01/01/2008", 1199145600000L, 3, Map.empty),
DateHistogramBucket("01/02/2008", 1201824000000L, 0, Map.empty),
DateHistogramBucket("01/03/2008", 1204329600000L, 1, Map.empty),
DateHistogramBucket("01/04/2008", 1207008000000L, 0, Map.empty),
DateHistogramBucket("01/05/2008", 1209600000000L, 0, Map.empty),
DateHistogramBucket("01/06/2008", 1212278400000L, 2, Map.empty),
DateHistogramBucket("01/07/2008", 1214870400000L, 0, Map.empty)
)
}
}
}
| stringbean/elastic4s | elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/requests/searches/aggs/DateHistogramAggregationHttpTest.scala | Scala | apache-2.0 | 3,789 |
/*
* This file is part of Kiama.
*
* Copyright (C) 2011-2015 Anthony M Sloane, Macquarie University.
*
* Kiama is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* Kiama is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see
* <http://www.gnu.org/licenses/>.
*/
package org.kiama
package example.prolog
import PrologTree.Program
import org.kiama.util.{Compiler, TestCompiler}
/**
* Tests that check that the semantic analyser works correctly. I.e., it correctly
* diagnoses errors where they are present, and passes correct code.
*/
class SemanticAnalyserTests extends SyntaxAnalyser with Compiler[Program]
with TestCompiler[Program] {
import PrologTree.PrologTree
import org.kiama.output.PrettyPrinterTypes.{emptyDocument, Document}
import org.kiama.util.Config
import org.kiama.util.Messaging.report
filetests ("Prolog", "src/org/kiama/example/prolog/tests", ".pl", ".sem")
/**
* For the purposes of tests, the parser we want is the program one.
*/
val parser = program
/**
* Process the tree by conducting semantic analysis and reporting any errors.
*/
def process (filename : String, ast : Program, config : Config) {
val tree = new PrologTree (ast)
val analyser = new SemanticAnalyser (tree)
val messages = analyser.errors
if (messages.length > 0)
report (messages, config.error)
}
def format (m : Program) : Document =
emptyDocument
}
| solomono/kiama | library/src/org/kiama/example/prolog/SemanticAnalyserTests.scala | Scala | gpl-3.0 | 2,017 |
/*
* Copyright 2006-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package util
import common._
import xml.NodeSeq
/**
* Defines the association of this reference with a markup tag ID
*/
trait FieldIdentifier {
def uniqueFieldId: Box[String] = Empty
}
/**
* Associate a FieldIdentifier with a NodeSeq
*/
case class FieldError(field: FieldIdentifier, msg: NodeSeq) {
override def toString = field.uniqueFieldId + " : " + msg
}
object FieldError {
import scala.xml.Text
def apply(field: FieldIdentifier, msg: String) = new FieldError(field, Text(msg))
}
trait FieldContainer {
def allFields: Seq[BaseField]
}
/**
* A field that can be displayed but not edited
*/
trait ReadableField extends FieldIdentifier with ValueHolder with Bindable {
import scala.xml.Text
/**
* The human name of this field
*/
def name: String
def displayNameHtml: Box[NodeSeq] = Empty
def displayHtml: NodeSeq = displayNameHtml openOr Text(displayName)
/**
* The display name of this field (e.g., "First Name")
*/
def displayName: String = name
/**
* Default read-only rendering of field
*/
def asHtml: NodeSeq = Text(get.toString)
/**
* Given the current context, should this field be displayed
*/
def shouldDisplay_? = true
}
/**
* A field that can be set
*/
trait SettableField extends ReadableField with SettableValueHolder {
/**
* A list of functions that transform the value before it is set. The transformations
* are also applied before the value is used in a query. Typical applications
* of this are trimming and/or toLowerCase-ing strings
*/
def setFilter: List[ValueType => ValueType]
def validations: List[ValueType => List[FieldError]]
/**
* A unique 'id' for the field for form generation
*/
def fieldId: Option[NodeSeq] = None
/**
* Is the Field required (and will have a style designating it as such)
*/
def required_? = false
/**
* Is this an upload field so that a form that includes this field must be multi-part mime
*/
def uploadField_? = false
/**
* Validate this field and return a list of Validation Issues
*/
def validate: List[FieldError]
def helpAsHtml: Box[NodeSeq] = Empty
/**
* Create an input field for the item
*/
def toForm: Box[NodeSeq]
/**
* Given the current state of things, should this field be shown
*/
def show_? = true
}
trait BaseField extends SettableField with FieldContainer {
def allFields: Seq[BaseField] = List(this)
}
trait StringValidators {
self: FieldIdentifier =>
import scala.xml.Text
import java.util.regex.Pattern
type ValueType
protected def valueTypeToBoxString(in: ValueType): Box[String]
protected def boxStrToValType(in: Box[String]): ValueType
def maxLen: Int
def crop(in: ValueType): ValueType =
boxStrToValType(valueTypeToBoxString(in).map{
case null => null
case s => s.substring(0, math.min(s.length, maxLen))
})
def removeRegExChars(regEx: String)(in: ValueType): ValueType=
boxStrToValType(valueTypeToBoxString(in).map{
case null => null
case s => s.replaceAll(regEx, "")
})
def toLower(in: ValueType): ValueType =
boxStrToValType(valueTypeToBoxString(in).map{
case null => null
case s => s.toLowerCase
})
def toUpper(in: ValueType): ValueType =
boxStrToValType(valueTypeToBoxString(in).map{
case null => null
case s => s.toUpperCase
})
def trim(in: ValueType): ValueType =
boxStrToValType(valueTypeToBoxString(in).map{
case null => null
case s => s.trim
})
def notNull(in: ValueType): ValueType =
boxStrToValType(valueTypeToBoxString(in) match {
case Full(str) if null ne str => Full(str)
case _ => Full("")
})
/**
* A validation helper. Make sure the string is at least a particular
* length and generate a validation issue if not.
*/
def valMinLen(len: Int, msg: => String)(value: ValueType): List[FieldError] =
valueTypeToBoxString(value) match {
case Full(str) if (null ne str) && str.length >= len => Nil
case _ => List(FieldError(this, Text(msg)))
}
/**
* A validation helper. Make sure the string is no more than a particular
* length and generate a validation issue if not.
*/
def valMaxLen(len: Int, msg: => String)(value: ValueType): List[FieldError] =
valueTypeToBoxString(value) match {
case Full(str) if (null eq str) || str.length <= len => Nil
case _ => List(FieldError(this, Text(msg)))
}
/**
* Make sure the field matches a regular expression
*/
def valRegex(pat: Pattern, msg: => String)(value: ValueType): List[FieldError] =
valueTypeToBoxString(value).flatMap{str => if (pat.matcher(str).matches) Full(true) else Empty} match {
case Full(true) => Nil
case _ => List(FieldError(this, Text(msg)))
}
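  // Usage sketch (hypothetical field): a field mixing in StringValidators would typically declare
  //   override def validations = valMinLen(2, "Too short") _ :: valRegex(Pattern.compile("[a-z]+"), "Lowercase only") _ :: Nil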
}
/**
* A base field that also has a confirm method
*/
trait ConfirmField extends BaseField {
/**
* Is this field on the confirm screen
*/
def onConfirm_? : Boolean
}
| lzpfmh/framework-2 | core/util/src/main/scala/net/liftweb/util/BaseField.scala | Scala | apache-2.0 | 5,650 |
package peapod
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import org.eclipse.jetty.server.{Handler, Request, Server}
import org.eclipse.jetty.server.handler.{AbstractHandler, ContextHandler, ContextHandlerCollection, ResourceHandler}
import scala.concurrent.Future
/**
 * Experimental and minimal web server for showing the DAG graph of a Peapod instance. To use it, extend the Peapod
 * instance with this trait and browse to "localhost:8080" to see the graph. This uses an external service to render
 * the graph itself, so the web browser needs web access for this to function. In the future this will be converted to
 * use D3 to generate the graph rather than the Graphviz based implementation that exists currently.
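 *
 * Usage sketch (`MyPipeline` is an illustrative Peapod subclass, not part of this library):
 * {{{
 * val p = new MyPipeline with Web
 * // while `p` is alive, open http://localhost:8080 to view the DAG
 * }}}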
*/
trait Web {
self: Peapod =>
import scala.concurrent.ExecutionContext.Implicits.global
val server = Future(new WebServer(this))
def stop() = {
server.map(_.server.stop())
}
}
class WebServer(p: Peapod, port: Int = 8080) {
val server = new Server(port)
val resource_handler = new ResourceHandler()
val url = this.getClass.getClassLoader.getResource("web")
resource_handler.setResourceBase(url.toString)
val contextWeb = new ContextHandler("/")
contextWeb.setHandler(resource_handler)
val context = new ContextHandler("/graph")
context.setHandler(new WebHandler(p))
val contexts = new ContextHandlerCollection()
contexts.setHandlers(Array(contextWeb, context))
server.setHandler(contexts)
server.start()
server.join()
}
class WebHandler(p: Peapod) extends AbstractHandler {
override def handle(s: String, request: Request,
httpServletRequest: HttpServletRequest,
httpServletResponse: HttpServletResponse): Unit = {
httpServletResponse.setContentType("text/html;charset=utf-8")
httpServletResponse.setStatus(HttpServletResponse.SC_OK)
request.setHandled(true)
httpServletResponse.getWriter.println(
if(httpServletRequest.getQueryString != null && request.getQueryString == "active") {
GraphFormatter.json(p.activeTasks())
} else {
GraphFormatter.json(p.allTasks())
}
)
}
} | teachingmachines/peapod | src/main/scala/peapod/Web.scala | Scala | mit | 2,177 |
package org.dsa.iot
import scala.collection.JavaConverters._
import scala.util.{ Failure, Success, Try }
import org.dsa.iot.dslink.node.{ Node, NodeBuilder, Permission, Writable }
import org.dsa.iot.dslink.node.actions.{ Action, ActionResult, EditorType, Parameter, ResultType }
import org.dsa.iot.dslink.node.value.{ Value, ValueType }
import org.dsa.iot.dslink.util.handler.Handler
import org.dsa.iot.dslink.util.json.{ JsonArray, JsonObject }
import org.slf4j.LoggerFactory
/**
* Helper functions and types for Kafka DSLink.
*/
package object kafka {
private val log = LoggerFactory.getLogger(getClass)
type ActionHandler = ActionResult => Unit
/* Action */
def action(handler: ActionHandler,
parameters: Iterable[Parameter] = Nil,
results: Iterable[Parameter] = Nil,
permission: Permission = Permission.READ,
resultType: ResultType = ResultType.VALUES): Action = {
val a = new Action(permission, new Handler[ActionResult] {
def handle(event: ActionResult) = handler(event)
})
parameters foreach a.addParameter
results foreach a.addResult
a.setResultType(resultType)
a
}
/* ActionResult */
implicit def stringExtractor(v: Value) = v.getString
implicit def numberExtractor(v: Value) = v.getNumber
implicit class RichActionResult(val event: ActionResult) extends AnyVal {
def getParam[T](name: String, check: T => Boolean = (_: T) => true, msg: String = "")(implicit ex: Value => T): T = {
val value = ex(event.getParameter(name))
if (!check(value))
throw new IllegalArgumentException(msg)
else
value
}
}
/* ValueType */
def ENUMS(enum: Enumeration) = ValueType.makeEnum(enum.values.map(_.toString).asJava)
implicit class RichValueType(val vt: ValueType) extends AnyVal {
def apply(name: String) = new Parameter(name, vt)
}
/* Parameter */
implicit class RichParameter(val param: Parameter) extends AnyVal {
def default(value: Any) = param having (_.setDefaultValue(anyToValue(value)))
def description(value: String) = param having (_.setDescription(value))
def editorType(value: EditorType) = param having (_.setEditorType(value))
def placeHolder(value: String) = param having (_.setPlaceHolder(value))
def meta(value: JsonObject) = param having (_.setMetaData(value))
}
/* Node */
implicit class RichNode(val node: Node) extends AnyVal {
def nodeType = Option(node.getConfig("nodeType")) map (_.getString)
def children = node.getChildren.asScala.toMap
}
/* NodeBuilder */
/**
* Pimps up NodeBuilder by providing Scala fluent syntax.
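   * Example (sketch; `builder` is an existing NodeBuilder and the values are illustrative):
   * {{{
   * builder.display("Status").valueType(ValueType.STRING).value(anyToValue("OK"))
   * }}}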
*/
implicit class RichNodeBuilder(val nb: NodeBuilder) extends AnyVal {
def display(name: String) = nb having (_.setDisplayName(name))
def attributes(tpls: (String, Value)*) = {
tpls foreach (t => nb.setAttribute(t._1, t._2))
nb
}
def config(configs: (String, Value)*) = {
configs foreach (c => nb.setConfig(c._1, c._2))
nb
}
def roConfig(configs: (String, Value)*) = {
configs foreach (c => nb.setRoConfig(c._1, c._2))
nb
}
def nodeType(nType: String) = nb having (_.setConfig("nodeType", anyToValue(nType)))
def valueType(vType: ValueType) = nb having (_.setValueType(vType))
def value(value: Value) = nb having (_.setValue(value))
def hidden(flag: Boolean) = nb having (_.setHidden(flag))
def profile(p: String) = nb having (_.setProfile(p))
def meta(md: Any) = nb having (_.setMetaData(md))
def serializable(flag: Boolean) = nb having (_.setSerializable(flag))
def writable(w: Writable) = nb having (_.setWritable(w))
def action(action: Action): NodeBuilder = nb having (_.setAction(action))
def action(handler: ActionHandler, permission: Permission = Permission.READ): NodeBuilder =
action(new Action(permission, new Handler[ActionResult] {
def handle(event: ActionResult) = handler(event)
}))
}
/**
* Extracts the data from a Value object.
*/
def valueToAny(value: Value): Any = value.getType.toJsonString match {
case ValueType.JSON_BOOL => value.getBool
case ValueType.JSON_NUMBER => value.getNumber
case ValueType.JSON_MAP => jsonObjectToMap(value.getMap)
case ValueType.JSON_ARRAY => jsonArrayToList(value.getArray)
case _ => value.getString
}
/**
* Converts a JsonArray instance into a scala List[Any].
*/
def jsonArrayToList(arr: JsonArray): List[Any] = arr.getList.asScala.toList map {
case x: JsonArray => jsonArrayToList(x)
case x: JsonObject => jsonObjectToMap(x)
case x => x
}
/**
* Converts a JsonObject instance into a scala Map[String, Any].
*/
def jsonObjectToMap(obj: JsonObject): Map[String, Any] = obj.getMap.asScala.toMap mapValues {
case x: JsonArray => jsonArrayToList(x)
case x: JsonObject => jsonObjectToMap(x)
case x => x
}
/**
* Converts a value into Value object.
*/
def anyToValue(value: Any): Value = value match {
case null => null
case x: java.lang.Number => new Value(x)
case x: Boolean => new Value(x)
case x: String => new Value(x)
case x: Map[_, _] => new Value(mapToJsonObject(x.asInstanceOf[Map[String, _]]))
case x: List[_] => new Value(listToJsonArray(x))
case x @ _ => new Value(x.toString)
}
/**
* Converts a scala List[Any] instance into a JsonArray.
*/
def listToJsonArray(ls: List[_]): JsonArray = {
val elements = ls map {
case x: List[_] => listToJsonArray(x)
case x: Map[_, _] => mapToJsonObject(x.asInstanceOf[Map[String, Any]])
case x => x
}
new JsonArray(elements.asJava)
}
/**
* Converts a scala Map[String, Any] instance into a JsonObject.
*/
def mapToJsonObject(mp: Map[String, _]): JsonObject = {
val elements = mp.mapValues {
case x: List[_] => listToJsonArray(x)
case x: Map[_, _] => mapToJsonObject(x.asInstanceOf[Map[String, Any]])
case x => x.asInstanceOf[Object]
}
    // due to issues with mutability, we have to do it the long way instead of elements.toJava
val m = new java.util.HashMap[String, Object]
elements foreach {
case (key, value) => m.put(key, value)
}
new JsonObject(m)
}
/**
* Helper class providing a simple syntax to add side effects to the returned value:
*
* {{{
* def square(x: Int) = {
* x * x
* } having (r => println "returned: " + r)
* }}}
*
* or simplified
*
* {{{
* def square(x: Int) = (x * x) having println
* }}}
*/
final implicit class Having[A](val result: A) extends AnyVal {
def having(body: A => Unit): A = {
body(result)
result
}
def having(body: => Unit): A = {
body
result
}
}
/**
* Retries executing some code up to the specified number of times.
*
* @param n how many times to try before throwing an exception.
* @param timeout time to wait before retrying (if `exponential` is set, this will be the initial timeout).
* @param exponential if `true`, then each subsequent timeout will be twice as long as the previous one.
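   *
   * Usage sketch (`openConnection` is an illustrative call that may throw):
   * {{{
   * val connection = retry(5, timeout = 100) { openConnection() }
   * }}}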
*/
@annotation.tailrec
def retry[T](n: Int, timeout: Long = 10, exponential: Boolean = true)(fn: => T): T = Try { fn } match {
case Success(x) => x
case _ if n > 1 =>
log.warn(s"Operation failed, waiting for $timeout ms to retry")
if (timeout > 0) Thread.sleep(timeout)
val newTimeout = if (exponential) timeout * 2 else timeout
retry(n - 1, newTimeout, exponential)(fn)
case Failure(e) => throw e
}
} | IOT-DSA/dslink-scala-kafka | src/main/scala/org/dsa/iot/kafka/package.scala | Scala | apache-2.0 | 7,761 |
package info.hargrave.composer.backend.manager
import java.io.{InputStream, OutputStream}
/**
* Defines the idea of a project, something that is open and being worked with by the user.
* At the most basic level, a project has a title and can be saved to the disk.
*/
abstract class Project {
/**
* Name of the project
*
* @return project name
*/
def title: String
/**
* Write the project to an output stream
*
* @param output output stream
*/
def writeProject(output: OutputStream)
/**
* Read the project from an input stream
*
* @throws IllegalArgumentException when the input stream provides invalid data
* @param input input stream
*/
@throws(classOf[IllegalArgumentException])
def readProject(input: InputStream)
/**
* Returns true when the project has been modified
*
* @return true if the project has been modified since it was last saved successfully, or if it is new.
*/
def isModified: Boolean
}
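
// Illustrative sketch, not part of the original source: a minimal in-memory
// Project implementation showing one way the contract above could be satisfied.
// The class name and buffering strategy are assumptions for demonstration only.
class InMemoryProject(val title: String) extends Project {
  private var contents: Array[Byte] = Array.emptyByteArray
  private var dirty: Boolean = true

  // Writes the buffered bytes and marks the project as saved
  override def writeProject(output: OutputStream): Unit = {
    output.write(contents)
    dirty = false
  }

  // Reads the full stream into memory; a real implementation would validate
  // the data and throw IllegalArgumentException on malformed input
  @throws(classOf[IllegalArgumentException])
  override def readProject(input: InputStream): Unit = {
    contents = Iterator.continually(input.read()).takeWhile(_ != -1).map(_.toByte).toArray
    dirty = false
  }

  override def isModified: Boolean = dirty
}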
| RomanHargrave/CUEComposer | src/main/scala/info/hargrave/composer/backend/manager/Project.scala | Scala | gpl-3.0 | 1,036 |
package sampler.abc
import org.scalatest.FreeSpec
class WeightedTest extends FreeSpec {
"Weighted should" - {
"Calculate mean rep scores" in fail("TODO")
"Throw exception if weight is negative" in fail("TODO")
"Know if it was generated locally" in fail("TODO")
}
} | tearne/Sampler | sampler-abc/src/test/scala/sampler/abc/WeightedTest.scala | Scala | apache-2.0 | 280 |
package controllers
import play.api._
import play.api.mvc._
object Application extends Controller {
def index = Action {
Ok(views.html.retrospective())
}
} | marlonpp/retrospective | app/controllers/Application.scala | Scala | mit | 167 |
package abstractions
trait MoveSupplier[A, D] {
def apply(context: AbstractContext[A, _, _, D]): Move[A, D]
} | arie-benichou/go-dot | src/main/scala/abstractions/MoveSupplier.scala | Scala | gpl-3.0 | 120 |
package rx.lang.scala
import scala.language.higherKinds
/**
* This package object provides some type class instances for Observable.
*/
package object scalaz extends ObservableInstances with ObservableTInstances {
}
| everpeace/rxscalaz | src/main/scala/rx/lang/scala/scalaz/package.scala | Scala | apache-2.0 | 221 |
package klughdl.components
import spinal.core._
/**
* KlugHDL
* Created by snipy on 05.12.16.
*/
class OneLevelComponent extends Component {
val io = new Bundle {
val a: Bool = in Bool
val b: Bool = in Bool
val c: Bool = in Bool
val d: Bool = in Bool
val e: Bool = out Bool
}
val andGateAB: AndGate = new AndGate
val andGateCD: AndGate = new AndGate
val orGate: OrGate = new OrGate
val one2one1 = new One2OneComponent
val one2one2 = new One2OneComponent
val one2one3 = new One2OneComponent
val one2one4 = new One2OneComponent
val one2one5 = new One2OneComponent
val one2one6 = new One2OneComponent
andGateAB.io.a := io.a
andGateAB.io.b := io.b
andGateCD.io.a := io.c
andGateCD.io.b := io.d
one2one1.io.i := andGateAB.io.c
one2one2.io.i := one2one1.io.o
one2one3.io.i := one2one2.io.o
orGate.io.a := one2one3.io.o
one2one4.io.i := andGateCD.io.c
one2one5.io.i := one2one4.io.o
one2one6.io.i := one2one5.io.o
orGate.io.b := one2one6.io.o
io.e := orGate.io.c
}
class One2OneComponent extends Component {
val io = new Bundle {
val i: Bool = in Bool
val o: Bool = out Bool
}
io.o := io.i
}
| SnipyJulmy/MSE_1617_PA | KlugHDL/src/main/scala/klughdl/components/OneLevelComponent.scala | Scala | gpl-2.0 | 1,183 |
/*
* Copyright © 2016 Schlichtherle IT Services
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package global.namespace.neuron.di.scala.sample
import global.namespace.neuron.di.scala._
@Neuron
trait SimpleGreeting {
def formatter: SimpleFormatter
/** Returns a greeting message for the given entity. */
def message(entity: String): String = formatter format entity
}
@Neuron
trait SimpleFormatter {
def theFormat: String
/** Returns a text which has been formatted using the given arguments. */
def format(args: AnyRef*): String = String.format(theFormat, args: _*)
}
object SimpleGreetingModule {
lazy val greeting: SimpleGreeting = make[SimpleGreeting]
lazy val formatter: SimpleFormatter = make[SimpleFormatter]
val theFormat = "Hello %s!"
}
| christian-schlichtherle/neuron-di | core-scala/src/test/scala/global/namespace/neuron/di/scala/sample/SimpleGreetingModule.scala | Scala | apache-2.0 | 1,292 |
package scalarules.test.io_utils
import java.io.File
import java.nio.file.Files
import io.bazel.rulesscala.io_utils.DeleteRecursively
import org.scalatest._
import flatspec._
import matchers.should._
class DeleteDirectoryTest extends AnyFlatSpec with Matchers {
"DeleteDirectory.run" should "remove nested folders" in {
// Arrange.
val tempDir = Files.createTempDirectory("test-tempdir-")
val nestedTmpDir = Files.createTempDirectory(tempDir, "abc")
Files.createTempFile(nestedTmpDir, "test", "")
val nestedTmpDir2 = Files.createTempDirectory(tempDir, "def")
Files.createTempFile(nestedTmpDir2, "test", "")
Files.createTempDirectory(tempDir, "ghi")
// Act.
DeleteRecursively.run(tempDir)
// Assert.
new File(tempDir.toUri) should not (exist)
}
}
| bazelbuild/rules_scala | test/src/main/scala/scalarules/test/io_utils/DeleteDirectoryTest.scala | Scala | apache-2.0 | 801 |
package scala.meta
package internal
package semantic
import org.scalameta.adt
import org.scalameta.adt._
import org.scalameta.invariants._
import org.scalameta.show._
import scala.meta.internal.prettyprinters._
@monadicRoot trait Typing
object Typing {
@noneLeaf object Zero extends Typing
@noneLeaf object Recursive extends Typing
@someLeaf class Nonrecursive(tpe: Type.Arg @byNeed) extends Typing {
protected def onTpeLoaded(tpe: Type.Arg) = require(tpe.isTypechecked && debug(tpe.show[Attributes]))
protected def writeReplace(): AnyRef = new Nonrecursive.SerializationProxy(this)
override def canEqual(other: Any): Boolean = other.isInstanceOf[Nonrecursive]
override def equals(that: Any): Boolean = that match {
case that: Nonrecursive => equality.Semantic.equals(this.tpe, that.tpe)
case _ => false
}
override def hashCode: Int = equality.Semantic.hashCode(tpe)
}
object Nonrecursive {
@SerialVersionUID(1L) private class SerializationProxy(@transient private var orig: Nonrecursive) extends Serializable {
private def writeObject(out: java.io.ObjectOutputStream): Unit = {
out.writeObject(orig.tpe)
}
private def readObject(in: java.io.ObjectInputStream): Unit = {
val tpe = in.readObject.asInstanceOf[Type.Arg]
orig = Nonrecursive(tpe)
}
private def readResolve(): AnyRef = orig
override def toString = s"Proxy($orig)"
}
}
}
// TODO: This unrelated code is here because of the limitation of knownDirectSubclasses.
// We would like to move it to scala/meta/internal/quasiquotes/ast/ReificationMacros.scala where it belongs,
// but then we have problems with compilation order.
trait TypingLiftables extends adt.Liftables {
implicit def liftableSubTree[T <: Tree]: u.Liftable[T]
lazy implicit val liftableTyping: u.Liftable[Typing] = materializeAdt[Typing]
}
| beni55/scalameta | scalameta/trees/src/main/scala/scala/meta/internal/semantic/Typing.scala | Scala | bsd-3-clause | 1,884 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.data
import org.geotools.data._
import org.geotools.factory.Hints
import org.geotools.filter.text.ecql.ECQL
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.index.{AttributeIndex, RecordIndex, Z2Index, Z3Index}
import org.locationtech.geomesa.accumulo.{AccumuloFeatureIndexType, TestWithDataStore}
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.security.SecurityUtils
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.opengis.feature.simple.SimpleFeature
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class AccumuloDataStoreAttributeVisibilityTest extends TestWithDataStore {
import scala.collection.JavaConversions._
sequential
override val spec = "name:String:index=full,age:Int,dtg:Date,*geom:Point:srid=4326;geomesa.visibility.level='attribute'"
val user = {
val sf = new ScalaSimpleFeature(sft, "user")
sf.setAttribute(0, "name-user")
sf.setAttribute(1, "10")
sf.setAttribute(2, "2014-01-01T01:00:00.000Z")
sf.setAttribute(3, "POINT (-120 45)")
SecurityUtils.setFeatureVisibility(sf, "user,user,user,user")
sf.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
sf
}
val admin = {
val sf = new ScalaSimpleFeature(sft, "admin")
sf.setAttribute(0, "name-admin")
sf.setAttribute(1, "11")
sf.setAttribute(2, "2014-01-02T01:00:00.000Z")
sf.setAttribute(3, "POINT (-120 46)")
SecurityUtils.setFeatureVisibility(sf, "admin,admin,admin,admin")
sf.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
sf
}
val mixed = {
val sf = new ScalaSimpleFeature(sft, "mixed")
sf.setAttribute(0, "name-mixed")
sf.setAttribute(1, "12")
sf.setAttribute(2, "2014-01-03T01:00:00.000Z")
sf.setAttribute(3, "POINT (-120 47)")
SecurityUtils.setFeatureVisibility(sf, "admin,user,admin,user")
sf.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
sf
}
// write the feature to the store
step {
addFeatures(Seq(user, admin, mixed))
}
def queryByAuths(auths: String, filter: String, expectedStrategy: AccumuloFeatureIndexType): Seq[SimpleFeature] = {
val ds = DataStoreFinder.getDataStore(dsParams ++ Map(AccumuloDataStoreParams.AuthsParam.key -> auths)).asInstanceOf[AccumuloDataStore]
val query = new Query(sftName, ECQL.toFilter(filter))
val plans = ds.getQueryPlan(query)
forall(plans)(_.filter.index mustEqual expectedStrategy)
SelfClosingIterator(ds.getFeatureReader(query, Transaction.AUTO_COMMIT)).toSeq
}
val filters = Seq(
("IN ('user', 'admin', 'mixed')", RecordIndex),
("bbox(geom, -121, 44, -119, 48)", Z2Index),
("bbox(geom, -121, 44, -119, 48) AND dtg DURING 2014-01-01T00:00:00.000Z/2014-01-04T00:00:00.000Z", Z3Index),
("name = 'name-user' OR name = 'name-admin' OR name = 'name-mixed'", AttributeIndex)
)
"AccumuloDataStore" should {
"correctly return all features with appropriate auths" in {
forall(filters) { case (filter, strategy) =>
val features = queryByAuths("admin,user", filter, strategy)
features must haveLength(3)
features must containTheSameElementsAs(Seq(user, admin, mixed))
}
}
"correctly return some features with appropriate auths" in {
forall(filters) { case (filter, strategy) =>
val features = queryByAuths("user", filter, strategy)
features must haveLength(2)
features must contain(user)
val m = features.find(_.getID == "mixed")
m must beSome
m.get.getAttribute(0) must beNull
m.get.getAttribute(1) mustEqual 12
m.get.getAttribute(2) must beNull
m.get.getAttribute(3) mustEqual mixed.getAttribute(3)
}
}
}
}
| ronq/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/data/AccumuloDataStoreAttributeVisibilityTest.scala | Scala | apache-2.0 | 4,305 |
import language.higherKinds
object Test {
trait Monad[M[_]] {
def foo[A](ma: M[A])(f: M[A] => Any) = f(ma)
}
implicit def function1Covariant[T]: Monad[({type l[a] = (T => a)})#l] =
new Monad[({type l[a] = (T => a)})#l] {}
def main(args: Array[String]) {
// inference of T = (=> Any) here was outlawed by SI-7899 / 8ed7099
// but this pattern is used in Scalaz in just a few places and caused
// a regression.
//
// Inference of a by-name type doesn't *always* lead to a ClassCastException,
// it only gets there if a method in generic code accepts a parameter of
// that type.
//
// We need to introduce the stricter inference rules gradually, probably
// with a warning.
val m = implicitly[Monad[({type f[+x] = (=> Any) => x})#f]]
assert(m.foo[Int]((x => 0))(f => f(???)) == 0)
}
}
| yusuke2255/dotty | tests/pending/run/t7899-regression.scala | Scala | bsd-3-clause | 851 |
/*
* Copyright (c) 2012 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import org.junit.Test
import org.junit.Assert._
class LensTests {
import Lens._
import Nat._
def typed[T](t : => T) {}
case class Address(street : String, city : String, postcode : String)
case class Person(name : String, age : Int, address : Address)
implicit val addressIso = Iso.hlist(Address.apply _, Address.unapply _)
implicit val personIso = Iso.hlist(Person.apply _, Person.unapply _)
val address = Address("Southover Street", "Brighton", "BN2 9UA")
val person = Person("Joe Grey", 37, address)
val nameLens = Lens[Person] >> _0
val ageLens = Lens[Person] >> _1
val addressLens = Lens[Person] >> _2
val streetLens = Lens[Person] >> _2 >> _0
val cityLens = Lens[Person] >> _2 >> _1
val postcodeLens = Lens[Person] >> _2 >> _2
@Test
def testBasics {
val age1 = ageLens.get(person)
typed[Int](age1)
assertEquals(37, age1)
val person2 = ageLens.set(person)(38)
assertEquals(38, person2.age)
val street1 = streetLens.get(person)
typed[String](street1)
assertEquals("Southover Street", street1)
val person3 = streetLens.set(person)("Montpelier Road")
assertEquals("Montpelier Road", person3.address.street)
}
@Test
def testCompose {
val addressLens = Lens[Person] >> _2
val streetLens = Lens[Address] >> _0
val personStreetLens1 = streetLens compose addressLens
val personStreetLens2 = compose(streetLens, addressLens)
val personStreetLens3 = (streetLens :: addressLens :: HNil).reduceLeft(compose)
val street1 = personStreetLens1.get(person)
typed[String](street1)
assertEquals("Southover Street", street1)
val street2 = personStreetLens2.get(person)
typed[String](street2)
assertEquals("Southover Street", street2)
val street3 = personStreetLens3.get(person)
typed[String](street3)
assertEquals("Southover Street", street3)
}
@Test
def testTuples {
type ISDB = (Int, (String, (Double, Boolean)))
val tp = (23, ("foo", (2.0, false)))
val lens0 = Lens[ISDB] >> _0
val lens1 = Lens[ISDB] >> _1
val lens10 = Lens[ISDB] >> _1 >> _0
val lens11 = Lens[ISDB] >> _1 >> _1
val lens110 = Lens[ISDB] >> _1 >> _1 >> _0
val lens111 = Lens[ISDB] >> _1 >> _1 >> _1
val i = lens0.get(tp)
typed[Int](i)
assertEquals(23, i)
val tpi = lens0.set(tp)(13)
typed[ISDB](tpi)
assertEquals((13, ("foo", (2.0, false))), tpi)
val sdb = lens1.get(tp)
typed[(String, (Double, Boolean))](sdb)
assertEquals(("foo", (2.0, false)), sdb)
val tpsdb = lens1.set(tp)("bar", (3.0, true))
typed[ISDB](tpsdb)
assertEquals((23, ("bar", (3.0, true))), tpsdb)
val s = lens10.get(tp)
typed[String](s)
assertEquals("foo", s)
val tps = lens10.set(tp)("bar")
typed[ISDB](tps)
assertEquals((23, ("bar", (2.0, false))), tps)
val db = lens11.get(tp)
typed[(Double, Boolean)](db)
assertEquals((2.0, false), db)
val tpdb = lens11.set(tp)(3.0, true)
typed[ISDB](tpdb)
assertEquals((23, ("foo", (3.0, true))), tpdb)
val d = lens110.get(tp)
typed[Double](d)
    assertEquals(2.0, d, Double.MinPositiveValue)
val tpd = lens110.set(tp)(3.0)
typed[ISDB](tpd)
assertEquals((23, ("foo", (3.0, false))), tpd)
val b = lens111.get(tp)
typed[Boolean](b)
assertEquals(false, b)
val tpb = lens111.set(tp)(true)
typed[ISDB](tpb)
assertEquals((23, ("foo", (2.0, true))), tpb)
}
@Test
def testHLists {
type ISB = Int :: String :: Boolean :: HNil
val l = 23 :: "foo" :: true :: HNil
val lens0 = hlistNthLens[ISB, _0]
val lens1 = hlistNthLens[ISB, _1]
val lens2 = hlistNthLens[ISB, _2]
val i = lens0.get(l)
typed[Int](i)
assertEquals(23, i)
val li = lens0.set(l)(13)
typed[ISB](li)
assertEquals(13 :: "foo" :: true :: HNil, li)
val s = lens1.get(l)
typed[String](s)
assertEquals("foo", s)
val ls = lens1.set(l)("bar")
typed[ISB](ls)
assertEquals(23 :: "bar" :: true :: HNil, ls)
val b = lens2.get(l)
typed[Boolean](b)
assertEquals(true, b)
val lb = lens2.set(l)(false)
typed[ISB](lb)
assertEquals(23 :: "foo" :: false :: HNil, lb)
}
@Test
def testSets {
val s = Set("foo", "bar", "baz")
val lens = setLens[String]("bar")
val b1 = lens.get(s)
assert(b1)
val s2 = lens.set(s)(false)
assertEquals(Set("foo", "baz"), s2)
val b2 = lens.get(s2)
assert(!b2)
val s3 = lens.set(s2)(true)
assertEquals(s, s3)
}
@Test
def testMaps {
val m = Map(23 -> "foo", 13 -> "bar", 11 -> "baz")
val lens = mapLens[Int, String](13)
val s1 = lens.get(m)
assertEquals(Option("bar"), s1)
val m2 = lens.set(m)(Option("wibble"))
assertEquals(Map(23 -> "foo", 13 -> "wibble", 11 -> "baz"), m2)
val s2 = lens.get(m2)
assertEquals(Option("wibble"), s2)
val m3 = lens.set(m)(None)
assertEquals(Map(23 -> "foo", 11 -> "baz"), m3)
val s3 = lens.get(m3)
assertEquals(None, s3)
val m4 = lens.set(m3)(Option("bar"))
assertEquals(m, m4)
val s4 = lens.get(m4)
assertEquals(Option("bar"), s4)
}
@Test
def testProducts {
val nameAgeCityLens = nameLens ~ ageLens ~ cityLens
val nac1 = nameAgeCityLens.get(person)
typed[(String, Int, String)](nac1)
assertEquals(("Joe Grey", 37, "Brighton"), nac1)
val person2 = nameAgeCityLens.set(person)("Joe Soap", 27, "London")
assertEquals(Person("Joe Soap", 27, Address("Southover Street", "London", "BN2 9UA")), person2)
}
} | mpilquist/shapeless | core/src/test/scala/shapeless/lenses.scala | Scala | apache-2.0 | 6,301 |
package views.html
package site
import controllers.routes
import scala.util.chaining._
import lila.api.Context
import lila.app.templating.Environment._
import lila.app.ui.ScalatagsTemplate._
object contact {
import trans.contact._
import views.html.base.navTree._
private lazy val contactEmailBase64 = lila.common.String.base64.encode(contactEmailInClear)
def contactEmailLinkEmpty =
a(cls := "contact-email-obfuscated", attr("data-email") := contactEmailBase64)
def contactEmailLink(implicit ctx: Context) =
contactEmailLinkEmpty(trans.clickToRevealEmailAddress())
private def reopenLeaf(prefix: String)(implicit ctx: Context) =
Leaf(
s"$prefix-reopen",
wantReopen(),
frag(
p(a(href := routes.Account.reopen)(reopenOnThisPage())),
p(doNotAskByEmailToReopen())
)
)
private def howToReportBugs(implicit ctx: Context): Frag =
frag(
ul(
li(
a(href := routes.ForumCateg.show("lichess-feedback"))(reportBugInForum())
),
li(
a(href := "https://github.com/ornicar/lila/issues")(reportWebsiteIssue())
),
li(
a(href := "https://github.com/veloce/lichobile/issues")(reportMobileIssue())
),
li(
a(href := "https://discord.gg/lichess")(reportBugInDiscord())
)
),
p(howToReportBug())
)
private def menu(implicit ctx: Context): Branch =
Branch(
"root",
whatCanWeHelpYouWith(),
List(
Branch(
"login",
iCantLogIn(),
List(
Leaf(
"email-confirm",
noConfirmationEmail(),
p(
a(href := routes.Account.emailConfirmHelp)(visitThisPage()),
"."
)
),
Leaf(
"forgot-password",
forgotPassword(),
p(
a(href := routes.Auth.passwordReset)(visitThisPage()),
"."
)
),
Leaf(
"forgot-username",
forgotUsername(),
p(
a(href := routes.Auth.login)(youCanLoginWithEmail()),
"."
)
),
Leaf(
"lost-2fa",
lost2FA(),
p(a(href := routes.Auth.passwordReset)(doPasswordReset()), ".")
),
reopenLeaf("login"),
Leaf(
"dns",
"\"This site can’t be reached\"",
frag(
p("If you can't reach Lichess, and your browser says something like:"),
ul(
li("This site can't be reached."),
li(strong("lichess.org"), "’s server IP address could not be found."),
li("We can’t connect to the server at lichess.org.")
),
p("Then you have a ", strong("DNS issue"), "."),
p(
"There's nothing we can do about it, but ",
a("here's how you can fix it")(
href := "https://www.wikihow.com/Fix-DNS-Server-Not-Responding-Problem"
),
"."
)
)
)
)
),
Branch(
"account",
accountSupport(),
List(
Leaf(
"title",
wantTitle(),
p(
a(href := routes.Page.master)(visitTitleConfirmation()),
"."
)
),
Leaf(
"close",
wantCloseAccount(),
frag(
p(a(href := routes.Account.close)(closeYourAccount()), "."),
p(doNotAskByEmail())
)
),
reopenLeaf("account"),
Leaf(
"change-username",
wantChangeUsername(),
frag(
p(a(href := routes.Account.username)(changeUsernameCase()), "."),
p(cantChangeMore()),
p(orCloseAccount())
)
),
Leaf(
"clear-history",
wantClearHistory(),
frag(
p(cantClearHistory()),
p(orCloseAccount())
)
)
)
),
Leaf(
"report",
wantReport(),
frag(
p(
a(href := routes.Report.form)(toReportAPlayerUseForm()),
"."
),
p(
youCanAlsoReachReportPage(button(cls := "thin button button-empty", dataIcon := ""))
),
p(
doNotMessageModerators(),
br,
doNotReportInForum(),
br,
doNotSendReportEmails(),
br,
onlyReports()
)
)
),
Branch(
"bug",
wantReportBug(),
List(
Leaf(
"enpassant",
illegalPawnCapture(),
frag(
p(calledEnPassant()),
p(a(href := "/learn#/15")(tryEnPassant()), ".")
)
),
Leaf(
"castling",
illegalCastling(),
frag(
p(castlingPrevented()),
p(a(href := "https://en.wikipedia.org/wiki/Castling#Requirements")(castlingRules()), "."),
p(a(href := "/learn#/14")(tryCastling()), "."),
p(castlingImported())
)
),
Leaf(
"insufficient",
insufficientMaterial(),
frag(
p(a(href := faq.fideHandbookUrl)(fideMate()), "."),
p(knightMate())
)
),
Leaf(
"casual",
noRatingPoints(),
frag(
p(ratedGame()),
botRatingAbuse()
)
),
Leaf(
"error-page",
errorPage(),
frag(
p(reportErrorPage()),
howToReportBugs
)
),
Leaf(
"security",
"Security vulnerability",
frag(
p("Please report security issues to ", contactEmailLink),
p(
"Like all contributions to Lichess, security reviews and pentesting are appreciated. ",
"Note that we do not currently pay cash bounties."
),
p(
"Vulnerabilities are relevant even when they are not directly exploitable, ",
"for example XSS mitigated by CSP."
),
p(
"When doing your research, please minimize negative impact for other users. ",
"As long as you keep this in mind, testing should not require prior coordination. ",
"Avoid spamming, DDoS and volumetric attacks."
),
p(
"We believe transport encryption will probably be sufficient for all reports. ",
"If you insist on using PGP, please clarify the nature of the message ",
"in the plain-text subject and encrypt for ",
a(href := "/.well-known/gpg.asc")("multiple recipients"),
"."
)
)
),
Leaf(
"other-bug",
"Other bug",
frag(
p("If you found a new bug, you may report it:"),
howToReportBugs
)
)
)
),
frag(
p(doNotMessageModerators()),
p(sendAppealTo(a(href := routes.Appeal.home)(netConfig.domain, routes.Appeal.home.url))),
p(
falsePositives(),
br,
ifLegit()
)
) pipe { appealBase =>
Branch(
"appeal",
banAppeal(),
List(
Leaf(
"appeal-cheat",
engineAppeal(),
frag(
appealBase,
p(
accountLost(),
br,
doNotDeny()
)
)
),
Leaf(
"appeal-other",
otherRestriction(),
appealBase
)
)
)
},
Branch(
"collab",
collaboration(),
List(
Leaf(
"monetize",
monetizing(),
frag(
p("We are not interested in any way of monetizing Lichess."),
p(
"We will never display any kind of ads, we won't track our players, and we won't sell or buy traffic or users."
),
p("Please do not email us about marketing, tracking, or advertising."),
br,
p(
"We encourage everyone to ",
a(href := "/ads")("block all ads and trackers.")
)
)
),
Leaf(
"buy",
buyingLichess(),
p("We are not selling, to anyone, for any price. Ever.")
),
Leaf(
"authorize",
authorizationToUse(),
frag(
p(welcomeToUse()),
p(videosAndBooks()),
p(creditAppreciated())
)
),
Leaf(
"gdpr",
"GDPR",
frag(
p("If you are a European citizen, you may request the deletion of your Lichess account."),
p(
"First, ",
a(href := routes.Account.close)("close your account"),
"."
),
p(
"Then send us an email at ",
contactEmailLink,
" to request the definitive erasure of all data linked to the account."
)
)
),
Leaf(
"dmca",
"DMCA / Intellectual Property Take Down Notice",
p(
a(href := dmcaUrl)("Complete this form"),
" ",
"if you are the original copyright holder, or an agent acting on behalf of the copyright holder, and believe Lichess is hosting work(s) you hold the copyright to."
)
),
Leaf(
"contact-other",
noneOfTheAbove(),
frag(
p(sendEmailAt(contactEmailLink)),
p(explainYourRequest())
)
)
)
)
)
)
val dmcaUrl = "/dmca"
def apply()(implicit ctx: Context) =
page.layout(
title = trans.contact.contact.txt(),
active = "contact",
moreCss = cssTag("contact"),
moreJs = jsModule("contact"),
contentCls = "page box box-pad"
)(
frag(
h1(contactLichess()),
div(cls := "nav-tree")(renderNode(menu, none))
)
)
}
| luanlv/lila | app/views/site/contact.scala | Scala | mit | 11,392 |
package io.flow.reference
import io.flow.reference.v0.models.Region
object Regions extends Validation[Region] {
override val cache: Map[String, Region] = Map(
data.Regions.all.map { r =>
(r.id.toLowerCase -> r)
}: _*
)
override def singular = "region"
override def plural = "regions"
override def name(r: Region): String = r.name
/**
* Filters regions based on the query parameter. We filter in a few
* different ways:
*
* - if the query string maps to a country, we find all regions that contain that country
* - otherwise we check if there is a region with id = q
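   *
   * Example (illustrative; the ids used are assumptions about the reference data):
   * {{{
   * Regions.filter("europe") // the region with id "europe", if defined
   * Regions.filter("fra")    // every region whose country list includes France
   * }}}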
*/
def filter(q: String): Seq[Region] = {
Countries.find(q) match {
case None => {
Seq(find(q)).flatten
}
case Some(c) => {
data.Regions.all.filter { r =>
r.countries.contains(c.iso31663)
}
}
}
}
}
| flowcommerce/lib-reference-scala | src/main/scala/io/flow/reference/Regions.scala | Scala | mit | 897 |
package com.xored.scalajs.react.examples.timer
import com.xored.scalajs.react._
import org.scalajs.dom._
object Timer extends TypedReactSpec {
case class Props()
case class State(secondsElapsed: Int, interval: Option[Int])
def getInitialState(self: This) = State(secondsElapsed = 0, interval = None)
implicit class Closure(self: This) {
import self._
val tick = () => {
setState(state.copy(secondsElapsed = state.secondsElapsed + 1))
}
}
override def componentDidMount(self: This) = {
val interval = window.setInterval(self.tick, 1000)
self.setState(self.state.copy(interval = Some(interval)))
}
override def componentWillUnmount(self: This) = {
self.state.interval.foreach(window.clearInterval)
}
@scalax
def render(self: This) = {
<div>Seconds Elapsed: {self.state.secondsElapsed}</div>
}
}
| Aste88/scala-js-react | scalajs-react-examples/src/main/scala/com/xored/scalajs/react/examples/timer/Timer.scala | Scala | apache-2.0 | 861 |
package jmh
import org.openjdk.jmh.annotations._
import offheap._
@State(Scope.Thread)
class PoolContention {
implicit val props = Region.Props()
@Benchmark
def contention = {
val r = Region.open
r.close
}
}
| ignasi35/scala-offheap | jmh/src/main/scala/Pool.scala | Scala | bsd-3-clause | 227 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.i18n
import play.api.mvc._
/**
* Brings convenient implicit conversions from [[play.api.mvc.RequestHeader]] to [[Messages]].
*
* Example:
* {{{
* import play.api.i18n.Messages
* class MyController(val messagesApi: MessagesApi ...)
* extends AbstractController(cc) with I18nSupport {
* val action = Action { implicit request =>
* val messageFromRequest = request.messages("hello.world")
* Ok(s"\\$messageFromRequest")
* }
* }
* }}}
*/
trait I18nSupport extends I18NSupportLowPriorityImplicits {
def messagesApi: MessagesApi
/**
* Converts from a request directly into a Messages.
*
* @param request the incoming request
* @return The preferred [[Messages]] according to the given [[play.api.mvc.RequestHeader]]
*/
implicit def request2Messages(implicit request: RequestHeader): Messages = messagesApi.preferred(request)
}
/**
* A static object with type enrichment for request and responses.
*
* {{{
* import I18nSupport._
* }}}
*/
object I18nSupport extends I18NSupportLowPriorityImplicits
/**
* Implicit conversions for using i18n with requests and results.
*/
trait I18NSupportLowPriorityImplicits {
/**
* Adds convenient methods to handle the messages.
*/
implicit class RequestWithMessagesApi(request: RequestHeader) {
/**
* Adds a `messages` method that can be used on a request,
* returning the Messages object in the preferred language
* of the request.
*
* For example:
* {{{
* implicit val messagesApi: MessagesApi = ...
     * val messagesFromRequest: Messages = request.messages
     * val greeting: String = messagesFromRequest("hello.world")
* }}}
*/
def messages(implicit messagesApi: MessagesApi): Messages = {
messagesApi.preferred(request)
}
/**
* Adds a `lang` method that can be used on a request,
* returning the lang corresponding to the preferred language
* of the request.
*
* For example:
* {{{
* implicit val messagesApi: MessagesApi = ...
* val lang: Lang = request.lang
* }}}
*/
def lang(implicit messagesApi: MessagesApi): Lang = {
messagesApi.preferred(request).lang
}
}
/**
* Adds convenient methods to handle the client-side language
*/
implicit class ResultWithMessagesApi(result: Result) {
/**
* Sets the user's language permanently for future requests by storing it in a cookie.
*
* For example:
* {{{
* implicit val messagesApi: MessagesApi = ...
* val lang = Lang("fr-FR")
* Ok(Messages("hello.world")).withLang(lang)
* }}}
*
* @param lang the language to store for the user
* @return the new result
*/
def withLang(lang: Lang)(implicit messagesApi: MessagesApi): Result = {
messagesApi.setLang(result, lang)
}
/**
* Clears the user's language by discarding the language cookie set by withLang
*
* For example:
* {{{
* implicit val messagesApi: MessagesApi = ...
* Ok(Messages("hello.world")).withoutLang
* }}}
*
* @return the new result
*/
def withoutLang(implicit messagesApi: MessagesApi): Result = {
messagesApi.withoutLang(result)
}
@deprecated("Use withoutLang", "2.7.0")
def clearingLang(implicit messagesApi: MessagesApi): Result = withoutLang
}
}
/**
* A trait for extracting a Messages instance from Langs
*/
trait LangImplicits {
def messagesApi: MessagesApi
/**
* @return A [[Messages]] value that uses the [[Lang]] found in the implicit scope.
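   *
   * For example (illustrative):
   * {{{
   * implicit val lang: Lang = Lang("en")
   * val messages: Messages = implicitly[Messages]
   * }}}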
*/
implicit def lang2Messages(implicit lang: Lang): Messages = MessagesImpl(lang, messagesApi)
}
| mkurz/playframework | core/play/src/main/scala/play/api/i18n/I18nSupport.scala | Scala | apache-2.0 | 3,741 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.activation.OnSpellCastEvent
import io.truthencode.ddo.model.classes.HeroicCharacterClass
import io.truthencode.ddo.model.classes.HeroicCharacterClass.Druid
import io.truthencode.ddo.model.misc.DefaultCasterCoolDown
import io.truthencode.ddo.model.spells.{Spell, SpellBookImpl}
import io.truthencode.ddo.support.naming.Prefix
import io.truthencode.ddo.support.requisite.{FeatRequisiteImpl, GrantsToClass, RequiresAllOfClass}
/**
 * You gain an additional spell preparation slot per spell level to cast a Summon Nature's Ally spell
* of that level.
*/
protected[feats] trait DruidSpontaneousCasting
extends FeatRequisiteImpl with ActiveFeat with SpellBookImpl with OnSpellCastEvent
with GrantsToClass with RequiresAllOfClass with Prefix with DefaultCasterCoolDown {
/**
* Delimits the prefix and text.
*/
override protected val prefixSeparator: String = " "
override def allOfClass: Seq[(HeroicCharacterClass, Int)] = List((Druid, 1))
override def grantToClass: Seq[(HeroicCharacterClass, Int)] = List((Druid, 1))
override def prefix: Option[String] = Some("Druid:")
abstract override def spellIds: Set[String] =
super.spellIds ++ Spell.summonNatureAllySpells
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/DruidSpontaneousCasting.scala | Scala | apache-2.0 | 1,949 |
package com.arcusys.learn.liferay.update.version300
import java.sql.Connection
import com.arcusys.learn.liferay.update.version300.migrations.scorm.ActivityDataMapMigration
import com.arcusys.valamis.persistence.common.SlickProfile
import com.arcusys.valamis.persistence.impl.scorm.schema.ActivityDataMapTableComponent
import org.scalatest.{BeforeAndAfter, FunSuite}
import scala.slick.driver.{H2Driver, JdbcProfile}
import scala.slick.jdbc.{JdbcBackend, StaticQuery}
class ActivityDataMapMigrationTest(val driver: JdbcProfile)
extends FunSuite
with BeforeAndAfter
with ActivityDataMapTableComponent
with SlickProfile {
def this() {
this(H2Driver)
}
import driver.simple._
val db = Database.forURL("jdbc:h2:mem:migrationActivityTest", driver = "org.h2.Driver")
var connection: Connection = _
before {
connection = db.source.createConnection()
db.withSession { implicit s =>
StaticQuery.updateNA(
"""create table Learn_LFActivityDataMap (
id_ LONG not null primary key,
packageID INTEGER null,
activityID VARCHAR(512) null,
targetId TEXT null,
readSharedData BOOLEAN null,
writeSharedData BOOLEAN null
);"""
).execute
activityDataMapTQ.ddl.create
}
}
after {
db.withSession { implicit s =>
StaticQuery.updateNA(
"""drop table Learn_LFActivityDataMap;"""
).execute
activityDataMapTQ.ddl.drop
}
connection.close()
}
val courseId = 245
val lessonOwnerId = 354
test("empty source table") {
db.withSession { implicit s =>
val migration = new ActivityDataMapMigration(db, driver)
migration.migrate()
val size =
activityDataMapTQ.length.run
assert(0 == size)
}
}
test("migrate") {
val id: Long = 343
val packageID: Long = 34234
val activityID: String = "sdgsdgds"
val targetId: String = "dsdsgdsg"
val readSharedData: Boolean = true
val writeSharedData: Boolean = true
db.withSession { implicit s =>
addActivityDataMap(
id,
packageID,
activityID,
targetId,
readSharedData,
writeSharedData
)
val migration = new ActivityDataMapMigration(db, driver)
migration.migrate()
val rows = activityDataMapTQ.list
assert(1 == rows.length)
val g = rows.head
assert(packageID == g.packageId.get)
assert(activityID == g.activityId.get)
assert(targetId == g.targetId)
assert(readSharedData == g.readSharedData)
assert(writeSharedData == g.writeSharedData)
}
}
private def addActivityDataMap(id: Long,
packageID: Long,
activityID: String,
targetId: String,
readSharedData: Boolean,
writeSharedData: Boolean
)(implicit s: JdbcBackend#Session): Unit = {
StaticQuery.updateNA(
s"""insert into Learn_LFActivityDataMap
(id_, packageID, activityID, targetId, readSharedData, writeSharedData)
values ($id, $packageID, '$activityID', '$targetId', $readSharedData, $writeSharedData);"""
).execute
}
}
| igor-borisov/valamis | learn-portlet/src/test/scala/com/arcusys/learn/liferay/update/version300/ActivityDataMapMigrationTest.scala | Scala | gpl-3.0 | 3,318 |
package lensimpl.bench.std
import lensimpl.bench.{Nested0Input, Util}
import org.openjdk.jmh.annotations._
@State(Scope.Benchmark)
class LensBench {
@Benchmark def lensSTDGet0(in: Nested0Input) = in.n0.i
@Benchmark def lensSTDGet3(in: Nested0Input) = in.n0.n.n.n.i
@Benchmark def lensSTDGet6(in: Nested0Input) = in.n0.n.n.n.n.n.n.i
@Benchmark def lensSTDSet0(in: Nested0Input) = in.n0.copy(i = 43)
@Benchmark def lensSTDSet3(in: Nested0Input) = in.n0.copy(n = in.n0.n.copy(n = in.n0.n.n.copy(n = in.n0.n.n.n.copy(i = 43))))
@Benchmark def lensSTDSet6(in: Nested0Input) = in.n0.copy(
n = in.n0.n.copy(
n = in.n0.n.n.copy(
n = in.n0.n.n.n.copy(
n = in.n0.n.n.n.n.copy(
n = in.n0.n.n.n.n.n.copy(
n = in.n0.n.n.n.n.n.n.copy(
i = 43
)))))))
@Benchmark def lensSTDModify0(in: Nested0Input) = in.n0.copy(i = in.n0.i + 1)
@Benchmark def lensSTDModify3(in: Nested0Input) = in.n0.copy(n = in.n0.n.copy(n = in.n0.n.n.copy(n = in.n0.n.n.n.copy(i = in.n0.n.n.n.i + 1))))
@Benchmark def lensSTDModify6(in: Nested0Input) = in.n0.copy(
n = in.n0.n.copy(
n = in.n0.n.n.copy(
n = in.n0.n.n.n.copy(
n = in.n0.n.n.n.n.copy(
n = in.n0.n.n.n.n.n.copy(
n = in.n0.n.n.n.n.n.n.copy(
i = in.n0.n.n.n.n.n.n.i + 1
)))))))
@Benchmark def lensSTDModifyF0(in: Nested0Input) = Util.safeDivide(in.n0.i).map(_i => in.n0.copy(i = _i))
@Benchmark def lensSTDModifyF3(in: Nested0Input) = Util.safeDivide(in.n0.n.n.n.i).map(_i =>
in.n0.copy(n = in.n0.n.copy(n = in.n0.n.n.copy(n = in.n0.n.n.n.copy(i = _i))))
)
@Benchmark def lensSTDModifyF6(in: Nested0Input) = Util.safeDivide(in.n0.n.n.n.n.n.n.i).map(_i => in.n0.copy(
n = in.n0.n.copy(
n = in.n0.n.n.copy(
n = in.n0.n.n.n.copy(
n = in.n0.n.n.n.n.copy(
n = in.n0.n.n.n.n.n.copy(
n = in.n0.n.n.n.n.n.n.copy(
i = _i
))))))))
} | julien-truffaut/LensImpl | bench/src/main/scala/lensimpl/bench/std/LensBench.scala | Scala | mit | 2,028 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.rules.physical.batch
import org.apache.calcite.plan.volcano.RelSubset
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.calcite.rex.RexNode
import org.apache.flink.table.plan.nodes.FlinkConventions
import org.apache.flink.table.plan.nodes.logical.{FlinkLogicalCalc, FlinkLogicalCorrelate, FlinkLogicalTableFunctionScan}
import org.apache.flink.table.plan.nodes.physical.batch.BatchExecCorrelate
class BatchExecCorrelateRule extends ConverterRule(
classOf[FlinkLogicalCorrelate],
FlinkConventions.LOGICAL,
FlinkConventions.BATCH_PHYSICAL,
"BatchExecCorrelateRule") {
override def matches(call: RelOptRuleCall): Boolean = {
val join = call.rel(0).asInstanceOf[FlinkLogicalCorrelate]
val right = join.getRight.asInstanceOf[RelSubset].getOriginal
right match {
// right node is a table function
case _: FlinkLogicalTableFunctionScan => true
// a filter is pushed above the table function
case calc: FlinkLogicalCalc =>
calc.getInput.asInstanceOf[RelSubset]
.getOriginal.isInstanceOf[FlinkLogicalTableFunctionScan]
case _ => false
}
}
override def convert(rel: RelNode): RelNode = {
val join = rel.asInstanceOf[FlinkLogicalCorrelate]
val traitSet: RelTraitSet = rel.getTraitSet.replace(FlinkConventions.BATCH_PHYSICAL)
val convInput: RelNode = RelOptRule.convert(join.getInput(0), FlinkConventions.BATCH_PHYSICAL)
val right: RelNode = join.getInput(1)
def convertToCorrelate(relNode: RelNode, condition: Option[RexNode]): BatchExecCorrelate = {
relNode match {
case rel: RelSubset =>
convertToCorrelate(rel.getRelList.get(0), condition)
case calc: FlinkLogicalCalc =>
convertToCorrelate(
calc.getInput.asInstanceOf[RelSubset].getOriginal,
Some(calc.getProgram.expandLocalRef(calc.getProgram.getCondition)))
case scan: FlinkLogicalTableFunctionScan =>
new BatchExecCorrelate(
rel.getCluster,
traitSet,
convInput,
scan,
condition,
None,
rel.getRowType,
join.getJoinType)
}
}
convertToCorrelate(right, None)
}
}
object BatchExecCorrelateRule {
val INSTANCE: RelOptRule = new BatchExecCorrelateRule
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/rules/physical/batch/BatchExecCorrelateRule.scala | Scala | apache-2.0 | 3,273 |
package p1
object Cafe {
def coalesce(charges: Seq[Charge]): Seq[Charge] = charges.groupBy(_.cc).values.map(_.reduce(_ combine _)).toSeq
}
case class Charge(cc:CreditCard, amount:Double) {
def combine(other:Charge): Charge =
if(cc == other.cc) {
copy(amount = amount + other.amount)
} else {
throw new Exception("Cannot combine charges to different cards")
}
}
class Cafe {
def buyCoffee(cc:CreditCard): (Coffee, Charge) = {
val cup = new Coffee()
(cup, Charge(cc, cup.price))
}
def buyCoffees(cc:CreditCard, n:Int): (Seq[Coffee], Charge) = {
val purchases: Seq[(Coffee,Charge)] = List.fill(n)(buyCoffee(cc))
val (coffees, charges) = purchases.unzip
(coffees, charges.reduce((c1, c2) => c1.combine(c2)))
}
}
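
// Illustrative usage, not part of the original source: buying several coffees
// on one card and merging the resulting per-card charges with Cafe.coalesce.
object CafeExample extends App {
  val cafe = new Cafe
  val card = CreditCard()
  val (coffees, charge) = cafe.buyCoffees(card, 3)
  println(s"Bought ${coffees.size} coffees; combined charge: $charge")
  println(Cafe.coalesce(Seq(charge)))
}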
case class CreditCard() {
def charge(c:Int):Unit = {}
}
case class Coffee() {
var price:Int = 0
}
| tarata/FunctionalProgramminginScala | src/main/scala/p1/Cafe.scala | Scala | mit | 872 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.quinine.metrics.rna
import org.apache.spark.rdd.RDD
import org.bdgenomics.formats.avro.AlignmentRecord
/**
* Helper object for generating RNA-seq read count summaries.
*/
private[rna] object RNASeqReadCounts extends Serializable {
/**
* @param jBool A Java boolean value, which may be null.
* @param default If the jBool was null, the default to fall back to.
* @return Returns the Java boolean as a Scala boolean, with the null
* condition validated.
*/
private def toScalaBool(jBool: java.lang.Boolean, default: Boolean = false): Boolean = {
Option(jBool)
.map(b => b: Boolean)
.getOrElse(default)
}
/**
* @param read The alignment record to generate counts for.
* @return Fills in a read count object with the appropriate values determined
* by inspecting the alignment flags.
*/
def apply(read: AlignmentRecord): RNASeqReadCounts = {
val mappedFlag = toScalaBool(read.getReadMapped)
val (unique, duplicate, mapped, mappedUnique) =
(toScalaBool(read.getDuplicateRead), mappedFlag) match {
case (true, true) => (0L, 1L, 1L, 0L)
case (true, false) => (0L, 1L, 0L, 0L)
case (false, true) => (1L, 0L, 1L, 1L)
case (false, false) => (1L, 0L, 0L, 0L)
}
val (forward, reverse) = (mappedFlag, toScalaBool(read.getReadNegativeStrand)) match {
case (false, _) => (0L, 0L)
case (true, false) => (1L, 0L)
case (true, true) => (0L, 1L)
}
RNASeqReadCounts(1L,
unique,
duplicate,
mapped,
mappedUnique,
forward,
reverse)
}
/**
* Computes read counts across an RDD of reads.
*
* @param rdd The RDD of reads to compute statistics for.
* @return Returns a summary of the alignment info for the given panel.
*/
def apply(rdd: RDD[AlignmentRecord]): RNASeqReadCounts = {
rdd.map(apply)
.reduce(_ ++ _)
}
}
/**
* Counts for the number of reads sequenced in an RNA-seq protocol after
* alignment to a transcriptome build.
*
* @param reads The total number of reads.
* @param unique The number of unique reads (not marked as duplicate).
* @param duplicate The number of reads marked as duplicate.
* @param mapped The number of reads that successfully aligned.
* @param mappedUnique The number of reads that mapped successfully and that
* were not marked as duplicates.
* @param forward Reads that were mapped on the forward strand.
* @param reverse Reads that were mapped on the reverse strand.
*/
private[rna] case class RNASeqReadCounts(reads: Long,
unique: Long,
duplicate: Long,
mapped: Long,
mappedUnique: Long,
forward: Long,
reverse: Long) {
def ++(that: RNASeqReadCounts): RNASeqReadCounts = {
RNASeqReadCounts(reads + that.reads,
unique + that.unique,
duplicate + that.duplicate,
mapped + that.mapped,
mappedUnique + that.mappedUnique,
forward + that.forward,
reverse + that.reverse)
}
}
| bigdatagenomics/qc-metrics | quinine-core/src/main/scala/org/bdgenomics/quinine/metrics/rna/RNASeqReadCounts.scala | Scala | apache-2.0 | 4,026 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui
import scala.xml.Node
import org.apache.spark.SparkContext
/** Utility functions for generating XML pages with spark content. */
private[spark] object UIUtils {
import Page._
// Yarn has to go through a proxy so the base uri is provided and has to be on all links
private[spark] val uiRoot : String = Option(System.getenv("APPLICATION_WEB_PROXY_BASE")).
getOrElse("")
def prependBaseUri(resource: String = "") = uiRoot + resource
/** Returns a spark page with correctly formatted headers */
def headerSparkPage(content: => Seq[Node], sc: SparkContext, title: String, page: Page.Value)
: Seq[Node] = {
val jobs = page match {
case Stages => <li class="active"><a href={prependBaseUri("/stages")}>Stages</a></li>
case _ => <li><a href={prependBaseUri("/stages")}>Stages</a></li>
}
val storage = page match {
case Storage => <li class="active"><a href={prependBaseUri("/storage")}>Storage</a></li>
case _ => <li><a href={prependBaseUri("/storage")}>Storage</a></li>
}
val environment = page match {
case Environment =>
<li class="active"><a href={prependBaseUri("/environment")}>Environment</a></li>
case _ => <li><a href={prependBaseUri("/environment")}>Environment</a></li>
}
val executors = page match {
case Executors => <li class="active"><a href={prependBaseUri("/executors")}>Executors</a></li>
case _ => <li><a href={prependBaseUri("/executors")}>Executors</a></li>
}
<html>
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<link rel="stylesheet" href={prependBaseUri("/static/bootstrap.min.css")} type="text/css" />
<link rel="stylesheet" href={prependBaseUri("/static/webui.css")} type="text/css" />
<script src={prependBaseUri("/static/sorttable.js")} ></script>
<title>{sc.appName} - {title}</title>
</head>
<body>
<div class="navbar navbar-static-top">
<div class="navbar-inner">
<a href={prependBaseUri("/")} class="brand"><img src={prependBaseUri("/static/spark-logo-77x50px-hd.png")} /></a>
<ul class="nav">
{jobs}
{storage}
{environment}
{executors}
</ul>
<p class="navbar-text pull-right"><strong>{sc.appName}</strong> application UI</p>
</div>
</div>
<div class="container-fluid">
<div class="row-fluid">
<div class="span12">
<h3 style="vertical-align: bottom; display: inline-block;">
{title}
</h3>
</div>
</div>
{content}
</div>
</body>
</html>
}
/** Returns a page with the spark css/js and a simple format. Used for scheduler UI. */
def basicSparkPage(content: => Seq[Node], title: String): Seq[Node] = {
<html>
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<link rel="stylesheet" href={prependBaseUri("/static/bootstrap.min.css")} type="text/css" />
<link rel="stylesheet" href={prependBaseUri("/static/webui.css")} type="text/css" />
<script src={prependBaseUri("/static/sorttable.js")} ></script>
<title>{title}</title>
</head>
<body>
<div class="container-fluid">
<div class="row-fluid">
<div class="span12">
<h3 style="vertical-align: middle; display: inline-block;">
<img src={prependBaseUri("/static/spark-logo-77x50px-hd.png")} style="margin-right: 15px;" />
{title}
</h3>
</div>
</div>
{content}
</div>
</body>
</html>
}
/** Returns an HTML table constructed by generating a row for each object in a sequence. */
def listingTable[T](
headers: Seq[String],
makeRow: T => Seq[Node],
rows: Seq[T],
fixedWidth: Boolean = false): Seq[Node] = {
val colWidth = 100.toDouble / headers.size
val colWidthAttr = if (fixedWidth) colWidth + "%" else ""
var tableClass = "table table-bordered table-striped table-condensed sortable"
if (fixedWidth) {
tableClass += " table-fixed"
}
<table class={tableClass}>
<thead>{headers.map(h => <th width={colWidthAttr}>{h}</th>)}</thead>
<tbody>
{rows.map(r => makeRow(r))}
</tbody>
</table>
}
}
| mkolod/incubator-spark | core/src/main/scala/org/apache/spark/ui/UIUtils.scala | Scala | apache-2.0 | 5,288 |
package mesosphere.marathon
package core.task.tracker
import akka.actor.ActorRef
import akka.stream.Materializer
import mesosphere.marathon.core.base.Clock
import mesosphere.marathon.core.instance.update.{ InstanceChangeHandler, InstanceUpdateOpResolver }
import mesosphere.marathon.core.leadership.LeadershipModule
import mesosphere.marathon.core.task.tracker.impl._
import mesosphere.marathon.storage.repository.InstanceRepository
/**
* Provides the interfaces to query the current task state ([[InstanceTracker]]) and to
* update the task state ([[TaskStateOpProcessor]]).
*/
class InstanceTrackerModule(
clock: Clock,
config: InstanceTrackerConfig,
leadershipModule: LeadershipModule,
instanceRepository: InstanceRepository,
updateSteps: Seq[InstanceChangeHandler])(implicit mat: Materializer) {
lazy val instanceTracker: InstanceTracker =
new InstanceTrackerDelegate(config, instanceTrackerActorRef)
lazy val instanceTrackerUpdateStepProcessor: InstanceTrackerUpdateStepProcessor =
new InstanceTrackerUpdateStepProcessorImpl(updateSteps)
def instanceCreationHandler: InstanceCreationHandler = instanceStateOpProcessor
def stateOpProcessor: TaskStateOpProcessor = instanceStateOpProcessor
private[this] def updateOpResolver(instanceTrackerRef: ActorRef): InstanceUpdateOpResolver =
new InstanceUpdateOpResolver(
new InstanceTrackerDelegate(config, instanceTrackerRef), clock)
private[this] def instanceOpProcessor(instanceTrackerRef: ActorRef): InstanceOpProcessor =
new InstanceOpProcessorImpl(instanceTrackerRef, instanceRepository, updateOpResolver(instanceTrackerRef), config)
private[this] lazy val instanceUpdaterActorMetrics = new InstanceUpdateActor.ActorMetrics()
private[this] def instanceUpdaterActorProps(instanceTrackerRef: ActorRef) =
InstanceUpdateActor.props(clock, instanceUpdaterActorMetrics, instanceOpProcessor(instanceTrackerRef))
private[this] lazy val instancesLoader = new InstancesLoaderImpl(instanceRepository)
private[this] lazy val instanceTrackerMetrics = new InstanceTrackerActor.ActorMetrics()
private[this] lazy val instanceTrackerActorProps = InstanceTrackerActor.props(
instanceTrackerMetrics, instancesLoader, instanceTrackerUpdateStepProcessor, instanceUpdaterActorProps)
protected lazy val instanceTrackerActorName = "instanceTracker"
private[this] lazy val instanceTrackerActorRef = leadershipModule.startWhenLeader(
instanceTrackerActorProps, instanceTrackerActorName
)
private[this] lazy val instanceStateOpProcessor =
new InstanceCreationHandlerAndUpdaterDelegate(clock, config, instanceTrackerActorRef)
}
| natemurthy/marathon | src/main/scala/mesosphere/marathon/core/task/tracker/InstanceTrackerModule.scala | Scala | apache-2.0 | 2,649 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package detailedtopics.configuration.securityheaders
//#filters
import javax.inject.Inject
import play.api.http.DefaultHttpFilters
import play.filters.headers.SecurityHeadersFilter
import play.api.mvc.BaseController
import play.api.mvc.ControllerComponents
//#filters
class SecurityHeaders @Inject() (val controllerComponents: ControllerComponents) extends BaseController {
def index = Action {
//#allowActionSpecificHeaders
Ok("Index").withHeaders(SecurityHeadersFilter.REFERRER_POLICY -> "my page-specific header")
//#allowActionSpecificHeaders
}
}
object SecurityHeaders {
class Filters @Inject() (securityHeadersFilter: SecurityHeadersFilter)
extends DefaultHttpFilters(securityHeadersFilter)
}
| benmccann/playframework | documentation/manual/working/commonGuide/filters/code/SecurityHeaders.scala | Scala | apache-2.0 | 795 |
package abeel.genometools.tdf
import net.sf.samtools.SAMFileReader
import java.io.PrintWriter
import java.io.File
import abeel.genometools.Main
import java.time.LocalDateTime
import atk.compbio.DNAHash
import net.sf.samtools.SAMFileReader.ValidationStringency
import atk.util.TimeInterval
import scala.collection.JavaConversions._
import net.sf.jannot.tdf.TDFDataSource
import net.sf.jannot.source.Locator
import net.sf.jannot.source.DataSourceFactory
import net.sf.jannot.tdf.TDFData
import atk.io.NixWriter
import java.util.logging.Logger
import java.util.logging.Level
import atk.util.LoggingTrait
object TDF2GCbias extends Main {
case class Config(val input: File = null, val reference: File = null, val window: Int = 1000, val output: File = null)
override val description = "Tool to calculate GC bias from coverage file (TDF) and a reference (fasta)."
override val version = """
2016/10/05 Initial version included in genometools
"""
override def main(args: Array[String]) {
setDebugLevel(Level.WARNING)
val parser = new scopt.OptionParser[Config]("java -jar genometools.jar tdf2gcbias") {
val default = new Config()
opt[File]('i', "input") required () action { (x, c) => c.copy(input = x) } text ("Input TDF file. ")
opt[File]('r', "reference") required () action { (x, c) => c.copy(reference = x) } text ("Input fasta file. ")
opt[File]('o', "output") required () action { (x, c) => c.copy(output = x) } text ("File where you want the output to be written")
opt[Int]('w', "window") action { (x, c) => c.copy(window = x) } text ("Window length, default = " + default.window)
}
parser.parse(args, Config()) map { config =>
// assume(config.inputFile != null)
// assume(config.outputFile != null)
processFile(config)
}
}
private def processFile(config: Config) {
val ref = DataSourceFactory.create(new Locator(config.reference)).read();
val tdf = DataSourceFactory.create(new Locator(config.input)).read();
val pw = new NixWriter(config.output, config)
pw.println("# GC-content per window of "+config.window+" nt")
pw.println("#reference\\tcoverage")
for (entry <- ref) {
pw.println("#ENTRY = "+entry.getID)
println(entry.getID + "\\t" + entry.iterator().toList)
val r = entry
val tEntry = tdf.getEntry(entry.getID)
val t = (tEntry.get(tEntry.iterator().toList(0))).asInstanceOf[TDFData]
for (i <- 0 until r.getMaximumLength / config.window) {
val seq = r.sequence().subsequence(i * config.window, (i + 1) * config.window).stringRepresentation()
val nt = seq.toUpperCase().groupBy(identity).mapValues { _.size }
val gc = nt.getOrElse('C', 0) + nt.getOrElse('G', 0)
val at = nt.getOrElse('A', 0) + nt.getOrElse('T', 0)
val cov = t.get(i * config.window, (i + 1) * config.window).toList
val covMap = cov.map { pile => (pile.start() -> pile.end()) -> pile.getTotal }
val singles=covMap.filter(p=>p._1._1+1==p._1._2).filter(p=>p._1._1>=i * config.window && p._1._1<(i + 1) * config.window)
val singleSum=singles.map(_._2).sum
// println(covMap.size+"\\t"+singles.size)
// covMap.filter(p)
if (at + gc == config.window) {
val fract = math.round((gc * 100.0) / (at + gc))
val cc = singleSum / config.window
pw.println(fract + "\\t" + cc)
// } else {
// pw.println(fract + "\\t" + cc)
// println("Incomplete window: " + (at + gc) + ", " + singles.size)
}
}
}
pw.close
}
} | AbeelLab/genometools | genomeview/genomeview/genometools/TDF2GCbias.scala | Scala | gpl-3.0 | 3,613 |
package com.twitter.inject.server.tests
import com.google.inject.AbstractModule
import com.twitter.finagle.http.Status
import com.twitter.inject.server.{EmbeddedTwitterServer, Ports, TwitterServer}
import com.twitter.inject.{Test, TwitterModule}
import com.twitter.server.Lifecycle.Warmup
import com.twitter.server.{TwitterServer => BaseTwitterServer}
import com.twitter.util.Await
class StartupIntegrationTest extends Test {
"startup" should {
"ensure health check succeeds when guice config is good" in {
val server = new EmbeddedTwitterServer(new SimpleGuiceHttpTwitterServer)
server.assertHealthy()
server.httpGetAdmin(
"/admin/server_info",
andExpect = Status.Ok)
server.close()
}
"non HTTP twitter-server passes health check" in {
val server = new EmbeddedTwitterServer(new SimpleGuiceTwitterServer)
server.assertHealthy()
server.close()
}
"raw TwitterServerWithPorts starts up" in {
val server = new EmbeddedTwitterServer(
twitterServer = new RawTwitterServerWithPorts)
server.assertHealthy()
server.close()
}
"GuiceTwitterServer starts up" in {
val server = new EmbeddedTwitterServer(
twitterServer = new TwitterServer {})
server.assertHealthy()
server.close()
}
"ensure server health check fails when guice config fails fast" in {
intercept[Exception] {
new EmbeddedTwitterServer(
new FailFastServer).start()
}
}
"ensure startup fails when base twitter server preMain throws exception" in {
intercept[Exception] {
new EmbeddedTwitterServer(
twitterServer = new PremainErrorBaseTwitterServer).start()
}
}
"ensure startup fails when preMain throws exception" in {
intercept[Exception] {
new EmbeddedTwitterServer(
new ServerPremainException).start()
}
}
"ensure http server starts after warmup" in {
pending //only manually run since uses sleeps
var continueWarmup = true
class WarmupServer extends TwitterServer {
override def warmup {
println("Warmup begin")
while (continueWarmup) {
Thread.sleep(1000)
}
println("Warmup end")
}
}
val server = new EmbeddedTwitterServer(
twitterServer = new WarmupServer,
waitForWarmup = false)
server.assertHealthy(healthy = false)
Thread.sleep(3000)
server.assertHealthy(healthy = false)
continueWarmup = false
Thread.sleep(3000)
server.assertHealthy(healthy = true)
server.close()
}
"calling GuiceModule.install throws exception" in {
intercept[Exception] {
new EmbeddedTwitterServer(
twitterServer = new ServerWithGuiceModuleInstall).start()
}
}
}
}
class FailFastServer extends TwitterServer {
override val modules = Seq(new AbstractModule {
def configure() {
throw new StartupTestException("guice module exception")
}
})
//TODO: Remove override once App#nonExitingMain in opensource released
override protected def exitOnError(reason: String) {
System.err.println(reason)
close()
throw new Exception(reason)
}
}
class SimpleGuiceTwitterServer extends TwitterServer {
override val modules = Seq()
}
class SimpleGuiceHttpTwitterServer extends TwitterServer {
}
class ServerWithGuiceModuleInstall extends TwitterServer {
override val modules = Seq(new TwitterModule {
override def configure() {
install(new FooModule)
}
})
//TODO: Remove override once App#nonExitingMain in opensource released
override protected def exitOnError(reason: String) {
System.err.println(reason)
close()
throw new Exception(reason)
}
}
class FooModule extends AbstractModule {
override def configure() {}
}
class PremainErrorBaseTwitterServer extends BaseTwitterServer with Ports with Warmup {
premain {
throw new StartupTestException("premain exception")
}
/* TODO: Remove once com.twitter.app.App with nonExitingMain is opensource released */
override protected def exitOnError(reason: String): Unit = {
System.err.println(reason)
close()
throw new Exception(reason)
}
def main() {
warmupComplete()
throw new StartupTestException("shouldn't get here")
}
}
class ServerPremainException extends TwitterServer {
premain {
throw new StartupTestException("premain exception")
}
//TODO: Remove override once App#nonExitingMain in opensource released
override protected def exitOnError(reason: String) {
System.err.println(reason)
close()
throw new Exception(reason)
}
}
class StartupTestException(msg: String) extends Exception(msg)
class RawTwitterServerWithPorts extends BaseTwitterServer with Ports {
def main() {
Await.ready(
adminHttpServer)
}
} | kaushik94/finatra | inject/inject-server/src/test/scala/com/twitter/inject/server/tests/StartupIntegrationTest.scala | Scala | apache-2.0 | 4,921 |
package filodb.standalone
import scala.concurrent.duration._
import scala.util.control.NonFatal
import akka.actor.ActorRef
import akka.cluster.Cluster
import com.typesafe.scalalogging.StrictLogging
import filodb.akkabootstrapper.AkkaBootstrapper
import filodb.coordinator._
import filodb.coordinator.client.LocalClient
import filodb.http.FiloHttpServer
/**
* FiloServer starts a "standalone" FiloDB server which can ingest and support queries through the Akka
* API. It is meant to be used in a cluster.
*
* - The servers connect to each other setting up an Akka Cluster. Seed nodes must be configured.
 * - Ingestion must be started using the CLI and the source configured; the server does not
 *   ingest anything on its own at startup.
*
* ## Configuration ##
* {{{
* seed-nodes = ["akka.tcp://filo-standalone@hostname_or_ip:2552"]
* dataset-definitions {
* sample-timeseries {
* partition-columns = ["metricName:string", "tags:map"]
* data-columns = ["timestamp:long", "value:double"]
* row-keys = ["timestamp"]
* }
* }
* }}}
*
* @param watcher optionally register a watcher `ActorRef` with the `NodeClusterActor` cluster
* singleton. Primarily for Multi-JVM tests, but this strategy is used in the
* coordinator module in all test types.
*/
class FiloServer(watcher: Option[ActorRef]) extends FilodbClusterNode {
def this() = this(None)
def this(watcher: ActorRef) = this(Some(watcher))
override val role = ClusterRole.Server
lazy val config = cluster.settings.config
var filoHttpServer: FiloHttpServer = _
// Now, initialize any datasets using in memory MetaStore.
// This is a hack until we are able to use CassandraMetaStore for standalone. It is also a
// convenience for users to get up and running quickly without setting up cassandra.
val client = new LocalClient(coordinatorActor)
def bootstrap(akkaCluster: Cluster): AkkaBootstrapper = {
val bootstrapper = AkkaBootstrapper(akkaCluster)
bootstrapper.bootstrap()
bootstrapper
}
def start(): Unit = {
try {
coordinatorActor
scala.concurrent.Await.result(metaStore.initialize(), cluster.settings.InitializationTimeout)
val bootstrapper = bootstrap(cluster.cluster)
val singleton = cluster.clusterSingleton(role, watcher)
filoHttpServer = new FiloHttpServer(cluster.system, cluster.settings)
filoHttpServer.start(coordinatorActor, singleton, bootstrapper.getAkkaHttpRoute())
// Launch the profiler after startup, if configured.
SimpleProfiler.launch(systemConfig.getConfig("filodb.profiler"))
KamonShutdownHook.registerShutdownHook()
} catch {
// if there is an error in the initialization, we need to fail fast so that the process can be rescheduled
case NonFatal(e) =>
logger.error("Could not initialize server", e)
shutdown()
}
}
override def shutdown(): Unit = {
    Option(filoHttpServer).foreach(_.shutdown(5.seconds)) // TODO configure; guards against start() failing before the HTTP server is created
super.shutdown()
}
def shutdownAndExit(code: Int): Unit = {
shutdown()
sys.exit(code)
}
}
object FiloServer extends StrictLogging {
def main(args: Array[String]): Unit = {
try {
new FiloServer().start()
} catch { case e: Exception =>
logger.error("Could not start FiloDB server", e)
}
}
}
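
// Illustrative usage sketch (added; not part of the original source): starting a standalone
// server programmatically instead of via FiloServer.main. The watcher constructor variant is
// primarily for multi-JVM tests; the no-arg constructor is the normal entry point.
object FiloServerUsageSketch {
  def run(): Unit = {
    val server = new FiloServer()
    server.start()
    sys.addShutdownHook(server.shutdown())
  }
}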
| tuplejump/FiloDB | standalone/src/main/scala/filodb.standalone/FiloServer.scala | Scala | apache-2.0 | 3,344 |
package strd.util
import java.util
import com.escalatesoft.subcut.inject.{BindingModule, MutableBindingModule}
import com.twitter.ostrich.admin.{Service, ServiceTracker}
import com.twitter.ostrich.stats.Stats
import org.slf4j.LoggerFactory
import strd.trace.PoolContext
import sun.misc.VM
import scala.collection.mutable
import scala.concurrent.ExecutionContext
import scala.sys.ShutdownHookThread
/**
* $Id$
* $URL$
* User: bulay
* Date: 4/15/13
* Time: 3:47 PM
*/
class ServerStarter(val bindings: BindingModule, ss : Option[StrdService] = None) {
import scala.collection.JavaConverters._
val log = LoggerFactory.getLogger( getClass )
def start() {
log.info("--> Start")
val begin = System.currentTimeMillis()
//ServiceTracker.register(this)
val toStart = new util.TreeMap[Int, Service]()
try {
      bindings.bindings.foreach(binding => {
val o = bindings.injectOptional(binding._1)
o match {
case Some(srv: Service) =>
ss.map { s =>
toStart.put(s._bindOrdering.get(ServiceKey(binding._1.m, binding._1.name)).get, srv)
}.getOrElse {
toStart.put(toStart.size(), srv)
}
case _ =>
}
})
toStart.asScala.values.foreach{ srv => {
log.debug(" -> start " + srv.getClass.getName)
ServiceTracker.register(srv)
srv.start()
}}
} catch {
case x: Exception => {
log.error("Starting failed", x)
System.exit(-5)
}
}
ShutdownHookThread(shutdown())
val configs = bindings.injectOptional[ClusterConfigs](None)
val message =
s"""
|===================================================
| Server Started in ${System.currentTimeMillis() - begin}ms
| MaxDirectMem : ${(VM.maxDirectMemory() / 1024d / 1024d).toInt} mb
| BindingProps :
|""".stripMargin +
bindings.bindings.filterNot( b => b._1.name.exists( n=> configs.exists( c => c.props.contains( n ))) ).map( b => bindings.injectOptional(b._1) match {
case Some(s : String) =>
f" ${b._1.name.getOrElse("[noname]")}%-16s => '$s'\\n"
case Some(i : Int) =>
f" ${b._1.name.getOrElse("[noname]")}%-16s => $i (int)\\n"
case _ => ""
}).mkString("") +
configs.map( cfg =>
s"""
| NodeType : ${cfg.nodeServiceId}
| HostName : ${ClusterConfigs.shortHostName}
| Mac : ${ClusterConfigs.macForRegistration}
| IP_PUB : ${ClusterConfigs.publicAddress}
| IP_INT : ${cfg.clusterBindAddress}
| BindIface : ${cfg.bindInterface.map(iface => iface.getName).getOrElse("n/a")}
| NodeID : ${cfg.nodeId.getOrElse("no")}
| OutAddr : ${ClusterConfigs.localOutgoingIp}
| ClusterNodes:
|""".stripMargin + cfg.nodes.values.map( node =>
f" ${node.nodeType}%-20s | ${node.addr}%-16s | ${node.nodeId}%-5d | ${node.cluster}\\n"
).mkString("") +
s"""
| ClusterProps:
|""".stripMargin + cfg.props.map( param =>
f" ${param._1}%-35s => ${if (param._1.contains("pwd") || param._1.contains("password")) "*****" else param._2}\\n"
).mkString("") ).getOrElse("") +
s"""
|=======================(.)(.)======================
|
""".stripMargin
log.info(message)
}
def shutdown() {
ServiceTracker.synchronized {
Stats.setLabel("Shutdown:", "quiesce...")
log.info("--> Quiesce")
ServiceTracker.peek.foreach { x=>
log.debug("-> Quiesce: " + x.getClass.getName)
try {
x.quiesce()
} catch {
case x: Exception => log.warn("Failed: ", x)
}
log.debug("<- Quiesce: " + x.getClass.getName)
}
log.info("<-- Quiesce")
Stats.setLabel("Shutdown:", "shutdown...")
log.info("--> Shutdown")
ServiceTracker.peek.foreach {
x =>
log.debug("-> stop: " + x.getClass.getName)
try {
x.shutdown()
} catch {
case e: Exception => log.warn("Service Stop Failed :" + x.getClass.getName, e)
}
log.debug("<- stop" + x.getClass.getName)
}
log.info("<-- Shutdown")
Stats.setLabel("Shutdown:", "completed")
}
}
}
trait LateInitiable {
def lateInit()
}
trait StrdService extends MutableBindingModule with LateInitiable{
val _bindOrdering = new mutable.LinkedHashMap[ServiceKey, Int]()
implicit val module : MutableBindingModule = this
println("--> StrdService")
bind[ExecutionContext] toSingle PoolContext.cachedExecutor(name = "default")
/*
override def bind[T](implicit m: Manifest[T]) = {
_bindOrdering += ( ServiceKey(m, None) -> _bindOrdering.size)
super.bind
}
*/
override def bind[T <: Any](implicit m: scala.reflect.Manifest[T]) = {
new Bind[T]() {
override def toSingle[I <: T](function: => I) = {
_bindOrdering += ( ServiceKey(m, name) -> _bindOrdering.size)
super.toSingle(function)
}
}
}
def lateInit() {
// nothing
}
//def init() {}
}
object ServerStarter{
def apply( s : StrdService) : ServerStarter = {
s match {
case l : LateInitiable => l.lateInit()
case _ =>
}
new ServerStarter( s.freeze().fixed, s )
}
}
case class ServiceKey(m:Manifest[_], id : Option[String]) | onerinvestments/strd | strd-commons/src/main/scala/strd/util/ServerStarter.scala | Scala | apache-2.0 | 5,395 |
package org.openapitools.models
import io.circe._
import io.finch.circe._
import io.circe.generic.semiauto._
import io.circe.java8.time._
import org.openapitools._
import org.openapitools.models.ExtensionClassContainerImpl1links
import org.openapitools.models.ExtensionClassContainerImpl1map
/**
*
* @param Underscoreclass
* @param Underscorelinks
* @param map
*/
case class ExtensionClassContainerImpl1(Underscoreclass: Option[String],
Underscorelinks: Option[ExtensionClassContainerImpl1links],
map: Option[ExtensionClassContainerImpl1map]
)
object ExtensionClassContainerImpl1 {
/**
* Creates the codec for converting ExtensionClassContainerImpl1 from and to JSON.
*/
implicit val decoder: Decoder[ExtensionClassContainerImpl1] = deriveDecoder
implicit val encoder: ObjectEncoder[ExtensionClassContainerImpl1] = deriveEncoder
}
| cliffano/swaggy-jenkins | clients/scala-finch/generated/src/main/scala/org/openapitools/models/ExtensionClassContainerImpl1.scala | Scala | mit | 913 |
package cwl
import cats.data.NonEmptyList
import eu.timepit.refined._
import cats.syntax.either._
import shapeless.{:+:, CNil, Witness}
import shapeless.syntax.singleton._
import cwl.LinkMergeMethod.LinkMergeMethod
import cwl.WorkflowStepInput.InputSource
import common.validation.ErrorOr.ErrorOr
import wom.types.WomType
import wom.graph.WomIdentifier
import wom.graph.GraphNodePort.OutputPort
import wom.graph.expression.ExposedExpressionNode
case class WorkflowStepInput(
id: String,
source: Option[InputSource] = None,
linkMerge: Option[LinkMergeMethod] = None,
default: Option[CwlAny] = None,
valueFrom: Option[StringOrExpression] = None) {
def toExpressionNode(sourceMappings: Map[String, OutputPort],
outputTypeMap: Map[String, WomType],
inputs: Set[String]
): ErrorOr[ExposedExpressionNode] = {
val source = this.source.flatMap(_.select[String]).get
val lookupId = FullyQualifiedName(source).id
val outputTypeMapWithIDs = outputTypeMap.map {
case (key, value) => FullyQualifiedName(key).id -> value
}
(for {
inputType <- outputTypeMapWithIDs.get(lookupId).
toRight(NonEmptyList.one(s"couldn't find $lookupId as derived from $source in map\\n${outputTypeMapWithIDs.mkString("\\n")}"))
womExpression = WorkflowStepInputExpression(this, inputType, inputs)
identifier = WomIdentifier(id)
ret <- ExposedExpressionNode.fromInputMapping(identifier, womExpression, inputType, sourceMappings).toEither
} yield ret).toValidated
}
}
object WorkflowStepInput {
type InputSource = String :+: Array[String] :+: CNil
}
object WorkflowStepInputSource {
object String {
def unapply(arg: InputSource): Option[String] = arg.select[String]
}
object StringArray {
def unapply(arg: InputSource): Option[Array[String]] = arg.select[Array[String]]
}
}
case class InputParameter(
id: String,
label: Option[String] = None,
secondaryFiles:
Option[
Expression :+:
String :+:
Array[
Expression :+:
String :+:
CNil] :+:
CNil] = None,
format:
Option[
Expression :+:
String :+:
Array[String] :+:
CNil] = None,
streamable: Option[Boolean] = None,
doc: Option[String :+: Array[String] :+: CNil] = None,
inputBinding: Option[CommandLineBinding] = None,
default: Option[String] = None, //can be of type "Any" which... sucks.
`type`: Option[MyriadInputType] = None) {
type `type` = MyriadInputType
type Id = String
}
case class InputRecordSchema(
`type`: W.`"record"`.T,
fields: Option[Array[InputRecordField]],
label: Option[String])
case class InputRecordField(
name: String,
`type`: MyriadInputType,
doc: Option[String],
inputBinding: Option[CommandLineBinding],
label: Option[String])
case class InputEnumSchema(
symbols: Array[String],
`type`: W.`"enum"`.T,
label: Option[String],
inputBinding: Option[CommandLineBinding])
case class InputArraySchema(
items: MyriadInputType,
`type`: W.`"array"`.T,
label: Option[String],
inputBinding: Option[CommandLineBinding])
case class CommandLineBinding(
loadContents: Option[Boolean] = None,
position: Option[Int] = None,
prefix: Option[String] = None,
separate: Option[String] = None,
itemSeparator: Option[String] = None,
valueFrom: Option[StringOrExpression] = None,
shellQuote: Option[Boolean] = None)
case class WorkflowOutputParameter(
id: String,
label: Option[String] = None,
secondaryFiles:
Option[
Expression :+:
String :+:
Array[
Expression :+:
String :+:
CNil] :+:
CNil] = None,
format: Option[Expression :+: String :+: Array[String] :+: CNil] = None,
streamable: Option[Boolean] = None,
doc: Option[String :+: Array[String] :+: CNil] = None,
outputBinding: Option[CommandOutputBinding] = None,
outputSource: Option[WorkflowOutputParameter#OutputSource] = None,
linkMerge: Option[LinkMergeMethod] = None,
`type`: Option[MyriadOutputType] = None) {
type OutputSource = String :+: Array[String] :+: CNil
type `type` = MyriadOutputType
type Id = String
}
case class InputBinding(position: Int, prefix: String)
case class OutputRecordSchema(
`type`: W.`"record"`.T,
fields: Option[Array[OutputRecordField]],
label: Option[String]
)
case class OutputRecordField(
name: String,
`type`: MyriadOutputType,
doc: Option[String],
outputBinding: Option[CommandOutputBinding])
case class OutputEnumSchema(
symbols: Array[String],
`type`: W.`"enum"`.T,
label: Option[String],
outputBinding: Option[CommandOutputBinding])
case class OutputArraySchema(
items: MyriadOutputType,
`type`: W.`"array"`.T,
label: Option[String],
outputBinding: Option[CommandOutputBinding])
case class InlineJavascriptRequirement(
`class`: W.`"InlineJavascriptRequirement"`.T = "InlineJavascriptRequirement".narrow,
expressionLib: Option[Array[String]] = None)
case class SchemaDefRequirement(
`class`: W.`"SchemaDefRequirement"`.T,
types: Array[InputRecordSchema :+: InputEnumSchema :+: InputArraySchema :+: CNil]
)
//There is a large potential for regex refinements on these string types
case class DockerRequirement(
`class`: W.`"DockerRequirement"`.T,
dockerPull: Option[String], //TODO Refine to match a docker image regex?
dockerLoad: Option[String],
dockerFile: Option[String],
dockerImport: Option[String],
dockerImageId: Option[String],
dockerOutputDirectory: Option[String]
)
case class SoftwareRequirement(
`class`: W.`"SoftwareRequirement"`.T,
packages: Array[SoftwarePackage] = Array.empty
)
case class SoftwarePackage(
`package`: String,
version: Option[Array[String]],
specs: Option[Array[String]] // This could be refined to match a regex for IRI.
) {
type Package = String
type Specs = Array[String]
}
case class InitialWorkDirRequirement(
`class`: W.`"InitialWorkDirRequirement"`.T,
listing:
Array[
File :+:
Directory :+:
Dirent :+:
Expression :+:
String :+:
CNil
] :+:
Expression :+:
String :+:
CNil)
/**
* Short for "Directory Entry"
* @see <a href="http://www.commonwl.org/v1.0/CommandLineTool.html#Dirent">Dirent Specification</a>
*/
case class Dirent(
entry: Expression :+: String :+: CNil,
entryName: Option[Expression :+: String :+: CNil],
writable: Option[Boolean])
case class EnvVarRequirement(
`class`: EnvVarRequirement.ClassType = EnvVarRequirement.`class`,
envDef: Array[EnvironmentDef]
)
object EnvVarRequirement {
type ClassType = Witness.`"EnvVarRequirement"`.T
val `class`: ClassType = "EnvVarRequirement".asInstanceOf[ClassType]
}
case class EnvironmentDef(envName: String, envValue: StringOrExpression) {
type EnvName = String
type EnvValue = String
}
case class ShellCommandRequirement(`class`: W.`"ShellCommandRequirement"`.T = "ShellCommandRequirement".narrow)
case class ResourceRequirement(
`class`: W.`"ResourceRequirement"`.T,
coresMin: Long :+: Expression :+: String :+: CNil,
coresMax: Int :+: Expression :+: String :+: CNil,
ramMin: Long :+: Expression :+: String :+: CNil,
ramMax: Long :+: Expression :+: String :+: CNil,
tmpdirMin: Long :+: Expression :+: String :+: CNil,
tmpdirMax: Long :+: Expression :+: String :+: CNil,
outdirMin: Long :+: Expression :+: String :+: CNil,
outdirMax: Long :+: Expression :+: String :+: CNil)
case class SubworkflowFeatureRequirement(
`class`: W.`"SubworkflowFeatureRequirement"`.T)
case class ScatterFeatureRequirement(
`class`: W.`"ScatterFeatureRequirement"`.T)
case class MultipleInputFeatureRequirement(
`class`: W.`"MultipleInputFeatureRequirement"`.T)
case class StepInputExpressionRequirement(
`class`: W.`"StepInputExpressionRequirement"`.T)
| ohsu-comp-bio/cromwell | cwl/src/main/scala/cwl/model.scala | Scala | bsd-3-clause | 9,610 |
package com.sksamuel.scoverage.samples
import org.scalatest.{MustMatchers, FunSuite}
import scala.concurrent.Await
import scala.concurrent.duration._
/** @author Stephen Samuel */
class FuturesTest extends FunSuite with MustMatchers {
lazy val futures = new Futures
val timeout = 10.seconds
import scala.concurrent.ExecutionContext.Implicits.global
implicit val name = "sammy"
test("futures happy path") {
val person = new Person("sammy")
Await.result(futures.insert(person), timeout).success mustEqual true
Await.result(futures.insert(person), timeout).status mustEqual 421
}
}
case class Person(name: String)
| scoverage/sbt-scoverage-samples | src/test/scala/com/sksamuel/scoverage/samples/FuturesTest.scala | Scala | apache-2.0 | 642 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.reflect.generic
object ByteCodecs {
def avoidZero(src: Array[Byte]): Array[Byte] = {
var i = 0
val srclen = src.length
var count = 0
while (i < srclen) {
if (src(i) == 0x7f) count += 1
i += 1
}
val dst = new Array[Byte](srclen + count)
i = 0
var j = 0
while (i < srclen) {
val in = src(i)
if (in == 0x7f) {
dst(j) = (0xc0).toByte
dst(j + 1) = (0x80).toByte
j += 2
} else {
dst(j) = (in + 1).toByte
j += 1
}
i += 1
}
dst
}
def regenerateZero(src: Array[Byte]): Int = {
var i = 0
val srclen = src.length
var j = 0
while (i < srclen) {
val in: Int = src(i) & 0xff
if (in == 0xc0 && (src(i + 1) & 0xff) == 0x80) {
src(j) = 0x7f
i += 2
} else {
src(j) = (in - 1).toByte
i += 1
}
j += 1
}
j
}
def encode8to7(src: Array[Byte]): Array[Byte] = {
val srclen = src.length
val dstlen = (srclen * 8 + 6) / 7
val dst = new Array[Byte](dstlen)
var i = 0
var j = 0
while (i + 6 < srclen) {
var in: Int = src(i) & 0xff
dst(j) = (in & 0x7f).toByte
var out: Int = in >>> 7
in = src(i + 1) & 0xff
dst(j + 1) = (out | (in << 1) & 0x7f).toByte
out = in >>> 6
in = src(i + 2) & 0xff
dst(j + 2) = (out | (in << 2) & 0x7f).toByte
out = in >>> 5
in = src(i + 3) & 0xff
dst(j + 3) = (out | (in << 3) & 0x7f).toByte
out = in >>> 4
in = src(i + 4) & 0xff
dst(j + 4) = (out | (in << 4) & 0x7f).toByte
out = in >>> 3
in = src(i + 5) & 0xff
dst(j + 5) = (out | (in << 5) & 0x7f).toByte
out = in >>> 2
in = src(i + 6) & 0xff
dst(j + 6) = (out | (in << 6) & 0x7f).toByte
out = in >>> 1
dst(j + 7) = out.toByte
i += 7
j += 8
}
if (i < srclen) {
var in: Int = src(i) & 0xff
dst(j) = (in & 0x7f).toByte; j += 1
var out: Int = in >>> 7
if (i + 1 < srclen) {
in = src(i + 1) & 0xff
dst(j) = (out | (in << 1) & 0x7f).toByte; j += 1
out = in >>> 6
if (i + 2 < srclen) {
in = src(i + 2) & 0xff
dst(j) = (out | (in << 2) & 0x7f).toByte; j += 1
out = in >>> 5
if (i + 3 < srclen) {
in = src(i + 3) & 0xff
dst(j) = (out | (in << 3) & 0x7f).toByte; j += 1
out = in >>> 4
if (i + 4 < srclen) {
in = src(i + 4) & 0xff
dst(j) = (out | (in << 4) & 0x7f).toByte; j += 1
out = in >>> 3
if (i + 5 < srclen) {
in = src(i + 5) & 0xff
dst(j) = (out | (in << 5) & 0x7f).toByte; j += 1
out = in >>> 2
}
}
}
}
}
if (j < dstlen) dst(j) = out.toByte
}
dst
}
@deprecated("use 2-argument version instead")
def decode7to8(src: Array[Byte], srclen: Int, dstlen: Int) { decode7to8(src, srclen) }
def decode7to8(src: Array[Byte], srclen: Int): Int = {
var i = 0
var j = 0
val dstlen = (srclen * 7 + 7) / 8
while (i + 7 < srclen) {
var out: Int = src(i)
var in: Byte = src(i + 1)
src(j) = (out | (in & 0x01) << 7).toByte
out = in >>> 1
in = src(i + 2)
src(j + 1) = (out | (in & 0x03) << 6).toByte
out = in >>> 2
in = src(i + 3)
src(j + 2) = (out | (in & 0x07) << 5).toByte
out = in >>> 3
in = src(i + 4)
src(j + 3) = (out | (in & 0x0f) << 4).toByte
out = in >>> 4
in = src(i + 5)
src(j + 4) = (out | (in & 0x1f) << 3).toByte
out = in >>> 5
in = src(i + 6)
src(j + 5) = (out | (in & 0x3f) << 2).toByte
out = in >>> 6
in = src(i + 7)
src(j + 6) = (out | in << 1).toByte
i += 8
j += 7
}
if (i < srclen) {
var out: Int = src(i)
if (i + 1 < srclen) {
var in: Byte = src(i + 1)
src(j) = (out | (in & 0x01) << 7).toByte; j += 1
out = in >>> 1
if (i + 2 < srclen) {
in = src(i + 2)
src(j) = (out | (in & 0x03) << 6).toByte; j += 1
out = in >>> 2
if (i + 3 < srclen) {
in = src(i + 3)
src(j) = (out | (in & 0x07) << 5).toByte; j += 1
out = in >>> 3
if (i + 4 < srclen) {
in = src(i + 4)
src(j) = (out | (in & 0x0f) << 4).toByte; j += 1
out = in >>> 4
if (i + 5 < srclen) {
in = src(i + 5)
src(j) = (out | (in & 0x1f) << 3).toByte; j += 1
out = in >>> 5
if (i + 6 < srclen) {
in = src(i + 6)
src(j) = (out | (in & 0x3f) << 2).toByte; j += 1
out = in >>> 6
}
}
}
}
}
}
if (j < dstlen) src(j) = out.toByte
}
dstlen
}
def encode(xs: Array[Byte]): Array[Byte] = avoidZero(encode8to7(xs))
@deprecated("use 1-argument version instead")
def decode(xs: Array[Byte], dstlen: Int) { decode(xs) }
/** Destructively decode array xs and returns the length of the decoded array */
def decode(xs: Array[Byte]): Int = {
val len = regenerateZero(xs)
decode7to8(xs, len)
}
}
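
// Illustrative sketch (added; not part of the original library file): the intended round trip.
// `encode` expands the input into zero-free bytes; `decode` destructively restores the data in
// place and returns the decoded length, which may include a trailing padding byte, so only the
// first `original.length` bytes are compared here.
object ByteCodecsRoundTripSketch {
  def demo(): Unit = {
    val original = Array[Byte](0, 1, 127, -128, 42)
    val encoded = ByteCodecs.encode(original)
    val decodedLength = ByteCodecs.decode(encoded) // decodes in place
    assert(decodedLength >= original.length)
    assert(encoded.take(original.length).sameElements(original))
  }
}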
| cran/rkafkajars | java/scala/reflect/generic/ByteCodecs.scala | Scala | apache-2.0 | 5,960 |
package org.jetbrains.plugins.scala
package codeInspection
package packageNameInspection
import com.intellij.codeInspection._
import com.intellij.openapi.roots.{JavaProjectRootsUtil, ProjectRootManager}
import com.intellij.openapi.util.TextRange
import com.intellij.openapi.util.io.FileUtil
import com.intellij.openapi.vfs.VfsUtilCore
import com.intellij.psi._
import org.jetbrains.annotations.Nls
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScValueOrVariable
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScNamedElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import org.jetbrains.plugins.scala.util.IntentionAvailabilityChecker
import scala.jdk.CollectionConverters._
class ScalaPackageNameInspection extends LocalInspectionTool {
override def isEnabledByDefault: Boolean = true
override def getID: String = "ScalaPackageName"
override def checkFile(file: PsiFile, manager: InspectionManager, isOnTheFly: Boolean): Array[ProblemDescriptor] = {
file match {
case file: ScalaFile if IntentionAvailabilityChecker.checkInspection(this, file) =>
if (file.isScriptFile) return null
if (file.isWorksheetFile) return null
val members = file.members
if (members.isEmpty) return null
val sourceFolder =
for {
virtualFile <- Option(file.getVirtualFile)
sourceFolder <- Option(ProjectRootManager.getInstance(file.getProject).getFileIndex.getSourceFolder(virtualFile))
} yield sourceFolder
val packagePrefix =
for {
sourceFolder <- sourceFolder
packagePrefix = sourceFolder.getPackagePrefix if packagePrefix.nonEmpty
} yield packagePrefix
val dir = file.getContainingDirectory
if (dir == null) return null
val packageNameByDir = packageNameFromFile(dir, packagePrefix) match {
case Some(pack) => pack
case None => return null
}
lazy val packageObjects = members.collect { case td: ScTypeDefinition if td.isPackageObject => td }
def ranges: Seq[TextRange] = file.packagingRanges match {
case Seq() =>
// if there is no packaging statement, we annotate the members directly
// for this we only try to highlight the nameIds if possible
members.collect {
case named: ScNamedElement => Seq(named.nameId.getTextRange)
case v: ScValueOrVariable => v.declaredElements.map(_.nameId.getTextRange)
case e => Seq(e.getTextRange)
}
.flatten
case seq => seq ++ packageObjects.map(_.nameId.getTextRange)
}
val possiblePackageQualifiers = members
.map {
case po: ScTypeDefinition if po.isPackageObject => po.qualifiedName
case td => td.topLevelQualifier.getOrElse("")
}
.distinct
val packageQualifier = possiblePackageQualifiers match {
case Seq(qualifier) => qualifier
case _ =>
// if the type definitions could be in multiple valid locations don't warn at all
return null
}
def problemDescriptors(buffer: Seq[LocalQuickFix]): Seq[ProblemDescriptor] = {
@Nls
var message = ScalaInspectionBundle.message("package.names.does.not.correspond.to.directory.structure", packageQualifier, packageNameByDir)
// Specifically make sure that the file path doesn't repeat an existing package prefix (twice).
for (packagePrefix <- packagePrefix; sourceFolder <- sourceFolder if (packageNameByDir + ".").startsWith(packagePrefix + "." + packagePrefix + ".")) {
message += "\\n\\n" + ScalaInspectionBundle.message("package.names.does.not.correspond.to.directory.structure.package.prefix", sourceFolder.getFile.getName, packagePrefix)
}
for (range <- ranges) yield
manager.createProblemDescriptor(file, range, message, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, isOnTheFly, buffer: _*)
}
if (packageNameByDir != packageQualifier) {
assert(packageObjects.size <= 1, "There should only be one package object here... otherwise we should have already aborted")
def renameQuickfix = new ScalaRenamePackageQuickFix(file, packageNameByDir)
def moveQuickfix = new ScalaMoveToPackageQuickFix(file, packageQualifier)
// the special root/empty-name package cannot have a package object
val cannotRename = packageNameByDir == "" && packageObjects.nonEmpty
val fixes =
if (cannotRename) Seq(moveQuickfix)
else Seq(renameQuickfix, moveQuickfix)
problemDescriptors(fixes).toArray
} else null
case _ => null
}
}
/**
* stolen from [[com.intellij.core.CoreJavaFileManager.getPackage]]
* unfortunately we need our own implementation... otherwise we cannot handle escaped package names at all
*/
private def packageNameFromFile(file: PsiDirectory, packagePrefix: Option[String]): Option[String] = {
val vFile = file.getVirtualFile
val withoutPrefix = JavaProjectRootsUtil.getSuitableDestinationSourceRoots(file.getProject).asScala.iterator
.flatMap { root =>
if (VfsUtilCore.isAncestor(root, vFile, false)) {
FileUtil.getRelativePath(root.getPath, vFile.getPath, '/')
.toOption
.map {
case "." => ""
case rel => rel.split('/').map(_.escapeNonIdentifiers).mkString(".")
}
} else None
}
.headOption
withoutPrefix.map {
case "" => packagePrefix.getOrElse("")
case name => packagePrefix.fold("")(_ + ".") + name
}
}
} | JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/packageNameInspection/ScalaPackageNameInspection.scala | Scala | apache-2.0 | 5,888 |
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
abstract class AsyncFreeSpec extends AsyncFreeSpecLike {
/**
* Returns a user friendly string for this suite, composed of the
 * simple name of the class (possibly simplified further by removing dollar signs if added by the Scala interpreter) and, if this suite
* contains nested suites, the result of invoking <code>toString</code> on each
* of the nested suites, separated by commas and surrounded by parentheses.
*
* @return a user-friendly string for this suite
*/
override def toString: String = Suite.suiteToString(None, this)
} | cheeseng/scalatest | scalatest/src/main/scala/org/scalatest/AsyncFreeSpec.scala | Scala | apache-2.0 | 1,182 |
/*
* Copyright 2007-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb {
package record {
package field {
import _root_.scala.xml._
import _root_.net.liftweb.util._
import _root_.net.liftweb.common._
import _root_.net.liftweb.http.{S}
import _root_.net.liftweb.http.js._
import Helpers._
import S._
import JE._
class BinaryField[OwnerType <: Record[OwnerType]](rec: OwnerType) extends Field[Array[Byte], OwnerType] {
def owner = rec
def this(rec: OwnerType, value: Array[Byte]) = {
this(rec)
set(value)
}
/**
* Sets the field value from an Any
*/
def setFromAny(f: Any): Box[Array[Byte]] = Full(this.set(
f match {
case null => Array()
case arr : Array[Byte] => f.asInstanceOf[Array[Byte]];
case _ => f.toString.getBytes("UTF-8")
}))
def setFromString(s: String): Box[Array[Byte]] = {
try{
Full(set(s.getBytes("UTF-8")));
} catch {
case e: Exception => Empty
}
}
def toForm = NodeSeq.Empty
def asXHtml: NodeSeq = NodeSeq.Empty
def defaultValue = Array(0)
def asJs = Str(hexEncode(value))
}
import _root_.java.sql.{ResultSet, Types}
import _root_.net.liftweb.mapper.{DriverType}
/**
* An int field holding DB related logic
*/
abstract class DBBinaryField[OwnerType <: DBRecord[OwnerType]](rec: OwnerType) extends BinaryField[OwnerType](rec)
with JDBCFieldFlavor[Array[Byte]] {
def targetSQLType = Types.BINARY
/**
* Given the driver type, return the string required to create the column in the database
*/
def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.enumColumnType
def jdbcFriendly(field : String) : Array[Byte] = value
}
}
}
}
| jeppenejsum/liftweb | framework/lift-persistence/lift-record/src/main/scala/net/liftweb/record/field/BinaryField.scala | Scala | apache-2.0 | 2,264 |
/**
* Copyright (C) 2016 Nicola Justus <[email protected]>
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package de.thm.move.controllers
import scala.collection.JavaConverters._
import java.net.URL
import java.util.ResourceBundle
import javafx.fxml.{FXML, Initializable}
import javafx.scene.control._
import javafx.collections.ListChangeListener.Change
import javafx.collections.{FXCollections, ListChangeListener}
import javafx.event.ActionEvent
import javafx.scene.paint.Color
import de.thm.move.models.LinePattern.LinePattern
import de.thm.move.models.FillPattern.FillPattern
import de.thm.move.util.ResourceUtils
import de.thm.move.config.ValueConfig
import de.thm.move.Global
import de.thm.move.models.{FillPattern, LinePattern}
import de.thm.move.util.JFxUtils._
import de.thm.move.views.dialogs.Dialogs
/** Controller for the color toolbar (toolbar below the menu).
*/
class ColorToolbarCtrl extends Initializable {
@FXML
var strokeColorLabel: Label = _
@FXML
var fillColorPicker: ColorPicker = _
@FXML
var fillColorLabel: Label = _
@FXML
var strokeColorPicker: ColorPicker = _
@FXML
var linePatternChooser: ChoiceBox[LinePattern] = _
@FXML
var fillPatternChooser: ChoiceBox[FillPattern] = _
@FXML
var borderThicknessChooser: ChoiceBox[Int] = _
private val fillColorConfig = new ValueConfig(Global.fillColorConfigURI)
private val strokeColorConfig = new ValueConfig(Global.strokeColorConfigURI)
private var selectionCtrl:SelectedShapeCtrl = _
override def initialize(location: URL, resources: ResourceBundle): Unit = {
setupDefaultColors()
setupPattern()
val sizesList:java.util.List[Int] = (1 until 20).asJava
borderThicknessChooser.setItems(FXCollections.observableArrayList(sizesList))
}
def postInitialize(selectionCtrl:SelectedShapeCtrl): Unit = {
this.selectionCtrl = selectionCtrl
onChoiceboxChanged(borderThicknessChooser)(
this.selectionCtrl.setStrokeWidth)
onChoiceboxChanged(linePatternChooser)(
this.selectionCtrl.setStrokePattern)
onChoiceboxChanged(fillPatternChooser)(this.selectionCtrl.setFillPattern)
}
def shutdown(): Unit = {
fillColorConfig.saveConfig()
strokeColorConfig.saveConfig()
}
@FXML def colorPickerChanged(ae:ActionEvent): Unit = {
val src = ae.getSource
if(src == strokeColorPicker)
selectionCtrl.setStrokeColor(withCheckedColor(strokeColorPicker.getValue))
else if(src == fillColorPicker)
selectionCtrl.setFillColor(withCheckedColor(fillColorPicker.getValue))
}
/** Checks that the color has a valid opacity and if not warns the user. */
private def withCheckedColor(c:Color): Color = {
val opacity = c.getOpacity()
val opacityPc = opacity*100
if(opacity != 1.0 && opacity != 0.0) {
Dialogs.newWarnDialog(
f"The given color has a opacity of $opacityPc%2.0f which modelica can't display.\\n"+
"Colors in modelica can have 2 opacitys: either 100% or 0%"
).showAndWait()
}
c
}
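
  // Illustrative helper (added for clarity; not part of the original controller): the opacity
  // rule enforced by withCheckedColor, expressed as a pure predicate. Modelica colors are
  // treated as either fully opaque or fully transparent.
  private def hasModelicaCompatibleOpacity(c: Color): Boolean =
    c.getOpacity == 1.0 || c.getOpacity == 0.0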
private def setupPattern(): Unit = {
val linePatterns = LinePattern.values.toList.asJava
linePatternChooser.setItems(FXCollections.observableList(linePatterns))
linePatternChooser.setValue(LinePattern.Solid)
val fillPatterns = FillPattern.values.toList.asJava
fillPatternChooser.setItems(FXCollections.observableList(fillPatterns))
fillPatternChooser.setValue(FillPattern.Solid)
}
  /* Set up default colors for the fill/stroke color pickers and the stroke width */
private def setupDefaultColors(): Unit = {
val fillColor = ResourceUtils.asColor("colorChooser.fillColor").getOrElse(Color.BLACK)
val strokeColor = ResourceUtils.asColor("colorChooser.strokeColor").getOrElse(Color.BLACK)
val width = Global.config.getInt("colorChooser.strokeWidth").getOrElse(1)
fillColorPicker.setValue(fillColor)
strokeColorPicker.setValue(strokeColor)
borderThicknessChooser.setValue(width)
//setup custom colors
fillColorPicker.getCustomColors.addAll(fillColorConfig.getConvertedValues:_*)
strokeColorPicker.getCustomColors.addAll(strokeColorConfig.getConvertedValues:_*)
val colorChangedHandler: ValueConfig => ListChangeListener[Color] = conf => new ListChangeListener[Color] {
override def onChanged(change: Change[_ <: Color]): Unit = {
while(change.next) {
if(change.wasAdded)
change.getAddedSubList.asScala.foreach(x => conf.setUniqueValue(x.toString))
else if(change.wasRemoved)
change.getRemoved.asScala.foreach(x => conf.removeValue(x.toString))
}
}
}
fillColorPicker.getCustomColors.addListener(colorChangedHandler(fillColorConfig))
strokeColorPicker.getCustomColors.addListener(colorChangedHandler(strokeColorConfig))
}
def getStrokeColor: Color = strokeColorPicker.getValue
def getFillColor: Color = fillColorPicker.getValue
def selectedThickness: Int = borderThicknessChooser.getSelectionModel.getSelectedItem
}
| THM-MoTE/MoVE | src/main/scala/de/thm/move/controllers/ColorToolbarCtrl.scala | Scala | mpl-2.0 | 5,135 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.akkastream.util
import akka.stream.impl.StreamLayout.{Atomic, Combine, Ignore, MaterializedValueNode, Module, Transform}
class MaterializedValueOps(mat: MaterializedValueNode) {
def resolve[Mat](materializedValues: scala.collection.mutable.Map[Module, Any]): Mat = {
def resolveMaterialized(mat: MaterializedValueNode,
materializedValues: scala.collection.mutable.Map[Module, Any]): Any = mat match {
case Atomic(m) => materializedValues.getOrElse(m, ())
case Combine(f, d1, d2) => f(resolveMaterialized(d1, materializedValues),
resolveMaterialized(d2, materializedValues))
case Transform(f, d) => f(resolveMaterialized(d, materializedValues))
case Ignore => ()
}
resolveMaterialized(mat, materializedValues).asInstanceOf[Mat]
}
}
object MaterializedValueOps{
def apply(mat: MaterializedValueNode): MaterializedValueOps = new MaterializedValueOps(mat)
}
| manuzhang/incubator-gearpump | experiments/akkastream/src/main/scala/org/apache/gearpump/akkastream/util/MaterializedValueOps.scala | Scala | apache-2.0 | 1,752 |
package mesosphere.marathon
package core.instance
import com.fasterxml.uuid.{EthernetAddress, Generators}
import mesosphere.marathon.api.v2.json.Formats._
import mesosphere.marathon.state.{PathId, PersistentVolume, VolumeMount}
import play.api.libs.functional.syntax._
import play.api.libs.json._
case class LocalVolume(id: LocalVolumeId, persistentVolume: PersistentVolume, mount: VolumeMount)
case class LocalVolumeId(runSpecId: PathId, name: String, uuid: String) {
import LocalVolumeId.delimiter
lazy val idString: String = runSpecId.safePath + delimiter + name + delimiter + uuid
override def toString: String = s"LocalVolume [$idString]"
}
object LocalVolumeId {
private val uuidGenerator = Generators.timeBasedGenerator(EthernetAddress.fromInterface())
private val delimiter = "#"
private val LocalVolumeEncoderRE = s"^([^$delimiter]+)[$delimiter]([^$delimiter]+)[$delimiter]([^$delimiter]+)$$".r
def apply(runSpecId: PathId, volume: PersistentVolume, mount: VolumeMount): LocalVolumeId = {
val name = volume.name.getOrElse(mount.mountPath)
LocalVolumeId(runSpecId, name, uuidGenerator.generate().toString)
}
  def unapply(id: String): Option[LocalVolumeId] = id match {
case LocalVolumeEncoderRE(runSpec, name, uuid) => Some(LocalVolumeId(PathId.fromSafePath(runSpec), name, uuid))
case _ => None
}
implicit val localVolumeIdReader: Reads[LocalVolumeId] = (
(__ \\ "runSpecId").read[PathId] and
(__ \\ "containerPath").read[String] and
(__ \\ "uuid").read[String]
)((id, path, uuid) => LocalVolumeId(id, path, uuid))
implicit val localVolumeIdWriter: Writes[LocalVolumeId] = Writes[LocalVolumeId] { localVolumeId =>
JsObject(Seq(
"runSpecId" -> Json.toJson(localVolumeId.runSpecId),
"containerPath" -> Json.toJson(localVolumeId.name),
"uuid" -> Json.toJson(localVolumeId.uuid),
"persistenceId" -> Json.toJson(localVolumeId.idString)
))
}
}
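
// Illustrative sketch (added; not part of the original file): how a volume id is rendered to
// its string form and parsed back. The run spec id, volume name, and uuid below are made-up
// values, and PathId's string `apply` is assumed to be available as elsewhere in Marathon.
object LocalVolumeIdRoundTripSketch {
  def demo(): Unit = {
    val id = LocalVolumeId(PathId("/prod/db"), "data", "123e4567-e89b-12d3-a456-426614174000")
    println(id.idString)                        // "<safe-run-spec-id>#data#<uuid>"
    println(LocalVolumeId.unapply(id.idString)) // Some(LocalVolumeId(...)), None if malformed
  }
}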
| gsantovena/marathon | src/main/scala/mesosphere/marathon/core/instance/LocalVolume.scala | Scala | apache-2.0 | 1,942 |
/*******************************************************************************
Copyright (c) 2013, S-Core.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.Tizen
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T, _}
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.typing.models.AbsConstValue
object TIZENWebAPIError extends Tizen {
val name = "WebAPIError"
/* predefined locations */
val loc_obj = newSystemRecentLoc(name + "Obj")
val loc_proto = newSystemRecentLoc(name + "Proto")
override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
(loc_obj, prop_obj), (loc_proto, prop_proto)
)
/* constructor or object*/
private val prop_obj: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(loc_proto), F, F, F)))),
("@extensible", AbsConstValue(PropValue(T))),
("@scope", AbsConstValue(PropValueNullTop)),
("@hasinstance", AbsConstValue(PropValueNullTop)),
("code", AbsConstValue(PropValue(ObjectValue(Value(UndefTop), F, T, T)))),
("name", AbsConstValue(PropValue(ObjectValue(Value(UndefTop), F, T, T)))),
("message", AbsConstValue(PropValue(ObjectValue(Value(UndefTop), F, T, T))))
)
/* prototype */
private val prop_proto: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
("@extensible", AbsConstValue(PropValue(T)))
)
override def getSemanticMap(): Map[String, SemanticFun] = {Map()}
override def getPreSemanticMap(): Map[String, SemanticFun] = {Map()}
override def getDefMap(): Map[String, AccessFun] = {Map()}
override def getUseMap(): Map[String, AccessFun] = {Map()}
} | darkrsw/safe | src/main/scala/kr/ac/kaist/jsaf/analysis/typing/models/Tizen/TIZENWebAPIError.scala | Scala | bsd-3-clause | 2,202 |
package part2
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
class Exercise8bVecCaseClassSpec extends AnyFlatSpec with Matchers {
// val vec1 = Vec(3, 4)
// val vec2 = Vec(5, 12)
"accessors" should "return fields from the Vec" in {
pending
// vec1.x should equal(3)
// vec1.y should equal(4)
// vec2.x should equal(5)
// vec2.y should equal(12)
}
"copy constructor" should "create new Vecs" in {
pending
// vec1.copy(x = 100) should equal(Vec(100, 4))
// vec2.copy(y = 100) should equal(Vec(5, 100))
}
"equals method" should "compare on contents, not on reference equality" in {
pending
// (vec1 == vec1.copy()) should equal(true)
// (vec2 eq vec1.copy()) should equal(false)
}
"companion object apply method" should "create a Vec" in {
pending
// val vec3 = Vec(1, 2)
// vec3.x should equal(1)
// vec3.y should equal(2)
}
"companion object unapply method" should "allow destructuring with pattern matching" in {
pending
// vec1 match {
// case Vec(x0, y0) =>
// x0 should equal(3)
// y0 should equal(4)
// }
}
}
| underscoreio/essential-scala-code | src/test/scala/part2/Exercise8bVecCaseClassSpec.scala | Scala | apache-2.0 | 1,179 |
package models
import org.joda.time.DateTime
import java.sql.Connection
import java.time.Instant
case class CreateNewsCategory(
categoryName: String,
iconUrl: String
) (
implicit newsCategoryRepo: NewsCategoryRepo
) {
def save()(implicit conn: Connection): NewsCategory = newsCategoryRepo.createNew(
categoryName,
iconUrl
)
def update(id: Long)(implicit conn: Connection): Int = newsCategoryRepo.update(
NewsCategoryId(id),
categoryName,
iconUrl
)
}
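
// Illustrative usage sketch (added; not part of the original file): creating and persisting a
// category. It assumes an implicit NewsCategoryRepo and java.sql.Connection are supplied by the
// surrounding application (e.g. via dependency injection and a withConnection block).
object CreateNewsCategorySketch {
  def createSportsCategory()(implicit repo: NewsCategoryRepo, conn: Connection): NewsCategory =
    CreateNewsCategory(categoryName = "Sports", iconUrl = "/assets/icons/sports.png").save()
}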
| ruimo/store2 | app/models/CreateNewsCategory.scala | Scala | apache-2.0 | 488 |
/*
* ____ ____ _____ ____ ___ ____
* | _ \\ | _ \\ | ____| / ___| / _/ / ___| Precog (R)
* | |_) | | |_) | | _| | | | | /| | | _ Advanced Analytics Engine for NoSQL Data
* | __/ | _ < | |___ | |___ |/ _| | | |_| | Copyright (C) 2010 - 2013 SlamData, Inc.
* |_| |_| \\_\\ |_____| \\____| /__/ \\____| All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version
* 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this
* program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.precog.performance
import org.specs2.mutable.Specification
import com.precog.common._
import com.precog.common.security._
import com.precog.common.util._
import com.precog.yggdrasil._
import com.precog.yggdrasil.actor._
import com.precog.yggdrasil.serialization._
import com.precog.bifrost.yggdrasil._
import com.precog.mimir._
import com.precog.mimir.memoization._
import akka.actor.ActorSystem
import akka.pattern.ask
import akka.dispatch.ExecutionContext
import akka.dispatch.Await
import akka.util.Timeout
import akka.util.Duration
import akka.util.duration._
import blueeyes.json._
import java.io.File
import org.streum.configrity.Configuration
import java.util.concurrent.atomic.AtomicInteger
import scalaz._
import Scalaz._
trait JDBMPerformanceSpec extends Specification with PerformanceSpec {
sequential
val timeout = Duration(5000, "seconds")
val benchParams = BenchmarkParameters(5, 500, Some(500), false)
val singleParams = BenchmarkParameters(5, 500, Some(500), false)
val config = Configuration(Map.empty[String, String])
val tmpDir = newTempDir()
lazy val bifrost = new TestShard(config, tmpDir)
lazy val executor = new TestQueryExecutor(config, bifrost)
val perfUtil = PerformanceUtil.default
step {
Await.result(bifrost.actorsStart, timeout)
}
"yggdrasil" should {
val seqId = new AtomicInteger(0)
def insert(bifrost: TestShard, path: Path, pid: Int, batchSize: Int, batches: Int) {
val batch = new Array[EventMessage](batchSize)
var id = 0
var b = 0
while(b < batches) {
var i = 0
while(i < batchSize) {
val jval = AdSamples.adCampaignSample.sample.get
val event = Event(path, "apiKey", jval, Map.empty)
batch(i) = EventMessage(EventId(pid, id), event)
i += 1
id += 1
}
val result = bifrost.storeBatch(batch, timeout)
Await.result(result, timeout)
b += 1
}
}
"load test sim" in {
insert(bifrost, Path("/test/query_set"), 2, 10000, 1)
val threadCount = 10
val queries = List(
"(load(//test/query_set))",
"""
tests := load(//test/query_set)
count(tests where tests.gender = "male")
""",
"""
tests := load(//test/query_set)
histogram('platform) :=
{ platform: 'platform, num: count(tests where tests.platform = 'platform) }
histogram
"""
)
def test(i: Int) = {
val threads = (0.until(threadCount)) map { i =>
if(i == 0) {
new Thread {
override def run() {
insert(bifrost, Path("/test/insert_set"), 2, 10000, 1)
}
}
} else {
new Thread {
val rand = new java.util.Random()
override def run() {
var cnt = 0
while(cnt < i) {
val q = queries(rand.nextInt(queries.length))
val result = executor.execute("apiKey", q,"")
result match {
case Success(jval) =>
                    case Failure(e) => throw new RuntimeException("Query result failure: " + e)
}
cnt += 1
}
}
}
}
}
threads.foreach{ _.start }
threads.foreach{ _.join }
}
println("load test sim")
val result = Performance().benchmark(test(10), benchParams, benchParams)
perfUtil.uploadResults("load_test_sim", result)
//val result = Performance().profile(test(10))
result.report("load test sym", System.out)
true must_== true
}
"insert" in {
val tests = 100000
val batchSize = 1000
val result = Performance().benchmark(insert(bifrost, Path("/test/insert/"), 0, batchSize, tests / batchSize), singleParams, singleParams)
perfUtil.uploadResults("insert_100k", result)
//val result = Performance().profile(insert(bifrost, Path("/test/insert/"), 0, batchSize, tests / batchSize))
println("starting insert test")
result.report("insert 100K", System.out)
true must_== true
}
def testRead() = {
executor.execute("apiKey", "count(load(//test/large))")
}
"read large" in {
insert(bifrost, Path("/test/large"), 1, 100000, 1)
println("read large test")
val result = Performance().benchmark(testRead(), benchParams, benchParams)
perfUtil.uploadResults("read_100k", result)
//val result = Performance().profile(testRead())
result.report("read 100K", System.out)
true must_== true
}
"read small 10K x 10" in {
insert(bifrost, Path("/test/small1"), 1, 10000, 1)
def test(i: Int) = {
var cnt = 0
while(cnt < i) {
val result = executor.execute("apiKey", "count(load(//test/small1))")
result match {
case Success(jval) =>
            case Failure(e) => throw new RuntimeException("Query result failure: " + e)
}
cnt += 1
}
}
val result = Performance().benchmark(test(10), benchParams, benchParams)
perfUtil.uploadResults("read_10k_10x", result)
//val result = Performance().profile(test(100))
result.report("read 10K elements x 10 times", System.out)
true must_== true
}
"multi-thread read" in {
insert(bifrost, Path("/test/small2"), 2, 10000, 1)
val threadCount = 10
def test(i: Int) = {
val threads = (0.until(threadCount)) map { _ =>
new Thread {
override def run() {
var cnt = 0
while(cnt < i) {
val result = executor.execute("apiKey", "(load(//test/small2))")
result match {
case Success(jval) =>
                  case Failure(e) => throw new RuntimeException("Query result failure: " + e)
}
cnt += 1
}
}
}
}
threads.foreach{ _.start }
threads.foreach{ _.join }
}
val result = Performance().benchmark(test(1), benchParams, benchParams)
perfUtil.uploadResults("read_10k_10thread", result)
//val result = Performance().profile(test(10))
println("read small thread test")
result.report("read 10K elements x 1 times with 10 threads", System.out)
true must_== true
}
"hw2 test 100K x 1" in {
insert(bifrost, Path("/test/small3"), 1, 100000, 1)
val query =
"""
tests := load(//test/small3)
count(tests where tests.gender = "male")
"""
def test(i: Int) = {
var cnt = 0
while(cnt < i) {
val result = executor.execute("apiKey", query)
result match {
case Success(jval) =>
            case Failure(e) => throw new RuntimeException("Query result failure: " + e)
}
cnt += 1
}
}
val result = Performance().benchmark(test(1), benchParams, benchParams)
perfUtil.uploadResults("hw2_100k", result)
//val result = Performance().profile(test(100))
result.report("hw2 test 100K * 1", System.out)
true must_== true
}
"hw3 test" in {
insert(bifrost, Path("/test/small4"), 1, 100000, 1)
val query =
"""
tests := load(//test/small4)
histogram('platform) :=
{ platform: 'platform, num: count(tests where tests.platform = 'platform) }
histogram
"""
def test(i: Int) = {
var cnt = 0
while(cnt < i) {
val result = executor.execute("apiKey", query)
result match {
case Success(jval) =>
            case Failure(e) => throw new RuntimeException("Query result failure: " + e)
}
cnt += 1
}
}
val result = Performance().benchmark(test(1), benchParams, benchParams)
perfUtil.uploadResults("hw3_100k", result)
//val result = Performance().profile(test(100))
result.report("hw3 test 100K * 1", System.out)
true must_== true
}
"handle null scenario" in {
val nullReal = """
[{
"event":"activated",
"currency":"USD",
"customer":{
"country":"CA",
"email":"[email protected]",
"firstName":"John",
"lastName":"Smith",
"organization":"",
"zipcode":"11111"
},
"endDate":null,
"product":{
"name":"Subscription 1"
},
"quantity":1,
"regularPriceUsd":10,
"timestamp":{
"date":7,
"day":3,
"hours":0,
"minutes":0,
"month":2,
"seconds":0,
"time":1331078400000,
"timezoneOffset":0,
"year":112
}
},{
"event":"deactivated",
"currency":"USD",
"customer":{
"country":"US",
"email":"[email protected]",
"firstName":"Ryan",
"lastName":"Dewell",
"organization":"",
"zipcode":"93101"
},
"endDate":{
"date":7,
"day":3,
"hours":0,
"minutes":0,
"month":2,
"seconds":0,
"time":1331078400000,
"timezoneOffset":0,
"year":112
},
"product":{
"name":"ABC Subscription"
},
"quantity":1,
"reason":"canceled",
"regularPriceUsd":9,
"timestamp":{
"date":7,
"day":3,
"hours":0,
"minutes":0,
"month":2,
"seconds":0,
"time":1331078400000,
"timezoneOffset":0,
"year":112
}
}]
"""
val jvals = JParser.parse(nullReal)
val msgs = jvals match {
case JArray(jvals) =>
jvals.zipWithIndex.map {
case (jval, idx) =>
val event = Event(Path("/test/null"), "apiKey", jval, Map.empty)
EventMessage(EventId(1,idx), event)
}
}
Await.result(bifrost.storeBatch(msgs, timeout), timeout)
Thread.sleep(10000)
val result = executor.execute("apiKey", "load(//test/null)")
result must beLike {
case Success(JArray(vals)) => vals.size must_== 2
}
}.pendingUntilFixed
"handle mixed type scenario" in {
val mixedReal = """
[{
"event":"activated",
"currency":"USD",
"customer":{
"country":"CA",
"email":"[email protected]",
"firstName":"John",
"lastName":"Smith",
"organization":"",
"zipcode":"11111"
},
"endDate":"null",
"product":{
"name":"Subscription 1"
},
"quantity":1,
"regularPriceUsd":10,
"timestamp":{
"date":7,
"day":3,
"hours":0,
"minutes":0,
"month":2,
"seconds":0,
"time":1331078400000,
"timezoneOffset":0,
"year":112
}
},{
"event":"deactivated",
"currency":"USD",
"customer":{
"country":"US",
"email":"[email protected]",
"firstName":"Ryan",
"lastName":"Dewell",
"organization":"",
"zipcode":"93101"
},
"endDate":{
"date":7,
"day":3,
"hours":0,
"minutes":0,
"month":2,
"seconds":0,
"time":1331078400000,
"timezoneOffset":0,
"year":112
},
"product":{
"name":"ABC Subscription"
},
"quantity":1,
"reason":"canceled",
"regularPriceUsd":9,
"timestamp":{
"date":7,
"day":3,
"hours":0,
"minutes":0,
"month":2,
"seconds":0,
"time":1331078400000,
"timezoneOffset":0,
"year":112
}
}]
"""
val jvalues = JsonParser.parse(mixedReal)
val msgs = jvalues match {
case JArray(jvals) =>
jvals.zipWithIndex.map {
case (jval, idx) =>
val event = Event(Path("/test/mixed"), "apiKey", jval, Map.empty)
EventMessage(EventId(2,idx), event)
}
}
Await.result(bifrost.storeBatch(msgs, timeout), timeout)
Thread.sleep(10000)
val result = executor.execute("apiKey", "load(//test/mixed)")
result must beLike {
case Success(JArray(vals)) => vals.size must_== 2
}
}.pendingUntilFixed
}
step {
Await.result(bifrost.actorsStop, timeout)
cleanupTempDir(tmpDir)
}
}
class TestQueryExecutor(config: Configuration, testShard: TestShard) extends
JDBMQueryExecutor with
IterableDatasetOpsComponent {
override type Dataset[A] = IterableDataset[A]
lazy val actorSystem = ActorSystem("testQueryExecutor")
implicit lazy val asyncContext = ExecutionContext.defaultExecutionContext(actorSystem)
lazy val yggConfig = new JDBMQueryExecutorConfig {
val config = TestQueryExecutor.this.config
val sortWorkDir = scratchDir
val memoizationBufferSize = sortBufferSize
val memoizationWorkDir = scratchDir
val clock = blueeyes.util.Clock.System
val idSource = new FreshAtomicIdSource
object valueSerialization extends SortSerialization[SValue] with SValueRunlengthFormatting with BinarySValueFormatting with ZippedStreamSerialization
object eventSerialization extends SortSerialization[SEvent] with SEventRunlengthFormatting with BinarySValueFormatting with ZippedStreamSerialization
object groupSerialization extends SortSerialization[(SValue, Identities, SValue)] with GroupRunlengthFormatting with BinarySValueFormatting with ZippedStreamSerialization
object memoSerialization extends IncrementalSerialization[(Identities, SValue)] with SEventRunlengthFormatting with BinarySValueFormatting with ZippedStreamSerialization
override lazy val flatMapTimeout: Duration = 5000 seconds
override lazy val projectionRetrievalTimeout: Timeout = Timeout(5000 seconds)
override lazy val maxEvalDuration: Duration = 5000 seconds
}
type Storage = TestShard
object ops extends Ops
object query extends QueryAPI
val storage = testShard
}
class TestShard(config: Configuration, dataDir: File) extends ActorYggShard[IterableDataset] with StandaloneActorEcosystem {
type YggConfig = ProductionActorConfig
lazy val yggConfig = new ProductionActorConfig {
lazy val config = TestShard.this.config
}
lazy val yggState: YggState = YggState.restore(dataDir).unsafePerformIO.toOption.get
lazy val accessControl: AccessControl = new UnlimitedAccessControl()(ExecutionContext.defaultExecutionContext(actorSystem))
def waitForRoutingActorIdle() {
val td = Duration(5000, "seconds")
implicit val to = new Timeout(td)
Await.result(routingActor ? ControlledStop, td)
Await.result(routingActor ? Start, td)
}
}
| precog/platform | performance/src/test/scala/com/precog/performance/YggdrasilPerformanceSpec.scala | Scala | agpl-3.0 | 15,235 |
package com.tritondigital.consul.http.client
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
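/**
 * Resolves the registered nodes of a Consul service through the injected
 * `listNodes` lookup, optionally routed through a [[Cache]], and spreads
 * successive calls across the returned nodes with a round-robin selector.
 *
 * A hedged usage sketch; the service name, Consul address and port below
 * are purely illustrative:
 *
 * {{{
 * ConsulClient.resolve("billing-api", "localhost", 8500, None)
 *   .map(node => s"http://${node.ip}:${node.port}/")
 * }}}
 */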
class ConsulClient(listNodes: (String, String, Int) => Future[Seq[Node]]) {
private var nodeSelector = new RoundRobinSelector[Node]
def resolve(service: String, host: String, port: Int, cacheOption: Option[Cache]): Future[Node] = {
val future = cacheOption match {
case None => listNodes(service, host, port)
case Some(cache) => cache(service, host, port) {
listNodes(service, host, port)
}
}
future.map { nodes =>
nodeSelector.select(nodes).getOrElse {
throw new NoNodeException(service)
}
}
}
def reset(): Unit = nodeSelector = new RoundRobinSelector[Node]
}
case class Node(ip: String, port: Int)
object ConsulClient extends ConsulClient(ConsulHttpClient.listNodes)
| tritondigital/ConsulHttpClient | src/main/scala/com/tritondigital/consul/http/client/ConsulClient.scala | Scala | mit | 881 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.tools.status
import com.beust.jcommander.{Parameter, ParameterException, Parameters}
import org.geotools.data.DataStore
import org.locationtech.geomesa.tools.utils.KeywordParamSplitter
import org.locationtech.geomesa.tools.{Command, DataStoreCommand, RequiredTypeNameParam}
trait KeywordsCommand[DS <: DataStore] extends DataStoreCommand[DS] {
override val name: String = "keywords"
override def params: KeywordsParams
override def execute(): Unit = withDataStore(modifyKeywords)
protected def modifyKeywords(ds: DS): Unit = {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType._
import scala.collection.JavaConversions._
val sft = ds.getSchema(params.featureName)
if (sft == null) {
throw new ParameterException(s"Feature '${params.featureName}' not found")
}
if (params.removeAll) {
val confirm = System.console().readLine("Remove all keywords? (y/n): ").toLowerCase()
if (confirm.equals("y") || confirm.equals("yes")) {
sft.removeAllKeywords()
} else {
Command.user.info("Aborting operation")
return
}
} else if (params.keywordsToRemove != null) {
sft.removeKeywords(params.keywordsToRemove.toSet)
}
if (params.keywordsToAdd != null) {
sft.addKeywords(params.keywordsToAdd.toSet)
}
ds.updateSchema(params.featureName, sft)
if (params.list) {
Command.output.info("Keywords: " + ds.getSchema(sft.getTypeName).getKeywords.mkString(", "))
}
}
}
@Parameters(commandDescription = "Add/Remove/List keywords on an existing schema")
trait KeywordsParams extends RequiredTypeNameParam {
@Parameter(names = Array("-a", "--add"), description = "A keyword to add. Can be specified multiple times", splitter = classOf[KeywordParamSplitter])
var keywordsToAdd: java.util.List[String] = _
@Parameter(names = Array("-r", "--remove"), description = "A keyword to remove. Can be specified multiple times", splitter = classOf[KeywordParamSplitter])
var keywordsToRemove: java.util.List[String] = _
@Parameter(names = Array("-l", "--list"), description = "List all keywords on the schema")
var list: Boolean = false
@Parameter(names = Array("--removeAll"), description = "Remove all keywords on the schema")
var removeAll: Boolean = false
}
| ronq/geomesa | geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/status/KeywordsCommand.scala | Scala | apache-2.0 | 2,805 |
package plumitive
object Document {
case class Id(unId: String) extends AnyVal
case class Title(unTitle: String) extends AnyVal
case class Tag(unTag: String) extends AnyVal
case class Date(month: Option[Month], year: Int)
case class Sender(unSender: String) extends AnyVal
case class Recipient(unRecipient: String) extends AnyVal
sealed trait Month {
def toInt: Int
def name: String
}
object Month {
def fromInt(int: Int): Month = int match {
case 1 => January
case 2 => February
case 3 => March
case 4 => April
case 5 => May
case 6 => June
case 7 => July
case 8 => August
case 9 => September
case 10 => October
case 11 => November
case 12 => December
case _ => throw new RuntimeException(s"Month $int does not exist")
}
def fromName(name: String): Option[Month] = name.toLowerCase match {
case "january" => Some(January)
case "february" => Some(February)
case "march" => Some(March)
case "april" => Some(April)
case "may" => Some(May)
case "june" => Some(June)
case "july" => Some(July)
case "august" => Some(August)
case "september" => Some(September)
case "october" => Some(October)
case "november" => Some(November)
case "december" => Some(December)
case _ => None
}
}
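  // A hedged usage sketch of the helpers above (the values are illustrative):
  //   Month.fromName("march").map(_.toInt)   // Some(3)
  //   Month.fromInt(3).name                  // "March"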
case object January extends Month {
override def toInt: Int = 1
override def name: String = "January"
}
case object February extends Month {
override def toInt: Int = 2
override def name: String = "February"
}
case object March extends Month {
override def toInt: Int = 3
override def name: String = "March"
}
case object April extends Month {
override def toInt: Int = 4
override def name: String = "April"
}
case object May extends Month {
override def toInt: Int = 5
override def name: String = "May"
}
case object June extends Month {
override def toInt: Int = 6
override def name: String = "June"
}
case object July extends Month {
override def toInt: Int = 7
override def name: String = "July"
}
case object August extends Month {
override def toInt: Int = 8
override def name: String = "August"
}
case object September extends Month {
override def toInt: Int = 9
override def name: String = "September"
}
case object October extends Month {
override def toInt: Int = 10
override def name: String = "October"
}
case object November extends Month {
override def toInt: Int = 11
override def name: String = "November"
}
case object December extends Month {
override def toInt: Int = 12
override def name: String = "December"
}
}
import Document._
case class Document(
id: Option[Id],
title: Title,
scannedText: Option[String],
tags: Set[Tag],
date: Date,
sender: Sender,
recipients: Set[Recipient]
)
| ostapneko/plumitive | src/main/scala/plumitive/Document.scala | Scala | mit | 2,991 |
package water.api.dsl
import water.fvec.Chunk
import water.MRTask2
abstract class SMRTask[T <: SMRTask[T]] extends MRTask2[T] {
def iterator(chunks:Array[Chunk]) : Iterator[Row] = new RowIterator(chunks)
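  // Note: RowIterator hands out a single reusable CRow backed by the chunk
  // array; next() only advances the shared cursor and returns null (rather
  // than throwing) once the chunk is exhausted, so callers must read a row's
  // values before advancing again.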
private class RowIterator (private val chunks:Array[Chunk]) extends Iterator[Row] {
private var rowNum: Int = -1
private val row = new CRow
def next() : Row = {
rowNum += 1
return if (rowNum < chunks(0)._len) row else null
}
def hasNext() : Boolean = rowNum+1 < chunks(0)._len
/** Array of chunks encapsulation */
class CRow extends Row {
override def d(ncol: Int): scala.Double = chunks(ncol).at0 (rowNum)
override def l(ncol: Int): scala.Long = chunks(ncol).at80(rowNum)
override def ncols(): Int = chunks.length
}
}
}
| janezhango/BigDataMachineLearning | h2o-scala/src/main/scala/water/api/dsl/SMRTask.scala | Scala | apache-2.0 | 803 |
package leibniz
import leibniz.internal.Unsafe
import leibniz.variance.Constant
/**
* In constructive mathematics, an apartness relation is a constructive
* form of inequality, and is often taken to be more basic than equality.
* It is often written as # to distinguish from the negation of equality
* (the denial inequality) ≠, which is weaker.
*
* An apartness relation is a symmetric irreflexive binary relation with
* the additional condition that if two elements are apart, then any other
* element is apart from at least one of them (this last property is often
* called co-transitivity or comparison).
*
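  * A hypothetical usage sketch (assuming the companion's macro can
  * materialise an instance for two concrete, unequal types):
  *
  * {{{
  * val intIsNotString: Apart[Int, String] = Apart[Int, String]
  * // any supposed proof `ev: Int === String` is then refuted by
  * // `intIsNotString.contradicts(ev)`, which produces a value of any type
  * }}}
  *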
* @see [[https://en.wikipedia.org/wiki/Apartness_relation
* Apartness relation]]
*/
sealed abstract class Apart[A, B] { nab =>
def weaken: WeakApart[A, B]
def leftType: TypeId[A]
def rightType: TypeId[B]
/**
* If `F[A]` equals to `F[B]` for unequal types `A` and `B`,
* then `F` must be a constant type constructor.
*/
def proof[F[_]](f: F[A] === F[B]): Constant[F] =
weaken.constant[F](f)
/**
* Inequality is a co-transitive relation: if two elements
* are apart, then any other element is apart from at least
* one of them.
*/
def compare[C](C: TypeId[C]): Either[Apart[A, C], Apart[B, C]] =
TypeId.compare(leftType, C) match {
case Right(_) => TypeId.compare(rightType, C) match {
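        // Both comparisons succeeding would mean A === C and B === C, which
        // composes to A === B and contradicts this apartness; a total
        // implementation would turn that contradiction into the required
        // result instead of leaving the hole below unfilled.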
case Right(_) => ???
case Left(p) => Right(p)
}
case Left(p) => Left(p)
}
/**
* Inequality is symmetric relation and therefore can be flipped around.
* Flipping is its own inverse, so `x.flip.flip == x`.
*/
def flip: Apart[B, A] = new Apart[B, A] {
def weaken: WeakApart[B, A] = nab.weaken.flip
def leftType: TypeId[B] = nab.rightType
def rightType: TypeId[A] = nab.leftType
override def flip: Apart[A, B] = nab
}
/**
* Having `A === B` and `A =!= B` at the same time leads to a contradiction.
*/
def contradicts[R](ab: A === B): R = {
type f[x] = x
nab.proof[f](ab)[Unit, R].subst[f](())
}
override def toString: String = s"$leftType =!= $rightType"
}
object Apart {
private[this] final class Witness[A, B]
(val leftType: TypeId[A], val rightType: TypeId[B], val weaken: A =!= B)
extends Apart[A, B]
def apply[A, B](implicit ev: Apart[A, B]): Apart[A, B] = ev
implicit def summon[A, B]: Apart[A, B] =
macro internal.MacroUtil.mkApart[A, B]
def witness[A, B](weakApart: WeakApart[A, B], A: TypeId[A], B: TypeId[B]): Apart[A, B] =
new Witness[A, B](A, B, weakApart)
/**
* Inequality is an irreflexive relation.
*/
def irreflexive[A](ev: Apart[A, A]): Void =
ev.contradicts(Is.refl[A])
} | alexknvl/leibniz | src/main/scala/leibniz/Apart.scala | Scala | mit | 2,712 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.intg
import scala.concurrent.duration._
import org.ensime.api
import org.ensime.api.{ BasicTypeInfo => _, EnsimeFile => _, _ }
import org.ensime.core._
import org.ensime.fixture._
import org.ensime.indexer.FullyQualifiedName
import org.ensime.model.BasicTypeInfo
import org.ensime.util.EnsimeSpec
import org.ensime.util.ensimefile.EnsimeFile
import org.ensime.util.ensimefile.Implicits.DefaultCharset
import org.ensime.util.file._
class BasicWorkflow extends EnsimeSpec
with IsolatedEnsimeConfigFixture
with IsolatedTestKitFixture
with IsolatedProjectFixture
with RefactoringHandlerTestUtils {
val original = EnsimeConfigFixture.SimpleTestProject
"ensime-server" should "open the simple test project" in {
withEnsimeConfig { implicit config =>
withTestKit { implicit testkit =>
withProject { (project, asyncHelper) =>
import testkit._
val sourceRoot = scalaMain(config)
val fooFile = sourceRoot / "org/example/Foo.scala"
val fooPath = fooFile.toPath
val fooFilePath = fooFile.getAbsolutePath
val barFile = sourceRoot / "org/example/Bar.scala"
val barPath = barFile.toPath
val testRoot = scalaTest(config)
val blooSpecFile = testRoot / "org/example/BlooSpec.scala"
project ! TypecheckFilesReq(List(Left(blooSpecFile)))
expectMsg(VoidResponse)
asyncHelper.expectMsg(FullTypeCheckCompleteEvent)
project ! UnloadFileReq(SourceFileInfo(EnsimeFile(blooSpecFile)))
expectMsg(VoidResponse)
project ! TypecheckModule(EnsimeProjectId("testing_simple", "compile"))
expectMsg(VoidResponse)
all(asyncHelper.receiveN(3)) should matchPattern {
case CompilerRestartedEvent =>
case FullTypeCheckCompleteEvent =>
case n: NewScalaNotesEvent =>
}
project ! UnloadAllReq
expectMsg(VoidResponse)
expectMsg(VoidResponse)
all(asyncHelper.receiveN(4)) should matchPattern {
case CompilerRestartedEvent =>
case FullTypeCheckCompleteEvent =>
}
// trigger typeCheck
project ! TypecheckFilesReq(List(Left(fooFile), Left(barFile)))
expectMsg(VoidResponse)
asyncHelper.expectMsg(FullTypeCheckCompleteEvent)
          // Asking to typecheck a missing file should report an error, not kill the system
val missingFile = sourceRoot / "missing.scala"
val missingFilePath = missingFile.getAbsolutePath
project ! TypecheckFilesReq(List(Left(missingFile)))
expectMsg(EnsimeServerError(s"""file(s): "${EnsimeFile(missingFilePath)}" do not exist"""))
//-----------------------------------------------------------------------------------------------
// semantic highlighting
project ! SymbolDesignationsReq(Left(fooFile), -1, 299, SourceSymbol.allSymbols)
val designations = expectMsgType[SymbolDesignations]
designations.file match {
case rf: RawFile => rf.file.toFile shouldBe fooFile
case af: ArchiveFile => ???
}
designations.syms should contain(SymbolDesignation(12, 19, PackageSymbol))
// expected Symbols
// ((package 12 19) (package 8 11) (trait 40 43) (valField 69 70) (class 100 103) (param 125 126) (class 128 131) (param 133 134) (class 136 142) (operator 156 157) (param 154 155) (functionCall 160 166) (param 158 159) (valField 183 186) (class 193 199) (class 201 204) (valField 214 217) (class 224 227) (functionCall 232 239) (operator 250 251) (valField 256 257) (valField 252 255) (functionCall 261 268) (functionCall 273 283) (valField 269 272)))
//-----------------------------------------------------------------------------------------------
// symbolAtPoint
project ! SymbolAtPointReq(Left(fooFile), 128)
expectMsgType[SymbolInfo]
project ! SymbolAtPointReq(Left(fooFile), 68) // scala.Int symbol
val symInfo = expectMsgType[SymbolInfo]
symInfo.`type`.pos should matchPattern {
case Some(OffsetSourcePosition(_, _)) =>
}
val sourceFileLocation = symInfo.`type`.pos.get.asInstanceOf[OffsetSourcePosition].file
project ! SymbolDesignationsReq(Right(SourceFileInfo(sourceFileLocation, None, None)), -1, 9999, SourceSymbol.allSymbols)
val symbolDesignations = expectMsgType[SymbolDesignations]
symbolDesignations.syms should not be empty
//-----------------------------------------------------------------------------------------------
// public symbol search - java.io.File
project ! PublicSymbolSearchReq(List("java", "io", "File"), 30)
val javaSearchSymbol = expectMsgType[SymbolSearchResults]
assert(javaSearchSymbol.syms.exists {
case TypeSearchResult("java.io.File", "File", DeclaredAs.Class, Some(_)) => true
case _ => false
})
//-----------------------------------------------------------------------------------------------
// public symbol search - scala.util.Random
project ! PublicSymbolSearchReq(List("scala", "util", "Random"), 2)
expectMsgPF() {
case SymbolSearchResults(res) if res.collectFirst { case TypeSearchResult("scala.util.Random", "Random", DeclaredAs.Class, Some(_)) => true }.isDefined =>
// this is a pretty ropey test at the best of times
}
//-----------------------------------------------------------------------------------------------
// documentation for type at point
val intDocSig = DocSigPair(DocSig(DocFqn("scala", "Int"), None), DocSig(DocFqn("", "int"), None))
// NOTE these are handled as multi-phase queries in requesthandler
project ! DocUriAtPointReq(Left(fooFile), OffsetRange(128))
expectMsg(Some(intDocSig))
//-----------------------------------------------------------------------------------------------
// uses of symbol at point
project ! TypecheckFilesReq(List(Left(fooFile)))
expectMsg(VoidResponse)
asyncHelper.expectMsg(FullTypeCheckCompleteEvent)
val packageFile = sourceRoot / "org/example/package.scala"
val packageFilePath = packageFile.getAbsolutePath
project ! FqnOfSymbolAtPointReq(SourceFileInfo(EnsimeFile(fooFile), None, None), 119)
var fqn = expectMsgType[FullyQualifiedName].fqnString
project ! FindUsages(fqn) // point on testMethod
expectMsgType[SourcePositions].positions should contain theSameElementsAs List(
PositionHint(LineSourcePosition(EnsimeFile(fooFile), 17), Some("println(foo.testMethod(7, \\"seven\\"))")),
PositionHint(LineSourcePosition(EnsimeFile(packageFile), 7), Some("new Foo.Foo().testMethod(1, \\"\\")"))
)
//-----------------------------------------------------------------------------------------------
// tree of symbol at point
project ! FqnOfTypeAtPointReq(SourceFileInfo(EnsimeFile(fooFile), None, None), 56)
fqn = expectMsgType[FullyQualifiedName].fqnString
project ! FindHierarchy(fqn) // point on class Bar
expectMsgType[HierarchyInfo] should matchPattern {
case HierarchyInfo(
List(ClassInfo(None, "java.lang.Object", DeclaredAs.Class, _)),
List(ClassInfo(Some("org.example.Foo.Foo"), "org.example.Foo$Foo", DeclaredAs.Class, _))
) =>
}
// note that the line numbers appear to have been stripped from the
// scala library classfiles, so offset/line comes out as zero unless
// loaded by the pres compiler
project ! SymbolAtPointReq(Left(fooFile), 276)
expectMsgPF() {
case SymbolInfo(
"testMethod",
"testMethod",
Some(OffsetSourcePosition(RawFile(`fooPath`), 114)),
ArrowTypeInfo(
"(Int, String) => Int",
"(scala.Int, java.lang.String) => scala.Int",
BasicTypeInfo("Int", DeclaredAs.Class, "scala.Int"),
List(ParamSectionInfo(
List(
("i", BasicTypeInfo("Int", DeclaredAs.Class, "scala.Int")),
                  ("s", BasicTypeInfo("String", DeclaredAs.Class, "java.lang.String"))),
false)
), Nil)
) =>
}
// M-. external symbol
project ! SymbolAtPointReq(Left(fooFile), 190)
expectMsgPF() {
case SymbolInfo("Map", "Map", Some(OffsetSourcePosition(_, _)),
BasicTypeInfo("Map", DeclaredAs.Object, "scala.collection.immutable.Map")) =>
}
project ! SymbolAtPointReq(Left(fooFile), 343)
expectMsgPF() {
case SymbolInfo("fn", "fn", Some(OffsetSourcePosition(RawFile(`fooPath`), 304)),
api.BasicTypeInfo("(String) => Int", DeclaredAs.Trait, "(java.lang.String) => scala.Int",
List(
BasicTypeInfo("String", DeclaredAs.Class, "java.lang.String"),
BasicTypeInfo("Int", DeclaredAs.Class, "scala.Int")),
Nil, None, Nil)) =>
}
project ! SymbolAtPointReq(Left(barFile), 150)
expectMsgPF() {
case SymbolInfo("apply", "apply", Some(OffsetSourcePosition(RawFile(`barPath`), 59)),
ArrowTypeInfo("(String, Int) => Foo", "(java.lang.String, scala.Int) => org.example.Bar.Foo",
BasicTypeInfo("Foo", DeclaredAs.Class, "org.example.Bar.Foo"),
List(ParamSectionInfo(
List(
("bar", BasicTypeInfo("String", DeclaredAs.Class, "java.lang.String")),
("baz", BasicTypeInfo("Int", DeclaredAs.Class, "scala.Int"))), false)),
Nil)) =>
}
project ! SymbolAtPointReq(Left(barFile), 193)
expectMsgPF() {
case SymbolInfo("copy", "copy", Some(OffsetSourcePosition(RawFile(`barPath`), 59)),
ArrowTypeInfo("(String, Int) => Foo", "(java.lang.String, scala.Int) => org.example.Bar.Foo",
BasicTypeInfo("Foo", DeclaredAs.Class, "org.example.Bar.Foo"),
List(ParamSectionInfo(
List(
("bar", BasicTypeInfo("String", DeclaredAs.Class, "java.lang.String")),
("baz", BasicTypeInfo("Int", DeclaredAs.Class, "scala.Int"))), false)),
Nil)) =>
}
project ! SymbolAtPointReq(Left(fooFile), 600)
expectMsgPF() {
case SymbolInfo("poly", "poly", Some(OffsetSourcePosition(RawFile(`fooPath`), 548)),
ArrowTypeInfo("(A, B) => (A, B)", "(org.example.WithPolyMethod.A, org.example.WithPolyMethod.B) => (org.example.WithPolyMethod.A, org.example.WithPolyMethod.B)",
api.BasicTypeInfo(
"(A, B)", DeclaredAs.Class, "(org.example.WithPolyMethod.A, org.example.WithPolyMethod.B)",
List(
BasicTypeInfo("A", DeclaredAs.Nil, "org.example.WithPolyMethod.A"),
BasicTypeInfo("B", DeclaredAs.Nil, "org.example.WithPolyMethod.B")),
Nil, None, Nil),
List(ParamSectionInfo(
List(
("a", BasicTypeInfo("A", DeclaredAs.Nil, "org.example.WithPolyMethod.A")),
("b", BasicTypeInfo("B", DeclaredAs.Nil, "org.example.WithPolyMethod.B"))),
false)),
List(
BasicTypeInfo("A", DeclaredAs.Nil, "org.example.WithPolyMethod.A"),
BasicTypeInfo("B", DeclaredAs.Nil, "org.example.WithPolyMethod.B"))
)
) =>
}
// expand selection around "seven" in `foo.testMethod` call
project ! ExpandSelectionReq(fooFile, 215, 215)
val expandRange1 = expectMsgType[FileRange]
expandRange1 shouldBe FileRange(fooFilePath, 214, 217)
project ! ExpandSelectionReq(fooFile, 214, 217)
val expandRange2 = expectMsgType[FileRange]
expandRange2 shouldBe FileRange(fooFilePath, 210, 229)
project ! RefactorReq(1234, RenameRefactorDesc("bar", fooFile, 215, 215), false)
expectMsgPF() {
case RefactorDiffEffect(1234, RefactorType.Rename, diff) =>
val relevantExpectedPart = s"""|@@ -14,5 +14,5 @@
| val map = Map[String, Int]()
|- val foo = new Foo()
|- println("Hello, " + foo.x)
|- println(foo.testMethod(7, "seven"))
|+ val bar = new Foo()
|+ println("Hello, " + bar.x)
|+ println(bar.testMethod(7, "seven"))
| \\n""".stripMargin
val diffContents = diff.canon.readString()
val expectedContents = expectedDiffContent(fooFilePath, relevantExpectedPart)
if (diffContents == expectedContents) true
else fail(s"Different diff content than expected. \\n Actual content: '$diffContents' \\n ExpectedRelevantContent: '$relevantExpectedPart'")
}
project ! TypecheckFilesReq(List(Left(fooFile), Left(barFile)))
expectMsg(VoidResponse)
asyncHelper.fishForMessage() {
case FullTypeCheckCompleteEvent => true
case _ => false
}
project ! RefactorReq(4321, RenameRefactorDesc("Renamed", barFile, 30, 30), false)
expectMsgPF() {
case RefactorDiffEffect(4321, RefactorType.Rename, diff) =>
val renamedFile = new File(barFile.getPath.replace("Bar", "Renamed"))
val barChanges = s"""|@@ -1,13 +0,0 @@
|-package org.example
|-
|-object Bar extends App {
|- case class Foo(bar: String, baz: Int)
|- object Bla {
|- val foo: Foo = Foo(
|- bar = "Bar",
|- baz = 123
|- )
|-
|- val fooUpd = foo.copy(bar = foo.bar.reverse)
|- }
|-}
|""".stripMargin
val fooChanges = s"""|@@ -30,3 +30,3 @@
| List(1, 2, 3).head + 2
|- val x = Bar.Bla
|+ val x = Renamed.Bla
| }
|""".stripMargin
val renamedChanges = s"""|@@ -0,0 +1,13 @@
|+package org.example
|+
|+object Renamed extends App {
|+ case class Foo(bar: String, baz: Int)
|+ object Bla {
|+ val foo: Foo = Foo(
|+ bar = "Bar",
|+ baz = 123
|+ )
|+
|+ val fooUpd = foo.copy(bar = foo.bar.reverse)
|+ }
|+}
|""".stripMargin
val changes = Seq(
(barFile.getPath, DeleteFile, barChanges),
(fooFile.getPath, ChangeContents, fooChanges),
(renamedFile.getPath, CreateFile, renamedChanges)
)
val expectedDiff = expectedDiffContent(changes)
val diffContent = diff.canon.readString()
diffContent should ===(expectedDiff)
}
val bazFile = sourceRoot / "org/example2/Baz.scala"
val toBeUnloaded = SourceFileInfo(EnsimeFile(sourceRoot / "org/example2/ToBeUnloaded.scala"))
val toBeUnloaded2 = SourceFileInfo(EnsimeFile(sourceRoot / "org/example/package.scala"))
project ! TypecheckFilesReq(List(Left(bazFile), Right(toBeUnloaded), Right(toBeUnloaded2)))
expectMsg(VoidResponse)
all(asyncHelper.receiveN(2)) should matchPattern {
case note: NewScalaNotesEvent =>
case FullTypeCheckCompleteEvent =>
}
project ! UnloadFileReq(toBeUnloaded)
expectMsg(VoidResponse)
project ! UnloadFileReq(toBeUnloaded2)
expectMsg(VoidResponse)
// file with warning has been unloaded
// `NewScalaNotesEvent` should now not appear when typechecking `bazFile`
project ! TypecheckFilesReq(List(Left(bazFile)))
expectMsg(VoidResponse)
asyncHelper.expectMsg(FullTypeCheckCompleteEvent)
asyncHelper.expectNoMsg(3 seconds)
}
}
}
}
}
| VlachJosef/ensime-server | core/src/it/scala/org/ensime/intg/BasicWorkflow.scala | Scala | gpl-3.0 | 17,824 |
package com.roundeights.foldout
import java.net.URLEncoder
import java.net.URL
/**
* UrlBuilder Companion
*/
private[foldout] object UrlBuilder {
/** Generates a query string from a list of tuples */
def toQueryString ( pairs: List[(_, _)] ): String = {
pairs.map( (pair) => "%s=%s".format(
URLEncoder.encode(pair._1.toString, "UTF-8"),
URLEncoder.encode(pair._2.toString, "UTF-8")
) )
.mkString("&")
}
    /** Generates a query string from a list of tuples */
def toQueryString ( pairs: (_, _)* ): String
= toQueryString( pairs.toList )
/** Generates a query string from a map of parameters */
def toQueryString ( pairs: Map[_, _] ): String
= toQueryString( pairs.toList )
}
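// A hedged example of the helpers above (keys and values are illustrative):
//   UrlBuilder.toQueryString("q" -> "couch db", "limit" -> 10)
// URL-encodes each key and value and yields "q=couch+db&limit=10".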
/**
* Constructs URLs for a base host
*/
private[foldout] class UrlBuilder (
private val host: String,
private val port: Int,
private val ssl: Boolean = false,
basePath: Option[String] = None
) {
/** The base path to attach to each request */
val rootPath = basePath.map {
"/" + _.dropWhile( _ == '/' ).reverse.dropWhile( _ == '/' ).reverse
}
/** Returns the path to use for making a request */
def buildPath ( path: Option[String] ): String = "%s/%s".format(
rootPath.getOrElse(""),
path.getOrElse("").dropWhile( _ == '/' )
)
/** Builds a new UrlBuilder with the given base path added */
def withBasePath( basePath: String )
= new UrlBuilder( host, port, ssl, Some( buildPath(Some(basePath)) ) )
/** Generates a URL with the given path and query parameters */
def url ( path: Option[String], query: List[(_, _)] ): String = {
val url = new URL(
if ( ssl ) "https" else "http",
host, port,
buildPath( path )
)
if ( query.length != 0 )
"%s?%s".format( url, UrlBuilder.toQueryString( query ) )
else
url.toString
}
/** Generates a URL with the given path and query parameters */
def url ( path: Option[String], query: Map[_, _] ): String
= url( path, query.toList )
/** Generates a URL with the given path and query parameters */
def url ( path: String, query: (_, _)* ): String
= url( Some(path), query.toList )
}
| Nycto/FoldOut | src/main/scala/FoldOut/UrlBuilder.scala | Scala | mit | 2,327 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.clustering
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.{Vectors, Matrices}
import org.apache.spark.mllib.stat.distribution.MultivariateGaussian
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.util.Utils
class GaussianMixtureSuite extends SparkFunSuite with MLlibTestSparkContext {
test("single cluster") {
val data = sc.parallelize(Array(
Vectors.dense(6.0, 9.0),
Vectors.dense(5.0, 10.0),
Vectors.dense(4.0, 11.0)
))
// expectations
val Ew = 1.0
val Emu = Vectors.dense(5.0, 10.0)
val Esigma = Matrices.dense(2, 2, Array(2.0 / 3.0, -2.0 / 3.0, -2.0 / 3.0, 2.0 / 3.0))
val seeds = Array(314589, 29032897, 50181, 494821, 4660)
seeds.foreach { seed =>
val gmm = new GaussianMixture().setK(1).setSeed(seed).run(data)
assert(gmm.weights(0) ~== Ew absTol 1E-5)
assert(gmm.gaussians(0).mu ~== Emu absTol 1E-5)
assert(gmm.gaussians(0).sigma ~== Esigma absTol 1E-5)
}
}
test("two clusters") {
val data = sc.parallelize(GaussianTestData.data)
// we set an initial gaussian to induce expected results
val initialGmm = new GaussianMixtureModel(
Array(0.5, 0.5),
Array(
new MultivariateGaussian(Vectors.dense(-1.0), Matrices.dense(1, 1, Array(1.0))),
new MultivariateGaussian(Vectors.dense(1.0), Matrices.dense(1, 1, Array(1.0)))
)
)
val Ew = Array(1.0 / 3.0, 2.0 / 3.0)
val Emu = Array(Vectors.dense(-4.3673), Vectors.dense(5.1604))
val Esigma = Array(Matrices.dense(1, 1, Array(1.1098)), Matrices.dense(1, 1, Array(0.86644)))
val gmm = new GaussianMixture()
.setK(2)
.setInitialModel(initialGmm)
.run(data)
assert(gmm.weights(0) ~== Ew(0) absTol 1E-3)
assert(gmm.weights(1) ~== Ew(1) absTol 1E-3)
assert(gmm.gaussians(0).mu ~== Emu(0) absTol 1E-3)
assert(gmm.gaussians(1).mu ~== Emu(1) absTol 1E-3)
assert(gmm.gaussians(0).sigma ~== Esigma(0) absTol 1E-3)
assert(gmm.gaussians(1).sigma ~== Esigma(1) absTol 1E-3)
}
test("single cluster with sparse data") {
val data = sc.parallelize(Array(
Vectors.sparse(3, Array(0, 2), Array(4.0, 2.0)),
Vectors.sparse(3, Array(0, 2), Array(2.0, 4.0)),
Vectors.sparse(3, Array(1), Array(6.0))
))
val Ew = 1.0
val Emu = Vectors.dense(2.0, 2.0, 2.0)
val Esigma = Matrices.dense(3, 3,
Array(8.0 / 3.0, -4.0, 4.0 / 3.0, -4.0, 8.0, -4.0, 4.0 / 3.0, -4.0, 8.0 / 3.0)
)
val seeds = Array(42, 1994, 27, 11, 0)
seeds.foreach { seed =>
val gmm = new GaussianMixture().setK(1).setSeed(seed).run(data)
assert(gmm.weights(0) ~== Ew absTol 1E-5)
assert(gmm.gaussians(0).mu ~== Emu absTol 1E-5)
assert(gmm.gaussians(0).sigma ~== Esigma absTol 1E-5)
}
}
test("two clusters with sparse data") {
val data = sc.parallelize(GaussianTestData.data)
val sparseData = data.map(point => Vectors.sparse(1, Array(0), point.toArray))
// we set an initial gaussian to induce expected results
val initialGmm = new GaussianMixtureModel(
Array(0.5, 0.5),
Array(
new MultivariateGaussian(Vectors.dense(-1.0), Matrices.dense(1, 1, Array(1.0))),
new MultivariateGaussian(Vectors.dense(1.0), Matrices.dense(1, 1, Array(1.0)))
)
)
val Ew = Array(1.0 / 3.0, 2.0 / 3.0)
val Emu = Array(Vectors.dense(-4.3673), Vectors.dense(5.1604))
val Esigma = Array(Matrices.dense(1, 1, Array(1.1098)), Matrices.dense(1, 1, Array(0.86644)))
val sparseGMM = new GaussianMixture()
.setK(2)
.setInitialModel(initialGmm)
        .run(sparseData)
assert(sparseGMM.weights(0) ~== Ew(0) absTol 1E-3)
assert(sparseGMM.weights(1) ~== Ew(1) absTol 1E-3)
assert(sparseGMM.gaussians(0).mu ~== Emu(0) absTol 1E-3)
assert(sparseGMM.gaussians(1).mu ~== Emu(1) absTol 1E-3)
assert(sparseGMM.gaussians(0).sigma ~== Esigma(0) absTol 1E-3)
assert(sparseGMM.gaussians(1).sigma ~== Esigma(1) absTol 1E-3)
}
test("model save / load") {
val data = sc.parallelize(GaussianTestData.data)
val gmm = new GaussianMixture().setK(2).setSeed(0).run(data)
val tempDir = Utils.createTempDir()
val path = tempDir.toURI.toString
try {
gmm.save(sc, path)
// TODO: GaussianMixtureModel should implement equals/hashcode directly.
val sameModel = GaussianMixtureModel.load(sc, path)
assert(sameModel.k === gmm.k)
(0 until sameModel.k).foreach { i =>
assert(sameModel.gaussians(i).mu === gmm.gaussians(i).mu)
assert(sameModel.gaussians(i).sigma === gmm.gaussians(i).sigma)
}
} finally {
Utils.deleteRecursively(tempDir)
}
}
object GaussianTestData {
val data = Array(
Vectors.dense(-5.1971), Vectors.dense(-2.5359), Vectors.dense(-3.8220),
Vectors.dense(-5.2211), Vectors.dense(-5.0602), Vectors.dense( 4.7118),
Vectors.dense( 6.8989), Vectors.dense( 3.4592), Vectors.dense( 4.6322),
Vectors.dense( 5.7048), Vectors.dense( 4.6567), Vectors.dense( 5.5026),
Vectors.dense( 4.5605), Vectors.dense( 5.2043), Vectors.dense( 6.2734)
)
}
}
| andrewor14/iolap | mllib/src/test/scala/org/apache/spark/mllib/clustering/GaussianMixtureSuite.scala | Scala | apache-2.0 | 6,055 |
package lila.study
import akka.actor._
import akka.pattern.ask
import lila.hub.actorApi.HasUserId
import lila.notify.InvitedToStudy.InvitedBy
import lila.notify.{ InvitedToStudy, NotifyApi, Notification }
import lila.relation.RelationApi
import lila.user.{ User, UserRepo }
import makeTimeout.short
import org.joda.time.DateTime
private final class StudyNotifier(
netBaseUrl: String,
notifyApi: NotifyApi,
relationApi: RelationApi) {
def invite(study: Study, invited: User, socket: ActorRef) =
canNotify(study.ownerId, invited) flatMap {
_ ?? {
socket ? HasUserId(invited.id) mapTo manifest[Boolean] map { isPresent =>
study.owner.ifFalse(isPresent) foreach { owner =>
val notificationContent = InvitedToStudy(InvitedToStudy.InvitedBy(owner.id), InvitedToStudy.StudyName(study.name), InvitedToStudy.StudyId(study.id))
val notification = Notification.make(Notification.Notifies(invited.id), notificationContent)
notifyApi.addNotification(notification)
}
}
}
}
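  // Trolls may only notify users who already follow them; everyone else may
  // notify as long as the invitee has not blocked them.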
private def canNotify(fromId: User.ID, to: User): Fu[Boolean] =
UserRepo.isTroll(fromId) flatMap {
case true => relationApi.fetchFollows(to.id, fromId)
case false => !relationApi.fetchBlocks(to.id, fromId)
}
private def studyUrl(study: Study) = s"$netBaseUrl/study/${study.id}"
}
| clarkerubber/lila | modules/study/src/main/StudyNotifier.scala | Scala | agpl-3.0 | 1,376 |
package controllers
import java.util.concurrent.TimeUnit
import org.specs2.mutable._
import play.api.libs.json.Json
import play.api.test._
import play.api.test.Helpers._
import scala.concurrent.Await
import scala.concurrent.duration.FiniteDuration
class GroupIT extends Specification {
val timeout: FiniteDuration = FiniteDuration(5, TimeUnit.SECONDS)
var notification = new Array[String](1)
notification(0) = "DummyNotification";
val reqObject = Json.obj(
"uID" -> "demoIT",
"name" -> "TestID",
"member" -> "Tony Starks",
"createdBy" -> "Chuck Norris",
"notifications" -> notification)
"GroupController" should {
"insert a valid json" in {
running(FakeApplication()) {
val request = FakeRequest.apply(POST, "/admin/group").withJsonBody(reqObject)
val response = route(request)
response.isDefined mustEqual true
val result = Await.result(response.get, timeout)
result.header.status must equalTo(CREATED)
}
}
"remove a group (create - precondition)" in {
running(FakeApplication()) {
val request = FakeRequest.apply(DELETE, "/admin/group/demoIT")
val response = route(request)
response.isDefined mustEqual true
}
}
}
} | HiP-App/HiPBackend | test/controllers/GroupIT.scala | Scala | apache-2.0 | 1,265 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.sources
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.sources.v2.writer.{BatchWriteSupport, DataWriter, DataWriterFactory, WriterCommitMessage}
import org.apache.spark.sql.sources.v2.writer.streaming.{StreamingDataWriterFactory, StreamingWriteSupport}
/**
* A [[BatchWriteSupport]] used to hook V2 stream writers into a microbatch plan. It implements
* the non-streaming interface, forwarding the epoch ID determined at construction to a wrapped
* streaming write support.
*/
class MicroBatchWritSupport(epochId: Long, val writeSupport: StreamingWriteSupport)
  extends BatchWriteSupport {
  override def commit(messages: Array[WriterCommitMessage]): Unit = {
    writeSupport.commit(epochId, messages)
  }
  override def abort(messages: Array[WriterCommitMessage]): Unit = {
    writeSupport.abort(epochId, messages)
  }
  override def createBatchWriterFactory(): DataWriterFactory = {
    new MicroBatchWriterFactory(epochId, writeSupport.createStreamingWriterFactory())
  }
}
}
class MicroBatchWriterFactory(epochId: Long, streamingWriterFactory: StreamingDataWriterFactory)
extends DataWriterFactory {
override def createWriter(partitionId: Int, taskId: Long): DataWriter[InternalRow] = {
streamingWriterFactory.createWriter(partitionId, taskId, epochId)
}
}
| michalsenkyr/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/sources/MicroBatchWritSupport.scala | Scala | apache-2.0 | 2,164 |
/*
* Copyright 2011-2014 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.ebpi.yaidom.queryapitests.dom
import java.{ util => jutil }
import org.junit.runner.RunWith
import org.xml.sax.EntityResolver
import org.xml.sax.InputSource
import org.scalatest.junit.JUnitRunner
import nl.ebpi.yaidom.convert
import nl.ebpi.yaidom.core.Scope
import nl.ebpi.yaidom.dom.DomElem
import nl.ebpi.yaidom.queryapitests.AbstractScopedElemLikeQueryTest
import nl.ebpi.yaidom.resolved
import javax.xml.parsers.DocumentBuilder
import javax.xml.parsers.DocumentBuilderFactory
/**
* Query test case for DOM wrapper elements.
*
* @author Chris de Vreeze
*/
@RunWith(classOf[JUnitRunner])
class ScopedElemLikeQueryTest extends AbstractScopedElemLikeQueryTest {
private val logger: jutil.logging.Logger = jutil.logging.Logger.getLogger("nl.ebpi.yaidom.queryapitests.dom")
final type E = DomElem
protected final val xsdSchemaElem: DomElem = {
val dbf = DocumentBuilderFactory.newInstance
def createDocumentBuilder(documentBuilderFactory: DocumentBuilderFactory): DocumentBuilder = {
val db = documentBuilderFactory.newDocumentBuilder()
db.setEntityResolver(new EntityResolver {
def resolveEntity(publicId: String, systemId: String): InputSource = {
logger.info(s"Trying to resolve entity. Public ID: $publicId. System ID: $systemId")
if (systemId.endsWith("/XMLSchema.dtd") || systemId.endsWith("\\\\XMLSchema.dtd") || (systemId == "XMLSchema.dtd")) {
new InputSource(classOf[ScopedElemLikeQueryTest].getResourceAsStream("/nl/ebpi/yaidom/queryapitests/XMLSchema.dtd"))
} else if (systemId.endsWith("/datatypes.dtd") || systemId.endsWith("\\\\datatypes.dtd") || (systemId == "datatypes.dtd")) {
new InputSource(classOf[ScopedElemLikeQueryTest].getResourceAsStream("/nl/ebpi/yaidom/queryapitests/datatypes.dtd"))
} else {
// Default behaviour
null
}
}
})
db
}
val is = classOf[ScopedElemLikeQueryTest].getResourceAsStream("/nl/ebpi/yaidom/queryapitests/XMLSchema.xsd")
val domDoc = createDocumentBuilder(dbf).parse(is)
new DomElem(domDoc.getDocumentElement())
}
protected final def toResolvedElem(elem: E): resolved.Elem =
resolved.Elem(convert.DomConversions.convertToElem(elem.wrappedNode, xsdSchemaElem.scope))
}
| EBPI/yaidom | src/test/scala/nl/ebpi/yaidom/queryapitests/dom/ScopedElemLikeQueryTest.scala | Scala | apache-2.0 | 2,915 |
package moe.lymia.princess
import java.nio.file.{Path, Paths}
object NativeImageData {
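  // Classes whose fields or methods are touched from JNI by SWT (plus this
  // project's own native bindings), listed per target platform; presumably
  // consumed when assembling the GraalVM native-image configuration below.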
private val nativeTypes = Seq(
"moe.lymia.princess.native.NativeException",
)
private val windowsTypes = Seq(
"org.eclipse.swt.internal.ole.win32.GUID",
"org.eclipse.swt.internal.win32.ACCEL",
"org.eclipse.swt.internal.win32.ACTCTX",
"org.eclipse.swt.internal.win32.BITMAP",
"org.eclipse.swt.internal.win32.BITMAPINFOHEADER",
"org.eclipse.swt.internal.win32.BLENDFUNCTION",
"org.eclipse.swt.internal.win32.BP_PAINTPARAMS",
"org.eclipse.swt.internal.win32.BUTTON_IMAGELIST",
"org.eclipse.swt.internal.win32.CANDIDATEFORM",
"org.eclipse.swt.internal.win32.CHOOSECOLOR",
"org.eclipse.swt.internal.win32.CHOOSEFONT",
"org.eclipse.swt.internal.win32.CIDA",
"org.eclipse.swt.internal.win32.COMBOBOXINFO",
"org.eclipse.swt.internal.win32.COMPOSITIONFORM",
"org.eclipse.swt.internal.win32.CREATESTRUCT",
"org.eclipse.swt.internal.win32.DEVMODE",
"org.eclipse.swt.internal.win32.DIBSECTION",
"org.eclipse.swt.internal.win32.DOCHOSTUIINFO",
"org.eclipse.swt.internal.win32.DOCINFO",
"org.eclipse.swt.internal.win32.DRAWITEMSTRUCT",
"org.eclipse.swt.internal.win32.DROPFILES",
"org.eclipse.swt.internal.win32.EMR",
"org.eclipse.swt.internal.win32.EMREXTCREATEFONTINDIRECTW",
"org.eclipse.swt.internal.win32.EXTLOGFONTW",
"org.eclipse.swt.internal.win32.FLICK_DATA",
"org.eclipse.swt.internal.win32.FLICK_POINT",
"org.eclipse.swt.internal.win32.GCP_RESULTS",
"org.eclipse.swt.internal.win32.GESTURECONFIG",
"org.eclipse.swt.internal.win32.GESTUREINFO",
"org.eclipse.swt.internal.win32.GRADIENT_RECT",
"org.eclipse.swt.internal.win32.GUITHREADINFO",
"org.eclipse.swt.internal.win32.HDHITTESTINFO",
"org.eclipse.swt.internal.win32.HDITEM",
"org.eclipse.swt.internal.win32.HDLAYOUT",
"org.eclipse.swt.internal.win32.HELPINFO",
"org.eclipse.swt.internal.win32.HIGHCONTRAST",
"org.eclipse.swt.internal.win32.ICONINFO",
"org.eclipse.swt.internal.win32.INITCOMMONCONTROLSEX",
"org.eclipse.swt.internal.win32.INPUT",
"org.eclipse.swt.internal.win32.KEYBDINPUT",
"org.eclipse.swt.internal.win32.LITEM",
"org.eclipse.swt.internal.win32.LOGBRUSH",
"org.eclipse.swt.internal.win32.LOGFONT",
"org.eclipse.swt.internal.win32.LRESULT",
"org.eclipse.swt.internal.win32.LVCOLUMN",
"org.eclipse.swt.internal.win32.LVHITTESTINFO",
"org.eclipse.swt.internal.win32.LVINSERTMARK",
"org.eclipse.swt.internal.win32.LVITEM",
"org.eclipse.swt.internal.win32.MARGINS",
"org.eclipse.swt.internal.win32.MCHITTESTINFO",
"org.eclipse.swt.internal.win32.MEASUREITEMSTRUCT",
"org.eclipse.swt.internal.win32.MENUBARINFO",
"org.eclipse.swt.internal.win32.MENUINFO",
"org.eclipse.swt.internal.win32.MENUITEMINFO",
"org.eclipse.swt.internal.win32.MINMAXINFO",
"org.eclipse.swt.internal.win32.MONITORINFO",
"org.eclipse.swt.internal.win32.MOUSEINPUT",
"org.eclipse.swt.internal.win32.MSG",
"org.eclipse.swt.internal.win32.NMCUSTOMDRAW",
"org.eclipse.swt.internal.win32.NMHDR",
"org.eclipse.swt.internal.win32.NMHEADER",
"org.eclipse.swt.internal.win32.NMLINK",
"org.eclipse.swt.internal.win32.NMLISTVIEW",
"org.eclipse.swt.internal.win32.NMLVCUSTOMDRAW",
"org.eclipse.swt.internal.win32.NMLVDISPINFO",
"org.eclipse.swt.internal.win32.NMLVFINDITEM",
"org.eclipse.swt.internal.win32.NMLVODSTATECHANGE",
"org.eclipse.swt.internal.win32.NMREBARCHEVRON",
"org.eclipse.swt.internal.win32.NMREBARCHILDSIZE",
"org.eclipse.swt.internal.win32.NMTBCUSTOMDRAW",
"org.eclipse.swt.internal.win32.NMTBHOTITEM",
"org.eclipse.swt.internal.win32.NMTOOLBAR",
"org.eclipse.swt.internal.win32.NMTREEVIEW",
"org.eclipse.swt.internal.win32.NMTTCUSTOMDRAW",
"org.eclipse.swt.internal.win32.NMTTDISPINFO",
"org.eclipse.swt.internal.win32.NMTVCUSTOMDRAW",
"org.eclipse.swt.internal.win32.NMTVDISPINFO",
"org.eclipse.swt.internal.win32.NMTVITEMCHANGE",
"org.eclipse.swt.internal.win32.NMUPDOWN",
"org.eclipse.swt.internal.win32.NONCLIENTMETRICS",
"org.eclipse.swt.internal.win32.NOTIFYICONDATA",
"org.eclipse.swt.internal.win32.OS",
"org.eclipse.swt.internal.win32.OUTLINETEXTMETRIC",
"org.eclipse.swt.internal.win32.PAINTSTRUCT",
"org.eclipse.swt.internal.win32.POINT",
"org.eclipse.swt.internal.win32.PRINTDLG",
"org.eclipse.swt.internal.win32.PROCESS_INFORMATION",
"org.eclipse.swt.internal.win32.PROPERTYKEY",
"org.eclipse.swt.internal.win32.REBARBANDINFO",
"org.eclipse.swt.internal.win32.RECT",
"org.eclipse.swt.internal.win32.SAFEARRAY",
"org.eclipse.swt.internal.win32.SAFEARRAYBOUND",
"org.eclipse.swt.internal.win32.SCRIPT_ANALYSIS",
"org.eclipse.swt.internal.win32.SCRIPT_CONTROL",
"org.eclipse.swt.internal.win32.SCRIPT_FONTPROPERTIES",
"org.eclipse.swt.internal.win32.SCRIPT_ITEM",
"org.eclipse.swt.internal.win32.SCRIPT_LOGATTR",
"org.eclipse.swt.internal.win32.SCRIPT_PROPERTIES",
"org.eclipse.swt.internal.win32.SCRIPT_STATE",
"org.eclipse.swt.internal.win32.SCROLLBARINFO",
"org.eclipse.swt.internal.win32.SCROLLINFO",
"org.eclipse.swt.internal.win32.SHDRAGIMAGE",
"org.eclipse.swt.internal.win32.SHELLEXECUTEINFO",
"org.eclipse.swt.internal.win32.SHFILEINFO",
"org.eclipse.swt.internal.win32.SIZE",
"org.eclipse.swt.internal.win32.STARTUPINFO",
"org.eclipse.swt.internal.win32.SYSTEMTIME",
"org.eclipse.swt.internal.win32.TBBUTTON",
"org.eclipse.swt.internal.win32.TBBUTTONINFO",
"org.eclipse.swt.internal.win32.TCHAR",
"org.eclipse.swt.internal.win32.TCHITTESTINFO",
"org.eclipse.swt.internal.win32.TCITEM",
"org.eclipse.swt.internal.win32.TEXTMETRIC",
"org.eclipse.swt.internal.win32.TF_DA_COLOR",
"org.eclipse.swt.internal.win32.TF_DISPLAYATTRIBUTE",
"org.eclipse.swt.internal.win32.TOOLINFO",
"org.eclipse.swt.internal.win32.TOUCHINPUT",
"org.eclipse.swt.internal.win32.TRACKMOUSEEVENT",
"org.eclipse.swt.internal.win32.TRIVERTEX",
"org.eclipse.swt.internal.win32.TVHITTESTINFO",
"org.eclipse.swt.internal.win32.TVINSERTSTRUCT",
"org.eclipse.swt.internal.win32.TVITEM",
"org.eclipse.swt.internal.win32.TVSORTCB",
"org.eclipse.swt.internal.win32.UDACCEL",
"org.eclipse.swt.internal.win32.WINDOWPLACEMENT",
"org.eclipse.swt.internal.win32.WINDOWPOS",
"org.eclipse.swt.internal.win32.WNDCLASS",
"org.eclipse.swt.widgets.Display",
)
private val macosTypes = Seq(
"org.eclipse.swt.internal.cocoa.NSOperatingSystemVersion",
"org.eclipse.swt.internal.cocoa.NSPoint",
"org.eclipse.swt.internal.cocoa.NSRect",
"org.eclipse.swt.internal.cocoa.NSSize",
"org.eclipse.swt.internal.cocoa.objc_super",
"org.eclipse.swt.internal.cocoa.OS",
"org.eclipse.swt.widgets.Display",
)
private val linuxTypes = Seq(
"org.eclipse.swt.accessibility.AccessibleObject",
"org.eclipse.swt.graphics.Device",
"org.eclipse.swt.internal.accessibility.gtk.AtkActionIface",
"org.eclipse.swt.internal.accessibility.gtk.AtkAttribute",
"org.eclipse.swt.internal.accessibility.gtk.AtkComponentIface",
"org.eclipse.swt.internal.accessibility.gtk.AtkEditableTextIface",
"org.eclipse.swt.internal.accessibility.gtk.AtkHypertextIface",
"org.eclipse.swt.internal.accessibility.gtk.AtkObjectClass",
"org.eclipse.swt.internal.accessibility.gtk.AtkSelectionIface",
"org.eclipse.swt.internal.accessibility.gtk.AtkTableIface",
"org.eclipse.swt.internal.accessibility.gtk.AtkTextIface",
"org.eclipse.swt.internal.accessibility.gtk.AtkTextRange",
"org.eclipse.swt.internal.accessibility.gtk.AtkTextRectangle",
"org.eclipse.swt.internal.accessibility.gtk.AtkValueIface",
"org.eclipse.swt.internal.cairo.cairo_path_data_t",
"org.eclipse.swt.internal.cairo.cairo_path_t",
"org.eclipse.swt.internal.cairo.cairo_rectangle_int_t",
"org.eclipse.swt.internal.gtk3.GdkEvent",
"org.eclipse.swt.internal.gtk3.GdkEventButton",
"org.eclipse.swt.internal.gtk3.GdkEventCrossing",
"org.eclipse.swt.internal.gtk3.GdkEventFocus",
"org.eclipse.swt.internal.gtk3.GdkEventKey",
"org.eclipse.swt.internal.gtk3.GdkEventMotion",
"org.eclipse.swt.internal.gtk3.GdkEventWindowState",
"org.eclipse.swt.internal.gtk3.GdkGeometry",
"org.eclipse.swt.internal.gtk.GdkKeymapKey",
"org.eclipse.swt.internal.gtk.GdkRectangle",
"org.eclipse.swt.internal.gtk.GdkRGBA",
"org.eclipse.swt.internal.gtk3.GdkWindowAttr",
"org.eclipse.swt.internal.gtk.GObjectClass",
"org.eclipse.swt.internal.gtk.GtkAdjustment",
"org.eclipse.swt.internal.gtk.GtkAllocation",
"org.eclipse.swt.internal.gtk.GtkBorder",
"org.eclipse.swt.internal.gtk.GtkCellRendererClass",
"org.eclipse.swt.internal.gtk.GtkRequisition",
"org.eclipse.swt.internal.gtk3.GtkTargetEntry",
"org.eclipse.swt.internal.gtk.GtkWidgetClass",
"org.eclipse.swt.internal.gtk.GTypeInfo",
"org.eclipse.swt.internal.gtk.PangoAttrColor",
"org.eclipse.swt.internal.gtk.PangoAttribute",
"org.eclipse.swt.internal.gtk.PangoAttrInt",
"org.eclipse.swt.internal.gtk.PangoItem",
"org.eclipse.swt.internal.gtk.PangoLayoutLine",
"org.eclipse.swt.internal.gtk.PangoLayoutRun",
"org.eclipse.swt.internal.gtk.PangoLogAttr",
"org.eclipse.swt.internal.gtk.PangoRectangle",
"org.eclipse.swt.internal.gtk.XAnyEvent",
"org.eclipse.swt.internal.gtk.XEvent",
"org.eclipse.swt.internal.gtk.XExposeEvent",
"org.eclipse.swt.internal.gtk.XFocusChangeEvent",
"org.eclipse.swt.widgets.Control",
"org.eclipse.swt.widgets.Display",
"org.eclipse.swt.widgets.Dialog",
"org.eclipse.swt.widgets.ToolBar",
"org.eclipse.swt.internal.gtk.OS",
"org.eclipse.swt.internal.SessionManagerDBus",
)
val osName: String = Platform.platform match {
case Platform.Windows => "windows"
case Platform.MacOS => "macos"
case Platform.Linux => "linux"
}
val jniTypeNames: Seq[String] = Platform.platform match {
case Platform.Windows => windowsTypes ++ nativeTypes
case Platform.MacOS => macosTypes ++ nativeTypes
case Platform.Linux => linuxTypes ++ nativeTypes
}
val nativeImageConfigPath: Path =
Paths.get("native-image-configs").resolve(osName)
}
| Lymia/PrincessEdit | modules/princess-edit/src/main/scala/moe/lymia/princess/NativeImageData.scala | Scala | mit | 10,410 |
package pl.newicom.dddd.process
import akka.actor.{ActorRef, Props}
import pl.newicom.dddd.actor.CreationSupport
object ReceptorSupport {
/**
* Responsible of creating [[Receptor]] using provided [[ReceptorConfig]]
*/
type ReceptorFactory = (ReceptorConfig) => Receptor
def registerReceptor(receptorConfig: ReceptorConfig)(implicit cs: CreationSupport, rf: ReceptorFactory): ActorRef = {
val receptorProps = Props[Receptor](rf(receptorConfig))
// TODO fix actor name
cs.createChild(receptorProps, s"Receptor-${receptorConfig.stimuliSource.officeName}")
}
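  // A hedged wiring sketch; the receptor class and config value are
  // illustrative only:
  //   implicit val factory: ReceptorFactory = config => new MyReceptor(config)
  //   registerReceptor(myReceptorConfig)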
}
| ahjohannessen/akka-ddd | akka-ddd-core/src/main/scala/pl/newicom/dddd/process/ReceptorSupport.scala | Scala | mit | 588 |
/*
* Copyright 2013 TeamNexus
*
* TeamNexus Licenses this file to you under the MIT License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://opensource.org/licenses/mit-license.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License
*/
package com.nexus.errorhandling
/**
* No description given
*
* @author jk-5
*/
class ErrorReportCategoryEntry(private var name: String, private var element: Any) {
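  // Normalise problematic values up front so rendering the report never has
  // to deal with nulls or raw exceptions.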
element match{
case null => this.element = "--NULL--"
case t: Throwable => this.element = "--ERROR-- " + t.getClass.getSimpleName + ": " + t.getMessage
case _ =>
}
def getName = this.name
def getElement = this.element
}
| crvidya/nexus-scala | src/main/scala/com/nexus/errorhandling/ErrorReportCategoryEntry.scala | Scala | mit | 1,017 |
package net.fluxo.blue.downloader
import net.fluxo.blue.Processes
import java.io.File
import org.apache.log4j.Level
import org.apache.commons.exec.{ExecuteWatchdog, DefaultExecuteResultHandler, CommandLine, DefaultExecutor}
/**
* User: Ronald Kurniawan (viper)
* Date: 30/12/13.
*/
class DownloadProcess(downloadObj: TDownload) {
private var _start: Long = 0
private var _end: Long = 0
private val _tdp = new TDownloadProcess
private var _thread: Thread = _
/**
* This class only has so long to life, define the start and end at the beginning
*/
def start() {
checkWorkingDirectory()
_start = System.currentTimeMillis()
if (downloadObj.Priority) {
_end = _start + Processes.secondsToMillis(225)
} else {
_end = _start + Processes.secondsToMillis(75)
}
// now we start the downloading process
_thread = new Thread(_tdp)
_thread.start()
}
def getRemainingTime: Int = {
_tdp computeRemainingTime
}
def stop() {
// forcibly destroy this process...
if (_thread != null) _tdp.tryStop()
}
def checkWorkingDirectory(): String = {
val dir = new File(downloadObj.Name.getOrElse("fbdl-defaultdir-" + _start))
var retVal: String = ""
if (!dir.exists()) {
if (!dir.mkdir()) {
Processes.writeLog("Failed to create download directory: " + dir.getAbsolutePath, Level.ERROR)
} else {
retVal = dir.getAbsolutePath
}
} else if (dir.exists() && !dir.isDirectory) {
Processes.writeLog("Failed to create download directory: " + dir.getAbsolutePath + ", because it's already exists as a filename.", Level.ERROR)
} else {
retVal = dir.getAbsolutePath
}
retVal
}
def getWorkingDirectory: File = {
val dir = new File(downloadObj.Name.getOrElse("fbdl-defaultdir-" + _start))
if (dir.exists() && !dir.isDirectory) {
return null
}
dir
}
class TDownloadProcess extends Runnable {
private var _isRunning = true//System.currentTimeMillis() < _end
private val executor = new DefaultExecutor
private val resultHandler = new DefaultExecuteResultHandler
private val executeWatchdog = new ExecuteWatchdog(computeRunningTime + 10)
override def run() {
val dir = getWorkingDirectory
var execResult = -100
if (dir != null) executor.setWorkingDirectory(dir)
executor.setWatchdog(executeWatchdog)
//executor.execute(buildCommandLine, resultHandler)
//execResult = resultHandler.getExitValue
System.out.println(downloadObj.Name.getOrElse("") + ": start " + _start + "; end " + _end + "; current " + System.currentTimeMillis())
try {
while (_isRunning) {
if (execResult > -1) {
// that means the process has stopped, any error? we log it...
val exception = resultHandler.getException
if (exception != null) {
Processes.writeLog("Download process with ID " + downloadObj.Id + " and URL " + downloadObj.Url +
" has exited with error ", Level.ERROR)
Processes.writeLog(exception.getMessage + " caused by " + exception.getCause.getMessage, Level.ERROR)
Processes.writeLog(Processes.stackTraceToString(exception), Level.ERROR)
}
_isRunning = false
}
Thread.interrupted()
Thread.sleep(Processes.secondsToMillis(3))
}
executor.getWatchdog.destroyProcess()
} catch {
case ie: InterruptedException =>
_isRunning = false
}
}
def computeRunningTime: Int = {
val rt = (_end - _start).toInt
rt / 1000
}
def computeRemainingTime: Int = {
val rt = (_end - System.currentTimeMillis()).toInt
rt / 1000
}
def tryStop() {
executeWatchdog.destroyProcess()
_isRunning = false
}
private def buildCommandLine: CommandLine = {
val cmdLine = new CommandLine("/usr/bin/aria2c")
//cmdLine.addArgument()
cmdLine
}
}
}
| fluxodesign/FluxoBlue | src/main/scala/net/fluxo/blue/downloader/DownloadProcess.scala | Scala | lgpl-3.0 | 3,745 |
package com.sksamuel.elastic4s
import org.elasticsearch.action.index.{IndexRequestBuilder, IndexResponse}
import org.elasticsearch.action.percolate.{PercolateRequestBuilder, PercolateResponse}
import org.elasticsearch.client.Client
import org.elasticsearch.common.xcontent.{XContentHelper, XContentBuilder, XContentFactory}
import org.elasticsearch.percolator.PercolatorService
import scala.collection.mutable.ListBuffer
import scala.concurrent.Future
import scala.language.implicitConversions
/** @author Stephen Samuel */
trait PercolateDsl extends QueryDsl {
@deprecated("Use the `register id X into Y` syntax", "1.4.1")
implicit def any2register(id: AnyVal): RegisterExpectsIndexImplicit = new RegisterExpectsIndexImplicit(id.toString)
@deprecated("Use the `register id X into Y` syntax", "1.4.1")
implicit def string2register(id: String): RegisterExpectsIndexImplicit = new RegisterExpectsIndexImplicit(id)
@deprecated("Use the `percolate in X`", "1.4.1")
implicit def string2percolate(index: String): PercolateDefinitionImplicit = new
PercolateDefinitionImplicit(IndexesTypes(index))
class PercolateDefinitionImplicit(indexType: IndexesTypes) extends PercolateDefinition(indexType) {
@deprecated("Use the percolate in X", "1.4.1")
override def doc(fields: (String, Any)*): PercolateDefinition = super.doc(fields: _*)
@deprecated("Use the percolate in X", "1.4.1")
override def doc(fields: Map[String, Any]): PercolateDefinition = super.doc(fields)
@deprecated("Use the percolate in X", "1.4.1")
override def query(string: String): PercolateDefinition = super.query(string)
@deprecated("Use the percolate in X", "1.4.1")
override def query(block: => QueryDefinition): PercolateDefinition = super.query(block)
}
class RegisterExpectsIndex(id: String) {
def into(index: String) = new RegisterDefinition(index, id)
}
class RegisterExpectsIndexImplicit(id: String) {
@deprecated("Use the register id X into Y syntax", "1.4.0")
def into(index: String) = new RegisterDefinition(index, id)
}
implicit object RegisterDefinitionExecutable extends Executable[RegisterDefinition, IndexResponse, IndexResponse] {
override def apply(c: Client, t: RegisterDefinition): Future[IndexResponse] = {
injectFuture(c.index(t.build, _))
}
}
implicit object PercolateDefinitionExecutable
extends Executable[PercolateDefinition, PercolateResponse, PercolateResponse] {
override def apply(c: Client, t: PercolateDefinition): Future[PercolateResponse] = {
injectFuture(c.percolate(t.build, _))
}
}
implicit object RegisterDefinitionShow extends Show[RegisterDefinition] {
override def show(f: RegisterDefinition): String = XContentHelper.convertToJson(f.build.source, true, true)
}
implicit class RegisterDefinitionShowOps(f: RegisterDefinition) {
def show: String = RegisterDefinitionShow.show(f)
}
implicit object PercolateDefinitionShow extends Show[PercolateDefinition] {
override def show(f: PercolateDefinition): String = XContentHelper.convertToJson(f.build.source, true, true)
}
implicit class PercolateDefinitionShowOps(f: PercolateDefinition) {
def show: String = PercolateDefinitionShow.show(f)
}
}
class RegisterDefinition(index: String, id: String) extends BulkCompatibleDefinition {
private[this] var _query: QueryDefinition = _
private val _fields = new ListBuffer[(String, Any)]
def build = {
val source = XContentFactory.jsonBuilder().startObject().field("query", _query.builder)
for ( tuple <- _fields ) {
source.field(tuple._1, tuple._2)
}
source.endObject()
new IndexRequestBuilder(ProxyClients.client).setIndex(index)
.setType(PercolatorService.TYPE_NAME).setId(id).setRefresh(true)
.setSource(source).request
}
def query(block: => QueryDefinition): RegisterDefinition = {
_query = block
this
}
def query(string: String) = {
_query = new QueryStringQueryDefinition(string)
this
}
def fields(map: Map[String, Any]): RegisterDefinition = fields(map.toList)
def fields(_fields: (String, Any)*): RegisterDefinition = fields(_fields.toIterable)
def fields(iterable: Iterable[(String, Any)]) = {
this._fields ++= iterable
this
}
}
class PercolateDefinition(indexType: IndexesTypes) {
private val _fields = new ListBuffer[(String, Any)]
private var _rawDoc: Option[String] = None
private[this] var _query: QueryDefinition = _
def build = new PercolateRequestBuilder(ProxyClients.client)
.setSource(_doc)
.setIndices(indexType.index)
.setDocumentType(indexType.types.head)
.request()
private[elastic4s] def _doc: XContentBuilder = {
val source = XContentFactory.jsonBuilder().startObject()
if (_query != null)
source.field("query", _query.builder)
_rawDoc match {
case Some(doc) =>
source.rawField("doc", doc.getBytes("UTF-8"))
case None =>
source.startObject("doc")
for ( tuple <- _fields ) {
source.field(tuple._1, tuple._2)
}
source.endObject()
}
source.endObject()
source
}
def rawDoc(json: String): PercolateDefinition = {
this._rawDoc = Some(json)
this
}
def doc(fields: (String, Any)*): PercolateDefinition = {
this._fields ++= fields
this
}
def doc(fields: Map[String, Any]): PercolateDefinition = {
this._fields ++= fields
this
}
def query(string: String): PercolateDefinition = query(new QueryStringQueryDefinition(string))
def query(block: => QueryDefinition) = {
_query = block
this
}
}
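// Minimal usage sketch, not part of the original file: it only exercises the classes
// defined above. The index name, document field and query string are hypothetical, and
// executing the requests against a cluster is omitted.
object PercolateUsageSketch {
  val registration: RegisterDefinition =
    new RegisterDefinition("bands", "coldplay-query").query("name:coldplay")
  val percolation: PercolateDefinition =
    new PercolateDefinition(IndexesTypes("bands")).doc("name" -> "coldplay")
}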
| alexander-svendsen/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/PercolateDsl.scala | Scala | apache-2.0 | 5,622 |
package io.vamp.common.akka
import akka.actor.ActorSystem
trait ActorSystemProvider {
implicit def actorSystem: ActorSystem
}
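// Minimal example, not part of the original source: a concrete component supplies the
// actor system required by this trait (the system name "example" is arbitrary).
class ExampleActorSystemProvider extends ActorSystemProvider {
  implicit lazy val actorSystem: ActorSystem = ActorSystem("example")
}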
| magneticio/vamp | common/src/main/scala/io/vamp/common/akka/ActorSystemProvider.scala | Scala | apache-2.0 | 130 |
package quizleague.web.model
import quizleague.web.util.rx.RefObservable
import scalajs.js
import scala.scalajs.js.annotation._
class Team (
val id:String,
val name:String,
val shortName:String,
val venue:RefObservable[Venue],
val text:RefObservable[Text],
val users:js.Array[RefObservable[User]],
val retired:Boolean
) extends Model
object Team{
def apply( id:String,
name:String,
shortName:String,
venue:RefObservable[Venue],
text:RefObservable[Text],
users:js.Array[RefObservable[User]],
retired:Boolean) = new Team(
id,name,shortName,venue,text,users,retired
)
} | gumdrop/quizleague-maintain | js/src/main/scala/quizleague/web/model/Team.scala | Scala | mit | 649 |
package io.swagger.client.api
import io.swagger.client.core._
import io.swagger.client.core.CollectionFormats._
import io.swagger.client.core.ApiKeyLocations._
object OauthApi {
/**
   * The client provides the authorization code obtained from /api/oauth2/authorize to this endpoint and receives an access token. The access token can then be used to query the different API endpoints of QuantiModo.
*
* Expected answers:
* code 200 : (Successful Operation)
* code 401 : (Not Authenticated)
*
* @param clientId Client id
* @param clientSecret Client secret
* @param grantType Grant Type can be 'authorization_code' or 'refresh_token'
* @param responseType Response type
* @param scope Scope
* @param redirectUri Redirect uri
* @param state State
* @param realm Realm
*/
def oauth2AccesstokenGet(clientId: String, clientSecret: String, grantType: String, responseType: Option[String] = None, scope: Option[String] = None, redirectUri: Option[String] = None, state: Option[String] = None, realm: Option[String] = None): ApiRequest[Unit] =
ApiRequest[Unit](ApiMethods.GET, "https://localhost/api", "/oauth2/accesstoken", "application/json")
.withQueryParam("client_id", clientId)
.withQueryParam("client_secret", clientSecret)
.withQueryParam("grant_type", grantType)
.withQueryParam("response_type", responseType)
.withQueryParam("scope", scope)
.withQueryParam("redirect_uri", redirectUri)
.withQueryParam("state", state)
.withQueryParam("realm", realm)
.withSuccessResponse[Unit](200)
.withErrorResponse[Unit](401)
/**
   * Ask the user whether they want to allow a client application to submit or obtain data from their QM account. The endpoint redirects the user to the URL provided by the client application, with the code as a query parameter on success or an error parameter on failure.
*
* Expected answers:
* code 200 : (Successful Operation)
* code 401 : (Not Authenticated)
*
* @param clientId This is the unique ID that QuantiModo uses to identify your application. Obtain a client id by emailing [email protected].
   * @param clientSecret This is the secret for your obtained client_id. QuantiModo uses this to validate that only your application uses the client_id.
* @param responseType If the value is code, launches a Basic flow, requiring a POST to the token endpoint to obtain the tokens. If the value is token id_token or id_token token, launches an Implicit flow, requiring the use of Javascript at the redirect URI to retrieve tokens from the URI #fragment.
* @param scope Scopes include basic, readmeasurements, and writemeasurements. The \\"basic\\" scope allows you to read user info (displayname, email, etc). The \\"readmeasurements\\" scope allows one to read a user's data. The \\"writemeasurements\\" scope allows you to write user data. Separate multiple scopes by a space.
* @param redirectUri The redirect URI is the URL within your client application that will receive the OAuth2 credentials.
   * @param state An opaque string that is round-tripped in the protocol; that is to say, it is returned as a URI parameter in the Basic flow, and in the URI #fragment in the Implicit flow.
* @param realm Name of the realm representing the users of your distributed applications and services. A \\"realm\\" attribute MAY be included to indicate the scope of protection.
*/
def oauth2AuthorizeGet(clientId: String, clientSecret: String, responseType: String, scope: String, redirectUri: Option[String] = None, state: Option[String] = None, realm: Option[String] = None): ApiRequest[Unit] =
ApiRequest[Unit](ApiMethods.GET, "https://localhost/api", "/oauth2/authorize", "application/json")
.withQueryParam("client_id", clientId)
.withQueryParam("client_secret", clientSecret)
.withQueryParam("response_type", responseType)
.withQueryParam("scope", scope)
.withQueryParam("redirect_uri", redirectUri)
.withQueryParam("state", state)
.withQueryParam("realm", realm)
.withSuccessResponse[Unit](200)
.withErrorResponse[Unit](401)
}
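// Usage sketch, not part of the generated SDK: build the two requests described above with
// hypothetical credentials and redirect URI. Sending the resulting ApiRequest values through
// the project's invoker is omitted here.
object OauthApiUsageSketch {
  val tokenRequest =
    OauthApi.oauth2AccesstokenGet(
      clientId = "my-client-id",
      clientSecret = "my-client-secret",
      grantType = "authorization_code",
      redirectUri = Some("https://example.com/oauth/callback"))
  val authorizeRequest =
    OauthApi.oauth2AuthorizeGet(
      clientId = "my-client-id",
      clientSecret = "my-client-secret",
      responseType = "code",
      scope = "basic readmeasurements",
      redirectUri = Some("https://example.com/oauth/callback"))
}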
| QuantiModo/QuantiModo-SDK-Akka-Scala | src/main/scala/io/swagger/client/api/OauthApi.scala | Scala | gpl-2.0 | 4,170 |
/*
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*/
package org.locationtech.geomesa.compute.spark.sql
import java.util.{List => jList, Map => jMap, UUID}
import com.vividsolutions.jts.geom.Geometry
import org.apache.metamodel
import org.apache.metamodel.DataContext
import org.apache.metamodel.query.parser.QueryParser
import org.apache.metamodel.query.{CompiledQuery, Query}
import org.apache.metamodel.schema.builder.SimpleTableDefSchemaBuilder
import org.apache.metamodel.schema.{ColumnType, Table}
import org.apache.metamodel.util.SimpleTableDef
import org.opengis.feature.simple.SimpleFeatureType
import scala.collection.JavaConversions._
/**
* Metamodel data context implementation. Only implements the methods required to parse a SQL query.
*
 * All tables are identified by the SFT name, which must be unique. They all have a null
 * schema, which is required because Spark SQL does not support schemas.
*/
class GeoMesaDataContext(sfts: Map[String, SimpleFeatureType]) extends DataContext {
import GeoMesaDataContext._
/**
* Maps a simple feature type to a SQL table definition.
*
* @param label the SFT name
* @return a table corresponding to the simple feature type
*/
override def getTableByQualifiedLabel(label: String): Table = {
cache.synchronized {
cache.getOrElseUpdate(label, {
sfts.get(label).map { sft =>
val descriptors = sft.getAttributeDescriptors
val names = descriptors.map(_.getLocalName)
val types = descriptors.map(_.getType.getBinding).map {
case c if classOf[java.lang.String].isAssignableFrom(c) => ColumnType.VARCHAR
case c if classOf[java.lang.Integer].isAssignableFrom(c) => ColumnType.INTEGER
case c if classOf[java.lang.Long].isAssignableFrom(c) => ColumnType.BIGINT
case c if classOf[java.lang.Float].isAssignableFrom(c) => ColumnType.FLOAT
case c if classOf[java.lang.Double].isAssignableFrom(c) => ColumnType.DOUBLE
case c if classOf[java.lang.Boolean].isAssignableFrom(c) => ColumnType.BOOLEAN
case c if classOf[java.util.Date].isAssignableFrom(c) => ColumnType.TIMESTAMP
case c if classOf[UUID].isAssignableFrom(c) => ColumnType.UUID
case c if classOf[Geometry].isAssignableFrom(c) => ColumnType.OTHER
case c if classOf[jList[_]].isAssignableFrom(c) => ColumnType.LIST
case c if classOf[jMap[_, _]].isAssignableFrom(c) => ColumnType.MAP
case _ => ColumnType.OTHER
}
val tableDef = new SimpleTableDef(label, names.toArray, types.toArray)
// schema has to be null for spark sql compatibility
new SimpleTableDefSchemaBuilder(null, tableDef).build().getTableByName(label)
}.orNull
})
}
}
override def parseQuery(queryString: String): Query = new QueryParser(this, queryString).parse
override def getSchemaByName(name: String) = ???
override def getDefaultSchema = ???
override def getSchemaNames = ???
override def executeQuery(query: metamodel.query.Query) = ???
override def getColumnByQualifiedLabel(columnName: String) = ???
override def executeQuery(compiledQuery: CompiledQuery, values: AnyRef*) = ???
override def executeQuery(queryString: String) = ???
override def refreshSchemas() = ???
override def getSchemas = ???
override def compileQuery(query: Query) = ???
override def query() = ???
}
object GeoMesaDataContext {
private val cache = scala.collection.mutable.Map.empty[String, Table]
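  // Usage sketch, not part of the original source: build a context from a map of simple
  // feature types and parse a SQL query (only parsing is implemented above; execution
  // methods are left as ???). The type name, spec and the SimpleFeatureTypes helper from
  // geomesa-utils are assumptions for illustration.
  //   val sft = SimpleFeatureTypes.createType("observations", "name:String,dtg:Date,*geom:Point:srid=4326")
  //   val ctx = new GeoMesaDataContext(Map("observations" -> sft))
  //   val query = ctx.parseQuery("SELECT name FROM observations")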
} | drackaer/geomesa | geomesa-compute/src/main/scala/org/locationtech/geomesa/compute/spark/sql/GeoMesaDataContext.scala | Scala | apache-2.0 | 3,924 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.util
import java.sql.Timestamp
import java.text.{ParseException, ParsePosition, SimpleDateFormat}
import java.time._
import java.time.format.{DateTimeFormatter, DateTimeParseException}
import java.time.temporal.ChronoField.MICRO_OF_SECOND
import java.time.temporal.TemporalQueries
import java.util.{Calendar, GregorianCalendar, Locale, TimeZone}
import java.util.concurrent.TimeUnit.SECONDS
import org.apache.commons.lang3.time.FastDateFormat
import org.apache.spark.sql.catalyst.util.DateTimeConstants._
import org.apache.spark.sql.catalyst.util.DateTimeUtils._
import org.apache.spark.sql.catalyst.util.LegacyDateFormats.{LegacyDateFormat, LENIENT_SIMPLE_DATE_FORMAT}
import org.apache.spark.sql.catalyst.util.RebaseDateTime._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy._
import org.apache.spark.sql.types.Decimal
sealed trait TimestampFormatter extends Serializable {
/**
* Parses a timestamp in a string and converts it to microseconds.
*
* @param s - string with timestamp to parse
* @return microseconds since epoch.
* @throws ParseException can be thrown by legacy parser
* @throws DateTimeParseException can be thrown by new parser
* @throws DateTimeException unable to obtain local date or time
*/
@throws(classOf[ParseException])
@throws(classOf[DateTimeParseException])
@throws(classOf[DateTimeException])
def parse(s: String): Long
def format(us: Long): String
}
class Iso8601TimestampFormatter(
pattern: String,
zoneId: ZoneId,
locale: Locale,
legacyFormat: LegacyDateFormat = LENIENT_SIMPLE_DATE_FORMAT,
needVarLengthSecondFraction: Boolean)
extends TimestampFormatter with DateTimeFormatterHelper {
@transient
protected lazy val formatter: DateTimeFormatter =
getOrCreateFormatter(pattern, locale, needVarLengthSecondFraction)
@transient
protected lazy val legacyFormatter = TimestampFormatter.getLegacyFormatter(
pattern, zoneId, locale, legacyFormat)
override def parse(s: String): Long = {
val specialDate = convertSpecialTimestamp(s.trim, zoneId)
specialDate.getOrElse {
try {
val parsed = formatter.parse(s)
val parsedZoneId = parsed.query(TemporalQueries.zone())
val timeZoneId = if (parsedZoneId == null) zoneId else parsedZoneId
val zonedDateTime = toZonedDateTime(parsed, timeZoneId)
val epochSeconds = zonedDateTime.toEpochSecond
val microsOfSecond = zonedDateTime.get(MICRO_OF_SECOND)
Math.addExact(SECONDS.toMicros(epochSeconds), microsOfSecond)
} catch checkDiffResult(s, legacyFormatter.parse)
}
}
override def format(us: Long): String = {
val instant = DateTimeUtils.microsToInstant(us)
formatter.withZone(zoneId).format(instant)
}
}
/**
* The formatter parses/formats timestamps according to the pattern `yyyy-MM-dd HH:mm:ss.[..fff..]`
 * where `[..fff..]` is a fraction of a second up to microsecond resolution. The formatter does not
* output trailing zeros in the fraction. For example, the timestamp `2019-03-05 15:00:01.123400` is
* formatted as the string `2019-03-05 15:00:01.1234`.
*
 * @param zoneId the time zone identifier in which the formatter parses or formats timestamps
*/
class FractionTimestampFormatter(zoneId: ZoneId)
extends Iso8601TimestampFormatter(
"", zoneId, TimestampFormatter.defaultLocale, needVarLengthSecondFraction = false) {
@transient
override protected lazy val formatter = DateTimeFormatterHelper.fractionFormatter
}
/**
* The custom sub-class of `GregorianCalendar` is needed to get access to
* protected `fields` immediately after parsing. We cannot use
* the `get()` method because it performs normalization of the fraction
 * part. Accordingly, the `MILLISECOND` field doesn't contain the original value.
 *
 * This class also allows setting a raw value on the `MILLISECOND` field
 * directly before formatting.
*/
class MicrosCalendar(tz: TimeZone, digitsInFraction: Int)
extends GregorianCalendar(tz, Locale.US) {
  // Converts the parsed `MILLISECOND` field to the seconds fraction in microsecond precision.
  // For example, if the fraction pattern is `SSSS` then `digitsInFraction` = 4, and a
  // `MILLISECOND` field parsed to `1234` is converted to `123400` microseconds, as traced below.
def getMicros(): SQLTimestamp = {
// Append 6 zeros to the field: 1234 -> 1234000000
val d = fields(Calendar.MILLISECOND) * MICROS_PER_SECOND
// Take the first 6 digits from `d`: 1234000000 -> 123400
// The rest contains exactly `digitsInFraction`: `0000` = 10 ^ digitsInFraction
// So, the result is `(1234 * 1000000) / (10 ^ digitsInFraction)
d / Decimal.POW_10(digitsInFraction)
}
// Converts the seconds fraction in microsecond precision to a value
// that can be correctly formatted according to the specified fraction pattern.
// The method performs operations opposite to `getMicros()`.
def setMicros(micros: Long): Unit = {
val d = micros * Decimal.POW_10(digitsInFraction)
fields(Calendar.MILLISECOND) = (d / MICROS_PER_SECOND).toInt
}
}
class LegacyFastTimestampFormatter(
pattern: String,
zoneId: ZoneId,
locale: Locale) extends TimestampFormatter {
@transient private lazy val fastDateFormat =
FastDateFormat.getInstance(pattern, TimeZone.getTimeZone(zoneId), locale)
@transient private lazy val cal = new MicrosCalendar(
fastDateFormat.getTimeZone,
fastDateFormat.getPattern.count(_ == 'S'))
def parse(s: String): SQLTimestamp = {
cal.clear() // Clear the calendar because it can be re-used many times
if (!fastDateFormat.parse(s, new ParsePosition(0), cal)) {
throw new IllegalArgumentException(s"'$s' is an invalid timestamp")
}
val micros = cal.getMicros()
cal.set(Calendar.MILLISECOND, 0)
val julianMicros = Math.addExact(millisToMicros(cal.getTimeInMillis), micros)
rebaseJulianToGregorianMicros(julianMicros)
}
def format(timestamp: SQLTimestamp): String = {
val julianMicros = rebaseGregorianToJulianMicros(timestamp)
cal.setTimeInMillis(Math.floorDiv(julianMicros, MICROS_PER_SECOND) * MILLIS_PER_SECOND)
cal.setMicros(Math.floorMod(julianMicros, MICROS_PER_SECOND))
fastDateFormat.format(cal)
}
}
class LegacySimpleTimestampFormatter(
pattern: String,
zoneId: ZoneId,
locale: Locale,
lenient: Boolean = true) extends TimestampFormatter {
@transient private lazy val sdf = {
val formatter = new SimpleDateFormat(pattern, locale)
formatter.setTimeZone(TimeZone.getTimeZone(zoneId))
formatter.setLenient(lenient)
formatter
}
override def parse(s: String): Long = {
fromJavaTimestamp(new Timestamp(sdf.parse(s).getTime))
}
override def format(us: Long): String = {
sdf.format(toJavaTimestamp(us))
}
}
object LegacyDateFormats extends Enumeration {
type LegacyDateFormat = Value
val FAST_DATE_FORMAT, SIMPLE_DATE_FORMAT, LENIENT_SIMPLE_DATE_FORMAT = Value
}
object TimestampFormatter {
import LegacyDateFormats._
val defaultLocale: Locale = Locale.US
def defaultPattern(): String = s"${DateFormatter.defaultPattern} HH:mm:ss"
private def getFormatter(
format: Option[String],
zoneId: ZoneId,
locale: Locale = defaultLocale,
legacyFormat: LegacyDateFormat = LENIENT_SIMPLE_DATE_FORMAT,
needVarLengthSecondFraction: Boolean = false): TimestampFormatter = {
val pattern = format.getOrElse(defaultPattern)
if (SQLConf.get.legacyTimeParserPolicy == LEGACY) {
getLegacyFormatter(pattern, zoneId, locale, legacyFormat)
} else {
new Iso8601TimestampFormatter(
pattern, zoneId, locale, legacyFormat, needVarLengthSecondFraction)
}
}
def getLegacyFormatter(
pattern: String,
zoneId: ZoneId,
locale: Locale,
legacyFormat: LegacyDateFormat): TimestampFormatter = {
legacyFormat match {
case FAST_DATE_FORMAT =>
new LegacyFastTimestampFormatter(pattern, zoneId, locale)
case SIMPLE_DATE_FORMAT =>
new LegacySimpleTimestampFormatter(pattern, zoneId, locale, lenient = false)
case LENIENT_SIMPLE_DATE_FORMAT =>
new LegacySimpleTimestampFormatter(pattern, zoneId, locale, lenient = true)
}
}
def apply(
format: String,
zoneId: ZoneId,
locale: Locale,
legacyFormat: LegacyDateFormat,
needVarLengthSecondFraction: Boolean): TimestampFormatter = {
getFormatter(Some(format), zoneId, locale, legacyFormat, needVarLengthSecondFraction)
}
def apply(
format: String,
zoneId: ZoneId,
legacyFormat: LegacyDateFormat,
needVarLengthSecondFraction: Boolean): TimestampFormatter = {
getFormatter(Some(format), zoneId, defaultLocale, legacyFormat, needVarLengthSecondFraction)
}
def apply(
format: String,
zoneId: ZoneId,
needVarLengthSecondFraction: Boolean = false): TimestampFormatter = {
getFormatter(Some(format), zoneId, needVarLengthSecondFraction = needVarLengthSecondFraction)
}
def apply(zoneId: ZoneId): TimestampFormatter = {
getFormatter(None, zoneId)
}
def getFractionFormatter(zoneId: ZoneId): TimestampFormatter = {
new FractionTimestampFormatter(zoneId)
}
}
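// Usage sketch, not part of the original file: the fraction formatter described above drops
// trailing zeros from the second fraction when formatting. The value mirrors the example in
// the FractionTimestampFormatter scaladoc.
private[util] object TimestampFormatterUsageSketch {
  val formatter: TimestampFormatter = TimestampFormatter.getFractionFormatter(ZoneOffset.UTC)
  val micros: Long = DateTimeUtils.instantToMicros(Instant.parse("2019-03-05T15:00:01.123400Z"))
  val formatted: String = formatter.format(micros) // "2019-03-05 15:00:01.1234"
}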
| zuotingbing/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala | Scala | apache-2.0 | 10,048 |