code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5-1M)
---|---|---|---|---|---
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.batch.sql.agg
import org.apache.flink.configuration.Configuration
import org.apache.flink.table.api.TableConfigOptions
/**
* AggregateITCase using HashAgg Operator.
*/
class HashAggITCase
extends AggregateITCaseBase("HashAggregate") {
override def prepareAggOp(): Unit = {
tEnv.getConfig.getConf.setString(
TableConfigOptions.SQL_EXEC_DISABLED_OPERATORS, "SortAgg")
}
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/runtime/batch/sql/agg/HashAggITCase.scala | Scala | apache-2.0 | 1,241 |
def msum[T](list: T*)(implicit m: Monoid[T]): T =
list.foldLeft(m.mzero)(m.madd)
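// Usage sketch (the Monoid instance below is hypothetical, not part of this file):
//   implicit val intAddition: Monoid[Int] = new Monoid[Int] {
//     val mzero = 0
//     def madd(a: Int, b: Int) = a + b
//   }
//   msum(1, 2, 3) // 6, i.e. List(1, 2, 3).foldLeft(0)(_ + _)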
| lkuczera/scalatypeclasses | steps/Monoidmsum.scala | Scala | mit | 84 |
// Classes: Constructors
// Constructors aren’t special methods, they are
// the code outside of method definitions.
class Calculator(val brand: String) {
// Code executed on instance construction
val color: String = if (brand == "TI") {
"blue"
} else if (brand == "HP") {
"black"
} else {
"white"
}
def add(m: Int, n: Int): Int = m + n
}
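// Usage sketch: constructor code (the if/else above) runs once per instantiation.
//   val hp = new Calculator("HP")
//   hp.color     // "black"
//   hp.add(1, 2) // 3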
| agconti/scala-school | 01-intro-to-scala/slides/slide022.scala | Scala | mit | 375 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.fetch
import io.gatling.commons.validation.Validation
import io.gatling.core.CoreComponents
import io.gatling.core.session._
import io.gatling.http.HeaderNames
import io.gatling.http.protocol.HttpComponents
import io.gatling.http.request.builder.Http
import io.gatling.http.request.builder.RequestBuilder._
import io.gatling.http.request.HttpRequest
import org.asynchttpclient.uri.Uri
object EmbeddedResource {
val DefaultResourceChecks = List(DefaultHttpCheck)
}
sealed abstract class EmbeddedResource {
def uri: Uri
def acceptHeader: Expression[String]
val url = uri.toString
def toRequest(session: Session, coreComponents: CoreComponents, httpComponents: HttpComponents, throttled: Boolean): Validation[HttpRequest] = {
val requestName = {
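      // take the last path segment as the name, e.g. ".../css/style.css" -> "style.css";
      // a URL ending in '/' falls back to "/"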
val start = url.lastIndexOf('/') + 1
if (start < url.length)
url.substring(start, url.length)
else
"/"
}
val http = new Http(requestName.expressionSuccess)
val httpRequestDef = http.get(uri).header(HeaderNames.Accept, acceptHeader).build(coreComponents, httpComponents, throttled)
httpRequestDef.build(requestName, session)
}
}
case class CssResource(uri: Uri) extends EmbeddedResource { val acceptHeader = CssHeaderHeaderValueExpression }
case class RegularResource(uri: Uri) extends EmbeddedResource { val acceptHeader = AllHeaderHeaderValueExpression }
| MykolaB/gatling | gatling-http/src/main/scala/io/gatling/http/fetch/EmbeddedResource.scala | Scala | apache-2.0 | 2,019 |
package com.cerner.beadledom.lifecycle
import com.google.inject.{AbstractModule, Inject, Module}
import java.util.Collections
import javax.annotation.{PostConstruct, PreDestroy}
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{FunSpec, MustMatchers}
import scala.collection.JavaConverters._
/**
* Unit tests for [[GuiceLifecycleContainers]].
*
* @author John Leacox
*/
class GuiceLifecycleContainersSpec extends FunSpec with MustMatchers with MockitoSugar {
describe("GuiceLifecycleContainers") {
describe("#initialize") {
it("throws a NullPointerException for a null container") {
val modules = Collections.emptyList[Module]
intercept[NullPointerException] {
GuiceLifecycleContainers.initialize(null.asInstanceOf[LifecycleContainer], modules)
}
}
it("throws a NullPointerException for a null modules list") {
val container = new LifecycleContainer {}
intercept[NullPointerException] {
GuiceLifecycleContainers.initialize(container, null)
}
}
it("creates an injector with the provided modules") {
val module: Module = new AbstractModule {
override def configure(): Unit = {
bind(classOf[String]).toInstance("Hello World")
}
}
val modules = List(module).asJava
val container = new LifecycleContainer {}
val lifecycleInjector = GuiceLifecycleContainers.initialize(container, modules)
lifecycleInjector.getInstance(classOf[String]) mustBe "Hello World"
}
it("injects the container members") {
val container = new TestContainer
val module: Module = new AbstractModule {
override def configure(): Unit = {
bind(classOf[String]).toInstance("Hello World")
bind(classOf[Boolean]).toInstance(true)
}
}
val modules = List(module).asJava
container.fieldInjection mustBe null.asInstanceOf[String]
container.methodInjection mustBe false
val lifecycleInjector = GuiceLifecycleContainers.initialize(container, modules)
container.fieldInjection mustBe "Hello World"
container.methodInjection mustBe true
}
it("executes PostConstruct lifecycle methods in the registered modules") {
val module: Module = new AbstractModule {
override def configure(): Unit = {
bind(classOf[TestLifecycleHook]).asEagerSingleton()
}
}
val modules = List(module).asJava
val container = new LifecycleContainer {}
val lifecycleInjector = GuiceLifecycleContainers.initialize(container, modules)
val hook = lifecycleInjector.getInstance(classOf[TestLifecycleHook])
hook.hasExecutedStartup mustBe true
hook.hasExecutedShutdown mustBe false
}
it(
"returns a LifecycleInjector that executes PreDestroy lifecycle methods in the " +
"registered modules on shutdown") {
val module: Module = new AbstractModule {
override def configure(): Unit = {
bind(classOf[TestLifecycleHook]).asEagerSingleton()
}
}
val modules = List(module).asJava
val container = new LifecycleContainer {}
val lifecycleInjector = GuiceLifecycleContainers.initialize(container, modules)
val hook = lifecycleInjector.getInstance(classOf[TestLifecycleHook])
lifecycleInjector.shutdown()
hook.hasExecutedShutdown mustBe true
}
}
}
}
class TestContainer extends LifecycleContainer {
var methodInjection: Boolean = _
@Inject
var fieldInjection: String = _
@Inject
def init(value: Boolean) = {
methodInjection = value
}
}
class TestLifecycleHook {
var hasExecutedStartup = false
var hasExecutedShutdown = false
@PostConstruct
def startup(): Unit = {
hasExecutedStartup = true
}
@PreDestroy
def shutdown(): Unit = {
hasExecutedShutdown = true
}
}
| bbaugher/beadledom | lifecycle/src/test/scala/com/cerner/beadledom/lifecycle/GuiceLifecycleContainersSpec.scala | Scala | apache-2.0 | 4,013 |
/*
* Copyright 2015 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.httpclient.dummy
import java.util
import akka.actor.ActorSystem
import org.json4s.CustomSerializer
import org.json4s.JsonAST._
import org.squbs.httpclient.japi.{EmployeeBean, TeamBean, TeamBeanWithCaseClassMember}
import org.squbs.httpclient.json.JsonProtocol
import spray.http.HttpHeaders.RawHeader
import spray.http._
import spray.routing.SimpleRoutingApp
import spray.util.Utils._
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.util.{Failure, Success}
//case class reference case class
case class Employee(id: Long, firstName: String, lastName: String, age: Int, male: Boolean)
case class Team(description: String, members: List[Employee])
//non case class with accessor
class Employee1(val id: Long, val firstName: String, val lastName: String, val age: Int, val male: Boolean){
override def equals(obj : Any) : Boolean = {
obj match {
case t : Employee1 =>
t.id == id && t.firstName == firstName && t.lastName == lastName && t.age == age && t.male == male
case _ => false
}
}
override def hashCode() =
id.hashCode() + firstName.hashCode() + lastName.hashCode() + age.hashCode() + male.hashCode()
}
class Team1(val description: String, val members: List[Employee1]){
override def equals(obj : Any) : Boolean = {
obj match {
case t : Team1 =>
t.description == description && t.members == members
case _ => false
}
}
override def hashCode() = description.hashCode() + (members map (_.hashCode())).sum
}
object EmployeeBeanSerializer extends CustomSerializer[EmployeeBean](format => ( {
case JObject(JField("id", JInt(i)) :: JField("firstName", JString(f)) :: JField("lastName", JString(l)) :: JField(
"age", JInt(a)) :: JField("male", JBool(m)) :: Nil) =>
new EmployeeBean(i.longValue(), f, l, a.intValue(), m)
}, {
case x: EmployeeBean =>
JObject(
JField("id", JInt(BigInt(x.getId))) ::
JField("firstName", JString(x.getFirstName)) ::
JField("lastName", JString(x.getLastName)) ::
JField("age", JInt(x.getAge)) ::
JField("male", JBool(x.isMale)) ::
Nil)
}
))
//scala class reference java class
class Team2(val description: String, val members: List[EmployeeBean])
object DummyService {
val (dummyServiceIpAddress, dummyServicePort) = temporaryServerHostnameAndPort()
val dummyServiceEndpoint = Uri(s"http://$dummyServiceIpAddress:$dummyServicePort")
}
object DummyServiceMain extends App with DummyService {
implicit val actorSystem = ActorSystem("DummyServiceMain")
startDummyService(actorSystem, address = "localhost", port = 8888)
}
trait DummyService extends SimpleRoutingApp {
val fullTeamBean = {
val list = new util.ArrayList[EmployeeBean]()
list.add(new EmployeeBean(1, "John", "Doe", 20, true))
list.add(new EmployeeBean(2, "Mike", "Moon", 25, true))
list.add(new EmployeeBean(3, "Jane", "Williams", 30, false))
list.add(new EmployeeBean(4, "Liz", "Taylor", 35, false))
new TeamBean("squbs Team", list)
}
val fullTeam1 = new Team1("squbs Team", List[Employee1](
new Employee1(1, "John", "Doe", 20, male = true),
new Employee1(2, "Mike", "Moon", 25, male = true),
new Employee1(3, "Jane", "Williams", 30, male = false),
new Employee1(4, "Liz", "Taylor", 35, male = false)
))
//scala class use java bean
val fullTeam2 = new Team2("squbs Team", List[EmployeeBean](
new EmployeeBean(1, "John", "Doe", 20, true),
new EmployeeBean(2, "Mike", "Moon", 25, true),
new EmployeeBean(3, "Jane", "Williams", 30, false),
new EmployeeBean(4, "Liz", "Taylor", 35, false)
))
import scala.collection.JavaConversions._
val fullTeam3 = new TeamBeanWithCaseClassMember("squbs Team", List[Employee](
Employee(1, "John", "Doe", 20, male = true),
Employee(2, "Mike", "Moon", 25, male = true),
Employee(3, "Jane", "Williams", 30, male = false),
Employee(4, "Liz", "Taylor", 35, male = false)
))
val fullTeam = Team("squbs Team", List[Employee](
Employee(1, "John", "Doe", 20, male = true),
Employee(2, "Mike", "Moon", 25, male = true),
Employee(3, "Jane", "Williams", 30, male = false),
Employee(4, "Liz", "Taylor", 35, male = false)
))
val newTeamMember = Employee(5, "Jack", "Ripper", 35, male = true)
val newTeamMemberBean = new EmployeeBean(5, "Jack", "Ripper", 35, true)
  val fullTeamJson = "{\"description\":\"squbs Team\",\"members\":[{\"id\":1,\"firstName\":\"John\"," +
    "\"lastName\":\"Doe\",\"age\":20,\"male\":true},{\"id\":2,\"firstName\":\"Mike\",\"lastName\":\"Moon\"," +
    "\"age\":25,\"male\":true},{\"id\":3,\"firstName\":\"Jane\",\"lastName\":\"Williams\",\"age\":30,\"male\":false}," +
    "{\"id\":4,\"firstName\":\"Liz\",\"lastName\":\"Taylor\",\"age\":35,\"male\":false}]}"
  val fullTeamWithDelJson = "{\"description\":\"squbs Team\",\"members\":[{\"id\":1,\"firstName\":\"John\"," +
    "\"lastName\":\"Doe\",\"age\":20,\"male\":true},{\"id\":2,\"firstName\":\"Mike\",\"lastName\":\"Moon\"," +
    "\"age\":25,\"male\":true},{\"id\":3,\"firstName\":\"Jane\",\"lastName\":\"Williams\",\"age\":30,\"male\":false}]}"
  val fullTeamWithAddJson = "{\"description\":\"squbs Team\",\"members\":[{\"id\":1,\"firstName\":\"John\"," +
    "\"lastName\":\"Doe\",\"age\":20,\"male\":true},{\"id\":2,\"firstName\":\"Mike\",\"lastName\":\"Moon\"," +
    "\"age\":25,\"male\":true},{\"id\":3,\"firstName\":\"Jane\",\"lastName\":\"Williams\",\"age\":30,\"male\":false}," +
    "{\"id\":4,\"firstName\":\"Liz\",\"lastName\":\"Taylor\",\"age\":35,\"male\":false},{\"id\":5," +
    "\"firstName\":\"Jack\",\"lastName\":\"Ripper\",\"age\":35,\"male\":true}]}"
val fullTeamWithDel = Team("squbs Team", List[Employee](
Employee(1, "John", "Doe", 20, male = true),
Employee(2, "Mike", "Moon", 25, male = true),
Employee(3, "Jane", "Williams", 30, male = false)
))
val fullTeamWithAdd = Team("squbs Team", List[Employee](
Employee(1, "John", "Doe", 20, male = true),
Employee(2, "Mike", "Moon", 25, male = true),
Employee(3, "Jane", "Williams", 30, male = false),
Employee(4, "Liz", "Taylor", 35, male = false),
newTeamMember
))
val fullTeamBeanWithAdd = fullTeamBean.addMember(newTeamMemberBean)
//import org.squbs.httpclient.json.Json4sJacksonNoTypeHintsProtocol.json4sUnmarshaller
import JsonProtocol.ManifestSupport._
//import JsonProtocol.toResponseMarshallable
// import scala.concurrent.ExecutionContext.Implicits.global
import DummyService._
import org.squbs.testkit.Timeouts._
def startDummyService(implicit system: ActorSystem, address: String = dummyServiceIpAddress,
port: Int = dummyServicePort) {
implicit val ec = system.dispatcher
startServer(address, port = port) {
pathSingleSlash {
redirect("/view", StatusCodes.Found)
} ~
//get, head, options
path("view") {
(get | head | options | post) {
respondWithMediaType(MediaTypes.`application/json`)
headerValueByName("req1-name") {
value =>
respondWithHeader(RawHeader("res-req1-name", "res-" + value)) {
complete {
fullTeam
}
}
} ~
headerValueByName("req2-name") {
value =>
respondWithHeader(RawHeader("res-req2-name", "res-" + value)){
complete {
fullTeam
}
}
} ~
complete {
fullTeam
}
}
} ~
//get, head, options
path("viewj") {
(get | head | options | post) {
respondWithMediaType(MediaTypes.`application/json`)
complete {
fullTeamBean
}
}
} ~
path("view1") {
(get | head | options | post) {
respondWithMediaType(MediaTypes.`application/json`)
complete {
fullTeam1
}
}
} ~
path("view2") {
(get | head | options | post) {
respondWithMediaType(MediaTypes.`application/json`)
complete {
fullTeam2
}
}
} ~
path("view3") {
(get | head | options | post) {
respondWithMediaType(MediaTypes.`application/json`)
complete {
fullTeam3
}
}
} ~
path("stop") {
(post | parameter('method ! "post")) {
complete {
system.scheduler.scheduleOnce(1.second)(system.shutdown())(system.dispatcher)
"Shutting down in 1 second..."
}
}
} ~
path("timeout") {
(get | head | options) {
complete {
Thread.sleep(3000)
"Thread 3 seconds, then return!"
}
}
} ~
//post, put
path("add") {
(post | put) {
entity[Employee](as[Employee]) {
employee: Employee =>
respondWithMediaType(MediaTypes.`application/json`)
complete {
Team(fullTeam.description, fullTeam.members :+ employee)
}
}
}
} ~
path("addj") {
(post | put) {
entity[EmployeeBean](as[EmployeeBean]) {
employee: EmployeeBean =>
respondWithMediaType(MediaTypes.`application/json`)
complete {
fullTeamBean.addMember(employee)
}
}
}
} ~
//del
path("del" / LongNumber) {
id =>
delete {
respondWithMediaType(MediaTypes.`application/json`)
complete {
val employee = fullTeam.members.find(_.id == id)
employee match {
case Some(emp) => Team(fullTeam.description, fullTeam.members.filterNot(_ == emp))
case None => "cannot find the employee"
}
}
}
}
} onComplete {
case Success(b) =>
println(s"Successfully bound to ${b.localAddress}")
case Failure(ex) =>
println(ex.getMessage)
system.shutdown()
}
}
}
| keshin/squbs | squbs-httpclient/src/test/scala/org/squbs/httpclient/dummy/DummyService.scala | Scala | apache-2.0 | 11,052 |
package com.shorrockin.cascal.serialization
import java.nio.ByteBuffer
import java.util.{Date, UUID}
object TupleSerializer {
def extractType[T](bytes: ByteBuffer, mf: Manifest[T]): T = {
val length = (bytes.get() & 0xFF) << 8 | (bytes.get() & 0xFF)
val typeBuffer = bytes.duplicate
typeBuffer.limit(typeBuffer.position + length)
bytes.position(typeBuffer.position + length + 1)
val ser = Serializer.Default(mf.erasure)
ser.fromByteBuffer(typeBuffer).asInstanceOf[T]
}
def byteBuffer[T](value: T)(implicit mf: Manifest[T]): ByteBuffer = {
value match {
case x: String if mf.erasure == classOf[String] => StringSerializer.toByteBuffer(x)
case x: UUID if mf.erasure == classOf[UUID] => UUIDSerializer.toByteBuffer(x)
case x: Int if mf.erasure == classOf[Int] => IntSerializer.toByteBuffer(x)
case x: Long if mf.erasure == classOf[Long] => LongSerializer.toByteBuffer(x)
case x: Boolean if mf.erasure == classOf[Boolean] => BooleanSerializer.toByteBuffer(x)
case x: Float if mf.erasure == classOf[Float] => FloatSerializer.toByteBuffer(x)
case x: Double if mf.erasure == classOf[Double] => DoubleSerializer.toByteBuffer(x)
case x: Date if mf.erasure == classOf[Date] => DateSerializer.toByteBuffer(x)
case None => ByteBuffer.allocate(0)
}
}
}
class CompositeBuffer(val buffers: ByteBuffer*) {
val lengthBytesSize = 2
val endOfComponentSize = 1
val compositeOverheadSize = lengthBytesSize + endOfComponentSize
def buffer(): ByteBuffer = {
val buffersSize = buffers.foldLeft(0){(sum, buffer) => sum + buffer.remaining}
val requiredSize = buffersSize + buffers.size * compositeOverheadSize
val buffer = ByteBuffer.allocate(requiredSize)
buffers foreach {buff =>
buffer.putShort(buff.remaining.asInstanceOf[Short]).put(buff).put(0.toByte)
}
buffer.rewind
buffer
}
}
object Tuple2Serializer {
import TupleSerializer._
def toByteBuffer[T1: Manifest, T2: Manifest](tuple: Tuple2[T1, T2]): ByteBuffer = {
val buffer = new CompositeBuffer(byteBuffer(tuple._1), byteBuffer(tuple._2))
buffer.buffer
}
def fromByteBuffer[T1, T2](bytes:ByteBuffer, mf1: Manifest[T1], mf2: Manifest[T2]): Tuple2[T1, T2] = {
(extractType(bytes, mf1), extractType(bytes, mf2))
}
}
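// Round-trip sketch (assumes Serializer.Default resolves the String and Int serializers
// used above; illustrative only):
//   val buf = Tuple2Serializer.toByteBuffer(("key", 42))
//   Tuple2Serializer.fromByteBuffer(buf, manifest[String], manifest[Int]) // ("key", 42)
// Each component is laid out as [2-byte length][payload][0x00 end-of-component byte],
// which is exactly the layout extractType consumes.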
object Tuple3Serializer {
import TupleSerializer._
def toByteBuffer[T1: Manifest, T2: Manifest, T3: Manifest](tuple: Tuple3[T1, T2, T3]): ByteBuffer = {
val buffer = new CompositeBuffer(byteBuffer(tuple._1), byteBuffer(tuple._2), byteBuffer(tuple._3))
buffer.buffer
}
def fromByteBuffer[T1, T2, T3](bytes:ByteBuffer, mf1: Manifest[T1], mf2: Manifest[T2], mf3: Manifest[T3]): Tuple3[T1, T2, T3] = {
(extractType(bytes, mf1), extractType(bytes, mf2), extractType(bytes, mf3))
}
}
object Tuple4Serializer {
import TupleSerializer._
def toByteBuffer[T1: Manifest, T2: Manifest, T3: Manifest, T4: Manifest](tuple: Tuple4[T1, T2, T3, T4]): ByteBuffer = {
val buffer = new CompositeBuffer(byteBuffer(tuple._1), byteBuffer(tuple._2), byteBuffer(tuple._3), byteBuffer(tuple._4))
buffer.buffer
}
def fromByteBuffer[T1, T2, T3, T4](bytes:ByteBuffer, mf1: Manifest[T1], mf2: Manifest[T2], mf3: Manifest[T3], mf4: Manifest[T4]): Tuple4[T1, T2, T3, T4] = {
(extractType(bytes, mf1), extractType(bytes, mf2), extractType(bytes, mf3), extractType(bytes, mf4))
}
}
object Tuple5Serializer {
import TupleSerializer._
def toByteBuffer[T1: Manifest, T2: Manifest, T3: Manifest, T4: Manifest, T5: Manifest](tuple: Tuple5[T1, T2, T3, T4, T5]): ByteBuffer = {
val buffer = new CompositeBuffer(byteBuffer(tuple._1), byteBuffer(tuple._2), byteBuffer(tuple._3), byteBuffer(tuple._4), byteBuffer(tuple._5))
buffer.buffer
}
def fromByteBuffer[T1, T2, T3, T4, T5](bytes:ByteBuffer, mf1: Manifest[T1], mf2: Manifest[T2], mf3: Manifest[T3], mf4: Manifest[T4], mf5: Manifest[T5]): Tuple5[T1, T2, T3, T4, T5] = {
(extractType(bytes, mf1), extractType(bytes, mf2), extractType(bytes, mf3), extractType(bytes, mf4), extractType(bytes, mf5))
}
} | Shimi/cascal | src/main/scala/com/shorrockin/cascal/serialization/TupleSerializer.scala | Scala | apache-2.0 | 4,140 |
package chap5
object Exe14 extends App {
assert(Stream.from(1).startsWith(Stream(1, 2)))
assert(!Stream.from(1).startsWith(Stream(1, 2, 4)))
}
| ponkotuy/FPScala | src/main/scala/chap5/Exe14.scala | Scala | unlicense | 148 |
package part3
trait 火 extends NumR {
def tail: NumR
override def methodL(num: NumL): Result = ResultP(tail.methodL(num))
}
object 火 {
def apply(tail1: => NumR): NumR = new 火 {
override def tail: NumR = tail1
}
}
| djx314/ubw | a50-指对2/src/main/scala/part3/火.scala | Scala | bsd-3-clause | 231 |
package championships
import eu.ace_design.island.arena.Run
import eu.ace_design.island.game.{Directions, Plane}
import eu.ace_design.island.map.IslandMap
import eu.ace_design.island.stdlib.Resources._
import library.Islands
import library.SI3
object Week09 extends Run with SI3 {
override val number: String = "09"
override val seed: Long = Islands.s09
override lazy val theIsland: IslandMap = Islands.week09
override val crew: Int = 15
override val budget: Int = 20000
override val plane: Plane = Plane(1, 1, Directions.EAST)
override val objectives = Set((WOOD, 10000), (LEATHER, 300), (GLASS, 50))
override def players = all - "qbd" - "qcf"
}
| mosser/QGL-15-16 | arena/src/main/scala/championships/Week09.scala | Scala | lgpl-3.0 | 691 |
package org.scalaide.debug.internal.launching
import java.util.concurrent.CountDownLatch
import java.util.concurrent.TimeUnit
import scala.io.Codec
import scala.io.Source
import scala.util.control.Exception.allCatch
import org.eclipse.core.resources.IFile
import org.eclipse.core.resources.IProject
import org.eclipse.core.resources.IResource
import org.eclipse.core.resources.IncrementalProjectBuilder
import org.eclipse.core.runtime.NullProgressMonitor
import org.eclipse.debug.core.DebugPlugin
import org.eclipse.debug.core.ILaunch
import org.eclipse.debug.core.ILaunchConfiguration
import org.eclipse.debug.core.ILaunchesListener2
import org.junit.Assert
/**
 * Used for launching an application.
*/
trait LaunchUtils {
/** Points to launch configuration file. */
val launchConfigurationName: String
private val DefaultMonitor = new NullProgressMonitor
/** Create a launch listener for launchTerminated events on a launch of the given launchConfiguration. */
def onLaunchTerminates(f: () => Unit) = new ILaunchesListener2() {
override def launchesTerminated(launches: Array[ILaunch]): Unit = {
if (launches.exists(_.getLaunchConfiguration.getName == launchConfigurationName)) {
f()
}
}
override def launchesAdded(launches: Array[ILaunch]): Unit = {}
override def launchesRemoved(launches: Array[ILaunch]): Unit = {}
override def launchesChanged(launches: Array[ILaunch]): Unit = {}
}
/** Cleans and incrementally builds projects */
def cleanBuild(projects: IProject*): Unit = projects.foreach { project =>
project.build(IncrementalProjectBuilder.CLEAN_BUILD, DefaultMonitor)
project.build(IncrementalProjectBuilder.INCREMENTAL_BUILD, DefaultMonitor)
}
private def launchConfiguration(project: IProject): ILaunchConfiguration =
DebugPlugin.getDefault.getLaunchManager.getLaunchConfiguration(project.getFile(launchConfigurationName + ".launch"))
def whenApplicationWasLaunchedFor(project: IProject, inMode: String)(inThatCase: => Unit): Unit = {
val latch = new CountDownLatch(1)
DebugPlugin.getDefault.getLaunchManager.addLaunchListener(onLaunchTerminates(latch.countDown))
val lc = launchConfiguration(project)
val launch = lc.launch(inMode, DefaultMonitor)
val timeout = if (launch.canTerminate) 10 else 60
latch.await(timeout, TimeUnit.SECONDS)
if (launch.canTerminate && !launch.isTerminated) {
throw new IllegalStateException(s"launch did not terminate in ${timeout}s")
}
inThatCase
}
/**
* Asserts the result of a launched test execution.
   * To verify that the given launcher has fired and resulted in a test
   * execution, the side effect of the executed test is checked.
   * The flow is as follows:
   *
   * `launch -> execute sample test -> verify test's side effect`
   *
   * So your test in `test-workspace` should materialize its side effect as a file,
   * which is then passed to the `assertLaunchEffect` method.
   * Usually the test class/method should define and execute a method similar to:
* {{{
* def foo(): Unit = {
* val writer = new FileWriter("launchDelegate.result")
* writer.write("success")
* writer.close
* }
* }}}
*/
def assertLaunchEffect(project: IProject, inMode: String, fileWithLaunchEffect: IFile,
expectedFileWithLaunchEffectContent: String = "success"): Unit = {
project.refreshLocal(IResource.DEPTH_ONE, new NullProgressMonitor)
if (fileWithLaunchEffect.exists) {
val source = Source.fromInputStream(fileWithLaunchEffect.getContents)(Codec.UTF8)
import scala.util.control.Exception._
val actual = allCatch.andFinally(source.close) opt source.mkString
Assert.assertEquals("Wrong result file content", expectedFileWithLaunchEffectContent, actual.getOrElse("failure"))
} else {
Assert.fail(s"result file not found in mode '$inMode'")
}
}
}
| Kwestor/scala-ide | org.scala-ide.sdt.debug.tests/src/org/scalaide/debug/internal/launching/LaunchUtils.scala | Scala | bsd-3-clause | 3,884 |
package rx.lang.scala.completeness
import java.util.Calendar
/**
* Generate comparison tables for Scala classes and Java classes. Run `sbt 'test:run rx.lang.scala.completeness.CompletenessTest'` to generate them.
*/
object CompletenessTables {
/**
* CompletenessKits to generate completeness tables.
*/
val completenessKits = List(
new ObservableCompletenessKit,
new BlockingObservableCompletenessKit,
new TestSchedulerCompletenessKit,
new TestSubscriberCompletenessKit)
def setTodoForMissingMethods(completenessKit: CompletenessKit): Map[String, String] = {
val actualMethods = completenessKit.rxScalaPublicInstanceAndCompanionMethods.toSet
for ((javaM, scalaM) <- completenessKit.correspondence) yield
(javaM, if (actualMethods.contains(scalaM) || scalaM.charAt(0) == '[') scalaM else "[**TODO: missing**]")
}
def scalaToJavaSignature(s: String) =
s.replaceAllLiterally("_ <:", "? extends")
.replaceAllLiterally("_ >:", "? super")
.replaceAllLiterally("[", "<")
.replaceAllLiterally("]", ">")
.replaceAllLiterally("Array<T>", "T[]")
def escapeJava(s: String) =
s.replaceAllLiterally("<", "<")
.replaceAllLiterally(">", ">")
def printMarkdownCorrespondenceTables(): Unit = {
println("""
---
layout: comparison
title: Comparison of Scala Classes and Java Classes
---
Note:
* These tables contain both static methods and instance methods.
* If a signature is too long, move your mouse over it to get the full signature.
""")
completenessKits.foreach(printMarkdownCorrespondenceTable)
val completenessTablesClassName = getClass.getCanonicalName.dropRight(1) // Drop "$"
println(s"\nThese tables were generated on ${Calendar.getInstance().getTime}.")
println(s"**Do not edit**. Instead, edit `${completenessTablesClassName}` and run `sbt 'test:run ${completenessTablesClassName}'` to generate these tables.")
}
def printMarkdownCorrespondenceTable(completenessKit: CompletenessKit): Unit = {
def groupingKey(p: (String, String)): (String, String) =
(if (p._1.startsWith("average")) "average" else p._1.takeWhile(_ != '('), p._2)
def formatJavaCol(name: String, alternatives: Iterable[String]): String = {
alternatives.toList.sorted.map(scalaToJavaSignature).map(s => {
if (s.length > 64) {
val toolTip = escapeJava(s)
"<span title=\"" + toolTip + "\"><code>" + name + "(...)</code></span>"
} else {
"`" + s + "`"
}
}).mkString("<br/>")
}
def formatScalaCol(s: String): String =
if (s.startsWith("[") && s.endsWith("]")) s.drop(1).dropRight(1) else "`" + s + "`"
val ps = setTodoForMissingMethods(completenessKit)
println(s"""
|## Comparison of Scala ${completenessKit.rxScalaType.typeSymbol.name} and Java ${completenessKit.rxJavaType.typeSymbol.name}
|
|| Java Method | Scala Method |
||-------------|--------------|""".stripMargin)
(for (((javaName, scalaCol), pairs) <- ps.groupBy(groupingKey).toList.sortBy(_._1._1)) yield {
"| " + formatJavaCol(javaName, pairs.map(_._1)) + " | " + formatScalaCol(scalaCol) + " |"
}).foreach(println(_))
}
def main(args: Array[String]): Unit = {
printMarkdownCorrespondenceTables()
}
}
| ReactiveX/RxScala | src/test/scala-2.11/rx/lang/scala/completeness/CompletenessTables.scala | Scala | apache-2.0 | 3,344 |
package mesosphere.marathon.core.task
import mesosphere.marathon.core.task.Task.Id
import mesosphere.marathon.core.task.bus.MarathonTaskStatus
import mesosphere.marathon.state.Timestamp
sealed trait TaskStateOp {
def taskId: Task.Id
/**
* The possible task state if processing the state op succeeds. If processing the
* state op fails, this state will never be persisted, so be cautious when using it.
*/
def possibleNewState: Option[Task] = None
}
object TaskStateOp {
  /** Launch (aka create) an ephemeral task */
// FIXME (3221): The type should be LaunchedEphemeral but that needs a lot of test adjustments
case class LaunchEphemeral(task: Task) extends TaskStateOp {
override def taskId: Id = task.taskId
override def possibleNewState: Option[Task] = Some(task)
}
/** Revert a task to the given state. Used in case TaskOps are rejected. */
case class Revert(task: Task) extends TaskStateOp {
override def taskId: Id = task.taskId
override def possibleNewState: Option[Task] = Some(task)
}
case class Reserve(task: Task.Reserved) extends TaskStateOp {
override def taskId: Id = task.taskId
override def possibleNewState: Option[Task] = Some(task)
}
case class LaunchOnReservation(
taskId: Task.Id,
appVersion: Timestamp,
status: Task.Status,
networking: Task.Networking) extends TaskStateOp
case class MesosUpdate(task: Task, status: MarathonTaskStatus, now: Timestamp) extends TaskStateOp {
override def taskId: Id = task.taskId
}
case class ReservationTimeout(taskId: Task.Id) extends TaskStateOp
/** Expunge a task whose TaskOp was rejected */
case class ForceExpunge(taskId: Task.Id) extends TaskStateOp
}
sealed trait TaskStateChange
object TaskStateChange {
case class Update(newState: Task, oldState: Option[Task]) extends TaskStateChange
case class Expunge(task: Task) extends TaskStateChange
case class NoChange(taskId: Task.Id) extends TaskStateChange
case class Failure(cause: Throwable) extends TaskStateChange
object Failure {
def apply(message: String): Failure = Failure(TaskStateChangeException(message))
}
}
case class TaskStateChangeException(message: String) extends Exception(message)
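// Illustrative only (persist/expunge/log are hypothetical): consumers of these ADTs
// typically fold over the outcome:
//   stateChange match {
//     case TaskStateChange.Update(newState, _) => persist(newState)
//     case TaskStateChange.Expunge(task)       => expunge(task.taskId)
//     case TaskStateChange.NoChange(_)         => () // nothing to store
//     case TaskStateChange.Failure(cause)      => log(cause)
//   }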
| vivekjuneja/marathon | src/main/scala/mesosphere/marathon/core/task/TaskStateOp.scala | Scala | apache-2.0 | 2,227 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.network
import java.io.IOException
import java.net._
import java.nio.channels._
import java.util
import java.util.concurrent._
import java.util.concurrent.atomic._
import com.yammer.metrics.core.Gauge
import kafka.cluster.EndPoint
import kafka.common.KafkaException
import kafka.metrics.KafkaMetricsGroup
import kafka.server.KafkaConfig
import kafka.utils._
import org.apache.kafka.common.MetricName
import org.apache.kafka.common.metrics._
import org.apache.kafka.common.network.{ChannelBuilders, InvalidReceiveException, ChannelBuilder, PlaintextChannelBuilder, SSLChannelBuilder}
import org.apache.kafka.common.security.ssl.SSLFactory
import org.apache.kafka.common.protocol.SecurityProtocol
import org.apache.kafka.common.protocol.types.SchemaException
import org.apache.kafka.common.utils.{SystemTime, Time, Utils}
import scala.collection._
import scala.util.control.{NonFatal, ControlThrowable}
/**
* An NIO socket server. The threading model is
* 1 Acceptor thread that handles new connections
* Acceptor has N Processor threads that each have their own selector and read requests from sockets
* M Handler threads that handle requests and produce responses back to the processor threads for writing.
*/
class SocketServer(val config: KafkaConfig, val metrics: Metrics, val time: Time) extends Logging with KafkaMetricsGroup {
val channelConfigs = config.channelConfigs
val endpoints = config.listeners
val numProcessorThreads = config.numNetworkThreads
val maxQueuedRequests = config.queuedMaxRequests
val sendBufferSize = config.socketSendBufferBytes
val recvBufferSize = config.socketReceiveBufferBytes
val maxRequestSize = config.socketRequestMaxBytes
val maxConnectionsPerIp = config.maxConnectionsPerIp
val connectionsMaxIdleMs = config.connectionsMaxIdleMs
val maxConnectionsPerIpOverrides = config.maxConnectionsPerIpOverrides
val totalProcessorThreads = numProcessorThreads * endpoints.size
this.logIdent = "[Socket Server on Broker " + config.brokerId + "], "
val requestChannel = new RequestChannel(totalProcessorThreads, maxQueuedRequests)
private val processors = new Array[Processor](totalProcessorThreads)
private[network] var acceptors = mutable.Map[EndPoint,Acceptor]()
private val allMetricNames = (0 until totalProcessorThreads).map { i =>
val tags = new util.HashMap[String, String]()
tags.put("networkProcessor", i.toString)
new MetricName("io-wait-ratio", "socket-server-metrics", tags)
}
/* I'm pushing the mapping of port-to-protocol to the processor level,
so the processor can put the correct protocol in the request channel.
we'll probably have a more elegant way of doing this once we patch the request channel
to include more information about security and authentication.
TODO: re-consider this code when working on KAFKA-1683
*/
private val portToProtocol: ConcurrentHashMap[Int, SecurityProtocol] = new ConcurrentHashMap[Int, SecurityProtocol]()
/**
* Start the socket server
*/
def startup() {
val quotas = new ConnectionQuotas(maxConnectionsPerIp, maxConnectionsPerIpOverrides)
this.synchronized {
var processorBeginIndex = 0
endpoints.values.foreach(endpoint => {
val acceptor = new Acceptor(endpoint.host, endpoint.port, sendBufferSize, recvBufferSize, config.brokerId, requestChannel, processors, processorBeginIndex, numProcessorThreads, quotas,
endpoint.protocolType, portToProtocol, channelConfigs, maxQueuedRequests, maxRequestSize, connectionsMaxIdleMs, metrics, allMetricNames, time)
acceptors.put(endpoint, acceptor)
Utils.newThread("kafka-socket-acceptor-%s-%d".format(endpoint.protocolType.toString, endpoint.port), acceptor, false).start()
acceptor.awaitStartup
processorBeginIndex += numProcessorThreads
})
}
newGauge("NetworkProcessorAvgIdlePercent",
new Gauge[Double] {
def value = allMetricNames.map( metricName =>
metrics.metrics().get(metricName).value()).sum / totalProcessorThreads
}
)
info("Started " + acceptors.size + " acceptor threads")
}
// register the processor threads for notification of responses
requestChannel.addResponseListener(id => processors(id).wakeup())
/**
* Shutdown the socket server
*/
def shutdown() = {
info("Shutting down")
this.synchronized {
acceptors.values.foreach(_.shutdown)
processors.foreach(_.shutdown)
}
info("Shutdown completed")
}
def boundPort(protocol: SecurityProtocol = SecurityProtocol.PLAINTEXT): Int = {
try {
acceptors(endpoints(protocol)).serverChannel.socket().getLocalPort
} catch {
case e: Exception => throw new KafkaException("Tried to check server's port before server was started or checked for port of non-existing protocol", e)
}
}
}
/**
* A base class with some helper variables and methods
*/
private[kafka] abstract class AbstractServerThread(connectionQuotas: ConnectionQuotas) extends Runnable with Logging {
private val startupLatch = new CountDownLatch(1)
private val shutdownLatch = new CountDownLatch(1)
private val alive = new AtomicBoolean(true)
def wakeup()
/**
* Initiates a graceful shutdown by signaling to stop and waiting for the shutdown to complete
*/
def shutdown(): Unit = {
alive.set(false)
wakeup()
shutdownLatch.await()
}
/**
* Wait for the thread to completely start up
*/
def awaitStartup(): Unit = startupLatch.await
/**
* Record that the thread startup is complete
*/
protected def startupComplete() = {
startupLatch.countDown()
}
/**
* Record that the thread shutdown is complete
*/
protected def shutdownComplete() = shutdownLatch.countDown()
/**
* Is the server still running?
*/
protected def isRunning = alive.get
/**
* Close the given key and associated socket
*/
def close(key: SelectionKey) {
if(key != null) {
key.attach(null)
close(key.channel.asInstanceOf[SocketChannel])
swallowError(key.cancel())
}
}
def close(channel: SocketChannel) {
if(channel != null) {
debug("Closing connection from " + channel.socket.getRemoteSocketAddress())
connectionQuotas.dec(channel.socket.getInetAddress)
swallowError(channel.socket().close())
swallowError(channel.close())
}
}
}
/**
* Thread that accepts and configures new connections. There is only need for one of these
*/
private[kafka] class Acceptor(val host: String,
private val port: Int,
val sendBufferSize: Int,
val recvBufferSize: Int,
brokerId: Int,
requestChannel: RequestChannel,
processors: Array[Processor],
processorBeginIndex: Int,
numProcessorThreads: Int,
connectionQuotas: ConnectionQuotas,
protocol: SecurityProtocol,
portToProtocol: ConcurrentHashMap[Int, SecurityProtocol],
channelConfigs: java.util.Map[String, Object],
maxQueuedRequests: Int,
maxRequestSize: Int,
connectionsMaxIdleMs: Long,
metrics: Metrics,
allMetricNames: Seq[MetricName],
time: Time) extends AbstractServerThread(connectionQuotas) with KafkaMetricsGroup {
val nioSelector = java.nio.channels.Selector.open()
val serverChannel = openServerSocket(host, port)
val processorEndIndex = processorBeginIndex + numProcessorThreads
portToProtocol.put(serverChannel.socket().getLocalPort, protocol)
this.synchronized {
for (i <- processorBeginIndex until processorEndIndex) {
processors(i) = new Processor(i,
time,
maxRequestSize,
numProcessorThreads,
requestChannel,
connectionQuotas,
connectionsMaxIdleMs,
protocol,
channelConfigs,
metrics
)
Utils.newThread("kafka-network-thread-%d-%s-%d".format(brokerId, protocol.name, i), processors(i), false).start()
}
}
/**
* Accept loop that checks for new connection attempts
*/
def run() {
serverChannel.register(nioSelector, SelectionKey.OP_ACCEPT);
startupComplete()
var currentProcessor = processorBeginIndex
try {
while (isRunning) {
try {
val ready = nioSelector.select(500)
if (ready > 0) {
val keys = nioSelector.selectedKeys()
val iter = keys.iterator()
while (iter.hasNext && isRunning) {
var key: SelectionKey = null
try {
key = iter.next
iter.remove()
if (key.isAcceptable)
accept(key, processors(currentProcessor))
else
throw new IllegalStateException("Unrecognized key state for acceptor thread.")
// round robin to the next processor thread
currentProcessor = (currentProcessor + 1) % processorEndIndex
if (currentProcessor < processorBeginIndex) currentProcessor = processorBeginIndex
} catch {
case e: Throwable => error("Error while accepting connection", e)
}
}
}
}
catch {
// We catch all the throwables to prevent the acceptor thread from exiting on exceptions due
          // to a select operation on a specific channel or a bad request. We don't want
          // the broker to stop responding to requests from other clients in these scenarios.
case e: ControlThrowable => throw e
case e: Throwable => error("Error occurred", e)
}
}
} finally {
debug("Closing server socket and selector.")
swallowError(serverChannel.close())
swallowError(nioSelector.close())
shutdownComplete()
}
}
/*
* Create a server socket to listen for connections on.
*/
def openServerSocket(host: String, port: Int): ServerSocketChannel = {
val socketAddress =
if(host == null || host.trim.isEmpty)
new InetSocketAddress(port)
else
new InetSocketAddress(host, port)
val serverChannel = ServerSocketChannel.open()
serverChannel.configureBlocking(false)
serverChannel.socket().setReceiveBufferSize(recvBufferSize)
try {
serverChannel.socket.bind(socketAddress)
info("Awaiting socket connections on %s:%d.".format(socketAddress.getHostName, serverChannel.socket.getLocalPort))
} catch {
case e: SocketException =>
throw new KafkaException("Socket server failed to bind to %s:%d: %s.".format(socketAddress.getHostName, port, e.getMessage), e)
}
serverChannel
}
/*
* Accept a new connection
*/
def accept(key: SelectionKey, processor: Processor) {
val serverSocketChannel = key.channel().asInstanceOf[ServerSocketChannel]
val socketChannel = serverSocketChannel.accept()
try {
connectionQuotas.inc(socketChannel.socket().getInetAddress)
socketChannel.configureBlocking(false)
socketChannel.socket().setTcpNoDelay(true)
socketChannel.socket().setKeepAlive(true)
socketChannel.socket().setSendBufferSize(sendBufferSize)
debug("Accepted connection from %s on %s. sendBufferSize [actual|requested]: [%d|%d] recvBufferSize [actual|requested]: [%d|%d]"
.format(socketChannel.socket.getInetAddress, socketChannel.socket.getLocalSocketAddress,
socketChannel.socket.getSendBufferSize, sendBufferSize,
socketChannel.socket.getReceiveBufferSize, recvBufferSize))
processor.accept(socketChannel)
} catch {
case e: TooManyConnectionsException =>
info("Rejected connection from %s, address already has the configured maximum of %d connections.".format(e.ip, e.count))
close(socketChannel)
}
}
/**
* Wakeup the thread for selection.
*/
@Override
def wakeup = nioSelector.wakeup()
}
/**
* Thread that processes all requests from a single connection. There are N of these running in parallel
* each of which has its own selectors
*/
private[kafka] class Processor(val id: Int,
val time: Time,
val maxRequestSize: Int,
val totalProcessorThreads: Int,
val requestChannel: RequestChannel,
connectionQuotas: ConnectionQuotas,
val connectionsMaxIdleMs: Long,
val protocol: SecurityProtocol,
val channelConfigs: java.util.Map[String, Object],
val metrics: Metrics) extends AbstractServerThread(connectionQuotas) with KafkaMetricsGroup {
private val newConnections = new ConcurrentLinkedQueue[SocketChannel]()
private val inflightResponses = mutable.Map[String, RequestChannel.Response]()
private val channelBuilder = ChannelBuilders.create(protocol, SSLFactory.Mode.SERVER, channelConfigs)
private val metricTags = new util.HashMap[String, String]()
metricTags.put("networkProcessor", id.toString)
newGauge("IdlePercent",
new Gauge[Double] {
def value = {
metrics.metrics().get(new MetricName("io-wait-ratio", "socket-server-metrics", metricTags)).value()
}
},
JavaConversions.mapAsScalaMap(metricTags)
)
private val selector = new org.apache.kafka.common.network.Selector(
maxRequestSize,
connectionsMaxIdleMs,
metrics,
time,
"socket-server",
metricTags,
false,
channelBuilder)
override def run() {
startupComplete()
while(isRunning) {
try {
// setup any new connections that have been queued up
configureNewConnections()
// register any new responses for writing
processNewResponses()
try {
selector.poll(300)
} catch {
case e @ (_: IllegalStateException | _: IOException) => {
error("Closing processor %s due to illegal state or IO exception".format(id))
swallow(closeAll())
shutdownComplete()
throw e
}
case e: InvalidReceiveException =>
// Log warning and continue since Selector already closed the connection
warn("Connection was closed due to invalid receive. Processor will continue handling other connections")
}
collection.JavaConversions.collectionAsScalaIterable(selector.completedReceives).foreach(receive => {
try {
val channel = selector.channelForId(receive.source);
val session = RequestChannel.Session(channel.principal, channel.socketDescription)
val req = RequestChannel.Request(processor = id, connectionId = receive.source, session = session, buffer = receive.payload, startTimeMs = time.milliseconds, securityProtocol = protocol)
requestChannel.sendRequest(req)
} catch {
case e @ (_: InvalidRequestException | _: SchemaException) => {
// note that even though we got an exception, we can assume that receive.source is valid. Issues with constructing a valid receive object were handled earlier
error("Closing socket for " + receive.source + " because of error", e)
selector.close(receive.source)
}
}
selector.mute(receive.source)
})
collection.JavaConversions.iterableAsScalaIterable(selector.completedSends()).foreach(send => {
val resp = inflightResponses.remove(send.destination()).get
resp.request.updateRequestMetrics()
selector.unmute(send.destination())
})
} catch {
// We catch all the throwables here to prevent the processor thread from exiting. We do this because
// letting a processor exit might cause bigger impact on the broker. Usually the exceptions thrown would
// be either associated with a specific socket channel or a bad request. We just ignore the bad socket channel
// or request. This behavior might need to be reviewed if we see an exception that need the entire broker to stop.
case e : ControlThrowable => throw e
case e : Throwable =>
error("Processor got uncaught exception.", e)
}
}
debug("Closing selector - processor " + id)
swallowError(closeAll())
shutdownComplete()
}
private def processNewResponses() {
var curr = requestChannel.receiveResponse(id)
while(curr != null) {
try {
curr.responseAction match {
case RequestChannel.NoOpAction => {
// There is no response to send to the client, we need to read more pipelined requests
// that are sitting in the server's socket buffer
curr.request.updateRequestMetrics
trace("Socket server received empty response to send, registering for read: " + curr)
selector.unmute(curr.request.connectionId)
}
case RequestChannel.SendAction => {
trace("Socket server received response to send, registering for write and sending data: " + curr)
selector.send(curr.responseSend)
inflightResponses += (curr.request.connectionId -> curr)
}
case RequestChannel.CloseConnectionAction => {
curr.request.updateRequestMetrics
trace("Closing socket connection actively according to the response code.")
selector.close(curr.request.connectionId)
}
}
} finally {
curr = requestChannel.receiveResponse(id)
}
}
}
/**
* Queue up a new connection for reading
*/
def accept(socketChannel: SocketChannel) {
newConnections.add(socketChannel)
wakeup()
}
/**
* Register any new connections that have been queued up
*/
private def configureNewConnections() {
while(!newConnections.isEmpty) {
val channel = newConnections.poll()
try {
debug("Processor " + id + " listening to new connection from " + channel.socket.getRemoteSocketAddress)
val localHost = channel.socket().getLocalAddress.getHostAddress
val localPort = channel.socket().getLocalPort
val remoteHost = channel.socket().getInetAddress.getHostAddress
val remotePort = channel.socket().getPort
val connectionId = localHost + ":" + localPort + "-" + remoteHost + ":" + remotePort
selector.register(connectionId, channel)
} catch {
// We explicitly catch all non fatal exceptions and close the socket to avoid socket leak. The other
// throwables will be caught in processor and logged as uncaught exception.
case NonFatal(e) =>
// need to close the channel here to avoid socket leak.
close(channel)
error("Processor " + id + " closed connection from " + channel.getRemoteAddress, e)
}
}
}
/**
* Close all open connections
*/
def closeAll() {
selector.close()
}
/**
* Wakeup the thread for selection.
*/
@Override
def wakeup = selector.wakeup()
}
class ConnectionQuotas(val defaultMax: Int, overrideQuotas: Map[String, Int]) {
private val overrides = overrideQuotas.map(entry => (InetAddress.getByName(entry._1), entry._2))
private val counts = mutable.Map[InetAddress, Int]()
def inc(addr: InetAddress) {
counts synchronized {
val count = counts.getOrElse(addr, 0)
counts.put(addr, count + 1)
val max = overrides.getOrElse(addr, defaultMax)
if(count >= max)
throw new TooManyConnectionsException(addr, max)
}
}
def dec(addr: InetAddress) {
counts synchronized {
val count = counts.get(addr).get
if(count == 1)
counts.remove(addr)
else
counts.put(addr, count - 1)
}
}
}
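// e.g. new ConnectionQuotas(defaultMax = 10, Map("10.1.2.3" -> 100)) allows up to 100
// concurrent connections from 10.1.2.3 and 10 from any other address; once an address
// is at its cap, the next inc() throws the TooManyConnectionsException defined below.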
class TooManyConnectionsException(val ip: InetAddress, val count: Int) extends KafkaException("Too many connections from %s (maximum = %d)".format(ip, count))
| usakey/kafka | core/src/main/scala/kafka/network/SocketServer.scala | Scala | apache-2.0 | 21,242 |
package com.arcusys.learn.liferay.util
import com.arcusys.learn.liferay.LiferayClasses._
import com.arcusys.learn.liferay.services.ServiceContextHelper
import com.liferay.portal.kernel.notifications.UserNotificationManagerUtil
import com.liferay.portal.model.{MembershipRequestConstants, UserNotificationDeliveryConstants}
import com.liferay.portal.service.UserNotificationEventLocalServiceUtil
import org.joda.time.DateTime
object UserNotificationEventLocalServiceHelper {
def addUserNotificationEvent(userId: Long,
activityType: String,
timestamp: Long,
deliverBy: Long,
payload: String,
archived: Boolean,
serviceContext: LServiceContext
): Unit = {
val classNameId = 0
if (UserNotificationManagerUtil.isDeliver(userId,
activityType,
classNameId,
MembershipRequestConstants.STATUS_PENDING,
UserNotificationDeliveryConstants.TYPE_WEBSITE)) {
UserNotificationEventLocalServiceUtil.addUserNotificationEvent(
userId,
activityType,
timestamp,
deliverBy,
payload,
archived,
serviceContext
)
}
}
  def sendNotification(model: String, userId: Long, activityType: String): Unit = {
val serviceContext = Option(ServiceContextHelper.getServiceContext)
.getOrElse(new LServiceContext())
addUserNotificationEvent(userId,
      activityType,
DateTime.now().getMillis,
userId,
model,
false,
serviceContext
)
}
}
| arcusys/Valamis | learn-liferay620-services/src/main/scala/com/arcusys/learn/liferay/util/UserNotificationEventLocalServiceHelper.scala | Scala | gpl-3.0 | 1,677 |
package io.github.shogowada.scalajs.reactjs.example.todoappredux
import io.github.shogowada.scalajs.reactjs.ReactDOM
import io.github.shogowada.scalajs.reactjs.VirtualDOM._
import io.github.shogowada.scalajs.reactjs.redux.ReactRedux._
import io.github.shogowada.scalajs.reactjs.redux.Redux
import org.scalajs.dom
import scala.scalajs.js.JSApp
/*
* If you are not familiar with react-redux yet, please check it out first:
*
* - http://redux.js.org/docs/basics/UsageWithReact.html
* */
object Main extends JSApp {
override def main(): Unit = {
val mountNode = dom.document.getElementById("mount-node")
val store = Redux.createStore(Reducer.reduce)
/*
* Import the following to access the Provider:
*
* - import io.github.shogowada.scalajs.reactjs.VirtualDOM._
* - import io.github.shogowada.scalajs.reactjs.redux.ReactRedux._
* */
ReactDOM.render(
<.Provider(^.store := store)(
App()
),
mountNode
)
}
}
| shogowada/scalajs-reactjs | example/todo-app-redux/src/main/scala/io/github/shogowada/scalajs/reactjs/example/todoappredux/Main.scala | Scala | mit | 986 |
import scala.annotation.StaticAnnotation
import scala.meta._
class companion extends StaticAnnotation {
inline def apply(stats: Any): Any = meta {
def extractClass(classDefn: Defn.Class): Stat = {
val q"""
..$mods class $tname[..$tparams] ..$ctorMods (...$paramss) extends { ..$earlyStats } with ..$ctorcalls {
$selfParam =>
..$stats
}
""" = classDefn
q"""
..$mods class $tname[..$tparams] ..$ctorMods (...$paramss) extends { ..$earlyStats } with ..$ctorcalls {
$selfParam =>
..$stats
}
"""
}
def extractObj(objDefn: Defn.Object): Stat = {
val q"""
..$mods object $tname extends { ..$earlyStats } with ..$ctorcalls {
$selfParam =>
..$stats
}
""" = objDefn
q"""
..$mods object $tname extends { ..$earlyStats } with ..$ctorcalls {
$selfParam =>
..$stats
}
"""
}
stats match {
case Term.Block(Seq(classDefn: Defn.Class, objDefn: Defn.Object)) =>
Term.Block(scala.collection.immutable.Seq(extractClass(classDefn), extractObj(objDefn)))
case classDefn: Defn.Class => extractClass(classDefn)
}
}
}
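// Usage sketch (hypothetical): the expansion is an identity round-trip through the
// quasiquotes above, whether the annottee is a lone class or a class/companion pair
// (the pair arrives as Term.Block(Seq(classDefn, objDefn))):
//   @companion class Foo(x: Int)
//   @companion class Bar(y: Int); object Bar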
| xeno-by/paradise | tests/meta/src/main/scala/companion.scala | Scala | bsd-3-clause | 1,237 |
package com.twitter.finagle.netty4.http
import com.twitter.finagle.http.AbstractMultipartDecoderTest
class Netty4MultipartDecoderTest extends AbstractMultipartDecoderTest(new Netty4MultipartDecoder)
| mkhq/finagle | finagle-netty4-http/src/test/scala/com/twitter/finagle/netty4/http/Netty4MultipartDecoderTest.scala | Scala | apache-2.0 | 201 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.protocol.v5.handler
import akka.actor.{ActorSystem, Props, ActorRef, ActorSelection}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.kernel.protocol.v5Test._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import test.utils.MaxAkkaTestTimeout
class GenericSocketMessageHandlerSpec extends TestKit(ActorSystem("GenericSocketMessageHandlerSystem"))
with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
describe("GenericSocketMessageHandler( ActorLoader, SocketType )") {
// Create a mock ActorLoader for the Relay we are going to test
val actorLoader: ActorLoader = mock[ActorLoader]
// Create a probe for the ActorSelection that the ActorLoader will return
val selectionProbe: TestProbe = TestProbe()
val selection: ActorSelection = system.actorSelection(selectionProbe.ref.path.toString)
when(actorLoader.load(SocketType.Control)).thenReturn(selection)
// The Relay we are going to be testing against
val genericHandler: ActorRef = system.actorOf(
Props(classOf[GenericSocketMessageHandler], actorLoader, SocketType.Control)
)
describe("#receive( KernelMessage )") {
genericHandler ! MockKernelMessage
it("should send the message to the selected actor"){
selectionProbe.expectMsg(MaxAkkaTestTimeout, MockKernelMessage)
}
}
}
}
| ericchang/incubator-toree | kernel/src/test/scala/org/apache/toree/kernel/protocol/v5/handler/GenericSocketMessageHandlerSpec.scala | Scala | apache-2.0 | 2,385 |
package net.fwbrasil.activate.util
import net.fwbrasil.scala._
object CollectionUtil {
def combine[T](lists: Seq[Seq[T]]) =
(if (lists.nonEmpty)
((lists.map(_.map(Seq(_))))
.reduceLeft((xs, ys) => for { x <- xs; y <- ys } yield x ++ y).toList)
else List(List[T]()))
.asInstanceOf[List[List[T]]]
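    // e.g. combine(List(List(1, 2), List(3))) == List(List(1, 3), List(2, 3)), the n-ary
    // cartesian product; combine(Nil) == List(List())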
def toTuple[T](seq: Seq[_]) =
(seq.size match {
case 1 =>
seq(0)
case 2 =>
Tuple2(seq(0), seq(1))
case 3 =>
Tuple3(seq(0), seq(1), seq(2))
case 4 =>
Tuple4(seq(0), seq(1), seq(2), seq(3))
case 5 =>
Tuple5(seq(0), seq(1), seq(2), seq(3), seq(4))
case 6 =>
Tuple6(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5))
case 7 =>
Tuple7(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6))
case 8 =>
Tuple8(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7))
case 9 =>
Tuple9(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8))
case 10 =>
Tuple10(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9))
case 11 =>
Tuple11(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10))
case 12 =>
Tuple12(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11))
case 13 =>
Tuple13(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12))
case 14 =>
Tuple14(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13))
case 15 =>
Tuple15(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14))
case 16 =>
Tuple16(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15))
case 17 =>
Tuple17(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16))
case 18 =>
Tuple18(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17))
case 19 =>
Tuple19(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18))
case 20 =>
Tuple20(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19))
case 21 =>
Tuple21(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20))
case 22 =>
Tuple22(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21))
case 23 =>
new Tuple23(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21), seq(22))
case 24 =>
new Tuple24(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21), seq(22), seq(23))
case 25 =>
new Tuple25(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21), seq(22), seq(23), seq(24))
case 26 =>
new Tuple26(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21), seq(22), seq(23), seq(24), seq(25))
case 27 =>
new Tuple27(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21), seq(22), seq(23), seq(24), seq(25), seq(26))
case 28 =>
new Tuple28(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21), seq(22), seq(23), seq(24), seq(25), seq(26), seq(27))
case 29 =>
new Tuple29(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21), seq(22), seq(23), seq(24), seq(25), seq(26), seq(27), seq(28))
case 30 =>
new Tuple30(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21), seq(22), seq(23), seq(24), seq(25), seq(26), seq(27), seq(28), seq(29))
case 31 =>
new Tuple31(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21), seq(22), seq(23), seq(24), seq(25), seq(26), seq(27), seq(28), seq(29), seq(30))
case 32 =>
new Tuple32(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21), seq(22), seq(23), seq(24), seq(25), seq(26), seq(27), seq(28), seq(29), seq(30), seq(31))
case 33 =>
new Tuple33(seq(0), seq(1), seq(2), seq(3), seq(4), seq(5), seq(6), seq(7), seq(8), seq(9), seq(10), seq(11), seq(12), seq(13), seq(14), seq(15), seq(16), seq(17), seq(18), seq(19), seq(20), seq(21), seq(22), seq(23), seq(24), seq(25), seq(26), seq(27), seq(28), seq(29), seq(30), seq(31), seq(32))
            case _ =>
                throw new IllegalStateException("Seq has too many items to be a tuple!")
}).asInstanceOf[T]
} | xdevelsistemas/activate | activate-core/src/main/scala/net/fwbrasil/activate/util/CollectionUtil.scala | Scala | lgpl-2.1 | 6,977 |
/*
* Copyright 2012 Pellucid and Zenexity
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package datomisca
import org.scalatest.{FlatSpec, Matchers, OptionValues}
class AggregatesSpec
extends FlatSpec
with Matchers
with OptionValues
with DatomicFixture
{
val countObjects = Query("""
[:find (count ?e)
:where [?e :object/name ?n]]
""")
val findLargestRadius = Query("""
[:find (max ?radius)
:where [_ :object/meanRadius ?radius]]
""")
val findSmallestRadius = Query("""
[:find (min ?radius)
:where [_ :object/meanRadius ?radius]]
""")
val findAverageRadius = Query("""
[:find (avg ?radius)
:with ?e
:where [?e :object/meanRadius ?radius]]
""")
val findMedianRadius = Query("""
[:find (median ?radius)
:with ?e
:where [?e :object/meanRadius ?radius]]
""")
val findStdDevOfRadius = Query("""
[:find (stddev ?radius)
:with ?e
:where [?e :object/meanRadius ?radius]]
""")
val findRandomObject = Query("""
[:find (rand ?name)
:where [?e :object/name ?name]]
""")
val findSmallest3 = Query("""
[:find (min 3 ?radius)
:with ?e
:where [?e :object/meanRadius ?radius]]
""")
val findLargest3 = Query("""
[:find (max 3 ?radius)
:with ?e
:where [?e :object/meanRadius ?radius]]
""")
val findRandom5 = Query("""
[:find (rand 5 ?name)
:with ?e
:where [?e :object/name ?name]]
""")
val choose5 = Query("""
[:find (sample 5 ?name)
:with ?e
:where [?e :object/name ?name]]
""")
val findAvgObjectNameLength = Query("""
[:find (avg ?length)
:with ?e
:where
[?e :object/name ?name]
[(count ?name) ?length]]
""")
val countAttributesAndValueTypesInSchema = Query("""
[:find (count ?a) (count-distinct ?vt)
:where
[?a :db/ident ?ident]
[?a :db/valueType ?vt]]
""")
"Aggregates examples" should "run to completion" in withSampleDatomicDB(PlutoSampleData) { conn =>
val db = conn.database()
Datomic.q(countObjects, db).headOption.value should equal (17)
Datomic.q(findLargestRadius, db).headOption.value should equal (696000.0)
Datomic.q(findSmallestRadius, db).headOption.value should equal (1163.0)
Datomic.q(findAverageRadius, db).headOption.value.asInstanceOf[Double] should equal (53390.176 +- 0.0005)
Datomic.q(findMedianRadius, db).headOption.value should equal (2631.2)
Datomic.q(findStdDevOfRadius, db).headOption.value.asInstanceOf[Double] should equal (161902.528 +- 0.0005)
Datomic.q(findRandomObject, db) should have size (1)
Datomic.q(findSmallest3, db).headOption.value match {
case coll: Iterable[_] =>
coll should contain allOf (1163.0, 1353.4, 1561.0)
case _ => fail
}
Datomic.q(findLargest3, db).headOption.value match {
case coll: Iterable[_] =>
coll should contain allOf (696000.0, 69911.0, 58232.0)
case _ => fail
}
Datomic.q(findRandom5, db) should have size (1)
Datomic.q(choose5, db) should have size (1)
Datomic.q(findAvgObjectNameLength, db).headOption.value.asInstanceOf[Double] should equal (5.471 +- 0.0005)
Datomic.q(countAttributesAndValueTypesInSchema, db) should have size (1)
}
}
| Enalmada/datomisca | integration/src/it/scala/datomisca/AggregatesSpec.scala | Scala | apache-2.0 | 3,789 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import com.google.inject.{Inject, Singleton}
import config.{ApplicationConfig, GmpContext, GmpSessionCache}
import controllers.auth.AuthAction
import forms.BulkReferenceForm
import play.api.Logging
import play.api.mvc.MessagesControllerComponents
import services.SessionService
import uk.gov.hmrc.auth.core.AuthConnector
import uk.gov.hmrc.play.audit.http.connector.AuditConnector
import views.Views
import scala.concurrent.{ExecutionContext, Future}
@Singleton
class BulkReferenceController @Inject()(authAction: AuthAction,
val authConnector: AuthConnector,
auditConnector : AuditConnector,
                                        sessionService: SessionService, implicit val config: GmpContext, brf: BulkReferenceForm,
                                        override val messagesControllerComponents: MessagesControllerComponents,
                                        implicit val executionContext: ExecutionContext, ac: ApplicationConfig,
implicit val gmpSessionCache: GmpSessionCache,
views: Views)
  extends GmpController(messagesControllerComponents, ac, sessionService, config) with Logging {
lazy val bulkReferenceForm = brf.bulkReferenceForm
def get = authAction.async {
implicit request =>
Future.successful(Ok(views.bulkReference(bulkReferenceForm)))
}
def post = authAction.async {
implicit request => {
logger.debug(s"[BulkReferenceController][post]: ${request.body}")
bulkReferenceForm.bindFromRequest().fold(
formWithErrors => {Future.successful(BadRequest(views.bulkReference(formWithErrors)))},
value => {
sessionService.cacheEmailAndReference(Some(value.email.trim), Some(value.reference.trim)).map {
          case Some(_) => Redirect(controllers.routes.BulkRequestReceivedController.get)
          case _ => throw new RuntimeException("failed to cache email and reference")
}
}
)
}
}
def back = authAction.async {
_ => {
Future.successful(Redirect(routes.FileUploadController.get))
}
}
}
| hmrc/gmp-frontend | app/controllers/BulkReferenceController.scala | Scala | apache-2.0 | 2,809 |
package net.aicomp.terraforming.scene
import net.aicomp.terraforming.entity.GameEnvironment
import net.aicomp.terraforming.util.settings.Defaults
import net.exkazuu.gameaiarena.gui.DefaultScene
abstract class AbstractScene extends DefaultScene[GameEnvironment] {
def env = getEnvironment
def renderer = env.getRenderer
def inputer = env.getInputer
def game = env.game
final def display(text: String) {
if (AbstractScene.display != null) {
AbstractScene.display(text)
}
}
final def displayLine(text: String) {
display(text + Defaults.NEW_LINE)
}
final def describe(sceneDescription: String) {
val dashes = "-" * 20
displayLine(dashes + sceneDescription + dashes)
}
}
object AbstractScene {
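  // Globally injected output sink; while it stays null, display() calls are silently dropped.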
var display: (String => Unit) = null
} | AI-comp/Terraforming | src/main/scala/net/aicomp/terraforming/scene/AbstractScene.scala | Scala | apache-2.0 | 781 |
package tv.camfire.jetty.server.session
import org.eclipse.jetty.server.session.AbstractSessionIdManager
import javax.servlet.http.{HttpSession, HttpServletRequest}
/**
* User: jonathan
* Date: 6/5/13
* Time: 4:30 PM
*/
class RedisRailsSessionIdManager()
extends AbstractSessionIdManager {
def getClusterId(nodeId: String): String = nodeId
def getNodeId(clusterId: String, request: HttpServletRequest): String = clusterId
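  // Cluster id and node id are treated as identical here, presumably because Rails-issued
  // session ids carry no per-worker suffix to strip or append.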
  /**
   * NOTE: These bodies are empty because sessions are only 'fake'-added here. Rails is the
   * place where sessions actually get created/destroyed.
   */
def removeSession(session: HttpSession) {}
def addSession(session: HttpSession) {}
  /**
   * NOTE: The methods below are intended for adding and removing sessions. In this
   * implementation we only want to access what Rails provides, so we do not manage any of
   * this in Java land. We ensure none of these methods gets called by including a filter
   * that rejects requests that do not already have a session associated with them.
   */
def renewSessionId(oldClusterId: String, oldNodeId: String, request: HttpServletRequest) {
// throw new UnsupportedOperationException()
}
def idInUse(id: String): Boolean = true
// def idInUse(id: String): Boolean = throw new UnsupportedOperationException()
def invalidateAll(id: String) {}
// def invalidateAll(id: String) {throw new UnsupportedOperationException()}
}
| jgrowl/jetty-session-redis-json-readonly | src/main/scala/tv/camfire/jetty/server/session/RedisRailsSessionIdManager.scala | Scala | mit | 1,435 |
package tv.camfire.media_server.config
import tv.camfire.media_server.controllers.SignalController
import tv.camfire.media_server.listener.SessionListener
/**
* User: jonathan
* Date: 7/22/13
* Time: 7:01 PM
*/
trait ServletModule extends LogicModule {
lazy val signalController: SignalController = wire[SignalController]
lazy val sessionListener: SessionListener = wire[SessionListener]
}
| jgrowl/camfire-signaling | signaling-server/src/main/scala/tv/camfire/media_server/config/ServletModule.scala | Scala | mit | 400 |
package mapmartadero
package config
import net.liftweb._
import common._
import http._
import json._
import util.Props
import net.liftweb.squerylrecord.RecordTypeMode
import RecordTypeMode._
import java.sql.DriverManager
import org.squeryl.{Schema, Table}
import model._
import org.squeryl.adapters.MySQLInnoDBAdapter
object SquerylConfig extends Factory with Loggable {
private def initMysql(schema: () => Schema*) {
Class.forName("com.mysql.jdbc.Driver")
import org.squeryl.adapters.H2Adapter
import net.liftweb.squerylrecord.SquerylRecord
import org.squeryl.Session
def connection = DriverManager.getConnection(
Props.get("db.url", "jdbc:mysql://localhost/test?characterEncoding=latin1&autoReconnect=true"),
Props.get("db.user", "root"),
Props.get("db.password", "mysql"))
SquerylRecord.initWithSquerylSession(Session.create(connection, new MySQLInnoDBAdapter))
inTransaction {
try {
schema.map(s => {
// s().create
// s().printDdl
})
} catch {
        case e: Throwable =>
          e.printStackTrace()
          throw e
}
}
LiftRules.liftRequest.append({
      case Req("console" :: _, _, _) => false
})
}
def init = {
initMysql(() => DbSchema)
}
}
| jgenso/mapamartadero | src/main/scala/mapmartadero/config/SquerylConfig.scala | Scala | apache-2.0 | 1,274 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.notifications
import org.joda.time.{DateTime, DateTimeZone}
import org.scalatestplus.play.PlaySpec
import play.api.libs.json._
import utils.AmlsSpec
class NotificationRowSpec extends PlaySpec with AmlsSpec {
val testNotifications = NotificationRow(
Some(
Status(
Some(StatusType.Revoked),
Some(RevokedReason.RevokedCeasedTrading))
),
None,
None,
false,
DateTime.now(),
false,
"XJML00000200000",
"v1m0",
IDType("1234567")
)
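  // Shared fixture: a revoked-status notification row; individual tests derive variants via copy().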
"NotificationRows " must {
"read/write Contact types Json successfully" in {
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.RejectionReasons)) must be(JsSuccess(ContactType.RejectionReasons))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.RevocationReasons)) must be(JsSuccess(ContactType.RevocationReasons))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.MindedToReject)) must be(JsSuccess(ContactType.MindedToReject))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.NoLongerMindedToReject)) must be(JsSuccess(ContactType.NoLongerMindedToReject))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.MindedToRevoke)) must be(JsSuccess(ContactType.MindedToRevoke))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.NoLongerMindedToRevoke)) must be(JsSuccess(ContactType.NoLongerMindedToRevoke))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.Others)) must be(JsSuccess(ContactType.Others))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.ApplicationApproval)) must be(JsSuccess(ContactType.ApplicationApproval))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.RenewalApproval)) must be(JsSuccess(ContactType.RenewalApproval))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.AutoExpiryOfRegistration)) must be(JsSuccess(ContactType.AutoExpiryOfRegistration))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.RenewalReminder)) must be(JsSuccess(ContactType.RenewalReminder))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.ReminderToPayForApplication)) must be(JsSuccess(ContactType.ReminderToPayForApplication))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.ReminderToPayForRenewal)) must be(JsSuccess(ContactType.ReminderToPayForRenewal))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.ReminderToPayForVariation)) must be(JsSuccess(ContactType.ReminderToPayForVariation))
ContactType.jsonReads.reads(ContactType.jsonWrites.writes(ContactType.ReminderToPayForManualCharges)) must be(JsSuccess(ContactType.ReminderToPayForManualCharges))
}
"fail with error when status value is passed incorrectly" in {
      ContactType.jsonReads.reads(JsString("RPM1RPM1")) must be(JsError(List((JsPath \ "contact_type", List(play.api.libs.json.JsonValidationError("error.invalid"))))))
}
"format the date for the table of messages" in {
testNotifications.copy(receivedAt = new DateTime(2017, 12, 1, 3, 3, DateTimeZone.UTC)).dateReceived mustBe "1 December 2017"
}
"read and write json successfully" in {
val model = NotificationRow(
Some(
Status(
Some(StatusType.Revoked),
Some(RevokedReason.RevokedCeasedTrading)
)),
Some(ContactType.MindedToRevoke),
None,
false,
new DateTime(1479730062573L, DateTimeZone.UTC),
false,
"XJML00000200000",
"1",
new IDType("5832e38e01000001005ca3ff")
)
val json = Json.parse(
"""
|{
| "status":{
| "status_type":"08",
| "status_reason":"02"},
| "contactType":"MTRV",
| "variation":false,
| "receivedAt":{"$date":1479730062573},
| "amlsRegistrationNumber":"XJML00000200000",
| "isRead":false,
| "templatePackageVersion":"1",
| "_id":{"$oid":"5832e38e01000001005ca3ff"}}
|
""".stripMargin)
NotificationRow.format.reads(json) must be(JsSuccess(model))
}
"return application failure subject line" when {
val notificationRow = NotificationRow(
None,
Some(ContactType.RejectionReasons),
None,
false,
new DateTime(1479730062573L, DateTimeZone.UTC),
false,
"XJML00000200000",
"1",
      new IDType("5832e38e01000001005ca3ff")
    )
"status reason is 2" in {
notificationRow.copy(
status = Some(
Status(
Some(StatusType.Rejected),
Some(RejectedReason.FailedToRespond)
))
).subject must be("notifications.fail.title")
}
"status reason is 3" when {
"contact number & contact type are present" in {
notificationRow.copy(
status = Some(
Status(
Some(StatusType.Rejected),
Some(RejectedReason.FailedToPayCharges)
))
).subject must be("notifications.fail.title")
}
"contact number & contact type are absent" in {
notificationRow.copy(
status = Some(
Status(
Some(StatusType.Rejected),
Some(RejectedReason.FailedToPayCharges)
)),
contactType = None
).subject must be("notifications.fail.title")
}
}
"status reason is 98" in {
notificationRow.copy(
status = Some(
Status(
Some(StatusType.Rejected),
Some(RejectedReason.OtherFailed)
))
).subject must be("notifications.fail.title")
}
"default" in {
notificationRow.subject must be("notifications.fail.title")
}
}
"return application refusal subject line" when {
val notificationRow = NotificationRow(
None,
Some(ContactType.RejectionReasons),
None,
false,
new DateTime(1479730062573L, DateTimeZone.UTC),
false,
"XJML00000200000",
"1",
      new IDType("5832e38e01000001005ca3ff")
    )
"status reason is 1" in {
notificationRow.copy(
status = Some(
Status(
Some(StatusType.Rejected),
Some(RejectedReason.NonCompliant)
))
).subject must be("notifications.rejr.title")
}
"status reason is 4" in {
notificationRow.copy(
status = Some(
Status(
Some(StatusType.Rejected),
Some(RejectedReason.FitAndProperFailure)
))
).subject must be("notifications.rejr.title")
}
"status reason is 99" in {
notificationRow.copy(
status = Some(
Status(
Some(StatusType.Rejected),
Some(RejectedReason.OtherRefused)
))
).subject must be("notifications.rejr.title")
}
}
}
"IDType" must {
"read json successfully" in {
IDType.read.reads(Json.parse("""{"$oid":"someId"}""")) must be(JsSuccess(IDType("someId"), JsPath \\ "$oid"))
}
"write json successfully" in {
IDType.write.writes(IDType("someId")) must be(Json.parse("""{"$oid":"someId"}"""))
}
}
}
| hmrc/amls-frontend | test/models/notifications/NotificationRowSpec.scala | Scala | apache-2.0 | 8,139 |
package io.vamp.model.artifact
import io.vamp.common.{ Artifact, Reference }
import io.vamp.model.reader.{ MegaByte, Quantity }
object Blueprint {
val kind: String = "blueprints"
}
abstract class Blueprint extends Artifact {
val kind: String = Blueprint.kind
}
trait AbstractBlueprint extends Blueprint {
def name: String
def clusters: List[AbstractCluster]
def gateways: List[Gateway]
def environmentVariables: List[EnvironmentVariable]
def traits: List[Trait]
def dialects: Map[String, Any]
}
case class DefaultBlueprint(
name: String,
metadata: Map[String, Any],
clusters: List[Cluster],
gateways: List[Gateway],
environmentVariables: List[EnvironmentVariable],
dialects: Map[String, Any] = Map()
) extends AbstractBlueprint {
lazy val traits: List[Trait] = environmentVariables
}
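// A blueprint reference points at another stored blueprint by name instead of inlining its definition.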
case class BlueprintReference(name: String) extends Blueprint with Reference
abstract class AbstractCluster extends Artifact {
val kind: String = "clusters"
def services: List[AbstractService]
def gateways: List[Gateway]
def network: Option[String]
def sla: Option[Sla]
def dialects: Map[String, Any]
def gatewayBy(portName: String): Option[Gateway] = gateways.find(_.port.name == portName)
def healthChecks: Option[List[HealthCheck]]
}
case class Cluster(
name: String,
metadata: Map[String, Any],
services: List[Service],
gateways: List[Gateway],
healthChecks: Option[List[HealthCheck]],
network: Option[String] = None,
sla: Option[Sla] = None,
dialects: Map[String, Any] = Map()
) extends AbstractCluster
abstract class AbstractService {
val kind: String = "services"
def breed: Breed
def environmentVariables: List[EnvironmentVariable]
def scale: Option[Scale]
def arguments: List[Argument]
  /** A service can contain zero or more health checks that will be created when the Blueprint gets deployed */
def healthChecks: Option[List[HealthCheck]]
def network: Option[String]
def dialects: Map[String, Any]
def health: Option[Health]
}
case class Service(
breed: Breed,
environmentVariables: List[EnvironmentVariable],
scale: Option[Scale],
arguments: List[Argument],
healthChecks: Option[List[HealthCheck]],
network: Option[String] = None,
dialects: Map[String, Any] = Map(),
health: Option[Health] = None
) extends AbstractService
object Scale {
val kind: String = "scales"
}
trait Scale extends Artifact {
val kind: String = Scale.kind
}
case class ScaleReference(name: String) extends Reference with Scale
object DefaultScale {
def apply(cpu: Quantity, memory: MegaByte, instances: Int): DefaultScale = DefaultScale(name = "", metadata = Map(), cpu, memory, instances)
def apply(instances: Int = 0): DefaultScale = DefaultScale(name = "", metadata = Map(), cpu = Quantity(0.0), memory = MegaByte(0.0), instances)
}
case class DefaultScale(name: String, metadata: Map[String, Any], cpu: Quantity, memory: MegaByte, instances: Int) extends Scale
/**
* Representation of the Health retrieved from a Deployment.
* @param staged number of instances in a staged state.
* @param running number of instances in a running state.
* @param healthy number of instances in a healthy state.
* @param unhealthy number of instances in an unhealthy state.
*/
case class Health(staged: Int, running: Int, healthy: Int, unhealthy: Int)
| magneticio/vamp | model/src/main/scala/io/vamp/model/artifact/Blueprint.scala | Scala | apache-2.0 | 3,636 |
package repository
import domain.util.crypto.Aes
import repository.model.scalatrader.User
import scalikejdbc.AutoSession
import scalikejdbc._
object UserRepository {
def get(email: String, secret: String): Option[User] = {
implicit val session = AutoSession
sql"select * from user where email = ${email}".map(map(secret)).single().apply()
}
def all(secret: String): Seq[User] = {
implicit val session = AutoSession
sql"select * from user".map(map(secret)).list().apply()
}
def everyoneWithApiKey(secret: String): Seq[User] = {
all(secret).filter(user => notEmpty(user.api_key) && notEmpty(user.api_secret))
}
  def notEmpty(str: String): Boolean = str != null && str.nonEmpty
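  // Row mapper that decrypts the stored API credentials with the application secret while
  // building each User; credentials rest encrypted and are only decoded on read.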
private def map = { secret: String => (rs: WrappedResultSet) =>
{
User(
rs.long("id"),
rs.string("email"),
rs.string("password"),
rs.string("name"),
Aes.decode(rs.string("api_key"), secret),
Aes.decode(rs.string("api_secret"), secret)
)
}
}
}
| rysh/scalatrader | scalatrader/app/repository/UserRepository.scala | Scala | mit | 1,084 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.example.languagemodel
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dataset.text.{LabeledSentenceToSample, _}
import com.intel.analytics.bigdl.dataset.{DataSet, SampleToMiniBatch}
import com.intel.analytics.bigdl.nn.{CrossEntropyCriterion, Module, TimeDistributedCriterion}
import com.intel.analytics.bigdl.optim._
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric._
import com.intel.analytics.bigdl.utils.Engine
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import com.intel.analytics.bigdl.example.languagemodel.Utils._
import com.intel.analytics.bigdl.models.rnn.SequencePreprocess
object PTBWordLM {
Logger.getLogger("org").setLevel(Level.ERROR)
Logger.getLogger("akka").setLevel(Level.ERROR)
Logger.getLogger("breeze").setLevel(Level.ERROR)
Logger.getLogger("com.intel.analytics.bigdl.example").setLevel(Level.INFO)
val logger = Logger.getLogger(getClass)
def main(args: Array[String]): Unit = {
trainParser.parse(args, new TrainParams()).map(param => {
val conf = Engine.createSparkConf()
.setAppName("Train ptbModel on text")
.set("spark.task.maxFailures", "1")
val sc = new SparkContext(conf)
Engine.init
val (trainData, validData, testData, dictionary) = SequencePreprocess(
param.dataFolder, param.vocabSize)
val trainSet = DataSet.rdd(sc.parallelize(
SequencePreprocess.reader(trainData, param.numSteps)))
.transform(TextToLabeledSentence[Float](param.numSteps))
.transform(LabeledSentenceToSample[Float](
oneHot = false,
fixDataLength = None,
fixLabelLength = None))
.transform(SampleToMiniBatch[Float](param.batchSize))
val validationSet = DataSet.rdd(sc.parallelize(
SequencePreprocess.reader(validData, param.numSteps)))
.transform(TextToLabeledSentence[Float](param.numSteps))
.transform(LabeledSentenceToSample[Float](
oneHot = false,
fixDataLength = None,
fixLabelLength = None))
.transform(SampleToMiniBatch[Float](param.batchSize))
val model = if (param.modelSnapshot.isDefined) {
Module.loadModule[Float](param.modelSnapshot.get)
} else {
val curModel = PTBModel(
inputSize = param.vocabSize,
hiddenSize = param.hiddenSize,
outputSize = param.vocabSize,
numLayers = param.numLayers,
keepProb = param.keepProb)
curModel.reset()
curModel
}
val optimMethod = if (param.stateSnapshot.isDefined) {
OptimMethod.load[Float](param.stateSnapshot.get)
} else {
new Adagrad[Float](learningRate = param.learningRate,
learningRateDecay = param.learningRateDecay)
}
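      // TimeDistributedCriterion applies the wrapped CrossEntropyCriterion to every time step
      // along the given dimension; sizeAverage = false sums the per-step losses instead of
      // averaging them.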
val optimizer = Optimizer(
model = model,
dataset = trainSet,
criterion = TimeDistributedCriterion[Float](
CrossEntropyCriterion[Float](), sizeAverage = false, dimension = 1)
)
if (param.checkpoint.isDefined) {
optimizer.setCheckpoint(param.checkpoint.get, Trigger.everyEpoch)
}
      if (param.overWriteCheckpoint) {
optimizer.overWriteCheckpoint()
}
optimizer
.setValidation(Trigger.everyEpoch, validationSet, Array(new Loss[Float](
TimeDistributedCriterion[Float](
CrossEntropyCriterion[Float](),
sizeAverage = false, dimension = 1))))
.setOptimMethod(optimMethod)
.setEndWhen(Trigger.maxEpoch(param.nEpochs))
.optimize()
sc.stop()
})
}
}
| jenniew/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/example/languagemodel/PTBWordLM.scala | Scala | apache-2.0 | 4,256 |
package io.udash.web.guide.styles.partials
import io.udash.css.{CssBase, CssStyle}
import io.udash.web.commons.styles.GlobalStyles
import io.udash.web.commons.styles.attributes.Attributes
import io.udash.web.commons.styles.components.CodeBlockStyles
import io.udash.web.commons.styles.utils.{FontWeight, StyleConstants, UdashFonts}
import io.udash.web.guide.styles.utils.{GuideStyleUtils, MediaQueries}
import scalacss.internal.Literal
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
object GuideStyles extends CssBase with CodeBlockStyles {
import dsl._
val main: CssStyle = style(
paddingTop(50 px),
position.relative,
minHeight :=! s"calc(100vh - ${StyleConstants.Sizes.HeaderHeight}px - ${StyleConstants.Sizes.FooterHeight}px)"
)
val floatRight: CssStyle = style(
float.right
)
val imgIntro: CssStyle = style(
MediaQueries.phone(
float.none,
width(100 %%),
maxWidth.none,
maxHeight.none,
margin(1.25 rem, `0`)
)
)
private val highlightRedKeyframes: CssStyle = keyframes(
0d -> keyframe(
color.black
),
50d -> keyframe(
color.red
),
100d -> keyframe(
color.black
)
)
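  // Plays the keyframes above exactly once over two seconds: black -> red -> black.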
val highlightRed: CssStyle = style(
animationName(highlightRedKeyframes),
animationIterationCount.count(1),
animationDuration(2 seconds)
)
val menuWrapper: CssStyle = style(
GlobalStyles.col,
GuideStyleUtils.transition(),
width(StyleConstants.Sizes.MenuWidth px),
paddingTop(4.375 rem),
borderBottomColor(StyleConstants.Colors.GreyExtra),
borderBottomWidth(1 px),
borderBottomStyle.solid,
transform := none,
MediaQueries.desktop(
borderBottomWidth(`0`),
position.sticky,
top(50 px)
),
MediaQueries.tabletLandscape(
position.fixed,
right(100 %%),
top(`0`),
height(100 %%),
paddingTop(`0`),
backgroundColor.white,
zIndex(999),
&.attr(Attributes.data(Attributes.Active), "true") {
transform := "translateX(100%)"
}
),
MediaQueries.phone(
width.auto
)
)
val contentWrapper: CssStyle = style(
GlobalStyles.col,
width :=! s"calc(100% - ${StyleConstants.Sizes.MenuWidth}px)",
paddingLeft(2.5 rem),
paddingBottom(3.125 rem),
unsafeChild("a")(
&.not(".badge")(
&.not(".nav-link")(
&.not(".dropdown-item")(
color(StyleConstants.Colors.Red),
&.hover(
color(StyleConstants.Colors.Red)
),
&.visited(
color(StyleConstants.Colors.Red)
),
)
)
)
),
MediaQueries.tabletLandscape(
width(100 %%),
paddingLeft(`0`)
)
)
private val liStyle: CssStyle = mixin(
position.relative,
paddingLeft(1.25 rem),
    margin(.3125 rem, `0`, .3125 rem, 2.8125 rem),
MediaQueries.phone(
marginLeft(.9375 rem)
)
)
private val liBulletStyle: CssStyle = mixin(
position.absolute,
left(`0`),
top(`0`)
)
val defaultList: CssStyle = style(
unsafeChild("li") (
liStyle,
&.before(
liBulletStyle,
content.string("•"),
)
)
)
val innerList: CssStyle = style(
unsafeChild("li") (
liStyle,
&.before(
liBulletStyle,
content.string("‣"),
)
)
)
val codeWrapper: CssStyle = style(
marginTop(.9375 rem),
marginBottom(.9375 rem),
paddingTop(.625 rem),
paddingBottom(.625 rem)
)
val codeBlock: CssStyle = style(
counterReset := "code",
listStyleType := "decimal",
listStylePosition.outside,
fontFamily :=! "Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace",
fontSize(1 rem),
color(StyleConstants.Colors.GreySemi),
paddingLeft(3.75 rem),
marginBottom(`0`),
unsafeChild(":not(pre) > code[class*=\\"language-\\"]") (
whiteSpace.pre
)
)
val frame: CssStyle = style(
GuideStyleUtils.border(),
display.block,
padding(.9375 rem),
margin(1.25 rem, `0`)
)
val imgSmall: CssStyle = style(
display.table,
GuideStyleUtils.border(),
maxWidth(40 %%),
maxHeight(200 px),
padding(.9375 rem),
margin(`0`, 1.25 rem, 1.25 rem, 1.25 rem)
)
val imgMedium: CssStyle = style(
display.table,
GuideStyleUtils.border(),
maxWidth(70 %%),
maxHeight(350 px),
padding(.9375 rem),
margin(`0`, 1.25 rem, 1.25 rem, 1.25 rem)
)
val imgBig: CssStyle = style(
display.table,
maxWidth(100 %%),
maxHeight(750 px)
)
val useBootstrap: CssStyle = style(
addClassName("bootstrap")
)
val sectionError: CssStyle = style(
position.relative,
width(100 %%),
overflow.hidden,
height :=! s"calc(100vh - 200px)",
color.white,
backgroundColor.black,
backgroundImage := "url(/assets/images/intro_bg.jpg)",
backgroundSize := Literal.cover,
media.minHeight(1 px).maxHeight(StyleConstants.Sizes.MinSiteHeight - 1 px)(
height.auto,
paddingTop(80 px),
paddingBottom(100 px)
),
MediaQueries.tabletLandscape(
height.auto,
paddingTop(80 px),
paddingBottom(100 px)
)
)
val errorInner: CssStyle = style(
GuideStyleUtils.relativeMiddle,
top(50 %%),
transform := "translate3d(0, 0, 1)",
media.minHeight(1 px).maxHeight(650 px)(
top(60 %%)
),
media.minHeight(1 px).maxHeight(StyleConstants.Sizes.MinSiteHeight - 1 px)(
top(auto),
transform := "translateY(0)"
),
MediaQueries.tabletLandscape(
top(auto),
transform := "translateY(0)"
)
)
val errorHead: CssStyle = style(
UdashFonts.roboto(FontWeight.Bold),
fontSize(5.5 rem),
lineHeight(1.1),
transform := "translate3d(0, 0, 1)",
textShadow := "0 0 15px black",
&.after(
content.string(" "),
position.absolute,
bottom(-110 px),
right(-30 px),
width(400 px),
height(213 px),
backgroundImage := "url(/assets/images/intro_bird.png)",
backgroundSize := "100%",
media.minHeight(1 px).maxHeight(850 px)(
width(400 * .7 px),
height(213 * .7 px),
bottom(-80 px)
),
MediaQueries.tabletLandscape(
width(400 * .7 px),
height(213 * .7 px),
bottom(-80 px)
),
MediaQueries.tabletLandscape(
display.none
)
),
media.minHeight(StyleConstants.Sizes.MinSiteHeight px).maxHeight(850 px)(
marginTop(1.25 rem),
marginBottom(1.875 rem)
),
media.minHeight(751 px).maxHeight(850 px)(
marginTop(1.25 rem),
marginBottom(1.875 rem),
fontSize(5 rem)
),
media.minHeight(651 px).maxHeight(750 px)(
fontSize(3.75 rem)
),
media.minHeight(StyleConstants.Sizes.MinSiteHeight px).maxHeight(650 px)(
fontSize(3.125 rem)
),
media.minHeight(1 px).maxHeight(StyleConstants.Sizes.MinSiteHeight - 1 px)(
fontSize(3.75 rem),
marginTop(2.5 rem),
marginBottom(3.75 rem)
),
MediaQueries.tabletLandscape(
fontSize(5 rem).important
),
MediaQueries.tabletLandscape(
marginTop(1.875 rem).important,
marginBottom(3.75 rem).important,
fontSize(3.75 rem).important
),
MediaQueries.phone(
fontSize(2.5 rem).important,
lineHeight(1.2)
)
)
}
| UdashFramework/udash-core | guide/shared/src/main/scala/io/udash/web/guide/styles/partials/GuideStyles.scala | Scala | apache-2.0 | 7,429 |
package se.lu.nateko.cp.meta.upload
import java.net.URI
import scala.concurrent.Future
import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
import scala.scalajs.js.URIUtils.encodeURIComponent
import org.scalajs.dom.File
import org.scalajs.dom.ext.Ajax
import org.scalajs.dom.ext.AjaxException
import org.scalajs.dom.raw.XMLHttpRequest
import JsonSupport._
import play.api.libs.json._
import se.lu.nateko.cp.meta.{SubmitterProfile, UploadDto}
import se.lu.nateko.cp.meta.core.data.{Envri, EnvriConfig}
import se.lu.nateko.cp.meta.core.data.Envri.Envri
import se.lu.nateko.cp.doi.Doi
object Backend {
import SparqlQueries._
private def whoAmI: Future[Option[String]] =
Ajax.get("/whoami", withCredentials = true)
.recoverWith(recovery("fetch user information"))
.map(xhr =>
parseTo[JsObject](xhr).value("email") match {
case JsString(email) => Some(email)
case _ => None
})
private def envri: Future[Envri] = Ajax.get("/upload/envri")
.recoverWith(recovery("fetch envri"))
.map(parseTo[Envri])
private def authHost: Future[EnvriConfig] = Ajax.get("/upload/envriconfig")
.recoverWith(recovery("fetch envri config"))
.map(parseTo[EnvriConfig])
def fetchConfig: Future[InitAppInfo] = whoAmI.zip(envri).zip(authHost).map {
case ((whoAmI, envri), authHost) => InitAppInfo(whoAmI, envri, authHost)
}
def submitterIds: Future[IndexedSeq[SubmitterProfile]] =
Ajax.get("/upload/submitterids", withCredentials = true)
.recoverWith(recovery("fetch the list of available submitter ids"))
.map(parseTo[IndexedSeq[SubmitterProfile]])
.flatMap{ s =>
if(s.isEmpty)
Future.failed(new Exception("""You are not authorized to upload data.
          Please contact us if you would like to get the permission."""))
else Future.successful(s)
}
def stationInfo(orgClass: Option[URI], producingOrg: Option[URI])(implicit envri: Envri.Envri): Future[IndexedSeq[Station]] =
sparqlSelect(stations(orgClass, producingOrg)).map(_.map(toStation))
def getObjSpecs(implicit envri: Envri.Envri): Future[IndexedSeq[ObjSpec]] =
sparqlSelect(objSpecs).map(_.map(toObjSpec))
def getSites(station: URI): Future[IndexedSeq[NamedUri]] =
sparqlSelect(sites(station)).map(_.map(toSite)).map(disambiguateNames)
def getSamplingPoints(site: URI): Future[IndexedSeq[SamplingPoint]] =
sparqlSelect(samplingpoints(site)).map((_.map(toSamplingPoint)))
def getL3SpatialCoverages(implicit envri: Envri.Envri): Future[IndexedSeq[SpatialCoverage]] =
if(envri == Envri.SITES) Future.successful(IndexedSeq.empty)
else sparqlSelect(l3spatialCoverages).map(_.map(toSpatialCoverage))
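  // When several entries share a display name, the last path segment of each URI is appended
  // to disambiguate them ("X" at .../ABC becomes "X (ABC)"); the result is sorted by name.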
private def disambiguateNames(list: IndexedSeq[NamedUri]): IndexedSeq[NamedUri] =
list.groupBy(_.name).valuesIterator.flatMap( g =>
if (g.length <= 1) g
else g.map{nUri =>
val uriSegm = nUri.uri.getPath().split('/').last
nUri.copy(name = s"${nUri.name} ($uriSegm)")
}
).toIndexedSeq.sortBy(_.name)
def getPeople(implicit envri: Envri.Envri): Future[IndexedSeq[NamedUri]] =
sparqlSelect(people).map(_.map(toPerson)).map(disambiguateNames)
def getOrganizations(implicit envri: Envri.Envri): Future[IndexedSeq[NamedUri]] =
sparqlSelect(organizations).map(_.map(toOrganization)).map(disambiguateNames)
def getDatasetColumns(dataset: URI): Future[IndexedSeq[DatasetVar]] =
sparqlSelect(datasetColumnQuery(dataset)).map(_.map(toDatasetVar))
def getDatasetVariables(dataset: URI): Future[IndexedSeq[DatasetVar]] =
sparqlSelect(datasetVariableQuery(dataset)).map(_.map(toDatasetVar))
def tryIngestion(
file: File, spec: ObjSpec, nRows: Option[Int], varnames: Option[Seq[String]]
)(implicit envriConfig: EnvriConfig): Future[Unit] = {
val firstVarName: Option[String] = varnames.flatMap(_.headOption).filter(_ => spec.isSpatiotemporal)
if(spec.isStationTimeSer || firstVarName.isDefined){
val nRowsQ = nRows.fold("")(nr => s"&nRows=$nr")
val varsQ = varnames.fold(""){vns =>
val varsJson = encodeURIComponent(Json.toJson(vns).toString)
s"&varnames=$varsJson"
}
val url = s"https://${envriConfig.dataHost}/tryingest?specUri=${spec.uri}$nRowsQ$varsQ"
Ajax
.put(url, file)
.recoverWith {
case AjaxException(xhr) =>
val msg = if(xhr.responseText.isEmpty)
"File could not be found"
else xhr.responseText
Future.failed(new Exception(msg))
}
.flatMap(xhr => xhr.status match {
case 200 => Future.successful(())
case _ => Future.failed(new Exception(xhr.responseText))
})
} else Future.successful(())
}
def sparqlSelect(query: String): Future[IndexedSeq[Binding]] = Ajax
.post("/sparql", query)
.recoverWith(recovery("execute a SPARQL query"))
.map(xhr =>
      (parseTo[JsObject](xhr) \ "results" \ "bindings")
.validate[JsArray]
.map(_.value.collect(parseBinding))
.get.toVector
)
def submitMetadata[T : Writes](dto: T): Future[URI] = {
val json = Json.toJson(dto)
Ajax.post("/upload", Json.prettyPrint(json), headers = Map("Content-Type" -> "application/json"), withCredentials = true)
.recoverWith(recovery("upload metadata"))
.map(xhr => new URI(xhr.responseText))
}
def uploadFile(file: File, dataURL: URI): Future[String] = Ajax
.put(dataURL.toString, file, headers = Map("Content-Type" -> "application/octet-stream"), withCredentials = true)
.recoverWith(recovery("upload file"))
.map(_.responseText)
def getMetadata(uri: URI): Future[UploadDto] = Ajax.get(s"/dtodownload?uri=$uri")
.recoverWith(recovery("fetch existing object"))
.map(parseTo[UploadDto])
def createDraftDoi(uri: URI): Future[Doi] = Ajax
.post(s"/dois/createDraft", Json.prettyPrint(Json.toJson(uri)), headers = Map("Content-Type" -> "application/json"), withCredentials = true)
.recoverWith(recovery("create draft DOI"))
.map(parseTo[Doi])
def getKeywordList(implicit envri: Envri.Envri): Future[IndexedSeq[String]] =
if (envri == Envri.SITES) Future.successful(IndexedSeq.empty)
else Ajax
.get("/uploadgui/gcmdkeywords.json")
.recoverWith(recovery("fetch keyword list"))
.map(parseTo[IndexedSeq[String]])
private val parseBinding: PartialFunction[JsValue, Binding] = {
case b: JsObject => b.fields.map{
case (key, v) => key -> (v \\ "value").validate[String].get
}.toMap
}
private def parseTo[T : Reads](xhr: XMLHttpRequest): T = {
Json.parse(xhr.responseText).as[T]
}
private def recovery(hint: String): PartialFunction[Throwable, Future[XMLHttpRequest]] = {
case AjaxException(xhr) =>
val msg = if(xhr.responseText.isEmpty)
s"Got HTTP status ${xhr.status} when trying to $hint"
else s"Error when trying to $hint: " + xhr.responseText
Future.failed(new Exception(msg))
}
}
| ICOS-Carbon-Portal/meta | uploadgui/src/main/scala/se/lu/nateko/cp/meta/upload/Backend.scala | Scala | gpl-3.0 | 6,692 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster.k8s
import java.util.concurrent.TimeUnit
import io.fabric8.kubernetes.api.model.PodListBuilder
import io.fabric8.kubernetes.client.KubernetesClient
import org.jmock.lib.concurrent.DeterministicScheduler
import org.mockito.{Mock, MockitoAnnotations}
import org.mockito.Mockito.{verify, when}
import org.scalatest.BeforeAndAfter
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.Fabric8Aliases._
import org.apache.spark.scheduler.cluster.k8s.ExecutorLifecycleTestUtils._
class ExecutorPodsPollingSnapshotSourceSuite extends SparkFunSuite with BeforeAndAfter {
private val sparkConf = new SparkConf
private val pollingInterval = sparkConf.get(KUBERNETES_EXECUTOR_API_POLLING_INTERVAL)
@Mock
private var kubernetesClient: KubernetesClient = _
@Mock
private var podOperations: PODS = _
@Mock
private var appIdLabeledPods: LABELED_PODS = _
@Mock
private var executorRoleLabeledPods: LABELED_PODS = _
@Mock
private var activeExecutorPods: LABELED_PODS = _
@Mock
private var eventQueue: ExecutorPodsSnapshotsStore = _
private var pollingExecutor: DeterministicScheduler = _
private var pollingSourceUnderTest: ExecutorPodsPollingSnapshotSource = _
before {
MockitoAnnotations.initMocks(this)
pollingExecutor = new DeterministicScheduler()
pollingSourceUnderTest = new ExecutorPodsPollingSnapshotSource(
sparkConf,
kubernetesClient,
eventQueue,
pollingExecutor)
pollingSourceUnderTest.start(TEST_SPARK_APP_ID)
when(kubernetesClient.pods()).thenReturn(podOperations)
when(podOperations.withLabel(SPARK_APP_ID_LABEL, TEST_SPARK_APP_ID))
.thenReturn(appIdLabeledPods)
when(appIdLabeledPods.withLabel(SPARK_ROLE_LABEL, SPARK_POD_EXECUTOR_ROLE))
.thenReturn(executorRoleLabeledPods)
when(executorRoleLabeledPods.withoutLabel(SPARK_EXECUTOR_INACTIVE_LABEL, "true"))
.thenReturn(activeExecutorPods)
}
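  // The stubbing above mirrors the label-filter chain the polling source is expected to walk:
  // app id -> executor role -> not marked inactive.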
test("Items returned by the API should be pushed to the event queue") {
val exec1 = runningExecutor(1)
val exec2 = runningExecutor(2)
when(activeExecutorPods.list())
.thenReturn(new PodListBuilder()
.addToItems(
exec1,
exec2)
.build())
pollingExecutor.tick(pollingInterval, TimeUnit.MILLISECONDS)
verify(eventQueue).replaceSnapshot(Seq(exec1, exec2))
}
}
| shuangshuangwang/spark | resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsPollingSnapshotSourceSuite.scala | Scala | apache-2.0 | 3,302 |
package com.enkidu.lignum.parsers.java.v8
import com.enkidu.lignum.parsers.ast.expression.Expression
import com.enkidu.lignum.parsers.ast.expression.discardable.dimension.Dimension
import com.enkidu.lignum.parsers.ast.statement.declarator._
import org.parboiled2._
abstract class JavaDeclaratorParser extends JavaParameterParser {
protected def constructorDeclarator: Rule1[ConstructorDeclarator] = rule {
identifier ~ `(` ~ formalParameters ~ `)` ~> ConstructorDeclarator
}
protected def methodDeclarator: Rule1[FunctionDeclarator] = rule {
identifier ~ `(` ~ formalParameters ~ `)` ~ {
dims ~> ArrayMethodDeclarator |
MATCH ~> MethodDeclarator
}
}
protected def variableDeclaratorId: Rule2[String, Seq[Dimension]] = rule {
identifier ~ optionalDims
}
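  // A declarator is classified by whether it carries array dimensions and/or an initializer,
  // yielding one of the four declarator node types below.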
protected def variableDeclarators: Rule1[Seq[Declarator]] = rule {
zeroOrMore {
variableDeclaratorId ~ optional(`=` ~ (expression | arrayInitializer)) ~> {
(n: String, ds: Seq[Dimension], i: Option[Expression]) =>
if (ds.size == 0 && i.isDefined) InitializedVariableDeclarator(n, i.get)
else if (i.isDefined) InitializedArrayDeclarator(n, ds, i.get)
else if (ds.size == 0) VariableDeclarator(n)
else ArrayDeclarator(n, ds)
}
} separatedBy comma
}
}
| marek1840/java-parser | src/main/scala/com/enkidu/lignum/parsers/java/v8/JavaDeclaratorParser.scala | Scala | mit | 1,320 |
/*******************************************************************************
Copyright (c) 2013, S-Core.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.Tizen
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
import kr.ac.kaist.jsaf.analysis.cfg.{CFG, CFGExpr, InternalError}
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T, _}
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.typing._
import kr.ac.kaist.jsaf.analysis.typing.domain.UIntSingle
import kr.ac.kaist.jsaf.analysis.typing.domain.Context
import kr.ac.kaist.jsaf.analysis.typing.models.AbsBuiltinFunc
import kr.ac.kaist.jsaf.analysis.typing.models.AbsConstValue
import kr.ac.kaist.jsaf.analysis.typing.domain.Heap
object TIZENdownload extends Tizen {
private val name = "download"
/* predefined locations */
val loc_obj = TIZENtizen.loc_download
val loc_proto = newSystemRecentLoc(name + "Proto")
override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
(loc_obj, prop_obj), (loc_proto, prop_proto)
)
/* constructor or object*/
private val prop_obj: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(loc_proto), F, F, F)))),
("@extensible", AbsConstValue(PropValue(T)))
)
/* prototype */
private val prop_proto: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
("@extensible", AbsConstValue(PropValue(T))),
("start", AbsBuiltinFunc("tizen.download.start", 2)),
("cancel", AbsBuiltinFunc("tizen.download.cancel", 1)),
("pause", AbsBuiltinFunc("tizen.download.pause", 1)),
("resume", AbsBuiltinFunc("tizen.download.resume", 1)),
("getState", AbsBuiltinFunc("tizen.download.getState", 1)),
("getDownloadRequest", AbsBuiltinFunc("tizen.download.getDownloadRequest", 1)),
("getMIMEType", AbsBuiltinFunc("tizen.download.getMIMEType", 1)),
("setListener", AbsBuiltinFunc("tizen.download.setListener", 2))
)
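  /* Each semantic function below models one download API over the abstract heap: argument
     values are read, type errors accumulate as WebAPIException sets, and callback/result
     objects are allocated at freshly oldified recent locations. */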
override def getSemanticMap(): Map[String, SemanticFun] = {
Map(
("tizen.download.start" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = set_addr.head
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val addr3 = cfg.getAPIAddress(addr_env, 2)
val addr4 = cfg.getAPIAddress(addr_env, 3)
val addr5 = cfg.getAPIAddress(addr_env, 4)
val l_r1 = addrToLoc(addr1, Recent)
val l_r2 = addrToLoc(addr2, Recent)
val l_r3 = addrToLoc(addr3, Recent)
val l_r4 = addrToLoc(addr4, Recent)
val l_r5 = addrToLoc(addr5, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val (h_3, ctx_3) = Helper.Oldify(h_2, ctx_2, addr3)
val (h_4, ctx_4) = Helper.Oldify(h_3, ctx_3, addr4)
val (h_5, ctx_5) = Helper.Oldify(h_4, ctx_4, addr5)
val v_1 = getArgValue(h_5, ctx_5, args, "0")
val n_arglen = Operator.ToUInt32(getArgValue(h_5, ctx_5, args, "length"))
val (b_1, es_1) = TizenHelper.instanceOf(h_5, v_1, Value(TIZENDownloadRequest.loc_proto))
val es_2 =
if (b_1._1._3 <= F) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val (h_6, es_3) = n_arglen match {
case UIntSingle(n) if n >= 2 =>
val v_2 = getArgValue(h_5, ctx_5, args, "1")
val es1 =
if (v_2._2.exists((l) => Helper.IsCallable(h_5, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val (h_6, es_3) = v_2._2.foldLeft((h_5, TizenHelper.TizenExceptionBot))((_he, l) => {
val v1 = Helper.Proto(_he._1, l, AbsString.alpha("onprogress"))
val v2 = Helper.Proto(_he._1, l, AbsString.alpha("onpaused"))
val v3 = Helper.Proto(_he._1, l, AbsString.alpha("oncanceled"))
val v4 = Helper.Proto(_he._1, l, AbsString.alpha("oncompleted"))
val v5 = Helper.Proto(_he._1, l, AbsString.alpha("onfailed"))
val es1 =
if (v1._2.exists((ll) => Helper.IsCallable(_he._1, ll) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es2 =
if (v2._2.exists((ll) => Helper.IsCallable(_he._1, ll) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es3 =
if (v3._2.exists((ll) => Helper.IsCallable(_he._1, ll) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es4 =
if (v4._2.exists((ll) => Helper.IsCallable(_he._1, ll) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es5 =
if (v5._2.exists((ll) => Helper.IsCallable(_he._1, ll) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(3)).
update("0", PropValue(ObjectValue(Value(NumTop), T, T, T))).
update("1", PropValue(ObjectValue(Value(UInt), T, T, T))).
update("2", PropValue(ObjectValue(Value(UInt), T, T, T)))
val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(NumTop), T, T, T)))
val o_arr3 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(NumTop), T, T, T)))
val o_arr4 = Helper.NewArrayObject(AbsNumber.alpha(2)).
update("0", PropValue(ObjectValue(Value(NumTop), T, T, T))).
update("1", PropValue(ObjectValue(Value(NumTop), T, T, T)))
val o_arr5 = Helper.NewArrayObject(AbsNumber.alpha(2)).
update("0", PropValue(ObjectValue(Value(NumTop), T, T, T))).
update("1", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_6 = _he._1.
update(l_r1, o_arr1).
update(l_r2, o_arr2).
update(l_r3, o_arr3).
update(l_r4, o_arr4).
update(l_r5, o_arr5)
val h_7 = TizenHelper.addCallbackHandler(h_6, AbsString.alpha("DownloadCB.onprogress"), Value(v1._2), Value(l_r1))
val h_8 = TizenHelper.addCallbackHandler(h_7, AbsString.alpha("DownloadCB.onpaused"), Value(v2._2), Value(l_r2))
val h_9 = TizenHelper.addCallbackHandler(h_8, AbsString.alpha("DownloadCB.oncanceled"), Value(v3._2), Value(l_r3))
val h_10 = TizenHelper.addCallbackHandler(h_9, AbsString.alpha("DownloadCB.oncompleted"), Value(v4._2), Value(l_r4))
val h_11 = TizenHelper.addCallbackHandler(h_10, AbsString.alpha("DownloadCB.onfailed"), Value(v5._2), Value(l_r5))
(h_11, _he._2 ++ es1 ++ es2 ++ es3 ++ es4 ++ es5)
})
(h_6, es1 ++ es_3)
case _ => (h_5, TizenHelper.TizenExceptionBot)
}
val est = Set[WebAPIException](SecurityError, UnknownError, NotSupportedError, InvalidValuesError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ es_3 ++ est)
((Helper.ReturnStore(h_6, Value(NumTop)), ctx_5), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.download.cancel" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val v = getArgValue(h, ctx, args, "0")
val n_arglen = Operator.ToUInt32(getArgValue(h, ctx, args, "length"))
val es =
if (n_arglen == AbsNumber.alpha(0)) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_1 =
if (v._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val est = Set[WebAPIException](UnknownError, NotSupportedError, NotFoundError, InvalidValuesError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ es_1 ++ est)
((h, ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.download.pause" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val v = getArgValue(h, ctx, args, "0")
val n_arglen = Operator.ToUInt32(getArgValue(h, ctx, args, "length"))
val es =
if (n_arglen == AbsNumber.alpha(0)) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_1 =
if (v._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val est = Set[WebAPIException](UnknownError, NotSupportedError, NotFoundError, InvalidValuesError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ es_1 ++ est)
((h, ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.download.resume" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val v = getArgValue(h, ctx, args, "0")
val n_arglen = Operator.ToUInt32(getArgValue(h, ctx, args, "length"))
val es =
if (n_arglen == AbsNumber.alpha(0)) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_1 =
if (v._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val est = Set[WebAPIException](UnknownError, NotSupportedError, NotFoundError, InvalidValuesError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ es_1 ++ est)
((h, ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.download.getState" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val v = getArgValue(h, ctx, args, "0")
val n_arglen = Operator.ToUInt32(getArgValue(h, ctx, args, "length"))
val es =
if (n_arglen == AbsNumber.alpha(0)) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_1 =
if (v._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val v1 = Value(AbsString.alpha("QUEUED") + AbsString.alpha("DOWNLOADING") + AbsString.alpha("PAUSED") +
AbsString.alpha("CANCELED") + AbsString.alpha("COMPLETED") + AbsString.alpha("FAILED"))
val est = Set[WebAPIException](UnknownError, NotSupportedError, NotFoundError, InvalidValuesError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ es_1 ++ est)
((Helper.ReturnStore(h, v1), ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.download.getDownloadRequest" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = set_addr.head
val addr1 = cfg.getAPIAddress(addr_env, 0)
val l_r1 = addrToLoc(addr1, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val v = getArgValue(h_1, ctx_1, args, "0")
val n_arglen = Operator.ToUInt32(getArgValue(h_1, ctx_1, args, "length"))
val es =
if (n_arglen == AbsNumber.alpha(0)) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_1 =
if (v._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
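        // Model the returned DownloadRequest with every field at the top of its
        // abstract domain, since the concrete request contents are unknown here.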
val o_new = ObjEmpty.
update("@class", PropValue(AbsString.alpha("Object"))).
update("@proto", PropValue(ObjectValue(Value(TIZENDownloadRequest.loc_proto), F, F, F))).
update("@extensible", PropValue(T)).
update("url", PropValue(ObjectValue(Value(StrTop), T, T, T))).
update("destination", PropValue(ObjectValue(Value(PValue(UndefBot, NullTop, BoolBot, NumBot, StrTop)), T, T, T))).
update("fileName", PropValue(ObjectValue(Value(PValue(UndefBot, NullTop, BoolBot, NumBot, StrTop)), T, T, T))).
update("networkType", PropValue(ObjectValue(Value(AbsString.alpha("CELLULAR") + AbsString.alpha("WIFI") +
AbsString.alpha("ALL")), T, T, T))).
update("httpHeader", PropValue(ObjectValue(Value(NullTop), T, T, T)))
val h_2 = h_1.update(l_r1, o_new)
val est = Set[WebAPIException](UnknownError, NotSupportedError, NotFoundError, InvalidValuesError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ es_1 ++ est)
((Helper.ReturnStore(h_2, Value(l_r1)), ctx_1), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.download.getMIMEType" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val v = getArgValue(h, ctx, args, "0")
val n_arglen = Operator.ToUInt32(getArgValue(h, ctx, args, "length"))
val es =
if (n_arglen == AbsNumber.alpha(0)) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_1 =
if (v._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val est = Set[WebAPIException](UnknownError, NotSupportedError, NotFoundError, InvalidValuesError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ es_1 ++ est)
((Helper.ReturnStore(h, Value(StrTop)), ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.download.setListener" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = set_addr.head
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val addr3 = cfg.getAPIAddress(addr_env, 2)
val addr4 = cfg.getAPIAddress(addr_env, 3)
val addr5 = cfg.getAPIAddress(addr_env, 4)
val l_r1 = addrToLoc(addr1, Recent)
val l_r2 = addrToLoc(addr2, Recent)
val l_r3 = addrToLoc(addr3, Recent)
val l_r4 = addrToLoc(addr4, Recent)
val l_r5 = addrToLoc(addr5, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val (h_3, ctx_3) = Helper.Oldify(h_2, ctx_2, addr3)
val (h_4, ctx_4) = Helper.Oldify(h_3, ctx_3, addr4)
val (h_5, ctx_5) = Helper.Oldify(h_4, ctx_4, addr5)
val v_1 = getArgValue(h_5, ctx_5, args, "0")
val v_2 = getArgValue(h_5, ctx_5, args, "1")
val n_arglen = Operator.ToUInt32(getArgValue(h_5, ctx_5, args, "length"))
val es =
if (n_arglen == AbsNumber.alpha(0)) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_1 =
if (v_1._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val (h_6, es_2) = v_2._2.foldLeft((h_5, TizenHelper.TizenExceptionBot))((_he, l) => {
val v1 = Helper.Proto(_he._1, l, AbsString.alpha("onprogress"))
val v2 = Helper.Proto(_he._1, l, AbsString.alpha("onpaused"))
val v3 = Helper.Proto(_he._1, l, AbsString.alpha("oncanceled"))
val v4 = Helper.Proto(_he._1, l, AbsString.alpha("oncompleted"))
val v5 = Helper.Proto(_he._1, l, AbsString.alpha("onfailed"))
val es1 =
if (v1._2.exists((ll) => Helper.IsCallable(_he._1, ll) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es2 =
if (v2._2.exists((ll) => Helper.IsCallable(_he._1, ll) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es3 =
if (v3._2.exists((ll) => Helper.IsCallable(_he._1, ll) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es4 =
if (v4._2.exists((ll) => Helper.IsCallable(_he._1, ll) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es5 =
if (v5._2.exists((ll) => Helper.IsCallable(_he._1, ll) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
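          // Abstract argument vectors handed to each DownloadCallback method
          // (per the Tizen Download API): onprogress(id, receivedSize, totalSize),
          // onpaused(id), oncanceled(id), oncompleted(id, fullPath), onfailed(id, error).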
val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(3)).
update("0", PropValue(ObjectValue(Value(v_1._1._4), T, T, T))).
update("1", PropValue(ObjectValue(Value(UInt), T, T, T))).
update("2", PropValue(ObjectValue(Value(UInt), T, T, T)))
val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(v_1._1._4), T, T, T)))
val o_arr3 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(v_1._1._4), T, T, T)))
val o_arr4 = Helper.NewArrayObject(AbsNumber.alpha(2)).
update("0", PropValue(ObjectValue(Value(v_1._1._4), T, T, T))).
update("1", PropValue(ObjectValue(Value(StrTop), T, T, T)))
val o_arr5 = Helper.NewArrayObject(AbsNumber.alpha(2)).
update("0", PropValue(ObjectValue(Value(v_1._1._4), T, T, T))).
update("1", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_6 = _he._1.
update(l_r1, o_arr1).
update(l_r2, o_arr2).
update(l_r3, o_arr3).
update(l_r4, o_arr4).
update(l_r5, o_arr5)
val h_7 = TizenHelper.addCallbackHandler(h_6, AbsString.alpha("DownloadCB.onprogress"), Value(v1._2), Value(l_r1))
val h_8 = TizenHelper.addCallbackHandler(h_7, AbsString.alpha("DownloadCB.onpaused"), Value(v2._2), Value(l_r2))
val h_9 = TizenHelper.addCallbackHandler(h_8, AbsString.alpha("DownloadCB.oncanceled"), Value(v3._2), Value(l_r3))
val h_10 = TizenHelper.addCallbackHandler(h_9, AbsString.alpha("DownloadCB.oncompleted"), Value(v4._2), Value(l_r4))
val h_11 = TizenHelper.addCallbackHandler(h_10, AbsString.alpha("DownloadCB.onfailed"), Value(v5._2), Value(l_r5))
(h_11, _he._2 ++ es1 ++ es2 ++ es3 ++ es4 ++ es5)
})
val est = Set[WebAPIException](UnknownError, NotSupportedError, NotFoundError, InvalidValuesError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ es_1 ++ es_2 ++ est)
((h_6, ctx_5), (he + h_e, ctxe + ctx_e))
}
))
)
}
override def getPreSemanticMap(): Map[String, SemanticFun] = {Map()}
override def getDefMap(): Map[String, AccessFun] = {Map()}
override def getUseMap(): Map[String, AccessFun] = {Map()}
} | daejunpark/jsaf | src/kr/ac/kaist/jsaf/analysis/typing/models/Tizen/TIZENdownload.scala | Scala | bsd-3-clause | 21,213 |
package adts
object t1:
enum Option[+T]:
case Some(x: T)
case None
object t2:
enum Option[+T]:
case Some(x: T) extends Option[T]
case None extends Option[Nothing]
enum Color(val rgb: Int):
case Red extends Color(0xFF0000)
case Green extends Color(0x00FF00)
case Blue extends Color(0x0000FF)
case Mix(mix: Int) extends Color(mix)
object t3:
enum Option[+T]:
case Some(x: T) extends Option[T]
case None
def isDefined: Boolean = this match
case None => false
case some => true
object Option:
def apply[T >: Null](x: T): Option[T] =
if (x == null) None else Some(x)
| som-snytt/dotty | tests/pos/reference/adts.scala | Scala | apache-2.0 | 645 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import org.fusesource.scalate.sbt._
class Project(info: ProjectInfo) extends DefaultWebProject(info) with PrecompilerWebProject {
lazy val fusesource_snapshot_repo = "FuseSource Snapshots" at
"http://repo.fusesource.com/nexus/content/repositories/snapshots"
lazy val java_net_repo = "Java.net Repository" at
"http://download.java.net/maven/2"
lazy val scalate_guice = "org.fusesource.scalate" % "scalate-guice" % "${project.version}"
lazy val servlet = "javax.servlet" % "servlet-api" % "${servlet-api-version}"
lazy val logback = "ch.qos.logback" % "logback-classic" % "${logback-version}"
// to get jetty-run working in sbt
lazy val jetty_webapp = "org.eclipse.jetty" % "jetty-webapp" % "7.0.2.RC0" % "test"
}
| dnatic09/scalate | archetypes/scalate-archetype-guice/src/main/resources/archetype-resources/project/build/Project.scala | Scala | apache-2.0 | 1,570 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail
import cats.laws._
import cats.laws.discipline._
import monix.eval.{Coeval, Task}
import monix.execution.exceptions.DummyException
import monix.tail.Iterant.Suspend
import monix.tail.batches.{Batch, BatchCursor}
import scala.util.Failure
object IterantMapEvalSuite extends BaseTestSuite {
test("Iterant[Task].mapEval covariant identity") { implicit s =>
check1 { (list: List[Int]) =>
val r = Iterant[Task].fromIterable(list).mapEval(x => Task.evalAsync(x)).toListL
r <-> Task.now(list)
}
}
test("Iterant[Task].mapEval covariant composition") { implicit s =>
check3 { (list: List[Int], f: Int => Int, g: Int => Int) =>
val r1 = Iterant[Task]
.fromIterable(list)
.mapEval(x => Task.evalAsync(f(x)))
.mapEval(x => Task.evalAsync(g(x)))
.toListL
val r2 = Iterant[Task]
.fromIterable(list)
.mapEval(x => Task.evalAsync(f(x)).flatMap(y => Task.evalAsync(g(y))))
.toListL
r1 <-> r2
}
}
test("Iterant[Task].mapEval equivalence") { implicit s =>
check2 { (list: List[Int], f: Int => Int) =>
val r = Iterant[Task].fromIterable(list).mapEval(x => Task.evalAsync(f(x))).toListL
r <-> Task.now(list.map(f))
}
}
test("Iterant[Task].mapEval equivalence (batched)") { implicit s =>
check2 { (list: List[Int], f: Int => Int) =>
val r = Iterant[Task].fromIterable(list).mapEval(x => Task.evalAsync(f(x))).toListL
r <-> Task.now(list.map(f))
}
}
test("Iterant[Task].next.mapEval guards against direct user code errors") { implicit s =>
val dummy = DummyException("dummy")
val stream = Iterant[Task].now(1)
val result = stream.mapEval[Int](_ => throw dummy).toListL.runToFuture
s.tick()
assertEquals(result.value, Some(Failure(dummy)))
}
test("Iterant[Task].nextCursor.mapEval guards against direct user code errors") { implicit s =>
val dummy = DummyException("dummy")
val stream = Iterant[Task].fromList(List(1, 2, 3))
val result = stream.mapEval[Int](_ => throw dummy).toListL.runToFuture
s.tick()
assertEquals(result.value, Some(Failure(dummy)))
}
test("Iterant[Task].next.mapEval guards against indirect user code errors") { implicit s =>
val dummy = DummyException("dummy")
val stream = Iterant[Task].now(1)
val result = stream.mapEval[Int](_ => Task.raiseError(dummy)).toListL.runToFuture
s.tick()
assertEquals(result.value, Some(Failure(dummy)))
}
test("Iterant[Task].nextCursor.mapEval guards against indirect user code errors") { implicit s =>
val dummy = DummyException("dummy")
val stream = Iterant[Task].fromList(List(1, 2, 3))
val result = stream.mapEval[Int](_ => Task.raiseError(dummy)).toListL.runToFuture
s.tick()
assertEquals(result.value, Some(Failure(dummy)))
}
test("Iterant[Task].mapEval should protect against direct exceptions") { implicit s =>
check2 { (l: List[Int], idx: Int) =>
val dummy = DummyException("dummy")
var effect = 0
val list = if (l.isEmpty) List(1) else l
val iterant = arbitraryListToIterant[Task, Int](list, idx)
val received = (iterant ++ Iterant[Task].of(1, 2))
.guarantee(Task.eval { effect += 1 })
.mapEval[Int](_ => throw dummy)
.completedL
.map(_ => 0)
.onErrorRecover { case _: DummyException => effect }
received <-> Task.pure(1)
}
}
test("Iterant[Task].mapEval should protect against indirect errors") { implicit s =>
check2 { (l: List[Int], idx: Int) =>
val dummy = DummyException("dummy")
var effect = 0
val list = if (l.isEmpty) List(1) else l
val iterant = arbitraryListToIterant[Task, Int](list, idx)
val received = (iterant ++ Iterant[Task].of(1, 2))
.guarantee(Task.eval { effect += 1 })
.mapEval[Int](_ => Task.raiseError(dummy))
.completedL
.map(_ => 0)
.onErrorRecover { case _: DummyException => effect }
received <-> Task.pure(1)
}
}
test("Iterant[Task].mapEval should protect against broken batches") { implicit s =>
check1 { (prefix: Iterant[Task, Int]) =>
val dummy = DummyException("dummy")
val cursor = new ThrowExceptionCursor(dummy)
val error = Iterant[Task].nextCursorS(cursor, Task.now(Iterant[Task].empty[Int]))
val stream = (prefix.onErrorIgnore ++ error).mapEval(x => Task.now(x))
stream <-> prefix.onErrorIgnore ++ Iterant[Task].haltS[Int](Some(dummy))
}
}
test("Iterant[Task].mapEval should protect against broken generators") { implicit s =>
check1 { (prefix: Iterant[Task, Int]) =>
val dummy = DummyException("dummy")
val cursor = new ThrowExceptionBatch(dummy)
val error = Iterant[Task].nextBatchS(cursor, Task.now(Iterant[Task].empty[Int]))
val stream = (prefix.onErrorIgnore ++ error).mapEval(x => Task.now(x))
stream <-> prefix.onErrorIgnore ++ Iterant[Task].haltS[Int](Some(dummy))
}
}
test("Iterant[Coeval].mapEval covariant identity") { implicit s =>
check1 { (list: List[Int]) =>
val r = Iterant[Coeval].fromIterable(list).mapEval(x => Coeval(x)).toListL
r <-> Coeval.now(list)
}
}
test("Iterant[Coeval].mapEval covariant composition") { implicit s =>
check3 { (list: List[Int], f: Int => Int, g: Int => Int) =>
val r1 = Iterant[Coeval]
.fromIterable(list)
.mapEval(x => Coeval(f(x)))
.mapEval(x => Coeval(g(x)))
.toListL
val r2 = Iterant[Coeval]
.fromIterable(list)
.mapEval(x => Coeval(f(x)).flatMap(y => Coeval(g(y))))
.toListL
r1 <-> r2
}
}
test("Iterant[Coeval].mapEval equivalence") { implicit s =>
check2 { (list: List[Int], f: Int => Int) =>
val r = Iterant[Coeval].fromIterable(list).mapEval(x => Coeval(f(x))).toListL
r <-> Coeval.now(list.map(f))
}
}
test("Iterant[Coeval].next.mapEval guards against direct user code errors") { implicit s =>
val dummy = DummyException("dummy")
val stream = Iterant[Coeval].now(1)
val result = stream.mapEval[Int](_ => throw dummy).toListL.runTry()
assertEquals(result, Failure(dummy))
}
test("Iterant[Coeval].nextCursor.mapEval guards against direct user code errors") { implicit s =>
val dummy = DummyException("dummy")
val stream = Iterant[Coeval].fromList(List(1, 2, 3))
val result = stream.mapEval[Int](_ => throw dummy).toListL.runTry()
assertEquals(result, Failure(dummy))
}
test("Iterant[Coeval].next.mapEval guards against indirect user code errors") { implicit s =>
val dummy = DummyException("dummy")
val stream = Iterant[Coeval].now(1)
val result = stream.mapEval[Int](_ => Coeval.raiseError(dummy)).toListL.runTry()
assertEquals(result, Failure(dummy))
}
test("Iterant[Coeval].nextCursor.mapEval guards against indirect user code errors") { implicit s =>
val dummy = DummyException("dummy")
val stream = Iterant[Coeval].fromList(List(1, 2, 3))
val result = stream.mapEval[Int](_ => Coeval.raiseError(dummy)).toListL.runTry()
assertEquals(result, Failure(dummy))
}
test("Iterant[Coeval].mapEval should protect against indirect user errors") { implicit s =>
check2 { (l: List[Int], idx: Int) =>
val dummy = DummyException("dummy")
val list = if (l.isEmpty) List(1) else l
val iterant = arbitraryListToIterant[Coeval, Int](list, idx)
val received = (iterant ++ Iterant[Coeval].now(1))
.mapEval[Int](_ => Coeval.raiseError(dummy))
received <-> Iterant[Coeval].haltS[Int](Some(dummy))
}
}
test("Iterant[Coeval].mapEval should protect against direct exceptions") { implicit s =>
check2 { (l: List[Int], idx: Int) =>
val dummy = DummyException("dummy")
val list = if (l.isEmpty) List(1) else l
val iterant = arbitraryListToIterant[Coeval, Int](list, idx)
val received = (iterant ++ Iterant[Coeval].now(1)).mapEval[Int](_ => throw dummy)
received <-> Iterant[Coeval].haltS[Int](Some(dummy))
}
}
test("Iterant[Coeval].mapEval should protect against broken batches") { implicit s =>
check1 { (prefix: Iterant[Coeval, Int]) =>
val dummy = DummyException("dummy")
val cursor: BatchCursor[Int] = new ThrowExceptionCursor(dummy)
val error = Iterant[Coeval].nextCursorS(cursor, Coeval.now(Iterant[Coeval].empty[Int]))
val stream = (prefix ++ error).mapEval(x => Coeval.now(x))
stream <-> prefix ++ Iterant[Coeval].haltS[Int](Some(dummy))
}
}
test("Iterant[Coeval].mapEval should protect against broken generators") { implicit s =>
check1 { (prefix: Iterant[Coeval, Int]) =>
val dummy = DummyException("dummy")
val cursor: Batch[Int] = new ThrowExceptionBatch(dummy)
val error = Iterant[Coeval].nextBatchS(cursor, Coeval.now(Iterant[Coeval].empty[Int]))
val stream = (prefix ++ error).mapEval(x => Coeval.now(x))
stream <-> prefix ++ Iterant[Coeval].haltS[Int](Some(dummy))
}
}
test("Iterant.mapEval suspends the evaluation for NextBatch") { implicit s =>
val dummy = DummyException("dummy")
val items = new ThrowExceptionBatch(dummy)
val iter = Iterant[Task].nextBatchS[Int](items, Task.now(Iterant[Task].empty))
val state = iter.mapEval(Task.now)
assert(state.isInstanceOf[Suspend[Task, Int]], "state.isInstanceOf[Suspend[Task, Int]]")
assert(!items.isTriggered, "!batch.isTriggered")
assertEquals(state.toListL.runToFuture.value, Some(Failure(dummy)))
}
test("Iterant.mapEval suspends the evaluation for NextCursor") { implicit s =>
val dummy = DummyException("dummy")
val items = new ThrowExceptionCursor(dummy)
val iter = Iterant[Task].nextCursorS[Int](items, Task.now(Iterant[Task].empty))
val state = iter.mapEval(Task.now)
assert(state.isInstanceOf[Suspend[Task, Int]], "state.isInstanceOf[Suspend[Task, Int]]")
assert(!items.isTriggered, "!batch.isTriggered")
assertEquals(state.toListL.runToFuture.value, Some(Failure(dummy)))
}
test("Iterant.mapEval suspends the evaluation for Next") { implicit s =>
val dummy = DummyException("dummy")
val iter = Iterant[Task].nextS(1, Task.now(Iterant[Task].empty[Int]))
val state = iter.mapEval { _ =>
(throw dummy): Task[Int]
}
assert(state.isInstanceOf[Suspend[Task, Int]], "state.isInstanceOf[Suspend[Int]]")
assertEquals(state.toListL.runToFuture.value, Some(Failure(dummy)))
}
test("Iterant.mapEval suspends the evaluation for Last") { implicit s =>
val dummy = DummyException("dummy")
val iter = Iterant[Task].lastS(1)
val state = iter.mapEval { _ =>
(throw dummy): Task[Int]
}
assert(state.isInstanceOf[Suspend[Task, Int]])
assertEquals(state.toListL.runToFuture.value, Some(Failure(dummy)))
}
test("Iterant.mapEval preserves resource safety") { implicit s =>
var effect = 0
val source = Iterant[Coeval]
.nextCursorS(BatchCursor(1, 2, 3), Coeval.now(Iterant[Coeval].empty[Int]))
.guarantee(Coeval.eval(effect += 1))
val stream = source.mapEval(x => Coeval.now(x))
stream.completedL.value()
assertEquals(effect, 1)
}
}
| alexandru/monifu | monix-tail/shared/src/test/scala/monix/tail/IterantMapEvalSuite.scala | Scala | apache-2.0 | 11,893 |
/*
* Copyright 2015 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.csv
import _root_.cats._
import imp.imp
import kantan.codecs.cats._
/** Declares various type class instances for bridging `kantan.csv` and `cats`. */
package object cats extends CommonInstances with DecoderInstances with EncoderInstances {
// - Eq instances ----------------------------------------------------------------------------------------------------
// -------------------------------------------------------------------------------------------------------------------
implicit val csvOutOfBoundsEq: Eq[DecodeError.OutOfBounds] = Eq.fromUniversalEquals
implicit val csvTypeErrorEq: Eq[DecodeError.TypeError] = Eq.fromUniversalEquals
implicit val csvDecodeErrorEq: Eq[DecodeError] = Eq.fromUniversalEquals
implicit val csvNoSuchElementEq: Eq[ParseError.NoSuchElement.type] = Eq.fromUniversalEquals
implicit val csvIoErrorEq: Eq[ParseError.IOError] = Eq.fromUniversalEquals
implicit val csvParseErrorEq: Eq[ParseError] = Eq.fromUniversalEquals
implicit val csvReadErrorEq: Eq[ReadError] = Eq.fromUniversalEquals
// - Misc. instances --------------------------------------------------------------------------------------------------
// -------------------------------------------------------------------------------------------------------------------
implicit def foldableRowEncoder[F[_]: Foldable, A: CellEncoder]: RowEncoder[F[A]] =
RowEncoder.from { as =>
imp[Foldable[F]]
.foldLeft(as, Seq.newBuilder[String])((acc, a) => acc += CellEncoder[A].encode(a))
.result()
}
}
| nrinaudo/scala-csv | cats/shared/src/main/scala/kantan/csv/cats/package.scala | Scala | mit | 2,260 |
package com.kleggett.db
/**
* @author K. Leggett
* @since 1.0 (2/28/15 3:05 PM)
*/
case class Book(title: String, author: String, published: Option[Int])
| kleggett/scalasqlutils | src/test/scala/com/kleggett/db/Book.scala | Scala | cc0-1.0 | 158 |
/*
* Copyright 2020 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.util
import com.google.protobuf.Message
import com.spotify.scio.coders.{AvroBytesUtil, Coder, CoderMaterializer}
import org.apache.avro.Schema
import org.apache.avro.generic.GenericRecord
import scala.reflect.{classTag, ClassTag}
object ProtobufUtil {
/**
* A Coder for Protobuf [[Message]]s encoded as Avro [[GenericRecord]]s.
* This must be in implicit scope when using [[ProtobufUtil.toAvro]], for example:
*
* `implicit val avroMessageCoder: Coder[GenericRecord] = ProtobufUtil.AvroMessageCoder`
*/
lazy val AvroMessageCoder: Coder[GenericRecord] =
Coder.avroGenericRecordCoder(AvroBytesUtil.schema)
/** The Avro [[Schema]] corresponding to an Avro-encoded Protobuf [[Message]]. */
lazy val AvroMessageSchema: Schema = AvroBytesUtil.schema
/**
* A metadata map containing information about the underlying Protobuf schema of the
* [[Message]] bytes encoded inside [[AvroMessageSchema]]'s `bytes` field.
*
* @tparam T subclass of [[Message]]
*/
def schemaMetadataOf[T <: Message: ClassTag]: Map[String, AnyRef] = {
import me.lyh.protobuf.generic
val schema = generic.Schema
.of[Message](classTag[T].asInstanceOf[ClassTag[Message]])
.toJson
Map("protobuf.generic.schema" -> schema)
}
/**
* A function that converts a Protobuf [[Message]] of type `T` into a [[GenericRecord]]
* whose [[Schema]] is a single byte array field, corresponding to the serialized bytes in `T`.
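   *
   * A typical (illustrative) use is `records.map(ProtobufUtil.toAvro[MyProto])`
   * before writing with a sink configured with [[AvroMessageSchema]]; `MyProto`
   * stands in for any generated protobuf class.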
*/
def toAvro[T <: Message: ClassTag]: T => GenericRecord = {
val protoCoder = CoderMaterializer.beamWithDefault(Coder.protoMessageCoder[T])
(t: T) => AvroBytesUtil.encode(protoCoder, t)
}
}
| regadas/scio | scio-core/src/main/scala/com/spotify/scio/util/ProtobufUtil.scala | Scala | apache-2.0 | 2,291 |
package pw.ian.sysadmincraft.world
import java.util.UUID
import org.bukkit.World
import pw.ian.sysadmincraft.system.{SysProcess, ProcessAdmin}
import pw.ian.sysadmincraft.SysAdmincraft
import pw.ian.sysadmincraft.world.WorldConstants._
case class PillarManager(plugin: SysAdmincraft, world: World) {
// process.name -> pillar
var pillars = Map[String, ProcessPillar]()
var taken = Set[Int]()
def initPillars(): List[ProcessPillar] = {
ProcessAdmin.processes().sortBy(-_.totalMemory).zipWithIndex.map { case (process, index) =>
buildPillar(index, process)
}
}
def refresh(processes: Iterable[SysProcess]) = {
processes.foreach { process =>
pillars.get(process.name) match {
case Some(pillar) => pillar.update(process)
case None => buildPillar(nextFreeIndex, process)
}
}
// Destroy pillars that are missing
(pillars.keySet &~ processes.map(_.name).toSet).foreach { name =>
      removePillar(pillars(name))
}
}
def buildPillar(index: Int, process: SysProcess) = {
val pillar = ProcessPillar(index, blockFromIndex(index), process)
pillars += process.name -> pillar
taken += index
pillar
}
def handleDeath(name: String): Unit = {
pillars.get(name) match {
case Some(pillar) => destroyPillar(pillar)
case None =>
}
}
def destroyPillar(pillar: ProcessPillar) = {
removePillar(pillar)
pillar.kill()
plugin.getServer.broadcastMessage(s"Process ${pillar.process.name} has been killed.")
}
def removePillar(pillar: ProcessPillar) = {
taken -= pillar.index
pillars -= pillar.process.name
pillar.teardown()
}
private def nextFreeIndex: Int = Stream.from(0).find(!taken.contains(_)).get
private def blockFromIndex(index: Int) = {
val i = PillarManagerUtils.spiralIndex(index)
world.getBlockAt(i._1 * PILLAR_DISTANCE + (PILLAR_DISTANCE / 2),
START_HEIGHT, i._2 * PILLAR_DISTANCE + (PILLAR_DISTANCE / 2))
}
}
object PillarManagerUtils {
def spiralIndex(n: Int): (Int, Int) = {
    // given n, an index in the squared spiral, let
    // p be the total number of points on the inner rings and
    // a the position on the current ring, so that
    // n = p + a
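    // e.g. indices 0..7 trace the r = 1 ring: 0 -> (0,-1), 1 -> (1,-1), 2 -> (1,0), ...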
val r = (Math.floor((Math.sqrt(n + 1) - 1) / 2) + 1).toInt
    // compute the ring radius r: inverse of the arithmetic series 8+16+24+...
val p = (8 * r * (r - 1)) / 2
    // total points on rings up to r-1: arithmetic series 8+16+24+...
val en = r * 2
    // points per side of the current ring
val a = (1 + n - p) % (r * 8)
    // compute the position and shift it so the ring starts at (-r+1,-r) rather
    // than (-r,-r), letting consecutive square rings connect
val m = Math.floor(a / (r * 2)).toInt
(
m match {
case 0 => a - r
case 1 => r
case 2 => r - (a % en)
case 3 => -r
},
m match {
case 0 => -r
case 1 => (a % en) - r
case 2 => r
case 3 => r - (a % en)
}
)
}
} | simplyianm/sysadmincraft | src/main/scala/pw/ian/sysadmincraft/world/PillarManager.scala | Scala | isc | 2,938 |
package com.regblanc.sgl.test
package core
import sgl._
import sgl.util._
trait AbstractApp extends MainScreenComponent {
this: GameApp with InputHelpersComponent with SchedulerProvider =>
override def startingScreen: GameScreen = LoadingScreen
}
| regb/scala-game-library | examples/hello/core/src/main/scala/App.scala | Scala | mit | 255 |
package akka.cluster.pubsub
import akka.actor.ActorRef
/**
* User: zhuyuqing
* Date: 2017/7/13
* Time: 15:43
*/
abstract class AbstractDistributedPubSubMediator(settings:DistributedPubSubSettings) extends DistributedPubSubMediator(settings) {
override def preStart(): Unit = super.preStart()
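  // Messages are offered to extendReceive first; anything unhandled falls back
  // to the base mediator's behaviour.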
override def receive : Receive = extendReceive.orElse(super.receive)
def extendReceive : Receive
override def newTopicActor(encTopic: String): ActorRef = super.newTopicActor(encTopic)
}
| qqbbyq/controller | opendaylight/md-sal/sal-distributed-datastore/src/main/java/akka/cluster/pubsub/AbstractDistributedPubSubMediator.scala | Scala | epl-1.0 | 508 |
package com.sksamuel.scapegoat.inspections.math
import com.sksamuel.scapegoat.{ Levels, Inspector, InspectionContext, Inspection }
/** @author Stephen Samuel */
class ZeroNumerator extends Inspection {
def inspector(context: InspectionContext): Inspector = new Inspector(context) {
override def postTyperTraverser = Some apply new context.Traverser {
import context.global._
import definitions._
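      // The encoded name of `/` is `$div`, so the typer represents `0 / x` as
      // Apply(Select(Literal(Constant(0)), TermName("$div")), ...); that is the
      // shape matched below.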
override def inspect(tree: Tree): Unit = {
tree match {
case Apply(Select(Literal(Constant(0)), TermName("$div")), args) =>
context.warn("Zero numerator",
tree.pos,
Levels.Warning,
"Dividing zero by any number will always return zero",
ZeroNumerator.this)
case _ => continue(tree)
}
}
}
}
}
| pwwpche/scalac-scapegoat-plugin | src/main/scala/com/sksamuel/scapegoat/inspections/math/ZeroNumerator.scala | Scala | apache-2.0 | 831 |
import sbt._
import Keys._
object FPInScalaBuild extends Build {
val opts = Project.defaultSettings ++ Seq(
scalaVersion := "2.10.3",
resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/",
libraryDependencies ++= Seq(
"org.specs2" %% "specs2" % "2.4.2" % "test"
),
scalacOptions in Test ++= Seq("-Yrangepos")
)
lazy val root =
Project(id = "fpinscala",
base = file("."),
settings = opts ++ Seq(
onLoadMessage ~= (_ + nio2check())
)) aggregate (chapterCode, exercises, answers)
lazy val chapterCode =
Project(id = "chapter-code",
base = file("chaptercode"),
settings = opts)
lazy val exercises =
Project(id = "exercises",
base = file("exercises"),
settings = opts)
lazy val answers =
Project(id = "answers",
base = file("answers"),
settings = opts)
def nio2check(): String = {
val cls = "java.nio.channels.AsynchronousFileChannel"
try {Class.forName(cls); ""}
    catch {case _: ClassNotFoundException =>
      ("\nWARNING: JSR-203 \"NIO.2\" (" + cls + ") not found.\n" +
       "You are probably running Java < 1.7; answers will not compile.\n" +
       "You seem to be running " + System.getProperty("java.version") + ".\n" +
       "Try `project exercises' before compile, or upgrading your JDK.")
}
}
}
| coughlac/fpinscala | project/Build.scala | Scala | mit | 1,431 |
import KVStore.KVStoreState
import cats.Monad
import diesel._
@diesel
trait Maths[F[_]] {
def int(i: Int): F[Int]
def add(x: F[Int], y: F[Int]): F[Int]
}
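// The @diesel annotation is expected to generate companion wiring so that,
// given an implicit Maths[F], calls like Maths.int(1) and Maths.add resolve to
// an interpreter such as the one below (a sketch of diesel's documented
// behaviour, not tied to a specific diesel version).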
@SuppressWarnings(Array("org.wartremover.warts.Any"))
object Maths {
implicit object KVSStateInterpreter extends Maths[KVStoreState] {
def int(i: Int) = Monad[KVStoreState].pure(i)
def add(x: KVStoreState[Int], y: KVStoreState[Int]) =
for {
a <- x
b <- y
} yield a + b
}
}
| lloydmeta/diesel | examples/src/main/scala/Maths.scala | Scala | mit | 475 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka
import consumer._
import utils.Utils
import java.util.concurrent.CountDownLatch
object TestZKConsumerOffsets {
def main(args: Array[String]): Unit = {
    if(args.length < 3) {
println("USAGE: " + TestZKConsumerOffsets.getClass.getName + " consumer.properties topic latest")
System.exit(1)
}
println("Starting consumer...")
val topic = args(1)
val autoOffsetReset = args(2)
val props = Utils.loadProps(args(0))
    props.put("autooffset.reset", autoOffsetReset)
val config = new ConsumerConfig(props)
val consumerConnector: ConsumerConnector = Consumer.create(config)
val topicMessageStreams = consumerConnector.createMessageStreams(Predef.Map(topic -> 1))
var threadList = List[ConsumerThread]()
for ((topic, streamList) <- topicMessageStreams)
for (stream <- streamList)
threadList ::= new ConsumerThread(stream)
for (thread <- threadList)
thread.start
// attach shutdown handler to catch control-c
Runtime.getRuntime().addShutdownHook(new Thread() {
override def run() = {
consumerConnector.shutdown
threadList.foreach(_.shutdown)
        println("consumer threads shut down")
}
})
}
}
private class ConsumerThread(stream: KafkaStream[Array[Byte], Array[Byte]]) extends Thread {
val shutdownLatch = new CountDownLatch(1)
override def run() {
println("Starting consumer thread..")
for (messageAndMetadata <- stream) {
println("consumed: " + new String(messageAndMetadata.message, "UTF-8"))
}
shutdownLatch.countDown
    println("thread shut down!")
}
def shutdown() {
shutdownLatch.await
}
}
| dchenbecker/kafka-sbt | core/src/test/scala/other/kafka/TestZKConsumerOffsets.scala | Scala | apache-2.0 | 2,482 |
/*                                                                      *\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\*                                                                      */
package squants.motion
import squants._
import squants.space.Feet
import squants.time.{ SecondTimeDerivative, Seconds, TimeDerivative, TimeSquared }
/**
* Represents the third time derivative of position after Velocity and Acceleration
*
* @author garyKeorkunian
* @since 0.1
*
* @param value Double
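 *
 * For example, integrating over time (per squants' usual TimeDerivative
 * semantics): `MetersPerSecondCubed(2) * Seconds(3)` yields
 * `MetersPerSecondSquared(6)`.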
*/
final class Jerk private (val value: Double, val unit: JerkUnit)
extends Quantity[Jerk]
with TimeDerivative[Acceleration]
with SecondTimeDerivative[Velocity] {
def dimension = Jerk
protected[squants] def timeIntegrated = MetersPerSecondSquared(toMetersPerSecondCubed)
protected[squants] def time = Seconds(1)
def *(that: TimeSquared): Velocity = this * that.time1 * that.time2
def toMetersPerSecondCubed = to(MetersPerSecondCubed)
def toFeetPerSecondCubed = to(FeetPerSecondCubed)
}
object Jerk extends Dimension[Jerk] {
private[motion] def apply[A](n: A, unit: JerkUnit)(implicit num: Numeric[A]) = new Jerk(num.toDouble(n), unit)
def apply = parse _
def name = "Jerk"
def primaryUnit = MetersPerSecondCubed
def siUnit = MetersPerSecondCubed
def units = Set(MetersPerSecondCubed, FeetPerSecondCubed)
}
trait JerkUnit extends UnitOfMeasure[Jerk] with UnitConverter {
def apply[A](n: A)(implicit num: Numeric[A]) = Jerk(n, this)
}
object MetersPerSecondCubed extends JerkUnit with PrimaryUnit with SiUnit {
val symbol = "m/s³"
}
object FeetPerSecondCubed extends JerkUnit {
val symbol = "ft/s³"
val conversionFactor = Meters.conversionFactor * Feet.conversionFactor
}
object JerkConversions {
lazy val meterPerSecondCubed = MetersPerSecondCubed(1)
lazy val footPerSecondCubed = FeetPerSecondCubed(1)
implicit class JerkConversions[A](n: A)(implicit num: Numeric[A]) {
def metersPerSecondCubed = MetersPerSecondCubed(n)
def feetPerSecondCubed = FeetPerSecondCubed(n)
}
implicit object JerkNumeric extends AbstractQuantityNumeric[Jerk](Jerk.primaryUnit)
}
| derekmorr/squants | shared/src/main/scala/squants/motion/Jerk.scala | Scala | apache-2.0 | 2,468 |
package fr.acinq.eclair.wire
import fr.acinq.bitcoin.ByteVector32
import fr.acinq.eclair.UInt64
import fr.acinq.eclair.wire.CommonCodecs._
import scodec.Codec
import scodec.codecs.{discriminated, list, variableSizeBytesLong}
/** Tlv types used inside Init messages. */
sealed trait InitTlv extends Tlv
object InitTlv {
/** The chains the node is interested in. */
case class Networks(chainHashes: List[ByteVector32]) extends InitTlv
}
object InitTlvCodecs {
import InitTlv._
private val networks: Codec[Networks] = variableSizeBytesLong(varintoverflow, list(bytes32)).as[Networks]
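  // Tlv records are framed as (varint type, length-prefixed value); `networks`
  // encodes the length-prefixed value for type 1: a list of 32-byte chain hashes.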
val initTlvCodec: Codec[TlvStream[InitTlv]] = TlvCodecs.tlvStream(discriminated[InitTlv].by(varint)
.typecase(UInt64(1), networks)
)
} | btcontract/wallet | app/src/main/java/fr/acinq/eclair/wire/InitTlv.scala | Scala | apache-2.0 | 742 |
package teststate
import teststate.Exports.{testStateAssertionSettings => _, _}
import teststate.TestUtil._
import teststate.core.Around
import teststate.data._
import utest._
object DslTest extends TestSuite {
val dsl = Dsl[Unit, Unit, Unit]
// def extract1[A, B](s: Sack[A, B]): B =
// s match {
// case Sack.Value(b) => b
// case x => sys error ("Got: " + x)
// }
//
// def extract1E[A, B, E](s: SackE[A, B, E]): B =
// extract1(s) match {
// case Right(b) => b
// case Left(e) => sys error e.toString
// }
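  // Unwraps the single Around.Delta assertion the DSL is expected to produce;
  // any other Sack shape is a test setup error.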
def extractAroundDelta1[O, S, E](c: Arounds[O, S, E]): Around.DeltaA[OS[O, S], E] =
c match {
case Sack.Value(Right(Around.Delta(d))) => d
case _ => sys error ("What? " + c)
}
def testName[O, S, E](c: Arounds[O, S, E], expectF: String, expectC: String): Unit = {
val n = extractAroundDelta1(c).name
assertEq(n(None).value, expectF)
assertEq(n(Some(BeforeAfter(null, null))).value, expectC)
}
override def tests = Tests {
"changeTo" - {
"pos" - testName(dsl.focus("Counter").value(_ => 7).assert.changeTo(_ + 1),
"Counter should be <?>.",
"Counter should be 8.")
"neg" - testName(dsl.focus("Counter").value(_ => 7).assert.not.changeTo(_ + 1),
"Counter shouldn't be <?>.",
"Counter shouldn't be 8.")
// TODO Didn't catch. Try using changeTo in multiple actions and verify final history. Might be Name strictness.
}
"incrementBy" - {
"pos" - testName(dsl.focus("Counter").value(_ => 7).assert.increaseBy(2),
"Counter should increase by 2.",
"Counter should increase by 2.")
// "Counter should be 9.")
"neg" - testName(dsl.focus("Counter").value(_ => 7).assert.not.increaseBy(2),
"Counter shouldn't increase by 2.",
"Counter shouldn't increase by 2.")
// "Counter shouldn't be 9.")
}
"decrementBy" - {
"pos" - testName(dsl.focus("Counter").value(_ => 7).assert.decreaseBy(2),
"Counter should decrease by 2.",
"Counter should decrease by 2.")
// "Counter should be 5.")
"neg" - testName(dsl.focus("Counter").value(_ => 7).assert.not.decreaseBy(2),
"Counter shouldn't decrease by 2.",
"Counter shouldn't decrease by 2.")
// "Counter shouldn't be 5.")
}
}
}
| japgolly/test-state | core/shared/src/test/scala/teststate/DslTest.scala | Scala | apache-2.0 | 2,348 |
object Test {
def main(args: Array[String]): Unit = {
println("Guillaume is number 1")
System.out.println("ducksarethebest.com")
System.err.println("foolsRus.com")
}
}
| som-snytt/dotty | tests/vulpix-tests/unit/i2147.scala | Scala | apache-2.0 | 184 |
package com.twitter.zipkin.tracegen
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import com.twitter.zipkin.gen
import java.nio.ByteBuffer
import collection.mutable.ListBuffer
import scala.util.Random
import com.twitter.zipkin.common._
import com.twitter.zipkin.conversions.thrift._
/**
* Here be dragons. Not terribly nice dragons.
*/
class TraceGen {
val random = new Random()
var serviceNameNo = 0
var methodNameNo = 0
def generate(genTraces: Int, maxSpanDepth: Int): List[gen.Trace] = {
val traces = ListBuffer[gen.Trace]()
for (i <- 0 until genTraces) {
val traceId = random.nextLong
traces.append(generateTrace(traceId, maxSpanDepth))
}
traces.toList
}
def generateTrace(traceId: Long, maxSpanDepth: Int): gen.Trace = {
val spanDepth = random.nextInt(maxSpanDepth) + 1
val startTimestamp = (System.currentTimeMillis * 1000) - (24 * 60 * 60 * 1000 * 1000L)
val endTimestamp = startTimestamp + 4 * 1000
gen.Trace(generateSpans(spanDepth, 1, traceId, None, startTimestamp, endTimestamp))
}
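  /**
   * Emits `width` client/server span pairs spread across the given time range,
   * recursing one level deeper under each pair until depthRemaining is
   * exhausted or the per-span time budget drops to zero.
   */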
def generateSpans(depthRemaining: Int, width: Int, traceId: Long,
parentSpanId: Option[Long], startTimestamp: Long, endTimestamp: Long): List[gen.Span] = {
if (depthRemaining <= 0) return List()
val timeStep = ((endTimestamp - startTimestamp) / width).toLong
if (timeStep <= 0) return List()
var timestamp = startTimestamp
var rv = List[gen.Span]()
for (j <- 0 until width) {
val clientServerSpans = generateSpan(traceId, parentSpanId, timestamp, timestamp + timeStep)
val genSpans = generateSpans(depthRemaining - 1, math.max(1, random.nextInt(5)), traceId,
Some(clientServerSpans._1.id), clientServerSpans._3, clientServerSpans._4)
rv = rv ::: List(clientServerSpans._1, clientServerSpans._2)
rv = rv ::: genSpans
timestamp += timeStep
}
rv
}
def generateSpan(traceId: Long, parentSpanId: Option[Long],
startTimestamp: Long, endTimestamp: Long): (gen.Span, gen.Span, Long, Long) = {
val customAnnotationCount = 5
    val totalAnnotations = customAnnotationCount + 4 // 4 for the required client/server annotations
val spanId = random.nextLong
val serviceName = "servicenameexample_" + serviceNameNo
serviceNameNo += 1
val spanName = "methodcallfairlylongname_" + methodNameNo
methodNameNo += 1
val host1 = Endpoint(random.nextInt(), 1234, serviceName)
val host2 = Endpoint(random.nextInt(), 5678, serviceName)
val maxGapMs = math.max(1, ((endTimestamp - startTimestamp) / totalAnnotations).toInt) // ms.
var timestamp = startTimestamp
timestamp += random.nextInt(maxGapMs)
val rvStartTimestamp = timestamp
val cs = new Annotation(timestamp, gen.Constants.CLIENT_SEND, Some(host1))
timestamp += random.nextInt(maxGapMs)
val sr = new Annotation(timestamp, gen.Constants.SERVER_RECV, Some(host2))
    val customAnnotations = (1 to customAnnotationCount).toList.map { _ =>
      timestamp += random.nextInt(maxGapMs)
      new Annotation(timestamp, "some custom annotation", Some(host2))
    }
timestamp += random.nextInt(maxGapMs)
val ss = new Annotation(timestamp, gen.Constants.SERVER_SEND, Some(host2))
timestamp += random.nextInt(maxGapMs)
val cr = new Annotation(timestamp, gen.Constants.CLIENT_RECV, Some(host1))
val clientAnnotations = List(cs, cr)
val spanClient = Span(traceId, spanName, spanId, parentSpanId, clientAnnotations,
Seq(gen.BinaryAnnotation("key", ByteBuffer.wrap("value".getBytes), gen.AnnotationType.String, None).toBinaryAnnotation))
val serverAnnotations = List(sr, ss) ::: customAnnotations
val spanServer = Span(traceId, spanName, spanId, parentSpanId, serverAnnotations, Nil)
(spanClient.toThrift, spanServer.toThrift, rvStartTimestamp, timestamp)
}
}
| ajantis/zipkin | zipkin-test/src/main/scala/com/twitter/zipkin/tracegen/TraceGen.scala | Scala | apache-2.0 | 4,468 |
/*                     __                                               *\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/      **
** /____/\___/_/ |_/____/_/ | |__/ /____/                              **
** |/____/ **
\*                                                                      */
package scala.scalajs.test.javalib
import scala.scalajs.test.JasmineTest
object ReferenceTest extends JasmineTest {
describe("java.land.ref.Reference") {
it("Should have all the normal operations") {
val s = "string"
val ref = new java.lang.ref.WeakReference(s)
expect(ref.get).toEqual(s)
expect(ref.enqueue).toEqual(false)
expect(ref.isEnqueued).toEqual(false)
ref.clear
// can't use `expect` because it tries to be clever and .toString things,
// which makes it blow up when you pass in null
assert(ref.get == null)
}
}
}
| swhgoon/scala-js | test/src/test/scala/scala/scalajs/test/javalib/ReferenceTest.scala | Scala | bsd-3-clause | 1,119 |
package org.life
object Main {
def main( args: Array[ String ] ) = {
println( "Hello, world!" )
}
}
object ConwaysGameOfLifeRunner extends App {
def allPositions( height: Int, width: Int ): Positions = Positions( ( ) => {
for {x <- 0 until height; y <- 0 until width} yield (x, y)
} )
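  // Each cell independently survives with probability populationRatio, so the
  // realised density only approximates the requested ratio.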
def randomPositions( height: Int, width: Int, populationRatio: Double ): Positions =
allPositions( height, width )
.filter( e => Math.random < populationRatio )
implicit class BoardStream[ B <: AbstractMatrixBackedBoard[ B ] ]( stream: Stream[ (B, Int) ] ) {
def haltAfterSteadyState = {
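      // Slide a window over the stream and stop once the newest board matches
      // any of the previous maxFoundPeriod boards.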
def noStateMatchesPrevious( items: Stream[ (B, Int) ] ): Boolean = {
val (current, _) = items.last
!( items.init exists { case (prev, _) => current == prev } )
}
      // will catch boards which are steady state and boards which oscillate (blinkers) at period 2
val maxFoundPeriod = 2
// TODO rewrite this to not require the stream to have initial throwaway elements
stream
.sliding( maxFoundPeriod + 1 )
.takeWhile( noStateMatchesPrevious )
.map( _.last ) // get back to the board we care about
}
}
val (height, width) = (50, 80)
val populationRatio = 0.3333
val initialPositions: Positions =
offsetBy( 0, 0 )( gliderGun ) ++
offsetBy( 41, 41 )( rotate(2)(gliderGun) )
// randomPositions( height, width, populationRatio )
val initBoard = MatrixBoardFactory.bounded( height, width )( initialPositions )
// this type cannot be specified explicitly lest we require updating it each time we change from toroid to bounded
val boardStream = Stream( (null, -2), (null, -1) ) ++
Stream.iterate( initBoard )( _.nextBoard( ) ).zipWithIndex
val (short, medium, long, epic) = (20, 150, 1125, 8437)
val (numbing, slow, peppy, fast) = (500, 250, 125, 62)
val (duration, delay) = (medium, fast)
boardStream
.haltAfterSteadyState
.take( duration )
.foreach { case (t, index) =>
println
println( t )
println( index )
try {
Thread.sleep( delay )
}
catch {
case e: InterruptedException => println( "error" )
}
}
}
| squid314/scala-game-of-life | src/main/scala/org/life/Main.scala | Scala | gpl-3.0 | 2,492 |
package com.faacets.qalg
package algos
import scala.{specialized => sp}
import spire.algebra._
import algebra._
trait PackRingMutable[@sp(Double, Long) A] extends Any with PackRing[A] {
implicit def MM: MatMut[M, A]
implicit def VM: VecMut[V, A]
implicit def MShift: MutableMatShift[M]
implicit def VShift: MutableVecShift[V]
}
object PackRingMutable {
type ForM[M0, A] = PackRingMutable[A] { type M = M0 }
type ForV[V0, A] = PackRingMutable[A] { type V = V0 }
type ForMV[M0, V0, A] = PackRingMutable[A] { type M = M0; type V = V0 }
}
| denisrosset/qalg | core/src/main/scala/qalg/algos/PackRingMutable.scala | Scala | mit | 555 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.database
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.cli.CommandMessages
import org.apache.openwhisk.core.database.LimitsCommand.LimitEntity
import org.apache.openwhisk.core.entity.{DocInfo, EntityName, UserLimits}
import scala.collection.mutable.ListBuffer
import scala.concurrent.duration.Duration
import scala.util.Try
@RunWith(classOf[JUnitRunner])
class LimitsCommandTests extends FlatSpec with WhiskAdminCliTestBase {
private val limitsToDelete = ListBuffer[String]()
protected val limitsStore = LimitsCommand.createDataStore()
behavior of "limits"
it should "set limits for non existing namespace" in {
implicit val tid = transid()
val ns = newNamespace()
resultOk(
"limits",
"set",
"--invocationsPerMinute",
"3",
"--firesPerMinute",
"7",
"--concurrentInvocations",
"11",
"--allowedKinds",
"nodejs:14",
"blackbox",
"--storeActivations",
"false",
ns) shouldBe CommandMessages.limitsSuccessfullySet(ns)
val limits = limitsStore.get[LimitEntity](DocInfo(LimitsCommand.limitIdOf(EntityName(ns)))).futureValue
limits.limits shouldBe UserLimits(
invocationsPerMinute = Some(3),
firesPerMinute = Some(7),
concurrentInvocations = Some(11),
allowedKinds = Some(Set("nodejs:14", "blackbox")),
storeActivations = Some(false))
resultOk("limits", "set", "--invocationsPerMinute", "13", ns) shouldBe CommandMessages.limitsSuccessfullyUpdated(ns)
val limits2 = limitsStore.get[LimitEntity](DocInfo(LimitsCommand.limitIdOf(EntityName(ns)))).futureValue
limits2.limits shouldBe UserLimits(Some(13), None, None)
}
it should "set and get limits" in {
val ns = newNamespace()
resultOk("limits", "set", "--invocationsPerMinute", "13", ns)
resultOk("limits", "get", ns) shouldBe "invocationsPerMinute = 13"
}
  it should "respond with default system limits for a non-existing namespace" in {
resultOk("limits", "get", "non-existing-ns") shouldBe CommandMessages.defaultLimits
}
it should "delete an existing limit" in {
val ns = newNamespace()
resultOk("limits", "set", "--invocationsPerMinute", "13", ns)
resultOk("limits", "get", ns) shouldBe "invocationsPerMinute = 13"
//Delete
resultOk("limits", "delete", ns) shouldBe CommandMessages.limitsDeleted
//Read after delete should result in default message
resultOk("limits", "get", ns) shouldBe CommandMessages.defaultLimits
//Delete of deleted namespace should result in error
resultNotOk("limits", "delete", ns) shouldBe CommandMessages.limitsNotFound(ns)
}
it should "update existing allowedKind limit" in {
val ns = newNamespace()
resultOk("limits", "set", "--allowedKinds", "nodejs:14", ns)
resultOk("limits", "get", ns) shouldBe "allowedKinds = nodejs:14"
resultOk("limits", "set", "--allowedKinds", "nodejs:14", "blackbox", "python", ns)
resultOk("limits", "get", ns) shouldBe "allowedKinds = nodejs:14, blackbox, python"
//Delete
resultOk("limits", "delete", ns) shouldBe CommandMessages.limitsDeleted
//Read after delete should result in default message
resultOk("limits", "get", ns) shouldBe CommandMessages.defaultLimits
//Delete of deleted namespace should result in error
resultNotOk("limits", "delete", ns) shouldBe CommandMessages.limitsNotFound(ns)
}
override def cleanup()(implicit timeout: Duration): Unit = {
implicit val tid = TransactionId.testing
    limitsToDelete.foreach { u =>
Try {
val limit = limitsStore.get[LimitEntity](DocInfo(LimitsCommand.limitIdOf(EntityName(u)))).futureValue
delete(limitsStore, limit.docinfo)
}
}
limitsToDelete.clear()
super.cleanup()
}
private def newNamespace(): String = {
val ns = randomString()
limitsToDelete += ns
ns
}
}
| style95/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/database/LimitsCommandTests.scala | Scala | apache-2.0 | 4,846 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.kafka010.mocks
import java.lang
import java.util.concurrent._
import java.util.function.Supplier
import org.apache.kafka.common.utils.Time
/**
* A class used for unit testing things which depend on the Time interface.
*
 * This class never advances the clock on its own; it only does so when you
 * call sleep(ms).
*
* It also comes with an associated scheduler instance for managing background tasks in
* a deterministic way.
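 *
 * For example, `time.sleep(100)` advances `milliseconds` by 100 and lets the
 * attached scheduler run any tasks that fall due inside that window.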
*/
private[kafka010] class MockTime(@volatile private var currentMs: Long) extends Time {
val scheduler = new MockScheduler(this)
def this() = this(System.currentTimeMillis)
override def milliseconds: Long = currentMs
override def hiResClockMs(): Long = milliseconds
override def nanoseconds: Long =
TimeUnit.NANOSECONDS.convert(currentMs, TimeUnit.MILLISECONDS)
override def sleep(ms: Long): Unit = {
this.currentMs += ms
scheduler.tick(ms, TimeUnit.MILLISECONDS)
}
override def waitObject(obj: Any, condition: Supplier[lang.Boolean], timeoutMs: Long): Unit =
throw new UnsupportedOperationException
override def toString(): String = s"MockTime($milliseconds)"
}
| maropu/spark | external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/mocks/MockTime.scala | Scala | apache-2.0 | 1,979 |
package net.cucumbersome
import akka.http.scaladsl.testkit.ScalatestRouteTest
import com.danielasfregola.randomdatagenerator.RandomDataGenerator
import net.cucumbersome.rpgRoller.warhammer.player.CombatActorJsonFormats
class RouteSpec extends UnitSpec with ScalatestRouteTest with RandomDataGenerator with CombatActorJsonFormats
| CucumisSativus/rpgRollerBackend | src/test/scala/net/cucumbersome/RouteSpec.scala | Scala | mit | 331 |
/*
* Copyright 2006-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb {
package mapper {
import _root_.java.sql.{ResultSet, Types}
import _root_.java.util.Date
import _root_.java.lang.reflect.Method
import _root_.net.liftweb._
import util._
import common._
import Helpers._
import http._
import json._
import S._
import js._
import _root_.scala.xml.{NodeSeq}
abstract class MappedDateTime[T<:Mapper[T]](val fieldOwner: T) extends MappedField[Date, T] {
private val data = FatLazy(defaultValue)
private val orgData = FatLazy(defaultValue)
protected def real_i_set_!(value: Date): Date = {
if (value != data.get) {
data() = value
this.dirty_?( true)
}
data.get
}
def dbFieldClass = classOf[Date]
def asJsonValue: JsonAST.JValue = is match {
case null => JsonAST.JNull
case v => JsonAST.JInt(v.getTime)
}
def toLong: Long = is match {
case null => 0L
case d: Date => d.getTime / 1000L
}
def asJsExp = JE.Num(toLong)
/**
* Get the JDBC SQL Type for this field
*/
def targetSQLType = Types.TIMESTAMP
def defaultValue: Date = null
// private val defaultValue_i = new Date
override def writePermission_? = true
override def readPermission_? = true
protected def i_is_! = data.get
protected def i_was_! = orgData.get
protected[mapper] def doneWithSave() {orgData.setFrom(data)}
protected def i_obscure_!(in : Date) : Date = {
new Date(0L)
}
/**
* Create an input field for the item
*/
override def _toForm: Box[NodeSeq] =
S.fmapFunc({s: List[String] => this.setFromAny(s)}){funcName =>
Full(<input type='text' id={fieldId}
name={funcName}
value={toString}/>)
}
override def setFromAny(f: Any): Date = f match {
case JsonAST.JNull => this.set(null)
case JsonAST.JInt(v) => this.set(new Date(v.longValue))
case s: String => LiftRules.parseDate(s).map(d => this.set(d)).openOr(this.is)
case (s: String) :: _ => LiftRules.parseDate(s).map(d => this.set(d)).openOr(this.is)
case _ => this.is
}
def jdbcFriendly(field : String) : Object = is match {
case null => null
case d => new _root_.java.sql.Timestamp(d.getTime)
}
def real_convertToJDBCFriendly(value: Date): Object = if (value == null) null else new _root_.java.sql.Timestamp(value.getTime)
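  // Sets both the current and the original value, so the field is considered
  // clean (not dirty) right after being loaded from the database.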
private def st(in: Box[Date]): Unit =
in match {
case Full(d) => data.set(d); orgData.set(d)
case _ => data.set(null); orgData.set(null)
}
def buildSetActualValue(accessor: Method, v: AnyRef, columnName: String): (T, AnyRef) => Unit =
(inst, v) => doField(inst, accessor, {case f: MappedDateTime[T] => f.st(toDate(v))})
def buildSetLongValue(accessor: Method, columnName: String): (T, Long, Boolean) => Unit =
(inst, v, isNull) => doField(inst, accessor, {case f: MappedDateTime[T] => f.st(if (isNull) Empty else Full(new Date(v)))})
def buildSetStringValue(accessor: Method, columnName: String): (T, String) => Unit =
(inst, v) => doField(inst, accessor, {case f: MappedDateTime[T] => f.st(toDate(v))})
def buildSetDateValue(accessor: Method, columnName: String): (T, Date) => Unit =
(inst, v) => doField(inst, accessor, {case f: MappedDateTime[T] => f.st(Full(v))})
def buildSetBooleanValue(accessor: Method, columnName: String): (T, Boolean, Boolean) => Unit =
(inst, v, isNull) => doField(inst, accessor, {case f: MappedDateTime[T] => f.st(Empty)})
/**
* Given the driver type, return the string required to create the column in the database
*/
def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.dateTimeColumnType + notNullAppender()
def inFuture_? = data.get match {
case null => false
case d => d.getTime > millis
}
def inPast_? = data.get match {
case null => false
case d => d.getTime < millis
}
override def toString: String = LiftRules.formatDate(is)
}
}
}
| jeppenejsum/liftweb | framework/lift-persistence/lift-mapper/src/main/scala/net/liftweb/mapper/MappedDateTime.scala | Scala | apache-2.0 | 4,468 |
package org.jetbrains.plugins.scala
package annotator
package template
/**
* Pavel Fatin
*/
class UndefinedMemberTest extends AnnotatorTestBase(UndefinedMember) {
def testValidHolders(): Unit = {
assertNothing(messages("class C { def f }"))
assertNothing(messages("trait T { def f }"))
}
def testHolderNew(): Unit = {
assertMatches(messages("new { def f }")) {
case Error("def f", Message) :: Nil =>
}
assertMatches(messages("new Object { def f }")) {
case Error("def f", Message) :: Nil =>
}
}
def testHolderObject(): Unit = {
assertMatches(messages("object O { def f }")) {
case Error("def f", Message) :: Nil =>
}
assertMatches(messages("object O extends Object { def f }")) {
case Error("def f", Message) :: Nil =>
}
}
def testDeclarations(): Unit = {
assertMatches(messages("new { def f }")) {
case Error("def f", Message) :: Nil =>
}
assertMatches(messages("new { var v: Object }")) {
case Error("var v: Object", Message) :: Nil =>
}
assertMatches(messages("new { type T }")) {
case Nil =>
}
assertMatches(messages("object O { type T }")) {
case Nil =>
}
}
def testDefinitions(): Unit = {
assertNothing(messages("new { def f = null }"))
assertNothing(messages("new { var v: Object = null }"))
assertNothing(messages("new { type T = Any }"))
}
private val Message = ScalaBundle.message("illegal.undefined.member")
} | jastice/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/annotator/template/UndefinedMemberTest.scala | Scala | apache-2.0 | 1,485 |
package core.api.modules
import akka.actor.ActorRef
import com.ning.http.client.AsyncHttpClientConfig
import io.surfkit.core.api.AbstractSystem
import io.surfkit.core.rabbitmq.RabbitDispatcher
import io.surfkit.core.rabbitmq.RabbitDispatcher.RabbitMqAddress
import play.api.libs.json._
import io.surfkit.model.Api._
import io.surfkit.model.{Api, Model}
import play.api.libs.ws.DefaultWSClientConfig
import play.api.libs.ws.ning.{NingWSClient, NingAsyncHttpClientConfigBuilder}
import scala.concurrent.Future
/**
* Created by suroot on 08/05/15.
*/
trait SurfKitModule extends AbstractSystem {
this : AbstractSystem =>
val wsConfig = new NingAsyncHttpClientConfigBuilder(DefaultWSClientConfig()).build
val wsBuilder = new AsyncHttpClientConfig.Builder(wsConfig)
// CA - must override these 2 at minimum to have a functioning module
def module():String
def mapper(r:Api.Request):Future[Api.Result]
import com.typesafe.config.ConfigFactory
private val config = ConfigFactory.load
config.checkValid(ConfigFactory.defaultReference)
// Let's Wax !
val sysDispatcher = system.actorOf(RabbitDispatcher.props(RabbitMqAddress(config.getString("rabbitmq.host"), config.getInt("rabbitmq.port"))))
sysDispatcher ! RabbitDispatcher.ConnectModule(module, mapper) // connect to the MQ
// TODO: don't like the multiple dispatcher bit :(
val userDispatcher = system.actorOf(RabbitDispatcher.props(RabbitMqAddress(config.getString("rabbitmq.host"), config.getInt("rabbitmq.port"))))
userDispatcher ! RabbitDispatcher.Connect // connect to the MQ
}
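
// --- Illustrative sketch, not part of the original file ----------------------
// A hedged example of the contract above: concrete modules supply a name and a
// request mapper. `EchoModule` is a hypothetical name; a real implementation
// must also satisfy the abstract members of AbstractSystem (e.g. the actor
// system), which are omitted here.
//
// object EchoModule extends SurfKitModule {
//   def module(): String = "echo"
//   def mapper(r: Api.Request): Future[Api.Result] =
//     Future.failed(new NotImplementedError("request handling goes here"))
// }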
| coreyauger/surfkit | core/src/main/scala/core/api/modules/SurfKitModule.scala | Scala | mit | 1,575 |
package object docs {
import java.io.File
import doodle.image.Image
import doodle.effect.Writer._
import doodle.java2d._
implicit class ImageSaveSyntax(image: Image) {
import doodle.image.syntax._
def save(filename: String): Unit = {
val dir = new File("docs/src/main/mdoc/")
val file = new File(dir, filename)
image.write[Png](file)
}
}
implicit class PictureSaveSyntax(picture: Picture[Unit]) {
import doodle.syntax._
def save(filename: String): Unit = {
val dir = new File("docs/src/main/mdoc/")
val file = new File(dir, filename)
picture.write[Png](file)
}
}
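
  // Illustrative usage sketch (assuming doodle's Image DSL imported above;
  // "circle.png" is a made-up file name):
  //
  //   Image.circle(100).save("circle.png") // writes docs/src/main/mdoc/circle.png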
}
| underscoreio/doodle | docs/src/main/scala/package.scala | Scala | apache-2.0 | 643 |
package contege.seqgen
import scala.collection.JavaConversions._
import scala.collection.mutable.Set
import scala.collection.mutable.Map
import java.util.ArrayList
import contege.ClassReader
import contege.Random
import contege.Atom
import contege.ConstructorAtom
import contege.MethodAtom
import contege.Util
import contege.Config
import javamodel.util.TypeResolver
import contege.FieldGetterAtom
import javamodel.staticc.UnknownType
import scala.collection.mutable.Set
/**
* Central point to load classes under test (and helper classes to use the classes under test).
* Always uses the putClassLoader, which may impose a stronger security policy
* than the standard class loader.
*
*/
class TypeManager(cut: String, envClasses: Seq[String], val putClassLoader: ClassLoader, random: Random) {
val primitiveProvider = new PrimitiveProvider(random)
private val type2Atoms = Map[String, ArrayList[Atom]]() // includes subtyping
private val type2AtomsPrecise = Map[String, ArrayList[Atom]]() // only precise matches
private val allClasses = new ArrayList[String]
allClasses.addAll(envClasses)
allClasses.add(cut)
allClasses.foreach(cls => {
constructors(cls).foreach(atom => if(atom.returnType.isDefined) {
allSuperTypes(atom.returnType).foreach(typ => {
type2Atoms.getOrElseUpdate(typ, new ArrayList[Atom]).add(atom)
})
      type2AtomsPrecise.getOrElseUpdate(atom.returnType.get, new ArrayList[Atom]).add(atom)
})
methods(cls).foreach(atom => if(atom.returnType.isDefined) {
allSuperTypes(atom.returnType).foreach(typ => {
type2Atoms.getOrElseUpdate(typ, new ArrayList[Atom]).add(atom)
})
type2AtomsPrecise.getOrElseUpdate(atom.returnType.get, new ArrayList[Atom]).add(atom)
})
fieldGetters(cls).foreach(atom => {
assert(atom.returnType.isDefined) // each field should have a type
allSuperTypes(atom.returnType).foreach(typ => {
type2Atoms.getOrElseUpdate(typ, new ArrayList[Atom]).add(atom)
})
type2AtomsPrecise.getOrElseUpdate(atom.returnType.get, new ArrayList[Atom]).add(atom)
})
})
println("TypeManager has indexed "+allClasses.size+" classes")
private var cutMethods_ = methods(cut)
def cutMethods = {
cutMethods_
}
def filterCUTMethods(oracleClass: String) = {
val result = new ArrayList[MethodAtom]
val oracleClassMethods = methods(oracleClass)
result.addAll(cutMethods_.filter(cm => oracleClassMethods.exists(om => cm.methodName == om.methodName && cm.paramTypes == om.paramTypes)))
cutMethods_ = result
}
def atomGivingType(typ: String): Option[Atom] = {
type2Atoms.get(typ) match {
case Some(atoms) => Some(atoms(random.nextInt(atoms.size)))
case None => None
}
}
def atomGivingTypeWithDowncast(typ: String): Option[Atom] = {
val allFromTypes = allSuperTypes(Some(typ)).toSet
val filteredFromTypes = if (allFromTypes.contains("java.lang.Object") && allFromTypes.size > 1) {
allFromTypes.filter(_ != "java.lang.Object")
} else allFromTypes
val allPotentialAtoms = Set[Atom]()
filteredFromTypes.foreach(fromType => allPotentialAtoms.addAll(allAtomsGivingPreciseType(fromType)))
val potentialAtoms = allPotentialAtoms.filter(_.isConstructor == false)
if (potentialAtoms.size == 0) None
else Some(random.chooseOne(potentialAtoms.toSet))
}
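
  // Note on the two indices built above: type2Atoms registers every atom under
  // all of its supertypes, while type2AtomsPrecise registers it only under its
  // exact return type. The downcast lookup deliberately skips constructors,
  // whose static return type is already precise.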
def allAtomsGivingType(typ: String): ArrayList[Atom] = {
type2Atoms.getOrElse(typ, new ArrayList[Atom])
}
def allAtomsGivingPreciseType(typ: String): ArrayList[Atom] = {
type2AtomsPrecise.getOrElse(typ, new ArrayList[Atom])
}
/**
* Names of all supertypes (classes and interfaces), including the class itself.
*/
private def allSuperTypes(clsNameOpt: Option[String]): Seq[String] = {
if (!clsNameOpt.isDefined) List[String]()
else {
if (Util.primitiveTypes.contains(clsNameOpt.get)) List(clsNameOpt.get)
else {
val cls = Class.forName(clsNameOpt.get, true, putClassLoader)
allSuperTypes(cls)
}
}
}
private def allSuperTypes(cls: Class[_]): Seq[String] = {
val superTypes = new ArrayList[String]
superTypes.add(cls.getName)
if(cls.getName != "java.lang.Object") {
cls.getInterfaces.foreach(itf => {
superTypes.addAll(allSuperTypes(itf))
})
val sup = cls.getSuperclass
if (sup != null) {
superTypes.addAll(allSuperTypes(sup))
}
}
    superTypes.sortWith((t1, t2) => t1 < t2)
}
def constructors(clsName: String): Seq[ConstructorAtom] = {
new ClassReader(Class.forName(clsName, true, putClassLoader)).readConstructorAtoms
}
def methods(clsName: String): Seq[MethodAtom] = {
new ClassReader(Class.forName(clsName, true, putClassLoader)).readMethodAtoms
}
def fieldGetters(clsName: String): Seq[FieldGetterAtom] = {
new ClassReader(Class.forName(clsName, true, putClassLoader)).readFieldGetterAtoms
}
def allSuperTypesAndItself(typeName: String) = {
val result = new ArrayList[String]()
result.add(typeName)
if (!Util.primitiveTypes.contains(typeName)) {
val typ = TypeResolver.resolve(typeName)
if (typ != null && typ != UnknownType) {
result.addAll(typ.allSuperTypes.map(_.qualName))
}
}
result.sortWith((x,y) => x < y)
}
} | michaelpradel/ConTeGe | src/contege/seqgen/TypeManager.scala | Scala | gpl-2.0 | 5,226 |
/*
* Copyright 2012 The SIRIS Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The SIRIS Project is a cooperation between Beuth University, Berlin and the
* HCI Group at the University of Würzburg. The project is funded by the German
* Federal Ministry of Education and Research (grant no. 17N4409).
*/
package simx.core.worldinterface
import simx.core.entity.Entity
import simx.core.entity.description.SValSet
import simx.core.ontology._
import simx.core.svaractor._
import simx.core.svaractor.semantictrait.base.{Base, Thing}
import simx.core.svaractor.unifiedaccess._
import simx.core.worldinterface.base.WorldInterfaceActorBase
import simx.core.worldinterface.entity._
import simx.core.worldinterface.eventhandling.{EventDescription, EventProviderMessage, _}
import scala.annotation.meta.param
import scala.collection.mutable
/* author: dwiebusch
* date: 10.09.2010
*/
/**
* @author Stephan Rehfeld
*/
case class WorldInterfaceActorInCluster( worldInterfaceActor : SVarActor.Ref )
(implicit @(transient @param) actorContext : SVarActor.Ref) extends SimXMessage
object WorldInterfaceActor extends SingletonActor(new WorldInterfaceActor, "worldInterface" )
/**
* The World Interface Actor, which is doing all the Interfacing work
*
* @author dwiebusch
*
*/
protected class WorldInterfaceActor extends WorldInterfaceActorBase
with EventProvider with EntityRegistration with NewEntityRegistration
{
// override protected implicit val actorContext = this
/** the map containing all registered actors */
private var actors = Map[Symbol, SVarActor.Ref]()
private val eventProviders = mutable.Map[GroundedSymbol, Set[SVarActor.Ref]]()
case class InvalidValueTypeException(reason : String) extends Exception
//TODO Remove/refactor if old entity registration is removed
protected def _onEntityUnRegistration(e : Entity): Unit = {
_unRegisterEntity(e)
}
//TODO Remove/refactor if old entity registration is removed
protected def _onEntityRegistration(e: Entity): Unit = {
_registerEntity(e)
}
def generateCopyOfRegistry(): List[SimXMessage] = {
//Helper function to make intellij more better ;)
def toEntityRegisterRequest(entityEntry: (Entity, List[Symbol])): SimXMessage =
EntityRegisterRequest(entityEntry._2, entityEntry._1).asInstanceOf[SimXMessage]
//TODO Use private field 'registeredEntities'
val es = worldRoot.flatten.map(toEntityRegisterRequest).toList
val ps = eventProviders.foldLeft(es : List[SimXMessage]){
(list, kv) => kv._2.foldLeft(list){ (l, p) => createMsg(p)(ProvideEventMessage(_, kv._1), l) }
}
eventHandlers.foldLeft(ps){
(list, kv) => kv._2.foldLeft(list){ (l, p) => createMsg(p._1)(RegisterHandlerMessage(_, kv._1, p._2), l) }
}
}
override def toString: String =
getClass.getCanonicalName
//Commented due to non-usage by martin, 20-04-2015
// /**
// * adds a state value to be observed, triggering a WorldInterfaceEvent with the given name
// *
// * @param stateValue the state value to be observed
// * @param trigger the name of the WorldInterfaceEvent to be triggered on value changes of stateValue
// */
// private def addValueChangeTrigger[T](stateValue : SVar[T], trigger : Symbol, container : List[Symbol]) {
// observe( stateValue ){ value => emitEvent(WorldInterfaceEvent(trigger, (stateValue, container, value ) ) ) }
// }
addHandler[RegisterHandlerMessage]{ msg =>
internalRequireEvent( msg.handler, msg.name, msg.restriction )
//Send notification about providers to handler
eventProviders.get(msg.name).collect{ case providers => msg.handler ! EventProviderMessage(providers, msg.name)}
forwardToForeignActors(msg)
}
addHandler[UnRegisterHandlerMessage]{ msg =>
internalRemoveEventHandler( msg.handler, msg.e )
forwardToForeignActors(msg)
}
addHandler[ProvideEventMessage]{ msg =>
forwardToForeignActors(msg)
eventProviders.update( msg.name, eventProviders.getOrElse(msg.name, Set[SVarActor.Ref]()) + msg.provider )
val e = new Entity(new EntityDescription(name = "Event[" + msg.name.toString + "]").desc)
e.set(types.EventDescription(new EventDescription(msg.name)))
e.set(types.Actor(msg.provider))
addToWorldRoot('eventProvider :: msg.name.toSymbol :: Nil, e)
eventHandlers.get(msg.name) collect {
case set => set.foreach{ _._1 ! EventProviderMessage(Set(msg.provider), msg.name, msg.event) }
}
}
addHandler[UnRegisterProviderMessage]{ msg =>
forwardToForeignActors(msg)
msg.e match {
case Some(event) =>
eventProviders.update(event.name, eventProviders.getOrElse(event.name, Set[SVarActor.Ref]()).filterNot( _ == msg.provider ))
case None => for ( (event, providers) <- eventProviders)
eventProviders.update(event, providers.filterNot( _ == msg.provider))
}
}
addHandler[ForwardMessageRequest]{
msg => Match(actors get msg.destination){
case Some(dst) => dst ! msg.msg
case None => ()
}
}
protected var svarDescRegistry = Map[String, SValDescription[_, _,_ <: Base,_ <: Thing]]()
protected var svarDescObservers = Set[SVarActor.Ref]()
addHandler[RegisterSVarDescription[_, _,_ <: Base,_ <: Thing]]{ msg =>
svarDescRegistry += msg.desc.ontoLink.getOrElse(msg.desc.sVarIdentifier.name) -> msg.desc
svarDescObservers.foreach( _ ! msg)
}
addHandler[GetRegisteredSVarDescriptions]{
msg => svarDescRegistry
}
addHandler[ObserveSVarDescRegistrations]{ msg =>
svarDescRegistry.foreach( tuple => msg.sender ! RegisterSVarDescription(tuple._2))
svarDescObservers += msg.sender
}
addHandler[ActorRegisterRequest]{ msg =>
forwardToForeignActors(msg)
actors += msg.name -> msg.actor
}
addHandler[ActorListingRequest]{
msg => msg.replyTo ! ActorListingReply(actors.keys.toList)
}
//Commented due to non-usage by martin, 20-04-2015
// addHandler[EntityCreateRequest]{ msg =>
// forwardToForeignActors(EntityRegisterRequest(msg.name, addToWorldRoot(msg.name, new Entity)))
// }
// addHandler[StateValueCreateRequest[_]]{
// case msg : StateValueCreateRequest[_] => forwardToForeignActors(
// EntityRegisterRequest(msg.container, addStateValue(SVarImpl(msg.value), msg.desc, msg.container) )
// )
// }
//Commented due to non-usage by martin, 20-04-2015
// addHandler[ExternalStateValueObserveRequest[_]]{
// msg => addValueChangeTrigger(msg.ovalue, msg.trigger, msg.container)
// }
addHandler[ActorEnumerateRequest]{
msg => Some(actors.keys.toList)
}
addHandler[ActorLookupRequest]{
msg => actors.get(msg.name)
}
//Commented due to non-usage by martin, 20-04-2015
// addHandler[InternalStateValueObserveRequest[_]]{
// msg => getEntity(msg.nameE).collect{
// case entity => entity.get(msg.c).forall{ _.values.foreach{ case svar : SVar[_] => addValueChangeTrigger(svar, msg.trigger, msg.nameE) } }
// }
// }
private val knownRelations = SValSet()
// private val otherKnownRelations = Map[Relation, Entity]()
addHandler[AddRelation]{ msg =>
knownRelations.update(msg.r.asSVal)
}
addHandler[RemoveRelation]{ msg =>
knownRelations.remove(msg.r.asSVal)
}
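  // Resolves the unknown side of a partially specified relation: one branch
  // matches stored relations on their object and returns the subjects, the
  // other matches on the subject and returns the objects.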
addHandler[HandleRelationRequest[_, _]]{ msg =>
if (msg.r.isLeft) {
knownRelations.getOrElse(msg.r.description.sVarIdentifier, Nil).map(_ as msg.r.description.asConvertibleTrait).
filter(_.getObject equals msg.r.getKnownValue).
map(x => MapKey(types.Entity, AnnotationSet()) -> x.getSubject).toMap
} else {
knownRelations.getOrElse(msg.r.description.sVarIdentifier, Nil).map(_ as msg.r.description.asConvertibleTrait).
filter(_.getSubject equals msg.r.getKnownValue).
map(x => MapKey(types.Entity, AnnotationSet()) -> x.getObject).toMap
}
}
}
| simulator-x/core | src/simx/core/worldinterface/WorldInterfaceActor.scala | Scala | apache-2.0 | 8,422 |
package mesosphere.marathon.api
import java.net.URI
import javax.servlet.http.{ HttpServletRequest, HttpServletResponse }
import javax.ws.rs.core.Response.Status
import javax.ws.rs.core.{ NewCookie, Response }
import mesosphere.marathon.plugin.auth._
import mesosphere.marathon.plugin.http.{ HttpRequest, HttpResponse }
import mesosphere.marathon.state.PathId
import scala.collection.JavaConverters._
/**
* Base trait for authentication and authorization in http resource endpoints.
*/
trait AuthResource extends RestResource {
def authenticator: Authenticator
def authorizer: Authorizer
def doIfAuthenticated(request: HttpServletRequest,
response: HttpServletResponse)(fn: Identity => Response): Response = {
val requestWrapper = new RequestFacade(request)
val identity = result(authenticator.authenticate(requestWrapper))
identity.map(fn).getOrElse {
val responseWrapper = new ResponseFacade
authenticator.handleNotAuthenticated(requestWrapper, responseWrapper)
responseWrapper.response
}
}
def doIfAuthorized[Resource](request: HttpServletRequest,
response: HttpServletResponse,
action: AuthorizedAction[Resource],
resources: Resource*)(fn: Identity => Response): Response = {
def isAllowed(id: Identity) = resources.forall(authorizer.isAuthorized(id, action, _))
def response(fn: HttpResponse => Unit): Response = {
val responseFacade = new ResponseFacade
fn(responseFacade)
responseFacade.response
}
val requestFacade = new RequestFacade(request)
result(authenticator.authenticate(requestFacade)) match {
case Some(identity) if isAllowed(identity) => fn(identity)
case Some(identity) => response(authorizer.handleNotAuthorized(identity, requestFacade, _))
case None => response(authenticator.handleNotAuthenticated(requestFacade, _))
}
}
def isAllowedToView(pathId: PathId)(implicit identity: Identity): Boolean = {
authorizer.isAuthorized(identity, ViewAppOrGroup, pathId)
}
private class RequestFacade(request: HttpServletRequest) extends HttpRequest {
// Jersey will not allow calls to the request object from another thread
// To circumvent that, we have to copy all data during creation
val headers = request.getHeaderNames.asScala.map(header => header -> request.getHeaders(header).asScala.toSeq).toMap
val path = request.getRequestURI
val cookies = request.getCookies
val params = request.getParameterMap
override def header(name: String): Seq[String] = headers.getOrElse(name, Seq.empty)
override def requestPath: String = path
override def cookie(name: String): Option[String] = cookies.find(_.getName == name).map(_.getValue)
override def queryParam(name: String): Seq[String] = params.asScala.get(name).map(_.toSeq).getOrElse(Seq.empty)
}
private class ResponseFacade extends HttpResponse {
private[this] var builder = Response.status(Status.UNAUTHORIZED)
override def header(name: String, value: String): Unit = builder.header(name, value)
override def status(code: Int): Unit = builder = builder.status(code)
override def sendRedirect(location: String): Unit = {
builder.status(Status.TEMPORARY_REDIRECT).location(new URI(location))
}
override def cookie(name: String, value: String, maxAge: Int, secure: Boolean): Unit = {
//scalastyle:off null
      builder.cookie(new NewCookie(name, value, null, null, null, maxAge, secure))
}
override def body(mediaType: String, bytes: Array[Byte]): Unit = {
builder.`type`(mediaType)
builder.entity(bytes)
}
def response: Response = builder.build()
}
}
| Kosta-Github/marathon | src/main/scala/mesosphere/marathon/api/AuthResource.scala | Scala | apache-2.0 | 3,832 |
package com.box.castle.router.mock
import com.box.castle.metrics.MetricsLogger
import scala.collection.mutable
import scala.concurrent.Promise
class MockMetricsLogger extends MetricsLogger {
val counts = mutable.HashMap[String, Long]()
var getCountPromises = mutable.HashMap("committer_fatal" -> Promise[Long])
private[this] val lock = new Object()
def getCountFor(name: String): Long = {
lock.synchronized {
counts.getOrElse(name, 0L)
}
}
def reset(): Unit = {
lock.synchronized {
counts.clear()
}
}
override def toString: String = {
lock.synchronized {
"MockMetricsLogger: counts = " + counts.toString
}
}
def count(component: String, name: String, pairs: Map[String, String], value: Long): Unit = {
lock.synchronized {
counts(name) = value + getCountFor(name)
getCountPromises.getOrElse(name, Promise[Long]).success(value)
}
}
def time(component: String, name: String, pairs: Map[String, String], nanoSeconds: Long): Unit = {}
}
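
// --- Illustrative sketch, not part of the original file ----------------------
// A hedged example of asserting on accumulated counts; `MockMetricsLoggerDemo`
// and the metric name "flush_count" are made up for illustration.
object MockMetricsLoggerDemo {
  def demo(): Unit = {
    val logger = new MockMetricsLogger
    logger.count("committer", "flush_count", Map.empty, 2L)
    logger.count("committer", "flush_count", Map.empty, 3L)
    assert(logger.getCountFor("flush_count") == 5L)
  }
}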
| Box-Castle/router | src/main/scala/com/box/castle/router/mock/MockMetricsLogger.scala | Scala | apache-2.0 | 1,028 |
package com.github.agourlay.cornichon.steps.regular
import cats.instances.future._
import cats.instances.either._
import cats.data.{ EitherT, NonEmptyList }
import cats.syntax.either._
import com.github.agourlay.cornichon.core._
import com.github.agourlay.cornichon.core.Engine._
import com.github.agourlay.cornichon.steps.wrapped.AttachStep
import monix.eval.Task
import scala.concurrent.duration.Duration
import scala.concurrent.{ ExecutionContext, Future }
case class EffectStep(title: String, effect: Session ⇒ Future[Either[CornichonError, Session]], show: Boolean = true) extends ValueStep[Session] {
def setTitle(newTitle: String) = copy(title = newTitle)
override def run(initialRunState: RunState) =
Task.deferFuture(effect(initialRunState.session)).map(_.leftMap(NonEmptyList.of(_)))
override def onError(errors: NonEmptyList[CornichonError], initialRunState: RunState) =
errorsToFailureStep(this, initialRunState.depth, errors)
override def onSuccess(result: Session, initialRunState: RunState, executionTime: Duration) =
(successLog(title, initialRunState.depth, show, executionTime), Some(result))
//Does not propagate the second step title
def chain(secondEffect: EffectStep)(implicit ec: ExecutionContext) =
copy(effect = s ⇒ EitherT(effect(s)).flatMap(s2 ⇒ EitherT(secondEffect.effect(s2))).value)
def chain(others: List[EffectStep]): Step =
AttachStep("", this :: others)
def chain(chainedEffect: Session ⇒ Future[Either[CornichonError, Session]])(implicit ec: ExecutionContext) =
copy(effect = s ⇒ EitherT(effect(s)).flatMap(s2 ⇒ EitherT(chainedEffect(s2))).value)
def chainSyncE(chainedEffect: Session ⇒ Either[CornichonError, Session])(implicit ec: ExecutionContext) =
copy(effect = s ⇒ EitherT(effect(s)).flatMap(s2 ⇒ EitherT.fromEither(chainedEffect(s2))).value)
def chainSync(chainedEffect: Session ⇒ Session)(implicit ec: ExecutionContext) =
copy(effect = s ⇒ EitherT(effect(s)).map(chainedEffect).value)
}
object EffectStep {
// Throws if empty list
def chainEffects(effectsStep: Seq[EffectStep])(implicit ec: ExecutionContext): EffectStep =
effectsStep.reduce((e1, e2) ⇒ e1.chain(e2))
def fromEitherT(title: String, effect: Session ⇒ EitherT[Future, CornichonError, Session], show: Boolean = true): EffectStep = {
val effectT: Session ⇒ Future[Either[CornichonError, Session]] = s ⇒ effect(s).value
EffectStep(title, effectT, show)
}
def fromSync(title: String, effect: Session ⇒ Session, show: Boolean = true): EffectStep = {
val effectF: Session ⇒ Future[Either[CornichonError, Session]] = s ⇒ Future.successful(Right(effect(s)))
EffectStep(title, effectF, show)
}
def fromSyncE(title: String, effect: Session ⇒ Either[CornichonError, Session], show: Boolean = true): EffectStep = {
val effectF: Session ⇒ Future[Either[CornichonError, Session]] = s ⇒ Future.successful(effect(s))
EffectStep(title, effectF, show)
}
def fromAsync(title: String, effect: Session ⇒ Future[Session], show: Boolean = true)(implicit ec: ExecutionContext): EffectStep = {
val effectF: Session ⇒ Future[Either[CornichonError, Session]] = s ⇒ effect(s).map(Right(_))
EffectStep(title, effectF, show)
}
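
  // Illustrative sketch (not part of the original file): building a step with
  // one of the factories above. The session key "demo-key" is made up, and the
  // exact Session API for adding values is assumed here.
  //
  //   val step: EffectStep =
  //     EffectStep.fromSyncE("set demo key", s => s.addValue("demo-key", "42"))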
} | OlegIlyenko/cornichon | cornichon-core/src/main/scala/com/github/agourlay/cornichon/steps/regular/EffectStep.scala | Scala | apache-2.0 | 3,281 |
package controllers
import io.flow.common.v0.models.User
import io.flow.splashpage.v0.{Authorization, Client}
import io.flow.splashpage.v0.errors.{GenericErrorResponse, UnitResponse}
import io.flow.splashpage.v0.models.{Geo, GeoForm, Publication, Subscription, SubscriptionForm}
import java.util.UUID
import scala.util.{Failure, Success, Try}
import play.api.libs.ws._
import play.api.test._
class SubscriptionsSpec extends PlaySpecification with MockClient {
import scala.concurrent.ExecutionContext.Implicits.global
"POST /subscriptions" in new WithServer(port=port) {
val form = createSubscriptionForm()
val subscription = await(anonClient.subscriptions.post(form))
subscription.email must beEqualTo(form.email)
subscription.publication must beEqualTo(form.publication)
}
"POST /subscriptions validates empty email" in new WithServer(port=port) {
expectErrors {
anonClient.subscriptions.post(createSubscriptionForm().copy(email = " "))
}.genericError.messages must beEqualTo(Seq("Email address cannot be empty"))
}
"POST /subscriptions validates invalid email" in new WithServer(port=port) {
expectErrors {
anonClient.subscriptions.post(createSubscriptionForm().copy(email = "test"))
}.genericError.messages must beEqualTo(Seq("Please enter a valid email address"))
}
"POST /subscriptions is idempotent with duplicate email" in new WithServer(port=port) {
val form = createSubscriptionForm()
val sub = createSubscription(form)
anonClient.subscriptions.post(form.copy(email = " " + form.email + " "))
anonClient.subscriptions.post(form.copy(email = " " + form.email.toUpperCase + " "))
anonClient.subscriptions.post(form.copy(email = form.email.toLowerCase))
subscriptionsDao.findById(sub.id).getOrElse {
sys.error("Failed to create subscription")
}.email must beEqualTo(form.email)
}
"POST /subscriptions validates publication" in new WithServer(port=port) {
val form = createSubscriptionForm()
expectErrors {
anonClient.subscriptions.post(form.copy(publication = Publication.UNDEFINED("invalid_publication")))
}.genericError.messages must beEqualTo(Seq("Publication not found"))
}
"POST /subscriptions geo info ignores empty string" in new WithServer(port=port) {
val form = GeoForm(
ipAddress = Some(" ")
)
val sub = createSubscription(createSubscriptionForm().copy(geo = Some(form)))
sub.geo must beEqualTo(Geo())
}
"POST /subscriptions stores geo info" in new WithServer(port=port) {
val form = GeoForm(
ipAddress = Some("127.0.0.1"),
country = Some("us")
)
val sub = createSubscription(createSubscriptionForm().copy(geo = Some(form)))
sub.geo must beEqualTo(
Geo(
ipAddress = form.ipAddress,
country = Some("USA")
)
)
}
"POST /subscriptions ignores invalid countries" in new WithServer(port=port) {
val form = GeoForm(
country = Some("random country")
)
val sub = createSubscription(createSubscriptionForm().copy(geo = Some(form)))
sub.geo must beEqualTo(
Geo(
ipAddress = None,
country = None
)
)
}
/* TODO: Enable tests once we have authorization running in production
"GET /subscriptions/:id" in new WithServer(port=port) {
val sub = createSubscription()
await(
identifiedClient.subscriptions.getById(sub.id)
) must beEqualTo(sub)
}
"GET /subscriptions/:id requires authorization" in new WithServer(port=port) {
expectNotAuthorized(
anonClient.subscriptions.getById(UUID.randomUUID.toString)
)
val otherClient = new Client(
s"http://localhost:$port",
Some(Authorization.Basic(UUID.randomUUID.toString.toString))
)
expectNotAuthorized(
anonClient.subscriptions.getById(UUID.randomUUID.toString)
)
}
"GET /subscriptions/:id w/ invalid id returns 404" in new WithServer(port=port) {
expectNotFound(
identifiedClient.subscriptions.getById(UUID.randomUUID.toString)
)
}
"GET /subscriptions by id" in new WithServer(port=port) {
val sub = createSubscription()
await(
identifiedClient.subscriptions.get(id = Some(Seq(sub.id)))
) must beEqualTo(Seq(sub))
await(
identifiedClient.subscriptions.get(id = Some(Seq(UUID.randomUUID.toString)))
) must beEqualTo(Nil)
}
"GET /subscriptions by email" in new WithServer(port=port) {
val sub = createSubscription()
await(
identifiedClient.subscriptions.get(email = Some(sub.email))
) must beEqualTo(Seq(sub))
await(
identifiedClient.subscriptions.get(email = Some(UUID.randomUUID.toString + "@flow.io"))
) must beEqualTo(Nil)
}
"GET /subscriptions by publication" in new WithServer(port=port) {
val sub = createSubscription()
await(
identifiedClient.subscriptions.get(email = Some(sub.email), publication = Some(sub.publication))
) must beEqualTo(Seq(sub))
}
*/
}
| flowcommerce/splashpage | api/test/controllers/SubscriptionsSpec.scala | Scala | mit | 4,992 |
package advent
//package test
import swing._
import event._
import java.util.Date
import java.awt.Color
import java.text.SimpleDateFormat
import javax.swing.{Icon, ImageIcon}
/**
* Demonstrates how to use combo boxes and custom item renderers.
*
* TODO: clean up layout
*/
object ComboBoxes extends SimpleSwingApplication {
import ComboBox._
lazy val ui = new FlowPanel {
contents += new ComboBox(List(1,2,3,4))
val patterns = List("dd MMMMM yyyy",
"dd.MM.yy",
"MM/dd/yy",
"yyyy.MM.dd G 'at' hh:mm:ss z",
"EEE, MMM d, ''yy",
"h:mm a",
"H:mm:ss:SSS",
"K:mm a,z",
"yyyy.MMMMM.dd GGG hh:mm aaa")
val dateBox = new ComboBox(patterns) { makeEditable() }
contents += dateBox
val field = new TextField(20) { editable = false }
contents += field
reactions += {
case SelectionChanged(`dateBox`) => reformat()
}
listenTo(dateBox.selection)
def reformat() {
try {
val today = new Date
val formatter = new SimpleDateFormat(dateBox.selection.item)
val dateString = formatter.format(today)
field.foreground = Color.black
field.text = dateString
} catch {
case e: IllegalArgumentException =>
field.foreground = Color.red
field.text = "Error: " + e.getMessage
}
}
val icons = try {
List(new ImageIcon("/Users/gregory/Developer/Eclipse Projects/Scala Adventure 2/images/margarita1.jpg"),
new ImageIcon("/Users/gregory/Developer/Eclipse Projects/Scala Adventure 2/images/margarita2.jpg"),
new ImageIcon("/Users/gregory/Developer/Eclipse Projects/Scala Adventure 2/images/rose.jpg"),
new ImageIcon("/Users/gregory/Developer/Eclipse Projects/Scala Adventure 2/images/banana.jpg")
)
// List(new ImageIcon(resourceFromClassloader("../../images/margarita1.jpg")),
// new ImageIcon(resourceFromClassloader("../../images/margarita2.jpg")),
// new ImageIcon(resourceFromClassloader("../../images/rose.jpg")),
// new ImageIcon(resourceFromClassloader("../../images/banana.jpg")))
} catch {
      case _: Throwable =>
println("Couldn't load images for combo box")
List(Swing.EmptyIcon)
}
val iconBox = new ComboBox(icons) {
renderer = new ListView.AbstractRenderer[Icon, Label](new Label) {
def configure(list: ListView[_], isSelected: Boolean, focused: Boolean, icon: Icon, index: Int) {
component.icon = icon
component.xAlignment = Alignment.Center
if(isSelected) {
component.border = Swing.LineBorder(list.selectionBackground, 3)
} else {
component.border = Swing.EmptyBorder(3)
}
}
}
}
contents += iconBox
}
def top = new MainFrame {
title = "ComboBoxes Demo"
contents = ui
}
}
| gregwk/clay-pot | Adventure/src/advent/ComboBoxes.scala | Scala | mit | 3,053 |
package debop4s.core.cryptography
import debop4s.core.AbstractCoreFunSuite
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.context.ApplicationContext
import org.springframework.test.context.support.AnnotationConfigContextLoader
import org.springframework.test.context.{ContextConfiguration, TestContextManager}
import scala.collection.JavaConverters._
/**
* debop4s.core.stests.cryptography.StringDigesterFunSuite
*
* @author 배성혁 [email protected]
* @since 2014. 2. 25. 오전 10:27
*/
@ContextConfiguration(classes = Array(classOf[CryptographyConfiguration]),
loader = classOf[AnnotationConfigContextLoader])
class StringDigesterFunSuite extends AbstractCoreFunSuite {
@Autowired val ctx: ApplicationContext = null
// Spring Autowired 를 수행합니다.
new TestContextManager(this.getClass).prepareTestInstance(this)
val PLAIN_TEXT = "동해물과 백두산이 마르고 닳도록~ Hello World! 1234567890 ~!@#$%^&*()"
test("string digest") {
val digesters = ctx.getBeansOfType(classOf[StringDigesterSupport]).values()
digesters.asScala.foreach { digester =>
log.debug(s"Digest message by ${ digester.algorithm }")
val digestedText = digester.digest(PLAIN_TEXT)
digester.matches(PLAIN_TEXT, digestedText) shouldEqual true
}
}
test("digest matches") {
val digester = new SHA512StringDigester()
val digest1 = digester.digest(PLAIN_TEXT)
digester.matches(PLAIN_TEXT, digest1) shouldEqual true
}
test("digest multiple") {
val digester = new SHA512StringDigester()
val digest1 = digester.digest(PLAIN_TEXT)
val digest2 = digester.digest(PLAIN_TEXT)
digest1 shouldEqual digest2
}
}
| debop/debop4s | debop4s-core/src/test/scala/debop4s/core/cryptography/StringDigesterFunSuite.scala | Scala | apache-2.0 | 1,728 |
package andriusdap.orbweaver.controllers
import andriusdap.orbweaver.core.Classifier
import com.google.inject.Singleton
import play.api.mvc._
import scala.language.postfixOps
@Singleton
class ClassifierController {
def classify(source: String) = Action {
request =>
Results.Ok(Classifier.classify(source))
}
} | AndriusDap/orb-weaver | docs/apps/src/main/scala/andriusdap/orbweaver/controllers/ClassifierController.scala | Scala | mit | 326 |
package logic
import akka.util.ByteString
import io.circe._
import io.circe.generic.JsonCodec
import io.circe.generic.auto._
import io.circe.generic.semiauto._
import io.circe.parser._
import io.circe.syntax._
import models.MetaServer
import play.api.http.HttpEntity
import play.api.mvc.{ResponseHeader, Result}
import utils.AppUtils.ALL_SERVICES
import utils.ErrorUtils.MetaError
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Success, Try}
/**
* Created by toidiu on 11/24/16.
*/
object FsMetaLogic {
//-=-=-=-=-=-=-=-==-==-==-==-=-=-=-=-=-=-
//META
//-=-=-=-=-=-=-=-==-==-==-==-=-=-=-=-=-=-
def resultMetaList(key: String): Future[Result] = {
for {
json <- getAllMetaList(key)
body = HttpEntity.Strict(ByteString(json.get.noSpaces), Some("application/json"))
ret <- Future(Result(ResponseHeader(200), body))
} yield ret
}
private def getAllMetaList(key: String): Future[Try[Json]] = {
Future.sequence(ALL_SERVICES.map(_.getMeta(key))).map { metaList =>
val jsonList = for {
metaEither <- metaList
json <- List(metaEither.fold(l => l.asJson, r => r.asJson))
} yield json
Success(jsonList.asJson)
}
}
}
| toidiu/toidiuFS | app/logic/FsMetaLogic.scala | Scala | mit | 1,257 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.apigateway.connector
import play.api.Logger
import play.api.http.HeaderNames.USER_AGENT
import play.api.http.Status.{NOT_FOUND, OK}
import play.api.libs.json.Format
import play.api.libs.ws.WSClient
import play.api.mvc.Headers
import uk.gov.hmrc.apigateway.cache.EntityWithResponseHeaders
import uk.gov.hmrc.apigateway.exception.GatewayError.{NotFound, ServerError}
import uk.gov.hmrc.apigateway.util.PlayRequestUtils.replaceHeaders
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.reflect.ClassTag
abstract class AbstractConnector(wsClient: WSClient) {
protected val applicationName = "api-gateway"
def get[T: ClassTag](url: String)(implicit format: Format[T]): Future[T] = {
get(url, Seq.empty[(String, String)]) map (_._1)
}
def get[T: ClassTag](url: String, reqHeaders: Seq[(String, String)])(implicit format: Format[T]): Future[EntityWithResponseHeaders[T]] = {
val headers = replaceHeaders(Headers(reqHeaders: _*))(USER_AGENT -> Some(applicationName))
wsClient.url(url).withHeaders(headers.toSimpleMap.toSeq: _*).get() map {
case wsResponse if wsResponse.status >= OK && wsResponse.status < 300 =>
Logger.debug(s"GET $url ${wsResponse.status}")
(wsResponse.json.as[T], wsResponse.allHeaders.mapValues(_.toSet))
case wsResponse if wsResponse.status == NOT_FOUND =>
Logger.debug(s"GET $url ${wsResponse.status}")
throw NotFound()
case wsResponse =>
Logger.error(s"Response status not handled: GET $url ${wsResponse.status} ${wsResponse.body}")
throw ServerError()
}
}
}
| hmrc/api-gateway | app/uk/gov/hmrc/apigateway/connector/AbstractConnector.scala | Scala | apache-2.0 | 2,254 |
package foo
class Outside
package object bar {
class Val(b: Boolean)
implicit def boolean2Val(b: Boolean) = new Val(b)
implicit def boolean2Outside(b: Boolean) = new Outside
}
| scala/scala | test/files/pos/t3999/a_1.scala | Scala | apache-2.0 | 184 |
package com.bbva.mike
/**
* Created by joseluisillanaruiz on 3/3/16.
*/
object KafkaProducerProtocol {
import spray.json._
case class StructuredLog(messageValue: String)
case class Sender(topicName: String, messageValue: String)
case object SenderResponseOK
case object SenderResponseKO
case object SendToKafka
case object KillActor
/* json (un)marshalling */
object StructuredLog extends DefaultJsonProtocol {
implicit val format = jsonFormat1(StructuredLog.apply)
}
object Sender extends DefaultJsonProtocol {
implicit val format = jsonFormat2(Sender.apply)
}
/* implicit conversions */
//implicit def sendMessageToTopic(sender: Sender): Sender = Sender(topicName = sender.topicName, messageValue = sender.messageValue)
//implicit def sendResponseToCLient(responseMessage: Response): Response =
// Response(statusCode = responseMessage.statusCode, responseBody = responseMessage.responseBody)
}
| joseluisillana/rest-api-scala-akka-poc | src/main/scala/com/bbva/mike/KafkaProducerProtocol.scala | Scala | apache-2.0 | 958 |
package au.com.dius.pact.model
import au.com.dius.pact.consumer._
/**
* @deprecated Moved to Kotlin implementation: Use Pact interface instead
*/
@Deprecated
case class PactFragment(consumer: Consumer,
provider: Provider,
interactions: Seq[RequestResponseInteraction]) {
import scala.collection.JavaConversions._
def toPact = new RequestResponsePact(provider, consumer, interactions)
def duringConsumerSpec[T](config: MockProviderConfig)(test: => T, verification: ConsumerTestVerification[T]): VerificationResult = {
val server = DefaultMockProvider(config)
new ConsumerPactRunner(server).runAndWritePact(toPact, config.getPactVersion)(test, verification)
}
//TODO: it would be a good idea to ensure that all interactions in the fragment have the same state
// really? why?
def defaultState: Option[String] = interactions.headOption.map(_.getProviderState)
def runConsumer(config: MockProviderConfig, test: TestRun): VerificationResult = {
duringConsumerSpec(config)(test.run(config), (u:Unit) => None)
}
def description = s"Consumer '${consumer.getName}' has a pact with Provider '${provider.getName}': " +
interactions.map { i => i.getDescription }.mkString(" and ") + sys.props("line.separator")
}
/**
* @deprecated Moved to Kotlin implementation
*/
@Deprecated
object PactFragment {
def consumer(consumer: String) = {
PactFragmentBuilder.apply(new Consumer(consumer))
}
}
| algra/pact-jvm | pact-jvm-consumer/src/main/scala/au/com/dius/pact/model/PactFragment.scala | Scala | apache-2.0 | 1,489 |
package koncept.http.web.sessions
import java.util.Date
import java.util.concurrent.ConcurrentHashMap
import com.sun.net.httpserver.HttpPrincipal
import java.util.HashSet
import scala.collection.JavaConversions._
import java.util.concurrent.TimeUnit
class Sessions {
val map: ConcurrentHashMap[String, Session] = new ConcurrentHashMap[String, Session]();
def create(principal: HttpPrincipal): Session = {
val session = new Session(principal, generateSessionId)
map.put(session.id, session)
session
}
def get(id: String) : Session = {
//needs to handle expiretimes here?
val session = map.get(id)
if (session != null)
session.touch
session
}
def destroy(id: String) {
map.remove(id)
}
def clear() {
map.clear()
}
def cleanup(minutesTimeout: Int) {
//do cleanup!!
val minimumStartTime = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(minutesTimeout);
for(key <- new HashSet[String](map.keySet())) {
val session = map.get(key)
if (session.lastTouch < minimumStartTime)
destroy(key)
}
}
  //need some kind of housekeeping here
private def generateSessionId(): String = {
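    // Note: time-derived ids are predictable; a SecureRandom-based token would
    // be the safer choice if these ids ever guard sensitive state.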
return (System.currentTimeMillis() + System.nanoTime()).toString.reverse
}
}
object Sessions {
val sessionsKey = "_sessions";
def apply(httpContextAttributes: java.util.Map[String,Object]) = {
httpContextAttributes.get(sessionsKey) match {
case s: Sessions => {s}
case null =>{
synchronizedCreateIfRequired(httpContextAttributes)
}
}
}
//mmm... double checked locking in scala...
private def synchronizedCreateIfRequired(httpContextAttributes: java.util.Map[String,Object]) : Sessions = {
this.synchronized {
httpContextAttributes.get(sessionsKey) match {
case s: Sessions => {s}
case null => {
val s = new Sessions();
httpContextAttributes.put(sessionsKey, s)
s
}
}
}
}
} | nkrul/http-router | src/main/scala/koncept/http/web/sessions/Sessions.scala | Scala | mit | 1,992 |
package play.modules.gjson
import play.api.libs.json.JsValue
object JSON {
def toJSON[A](a: A)(implicit jp: JSONPickler[A]): JsValue = jp.pickle(a)
def fromJSON[A](v: JsValue)(implicit jp: JSONPickler[A]) = jp.unpickle(v)
}
| kindleit/gkit | play-gjson/src/main/scala/JSON.scala | Scala | apache-2.0 | 232 |
package com.betfair.domain
import play.api.libs.json.{Reads, Writes}
object OrderBy extends Enumeration {
type OrderBy = Value
val BY_MARKET, BY_MATCH_TIME, BY_PLACE_TIME, BY_SETTLED_TIME, BY_VOID_TIME = Value
implicit val enumReads: Reads[OrderBy] = EnumUtils.enumReads(OrderBy)
implicit def enumWrites: Writes[OrderBy] = EnumUtils.enumWrites
}
| city81/betfair-service-ng | src/main/scala/com/betfair/domain/OrderBy.scala | Scala | bsd-2-clause | 358 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.health
import com.typesafe.config.ConfigRenderOptions
import play.api.{Configuration, Play}
import play.api.libs.json.Json
import play.api.mvc.{Action, Controller}
object AdminController extends AdminController{
import play.api.Play.current
def manifest = new Manifest(){
def appName = Play.configuration.getString("appName").getOrElse{
throw new IllegalArgumentException("no config value for key 'appName'")
}
}
}
trait AdminController extends Controller {
protected def manifest:Manifest
def ping = Action {
Ok
}
def details() = Action {
Ok(Json.toJson(manifest.contents))
}
def detail(name: String) = Action {
manifest.contents.get(name) match {
case Some(m) => Ok(m)
case None => NotFound
}
}
}
| liquidarmour/play-health | app/uk/gov/hmrc/play/health/AdminController.scala | Scala | apache-2.0 | 1,404 |
/**
* This file is part of mycollab-scheduler.
*
* mycollab-scheduler is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-scheduler is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-scheduler. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.schedule.email.crm.service
import com.esofthead.mycollab.common.MonitorTypeConstants
import com.esofthead.mycollab.common.domain.SimpleRelayEmailNotification
import com.esofthead.mycollab.common.i18n.GenericI18Enum
import com.esofthead.mycollab.core.utils.StringUtils
import com.esofthead.mycollab.html.{FormatUtils, LinkUtils}
import com.esofthead.mycollab.module.crm.CrmLinkGenerator
import com.esofthead.mycollab.module.crm.domain.{Lead, SimpleLead}
import com.esofthead.mycollab.module.crm.i18n.LeadI18nEnum
import com.esofthead.mycollab.module.crm.service.LeadService
import com.esofthead.mycollab.module.mail.MailUtils
import com.esofthead.mycollab.module.user.AccountLinkGenerator
import com.esofthead.mycollab.module.user.domain.SimpleUser
import com.esofthead.mycollab.module.user.service.UserService
import com.esofthead.mycollab.schedule.email.crm.LeadRelayEmailNotificationAction
import com.esofthead.mycollab.schedule.email.format.{EmailLinkFieldFormat, FieldFormat}
import com.esofthead.mycollab.schedule.email.{ItemFieldMapper, MailContext}
import com.esofthead.mycollab.spring.ApplicationContextUtil
import com.hp.gagawa.java.elements.{A, Img, Span}
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.beans.factory.config.BeanDefinition
import org.springframework.context.annotation.Scope
import org.springframework.stereotype.Component
/**
* @author MyCollab Ltd.
* @since 4.6.0
*/
@Component
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
class LeadRelayEmailNotificationActionImpl extends CrmDefaultSendingRelayEmailAction[SimpleLead] with
LeadRelayEmailNotificationAction {
@Autowired var leadService: LeadService = _
private val mapper = new LeadFieldNameMapper
override protected def getBeanInContext(context: MailContext[SimpleLead]): SimpleLead = leadService.findById(context.getTypeid.toInt,
context.getSaccountid)
override protected def getCreateSubjectKey: Enum[_] = LeadI18nEnum.MAIL_CREATE_ITEM_SUBJECT
override protected def getCommentSubjectKey: Enum[_] = LeadI18nEnum.MAIL_COMMENT_ITEM_SUBJECT
override protected def getItemFieldMapper: ItemFieldMapper = mapper
override protected def getItemName: String = StringUtils.trim(bean.getLeadName, 100)
override protected def buildExtraTemplateVariables(context: MailContext[SimpleLead]): Unit = {
val summary: String = bean.getLeadName
val summaryLink: String = CrmLinkGenerator.generateLeadPreviewFullLink(siteUrl, bean.getId)
val emailNotification: SimpleRelayEmailNotification = context.getEmailNotification
val user: SimpleUser = userService.findUserByUserNameInAccount(emailNotification.getChangeby, context.getSaccountid)
val avatarId: String = if (user != null) user.getAvatarid else ""
val userAvatar: Img = LinkUtils.newAvatar(avatarId)
val makeChangeUser: String = userAvatar.toString + emailNotification.getChangeByUserFullName
val actionEnum: Enum[_] = emailNotification.getAction match {
case MonitorTypeConstants.CREATE_ACTION => LeadI18nEnum.MAIL_CREATE_ITEM_HEADING
case MonitorTypeConstants.UPDATE_ACTION => LeadI18nEnum.MAIL_UPDATE_ITEM_HEADING
case MonitorTypeConstants.ADD_COMMENT_ACTION => LeadI18nEnum.MAIL_COMMENT_ITEM_HEADING
}
contentGenerator.putVariable("actionHeading", context.getMessage(actionEnum, makeChangeUser))
contentGenerator.putVariable("summary", summary)
contentGenerator.putVariable("summaryLink", summaryLink)
}
override protected def getUpdateSubjectKey: Enum[_] = LeadI18nEnum.MAIL_UPDATE_ITEM_SUBJECT
class LeadFieldNameMapper extends ItemFieldMapper {
put(Lead.Field.firstname, LeadI18nEnum.FORM_FIRSTNAME)
put(Lead.Field.email, new EmailLinkFieldFormat("email", LeadI18nEnum.FORM_EMAIL))
put(Lead.Field.lastname, LeadI18nEnum.FORM_LASTNAME)
put(Lead.Field.officephone, LeadI18nEnum.FORM_OFFICE_PHONE)
put(Lead.Field.title, LeadI18nEnum.FORM_TITLE)
put(Lead.Field.mobile, LeadI18nEnum.FORM_MOBILE)
put(Lead.Field.department, LeadI18nEnum.FORM_DEPARTMENT)
put(Lead.Field.otherphone, LeadI18nEnum.FORM_OTHER_PHONE)
put(Lead.Field.accountname, LeadI18nEnum.FORM_ACCOUNT_NAME)
put(Lead.Field.fax, LeadI18nEnum.FORM_FAX)
put(Lead.Field.leadsourcedesc, LeadI18nEnum.FORM_LEAD_SOURCE)
put(Lead.Field.website, LeadI18nEnum.FORM_WEBSITE)
put(Lead.Field.industry, LeadI18nEnum.FORM_INDUSTRY)
put(Lead.Field.status, LeadI18nEnum.FORM_STATUS)
put(Lead.Field.noemployees, LeadI18nEnum.FORM_NO_EMPLOYEES)
put(Lead.Field.assignuser, new LeadAssigneeFieldFormat(Lead.Field.assignuser.name, GenericI18Enum.FORM_ASSIGNEE))
put(Lead.Field.primaddress, LeadI18nEnum.FORM_PRIMARY_ADDRESS)
put(Lead.Field.otheraddress, LeadI18nEnum.FORM_OTHER_ADDRESS)
put(Lead.Field.primcity, LeadI18nEnum.FORM_PRIMARY_CITY)
put(Lead.Field.othercity, LeadI18nEnum.FORM_OTHER_CITY)
put(Lead.Field.primstate, LeadI18nEnum.FORM_PRIMARY_STATE)
put(Lead.Field.otherstate, LeadI18nEnum.FORM_OTHER_STATE)
put(Lead.Field.primpostalcode, LeadI18nEnum.FORM_PRIMARY_POSTAL_CODE)
put(Lead.Field.otherpostalcode, LeadI18nEnum.FORM_OTHER_POSTAL_CODE)
put(Lead.Field.primcountry, LeadI18nEnum.FORM_PRIMARY_COUNTRY)
put(Lead.Field.othercountry, LeadI18nEnum.FORM_OTHER_COUNTRY)
put(Lead.Field.description, GenericI18Enum.FORM_DESCRIPTION, isColSpan = true)
}
class LeadAssigneeFieldFormat(fieldName: String, displayName: Enum[_]) extends FieldFormat(fieldName, displayName) {
def formatField(context: MailContext[_]): String = {
val lead: SimpleLead = context.getWrappedBean.asInstanceOf[SimpleLead]
if (lead.getAssignuser != null) {
val userAvatarLink: String = MailUtils.getAvatarLink(lead.getAssignUserAvatarId, 16)
val img: Img = FormatUtils.newImg("avatar", userAvatarLink)
val userLink: String = AccountLinkGenerator.generatePreviewFullUserLink(MailUtils.getSiteUrl(lead.getSaccountid),
lead.getAssignuser)
val link: A = FormatUtils.newA(userLink, lead.getAssignUserFullName)
FormatUtils.newLink(img, link).write
}
else {
new Span().write
}
}
def formatField(context: MailContext[_], value: String): String = {
if (StringUtils.isBlank(value)) {
new Span().write
} else {
val userService: UserService = ApplicationContextUtil.getSpringBean(classOf[UserService])
val user: SimpleUser = userService.findUserByUserNameInAccount(value, context.getUser.getAccountId)
if (user != null) {
val userAvatarLink: String = MailUtils.getAvatarLink(user.getAvatarid, 16)
val userLink: String = AccountLinkGenerator.generatePreviewFullUserLink(MailUtils.getSiteUrl(user.getAccountId),
user.getUsername)
val img: Img = FormatUtils.newImg("avatar", userAvatarLink)
val link: A = FormatUtils.newA(userLink, user.getDisplayName)
FormatUtils.newLink(img, link).write
} else
value
}
}
}
}
| maduhu/mycollab | mycollab-scheduler/src/main/scala/com/esofthead/mycollab/schedule/email/crm/service/LeadRelayEmailNotificationActionImpl.scala | Scala | agpl-3.0 | 8,224 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.torch
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.optim.{L2Regularizer, SGD}
import com.intel.analytics.bigdl.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl.utils.RandomGenerator._
import com.intel.analytics.bigdl.utils.T
import scala.util.Random
@com.intel.analytics.bigdl.tags.Serial
class VolumetricConvolutionSpec extends TorchSpec {
"A VolumetricConvolution" should "generate correct output" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val from = 3
val to = 2
val kt = 2
val ki = 2
val kj = 2
val st = 2
val si = 2
val sj = 2
val padT = 1
val padW = 1
val padH = 1
val outt = 6
val outi = 6
val outj = 6
val int = (outt - 1) * st + kt - padT * 2
val ini = (outi - 1) * si + ki - padW * 2
val inj = (outj - 1) * sj + kj - padH * 2
val layer = new VolumetricConvolution[Double](from, to, kt, ki, kj, st, si, sj,
padT, padW, padH)
val input = Tensor[Double](3, 100, 56, 56).apply1(e => Random.nextDouble())
val output = layer.updateOutput(input)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"layer = nn.VolumetricConvolution($from, $to, $kt, $ki, $kj, $st, $si, $sj, $padT," +
s" $padW, $padH)\\n" +
"weight = layer.weight\\n" +
"bias = layer.bias \\n" +
"output = layer:forward(input)"
val (luaTime, torchResult) = TH.run(code, Map("input" -> input),
Array("weight", "bias", "output"))
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
weight should be (luaWeight)
bias should be (luaBias)
output should be (luaOutput)
}
"A VolumetricConvolution without bias" should "generate correct output" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val from = 3
val to = 2
val kt = 2
val ki = 2
val kj = 2
val st = 2
val si = 2
val sj = 2
val padT = 1
val padW = 1
val padH = 1
val outt = 6
val outi = 6
val outj = 6
val int = (outt - 1) * st + kt - padT * 2
val ini = (outi - 1) * si + ki - padW * 2
val inj = (outj - 1) * sj + kj - padH * 2
val layer = new VolumetricConvolution[Double](from, to, kt, ki, kj, st, si, sj,
padT, padW, padH, withBias = false)
val input = Tensor[Double](3, 100, 56, 56).apply1(e => Random.nextDouble())
val output = layer.updateOutput(input)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"layer = nn.VolumetricConvolution($from, $to, $kt, $ki, $kj, $st, $si, $sj, $padT," +
s" $padW, $padH):noBias()\\n" +
"weight = layer.weight\\n" +
"bias = layer.bias \\n" +
"output = layer:forward(input) "
val (luaTime, torchResult) = TH.run(code, Map("input" -> input),
Array("weight", "bias", "output"))
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
weight should be (luaWeight)
bias should be (luaBias)
output should be (luaOutput)
}
"A VolumetricConvolution with batch input" should "generate correct output" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val from = 3
val to = 2
val kt = 2
val ki = 2
val kj = 2
val st = 2
val si = 2
val sj = 2
val padT = 1
val padW = 1
val padH = 1
val outt = 6
val outi = 6
val outj = 6
val int = (outt - 1) * st + kt - padT * 2
val ini = (outi - 1) * si + ki - padW * 2
val inj = (outj - 1) * sj + kj - padH * 2
val batch = 3
val layer = new VolumetricConvolution[Double](from, to, kt, ki, kj, st, si, sj,
padT, padW, padH)
val input = Tensor[Double](batch, from, int, inj, ini).apply1(e => Random.nextDouble())
val output = layer.updateOutput(input)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"layer = nn.VolumetricConvolution($from, $to, $kt, $ki, $kj, $st, $si, $sj, $padT," +
s" $padW, $padH)\\n" +
"weight = layer.weight\\n" +
"bias = layer.bias \\n" +
"output = layer:forward(input) "
val (luaTime, torchResult) = TH.run(code, Map("input" -> input),
Array("weight", "bias", "output"))
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
weight should be (luaWeight)
bias should be (luaBias)
    output should be (luaOutput)
}
"A VolumetricConvolution with batch input no bias" should "generate correct output" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val from = 3
val to = 2
val kt = 2
val ki = 2
val kj = 2
val st = 2
val si = 2
val sj = 2
val padT = 1
val padW = 1
val padH = 1
val outt = 6
val outi = 6
val outj = 6
val int = (outt - 1) * st + kt - padT * 2
val ini = (outi - 1) * si + ki - padW * 2
val inj = (outj - 1) * sj + kj - padH * 2
val batch = 3
val layer = new VolumetricConvolution[Double](from, to, kt, ki, kj, st, si, sj,
padT, padW, padH, withBias = false)
val input = Tensor[Double](batch, from, int, inj, ini).apply1(e => Random.nextDouble())
val output = layer.updateOutput(input)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"layer = nn.VolumetricConvolution($from, $to, $kt, $ki, $kj, $st, $si, $sj, $padT," +
s" $padW, $padH):noBias()\\n" +
"weight = layer.weight\\n" +
"bias = layer.bias \\n" +
"output = layer:forward(input) "
val (luaTime, torchResult) = TH.run(code, Map("input" -> input),
Array("weight", "bias", "output"))
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
weight should be (luaWeight)
bias should be (luaBias)
output should be (luaOutput)
}
"A VolumetricConvolution" should "be good in gradient check for input" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val from = RNG.uniform(2, 4).toInt
val to = RNG.uniform(1, 4).toInt
val kt = RNG.uniform(1, 4).toInt
val ki = RNG.uniform(1, 4).toInt
val kj = RNG.uniform(1, 4).toInt
val st = RNG.uniform(1, 3).toInt
val si = RNG.uniform(1, 3).toInt
val sj = RNG.uniform(1, 3).toInt
val padT = RNG.uniform(0, 2).toInt
val padW = RNG.uniform(0, 2).toInt
val padH = RNG.uniform(0, 2).toInt
val outt = RNG.uniform(5, 7).toInt
val outi = RNG.uniform(5, 7).toInt
val outj = RNG.uniform(5, 7).toInt
val int = (outt - 1) * st + kt - padT * 2
val ini = (outi - 1) * si + ki - padW * 2
val inj = (outj - 1) * sj + kj - padH * 2
val layer = new VolumetricConvolution[Double](from, to, kt, ki, kj, st, si, sj,
padT, padW, padH)
val input = Tensor[Double](from, int, ini, inj).apply1(e => Random.nextDouble())
val checker = new GradientChecker(1e-4)
checker.checkLayer[Double](layer, input, 1e-3) should be(true)
}
"A VolumetricConvolution with batch" should "be good in gradient check for input" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val from = RNG.uniform(2, 6).toInt
val to = RNG.uniform(1, 4).toInt
val kt = RNG.uniform(1, 4).toInt
val ki = RNG.uniform(1, 4).toInt
val kj = RNG.uniform(1, 4).toInt
val st = RNG.uniform(1, 3).toInt
val si = RNG.uniform(1, 3).toInt
val sj = RNG.uniform(1, 3).toInt
val padT = RNG.uniform(0, 2).toInt
val padW = RNG.uniform(0, 2).toInt
val padH = RNG.uniform(0, 2).toInt
val outt = RNG.uniform(5, 7).toInt
val outi = RNG.uniform(5, 7).toInt
val outj = RNG.uniform(5, 7).toInt
val batch = RNG.uniform(2, 7).toInt
val int = (outt - 1) * st + kt - padT * 2
val ini = (outi - 1) * si + ki - padW * 2
val inj = (outj - 1) * sj + kj - padH * 2
val layer = new VolumetricConvolution[Double](from, to, kt, ki, kj, st, si, sj,
padT, padW, padH)
val input = Tensor[Double](batch, from, int, ini, inj).apply1(e => Random.nextDouble())
val checker = new GradientChecker(1e-4)
checker.checkLayer[Double](layer, input, 1e-3) should be(true)
}
"A VolumetricConvolution" should "be good in gradient check for weight" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val from = RNG.uniform(2, 4).toInt
val to = RNG.uniform(1, 4).toInt
val kt = RNG.uniform(1, 4).toInt
val ki = RNG.uniform(1, 4).toInt
val kj = RNG.uniform(1, 4).toInt
val st = RNG.uniform(1, 3).toInt
val si = RNG.uniform(1, 3).toInt
val sj = RNG.uniform(1, 3).toInt
val padT = RNG.uniform(0, 2).toInt
val padW = RNG.uniform(0, 2).toInt
val padH = RNG.uniform(0, 2).toInt
val outt = RNG.uniform(5, 7).toInt
val outi = RNG.uniform(5, 7).toInt
val outj = RNG.uniform(5, 7).toInt
val int = (outt - 1) * st + kt - padT * 2
val ini = (outi - 1) * si + ki - padW * 2
val inj = (outj - 1) * sj + kj - padH * 2
val layer = new VolumetricConvolution[Double](from, to, kt, ki, kj, st, si, sj,
padT, padW, padH)
val input = Tensor[Double](from, int, ini, inj).apply1(e => Random.nextDouble())
val checker = new GradientChecker(1e-4)
checker.checkWeight[Double](layer, input, 1e-3) should be(true)
}
"A VolumetricConvolution with batch" should "be good in gradient check for weight" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val from = RNG.uniform(2, 6).toInt
val to = RNG.uniform(1, 4).toInt
val kt = RNG.uniform(1, 4).toInt
val ki = RNG.uniform(1, 4).toInt
val kj = RNG.uniform(1, 4).toInt
val st = RNG.uniform(1, 3).toInt
val si = RNG.uniform(1, 3).toInt
val sj = RNG.uniform(1, 3).toInt
val padT = RNG.uniform(0, 2).toInt
val padW = RNG.uniform(0, 2).toInt
val padH = RNG.uniform(0, 2).toInt
val outt = RNG.uniform(5, 7).toInt
val outi = RNG.uniform(5, 7).toInt
val outj = RNG.uniform(5, 7).toInt
val batch = RNG.uniform(2, 7).toInt
val int = (outt - 1) * st + kt - padT * 2
val ini = (outi - 1) * si + ki - padW * 2
val inj = (outj - 1) * sj + kj - padH * 2
val layer = new VolumetricConvolution[Double](from, to, kt, ki, kj, st, si, sj,
padT, padW, padH)
val input = Tensor[Double](batch, from, int, ini, inj).apply1(e => Random.nextDouble())
val checker = new GradientChecker(1e-4)
checker.checkWeight[Double](layer, input, 1e-3) should be(true)
}
"VolumetricConvolution L2 regularizer" should "works correctly" in {
import com.intel.analytics.bigdl.numeric.NumericDouble
val seed = 100
RNG.setSeed(seed)
val from = 3
val to = 2
val kt = 2
val ki = 2
val kj = 2
val st = 2
val si = 2
val sj = 2
val padT = 1
val padW = 1
val padH = 1
val outt = 6
val outi = 6
val outj = 6
val int = (outt - 1) * st + kt - padT * 2
val ini = (outi - 1) * si + ki - padW * 2
val inj = (outj - 1) * sj + kj - padH * 2
val batch = 3
val input = Tensor[Double](batch, from, int, inj, ini).apply1(e => Random.nextDouble())
val state1 = T("learningRate" -> 0.1, "learningRateDecay" -> 5e-7,
"weightDecay" -> 0.1, "momentum" -> 0.002)
val state2 = T("learningRate" -> 0.1, "learningRateDecay" -> 5e-7,
"weightDecay" -> 0.0, "momentum" -> 0.002)
val criterion = new MSECriterion[Double]
val labels = Tensor[Double](1296).rand()
val model1 = Sequential()
.add(VolumetricConvolution[Double](from, to, kt, ki, kj, st, si, sj,
padT, padW, padH))
.add(Sigmoid())
val (weights1, grad1) = model1.getParameters()
val model2 = Sequential()
.add(VolumetricConvolution[Double](from, to, kt, ki, kj, st, si, sj,
padT, padW, padH,
wRegularizer = L2Regularizer(0.1), bRegularizer = L2Regularizer(0.1)))
.add(Sigmoid())
val (weights2, grad2) = model2.getParameters()
weights2.copy(weights1.clone())
grad2.copy(grad1.clone())
val sgd = new SGD[Double]
def feval1(x: Tensor[Double]): (Double, Tensor[Double]) = {
val output = model1.forward(input).toTensor[Double]
val _loss = criterion.forward(output, labels)
model1.zeroGradParameters()
val gradInput = criterion.backward(output, labels)
model1.backward(input, gradInput)
(_loss, grad1)
}
def feval2(x: Tensor[Double]): (Double, Tensor[Double]) = {
val output = model2.forward(input).toTensor[Double]
val _loss = criterion.forward(output, labels)
model2.zeroGradParameters()
val gradInput = criterion.backward(output, labels)
model2.backward(input, gradInput)
(_loss, grad2)
}
var loss1: Array[Double] = null
for (i <- 1 to 100) {
loss1 = sgd.optimize(feval1, weights1, state1)._2
println(s"${i}-th loss = ${loss1(0)}")
}
var loss2: Array[Double] = null
for (i <- 1 to 100) {
loss2 = sgd.optimize(feval2, weights2, state2)._2
println(s"${i}-th loss = ${loss2(0)}")
}
weights1 should be(weights2)
loss1 should be(loss2)
}
"A VolumetricConvolution layer" should "work with SAME padding" in {
import tensor.TensorNumericMath.TensorNumeric.NumericFloat
val nInputPlane = 1
val nOutputPlane = 1
val kW = 1
val kH = 1
val kT = 1
val dT = 2
val dW = 2
val dH = 2
val padW = -1
val padH = -1
val padT = -1
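    // A padding of -1 selects TensorFlow-style SAME padding in BigDL.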
val layer = new VolumetricConvolution(nInputPlane, nOutputPlane,
kT, kW, kH, dT, dW, dH, padT, padW, padH)
val inputData = Array(
0.0f, 1.0f, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26
)
val kernelData = Array(
1.0f
)
val biasData = Array(0.0f)
layer.weight.copy(Tensor(Storage(kernelData), 1, Array(nOutputPlane,
nInputPlane, kT, kH, kW)))
layer.bias.copy(Tensor(Storage(biasData), 1, Array(nOutputPlane)))
val input = Tensor(Storage(inputData), 1, Array(1, 3, 3, 3))
val output = layer.updateOutput(input)
val gradInput = layer.backward(input, output)
output.storage().array() should be (Array(0.0f, 2, 6, 8, 18, 20, 24, 26))
}
}
| jenniew/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/torch/VolumetricConvolutionSpec.scala | Scala | apache-2.0 | 15,445 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.regression
import scala.beans.BeanInfo
import org.apache.spark.mllib.linalg.{Vectors, Vector}
import org.apache.spark.mllib.util.NumericParser
import org.apache.spark.SparkException
/**
* Class that represents the features and labels of a data point.
*
* @param label Label for this data point.
* @param features List of features for this data point.
*/
@BeanInfo
case class LabeledPoint(label: Double, features: Vector) {
override def toString: String = {
"(%s,%s)".format(label, features)
}
}
/**
* Parser for [[org.apache.spark.mllib.regression.LabeledPoint]].
*/
object LabeledPoint {
/**
   * Parses a string produced by `LabeledPoint#toString` into
* an [[org.apache.spark.mllib.regression.LabeledPoint]].
*/
def parse(s: String): LabeledPoint = {
if (s.startsWith("(")) {
NumericParser.parse(s) match {
case Seq(label: Double, numeric: Any) =>
LabeledPoint(label, Vectors.parseNumeric(numeric))
case other =>
throw new SparkException(s"Cannot parse $other.")
}
} else { // dense format used before v1.0
val parts = s.split(',')
val label = java.lang.Double.parseDouble(parts(0))
val features = Vectors.dense(parts(1).trim().split(' ').map(java.lang.Double.parseDouble))
LabeledPoint(label, features)
}
}
}
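
// Illustrative round-trip of the two accepted formats (a sketch, not part of
// the original Spark source): the "(label,[features])" form produced by
// toString, and the space-separated dense form used before v1.0.
private[regression] object LabeledPointParseExample {
  def main(args: Array[String]): Unit = {
    val p = LabeledPoint(1.0, Vectors.dense(1.0, 0.0, 3.0))
    assert(LabeledPoint.parse(p.toString) == p) // parses "(1.0,[1.0,0.0,3.0])"
    assert(LabeledPoint.parse("1.0, 1.0 0.0 3.0") == p) // pre-1.0 dense format
  }
}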
| trueyao/spark-lever | mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala | Scala | apache-2.0 | 2,163 |
import org.scalatest._
import eu.unicredit.sophia._
import eu.unicredit.sophia.client._
import scala.annotation.tailrec
import scala.util.Random
class CursorSpec extends FlatSpec with SequentialNestedSuiteExecution {
val client = new SophiaClient()
client.start
client.setSophiaPath("./storage")
val db = client.use("cursorTest")
def randomKey = s"key-${new String(Random.alphanumeric.take(10).toArray)}"
def randomSize = Random.nextInt(500)
  def randomValue = {
    // evaluate the random size once so the numeric prefix matches the
    // actual payload length
    val size = randomSize
    s"$size-${new String(Random.alphanumeric.take(size).toArray)}"
  }
val inserts = 101
  val allKeys =
    for (_ <- 1 to inserts)
      yield randomKey
"The Sophia client " should "insert a bunch of elements" in {
for (key <- allKeys) {
//println(s"Inserting $randomKey -> $randomValue")
client.put(db, key, randomValue)
}
}
  @tailrec
  private def iterate(cursor: SophiaCursor, f: (String, String) => Unit): Unit = {
    if (cursor.hasNext) {
      val key = cursor.nextKey
      val value = cursor.nextValue
      f(getCString(key.array), getCString(value.array))
      val nextOpt = cursor.nextOptCursor
      if (nextOpt.isDefined)
        iterate(nextOpt.get, f)
    }
  }
it should "cursor over them" in {
val cursor = new SophiaCursor(client, db)
var keyInserted = allKeys
val checkPresence: (String,String) => Unit =
(key, value) => {
//println(s"Key: $key, value: $value")
keyInserted = keyInserted.filterNot { x => x === key }
}
    assert(allKeys.size == keyInserted.size)
    iterate(cursor, checkPresence)
    assert(keyInserted.isEmpty)
}
} | andreaTP/sophia-java | src/test/scala/CursorSpec.scala | Scala | apache-2.0 | 1,818 |
import com.typesafe.sbt.GitPlugin
import org.scalafmt.sbt.ScalaFmtPlugin
import sbt._
import sbt.plugins.JvmPlugin
import sbt.Keys._
object Build extends AutoPlugin {
override def requires = JvmPlugin && GitPlugin && ScalaFmtPlugin
override def trigger = allRequirements
override def projectSettings =
ScalaFmtPlugin.autoImport.reformatOnCompileSettings ++
Vector(
// Core settings
organization := "com.example",
scalaVersion := Version.Scala,
crossScalaVersions := Vector(scalaVersion.value),
scalacOptions ++= Vector(
"-unchecked",
"-deprecation",
"-language:_",
"-target:jvm-1.8",
"-encoding", "UTF-8"
),
unmanagedSourceDirectories.in(Compile) := Vector(scalaSource.in(Compile).value),
unmanagedSourceDirectories.in(Test) := Vector(scalaSource.in(Test).value),
// scalafmt settings
ScalaFmtPlugin.autoImport.scalafmtConfig := Some(baseDirectory.in(ThisBuild).value / ".scalafmt"),
// Git settings
GitPlugin.autoImport.git.useGitDescribe := true
)
}
| ksilin/sargon | project/Build.scala | Scala | apache-2.0 | 1,088 |
package com.gilt.storeroom.dynamodb
import com.gilt.storeroom._
import scala.collection.JavaConverters._
import org.scalatest.time._
import play.api.libs.iteratee.Iteratee
import com.amazonaws.services.dynamodbv2.{ AmazonDynamoDBClient, AmazonDynamoDB }
import com.amazonaws.services.dynamodbv2.model._
class DynamoDbStoreTest extends IterableStoreTest[DynamoLongStore] {
implicit override val patienceConfig =
PatienceConfig(timeout = Span(2, Seconds), interval = Span(5, Millis))
val client = new AmazonDynamoDBClient()
  def emptyStore: DynamoLongStore = emptyStore(1)
def emptyStore(throughput: Int) = {
val tableName = s"test_${scala.util.Random.nextInt}"
val request = (new CreateTableRequest)
.withTableName(tableName)
.withAttributeDefinitions(new AttributeDefinition("key", ScalarAttributeType.S))
.withKeySchema(new KeySchemaElement("key", KeyType.HASH))
.withProvisionedThroughput(new ProvisionedThroughput(throughput,throughput))
client.createTable(request)
// createTable is async, so now wait for it to be available
waitForCreation(tableName)
DynamoLongStore(tableName, "key", "value")
}
override def cleanupStore(store: DynamoLongStore) = {
val request = new DeleteTableRequest(store.tableName)
client.deleteTable(request)
}
def waitForCreation(tableName: String) = {
val request = new DescribeTableRequest(tableName)
while (client.describeTable(request).getTable.getTableStatus != "ACTIVE") {
Thread.sleep(1000)
}
}
def withStore(throughput: Int)(f: DynamoLongStore => Unit) = {
val store = emptyStore(throughput)
try {
f(store)
} finally {
cleanupStore(store)
}
}
test("big getAll works") {
// this might take a while
implicit val patienceConfig =
PatienceConfig(timeout = Span(240, Seconds), interval = Span(1, Seconds))
withStore(10) { intStore =>
// piggyback with a new string attribute
val store = DynamoStringStore(intStore.tableName, "key", "big")
// need at least 1MB to force multiple pages. 4 * 2500 * 200 = 2MB
val items = (0L until 200).map(i => (i.toString, "asdf" * 2500)).toMap
sequenceMapOfFutures(store.multiPut(items.mapValues(Some(_)))).futureValue
// not using enumeratorValue because the implicit PatienceConfig goes all
// wonky and I can't be bothered to sort it out.
(store.getAll() run Iteratee.getChunks).futureValue should contain theSameElementsAs items.toList
}
}
}
| gilt/storeroom | dynamodb/src/test/scala/com/gilt/storeroom/dynamodb/DynamoDbStoreTest.scala | Scala | mit | 2,518 |
/**
* Copyright 2017 RiskSense, Inc.
* This file is part of ipaddr library.
*
* Ipaddr is free software licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may obtain a copy of the
* License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.risksense.ipaddr
import scala.collection.immutable.HashSet
// scalastyle:off multiple.string.literals magic.number
class IpRangeTest extends UnitSpec {
private val addr1 = "192.168.1.200"
private val addr2 = "192.168.1.230"
private val range = IpRange(addr1, addr2)
private val range2 = IpRange("192.168.1.210", "192.168.1.220")
private val range3 = IpRange("192.168.1.100", "192.168.1.210")
private val range4 = IpRange("192.168.1.220", "192.168.1.240")
"Creating an IpRange" should "result in failure if addresses are invalid" in {
// first address invalid
an[IpaddrException] should be thrownBy IpRange("1.2.300.20", "1.2.3.2")
// second address invalid
an[IpaddrException] should be thrownBy IpRange("192.168.1.200", "192.168.1.256")
// first address > second address
an[IpaddrException] should be thrownBy IpRange("192.168.1.230", "192.168.1.229")
}
it should "succeed if addresses are valid" in {
IpRange("10.2.10.12", "10.2.10.15") shouldBe a[IpRange]
IpRange("10.2.10.230", "10.2.10.230") shouldBe a[IpRange]
}
"An IpRange object" should "perform all range operations" in {
range.toString() should be(addr1 + "-" + addr2)
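    // first/last are the range endpoints as unsigned 32-bit integers, e.g.
    // 192.168.1.200 -> 192*2^24 + 168*2^16 + 1*2^8 + 200 = 3232235976.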
range.first should be(3232235976L)
range.last should be(3232236006L)
range.key should be((4, 3232235976L, 3232236006L))
range.sortKey should be((4, 3232235976L, 27))
}
it should "perform contains operation" in {
// Check range edge addresses
range.contains(addr1) should be(true)
range.contains(addr2) should be(true)
range.contains(range2) should be(true)
range.contains(range3) should be(false)
range.contains(range4) should be(false)
val net = IpNetwork("10.4.10.100/30")
val rightRange = "10.4.10.105"
val r1 = IpRange("10.4.10.101", "10.4.10.102")
val r2 = IpRange("10.4.10.99", rightRange)
val r3 = IpRange("10.4.10.100", rightRange)
val r4 = IpRange("10.4.10.101", rightRange)
r1.contains(net) should be(false)
r2.contains(net) should be(true)
r3.contains(net) should be(true)
r4.contains(net) should be(false)
an[IpaddrException] should be thrownBy r4.contains("1.2.3") // address is bad
}
it should "perform cidrs operation" in {
val net1 = IpNetwork("192.168.1.200/29")
val net2 = IpNetwork("192.168.1.208/28")
val net3 = IpNetwork("192.168.1.224/30")
val net4 = IpNetwork("192.168.1.228/31")
val net5 = IpNetwork("192.168.1.230/32")
val netList = List(net1, net2, net3, net4, net5)
range.cidrs should be(netList)
}
it should "check for equality" in {
val hs = HashSet(range2, range, range3)
range should be(IpRange(addr1, addr2))
range == range2 should be(false)
range.equals(range) should be(true)
range.equals(addr1) should be(false)
hs.contains(range) should be(true)
hs.contains(range4) should be(false)
}
}
| risksense/ipaddr | src/test/scala/com/risksense/ipaddr/IpRangeTest.scala | Scala | apache-2.0 | 3,571 |
package org.wartremover
package contrib.test
import org.scalatest.FunSuite
import org.wartremover.contrib.warts.OldTime
import org.wartremover.test.WartTestTraverser
class OldTimeTest extends FunSuite with ResultAssertions {
val javaError = "The old Java time API is disabled. Use Java 8 java.time._ API instead."
val jodaError = "JodaTime is disabled. Use Java 8 java.time._ API instead."
test("disable Joda time wildcard imports") {
val result = WartTestTraverser(OldTime) {
import org.joda.time._
}
assertError(result)(jodaError)
}
test("disable Joda time explicit imports") {
val result = WartTestTraverser(OldTime) {
import org.joda.time.LocalDate
}
assertError(result)(jodaError)
}
test("disable Joda time renamed imports") {
val result = WartTestTraverser(OldTime) {
import org.joda.time.{ Instant => Something }
}
assertError(result)(jodaError)
}
test("disable Joda time erased imports") {
val result = WartTestTraverser(OldTime) {
import org.joda.time.{ Instant => _ }
}
assertError(result)(jodaError)
}
test("disable java.util.Date explicit imports") {
val result = WartTestTraverser(OldTime) {
import java.util.Date
}
assertError(result)(javaError)
}
test("disable java.util.Date renamed imports") {
val result = WartTestTraverser(OldTime) {
import java.util.{ Date => Something }
}
assertError(result)(javaError)
}
test("disable java.util.Date erased imports") {
val result = WartTestTraverser(OldTime) {
import java.util.{ Date => _ }
}
assertError(result)(javaError)
}
test("disable java.util.Calendar explicit imports") {
val result = WartTestTraverser(OldTime) {
import java.util.Calendar
}
assertError(result)(javaError)
}
test("disable java.util.Calendar renamed imports") {
val result = WartTestTraverser(OldTime) {
import java.util.{ Calendar => Something }
}
assertError(result)(javaError)
}
test("disable java.util.Calendar erased imports") {
val result = WartTestTraverser(OldTime) {
import java.util.{ Calendar => _ }
}
assertError(result)(javaError)
}
test("disable java.util.GregorianCalendar explicit imports") {
val result = WartTestTraverser(OldTime) {
import java.util.GregorianCalendar
}
assertError(result)(javaError)
}
test("disable java.util.GregorianCalendar renamed imports") {
val result = WartTestTraverser(OldTime) {
import java.util.{ GregorianCalendar => Something }
}
assertError(result)(javaError)
}
test("disable java.util.GregorianCalendar erased imports") {
val result = WartTestTraverser(OldTime) {
import java.util.{ GregorianCalendar => _ }
}
assertError(result)(javaError)
}
test("disable java.util.TimeZone explicit imports") {
val result = WartTestTraverser(OldTime) {
import java.util.TimeZone
}
assertError(result)(javaError)
}
test("disable java.util.TimeZone renamed imports") {
val result = WartTestTraverser(OldTime) {
import java.util.{ TimeZone => Something }
}
assertError(result)(javaError)
}
test("disable java.util.TimeZone erased imports") {
val result = WartTestTraverser(OldTime) {
import java.util.{ TimeZone => _ }
}
assertError(result)(javaError)
}
test("disable java.text.DateFormat explicit imports") {
val result = WartTestTraverser(OldTime) {
import java.text.DateFormat
}
assertError(result)(javaError)
}
test("disable java.text.DateFormat renamed imports") {
val result = WartTestTraverser(OldTime) {
import java.text.{ DateFormat => Something }
}
assertError(result)(javaError)
}
test("disable java.text.DateFormat erased imports") {
val result = WartTestTraverser(OldTime) {
import java.text.{ DateFormat => _ }
}
assertError(result)(javaError)
}
test("disable java.text.SimpleDateFormat explicit imports") {
val result = WartTestTraverser(OldTime) {
import java.text.SimpleDateFormat
}
assertError(result)(javaError)
}
test("disable java.text.SimpleDateFormat renamed imports") {
val result = WartTestTraverser(OldTime) {
import java.text.{ SimpleDateFormat => Something }
}
assertError(result)(javaError)
}
test("disable java.text.SimpleDateFormat erased imports") {
val result = WartTestTraverser(OldTime) {
import java.text.{ SimpleDateFormat => _ }
}
assertError(result)(javaError)
}
test("disable java.util._ combined imports") {
val result = WartTestTraverser(OldTime) {
import java.util.{ Date, Calendar, GregorianCalendar, TimeZone }
}
assertError(result)(javaError)
}
test("disable java.util._ combined multiline imports") {
val result = WartTestTraverser(OldTime) {
import java.util.{ Date, Calendar }
import java.util.{ GregorianCalendar, TimeZone }
}
assertErrors(result)(javaError, 2)
}
test("disable combined java and joda time imports") {
val result = WartTestTraverser(OldTime) {
import java.util.{ Date, Calendar }
import org.joda.time.Interval
}
assertResult(List(
"[wartremover:OldTime] " + javaError,
"[wartremover:OldTime] " + jodaError), "result.errors")(result.errors)
assertResult(List.empty, "result.warnings")(result.warnings)
}
test("still allow importing java.util._") {
val result = WartTestTraverser(OldTime) {
import java.util._
}
assertEmpty(result)
}
test("still allow importing org.joda._") {
val result = WartTestTraverser(OldTime) {
import org.joda._
}
assertEmpty(result)
}
test("disable use of java.util.Date as a val (1)") {
val result = WartTestTraverser(OldTime) {
import java.util._
val x: Date = ???
}
assertError(result)(javaError)
}
test("disable use of java.util.Date as a val (2)") {
val result = WartTestTraverser(OldTime) {
import java.util._
val x = new Date()
}
assertErrors(result)(javaError, 2)
}
test("disable creating instances of java.util.Date (1)") {
val result = WartTestTraverser(OldTime) {
import java.util._
new Date()
}
assertError(result)(javaError)
}
test("disable creating instances of java.util.Date (2)") {
val result = WartTestTraverser(OldTime) {
import java.util._
val x: Object = new Date()
}
assertError(result)(javaError)
}
test("disable aliasing java.util.Date") {
val result = WartTestTraverser(OldTime) {
type X = java.util.Date
}
assertError(result)(javaError)
}
test("disable aliasing org.joda.time.LocalDate") {
val result = WartTestTraverser(OldTime) {
type X = org.joda.time.LocalDate
}
assertError(result)(jodaError)
}
test("disable using java.util.Date as a lower type bound") {
val result = WartTestTraverser(OldTime) {
def x[A >: java.util.Date](a: A): Unit = ()
}
assertError(result)(javaError)
}
test("disable using org.joda.time.LocalDate as a lower type bound") {
val result = WartTestTraverser(OldTime) {
def x[A >: org.joda.time.LocalDate](a: A): Unit = ()
}
assertError(result)(jodaError)
}
test("disable using java.util.Date as an upper type bound") {
val result = WartTestTraverser(OldTime) {
def x[A <: java.util.Date](a: A): Unit = ()
}
assertError(result)(javaError)
}
test("disable using org.joda.time.LocalDate as an upper type bound") {
val result = WartTestTraverser(OldTime) {
def x[A <: org.joda.time.LocalDate](a: A): Unit = ()
}
assertError(result)(jodaError)
}
test("disable using java.util.Date as a function return type") {
val result = WartTestTraverser(OldTime) {
def x(): java.util.Date = ???
}
assertError(result)(javaError)
}
test("disable using org.joda.time.LocalDate as a function return type") {
val result = WartTestTraverser(OldTime) {
def x(): org.joda.time.LocalDate = ???
}
assertError(result)(jodaError)
}
test("disable using java.util.Date as a function argument type") {
val result = WartTestTraverser(OldTime) {
def x(a: java.util.Date): Unit = ()
}
assertError(result)(javaError)
}
test("disable using org.joda.time.LocalDate as a function argument type") {
val result = WartTestTraverser(OldTime) {
def x(a: org.joda.time.LocalDate): Unit = ()
}
assertError(result)(jodaError)
}
test("disable using java.util.Date as a type parameter (1)") {
val result = WartTestTraverser(OldTime) {
val x: List[java.util.Date] = List.empty
}
assertError(result)(javaError)
}
test("disable using java.util.Date as a type parameter (2)") {
val result = WartTestTraverser(OldTime) {
val x = List.empty[java.util.Date]
}
assertErrors(result)(javaError, 2)
}
test("disable using org.joda.time.LocalDate as a type parameter (1)") {
val result = WartTestTraverser(OldTime) {
val x: List[org.joda.time.LocalDate] = List.empty
}
assertError(result)(jodaError)
}
test("disable using org.joda.time.LocalDate as a type parameter (2)") {
val result = WartTestTraverser(OldTime) {
val x = List.empty[org.joda.time.LocalDate]
}
assertErrors(result)(jodaError, 2)
}
}
| tim-zh/wartremover-contrib | core/src/test/scala/wartremover/contrib/warts/OldTimeTest.scala | Scala | apache-2.0 | 9,412 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js sbt plugin **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.jsenv
import org.mozilla.javascript._
import org.scalajs.core.tools.io._
package object rhino {
implicit class ContextOps(val self: Context) extends AnyVal {
def evaluateFile(scope: Scriptable, file: VirtualJSFile,
securityDomain: AnyRef = null): Any = {
self.evaluateString(scope, file.content, file.path, 1, securityDomain)
}
}
implicit class ScriptableObjectOps(val self: Scriptable) {
def addFunction(name: String, function: Array[AnyRef] => Any): Unit = {
val rhinoFunction =
new BaseFunction {
ScriptRuntime.setFunctionProtoAndParent(this, self)
override def call(context: Context, scope: Scriptable,
thisObj: Scriptable, args: Array[AnyRef]): AnyRef = {
function(args) match {
case () => Undefined.instance
case r => r.asInstanceOf[AnyRef]
}
}
}
ScriptableObject.putProperty(self, name, rhinoFunction)
}
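
    // Example (sketch): expose a host-side logger to scripts with
    //   scope.addFunction("log", args => println(args.mkString(" ")))
    // Script code can then call log("hello"); the Unit result is surfaced
    // to JavaScript as undefined by the match above.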
}
}
| jmnarloch/scala-js | js-envs/src/main/scala/org/scalajs/jsenv/rhino/package.scala | Scala | bsd-3-clause | 1,530 |
package org.scalatra
package swagger
// separate files so the dependencies stay clean
import org.scalatra.json.NativeJsonSupport
trait NativeSwaggerBase extends ScalatraBase with NativeJsonSupport with CorsSupport with SwaggerBase
| lightvector/scalatra | swagger/src/main/scala/org/scalatra/swagger/NativeSwaggerBase.scala | Scala | bsd-2-clause | 234 |
package com.arcusys.valamis.hook
import com.arcusys.valamis.hook.utils.{StructureInfo, TemplateInfo, Utils}
import com.liferay.portal.kernel.events.SimpleAction
import com.liferay.portal.kernel.log.LogFactoryUtil
import com.liferay.portal.service.{GroupLocalServiceUtil, UserLocalServiceUtil}
class UpgradeTemplates extends SimpleAction {
private val log = LogFactoryUtil.getLog(classOf[UpgradeTemplates])
override def run(companyIds: Array[String]): Unit = {
log.info("Upgrade valamis web content template")
companyIds.foreach(companyId => {
val groupId = GroupLocalServiceUtil.getCompanyGroup(companyId.toLong).getGroupId
val userId = UserLocalServiceUtil.getDefaultUserId(companyId.toLong)
upgrade(groupId, userId)
})
}
private def upgrade(groupId: Long, userId: Long) {
Utils.addStructureWithTemplate(
groupId,
userId,
StructureInfo(key = "ValamisWebContent", name = "valamis-web-content"),
TemplateInfo(key = "ValamisWebContent", name = "ValamisWebContent")
)
}
}
| igor-borisov/valamis | valamis-hook/src/main/scala/com/arcusys/valamis/hook/UpgradeTemplates.scala | Scala | gpl-3.0 | 1,048 |
package org.littlewings.javaee7.cdi
import java.io.File
import org.apache.catalina.startup.Tomcat
import org.scalatest.Suite
import org.scalatest.BeforeAndAfterAll
trait EmbeddedTomcatCdiSupport extends Suite with BeforeAndAfterAll {
protected val port: Int = 8080
protected val tomcat: Tomcat = new Tomcat
protected val baseDir: File = createTempDir("tomcat", port)
protected val docBaseDir: File = createTempDir("tomcat-docbase", port)
override def beforeAll(): Unit = {
tomcat.setPort(port)
tomcat.setBaseDir(baseDir.getAbsolutePath)
val context =
tomcat.addWebapp("", docBaseDir.getAbsolutePath)
context.addParameter("org.jboss.weld.environment.servlet.archive.isolation", "false")
context.addParameter("resteasy.injector.factory", "org.jboss.resteasy.cdi.CdiInjectorFactory")
tomcat.start()
}
override def afterAll(): Unit = {
tomcat.stop()
tomcat.destroy()
deleteDirs(baseDir)
deleteDirs(docBaseDir)
}
private def createTempDir(prefix: String, port: Int): File = {
val tempDir = File.createTempFile(s"${prefix}.", s".${port}")
tempDir.delete()
tempDir.mkdir()
tempDir.deleteOnExit()
tempDir
}
private def deleteDirs(file: File): Unit = {
file
.listFiles
.withFilter(f => f.getName != "." && f.getName != "..")
.foreach {
case d if d.isDirectory => deleteDirs(d)
case f => f.delete()
}
file.delete()
}
}
| kazuhira-r/javaee7-scala-examples | cdi-alternative/src/test/scala/org/littlewings/javaee7/cdi/EmbeddedTomcatCdiSupport.scala | Scala | mit | 1,461 |
package ch.ninecode.cim
import org.apache.spark.rdd.RDD
import org.apache.spark.rdd.RDD.rddToPairRDDFunctions
import org.apache.spark.sql.SparkSession
import org.apache.spark.storage.StorageLevel
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import ch.ninecode.model._
/**
* Handle duplicate processing.
*
* For each element with id X, if there are other elements with "rdf:ID='X'",
* this class chooses only one.
*
* This "duplicates" condition arises, for example, when spatial filters
* are applied to tile large datasets to partition the export task into
* smaller subsets, for reasons such as parallelization, memory
* constraints, etc.
*
* A linear element crossing a tile boundary can be exported in either tile,
* if it can be determined beforehand which tile. A simpler option is to
* export such objects in all tiles whose spatial extents includes
* some of the element. It is also nice to include
* related elements to make each tile self consistent.
*
* These tiles must then be recombined into the full dataset,
* which is the task for this component - to delete duplicate elements.
*
* Warnings are generated if the deleted elements are not identical to
* the elements that are retained.
*
* @param spark The Spark session this class is running in.
* @param storage The storage level to cache the resultant RDD.
*/
class CIMDeDup (spark: SparkSession, storage: StorageLevel = StorageLevel.MEMORY_AND_DISK_SER) extends CIMRDD with Serializable
{
implicit val session: SparkSession = spark
implicit val storage_level: StorageLevel = storage // for put()
implicit val log: Logger = LoggerFactory.getLogger(getClass)
/**
* Compare elements for equality.
*
* Since all but one element with the same mRID will be deleted,
* this checks that they really are the same.
*
* It logs a warning if the elements are not equal.
*
* @param element The "primary" element.
* @param others The "other" elements, although this choice of primary and other is arbitrary.
*/
def check (element: Element, others: Iterable[Element]): Unit =
{
others match
{
case e :: t =>
if (element != e)
log.warn(s"element ${element.id} has a non-identical duplicate")
check(element, t)
case Nil =>
}
}
/**
* Perform deduplication - keep only one element.
*
* @param elements The elements with identical mRID.
* @return One element (the head of the list) after checking the others are true duplicates.
*/
def deduplicate (elements: Iterable[Element]): Element =
{
elements.toList match
{
case head :: Nil =>
head
case head :: tail =>
// check for equality
check(head, tail)
head
case _ => BasicElement()
}
}
/**
* Replace the Element RDD with a de-duplicated version.
*
* Since RDD are immutable, another copy is created containing only unique elements
* and this replaces the current RDD[Element] referenced by the persistent
* RDD registry.
*
* The new RDD is cached and checkpointed (if checkpointing is enabled by the Spark context having a CheckpointDir).
*
* @return The new element RDD.
*/
def do_deduplicate (): RDD[Element] =
{
log.info("eliminating duplicates")
// get the elements RDD
val elements = getOrElse[Element]
// deduplicate
val new_elements = elements.keyBy(_.id).groupByKey().values.map(deduplicate)
// swap the old Elements RDD for the new one
put(new_elements, false)
new_elements
}
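
    // Typical wiring (illustrative sketch): once CIMReader has registered the
    // Element RDD for this session, swap in the de-duplicated copy with:
    //   val dedup = new CIMDeDup (session, StorageLevel.MEMORY_AND_DISK_SER)
    //   val unique: RDD[Element] = dedup.do_deduplicate ()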
} | derrickoswald/CIMScala | CIMReader/src/main/scala/ch/ninecode/cim/CIMDeDup.scala | Scala | mit | 3,805 |
package de.m7w3.signal.messages
import java.nio.file.Files
import java.util
import de.m7w3.signal.Logging
import de.m7w3.signal.events.{ContactsSyncedEvent, EventPublisher, GroupsSyncedEvent}
import de.m7w3.signal.store.SignalDesktopApplicationStore
import org.whispersystems.signalservice.api.SignalServiceMessageReceiver
import org.whispersystems.signalservice.api.messages.multidevice._
import org.whispersystems.signalservice.api.messages.{SignalServiceAttachment, SignalServiceDataMessage, SignalServiceEnvelope}
import scala.annotation.tailrec
import scala.collection.JavaConverters.collectionAsScalaIterableConverter
class SignalDesktopMessageHandler(signalDesktopApplicationStore: SignalDesktopApplicationStore,
messageReceiver: SignalServiceMessageReceiver,
eventPublisher: EventPublisher) extends MessageHandler with Logging {
//def handlePrekeyMessage()
  def handleBlockedList(envelope: SignalServiceEnvelope, message: BlockedListMessage): Unit = {
    logger.debug("got blocked list message [{}]", message.getNumbers.asScala.mkString(", "))
  }
def handleContacts(envelope: SignalServiceEnvelope, contacts: SignalServiceAttachment): Unit = {
logger.debug("received contacts sync message")
val iStream = if (contacts.isStream) {
contacts.asStream().getInputStream
} else {
val tmpFile = Files.createTempFile("contacts", "contacts").toFile
tmpFile.deleteOnExit() // maybe do more, delete earlier?
messageReceiver.retrieveAttachment(contacts.asPointer(), tmpFile)
}
val contactsStream = new DeviceContactsInputStream(iStream)
processContacts(contactsStream)
eventPublisher.publishEvent(ContactsSyncedEvent)
}
@tailrec
private def processContacts(contactsStream: DeviceContactsInputStream): Unit = {
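    // The stream yields one DeviceContact per call; this code relies on
    // read() returning null at end of stream, which Option turns into None.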
Option(contactsStream.read()) match {
case Some(deviceContact: DeviceContact) =>
logger.debug("received contact {}", deviceContact.getNumber)
signalDesktopApplicationStore.saveContact(deviceContact)
processContacts(contactsStream)
case None =>
// end
}
}
def handleGroups(envelope: SignalServiceEnvelope, attachment: SignalServiceAttachment): Unit = {
logger.debug("received groups sync message")
val iStream = if (attachment.isStream) {
attachment.asStream().getInputStream
} else {
// TODO: put into local data folder
val tmpFile = Files.createTempFile("groups", "groups").toFile
tmpFile.deleteOnExit() // maybe do more, delete earlier?
messageReceiver.retrieveAttachment(attachment.asPointer(), tmpFile)
}
val groupsStream = new DeviceGroupsInputStream(iStream)
processGroups(groupsStream)
eventPublisher.publishEvent(GroupsSyncedEvent)
}
@tailrec
private def processGroups(groupsStream: DeviceGroupsInputStream): Unit = {
Option(groupsStream.read()) match {
case Some(deviceGroup: DeviceGroup) =>
logger.debug("received group {}", deviceGroup.getName)
signalDesktopApplicationStore.saveGroup(deviceGroup)
processGroups(groupsStream)
case None =>
// end
}
}
def handleRead(envelope: SignalServiceEnvelope, messages: util.List[ReadMessage]): Unit = {
logger.debug("received read message from [{}]", messages.asScala.map(m => m.getSender).mkString(", "))
}
def handleRequest(envelope: SignalServiceEnvelope, message: RequestMessage): Unit = {
logger.debug("received request message [{}]", message.getRequest)
}
def handleSent(envelope: SignalServiceEnvelope, message: SentTranscriptMessage): Unit = {
logger.debug("received sent message [{}: {}]", message.getMessage, message.getTimestamp)
}
override def handleSyncMessage(envelope: SignalServiceEnvelope, syncMessage: SignalServiceSyncMessage): Unit = {
if (syncMessage.getBlockedList.isPresent) {
handleBlockedList(envelope, syncMessage.getBlockedList.get())
}
if (syncMessage.getContacts.isPresent) {
handleContacts(envelope, syncMessage.getContacts.get())
}
if (syncMessage.getGroups.isPresent) {
handleGroups(envelope, syncMessage.getGroups.get())
}
if (syncMessage.getRead.isPresent) {
handleRead(envelope, syncMessage.getRead.get())
}
if (syncMessage.getRequest.isPresent) {
handleRequest(envelope, syncMessage.getRequest.get())
}
if (syncMessage.getSent.isPresent) {
handleSent(envelope, syncMessage.getSent.get())
}
}
override def handleDataMessage(envelope: SignalServiceEnvelope, dataMessage: SignalServiceDataMessage): Unit = {
logger.debug(s"received data message [${dataMessage.getBody} ${dataMessage.getGroupInfo}]")
}
}
| ayoub-benali/signal-desktop-client | src/main/scala/de/m7w3/signal/messages/SignalDesktopMessageHandler.scala | Scala | apache-2.0 | 4,729 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.{thisLineNumber, createTempDirectory}
import enablers.Writability
import exceptions.TestFailedException
import org.scalactic.Prettifier
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
class ShouldBeWritableLogicalAndExplicitSpec extends AnyFunSpec {
private val prettifier = Prettifier.default
val fileName: String = "ShouldBeWritableLogicalAndExplicitSpec.scala"
def wasEqualTo(left: Any, right: Any): String =
FailureMessages.wasEqualTo(prettifier, left, right)
def wasNotEqualTo(left: Any, right: Any): String =
FailureMessages.wasNotEqualTo(prettifier, left, right)
def equaled(left: Any, right: Any): String =
FailureMessages.equaled(prettifier, left, right)
def didNotEqual(left: Any, right: Any): String =
FailureMessages.didNotEqual(prettifier, left, right)
def wasNotWritable(left: Any): String =
FailureMessages.wasNotWritable(prettifier, left)
def wasWritable(left: Any): String =
FailureMessages.wasWritable(prettifier, left)
def allError(message: String, lineNumber: Int, left: Any): String = {
val messageWithIndex = UnquotedString(" " + FailureMessages.forAssertionsGenTraversableMessageWithStackDepth(prettifier, 0, UnquotedString(message), UnquotedString(fileName + ":" + lineNumber)))
FailureMessages.allShorthandFailed(prettifier, messageWithIndex, left)
}
trait Thing {
def canRead: Boolean
}
val book = new Thing {
val canRead = true
}
val stone = new Thing {
val canRead = false
}
val writability =
new Writability[Thing] {
def isWritable(thing: Thing): Boolean = thing.canRead
}
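
  // This suite exercises ScalaTest's *explicit* matcher syntax: the Equality
  // and Writability instances are applied in a second parameter list, e.g.
  // (book should (be (writable))) (writability), rather than being resolved
  // implicitly.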
describe("Writability matcher") {
describe("when work with 'file should be (writable)'") {
it("should do nothing when file is writable") {
(book should (equal (book) and be (writable))) (defaultEquality, writability)
(book should (be (writable) and equal (book))) (writability, defaultEquality)
(book should (be (book) and be (writable))) (writability)
(book should (be (writable) and be (book))) (writability)
}
it("should throw TestFailedException with correct stack depth when file is not writable") {
val caught1 = intercept[TestFailedException] {
(stone should (equal (stone) and be (writable))) (defaultEquality, writability)
}
assert(caught1.message === Some(equaled(stone, stone) + ", but " + wasNotWritable(stone)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[TestFailedException] {
(stone should (be (writable) and equal (stone))) (writability, defaultEquality)
}
assert(caught2.message === Some(wasNotWritable(stone)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[TestFailedException] {
(stone should (be (stone) and be (writable))) (writability)
}
assert(caught3.message === Some(wasEqualTo(stone, stone) + ", but " + wasNotWritable(stone)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[TestFailedException] {
(stone should (be (writable) and be (stone))) (writability)
}
assert(caught4.message === Some(wasNotWritable(stone)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'file should not be sorted'") {
it("should do nothing when file is not writable") {
(stone should (not equal book and not be writable)) (defaultEquality, writability)
(stone should (not be writable and not equal book)) (writability, defaultEquality)
(stone should (not be book and not be writable)) (writability)
(stone should (not be writable and not be book)) (writability)
}
it("should throw TestFailedException with correct stack depth when xs is not sorted") {
val caught1 = intercept[TestFailedException] {
(book should (not equal stone and not be writable)) (defaultEquality, writability)
}
assert(caught1.message === Some(didNotEqual(book, stone) + ", but " + wasWritable(book)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[TestFailedException] {
(book should (not be writable and not equal stone)) (writability, defaultEquality)
}
assert(caught2.message === Some(wasWritable(book)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[TestFailedException] {
(book should (not be stone and not be writable)) (writability)
}
assert(caught3.message === Some(wasNotEqualTo(book, stone) + ", but " + wasWritable(book)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[TestFailedException] {
(book should (not be writable and not be stone)) (writability)
}
assert(caught4.message === Some(wasWritable(book)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'all(xs) should be (writable)'") {
it("should do nothing when all(xs) is writable") {
(all(List(book)) should (be (book) and be (writable))) (writability)
(all(List(book)) should (be (writable) and be (book))) (writability)
(all(List(book)) should (equal (book) and be (writable))) (defaultEquality, writability)
(all(List(book)) should (be (writable) and equal (book))) (writability, defaultEquality)
}
it("should throw TestFailedException with correct stack depth when all(xs) is not writable") {
val left1 = List(stone)
val caught1 = intercept[TestFailedException] {
(all(left1) should (be (stone) and be (writable))) (writability)
}
assert(caught1.message === Some(allError(wasEqualTo(stone, stone) + ", but " + wasNotWritable(stone), thisLineNumber - 2, left1)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val left2 = List(stone)
val caught2 = intercept[TestFailedException] {
(all(left2) should (be (writable) and be (stone))) (writability)
}
assert(caught2.message === Some(allError(wasNotWritable(stone), thisLineNumber - 2, left2)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val left3 = List(stone)
val caught3 = intercept[TestFailedException] {
(all(left3) should (equal (stone) and be (writable))) (defaultEquality, writability)
}
assert(caught3.message === Some(allError(equaled(stone, stone) + ", but " + wasNotWritable(stone), thisLineNumber - 2, left3)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val left4 = List(stone)
val caught4 = intercept[TestFailedException] {
(all(left4) should (be (writable) and equal (stone))) (writability, defaultEquality)
}
assert(caught4.message === Some(allError(wasNotWritable(stone), thisLineNumber - 2, left4)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'all(xs) should not be writable'") {
it("should do nothing when all(xs) is not writable") {
(all(List(stone)) should (not be writable and not be book)) (writability)
(all(List(stone)) should (not be book and not be writable)) (writability)
(all(List(stone)) should (not be writable and not equal book)) (writability, defaultEquality)
(all(List(stone)) should (not equal book and not be writable)) (defaultEquality, writability)
}
it("should throw TestFailedException with correct stack depth when all(xs) is writable") {
val left1 = List(book)
val caught1 = intercept[TestFailedException] {
(all(left1) should (not be stone and not be writable)) (writability)
}
assert(caught1.message === Some(allError(wasNotEqualTo(book, stone) + ", but " + wasWritable(book), thisLineNumber - 2, left1)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val left2 = List(book)
val caught2 = intercept[TestFailedException] {
(all(left2) should (not be writable and not be stone)) (writability)
}
assert(caught2.message === Some(allError(wasWritable(book), thisLineNumber - 2, left2)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val left3 = List(book)
val caught3 = intercept[TestFailedException] {
(all(left3) should (not equal stone and not be writable)) (defaultEquality, writability)
}
assert(caught3.message === Some(allError(didNotEqual(book, stone) + ", but " + wasWritable(book), thisLineNumber - 2, left3)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val left4 = List(book)
val caught4 = intercept[TestFailedException] {
(all(left4) should (not be writable and not equal stone)) (writability, defaultEquality)
}
assert(caught4.message === Some(allError(wasWritable(book), thisLineNumber - 2, left4)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
}
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/ShouldBeWritableLogicalAndExplicitSpec.scala | Scala | apache-2.0 | 11,304 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.classification
import org.apache.hadoop.fs.Path
import org.apache.spark.annotation.Since
import org.apache.spark.ml.PredictorParams
import org.apache.spark.ml.feature.Instance
import org.apache.spark.ml.linalg._
import org.apache.spark.ml.param.{DoubleParam, Param, ParamMap, ParamValidators}
import org.apache.spark.ml.param.shared.HasWeightCol
import org.apache.spark.ml.util._
import org.apache.spark.ml.util.Instrumentation.instrumented
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.sql.{Dataset, Row}
import org.apache.spark.sql.functions.col
/**
* Params for Naive Bayes Classifiers.
*/
private[classification] trait NaiveBayesParams extends PredictorParams with HasWeightCol {
/**
* The smoothing parameter.
* (default = 1.0).
* @group param
*/
final val smoothing: DoubleParam = new DoubleParam(this, "smoothing", "The smoothing parameter.",
ParamValidators.gtEq(0))
/** @group getParam */
final def getSmoothing: Double = $(smoothing)
/**
* The model type which is a string (case-sensitive).
* Supported options: "multinomial" and "bernoulli".
* (default = multinomial)
* @group param
*/
final val modelType: Param[String] = new Param[String](this, "modelType", "The model type " +
"which is a string (case-sensitive). Supported options: multinomial (default) and bernoulli.",
ParamValidators.inArray[String](NaiveBayes.supportedModelTypes.toArray))
/** @group getParam */
final def getModelType: String = $(modelType)
}
// scalastyle:off line.size.limit
/**
* Naive Bayes Classifiers.
* It supports Multinomial NB
* (see <a href="http://nlp.stanford.edu/IR-book/html/htmledition/naive-bayes-text-classification-1.html">
* here</a>)
* which can handle finitely supported discrete data. For example, by converting documents into
* TF-IDF vectors, it can be used for document classification. By making every vector a
* binary (0/1) data, it can also be used as Bernoulli NB
* (see <a href="http://nlp.stanford.edu/IR-book/html/htmledition/the-bernoulli-model-1.html">
* here</a>).
* The input feature values must be nonnegative.
*/
// scalastyle:on line.size.limit
@Since("1.5.0")
class NaiveBayes @Since("1.5.0") (
@Since("1.5.0") override val uid: String)
extends ProbabilisticClassifier[Vector, NaiveBayes, NaiveBayesModel]
with NaiveBayesParams with DefaultParamsWritable {
import NaiveBayes._
@Since("1.5.0")
def this() = this(Identifiable.randomUID("nb"))
/**
* Set the smoothing parameter.
* Default is 1.0.
* @group setParam
*/
@Since("1.5.0")
def setSmoothing(value: Double): this.type = set(smoothing, value)
setDefault(smoothing -> 1.0)
/**
* Set the model type using a string (case-sensitive).
* Supported options: "multinomial" and "bernoulli".
* Default is "multinomial"
* @group setParam
*/
@Since("1.5.0")
def setModelType(value: String): this.type = set(modelType, value)
setDefault(modelType -> NaiveBayes.Multinomial)
/**
* Sets the value of param [[weightCol]].
* If this is not set or empty, we treat all instance weights as 1.0.
* Default is not set, so all instances have weight one.
*
* @group setParam
*/
@Since("2.1.0")
def setWeightCol(value: String): this.type = set(weightCol, value)
override protected def train(dataset: Dataset[_]): NaiveBayesModel = {
trainWithLabelCheck(dataset, positiveLabel = true)
}
/**
* ml assumes input labels in range [0, numClasses). But this implementation
* is also called by mllib NaiveBayes which allows other kinds of input labels
* such as {-1, +1}. `positiveLabel` is used to determine whether the label
* should be checked and it should be removed when we remove mllib NaiveBayes.
*/
private[spark] def trainWithLabelCheck(
dataset: Dataset[_],
positiveLabel: Boolean): NaiveBayesModel = instrumented { instr =>
instr.logPipelineStage(this)
instr.logDataset(dataset)
if (positiveLabel && isDefined(thresholds)) {
val numClasses = getNumClasses(dataset)
instr.logNumClasses(numClasses)
require($(thresholds).length == numClasses, this.getClass.getSimpleName +
".train() called with non-matching numClasses and thresholds.length." +
s" numClasses=$numClasses, but thresholds has length ${$(thresholds).length}")
}
val validateInstance = $(modelType) match {
case Multinomial =>
(instance: Instance) => requireNonnegativeValues(instance.features)
case Bernoulli =>
(instance: Instance) => requireZeroOneBernoulliValues(instance.features)
case _ =>
// This should never happen.
throw new IllegalArgumentException(s"Invalid modelType: ${$(modelType)}.")
}
instr.logParams(this, labelCol, featuresCol, weightCol, predictionCol, rawPredictionCol,
probabilityCol, modelType, smoothing, thresholds)
val numFeatures = dataset.select(col($(featuresCol))).head().getAs[Vector](0).size
instr.logNumFeatures(numFeatures)
// Aggregates term frequencies per label.
// TODO: Calling aggregateByKey and collect creates two stages, we can implement something
// TODO: similar to reduceByKeyLocally to save one stage.
val aggregated = extractInstances(dataset, validateInstance).map { instance =>
(instance.label, (instance.weight, instance.features))
}.aggregateByKey[(Double, DenseVector, Long)]((0.0, Vectors.zeros(numFeatures).toDense, 0L))(
seqOp = {
case ((weightSum, featureSum, count), (weight, features)) =>
BLAS.axpy(weight, features, featureSum)
(weightSum + weight, featureSum, count + 1)
},
combOp = {
case ((weightSum1, featureSum1, count1), (weightSum2, featureSum2, count2)) =>
BLAS.axpy(1.0, featureSum2, featureSum1)
(weightSum1 + weightSum2, featureSum1, count1 + count2)
}).collect().sortBy(_._1)
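    // aggregated: Array[(label, (weightSum, featureSum, count))], sorted by label.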
val numSamples = aggregated.map(_._2._3).sum
instr.logNumExamples(numSamples)
val numLabels = aggregated.length
instr.logNumClasses(numLabels)
val numDocuments = aggregated.map(_._2._1).sum
val labelArray = new Array[Double](numLabels)
val piArray = new Array[Double](numLabels)
val thetaArray = new Array[Double](numLabels * numFeatures)
val lambda = $(smoothing)
val piLogDenom = math.log(numDocuments + numLabels * lambda)
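    // Smoothed class log-prior: pi(c) = log((weight_c + lambda) / (totalWeight + numLabels * lambda)),
    // computed below as log(n + lambda) - piLogDenom.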
var i = 0
aggregated.foreach { case (label, (n, sumTermFreqs, _)) =>
labelArray(i) = label
piArray(i) = math.log(n + lambda) - piLogDenom
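      // Denominator for the smoothed log-conditionals: total term weight of the class
      // plus numFeatures * lambda (multinomial), or class weight plus 2 * lambda (Bernoulli).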
val thetaLogDenom = $(modelType) match {
case Multinomial => math.log(sumTermFreqs.values.sum + numFeatures * lambda)
case Bernoulli => math.log(n + 2.0 * lambda)
case _ =>
// This should never happen.
throw new IllegalArgumentException(s"Invalid modelType: ${$(modelType)}.")
}
var j = 0
while (j < numFeatures) {
thetaArray(i * numFeatures + j) = math.log(sumTermFreqs(j) + lambda) - thetaLogDenom
j += 1
}
i += 1
}
val pi = Vectors.dense(piArray)
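    // thetaArray is laid out row-major (one row per label), hence isTransposed = true.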
val theta = new DenseMatrix(numLabels, numFeatures, thetaArray, true)
new NaiveBayesModel(uid, pi, theta).setOldLabels(labelArray)
}
@Since("1.5.0")
override def copy(extra: ParamMap): NaiveBayes = defaultCopy(extra)
}
@Since("1.6.0")
object NaiveBayes extends DefaultParamsReadable[NaiveBayes] {
/** String name for multinomial model type. */
private[classification] val Multinomial: String = "multinomial"
/** String name for Bernoulli model type. */
private[classification] val Bernoulli: String = "bernoulli"
/* Set of modelTypes that NaiveBayes supports */
private[classification] val supportedModelTypes = Set(Multinomial, Bernoulli)
private[NaiveBayes] def requireNonnegativeValues(v: Vector): Unit = {
val values = v match {
case sv: SparseVector => sv.values
case dv: DenseVector => dv.values
}
require(values.forall(_ >= 0.0),
s"Naive Bayes requires nonnegative feature values but found $v.")
}
private[NaiveBayes] def requireZeroOneBernoulliValues(v: Vector): Unit = {
val values = v match {
case sv: SparseVector => sv.values
case dv: DenseVector => dv.values
}
require(values.forall(v => v == 0.0 || v == 1.0),
s"Bernoulli naive Bayes requires 0 or 1 feature values but found $v.")
}
@Since("1.6.0")
override def load(path: String): NaiveBayes = super.load(path)
}
/**
* Model produced by [[NaiveBayes]]
* @param pi log of class priors, whose dimension is C (number of classes)
* @param theta log of class conditional probabilities, whose dimension is C (number of classes)
* by D (number of features)
*/
@Since("1.5.0")
class NaiveBayesModel private[ml] (
@Since("1.5.0") override val uid: String,
@Since("2.0.0") val pi: Vector,
@Since("2.0.0") val theta: Matrix)
extends ProbabilisticClassificationModel[Vector, NaiveBayesModel]
with NaiveBayesParams with MLWritable {
import NaiveBayes.{Bernoulli, Multinomial}
/**
   * mllib NaiveBayes is currently a wrapper around this ml implementation.
   * mllib input labels may be {-1, +1} and the mllib NaiveBayesModel exposes its labels,
   * both of which differ from ml, so we store the labels sequentially here for
   * mllib to retrieve. This should be removed when we remove mllib NaiveBayes.
*/
private[spark] var oldLabels: Array[Double] = null
private[spark] def setOldLabels(labels: Array[Double]): this.type = {
this.oldLabels = labels
this
}
/**
   * Bernoulli scoring requires log(condprob) when a feature is 1 and log(1 - condprob)
   * when it is 0. This precomputes log(1.0 - exp(theta)) and its row sums, which are
   * used to apply this condition with linear algebra (in the predict function).
*/
private lazy val (thetaMinusNegTheta, negThetaSum) = $(modelType) match {
case Multinomial => (None, None)
case Bernoulli =>
val negTheta = theta.map(value => math.log1p(-math.exp(value)))
val ones = new DenseVector(Array.fill(theta.numCols) {1.0})
val thetaMinusNegTheta = theta.map { value =>
value - math.log1p(-math.exp(value))
}
(Option(thetaMinusNegTheta), Option(negTheta.multiply(ones)))
case _ =>
// This should never happen.
throw new IllegalArgumentException(s"Invalid modelType: ${$(modelType)}.")
}
@Since("1.6.0")
override val numFeatures: Int = theta.numCols
@Since("1.5.0")
override val numClasses: Int = pi.size
private def multinomialCalculation(features: Vector) = {
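    // Raw score per class: theta * x + pi, i.e. sum_j x_j * log(p_cj) + log P(c).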
val prob = theta.multiply(features)
BLAS.axpy(1.0, pi, prob)
prob
}
private def bernoulliCalculation(features: Vector) = {
features.foreachActive((_, value) =>
require(value == 0.0 || value == 1.0,
s"Bernoulli naive Bayes requires 0 or 1 feature values but found $features.")
)
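    // Raw score: (theta - negTheta) * x + pi + rowSums(negTheta), where
    // negTheta = log(1 - exp(theta)); this equals
    // sum_j [ x_j * log(p_cj) + (1 - x_j) * log(1 - p_cj) ] + log P(c).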
val prob = thetaMinusNegTheta.get.multiply(features)
BLAS.axpy(1.0, pi, prob)
BLAS.axpy(1.0, negThetaSum.get, prob)
prob
}
override protected def predictRaw(features: Vector): Vector = {
$(modelType) match {
case Multinomial =>
multinomialCalculation(features)
case Bernoulli =>
bernoulliCalculation(features)
case _ =>
// This should never happen.
throw new IllegalArgumentException(s"Invalid modelType: ${$(modelType)}.")
}
}
override protected def raw2probabilityInPlace(rawPrediction: Vector): Vector = {
rawPrediction match {
case dv: DenseVector =>
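        // Convert log-space raw scores to probabilities using the standard
        // max-subtraction (log-sum-exp) trick for numerical stability.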
var i = 0
val size = dv.size
val maxLog = dv.values.max
while (i < size) {
dv.values(i) = math.exp(dv.values(i) - maxLog)
i += 1
}
val probSum = dv.values.sum
i = 0
while (i < size) {
dv.values(i) = dv.values(i) / probSum
i += 1
}
dv
case sv: SparseVector =>
throw new RuntimeException("Unexpected error in NaiveBayesModel:" +
" raw2probabilityInPlace encountered SparseVector")
}
}
@Since("1.5.0")
override def copy(extra: ParamMap): NaiveBayesModel = {
copyValues(new NaiveBayesModel(uid, pi, theta).setParent(this.parent), extra)
}
@Since("1.5.0")
override def toString: String = {
s"NaiveBayesModel: uid=$uid, modelType=${$(modelType)}, numClasses=$numClasses, " +
s"numFeatures=$numFeatures"
}
@Since("1.6.0")
override def write: MLWriter = new NaiveBayesModel.NaiveBayesModelWriter(this)
}
@Since("1.6.0")
object NaiveBayesModel extends MLReadable[NaiveBayesModel] {
@Since("1.6.0")
override def read: MLReader[NaiveBayesModel] = new NaiveBayesModelReader
@Since("1.6.0")
override def load(path: String): NaiveBayesModel = super.load(path)
/** [[MLWriter]] instance for [[NaiveBayesModel]] */
private[NaiveBayesModel] class NaiveBayesModelWriter(instance: NaiveBayesModel) extends MLWriter {
private case class Data(pi: Vector, theta: Matrix)
override protected def saveImpl(path: String): Unit = {
// Save metadata and Params
DefaultParamsWriter.saveMetadata(instance, path, sc)
// Save model data: pi, theta
val data = Data(instance.pi, instance.theta)
val dataPath = new Path(path, "data").toString
sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
}
}
private class NaiveBayesModelReader extends MLReader[NaiveBayesModel] {
/** Checked against metadata when loading model */
private val className = classOf[NaiveBayesModel].getName
override def load(path: String): NaiveBayesModel = {
val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
val dataPath = new Path(path, "data").toString
val data = sparkSession.read.parquet(dataPath)
val vecConverted = MLUtils.convertVectorColumnsToML(data, "pi")
val Row(pi: Vector, theta: Matrix) = MLUtils.convertMatrixColumnsToML(vecConverted, "theta")
.select("pi", "theta")
.head()
val model = new NaiveBayesModel(metadata.uid, pi, theta)
metadata.getAndSetParams(model)
model
}
}
}
| caneGuy/spark | mllib/src/main/scala/org/apache/spark/ml/classification/NaiveBayes.scala | Scala | apache-2.0 | 15,011 |
package org.sbuild.plugins.scalac
import org.sbuild._
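/**
 * SBuild plugin that wires a [[Scalac]] configuration into compile and clean targets.
 * By default, `create(name)` compiles the sources under `src/<name>/scala` into
 * `target/scalac-<name>-classes` via a `scalac-<name>` target, accompanied by a
 * `clean-scalac-<name>` target that deletes the output directory.
 */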
class ScalacPlugin(implicit project: Project) extends Plugin[Scalac] {
def create(name: String): Scalac = {
val compileTargetName = s"scalac-${name}"
val cleanTargetName = s"clean-scalac-${name}"
val classpath = TargetRefs()
val targetDir = Path(s"target/scalac-${name}-classes")
val srcDirs = Seq(Path(s"src/${name}/scala"))
Scalac(
compileTargetName = compileTargetName,
cleanTargetName = Some(cleanTargetName),
classpath = classpath,
targetDir = targetDir,
srcDirs = srcDirs
)
}
def applyToProject(instances: Seq[(String, Scalac)]): Unit = instances.foreach {
case (name, scalac) =>
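      // Explicit sources win; otherwise scan the configured source directories
      // for .java/.scala files via SBuild's "scan:" target-ref scheme.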
val sources: TargetRefs = scalac.sources match {
case Some(s) => s
        case None => scalac.srcDirs.map(dir => TargetRef(s"scan:${dir};regex=.*\\.(java|scala)"))
}
val compilerClasspath: TargetRefs = scalac.compilerClasspath.getOrElse {
scalac.scalaVersion match {
case Some(v) => ScalacTask.compilerClasspath(v)
case _ => TargetRefs()
}
}
val dependencies: TargetRefs = scalac.dependsOn ~ compilerClasspath ~ scalac.classpath ~~ sources
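      // Optional clean target: evicts the compile target's cache and deletes the output dir.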
scalac.cleanTargetName.map { cleanTargetName =>
Target(s"phony:${cleanTargetName}").evictCache(scalac.compileTargetName) exec {
scalac.targetDir.deleteRecursive
}
}
Target(s"phony:${scalac.compileTargetName}").cacheable dependsOn dependencies exec { ctx: TargetContext =>
if (sources.files.isEmpty) {
        // TODO: Improve; ask for a dedicated error API in SBuild
// project.monitor.warn("No sources files found.")
// ctx.error("No source files found.")
throw new RuntimeException("No source files found.")
}
val compiler = new ScalacTask(
compilerClasspath = compilerClasspath.files,
classpath = scalac.classpath.files,
sources = sources.files,
destDir = scalac.targetDir,
encoding = scalac.encoding,
fork = scalac.fork,
additionalScalacArgs = scalac.additionalScalacArgs
)
scalac.deprecation.map { d => compiler.deprecation = d }
scalac.verbose.map { d => compiler.verbose = d }
// scalac.source.map { d => compiler.source = d }
scalac.target.map { d => compiler.target = d }
scalac.debugInfo.map { d => compiler.debugInfo = d }
compiler.execute
scalac.targetDir.listFilesRecursive.foreach { f => ctx.attachFile(f) }
}
}
} | SBuild-org/sbuild-scalac-plugin | org.sbuild.plugins.scalac/src/main/scala/org/sbuild/plugins/scalac/ScalacPlugin.scala | Scala | apache-2.0 | 2,613 |
package edu.gemini.pit.ui.robot
import edu.gemini.pit.model.Model
import edu.gemini.spModel.core.Coordinates
import scalaz.Lens
import edu.gemini.model.p1.visibility.TargetVisibilityCalc
import edu.gemini.model.p1.immutable._
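/**
 * Robot that computes target visibility for each observation and caches the result.
 * The cache key combines the proposal class, blueprint, and target coordinates at the
 * semester midpoint, since a change to any of these can change the visibility.
 */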
object VisibilityRobot extends ObservationMetaRobot[(ProposalClass, BlueprintBase, Coordinates), TargetVisibility] {
protected val valueLens: Lens[ObservationMeta, Option[TargetVisibility]] =
Lens.lensu((a, b) => a.copy(visibility = b), _.visibility)
protected def key(o: Observation): Option[(ProposalClass, BlueprintBase, Coordinates)] =
for {
m <- model
t <- o.target
b <- o.blueprint
c <- t.coords(m.proposal.semester.midPoint)
} yield (m.proposal.proposalClass, b, c)
protected def query(o: Observation): Option[TargetVisibility] = {
model.flatMap {
m =>
if (m.proposal.proposalClass.isSpecial) TargetVisibilityCalc.getOnDec(m.proposal.semester, o) else TargetVisibilityCalc.get(m.proposal.semester, o)
}
}
// Override caching rules so we update visibility every time a proposal changes
override def missing(m: Model): List[((ProposalClass, BlueprintBase, Coordinates), Observation)] =
obsLens.get(m) collect {
case o if key(o).isDefined => (key(o).get, o)
}
override def doRefresh(m: Model) {
    // Mark all checks as pending so the visibility is updated after every change.
    // As the visibility calculation is very fast, this doesn't affect performance.
model.foreach {
m =>
// Cache the result and update the model
for {
o <- m.proposal.observations
k = key(o)
if k.isDefined
} state = state + (key(o).get -> Result.Pending)
}
super.doRefresh(m)
}
} | arturog8m/ocs | bundle/edu.gemini.pit/src/main/scala/edu/gemini/pit/ui/robot/VisibilityRobot.scala | Scala | bsd-3-clause | 1,750 |
/*
* Copyright (c) 2013, Scodec
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package scodec
import scodec.bits.*
import scodec.codecs.*
class EncoderTest extends CodecSuite:
test("as") {
case class Foo(x: Int)
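    // `as[Foo]` adapts the Int encoder to the single-field case class (via scodec's
    // iso support), so Foo(255) encodes exactly like the Int 255.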
val a: Encoder[Int] = uint8
val b: Encoder[Foo] = a.as[Foo]
assertEquals(b.encode(Foo(255)), Attempt.successful(hex"ff".bits))
}
| scodec/scodec | unitTests/src/test/scala/scodec/EncoderTest.scala | Scala | bsd-3-clause | 1,861 |