code stringlengths 5-1M | repo_name stringlengths 5-109 | path stringlengths 6-208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5-1M
---|---|---|---|---|---|
package net.xivilization.ddate
import java.util.Date
import java.util.Calendar
import android.appwidget.AppWidgetProvider
import android.appwidget.AppWidgetManager
import android.content.Context
import android.widget.RemoteViews
class DdateWidget extends AppWidgetProvider {
override def onUpdate(context: Context, appWidgetManager: AppWidgetManager,
appWidgetIds: Array[Int]) = {
val updateViews = new RemoteViews(context.getPackageName(), R.layout.main)
val cal = Calendar.getInstance()
val ddate = new DiscordianDate(cal.getTime())
updateViews.setTextViewText(R.id.widget_textview, constructTime(ddate))
appWidgetManager.updateAppWidget(appWidgetIds, updateViews)
super.onUpdate(context, appWidgetManager, appWidgetIds)
}
def constructTime(ddate: DiscordianDate) = {
def translator(n: Int): String = {
  // Ordinal suffix: 1st, 2nd, 3rd; 11th-13th and everything else get "th".
  ((n % 100, n % 10) match {
    case ((11 | 12 | 13), _) => "%dth"
    case (_, 1) => "%dst"
    case (_, 2) => "%dnd"
    case (_, 3) => "%drd"
    case _ => "%dth"
  }) format n
}
if (ddate.tibsDay) {
"Today's St. Tib's Day %d" format ddate.year
}
else {
"Today is %s, the %s of %s, %s" format (
ddate.dayNames(ddate.weekDay-1),
translator(ddate.seasonDay),
ddate.seasonNames(ddate.season-1),
ddate.year)
}
}
}
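// Behavior sketch (not part of the original file): with the corrected ordinal
// logic above, `translator` renders Discordian season days (1..73) as:
//
//   1 -> "1st"    2 -> "2nd"    3 -> "3rd"    4 -> "4th"
//   11 -> "11th"  13 -> "13th"  22 -> "22nd"  73 -> "73rd"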
| Leonidas-from-XIV/lndsDdate | src/main/scala/DdateWidget.scala | Scala | gpl-3.0 | 1,260 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
package elements
import com.intellij.lang.ASTNode
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunctionDefinition
import org.jetbrains.plugins.scala.lang.psi.impl.statements.ScFunctionDefinitionImpl
/**
* User: Alexander Podkhalyuzin
* Date: 14.10.2008
*/
class ScFunctionDefinitionElementType extends ScFunctionElementType("function definition") {
override def createElement(node: ASTNode): ScFunctionDefinition = new ScFunctionDefinitionImpl(node)
override def createPsi(stub: ScFunctionStub): ScFunctionDefinition = new ScFunctionDefinitionImpl(stub)
} | ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/stubs/elements/ScFunctionDefinitionElementType.scala | Scala | apache-2.0 | 659 |
// As per https://meta.plasm.us/posts/2013/08/31/feeding-our-vampires/
// scalac: -Werror
import scala.annotation.StaticAnnotation
import scala.reflect.macros.whitebox.Context
import scala.language.experimental.macros
class body(tree: Any) extends StaticAnnotation
object Macros {
def selFieldImpl(c: Context) = {
import c.universe._
val field = c.macroApplication.symbol
val bodyAnn = field.annotations.filter(_.tree.tpe <:< typeOf[body]).head
c.Expr[Any](bodyAnn.tree.children(1))
}
def mkObjectImpl(c: Context)(xs: c.Expr[Any]*) = {
import c.universe._
import Flag._
// val kvps = xs.toList map { case q"${_}(${Literal(Constant(name: String))}).->[${_}]($value)" => name -> value }
val kvps = xs.map(_.tree).toList map { case Apply(TypeApply(Select(Apply(_, List(Literal(Constant(name: String)))), _), _), List(value)) => name -> value case x => throw new MatchError(x) }
// val fields = kvps map { case (k, v) => q"@body($v) def ${TermName(k)} = macro Macros.selFieldImpl" }
val fields = kvps map { case (k, v) => DefDef(
Modifiers(MACRO, typeNames.EMPTY, List(Apply(Select(New(Ident(TypeName("body"))), termNames.CONSTRUCTOR), List(v)))),
TermName(k), Nil, Nil, Ident(TypeName("Any")), Select(Ident(TermName("Macros")), TermName("selFieldImpl"))) }
// q"import scala.language.experimental.macros; class Workaround { ..$fields }; new Workaround{}"
c.Expr[Any](Block(
List(
Import(Select(Select(Ident(TermName("scala")), TermName("language")), TermName("experimental")), List(ImportSelector(TermName("macros"), 51, TermName("macros"), 51))),
ClassDef(
NoMods, TypeName("Workaround"), Nil,
Template(
List(Select(Ident(TermName("scala")), TypeName("AnyRef"))), noSelfType,
DefDef(
NoMods, termNames.CONSTRUCTOR, Nil, List(Nil), TypeTree(),
Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(()))))
+: fields)),
ClassDef(
Modifiers(FINAL), TypeName("$anon"), Nil,
Template(
List(Ident(TypeName("Workaround"))), noSelfType,
List(
DefDef(
NoMods, termNames.CONSTRUCTOR, Nil, List(Nil), TypeTree(),
Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(())))))))),
Apply(Select(New(Ident(TypeName("$anon"))), termNames.CONSTRUCTOR), List())))
}
}
object mkObject {
def apply(xs: Any*): Any = macro Macros.mkObjectImpl
}
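// Usage sketch (an assumption based on the accompanying test of this suite,
// which is not shown here): the "vampire" pattern turns field access into a
// macro call whose body comes from the @body annotation, so no underlying
// field ever exists on the anonymous class:
//
//   val obj = mkObject("x" -> 2, "y" -> "2")
//   obj.x // the selFieldImpl macro expands this to the literal 2
//   obj.y // expands to "2"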
| scala/scala | test/files/run/macro-vampire-false-warning/Macros_1.scala | Scala | apache-2.0 | 2,641 |
import scala.util.control.NonFatal
import scalaz.{\/, \/-, -\/, Bind, Traverse}
import scalaz.std.list.listInstance
package object buccaneer {
type Args = List[String]
type Expr[A] = List[Denotation[A]]
type AST[A] = List[(Denotation[A], Option[String])]
type Result[A] = \/[String, A]
def success[A](a: A): Result[A] = \/-(a)
def failure[A](t: Throwable): Result[A] = failure(t.toString)
def failure[A](message: String): Result[A] = -\/(message)
def attempt[A](eff: => A): Result[A] = try { success(eff) } catch { case NonFatal(t) => failure(t) }
implicit lazy val traverseList: Traverse[List] = listInstance
implicit lazy val bindStep: Bind[Step] = new Bind[Step] {
override def bind[A, B](fa: Step[A])(f: (A) => Step[B]) = fa flatMap f
override def map[A, B](fa: Step[A])(f: (A) => B) = fa map f
}
}
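// Usage sketch (not part of the original file): `attempt` converts a throwing
// expression into a Result, i.e. a scalaz disjunction with the error rendered
// as a String on the left:
//
//   attempt("42".toInt) // \/-(42)
//   attempt("x".toInt)  // -\/("java.lang.NumberFormatException: For input string: \"x\"")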
| AvramRobert/buccaneer | src/main/scala/buccaneer/package.scala | Scala | apache-2.0 | 838 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.io
import java.io.OutputStream
import java.util.zip.Checksum
/**
* A variant of [[java.util.zip.CheckedOutputStream]] which can
* change the checksum calculator at runtime.
*/
private[spark] class MutableCheckedOutputStream(out: OutputStream) extends OutputStream {
private var checksum: Checksum = _
def setChecksum(c: Checksum): Unit = {
this.checksum = c
}
override def write(b: Int): Unit = {
assert(checksum != null, "Checksum is not set.")
checksum.update(b)
out.write(b)
}
override def write(b: Array[Byte], off: Int, len: Int): Unit = {
assert(checksum != null, "Checksum is not set.")
checksum.update(b, off, len)
out.write(b, off, len)
}
override def flush(): Unit = out.flush()
override def close(): Unit = out.close()
}
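/**
 * Usage sketch (not part of the original file): the point of this class is
 * that the checksum can be swapped at runtime. CRC32 (used here) and Adler32
 * are the two Checksum implementations shipped with the JDK.
 */
private[spark] object MutableCheckedOutputStreamSketch {
  def crcOf(bytes: Array[Byte]): Long = {
    val crc = new java.util.zip.CRC32()
    val out = new MutableCheckedOutputStream(new java.io.ByteArrayOutputStream())
    out.setChecksum(crc)
    out.write(bytes, 0, bytes.length)
    crc.getValue // digest of everything written while `crc` was installed
  }
}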
| ueshin/apache-spark | core/src/main/scala/org/apache/spark/io/MutableCheckedOutputStream.scala | Scala | apache-2.0 | 1,615 |
/**
* Copyright (C) 2011-2012 Kaj Magnus Lindberg (born 1979)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.debiki.core
import java.{util => ju}
import org.scalactic.{Or, Every, ErrorMessage}
import EmailNotfPrefs.EmailNotfPrefs
import Prelude._
import User.{isRoleId, isGuestId, checkId}
object People {
val None = People(Nil)
}
// COULD remove, use a Map[UserId, User] instead?
//
case class People(users: List[User] = Nil) {
def + (user: User) = copy(users = user :: users)
def ++ (people: People) = People(users = people.users ::: users)
def user(id: String): Option[User] =
if (id == SystemUser.User.id)
Some(SystemUser.User)
else if (id == UnknownUser.Id)
Some(UnknownUser.User)
else
users.find(_.id == id) // COULD optimize
def user_!(id: String): User = user(id) getOrElse runErr(
"DwE730krq849", "User not found: "+ safed(id))
}
sealed abstract class NewUserData {
def name: String
def username: String
def email: String
def emailVerifiedAt: Option[ju.Date]
def userNoId = User(
id = "?",
displayName = name,
username = Some(username),
createdAt = None,
email = email,
emailNotfPrefs = EmailNotfPrefs.Unspecified,
emailVerifiedAt = emailVerifiedAt,
country = "",
website = "",
isAdmin = false,
isOwner = false)
def identityNoId: Identity
Validation.checkName(name)
Validation.checkUsername(username)
Validation.checkEmail(email)
}
case class NewPasswordUserData(
name: String,
username: String,
email: String,
password: String) {
val passwordHash: String =
DbDao.saltAndHashPassword(password)
def userNoId = User(
id = "?",
displayName = name,
username = Some(username),
createdAt = None,
email = email,
emailNotfPrefs = EmailNotfPrefs.Unspecified,
emailVerifiedAt = None,
passwordHash = Some(passwordHash),
country = "",
website = "",
isAdmin = false,
isOwner = false)
Validation.checkName(name)
Validation.checkUsername(username)
Validation.checkEmail(email)
Validation.checkPassword(password)
}
object NewPasswordUserData {
def create(name: String, username: String, email: String, password: String)
: NewPasswordUserData Or ErrorMessage = {
for {
okName <- Validation.checkName(name)
okUsername <- Validation.checkUsername(username)
okEmail <- Validation.checkEmail(email)
okPassword <- Validation.checkPassword(password)
}
yield {
NewPasswordUserData(name = okName, username = okUsername, email = okEmail,
password = okPassword)
}
}
}
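// Usage sketch (not part of the original file): `create` threads all four
// checks through a for-comprehension over Or, so the first failing validation
// short-circuits with its ErrorMessage. Good/Bad are org.scalactic's Or cases:
//
//   NewPasswordUserData.create("Ada", "ada", "[email protected]", "s3cret") match {
//     case Good(data)   => ... // e.g. persist data.userNoId
//     case Bad(problem) => ... // report the ErrorMessage
//   }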
case class NewOauthUserData(
name: String,
username: String,
email: String,
emailVerifiedAt: Option[ju.Date],
identityData: OpenAuthDetails) extends NewUserData {
def identityNoId =
OpenAuthIdentity(id = "?", userId = "?", openAuthDetails = identityData)
}
object NewOauthUserData {
def create(name: String, email: String, emailVerifiedAt: Option[ju.Date], username: String,
identityData: OpenAuthDetails): NewOauthUserData Or ErrorMessage = {
for {
okName <- Validation.checkName(name)
okUsername <- Validation.checkUsername(username)
okEmail <- Validation.checkEmail(email)
}
yield {
NewOauthUserData(name = okName, username = okUsername, email = okEmail,
emailVerifiedAt = emailVerifiedAt, identityData = identityData)
}
}
}
case class NameAndUsername(fullName: String, username: String)
case object UserIdData {
/** For test suites. */
def newTest(userId: UserId, ip: String = "111.112.113.114") =
UserIdData(userId, ip, browserIdCookie = None, browserFingerprint = 0)
}
case class UserIdData(
userId: UserId,
ip: String,
browserIdCookie: Option[String],
browserFingerprint: Int) {
require(userId.nonEmpty, "DwE182WH9")
require(ip.nonEmpty, "DwE6G9F0")
require(browserIdCookie.map(_.isEmpty) != Some(true), "DwE3GJ79")
def anyGuestId: Option[String] =
if (isGuestId(userId)) Some(userId drop 1) else None
def anyRoleId: Option[String] =
if (isRoleId(userId)) Some(userId) else None
def isAnonymousUser = ip == "0.0.0.0"
def isUnknownUser = userId == UnknownUser.Id
def isSystemUser = userId == SystemUser.User.id
}
case object User {
def isGuestId(userId: UserId) = userId.startsWith("-") && userId.length > 1
def isRoleId(userId: UserId) = !isGuestId(userId) && userId.nonEmpty
/**
* Checks for weird ASCII chars in an user name.
*
* Cannot be used with names from identity providers, e.g. OpenID
* or Twitter: the providers do their own user name sanity checks,
* and we should handle anything they accept?
*/
def nameIsWeird(name: String): Boolean = {
// Could check for weird Unicode whitespace too, but that will
// probably be implicitly solved, when handling spam? ASCII,
// however, could mess up the internals of something, because
// sometimes the system assigns magic meanings to ASCII chars
// that only an attacker would use?
for (c <- name if c < 0x80) {
if (c < ' ') return true // control chars
if (c < '0' && !(" '-." contains c)) return true // punctuation
if (c > '9' && c < 'A') return true // more punctuation
if (c > 'Z' && c < 'a') return true // even more punctuation
if (c > 'z' && c <= 127) return true // punctuation
}
false
}
/**
* Checks for weird ASCII chars in an email,
* and that it matches """.+@.+\..+""".
*/
def emailIsWeird(email: String): Boolean = {
// Differences from nameIsWeird(): allows "@_", disallows "'".
for (c <- email if c < 0x80) {
if (c <= ' ') return true // forbid control chars and space
if (c < '0' && !(" -." contains c)) return true // punctuation
if (c > '9' && c < '@') return true // email, so '@' ok
if (c > 'Z' && c < 'a' && !"_".contains(c)) return true // punctuation
if (c > 'z' && c <= 127) return true // punctuation
}
if (email matches """.+@.+\..+""") return false
true
}
/**
* Allows all chars but control chars, space and < >
*/
def urlIsWeird(url: String): Boolean = {
for (c <- url if c < 0x80) {
if (c <= ' ') return true // forbid control chars and space
if ("<>" contains c) return true
if (c == 127) return true // control char?
}
false
}
def checkId(id: String, errcode: String) {
if (id == "") assErr(errcode, "Empty ID ")
if (id == "0") assErr(errcode, "ID is `0' ")
// "?" is okay, means unknown.
}
}
/* Could use:
sealed abstract class UserId
case class GuestId(String) extends UserId
case class RoleId(String) extends UserId
-- instead of setting User.id to "-<some-id>" for IdentitySimple,
and "<some-id>" for Role:s.
*/
/**
*
* @param id Starts with "-" for guest users. COULD replace with UserId (see above).
* @param displayName
* @param username Is None for guests, and some old users created before usernames had
* been implemented.
* @param createdAt None for guests.
* @param email
* @param emailNotfPrefs
* @param emailVerifiedAt
* @param country
* @param website COULD rename to url, that's more generic.
* @param isAdmin
* @param isOwner
*/
case class User (
id: String,
displayName: String,
username: Option[String],
createdAt: Option[ju.Date],
email: String, // COULD rename to emailAddr
emailNotfPrefs: EmailNotfPrefs,
emailVerifiedAt: Option[ju.Date] = None,
passwordHash: Option[String] = None,
country: String = "",
website: String = "",
isAdmin: Boolean = false,
isOwner: Boolean = false
){
checkId(id, "DwE02k125r")
def isAuthenticated = isRoleId(id) && !id.startsWith("?")
def isGuest = User.isGuestId(id)
def anyRoleId: Option[String] = if (isRoleId(id)) Some(id) else None
def anyGuestId: Option[String] = if (isGuestId(id)) Some(id drop 1) else None
def theRoleId: String = anyRoleId getOrDie "DwE035SKF7"
def theGuestId: String = anyGuestId getOrDie "DwE5GK904"
}
/**
* Used when searching for users.
*/
case class UserQuery()
object EmailNotfPrefs extends Enumeration {
type EmailNotfPrefs = Value
val Receive, DontReceive, ForbiddenForever, Unspecified = Value
}
case class RolePageSettings(
notfLevel: PageNotfLevel)
object RolePageSettings {
val Default = RolePageSettings(PageNotfLevel.Regular)
}
sealed abstract class LoginAttempt {
def ip: String
def date: ju.Date
}
case class GuestLoginAttempt(
ip: String,
date: ju.Date,
name: String,
email: String = "",
location: String = "",
website: String = "")
case class GuestLoginResult(user: User, isNewUser: Boolean)
case class PasswordLoginAttempt(
ip: String,
date: ju.Date,
email: String,
password: String) extends LoginAttempt {
}
case class EmailLoginAttempt(
ip: String,
date: ju.Date,
emailId: String) extends LoginAttempt {
}
case class OpenIdLoginAttempt(
ip: String,
date: ju.Date,
openIdDetails: OpenIdDetails) extends LoginAttempt {
}
case class OpenAuthLoginAttempt(
ip: String,
date: ju.Date,
openAuthDetails: OpenAuthDetails) extends LoginAttempt {
def profileProviderAndKey = openAuthDetails.providerIdAndKey
}
/**
* A user might have many identities, e.g. an OpenID Gmail identity and
* a Twitter identity.
* COULD tease apart inheritance:
* Split into three unrelated classes 1) EmailLinkLogin, 2) Guest and
* 3) Identity, with:
* authn: AuthnOpenId(...), AuthnOAuth1(...) & 2, AuthnPassword(...)
* identityProvider: Gmail, Facebook, Twitter, Local, ...)
*/
sealed abstract class Identity {
/** A local id, not a guid. -- hmm, no, it'll be a database *unique* id?!
*
* For example, if a user is loaded for inclusion on page X,
* its id might be another from when loaded for display on
* another page Y.
*
* At least for NoSQL databases (e.g. Cassandra) the id will probably
* vary from page to page. Because the user data is probably denormalized:
* it's included on each page where the user leaves a reply!
* For relational databases, however, the id might be the same always,
* on all pages. Instead of denormalizing data, indexes and table joins
* are used.
*/
def id: String
def userId: String
checkId(id, "DwE02krc3g")
checkId(userId, "DwE864rsk215")
}
/**
* By specifying an id of an email that has been sent to you,
* you can login. This login type is insecure (email transmission is
* not secure) and only used for unsubscriptions.
* @param id The email id. Should refer to an email that has already
* been saved in the database.
* @param userId The user that received the email. Not known before
* login (is "?").
* @param emailSent Not known before login (is `None`)
*/
case class IdentityEmailId(
id: String,
userId: String = "?",
emailSent: Option[Email] = None
) extends Identity {
// Either only email id known, or all info known.
require((userId startsWith "?") == emailSent.isEmpty)
}
case class IdentityOpenId(
id: String,
override val userId: String,
openIdDetails: OpenIdDetails) extends Identity {
def displayName = openIdDetails.firstName
}
case class OpenIdDetails(
oidEndpoint: String,
oidVersion: String,
oidRealm: String, // perhaps need not load from db?
// The OpenID depends on the realm, for Gmail. So for tenants
// with different realms (e.g. realms *.debiki.net and another-domain.com)
// the same user will be found in two different UserOpenID instances.
// However their Gmail addresses will be identical, so for Gmail,
// checking email could be helpful. But must ensure the OpenID provider
// is Gmail! otherwise an evil provider could provide false email addresses.
oidClaimedId: String,
oidOpLocalId: String,
firstName: String,
email: Option[String],
country: String) {
}
case class OpenAuthIdentity(
id: IdentityId,
override val userId: UserId,
openAuthDetails: OpenAuthDetails) extends Identity {
def displayName = openAuthDetails.displayName
}
case class OpenAuthDetails(
providerId: String,
providerKey: String,
firstName: Option[String] = None,
lastName: Option[String] = None,
fullName: Option[String] = None,
email: Option[String] = None,
avatarUrl: Option[String] = None) {
def providerIdAndKey = OpenAuthProviderIdKey(providerId, providerKey)
def displayName = firstName.orElse(fullName).getOrElse("(unknown name)")
}
case class OpenAuthProviderIdKey(providerId: String, providerKey: String)
case class LoginGrant(
identity: Option[Identity],
user: User,
isNewIdentity: Boolean,
isNewRole: Boolean) {
require(identity.map(_.id.contains('?')) != Some(true))
require(!user.id.contains('?'))
require(identity.map(_.userId == user.id) != Some(false))
require(!isNewRole || isNewIdentity)
def displayName: String = user.displayName
def email: String = user.email
/** For test suites. */
def testUserIdData =
UserIdData.newTest(userId = user.id)
}
/** A user that voted on a comment but was not logged in.
*/
object UnknownUser {
/** "-" means it's not a role, it's a guest. "3" is the next number after the
* first two magic ids, which are "1" for the system user and "2" for the dummy
* author user (see DummyPage.scala).
*/
val Id = "-3"
val User = com.debiki.core.User(id = Id, displayName = "(unknown user)", username = None,
createdAt = None, email = "", emailNotfPrefs = EmailNotfPrefs.DontReceive,
emailVerifiedAt = None, isAdmin = false)
}
/**
* Used when things are inserted automatically into the database,
* e.g. an automatically generated default homepage, for a new website.
*/
object SystemUser {
import com.debiki.core
val Ip = "SystemUserIp"
val User = core.User(id = "1", displayName = "System", username = None,
createdAt = None, email = "", emailNotfPrefs = EmailNotfPrefs.DontReceive,
emailVerifiedAt = None, isAdmin = true)
val Person = People(List(User))
val UserIdData = core.UserIdData(
userId = SystemUser.User.id,
ip = Ip,
browserIdCookie = None,
browserFingerprint = 0)
}
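/* Id-convention sketch (not part of the original file): guest ids are role
 * ids prefixed with "-", which is what anyGuestId/anyRoleId decode:
 *
 *   User.isGuestId("-3")                  // true, e.g. UnknownUser.Id
 *   User.isRoleId("1")                    // true, e.g. SystemUser.User.id
 *   UserIdData.newTest("-7").anyGuestId   // Some("7"), the "-" is stripped
 */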
// vim: fdm=marker et ts=2 sw=2 tw=80 fo=tcqwn list
| debiki/debiki-server-old | modules/debiki-core/src/main/scala/com/debiki/core/user.scala | Scala | agpl-3.0 | 14,841 |
/*
* Copyright 2018 Frugal Mechanic (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.common
import java.time.Instant
import java.util.Date
import org.scalatest.{FunSuite, Matchers}
final class TestImmutableDate extends FunSuite with Matchers {
private val date: Date = new Date()
private val immutableDate: ImmutableDate = ImmutableDate(date)
private val instant: Instant = date.toInstant
test("Basics") {
immutableDate.getTime shouldBe date.getTime
immutableDate.millis shouldBe date.getTime
}
test("Date => ImmutableDate - non-null") {
(date: ImmutableDate) shouldBe immutableDate
}
test("ImmutableDate => Date - non-null") {
(immutableDate: Date) shouldBe date
}
test("ImmutableDate => Instant - non-null") {
(immutableDate: Instant) shouldBe instant
}
test("apply with non-null Date") {
(ImmutableDate(date)) shouldBe immutableDate
}
test("apply with non-null Instant") {
(ImmutableDate(instant)) shouldBe immutableDate
}
test("ImmutableDate => Date Implicit - null") {
((null: ImmutableDate): Date) shouldBe null
}
test("Date => ImmutableDate Implicit - null") {
((null: Date): ImmutableDate) shouldBe null
}
test("ImmutableDate => Instant Implicit - null") {
((null: ImmutableDate): Instant) shouldBe null
}
test("apply with null Date") {
(ImmutableDate(null: Date)) shouldBe null
}
test("apply with null Instant") {
(ImmutableDate(null: Instant)) shouldBe null
}
}
| frugalmechanic/fm-common | jvm/src/test/scala/fm/common/TestImmutableDate.scala | Scala | apache-2.0 | 2,037 |
/*
* Copyright (c) 2011-13 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import org.junit.Test
import org.junit.Assert._
class NatTests {
import nat._
import ops.nat._
trait Check[N <: Nat]
def check(expected: Nat)(actual : => Check[expected.N]) {}
@Test
def testNat {
implicitly[Succ[_1] =:= _2]
implicitly[Pred.Aux[_19, _18]]
def pred(n: Nat)(implicit pred : Pred[n.N]) = new Check[pred.Out] {}
val pd1 = pred(19)
check(18)(pd1)
implicitly[Sum.Aux[_2, _3, _5]]
def sum(a: Nat, b: Nat)(implicit sum : Sum[a.N, b.N]) = new Check[sum.Out] {}
val s1 = sum(2, 3)
check(5)(s1)
implicitly[Diff.Aux[_5, _1, _4]]
def diff(a: Nat, b: Nat)(implicit diff : Diff[a.N, b.N]) = new Check[diff.Out] {}
val diff1 = diff(5, 1)
check(4)(diff1)
implicitly[Prod.Aux[_2, _3, _6]]
implicitly[Prod.Aux[_4, _5, _20]]
def prod(a: Nat, b: Nat)(implicit prod : Prod[a.N, b.N]) = new Check[prod.Out] {}
val p1 = prod(2, 3)
check(6)(p1)
val p2 = prod(4, 5)
check(20)(p2)
implicitly[Div.Aux[_7, _2, _3]]
implicitly[Div.Aux[_22, _11, _2]]
implicitly[Div.Aux[_15, _3, _5]]
def div(a: Nat, b: Nat)(implicit div : Div[a.N, b.N]) = new Check[div.Out] {}
val d1 = div(7, 2)
check(3)(d1)
val d2 = div(22, 11)
check(2)(d2)
val d3 = div(15, 3)
check(5)(d3)
implicitly[Mod.Aux[_7, _2, _1]]
implicitly[Mod.Aux[_22, _5, _2]]
implicitly[Mod.Aux[_9, _3, _0]]
def mod(a: Nat, b: Nat)(implicit mod : Mod[a.N, b.N]) = new Check[mod.Out] {}
val m1 = mod(7, 2)
check(1)(m1)
val m2 = mod(22, 5)
check(2)(m2)
val m3 = mod(9, 3)
check(0)(m3)
implicitly[LT[_3, _5]]
implicitly[LT[_10, _15]]
implicitly[LTEq[_2, _2]]
implicitly[LTEq[_2, _3]]
implicitly[Min.Aux[_0, _0, _0]]
implicitly[Min.Aux[_5, _2, _2]]
implicitly[Min.Aux[_3, _8, _3]]
def min[A <: Nat, B <: Nat](implicit min : Min[A, B]) = new Check[min.Out] {}
val min1 = min[_3, _4]
check(3)(min1)
val min2 = min[_5, _4]
check(4)(min2)
// Pow[N, X] witnesses X ** N (exponent first), as the checks below show.
implicitly[Pow.Aux[_0, _8, _1]]
implicitly[Pow.Aux[_9, _0, _0]]
implicitly[Pow.Aux[_3, _2, _8]]
def pow[A <: Nat, B <: Nat](implicit pow : Pow[A, B]) = new Check[pow.Out] {}
val e1 = pow[_3, _1]
check(1)(e1)
val e2 = pow[_2, _3]
check(9)(e2)
val e3 = pow[_2, _4]
check(16)(e3)
// Type level
assertEquals(0, toInt[_0])
assertEquals(1, toInt[_1])
assertEquals(2, toInt[_2])
assertEquals(3, toInt[_3])
assertEquals(4, toInt[_4])
assertEquals(5, toInt[_5])
assertEquals(6, toInt[_6])
assertEquals(7, toInt[_7])
assertEquals(8, toInt[_8])
assertEquals(9, toInt[_9])
assertEquals(10, toInt[_10])
assertEquals(11, toInt[_11])
assertEquals(12, toInt[_12])
assertEquals(13, toInt[_13])
assertEquals(14, toInt[_14])
assertEquals(15, toInt[_15])
assertEquals(16, toInt[_16])
assertEquals(17, toInt[_17])
assertEquals(18, toInt[_18])
assertEquals(19, toInt[_19])
assertEquals(20, toInt[_20])
assertEquals(21, toInt[_21])
assertEquals(22, toInt[_22])
// Value level
assertEquals(0, toInt(_0))
assertEquals(1, toInt(_1))
assertEquals(2, toInt(_2))
assertEquals(3, toInt(_3))
assertEquals(4, toInt(_4))
assertEquals(5, toInt(_5))
assertEquals(6, toInt(_6))
assertEquals(7, toInt(_7))
assertEquals(8, toInt(_8))
assertEquals(9, toInt(_9))
assertEquals(10, toInt(_10))
assertEquals(11, toInt(_11))
assertEquals(12, toInt(_12))
assertEquals(13, toInt(_13))
assertEquals(14, toInt(_14))
assertEquals(15, toInt(_15))
assertEquals(16, toInt(_16))
assertEquals(17, toInt(_17))
assertEquals(18, toInt(_18))
assertEquals(19, toInt(_19))
assertEquals(20, toInt(_20))
assertEquals(21, toInt(_21))
assertEquals(22, toInt(_22))
}
}
| mandubian/shapeless | core/src/test/scala/shapeless/nat.scala | Scala | apache-2.0 | 4,490 |
package scala.meta
package internal
package equality
import scala.meta.internal.semantic._
// NOTE: Semantic comparison operates almost like structural comparison,
// but also taking into account envs, denots and typings.
// The difference with structural comparison is refs being treated differently, namely:
// 1) some structurally unequal refs (even having different types!) may compare equal when they refer to the same defns
// 2) some structurally equal refs may compare unequal when they refer to different defns
// Now let's go through all of our refs and see how we should compare them.
// At the moment, we have 17 different AST nodes that are subtype of Ref:
// Name.Indeterminate,
// Term.Name, Term.Select, Term.ApplyUnary,
// Type.Name, Type.Select, Type.Project, Type.Singleton,
// Pat.Type.Project,
// Ctor.Ref.Name, Ctor.Ref.Select, Ctor.Ref.Project, Ctor.Ref.Function,
// Selector.Wildcard, Selector.Name, Selector.Rename, Selector.Unimport.
// In the implementation that follows we do the following to compare these refs:
// 1) XXX.Name vs name-like XXX.Select/Type.Project, where XXX can be Term, Type or Ctor.Ref, are compared equal if they refer to the same defn
// 2) Term.This, Term.Super, as well as all PrivateXXX/ProtectedXXX are compared equal to themselves if they refer to the same defn
// 3) YYY.ZZZ vs YYY.ZZZ for the rest of the refs are compared structurally
// TODO: I really don't like what I'm doing here.
// It would seem that instead of this bad-looking Any-based design,
// we should have Equality[T] { def equals; def hashCode }, which would be
// both modular (easily switch parts of the implementation) and type-safe.
// However, with the amount of AST nodes that we have,
// spelling all cases out manually will take prohibitively too much time.
// I would like to fix this in the future.
object Semantic {
def equals(x1: Any, x2: Any): Boolean = customEquals(x1, x2)
private def customEquals(x: Any, y: Any): Boolean = (x, y) match {
case (x, y) if x == null || y == null =>
x == null && y == null
case (x: Some[_], y: Some[_]) =>
customEquals(x.get, y.get)
case (x: None.type, y: None.type) =>
true
case (xs: Seq[_], ys: Seq[_]) =>
xs.length == ys.length && xs.zip(ys).forall{ case (x, y) => customEquals(x, y) }
case (x: Environment, y: Environment) =>
x == y
case (x: Prefix, y: Prefix) =>
(x, y) match {
case (Prefix.Type(x), Prefix.Type(y)) => customEquals(x, y)
case _ => x == y
}
case (x: Denotation, y: Denotation) =>
customEquals(x.prefix, y.prefix) && customEquals(x.symbols, y.symbols)
case (x: Typing, y: Typing) =>
(x, y) match {
case (Typing.Nonrecursive(x), Typing.Nonrecursive(y)) => customEquals(x, y)
case _ => x == y
}
case (x: Tree, y: Tree) =>
def syntaxPart = {
def compareStructure(x: Tree, y: Tree) = {
x.productPrefix == y.productPrefix &&
customEquals(x.productIterator.toList, y.productIterator.toList)
}
def compareSemantics(x: Name, y: Name) = {
x.denot != Denotation.None && y.denot != Denotation.None && x.denot == y.denot
}
(x, y) match {
case (NonRef(x), NonRef(y)) => compareStructure(x, y)
case (NameRef(namex, tagx), NameRef(namey, tagy)) => tagx == tagy && compareSemantics(namex, namey)
case (OpaqueRef(namex, tagx), OpaqueRef(namey, tagy)) => tagx == tagy && compareSemantics(namex, namey)
case (StructuralRef(x), StructuralRef(y)) => compareStructure(x, y)
case _ => false
}
}
def envPart = customEquals(x.privateEnv, y.privateEnv)
def denotPart = customEquals(x.privateDenot, y.privateDenot)
def typingPart = customEquals(x.privateTyping, y.privateTyping)
syntaxPart && envPart && denotPart && typingPart
case _ =>
x == y
}
def hashCode(x: Any): Int = customHashcode(x)
private def customHashcode(x: Any): Int = x match {
case null =>
0
case x: Option[_] =>
x.map(customHashcode).getOrElse(0)
case xs: Seq[_] =>
xs.foldLeft(0)((acc, curr) => acc * 37 + customHashcode(curr))
case x: Environment =>
x.hashCode
case x: Prefix =>
x match {
case Prefix.None => 0
case Prefix.Type(tpe) => customHashcode(tpe)
}
case x: Denotation =>
x match {
case Denotation.None => 0
case Denotation.Single(prefix, symbol) => customHashcode(prefix) * 37 + customHashcode(symbol)
case Denotation.Multi(prefix, symbols) => customHashcode(prefix) * 37 + customHashcode(symbols)
}
case x: Typing =>
x match {
case Typing.None => 0
case Typing.Recursive => 1
case Typing.Nonrecursive(tpe) => customHashcode(tpe)
}
case x: Tree =>
def syntaxPart = {
def hashStructure(x: Tree) = customHashcode(x.productPrefix) * 37 + customHashcode(x.productIterator.toList)
def hashSemantics(x: Name) = customHashcode(x.denot)
x match {
case NameRef(namex, tagx) => hashSemantics(namex) * 37 + tagx
case OpaqueRef(namex, tagx) => hashSemantics(namex) * 37 + tagx
case _ => hashStructure(x)
}
}
def envPart = customHashcode(x.privateEnv)
def denotPart = customHashcode(x.privateDenot)
def typingPart = customHashcode(x.privateTyping)
customHashcode(List(syntaxPart, envPart, denotPart, typingPart))
case _ =>
x.hashCode
}
}
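// Usage sketch (not part of the original file; `ref1` and `ref2` stand for
// hypothetical resolved trees): two refs that differ structurally compare
// equal here when their names carry the same non-empty Denotation, per the
// NameRef/OpaqueRef cases above, and hashCode is kept consistent with equals:
//
//   Semantic.equals(ref1, ref2)
//   Semantic.hashCode(ref1) == Semantic.hashCode(ref2)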
| Dveim/scalameta | scalameta/trees/src/main/scala/scala/meta/internal/equality/Semantic.scala | Scala | bsd-3-clause | 5,552 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import java.io.{IOException, ObjectOutputStream}
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
import org.apache.spark.{Dependency, Partition, RangeDependency, SparkContext, TaskContext}
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.util.Utils
/**
* Partition for UnionRDD.
*
* @param idx index of the partition
* @param rdd the parent RDD this partition refers to
* @param parentRddIndex index of the parent RDD this partition refers to
* @param parentRddPartitionIndex index of the partition within the parent RDD
* this partition refers to
*/
private[spark] class UnionPartition[T: ClassTag](
idx: Int,
@transient rdd: RDD[T],
val parentRddIndex: Int,
@transient parentRddPartitionIndex: Int)
extends Partition {
var parentPartition: Partition = rdd.partitions(parentRddPartitionIndex)
def preferredLocations(): Seq[String] = rdd.preferredLocations(parentPartition)
override val index: Int = idx
@throws(classOf[IOException])
private def writeObject(oos: ObjectOutputStream): Unit = Utils.tryOrIOException {
// Update the reference to parent split at the time of task serialization
parentPartition = rdd.partitions(parentRddPartitionIndex)
oos.defaultWriteObject()
}
}
@DeveloperApi
class UnionRDD[T: ClassTag](
sc: SparkContext,
var rdds: Seq[RDD[T]])
extends RDD[T](sc, Nil) { // Nil since we implement getDependencies
override def getPartitions: Array[Partition] = {
val array = new Array[Partition](rdds.map(_.partitions.length).sum)
var pos = 0
for ((rdd, rddIndex) <- rdds.zipWithIndex; split <- rdd.partitions) {
array(pos) = new UnionPartition(pos, rdd, rddIndex, split.index)
pos += 1
}
array
}
override def getDependencies: Seq[Dependency[_]] = {
val deps = new ArrayBuffer[Dependency[_]]
var pos = 0
for (rdd <- rdds) {
deps += new RangeDependency(rdd, 0, pos, rdd.partitions.length)
pos += rdd.partitions.length
}
deps
}
override def compute(s: Partition, context: TaskContext): Iterator[T] = {
val part = s.asInstanceOf[UnionPartition[T]]
parent[T](part.parentRddIndex).iterator(part.parentPartition, context)
}
override def getPreferredLocations(s: Partition): Seq[String] =
s.asInstanceOf[UnionPartition[T]].preferredLocations()
override def clearDependencies() {
super.clearDependencies()
rdds = null
}
}
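// Usage sketch (not part of the original file): UnionRDD is what backs
// SparkContext.union and RDD.++. Partitions are concatenated, not merged:
//
//   val a = sc.parallelize(1 to 3, 2)
//   val b = sc.parallelize(4 to 6, 3)
//   val u = new UnionRDD(sc, Seq(a, b))
//   u.partitions.length // 5, i.e. 2 + 3
//   u.collect()         // Array(1, 2, 3, 4, 5, 6)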
| andrewor14/iolap | core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala | Scala | apache-2.0 | 3,317 |
/*
Copyright (c) 2016, Rice University
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Rice University
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.spark.rdd.cl._
import Array._
import scala.math._
import org.apache.spark.rdd._
import java.net._
object SparkSimple {
def main(args : Array[String]) {
if (args.length < 1) {
println("usage: SparkSimple cmd")
return;
}
val cmd = args(0)
if (cmd == "convert") {
convert(args.slice(1, args.length))
} else if (cmd == "run") {
run_simple(args.slice(2, args.length), args(1).toBoolean)
} else if (cmd == "check") {
val correct : Array[(Int, Int)] = run_simple(args.slice(1, args.length), false)
val actual : Array[(Int, Int)] = run_simple(args.slice(1, args.length), true)
assert(correct.length == actual.length)
for (i <- 0 until correct.length) {
val a : (Int, Int) = correct(i)
val b : (Int, Int) = actual(i)
var error : Boolean = false
if (a._1 != b._1) {
System.err.println(i + " _1 expected " + a._1 +
" but got " + b._1)
error = true
}
if (a._2 != b._2) {
System.err.println(i + " _2 expected " + a._2 +
" but got " + b._2)
error = true
}
if (error) System.exit(1)
}
System.err.println("PASSED")
}
}
def get_spark_context(appName : String) : SparkContext = {
val conf = new SparkConf()
conf.setAppName(appName)
val localhost = InetAddress.getLocalHost
conf.setMaster("spark://" + localhost.getHostName + ":7077") // 7077 is the default port
return new SparkContext(conf)
}
def run_simple(args : Array[String], useSwat : Boolean) : Array[(Int, Int)] = {
if (args.length != 1) {
println("usage: SparkSimple run input-path");
return new Array[(Int, Int)](0);
}
val sc = get_spark_context("Spark Simple");
val inputPath = args(0)
val inputs_raw : RDD[(Int, Int)] = sc.objectFile[(Int, Int)](inputPath).cache
val inputs = if (useSwat) CLWrapper.cl[(Int, Int)](inputs_raw) else inputs_raw
val outputs : RDD[(Int, Int)] =
inputs.map(v => (v._2, v._1))
val outputs2 : Array[(Int, Int)] = outputs.collect
sc.stop
outputs2
}
def convert(args : Array[String]) {
if (args.length != 2) {
println("usage: SparkSimple convert input-dir output-dir");
return
}
val sc = get_spark_context("Spark Simple Converter");
val inputDir = args(0)
var outputDir = args(1)
val input = sc.textFile(inputDir)
val converted = input.map(line => {
val tokens : Array[String] = line.split(" ")
assert(tokens.size == 2)
(tokens(0).toInt, tokens(1).toInt)
})
converted.saveAsObjectFile(outputDir)
}
}
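// Command-line sketch (derived from the argument handling above; paths are
// placeholders):
//
//   SparkSimple convert /in/text-pairs /out/object-pairs
//   SparkSimple run false /out/object-pairs  // plain Spark map
//   SparkSimple run true /out/object-pairs   // same map through the SWAT/OpenCL wrapper
//   SparkSimple check /out/object-pairs      // compares the two runs element-wise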
| agrippa/spark-swat | functional-tests/tuple-prim-input-output/src/main/scala/sparksimple/SparkSimple.scala | Scala | bsd-3-clause | 4,663 |
package swe.backend.service
import java.net._
import com.sun.net.httpserver._
import com.hp.hpl.jena.rdf.model._
import com.hp.hpl.jena.query._
import com.hp.hpl.jena.sparql.resultset._
import scala.collection.JavaConversions._
import java.io._
import scala.io.Source
import java.nio._
import org.apache.commons.logging._
import swe.backend._
class WebServer( backend:Backend, port:Int ){
private val log:Log = LogFactory.getLog( this.getClass )
private val server = HttpServer.create( new InetSocketAddress( port ), 10 )
server.createContext("/", new FileHandler( "webClient" ) )
server.createContext("/model/n3", new OntologyHandler( backend, "N3" ) )
server.createContext("/model/rdf", new OntologyHandler( backend, "RDF/XML" ) )
server.createContext("/sparql", new SparqlHandler( backend ) )
server.createContext("/add/ResourceStatement", new AddStatementHandler( backend, "resource" ) )
server.createContext("/add/LiteralStatement", new AddStatementHandler( backend, "literal" ) )
def start = server.start
}
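// Usage sketch (not part of the original file): once started, the contexts
// registered in the constructor are reachable over HTTP. `backend` is
// whatever swe.backend.Backend instance the application wires in.
//
//   val server = new WebServer(backend, 8080)
//   server.start
//   // http://localhost:8080/model/n3  serves the ontology as N3
//   // http://localhost:8080/model/rdf serves it as RDF/XML
//   // http://localhost:8080/sparql    answers SPARQL queries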
| flosse/semanticExperiments | backend/src/main/scala/service/WebServer.scala | Scala | gpl-3.0 | 1,069 |
package com.twitter.finagle.thrift
import com.google.common.base.Charsets
import com.twitter.finagle.stats.{Counter, DefaultStatsReceiver, StatsReceiver}
import com.twitter.logging.Logger
import com.twitter.util.NonFatal
import java.nio.{ByteBuffer, CharBuffer}
import java.nio.charset.{CoderResult, CharsetEncoder}
import java.security.{PrivilegedExceptionAction, AccessController}
import org.apache.thrift.protocol.{TProtocol, TProtocolFactory, TBinaryProtocol}
import org.apache.thrift.transport.TTransport
object Protocols {
// based on guava's UnsignedBytes.getUnsafe()
private[this] def getUnsafe: sun.misc.Unsafe = {
try {
sun.misc.Unsafe.getUnsafe()
} catch {
case NonFatal(_) => // try reflection instead
try {
AccessController.doPrivileged(new PrivilegedExceptionAction[sun.misc.Unsafe]() {
def run(): sun.misc.Unsafe = {
val k = classOf[sun.misc.Unsafe]
for (f <- k.getDeclaredFields) {
f.setAccessible(true)
val x = f.get(null)
if (k.isInstance(x)) {
return k.cast(x)
}
}
throw new NoSuchFieldException("the Unsafe") // fall through to the catch block below
}
})
} catch {
case NonFatal(t) =>
Logger.get().info("%s unable to initialize sun.misc.Unsafe", getClass.getName)
null
}
}
}
private val unsafe: Option[sun.misc.Unsafe] = Option(getUnsafe)
private[this] def optimizedBinarySupported: Boolean = unsafe.isDefined
/**
* Returns a `TProtocolFactory` that creates `TProtocol`s that
* are wire-compatible with `TBinaryProtocol`.
*/
def binaryFactory(
strictRead: Boolean = false,
strictWrite: Boolean = true,
readLength: Int = 0,
statsReceiver: StatsReceiver = DefaultStatsReceiver
): TProtocolFactory = {
if (!optimizedBinarySupported) {
new TBinaryProtocol.Factory(strictRead, strictWrite, readLength)
} else {
// Factories are created rarely while the creation of their TProtocol's
// is a common event. Minimize counter creation to just once per Factory.
val fastEncodeFailed = statsReceiver.counter("fast_encode_failed")
val largerThanTlOutBuffer = statsReceiver.counter("larger_than_threadlocal_out_buffer")
new TProtocolFactory {
override def getProtocol(trans: TTransport): TProtocol = {
val proto = new TFinagleBinaryProtocol(
trans, fastEncodeFailed, largerThanTlOutBuffer, strictRead, strictWrite)
if (readLength != 0) {
proto.setReadLength(readLength)
}
proto
}
}
}
}
def factory(statsReceiver: StatsReceiver = DefaultStatsReceiver): TProtocolFactory = {
binaryFactory(statsReceiver = statsReceiver)
}
// Visible for testing purposes.
private[thrift] object TFinagleBinaryProtocol {
// zero-length strings are written to the wire as an i32 of its length, which is 0
private val EmptyStringInBytes = Array[Byte](0, 0, 0, 0)
// assume that most of our strings are mostly single byte utf8
private val MultiByteMultiplierEstimate = 1.3f
/** Only valid if unsafe is defined */
private val StringValueOffset: Long = unsafe.map {
_.objectFieldOffset(classOf[String].getDeclaredField("value"))
}.getOrElse(Long.MinValue)
/**
* Note, some versions of the JDK's define `String.offset`,
* while others do not and always use 0.
*/
private val OffsetValueOffset: Long = unsafe.map { u =>
try {
u.objectFieldOffset(classOf[String].getDeclaredField("offset"))
} catch {
case NonFatal(_) => Long.MinValue
}
}.getOrElse(Long.MinValue)
/**
* Note, some versions of the JDK's define `String.count`,
* while others do not and always use `value.length`.
*/
private val CountValueOffset: Long = unsafe.map { u =>
try {
u.objectFieldOffset(classOf[String].getDeclaredField("count"))
} catch {
case NonFatal(_) => Long.MinValue
}
}.getOrElse(Long.MinValue)
private val charsetEncoder = new ThreadLocal[CharsetEncoder] {
override def initialValue() = Charsets.UTF_8.newEncoder()
}
// Visible for testing purposes
private[thrift] val OutBufferSize = 4096
private val outByteBuffer = new ThreadLocal[ByteBuffer] {
override def initialValue() = ByteBuffer.allocate(OutBufferSize)
}
}
/**
* An implementation of TBinaryProtocol that optimizes `writeString`
* to minimize object allocations.
*
* This specific speedup depends on sun.misc.Unsafe and will fall
* back to standard TBinaryProtocol in the case when it is unavailable.
*
* Visible for testing purposes.
*/
private[thrift] class TFinagleBinaryProtocol(
trans: TTransport,
fastEncodeFailed: Counter,
largerThanTlOutBuffer: Counter,
strictRead: Boolean = false,
strictWrite: Boolean = true)
extends TBinaryProtocol(
trans,
strictRead,
strictWrite)
{
import TFinagleBinaryProtocol._
override def writeString(str: String) {
if (str.length == 0) {
trans.write(EmptyStringInBytes)
return
}
// this is based on the CharsetEncoder code at:
// http://psy-lob-saw.blogspot.co.nz/2013/04/writing-java-micro-benchmarks-with-jmh.html
// we could probably do better than this via:
// https://github.com/nitsanw/jmh-samples/blob/master/src/main/java/psy/lob/saw/utf8/CustomUtf8Encoder.java
val u = unsafe.get
val chars = u.getObject(str, StringValueOffset).asInstanceOf[Array[Char]]
val offset = if (OffsetValueOffset == Long.MinValue) 0 else {
u.getInt(str, OffsetValueOffset)
}
val count = if (CountValueOffset == Long.MinValue) chars.length else {
u.getInt(str, CountValueOffset)
}
val charBuffer = CharBuffer.wrap(chars, offset, count)
val out = if (count * MultiByteMultiplierEstimate <= OutBufferSize) {
val o = outByteBuffer.get()
o.clear()
o
} else {
largerThanTlOutBuffer.incr()
ByteBuffer.allocate((count * MultiByteMultiplierEstimate).toInt)
}
val csEncoder = charsetEncoder.get()
csEncoder.reset()
val result = csEncoder.encode(charBuffer, out, true)
if (result != CoderResult.UNDERFLOW) {
fastEncodeFailed.incr()
super.writeString(str)
} else {
writeI32(out.position())
trans.write(out.array(), 0, out.position())
}
}
// Note: libthrift 0.5.0 has a bug when operating on ByteBuffer's with a non-zero arrayOffset.
// We instead use the version from head that fixes this issue.
override def writeBinary(bin: ByteBuffer) {
if (bin.hasArray) {
val length = bin.remaining()
writeI32(length)
trans.write(bin.array(), bin.position() + bin.arrayOffset(), length)
} else {
val array = new Array[Byte](bin.remaining())
bin.duplicate().get(array)
writeI32(array.length)
trans.write(array, 0, array.length)
}
}
}
}
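// Usage sketch (not part of the original file): the factory is a drop-in
// replacement for TBinaryProtocol.Factory. TMemoryBuffer is libthrift's
// in-memory transport:
//
//   import org.apache.thrift.transport.TMemoryBuffer
//   val proto = Protocols.binaryFactory().getProtocol(new TMemoryBuffer(64))
//   proto.writeString("héllo") // takes the Unsafe fast path when available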
| sveinnfannar/finagle | finagle-thrift/src/main/scala/com/twitter/finagle/thrift/Protocols.scala | Scala | apache-2.0 | 7,248 |
package com.danielasfregola.twitter4s.entities
final case class TweetId(id: Long, id_str: String)
| DanielaSfregola/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/TweetId.scala | Scala | apache-2.0 | 99 |
/*
* Copyright 2010-2011 Vilius Normantas <[email protected]>
*
* This file is part of Crossbow library.
*
* Crossbow is free software: you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* Crossbow is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with Crossbow. If not,
* see <http://www.gnu.org/licenses/>.
*/
package lt.norma.crossbow.indicators
import lt.norma.crossbow.core._
import lt.norma.crossbow.messages.EmptyMessage
import lt.norma.crossbow.core.testutils.approx
import org.scalatest.FunSuite
class AverageTest extends FunSuite {
class I(n: String) extends MutableIndicator[Double] {
def name = n
def dependencies = Empty
}
test("Average indicator") {
val e = 0.00001
val i1 = new I("A")
val i2 = new I("B")
val i3 = new I("C")
val i = new Average(i1, i2, i3)
val l = new IndicatorList(i)
expect("Average(A; B; C)") {
i.name
}
expect(1) {
i.dependencies.size
}
expect(None) {
i()
}
l.send(EmptyMessage)
expect(None) {
i()
}
i1.set(1)
l.send(EmptyMessage)
expect(None) {
i()
}
i2.set(2)
l.send(EmptyMessage)
expect(None) {
i()
}
i3.set(4)
l.send(EmptyMessage)
approx(2.33333, e) {
i.value
}
i3.set(20)
l.send(EmptyMessage)
approx(7.66666, e) {
i.value
}
i3.unset()
l.send(EmptyMessage)
expect(None) {
i()
}
i3.set(10)
l.send(EmptyMessage)
approx(4.33333, e) {
i.value
}
i1.set(2)
l.send(EmptyMessage)
approx(4.66666, e) {
i.value
}
}
test("One indicator") {
val i1 = new I("A")
val i = new Average(i1)
val l = new IndicatorList(i)
expect("Average(A)") {
i.name
}
expect(1) {
i.dependencies.size
}
expect(None) {
i()
}
i1.set(15)
l.send(EmptyMessage)
expect(15) {
i.value
}
}
test("No indicators") {
val i = new Average()
val l = new IndicatorList(i)
expect("Average()") {
i.name
}
expect(1) {
i.dependencies.size
}
expect(None) {
i()
}
l.send(EmptyMessage)
expect(None) {
i()
}
}
}
| ViliusN/Crossbow | crossbow-core/test/lt/norma/crossbow/indicators/AverageTest.scala | Scala | gpl-3.0 | 2,647 |
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package kafka.manager.utils
import kafka.manager.ClusterConfig
import org.scalatest.{Matchers, FunSuite}
/**
* @author hiral
*/
class TestClusterConfig extends FunSuite with Matchers {
test("invalid name") {
intercept[IllegalArgumentException] {
ClusterConfig("qa!","0.8.1.1","localhost")
}
}
test("invalid kafka version") {
intercept[IllegalArgumentException] {
ClusterConfig("qa","0.8.1","localhost:2181")
}
}
test("case insensitive name") {
assert(ClusterConfig("QA","0.8.1.1","localhost:2181").name === "qa")
}
test("case insensitive zk hosts") {
assert(ClusterConfig("QA","0.8.1.1","LOCALHOST:2181").curatorConfig.zkConnect === "localhost:2181")
}
test("serialize and deserialize") {
val cc = ClusterConfig("qa","0.8.2-beta","localhost:2181")
val serialize: String = ClusterConfig.serialize(cc)
val deserialize = ClusterConfig.deserialize(serialize)
assert(deserialize.isSuccess === true)
cc == deserialize.get
}
test("deserialize without version") {
val cc = ClusterConfig("qa","0.8.2-beta","localhost:2181")
val serialize: String = ClusterConfig.serialize(cc)
val noversion = serialize.replace(""","kafkaVersion":"0.8.2-beta"""","")
assert(!noversion.contains("kafkaVersion"))
val deserialize = ClusterConfig.deserialize(noversion)
assert(deserialize.isSuccess === true)
cc == deserialize.get
}
}
| keen/kafka-manager | test/kafka/manager/utils/TestClusterConfig.scala | Scala | apache-2.0 | 1,540 |
/*
* Copyright 2011 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.scrooge
import com.twitter.scrooge.backend.{GeneratorFactory, WithFinagle}
import java.io.File
import java.util.Properties
import scopt.OptionParser
object Main {
def main(args: Array[String]) {
val compiler = new Compiler()
if (!parseOptions(compiler, args)) {
System.exit(1)
}
compiler.run()
}
def parseOptions(compiler: Compiler, args: Seq[String]): Boolean = {
val buildProperties = new Properties
scala.Option(getClass.getResource("build.properties")) foreach { resource =>
buildProperties.load(resource.openStream)
}
val parser = new OptionParser[Compiler]("scrooge") {
help("help") text("show this help screen")
override def showUsageOnError: Boolean = true
opt[Unit]('V', "version") action { (_, c) =>
println("scrooge " + buildProperties.getProperty("version", "0.0"))
println(" build " + buildProperties.getProperty("build_name", "unknown"))
println(" git revision " + buildProperties.getProperty("build_revision", "unknown"))
System.exit(0)
c
} text("print version and quit")
opt[Unit]('v', "verbose") action { (_, c) =>
c.verbose = true
c
} text("log verbose messages about progress")
opt[String]('d', "dest") valueName("<path>") action { (d, c) =>
c.destFolder = d
c
} text("write generated code to a folder (default: %s)".format(compiler.defaultDestFolder))
opt[String]("import-path") unbounded() valueName("<path>") action { (path, c) =>
c.includePaths ++= path.split(File.pathSeparator)
c
} text("[DEPRECATED] path(s) to search for included thrift files (may be used multiple times)")
opt[String]('i', "include-path") unbounded() valueName("<path>") action { (path, c) =>
c.includePaths ++= path.split(File.pathSeparator)
c
} text("path(s) to search for included thrift files (may be used multiple times)")
opt[String]('n', "namespace-map") unbounded() valueName("<oldname>=<newname>") action { (mapping, c) =>
mapping.split("=") match {
case Array(from, to) => {
c.namespaceMappings(from) = to
c
}
}
} text("map old namespace to new (may be used multiple times)")
opt[String]("default-java-namespace") unbounded() valueName("<name>") action { (name, c) =>
c.defaultNamespace = name
c
} text("Use <name> as default namespace if the thrift file doesn't define its own namespace. " +
"If this option is not specified either, then use \\"thrift\\" as default namespace")
opt[Unit]("disable-strict") action { (_, c) =>
c.strict = false
c
} text("issue warnings on non-severe parse errors instead of aborting")
opt[String]("gen-file-map") valueName("<path>") action { (path, c) =>
c.fileMapPath = Some(path)
c
} text("generate map.txt in the destination folder to specify the mapping from input thrift files to output Scala/Java files")
opt[Unit]("dry-run") action { (_, c) =>
c.dryRun = true
c
} text("parses and validates source thrift files, reporting any errors, but" +
" does not emit any generated source code. can be used with " +
"--gen-file-mapping to get the file mapping")
opt[Unit]('s', "skip-unchanged") action { (_, c) =>
c.skipUnchanged = true
c
} text("Don't re-generate if the target is newer than the input")
opt[String]('l', "language") action { (languageString, c) =>
if (GeneratorFactory.languages.toList contains languageString.toLowerCase) {
compiler.language = languageString
c
} else {
println("language option %s not supported".format(languageString))
System.exit(0)
c
}
} text("name of language to generate code in ('experimental-java' and 'scala' are currently supported)")
opt[String]("experiment-flag") valueName("<flag>") action { (flag, c) =>
c.experimentFlags += flag
c
} text("[EXPERIMENTAL] DO NOT USE FOR PRODUCTION. This is meant only for enabling/disabling features for benchmarking")
opt[Unit]("scala-warn-on-java-ns-fallback") action { (_, c) =>
c.scalaWarnOnJavaNSFallback = true
c
} text("Print a warning when the scala generator falls back to the java namespace")
opt[Unit]("finagle") action { (_, c) =>
c.flags += WithFinagle
c
} text("generate finagle classes")
arg[String]("<files...>") unbounded() action { (files, c) =>
c.thriftFiles += files
c
} text("thrift files to compile")
}
parser.parse(args, compiler) map { c =>
true
} getOrElse {
false
}
}
def isUnchanged(file: File, sourceLastModified: Long): Boolean = {
file.exists && file.lastModified >= sourceLastModified
}
}
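// Command-line sketch (flags as declared in the parser above; file names are
// placeholders):
//
//   scrooge -d gen/ -i thrift/ --finagle service.thrift
//   scrooge -l experimental-java -n com.old=com.new service.thrift
//   scrooge --dry-run --gen-file-map map/ service.thrift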
| tellapart/scrooge | scrooge-generator/src/main/scala/com/twitter/scrooge/Main.scala | Scala | apache-2.0 | 5,579 |
import cats.Monoid
import cats.instances.double.catsKernelStdGroupForDouble
import cats.instances.tuple.catsKernelStdCommutativeGroupForTuple2
import scala.collection.Iterator
/**
*
*/
package object piecewise {
@inline
final def cubicRoots(a: Double, b: Double, c: Double, d: Double): Seq[Double] = {
import scala.math.pow
val p = pow(b, 2.0) - 3 * a * c
val q = 9 * a * b * c - 2 * pow(b, 3.0) - 27.0 * pow(a, 3.0) * d
val n = 27.0 * pow(p, 3.0) / pow(q, 2.0)
if (n > 6.75) {
val theta = 1.0 / math.tan(math.sqrt(4 * n / 27.0 - 1.0))
val cosin = (x: Double) => p * (4 * pow(math.cos(theta / 3.0 + x), 2.0) - 3.0)
val x1 = (-b + q / cosin(0)) / (3.0 * a)
val x2 = (-b + q / cosin(120 / scala.math.Pi)) / (3.0 * a)
val x3 = (-b + q / cosin(240 / scala.math.Pi)) / (3.0 * a)
Seq(x1, x2, x3)
}
else {
val root = math.sqrt(pow(q, 2.0) / 4.0 - pow(p, 3.0))
val n = math.cbrt(q / 2.0 + root) + math.cbrt(q / 2.0 - root)
Seq((-b + n) / (3.0 * a))
}
}
@inline
final def cubicRootsVieta(a: Double, b: Double, c: Double): Seq[Double] = {
import scala.math._
val q = (math.pow(a, 2.0) - 3.0 * b) / 9.0
val r = (2 * pow(a, 3.0) - 9.0 * a * b + 27.0 * c) / 54.0
val m = pow(r, 2.0) - pow(q, 3.0)
if (m < 0.0) {
val fi = acos(r / sqrt(pow(q, 3.0)))
val x1 = -2.0 * sqrt(q) * cos(fi / 3.0) - a / 3.0
val x2 = -2.0 * sqrt(q) * cos((fi + 2.0 * math.Pi) / 3.0) - a / 3.0
val x3 = -2.0 * sqrt(q) * cos((fi - 2.0 * math.Pi) / 3.0) - a / 3.0
Seq(x2, x1, x3)
}
else {
val s = cbrt(-r + sqrt(m))
val t = cbrt(-r - sqrt(m))
Seq(s + t - a / 3.0)
}
}
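  // Worked example (Vieta's trigonometric method on a monic cubic):
  // x^3 - 6x^2 + 11x - 6 = 0 has roots 1, 2 and 3, so — up to floating-point
  // error — cubicRootsVieta(-6.0, 11.0, -6.0) returns Seq(3.0, 1.0, 2.0)
  // (the three-root branch returns them in the order x2, x1, x3).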
implicit class Itr[A](val iter: Iterator[A]){
def process[B](fPad: A, first: (A, A, A) => B)
(casual: (A, A, A) => B)
(last: (A, A, A) => B, lPad: A)(implicit M: Monoid[A]): Iterator[B] = {
      var isFirst = true
      var lastEmitted = false
      var p0: A = M.empty
      var p1: A = M.empty
      new Iterator[B] {
        // after the underlying iterator is drained, one more element is still
        // due from `last`, so hasNext must account for it
        override def hasNext: Boolean = iter.hasNext || (!isFirst && !lastEmitted)
        override def next(): B = {
          if (isFirst && iter.hasNext){
            val f0 = iter.next()
            if (iter.hasNext) {
              val f1 = iter.next()
              isFirst = false
              p0 = f0
              p1 = f1
              first(fPad, f0, f1)
            }
            else casual(fPad, f0, lPad)
          }
          else if (iter.hasNext) {
            val p2 = iter.next()
            val r = casual(p0, p1, p2)
            p0 = p1
            p1 = p2
            r
          }
          else {
            lastEmitted = true
            last(p0, p1, lPad)
          }
        }
      }
}
}
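  // Sketch of Itr.process (assumes a cats Monoid[Int] in scope): with
  // identity-like windows,
  //   Iterator(1, 2, 3).process(0, (a, b, c) => (a, b, c))(
  //     (a, b, c) => (a, b, c))((a, b, c) => (a, b, c), 0)
  // yields (0,1,2), (1,2,3), (2,3,0) — both ends padded with fPad/lPad.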
def deriv(xy1: (Double, Double), xy2: (Double, Double)) =
(xy2._2 - xy1._2) / (xy2._1 - xy1._1)
def centralDeriv(xy1: (Double, Double),
xy12: (Double, Double),
xy2: (Double, Double)) = {
deriv(xy1, xy2)
}
def boundDervs(values: List[(Double, Double)]) = {
val rightVals = values takeRight 2
val der1 = deriv(values.head, values.tail.head)
val der2 = deriv(rightVals.head, rightVals.tail.head)
(der1, der2)
}
def derivatives(values: Iterator[(Double, Double)]): Iterator[Double] = {
values.process((0.0, 0.0),
(xy0: (Double, Double), xy1: (Double, Double), xy2: (Double, Double)) => {
deriv(xy1, xy2)
}
)(
(xy0: (Double, Double), xy1: (Double, Double), xy2: (Double, Double)) => {
centralDeriv(xy0, xy1, xy2)
}
)(
(xy0: (Double, Double), xy1: (Double, Double), xy2: (Double, Double)) => {
deriv(xy0, xy1)
}, (0.0, 0.0)
)
}
def derivatives(values: List[(Double, Double)]): Iterator[Double] = {
val onBound = boundDervs(values)
Iterator(onBound._1) ++
values.sliding(3).map(list => deriv(list(0), list(2))) ++
Iterator(onBound._2)
}
def h(low: Double, upp: Double) = upp - low
def delta(yLow: Double, yUpp: Double, low: Double, upp: Double) = {
(yUpp - yLow) / h(low, upp)
}
}
| daniil-timofeev/gridsplines | piecewise/src/main/scala/piecewise/package.scala | Scala | apache-2.0 | 4,082 |
package ecommerce.shipping.app
import akka.actor._
import akka.kernel.Bootable
import com.typesafe.config.{Config, ConfigFactory}
import ecommerce.shipping.{HttpService, ShippingReadFrontConfiguration}
class ShippingReadFrontApp extends Bootable {
val config = ConfigFactory.load()
val system = ActorSystem("shipping-read-front", config)
def startup() = {
new ShippingReadFrontConfiguration {
override def config: Config = ShippingReadFrontApp.this.config
import httpService._
system.actorOf(HttpService.props(interface, port, askTimeout), "http-service")
}
}
def shutdown() = {
system.terminate()
}
} | odd/ddd-leaven-akka-v2 | shipping/read-front/src/main/scala/ecommerce/shipping/app/ShippingReadFrontApp.scala | Scala | mit | 660 |
package isabelle.eclipse.ui.editors
import isabelle.eclipse.core.text.DocumentModel
import isabelle.eclipse.ui.internal.IsabelleUIPlugin.{error, log}
import isabelle.eclipse.ui.util.SWTUtil
import org.eclipse.jface.text.{
BadLocationException,
DocumentEvent,
IDocumentListener,
ITextViewer,
IViewportListener,
JFaceTextUtil
}
import org.eclipse.swt.custom.StyledText
import org.eclipse.swt.events.{ControlAdapter, ControlEvent}
/** Tracker for changes in editor/document - upon change, updates the active perspective
* in the Isabelle document model.
*
* @author Andrius Velykis
*/
trait DocumentPerspectiveTracker {
/** A listener for scrolling events in the editor. Updates the active perspective upon scrolling. */
lazy private val viewerViewportListener = new IViewportListener {
override def viewportChanged(verticalOffset: Int) = updateActivePerspective()
}
/** A listener for resize events in the editor. Updates the active perspective upon editor resize. */
lazy private val viewerControlListener = new ControlAdapter {
override def controlResized(e: ControlEvent) = updateActivePerspective()
}
/** A listener for document events in the editor. Updates the active perspective upon document modification.
* This is necessary because active perspective offsets change when editing.
*/
lazy private val documentListener = new IDocumentListener {
override def documentChanged(event: DocumentEvent) = updateActivePerspective()
override def documentAboutToBeChanged(event: DocumentEvent) {}
}
protected def isabelleModel(): DocumentModel
protected def textViewer(): Option[ITextViewer]
private def textViewerControl: Option[StyledText] =
textViewer flatMap (v => Option(v.getTextWidget))
def initPerspective() {
// listen to scroll and resize events
// (text viewer must be available for init)
textViewer.get.addViewportListener(viewerViewportListener)
textViewer.get.getTextWidget.addControlListener(viewerControlListener)
isabelleModel.document.addDocumentListener(documentListener)
// update perspective with initial values
updateActivePerspective()
}
def disposePerspective() {
textViewer foreach (_.removeViewportListener(viewerViewportListener))
// the widget may be null during disposal
textViewerControl foreach (_.removeControlListener(viewerControlListener))
isabelleModel.document.removeDocumentListener(documentListener)
}
/**
* Updates the active perspective in the model. Finds the region currently
* visible in the editor and marks that in the model as its perspective -
* the area that should be submitted to the prover.
*/
def updateActivePerspective() = SWTUtil.asyncUnlessDisposed(textViewerControl) {
// only update if viewer is available
textViewer foreach { v =>
val (start, end) = visibleRange(v)
isabelleModel.setActivePerspective(math.max(start, 0), math.max(end - start, 0))
}
}
/** Calculates that start and end offsets of the currently visible text range */
private def visibleRange(viewer: ITextViewer): (Int, Int) = {
val visibleLines = JFaceTextUtil.getVisibleModelLines(viewer)
if (visibleLines.getNumberOfLines > 0 && visibleLines.getStartLine >= 0) {
// something is visible
val document = isabelleModel.document
try {
val start = document.getLineOffset(visibleLines.getStartLine)
val endLine = visibleLines.getStartLine + visibleLines.getNumberOfLines
val end = if (endLine >= document.getNumberOfLines - 1) {
document.getLength
} else {
document.getLineOffset(endLine) + document.getLineLength(endLine)
}
(start, math.max(start, end))
} catch {
case e: BadLocationException => {
log(error(Some(e)))
// something is visible, but problems calculating the perspective: use full document
(0, document.getLength)
}
}
} else {
// no perspective
(0, 0)
}
}
}
| andriusvelykis/isabelle-eclipse | isabelle.eclipse.ui/src/isabelle/eclipse/ui/editors/DocumentPerspectiveTracker.scala | Scala | epl-1.0 | 4,120 |
package stoop.attachments
import org.scalatest.FunSpec
import stoop.{TestDoc, DBInfo, DbAdmin}
import java.util.UUID
import org.scalatest.matchers.ShouldMatchers
import scala.concurrent.Await
import java.util.concurrent.TimeUnit
import scala.concurrent.duration.Duration
import spray.json._
case class TestDocWithBody(body: String)
object TestDocWithBody {
import DefaultJsonProtocol._
implicit val jformat = jsonFormat1(TestDocWithBody.apply)
}
class AttachmentUploaderTest extends FunSpec with ShouldMatchers {
val serverUrl = "http://localhost:5984"
describe ("The attachment uploader") {
it ("should handle multiple text attachments") {
val newDBname = "testdb_" + UUID.randomUUID().toString
val admin = DbAdmin(serverUrl)
admin.createDb(newDBname) { dbInfo =>
val uploader = AttachmentUploader("localhost", 5984, newDBname)
val s1 = "this is 21 chars long"
val s2 = "this is 20 chars lon"
val a1 = new NewAttachment("foo.txt", "text/plain", s1.getBytes.length, s1.getBytes)
val a2 = new NewAttachment("bar.txt", "text/plain", s2.getBytes.length, s2.getBytes)
val responseFuture = uploader.newDocWithAttachments(TestDocWithBody("This is a body."), Seq(a1, a2))
val response = Await.result(responseFuture, Duration(10, TimeUnit.SECONDS))
admin.dropDb(newDBname) { _ should be (true) }
}
}
}
}
| MonsantoCo/stoop | src/test/scala/stoop/attachments/AttachmentUploaderTest.scala | Scala | bsd-3-clause | 1,435 |
package filodb
import akka.actor.{ActorRef}
import akka.pattern.ask
import akka.util.Timeout
import com.typesafe.config.Config
import com.typesafe.scalalogging.slf4j.StrictLogging
import java.util.concurrent.ArrayBlockingQueue
import org.apache.spark.sql.{SQLContext, SaveMode, DataFrame, Row}
import org.apache.spark.sql.types.StructType
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.language.implicitConversions
import scala.language.postfixOps
import filodb.core._
import filodb.core.metadata.{Column, DataColumn, Dataset, RichProjection}
import filodb.coordinator.{IngestionCommands, DatasetCommands, RowSource, DatasetCoordinatorActor}
import org.apache.spark.sql.hive.filodb.MetaStoreSync
package spark {
case class DatasetNotFound(dataset: String) extends Exception(s"Dataset $dataset not found")
// For each mismatch: the column name, DataFrame type, and existing column type
case class ColumnTypeMismatch(mismatches: Set[(String, Column.ColumnType, Column.ColumnType)]) extends
Exception(s"Mismatches:\\n${mismatches.toList.mkString("\\n")}")
case class BadSchemaError(reason: String) extends Exception(reason)
/**
* Options for calling saveAsFilo
* @param version the version number to write to
* @param chunkSize an optionally different chunkSize to set new dataset to use
* @param writeTimeout Maximum time to wait for write of each partition to complete
* @param flushAfterInsert if true, ensure all data is flushed in memtables at end of ingestion
* @param resetSchema if true, allows dataset schema (eg partition keys) to be reset when overwriting
* an existing dataset
*/
case class IngestionOptions(version: Int = 0,
chunkSize: Option[Int] = None,
writeTimeout: FiniteDuration = DefaultWriteTimeout,
flushAfterInsert: Boolean = true,
resetSchema: Boolean = false)
}
/**
* Provides base methods for reading from and writing to FiloDB tables/datasets.
* Note that this is not the recommended DataFrame load/save API, please see DefaultSource.scala.
* Configuration is done through setting SparkConf variables, like filodb.cassandra.keyspace
* Here is how you could use these APIs
*
* {{{
* > import filodb.spark._
* > sqlContext.saveAsFiloDataset(myDF, "table1", rowKeys, partitionKeys, segmentKey, createDataset=true)
*
* > sqlContext.filoDataset("table1")
* }}}
*/
package object spark extends StrictLogging {
val DefaultWriteTimeout = 999 minutes
import IngestionCommands._
import DatasetCommands._
import FiloDriver.metaStore
import RowSource._
import filodb.coordinator.client.Client.{parse, actorAsk}
val sparkLogger = logger
val actorCounter = new java.util.concurrent.atomic.AtomicInteger
private[spark] def ingestRddRows(coordinatorActor: ActorRef,
dataset: DatasetRef,
columns: Seq[String],
version: Int,
rows: Iterator[Row],
writeTimeout: FiniteDuration,
partitionIndex: Int): Unit = {
// Use a queue and read off of iterator in this, the Spark thread. Due to the presence of ThreadLocals
// it is not safe for us to read off of this iterator in another (ie Actor) thread
val queue = new ArrayBlockingQueue[Seq[Row]](32)
val props = RddRowSourceActor.props(queue, columns, dataset, version, coordinatorActor)
val actorId = actorCounter.getAndIncrement()
val rddRowActor = FiloExecutor.system.actorOf(props, s"${dataset}_${version}_${partitionIndex}_${actorId}")
implicit val timeout = Timeout(writeTimeout)
val resp = rddRowActor ? Start
val rowChunks = rows.grouped(1000)
var i = 0
    while (rowChunks.hasNext && resp.value.isEmpty) {
queue.put(rowChunks.next)
if (i % 20 == 0) logger.info(s"Ingesting batch starting at row ${i * 1000}")
i += 1
}
queue.put(Nil) // Final marker that there are no more rows
Await.result(resp, writeTimeout) match {
case AllDone =>
case SetupError(UnknownDataset) => throw DatasetNotFound(dataset.dataset)
case SetupError(BadSchema(reason)) => throw BadSchemaError(reason)
case SetupError(other) => throw new RuntimeException(other.toString)
case IngestionErr(errString, None) => throw new RuntimeException(errString)
case IngestionErr(errString, Some(e)) => throw new RuntimeException(errString, e)
}
}
/**
* Syncs FiloDB datasets into Hive Metastore.
* Usually does not need to be called manually, unless you did not use the right HiveContext/Spark
* to create FiloDB tables.
*/
def syncToHive(sqlContext: SQLContext): Unit = {
val config = FiloDriver.initAndGetConfig(sqlContext.sparkContext)
if (config.hasPath("hive.database-name")) {
MetaStoreSync.getHiveContext(sqlContext).foreach { hiveContext =>
MetaStoreSync.syncFiloTables(config.getString("hive.database-name"),
metaStore,
hiveContext)
}
}
}
import filodb.spark.TypeConverters._
private[spark] def dfToFiloColumns(df: DataFrame): Seq[DataColumn] = dfToFiloColumns(df.schema)
private[spark] def dfToFiloColumns(schema: StructType): Seq[DataColumn] = {
schema.map { f =>
DataColumn(0, f.name, "", -1, sqlTypeToColType(f.dataType))
}
}
private[spark] def checkAndAddColumns(dfColumns: Seq[DataColumn],
dataset: DatasetRef,
version: Int): Unit = {
// Pull out existing dataset schema
val schema = parse(metaStore.getSchema(dataset, version)) { schema => schema }
// Translate DF schema to columns, create new ones if needed
val dfSchemaSeq = dfColumns.map { col => col.name -> col }
logger.info(s"Columns from Dataframe Schema: ${dfSchemaSeq.map(_._2).zipWithIndex}")
val dfSchema = dfSchemaSeq.toMap
val matchingCols = dfSchema.keySet.intersect(schema.keySet)
val missingCols = dfSchema.keySet -- schema.keySet
logger.info(s"Matching columns - $matchingCols\\nMissing columns - $missingCols")
// Type-check matching columns
val matchingTypeErrs = matchingCols.collect {
case colName: String if dfSchema(colName).columnType != schema(colName).columnType =>
(colName, dfSchema(colName).columnType, schema(colName).columnType)
}
if (matchingTypeErrs.nonEmpty) throw ColumnTypeMismatch(matchingTypeErrs)
if (missingCols.nonEmpty) {
val newCols = missingCols.map(dfSchema(_).copy(dataset = dataset.dataset, version = version))
parse(metaStore.newColumns(newCols.toSeq, dataset)) { resp =>
if (resp != Success) throw new RuntimeException(s"Error $resp creating new columns $newCols")
}
}
}
// Checks for schema errors via RichProjection.make, and returns created Dataset object
private[spark] def makeAndVerifyDataset(datasetRef: DatasetRef,
rowKeys: Seq[String],
segmentKey: String,
partitionKeys: Seq[String],
chunkSize: Option[Int],
dfColumns: Seq[Column]): Dataset = {
val options = Dataset.DefaultOptions
val options2 = chunkSize.map { newSize => options.copy(chunkSize = newSize) }.getOrElse(options)
val dataset = Dataset(datasetRef, rowKeys, segmentKey, partitionKeys).copy(options = options2)
// validate against schema. Checks key names, computed columns, etc.
RichProjection.make(dataset, dfColumns).recover {
case err: RichProjection.BadSchema => throw BadSchemaError(err.toString)
}
dataset
}
// This doesn't create columns, because that's in checkAndAddColumns.
private[spark] def createNewDataset(dataset: Dataset): Unit = {
logger.info(s"Creating dataset ${dataset.name}...")
actorAsk(FiloDriver.coordinatorActor, CreateDataset(dataset, Nil)) {
case DatasetCreated =>
logger.info(s"Dataset ${dataset.name} created successfully...")
case DatasetError(errMsg) =>
throw new RuntimeException(s"Error creating dataset: $errMsg")
}
}
private[spark] def deleteDataset(dataset: DatasetRef): Unit = {
logger.info(s"Deleting dataset $dataset")
parse(metaStore.deleteDataset(dataset)) { resp => resp }
}
implicit def sqlToFiloContext(sql: SQLContext): FiloContext = new FiloContext(sql)
} | markhamstra/FiloDB | spark/src/main/scala/filodb.spark/package.scala | Scala | apache-2.0 | 8,759 |
/* Copyright (c) 2015 Lucas Satabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package lingua
import scala.annotation.tailrec
package object fst2 {
type State = Int
def lcp[T](s1: Seq[T], s2: Seq[T]): Seq[T] =
s1.zip(s2).takeWhile { case (t1, t2) => t1 == t2 }.unzip._1
def lcp[T](ss: Iterable[Seq[T]]): Seq[T] =
ss.foldLeft(None: Option[Seq[T]]) {
case (None, s) => Some(s)
case (Some(s1), s2) => Some(lcp(s1, s2))
}.getOrElse(Seq.empty[T])
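  // e.g. lcp(Seq(1, 2, 3), Seq(1, 2, 5)) == Seq(1, 2)
  // and lcp(List(Seq('a', 'b'), Seq('a', 'c'))) == Seq('a')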
implicit def SeqOrdering[T: Ordering]: Ordering[Seq[T]] =
new Ordering[Seq[T]] {
def compare(seq1: Seq[T], seq2: Seq[T]): Int = {
val size1 = seq1.size
val size2 = seq2.size
val size = math.min(size1, size2)
@tailrec
def loop(idx: Int): Int =
if (idx >= size) {
if (size1 == size2) {
// both are equal
0
} else if (size2 > size1) {
// first is prefix of second, then it is smaller
-1
} else {
// second is prefix of first, then it is greater
1
}
} else {
val v1 = seq1(idx)
val v2 = seq2(idx)
val order = implicitly[Ordering[T]].compare(v1, v2)
if (order == 0) {
loop(idx + 1)
} else if (order < 0) {
-1
} else {
1
}
}
loop(0)
}
}
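  // Lexicographic with shorter-prefix-first, e.g.
  // implicitly[Ordering[Seq[Int]]].compare(Seq(1, 2), Seq(1, 2, 3)) < 0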
}
| satabin/lingua | fst/src/main/scala/lingua/fst2/package.scala | Scala | apache-2.0 | 1,993 |
package scalan.util
import scalan.{BaseNestedTests, DFunc}
import debox.{Set => DSet, Buffer => DBuffer}
class GraphUtilTests extends BaseNestedTests {
import GraphUtil._
describe("Collecting dependencies") {
val graph = Array(
List(1, 2), // 0
List(3), // 1
List(4), // 2
List(5, 6), // 3
List(6), // 4
List(6), // 5
List() // 6
)
val neighbours: DFunc[Int, DBuffer[Int]] = new DFunc[Int, DBuffer[Int]] {
def apply(node: Int) = {
val ns = DBuffer.empty[Int]
graph(node) foreach (ns.+=)
ns
}
}
it("depthFirstSetFrom") {
depthFirstSetFrom(DBuffer(6))(neighbours) shouldBe (DSet(6))
depthFirstSetFrom(DBuffer(5))(neighbours) shouldBe (DSet(5, 6))
depthFirstSetFrom(DBuffer(3))(neighbours) shouldBe (DSet(3, 5, 6))
depthFirstSetFrom(DBuffer(2))(neighbours) shouldBe (DSet(2, 4, 6))
depthFirstSetFrom(DBuffer(0))(neighbours) shouldBe (DSet(0, 1, 2, 3, 4, 5, 6))
}
it("depthFirstOrderFrom") {
val succ: DFunc[Int, DBuffer[Int]] = new DFunc[Int, DBuffer[Int]] {
def apply(id: Int) = DBuffer(graph(id):_*)
}
depthFirstOrderFrom(DBuffer(6), succ) shouldBe (DBuffer(6))
depthFirstOrderFrom(DBuffer(5), succ) shouldBe (DBuffer(6, 5))
depthFirstOrderFrom(DBuffer(3), succ) shouldBe (DBuffer(6, 5, 3))
depthFirstOrderFrom(DBuffer(2), succ) shouldBe (DBuffer(6, 4, 2))
depthFirstOrderFrom(DBuffer(0), succ) shouldBe (DBuffer(6, 5, 3, 1, 4, 2, 0))
}
}
}
| ScorexFoundation/sigmastate-interpreter | common/src/test/scala/scalan/util/GraphUtilTests.scala | Scala | mit | 1,558 |
package com.natalinobusa.mavr
import akka.actor.{ Actor }
import spray.routing.HttpService
import spray.routing.directives.PathDirectives._
import spray.json.DefaultJsonProtocol
import spray.httpx.SprayJsonSupport.sprayJsonMarshaller
import spray.httpx.SprayJsonSupport.sprayJsonUnmarshaller
case class Person(fname: String, v: Int, f: Double)
object JsonImplicits extends DefaultJsonProtocol {
implicit val impPerson = jsonFormat3(Person)
}
// we don't implement our route structure directly in the service actor because
// we want to be able to test it independently, without having to spin up an actor
class MavrApiService extends Actor with HttpService {
import JsonImplicits._
// the HttpService trait defines only one abstract member, which
// connects the services environment to the enclosing actor or test
def actorRefFactory = context
//curl -vv -H "Content-Type: application/json" localhost:8888/api/v1/s/123/2013/11/12/22
//curl -vv -X POST -H "Content-Type: application/json" -d '{"fname":"a", "v":3, "f":1.23}' localhost:8888/api/v1/s/123
//curl -vv -H "Content-Type: application/json" localhost:8888/api/v1/s/123
val serviceRoute = {
pathPrefix("api" / "v1") {
pathPrefix("s" / LongNumber) {
sid =>
get {
path(IntNumber / IntNumber / IntNumber / IntNumber) {
(year, month, day, hour) =>
complete(s"sid $sid, $year-$month-$day, $hour:00:00")
}
} ~
post {
entity(as[Person]) { person =>
complete(s"posted sid $sid $person")
}
} ~
get {
complete {
Person("otto", 8, 8.8)
}
}
}
}
}
// this actor only runs our route, but you could add
// other things here, like request stream processing,
// timeout handling or alternative handler registration
def receive = runRoute(serviceRoute)
} | natalinobusa/mavr | src/main/scala/com/natalinobusa/mavr/MavrApiService.scala | Scala | apache-2.0 | 1,973 |
import scala.language.higherKinds
object t9451 {
implicit def impl[I[_]]: {
type F[X] = { type Self = I[X] }
} = new {
type F[X] = { type Self = I[X] }
}
implicitly[{type F[X] = { type Self = Iterable[X] }}]
}
| lrytz/scala | test/files/pos/t9451.scala | Scala | apache-2.0 | 227 |
package com.dslplatform.test
import java.io.ByteArrayInputStream
import com.dslplatform.api.client.{HttpClient, HttpHeaderProvider, SettingsHeaderProvider}
import org.specs2.mutable._
class AuthHeaderTest extends Specification {
override def is = s2"""
Header Provider is resolved from the ServiceLocator
provide with auth header $auth
provide with project id $pid
provide custom $custom
"""
private val withAuthHeader =
"""
|auth=someAuth
|api-url=https://dsl-platform.com/test
|package-name=model
""".stripMargin
private val withPidHeader =
"""
|username=user
|project-id=0-0-0-0-0
|api-url=https://dsl-platform.com/test
|package-name=model
""".stripMargin
def auth = {
val properties = new java.util.Properties()
properties.load(new ByteArrayInputStream(withAuthHeader.getBytes("UTF-8")))
val locator = com.dslplatform.api.client.Bootstrap.init(properties)
try {
locator.resolve[SettingsHeaderProvider].getHeaders("Authorization").contains("someAuth") must beTrue
} finally {
locator.resolve[HttpClient].shutdown()
}
}
def pid = {
val properties = new java.util.Properties()
properties.load(new ByteArrayInputStream(withPidHeader.getBytes("UTF-8")))
val locator = com.dslplatform.api.client.Bootstrap.init(properties)
try {
locator.resolve[SettingsHeaderProvider].getHeaders.get("Authorization") must beSome
} finally {
locator.resolve[HttpClient].shutdown()
}
}
def custom = {
val locator = com.dslplatform.api.client.Bootstrap.init("/test-project.props",
Map[Object, AnyRef](classOf[HttpHeaderProvider] -> new HttpHeaderProvider {
override def getHeaders: Map[String, String] = Map("Do" -> "More")
}))
try {
locator.resolve[HttpHeaderProvider].getHeaders("Do") === "More"
} finally {
locator.resolve[HttpClient].shutdown()
}
}
}
| ngs-doo/dsl-client-scala | http/src/test/scala/com/dslplatform/test/AuthHeaderTest.scala | Scala | bsd-3-clause | 2,029 |
/*
* Copyright (c) 2017 Georgi Krastev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
/**
* Like `Option.orElse` on the type level and like `Either` on the value level.
*
* Instead of left and right constructors `OrElse` has primary and secondary implicits that lazily
* try to resolve first a value of type `A` or otherwise a value of type `B`.
*/
sealed trait OrElse[+A, +B] extends Serializable {
def fold[C](prim: A => C, sec: B => C): C
def unify[C >: A](implicit ev: B <:< C): C = fold(identity, ev)
}
final class Primary[+A](value: A) extends OrElse[A, Nothing] {
def fold[C](prim: A => C, sec: Nothing => C) = prim(value)
}
final class Secondary[+B](value: => B) extends OrElse[Nothing, B] {
def fold[C](prim: Nothing => C, sec: B => C) = sec(value)
}
object OrElse extends OrElse0 {
implicit def primary[A, B](implicit a: A): A OrElse B =
new Primary(a)
}
private[shapeless] abstract class OrElse0 {
implicit def secondary[A, B](implicit b: Lazy[B]): A OrElse B =
new Secondary(b.value)
}
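// A minimal usage sketch (Show and Fallback are hypothetical type classes,
// `.show`/`.default` are assumed members):
//
//   def describe[A](implicit ev: Show[A] OrElse Fallback[A]): String =
//     ev.fold(_.show, _.default)
//
// When a Show[A] instance is in scope the Primary branch wins; otherwise the
// lazily evaluated Secondary supplies the Fallback[A].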
| wheaties/shapeless | core/src/main/scala/shapeless/orelse.scala | Scala | apache-2.0 | 1,563 |
package skinny.assets
import org.scalatest._
class CoffeeScriptCompilerSpec extends FlatSpec with Matchers {
behavior of "CoffeeScriptCompiler"
it should "compile code" in {
val compiler = CoffeeScriptCompiler()
val js = compiler.compile(
"person.coffee",
"""class Person
| constructor: (name, email) ->
| @name = name
| @email = email
| name: 'Anonymous'
| email: null
| sayHello: -> console.log "My name is #{@name}!"
|
|bob = new Person('Bob')
|bob.sayHello()
""".stripMargin
)
js.replaceFirst("^// Generated by CoffeeScript .+\\n", "") should equal(
"""(function() {
| var Person, bob;
|
| Person = (function() {
| function Person(name, email) {
| this.name = name;
| this.email = email;
| }
|
| Person.prototype.name = 'Anonymous';
|
| Person.prototype.email = null;
|
| Person.prototype.sayHello = function() {
| return console.log("My name is " + this.name + "!");
| };
|
| return Person;
|
| })();
|
| bob = new Person('Bob');
|
| bob.sayHello();
|
|}).call(this);
|""".stripMargin
)
}
}
| seratch/skinny-framework | assets/src/test/scala/skinny/assets/CoffeeScriptCompilerSpec.scala | Scala | mit | 1,382 |
package com.modelfabric.akka.cluster
import com.modelfabric.akka.actor._
/**
* A ClusterSupervisor handles events for a node in the BKNOW cluster
*/
trait ClusterSupervisor
extends UnknownMessageHandler
with LocalMessageHandler
with MemberEventSubscriber
with MemberEventHandler
with ClusterStateHandler {
def receive = memberEventMessage orElse clusterStateMessage orElse localMessage orElse unknownMessage
}
| modelfabric/scala-utils | src/main/scala/com/modelfabric/akka/cluster/ClusterSupervisor.scala | Scala | mit | 427 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.component.zookeeper
import java.net.InetAddress
import java.nio.charset.Charset
import akka.actor.Actor
import akka.util.Timeout
import org.apache.curator.framework.CuratorFramework
import com.typesafe.config.Config
import com.webtrends.harness.component.zookeeper.config.ZookeeperSettings
import com.webtrends.harness.logging.ActorLoggingAdapter
import net.liftweb.json.JsonDSL._
import net.liftweb.json._
import org.apache.zookeeper.KeeperException.NoNodeException
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
object NodeRegistration {
/**
* Get the node/cluster base path
* @param config the system config
* @return the base path
*/
def getBasePath(config: Config): String = {
val zookeeperSettings = ZookeeperSettings(config.getConfig("wookiee-zookeeper"))
val basePath = if (config.hasPath("wookiee-cluster.base-path")) {
config.getConfig("wookiee-cluster").getString("base-path")
} else {
zookeeperSettings.basePath
}
s"$basePath/${zookeeperSettings.dataCenter}_${zookeeperSettings.pod}/${zookeeperSettings.version}"
}
}
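// Sketch: with basePath "/discovery", dataCenter "dc1", pod "pod1" and
// version "1.0" (illustrative values), getBasePath yields
// "/discovery/dc1_pod1/1.0", and registered node paths take the form
// "<base>/nodes/<host>:<port>".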
trait NodeRegistration extends ZookeeperAdapter {
this: Actor with ActorLoggingAdapter =>
implicit val timeout = Timeout(5 seconds)
import context.dispatcher
val utf8 = Charset.forName("UTF-8")
val address = SystemExtension(context.system).address
  val port = address.port.getOrElse(context.system.settings.config.getInt("akka.remote.netty.tcp.port"))
private def getAddress: String = {
val host = if (address.host.isEmpty || address.host.get.equalsIgnoreCase("localhost") || address.host.get.equals("127.0.0.1")) {
InetAddress.getLocalHost.getCanonicalHostName
} else {
address.host.get
}
s"${host}:${port}"
}
def unregisterNode(curator: CuratorFramework, zookeeperSettings: ZookeeperSettings) = {
val path = s"${NodeRegistration.getBasePath(context.system.settings.config)}/nodes/${getAddress}"
Try({
// Call Curator directly because this method is usually called after the actor's queue has been disabled
curator.delete.forPath(path)
}).recover({
case e: NoNodeException =>
// do nothing
case e: Throwable =>
log.warn(e, "The node {} could not be deleted", path)
})
}
def registerNode(curator: CuratorFramework, zookeeperSettings: ZookeeperSettings, clusterEnabled: Boolean) {
val add = getAddress
val path = s"${NodeRegistration.getBasePath(context.system.settings.config)}/nodes/${add}"
// Delete the node first
deleteNode(path) onComplete {
case Success(_) => log.debug("The node {} was deleted", path)
case Failure(t) => log.error(t, "The node {} could not be deleted", path)
}
log.info("Registering harness to path: " + path)
val json = compact(render(("address" -> add.toString) ~ ("cluster-enabled" -> clusterEnabled)))
createNode(path, true, Some(json.getBytes(utf8))) onComplete {
case Success(_) => log.debug("The node {} was created", path)
case Failure(t) => log.error(t, "Failed to create node registration for {}", path)
}
}
}
| mjwallin1/wookiee-zookeeper | src/main/scala/com/webtrends/harness/component/zookeeper/NodeRegistration.scala | Scala | apache-2.0 | 3,933 |
package org.openeyes.api.services
import org.bson.types.ObjectId
import org.openeyes.api.models._
/**
* Created by stu on 02/09/2014.
*/
object PatientService {
def find(id: String): Option[Patient] = {
Patient.findOneById(new ObjectId(id))
}
def findAll = {
Patient.findAll().toSeq
}
def search(searchTerm: String) = {
Patient.search(searchTerm)
}
def create(patient: Patient) {
Patient.save(patient)
}
}
| openeyes/poc-backend | src/main/scala/org/openeyes/api/services/PatientService.scala | Scala | gpl-3.0 | 446 |
import javax.persistence.{EntityManager, PersistenceContext}
import <%= packageName %>.entities.Teste
import org.junit.Test
import org.junit.runner.RunWith
import org.scalatest.MustMatchers
import org.scalatest.junit.{JUnitSuite, AssertionsForJUnit}
import org.springframework.test.context.jdbc.Sql
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner
import org.springframework.transaction.annotation.Transactional
import test.annotations.SpringIntegrationTest
/**
 * Example of a test using the database
 * Tip: Use Spring profiles to configure different database connections
 * The only way to run Spring tests with context annotations like @Sql and @Transactional is through JUnit or TestNG
*
* @author Giovanni Silva
*/
@RunWith(classOf[SpringJUnit4ClassRunner])
@SpringIntegrationTest
@Sql(Array("/sql/dbtest.sql"))
@Transactional
class DBSpec extends JUnitSuite with MustMatchers{
@PersistenceContext
val em: EntityManager = null
@Test
def DB_Should_List_Save_Delete: Unit = {
val dbTestSql = em.find(classOf[Teste], 1L)
dbTestSql must not be null
dbTestSql.nome must equal("Hello World")
val test = new Teste()
test.id = 2
test.nome = "Alan"
em.persist(test)
val find = em.find(classOf[Teste], 2L)
find.nome must equal("Alan")
}
@Test
def SpringTest_Should_RollbackTransaction(): Unit ={
val find = em.find(classOf[Teste], 2L)
find must be (null)
}
}
| giovannicandido/slush-spring-aurelia | templates/server/src/it/scala/DBSpec.scala | Scala | mit | 1,434 |
package models
import java.util.UUID
import com.mohiva.play.silhouette.api.{ Identity, LoginInfo }
import play.api.libs.json.Reads._
import play.api.libs.functional.syntax._
import play.api.libs.json._
/**
* The user object.
*
* @param userID The unique ID of the user.
* @param loginInfo The linked login info.
* @param firstName Maybe the first name of the authenticated user.
* @param lastName Maybe the last name of the authenticated user.
* @param fullName Maybe the full name of the authenticated user.
* @param email Maybe the email of the authenticated provider.
* @param avatarURL Maybe the avatar URL of the authenticated provider.
*/
case class User(
userID: Option[Long],
firstName: Option[String],
lastName: Option[String],
fullName: Option[String],
email: Option[String],
avatarURL: Option[String],
providerID: String = "",
providerKey: String = "",
roles: Set[String] = Set("USER")) extends Identity {
def loginInfo = new LoginInfo(providerID, providerKey)
def toTuple() = (userID, firstName, lastName, fullName, email, avatarURL, providerID, providerKey)
def addRole(role: String) = copy(roles = roles + role)
}
object User {
implicit val userFormat = Json.format[User]
def withoutRoles(t: (Option[Long], Option[String], Option[String], Option[String], Option[String], Option[String], String, String)) =
User(t._1, t._2, t._3, t._4, t._5, t._6, t._7, t._8, Set())
def toTuple(u: User) = Some((
u.userID,
u.firstName,
u.lastName,
u.fullName,
u.email,
u.avatarURL,
u.providerID,
u.providerKey))
}
| Empia/corpreg | models/User.scala | Scala | apache-2.0 | 1,655 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.controller
import org.apache.predictionio.annotation.DeveloperApi
import org.apache.predictionio.core.BaseAlgorithm
import org.apache.predictionio.workflow.PersistentModelManifest
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
/** Base class of a parallel algorithm.
*
* A parallel algorithm can be run in parallel on a cluster and produces a
* model that can also be distributed across a cluster.
*
* If your input query class requires custom JSON4S serialization, the most
* idiomatic way is to implement a trait that extends [[CustomQuerySerializer]],
* and mix that into your algorithm class, instead of overriding
* [[querySerializer]] directly.
*
* To provide evaluation feature, one must override and implement the
* [[batchPredict]] method. Otherwise, an exception will be thrown when pio eval`
* is used.
*
* @tparam PD Prepared data class.
* @tparam M Trained model class.
* @tparam Q Input query class.
* @tparam P Output prediction class.
* @group Algorithm
*/
abstract class PAlgorithm[PD, M, Q, P]
extends BaseAlgorithm[PD, M, Q, P] {
def trainBase(sc: SparkContext, pd: PD): M = train(sc, pd)
/** Implement this method to produce a model from prepared data.
*
* @param pd Prepared data for model training.
* @return Trained model.
*/
def train(sc: SparkContext, pd: PD): M
def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
: RDD[(Long, P)] = batchPredict(bm.asInstanceOf[M], qs)
/** To provide evaluation feature, one must override and implement this method
* to generate many predictions in batch. Otherwise, an exception will be
* thrown when `pio eval` is used.
*
* The default implementation throws an exception.
*
* @param m Trained model produced by [[train]].
* @param qs An RDD of index-query tuples. The index is used to keep track of
* predicted results with corresponding queries.
*/
def batchPredict(m: M, qs: RDD[(Long, Q)]): RDD[(Long, P)] =
throw new NotImplementedError("batchPredict not implemented")
def predictBase(baseModel: Any, query: Q): P = {
predict(baseModel.asInstanceOf[M], query)
}
/** Implement this method to produce a prediction from a query and trained
* model.
*
* @param model Trained model produced by [[train]].
* @param query An input query.
* @return A prediction.
*/
def predict(model: M, query: Q): P
/** :: DeveloperApi ::
* Engine developers should not use this directly (read on to see how parallel
* algorithm models are persisted).
*
* In general, parallel models may contain multiple RDDs. It is not easy to
* infer and persist them programmatically since these RDDs may be
* potentially huge. To persist these models, engine developers need to mix
* the [[PersistentModel]] trait into the model class and implement
* [[PersistentModel.save]]. If it returns true, a
* [[org.apache.predictionio.workflow.PersistentModelManifest]] will be
* returned so that during deployment, PredictionIO will use
* [[PersistentModelLoader]] to retrieve the model. Otherwise, Unit will be
* returned and the model will be re-trained on-the-fly.
*
* @param sc Spark context
* @param modelId Model ID
* @param algoParams Algorithm parameters that trained this model
* @param bm Model
* @return The model itself for automatic persistence, an instance of
* [[org.apache.predictionio.workflow.PersistentModelManifest]] for manual
* persistence, or Unit for re-training on deployment
*/
@DeveloperApi
override
def makePersistentModel(
sc: SparkContext,
modelId: String,
algoParams: Params,
bm: Any): Any = {
val m = bm.asInstanceOf[M]
if (m.isInstanceOf[PersistentModel[_]]) {
if (m.asInstanceOf[PersistentModel[Params]].save(
modelId, algoParams, sc)) {
PersistentModelManifest(className = m.getClass.getName)
} else {
()
}
} else {
()
}
}
}
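// A minimal subclass sketch (MyData/MyModel/MyQuery/MyResult are placeholder
// types, not part of this API):
//
//   class MyAlgo extends PAlgorithm[MyData, MyModel, MyQuery, MyResult] {
//     def train(sc: SparkContext, pd: MyData): MyModel = ???
//     def predict(model: MyModel, query: MyQuery): MyResult = ???
//     // optionally override batchPredict to enable `pio eval`
//   }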
| himanshudhami/PredictionIO | core/src/main/scala/org/apache/predictionio/controller/PAlgorithm.scala | Scala | apache-2.0 | 4,913 |
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import preprocess.Preprocessor
/**
  * Preprocessing, including: data cleaning, label indexing, tokenization and vectorization
  */
object PreprocessDemo extends Serializable {
def main(args: Array[String]): Unit = {
Logger.getLogger("org").setLevel(Level.WARN)
// HanLP.Config.enableDebug()
val spark = SparkSession
.builder
.master("local[2]")
.appName("Preprocess Demo")
.getOrCreate()
val filePath = "data/tmp"
val preprocessor = new Preprocessor
preprocessor.train(filePath, spark)
spark.stop()
}
}
| HGladiator/MyCodes | Scala/PreprocessDemo.scala | Scala | mit | 642 |
package io.udash.web.guide.demos.rpc
import io.udash.web.SeleniumTest
class RpcSerializationTest extends SeleniumTest {
val rpcFrontendUrl = "/rpc/serialization"
"RpcSerialization view" should {
driver.get(server.createUrl(rpcFrontendUrl))
"contain example button" in {
eventually {
driver.findElementById("gencodec-demo")
}
}
"receive msg from backend" in {
val callDemo = driver.findElementById("gencodec-demo")
callDemo.isEnabled should be(true)
callDemo.click()
eventually {
driver.findElementById("gencodec-demo-int").getText shouldNot be(empty)
driver.findElementById("gencodec-demo-double").getText shouldNot be(empty)
driver.findElementById("gencodec-demo-string").getText shouldNot be(empty)
driver.findElementById("gencodec-demo-seq").getText shouldNot be(empty)
driver.findElementById("gencodec-demo-map").getText shouldNot be(empty)
driver.findElementById("gencodec-demo-caseClass").getText shouldNot be(empty)
driver.findElementById("gencodec-demo-cls-int").getText shouldNot be(empty)
driver.findElementById("gencodec-demo-cls-string").getText shouldNot be(empty)
driver.findElementById("gencodec-demo-cls-var").getText shouldNot be(empty)
driver.findElementById("gencodec-demo-sealedTrait").getText shouldNot be(empty)
}
}
}
}
| UdashFramework/udash-guide | selenium/src/test/scala/io/udash/web/guide/demos/rpc/RpcSerializationTest.scala | Scala | gpl-3.0 | 1,405 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.feature.image
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.ImageFeature
import com.intel.analytics.bigdl.dllib.feature.common.Preprocessing
import com.intel.analytics.bigdl.dllib.nnframes.NNImageSchema
import org.apache.spark.sql.Row
import scala.reflect.ClassTag
/**
* a Preprocessing that converts a Spark Row to a BigDL ImageFeature.
*/
class RowToImageFeature[T: ClassTag]()(implicit ev: TensorNumeric[T])
extends Preprocessing[Row, ImageFeature] {
override def apply(prev: Iterator[Row]): Iterator[ImageFeature] = {
prev.map { row =>
NNImageSchema.row2IMF(row)
}
}
}
object RowToImageFeature {
def apply[T: ClassTag]()(implicit ev: TensorNumeric[T]): RowToImageFeature[T] =
new RowToImageFeature[T]()
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/feature/image/RowToImageFeature.scala | Scala | apache-2.0 | 1,486 |
package org.powlab.jeye.tests
import org.powlab.jeye.tests.shortcirc._
import org.powlab.jeye.decompile._
package shortcirc {
class ShortCircuitAssignTest1Test extends DecompileTestClass(classOf[ShortCircuitAssignTest1], true) {}
class ShortCircuitAssignTest1aTest extends DecompileTestClass(classOf[ShortCircuitAssignTest1a], true) {}
class ShortCircuitAssignTest2Test extends DecompileTestClass(classOf[ShortCircuitAssignTest2], true) {}
class ShortCircuitAssignTest3Test extends DecompileTestClass(classOf[ShortCircuitAssignTest3], true) {}
class ShortCircuitAssignTest4Test extends DecompileTestClass(classOf[ShortCircuitAssignTest4], true) {}
class ShortCircuitAssignTest4aTest extends DecompileTestClass(classOf[ShortCircuitAssignTest4a], true) {}
// TODO FAIL: проблемы в определении if патернов
@org.junit.Ignore
class ShortCircuitAssignTest4bTest extends DecompileTestClass(classOf[ShortCircuitAssignTest4b], true) {}
class ShortCircuitAssignTest4cTest extends DecompileTestClass(classOf[ShortCircuitAssignTest4c], true) {}
// TODO FAIL: проблемы в определении if патернов
@org.junit.Ignore
class ShortCircuitAssignTest4dTest extends DecompileTestClass(classOf[ShortCircuitAssignTest4d], true) {}
// TODO FAIL: проблемы в определении if патернов
@org.junit.Ignore
class ShortCircuitAssignTest4eTest extends DecompileTestClass(classOf[ShortCircuitAssignTest4e], true) {}
// TODO FAIL: проблемы в определении if патернов
@org.junit.Ignore
class ShortCircuitAssignTest4fTest extends DecompileTestClass(classOf[ShortCircuitAssignTest4f], true) {}
class ShortCircuitAssignTest5Test extends DecompileTestClass(classOf[ShortCircuitAssignTest5], true) {}
class ShortCircuitAssignTest6Test extends DecompileTestClass(classOf[ShortCircuitAssignTest6], true) {}
// TODO FAIL: проблемы в определении if патернов при отрицании
@org.junit.Ignore
class ShortCircuitAssignTest7Test extends DecompileTestClass(classOf[ShortCircuitAssignTest7], true) {}
} | powlab/jeye | src/test/scala/org/powlab/jeye/tests/ShortcircTests.scala | Scala | apache-2.0 | 2,162 |
package com.blinkbox.books.purchasing.admin
import akka.actor.{ ActorRefFactory, ActorSystem, Props }
import akka.util.Timeout
import com.blinkbox.books.config.Configuration
import com.blinkbox.books.logging.Loggers
import com.blinkbox.books.spray.{ HealthCheckHttpService, HttpServer }
import com.typesafe.scalalogging.StrictLogging
import spray.can.Http
import spray.http.Uri.Path
import spray.routing.HttpServiceActor
import scala.concurrent.duration._
object Main extends App with Configuration with Loggers with StrictLogging {
logger.info("App Starting")
val appConfig = AppConfig(config)
val system = ActorSystem("purchasing-service", config)
val service = system.actorOf(Props(classOf[AdminApiActor], new AdminApi), "purchasing-service-admin")
val localUrl = appConfig.api.localUrl
HttpServer(Http.Bind(service, localUrl.getHost, localUrl.getPort))(system, system.dispatcher, Timeout(10.seconds))
logger.info("App Started")
}
class AdminApiActor(adminApi: AdminApi) extends HttpServiceActor {
val healthService = new HealthCheckHttpService {
override val basePath: Path = Path("/")
override implicit def actorRefFactory: ActorRefFactory = AdminApiActor.this.actorRefFactory
}
override def receive = runRoute(healthService.routes ~ adminApi.route)
}
| blinkboxbooks/skeleton-service | admin/src/main/scala/com/blinkbox/books/purchasing/admin/Main.scala | Scala | mit | 1,289 |
package com.github.cmanou.scrimage.utils.dithering.filters
import com.sksamuel.scrimage.Image
import org.scalatest.{BeforeAndAfter, FunSuite, OneInstancePerTest}
class SierraFilterTest extends FunSuite with BeforeAndAfter with OneInstancePerTest {
val original = Image.fromStream(getClass.getResourceAsStream("/macosx-desktop.png"))
test("filter output matches expected") {
val expected = Image.fromStream(getClass.getResourceAsStream("/com/github/cmanou/scrimage/utils/dithering/filters/macosx-desktop-sierra3.png"))
assert(original.filter(SierraFilter()) === expected)
}
} | cmanou/scrimage-utils | src/test/scala/com/github/cmanou/scrimage/utils/dithering/filters/SierraFilterTest.scala | Scala | mit | 592 |
/**
* Copyright 2011-2012 eBusiness Information, Groupe Excilys (www.excilys.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.excilys.ebi.gatling.core.util
import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import com.excilys.ebi.gatling.core.util.PaddableStringBuilder.toPaddable
@RunWith(classOf[JUnitRunner])
class PaddableStringBuilderSpec extends Specification {
"appendLeftPaddedString" should {
"pad correctly a two digits number" in {
new StringBuilder().appendLeftPaddedString("12", 6).toString must beEqualTo(" 12")
}
"not pad when the number of digits is higher than the expected string size" in {
new StringBuilder().appendLeftPaddedString("123456", 4).toString must beEqualTo("123456")
}
}
"appendLeftPaddedString" should {
"pad correctly a two digits number" in {
new StringBuilder().appendRightPaddedString("12", 6).toString must beEqualTo("12 ")
}
"not pad when the number of digits is higher than the expected string size" in {
new StringBuilder().appendRightPaddedString("123456", 4).toString must beEqualTo("123456")
}
}
} | Tjoene/thesis | Case_Programs/gatling-1.4.0/gatling-core/src/test/scala/com/excilys/ebi/gatling/core/util/PaddableStringBuilderSpec.scala | Scala | gpl-2.0 | 1,673 |
package com.bisphone.util.syntax
import com.bisphone.std._
/**
* @author Reza Samei <[email protected]>
*/
final class StdFailureAccessors[T<:Throwable](val self: T) extends AnyVal {
@inline def cause: Option[Throwable] = Option(self.getCause)
@inline def subject: String = self.getMessage
@inline def tryFailure[R]: StdTry[R] = StdFailure(self)
} | reza-samei/bisphone-std | src/main/scala/com/bisphone/util/syntax/StdFailureAccessors.scala | Scala | mit | 364 |
package models
import lib.joda._
import lib.neo4j.data._
import lib.neo4j.data.Parser._
import lib.neo4j.Neo4j
import models.enums._
import net.flatmap.ace._
import net.flatmap.ace.CypherParser._
import play.api.Logger
import play.api.mvc.RequestHeader
import scala.language.postfixOps
case class Instance(
id: DbValue[Long] = Unassigned,
eId: Long = 0,
author: DbValue[Profile] = Unassigned,
created: DateTime = defaultDateTime,
modified: DateTime = defaultDateTime,
values: Instance.Assignments = Map.empty
)
object Instance {
type Assignments = Map[String, Option[Any]]
/* modelName -> Instance */
type Qualified = (String, Instance)
/** The Neo4j parser for a single [[models.Instance]] instance. */
def simple(column: String, rModel: Model.Rich): RowParser[Instance] = {
val baseParser =
property(column) ~
property[Long](column, "eId") ~
property[DateTime](column, "created") ~
property[DateTime](column, "modified") map {
case id ~ eId ~ created ~ modified =>
Instance(id = id, eId = eId, created = created, modified = modified)
}
val attrParsers = for(attr <- rModel._2) yield attr.datatype match {
case Datatype.Boolean => Some(attr.name -> propertyOpt[Boolean](column, "_" + attr.name))
case Datatype.Integer => Some(attr.name -> propertyOpt[Int](column, "_" + attr.name))
case Datatype.Double => Some(attr.name -> propertyOpt[Double](column, "_" + attr.name))
case Datatype.String => Some(attr.name -> propertyOpt[String](column, "_" + attr.name))
case Datatype.Text => Some(attr.name -> propertyOpt[String](column, "_" + attr.name))
case Datatype.Date => Some(attr.name -> propertyOpt[Date](column, "_" + attr.name))
case Datatype.Time => Some(attr.name -> propertyOpt[Time](column, "_" + attr.name))
case Datatype.Relation => None
}
attrParsers.foldLeft(baseParser)((ps, p) => p match {
case None => ps
case Some((name, p)) => ps ~ p map {
case i ~ v => i.copy(values = i.values + (name -> v))
}
})
}
/**
* The Neo4j parser for a single [[models.Instance]] instance and the
* corresponding [[models.Profile]].
*/
def withAuthor(instColumn: String, authorColumn: String, rModel: Model.Rich): RowParser[Instance] =
Profile.withAuthor(Instance.simple(instColumn, rModel), authorColumn) map {
case (v, a) => v.copy(author = a)
}
  /** Returns the total number of [[models.Instance]]s of a model and its submodels, counting either live or soft-deleted ones. */
def count(modelId: Long, countDeleted: Boolean = false): Int =
Neo4j.withTx { implicit service =>
val deletedClause = if(countDeleted) "HAS(inst.deleted)" else "NOT HAS(inst.deleted)"
Cypher(s"""
START model = node({modelId})
MATCH model <-[:EXTENDS*0..]- () -[:LAST]-> inst
WHERE $deletedClause
RETURN COUNT(*)"""
).on("modelId" -> modelId
).as(scalar[Long].single).toInt
}
def getMaxVersion(modelName: String, eId: Long): Long = Neo4j.withTx { implicit service =>
Cypher("""
START model = node:MODELS({nmQuery})
MATCH model -[rel:LAST]-> inst
WHERE inst.eId = {eId}
RETURN rel.version"""
).on(
"nmQuery" -> ("name:" + modelName),
"eId" -> eId
).as(scalar[Long].singleOpt).getOrElse(0)
}
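  /**
   * Finds an [[models.Instance]] by its `EID`, optionally at a specific version:
   * `None` returns the latest version, `Some(v)` with `v >= 0` the absolute
   * version `v` (counting from the initial version 0), and `Some(v)` with
   * `v < 0` the version `|v|` steps before the latest one. Soft-deleted
   * instances are excluded unless `includeDeleted` is set.
   */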
def findByEId(modelName: String, eId: Long, version: Option[Long] = None, includeDeleted: Boolean = false): Option[(Model.Rich, Instance.Qualified)] = Neo4j.withTx { implicit service =>
ModelCache.modelByName(modelName).map { case rModel@(model, _) =>
val deletedClause = if(includeDeleted) "" else "AND NOT HAS(inst.deleted)"
val data = version match {
case None => Cypher(s"""
START model = node({modelId})
MATCH model <-[:EXTENDS*0..]- extModel -[:LAST]-> inst -[?:HAS_AUTHOR]-> profile
WHERE inst.eId = {eId} $deletedClause
RETURN extModel.name AS name, inst, profile"""
).on(
"modelId" -> model.id.get,
"eId" -> eId
)
case Some(v) if(v >= 0) => Cypher(s"""
START model = node({modelId})
MATCH model <-[:EXTENDS*0..]- extModel -[:INIT]-> instInit -[:NEXT*$v]-> instVer -[:NEXT*0..]-> inst <-[:LAST]- extModel,
instVer -[?:HAS_AUTHOR]-> profile
WHERE inst.eId = {eId} $deletedClause
RETURN extModel.name AS name, instVer AS inst, profile"""
).on(
"modelId" -> model.id.get,
"eId" -> eId
)
case Some(v) if(v < 0) => Cypher(s"""
START model = node({modelId})
MATCH model <-[:EXTENDS*0..]- extModel -[:LAST]-> inst <-[:NEXT*${-v}]- instVer -[?:HAS_AUTHOR]-> profile
WHERE inst.eId = {eId} $deletedClause
RETURN extModel.name AS name, instVer AS inst, profile"""
).on(
"modelId" -> model.id.get,
"eId" -> eId
)
}
val inst = data.as(str("name") ~ Instance.withAuthor("inst", "profile", rModel) singleOpt)
inst.map { case (qName ~ inst) => (rModel, (qName, inst)) }
}.getOrElse(None)
}
/**
* Returns an index page for [[models.Instance]]s.
*
* A single page containing all instances is returned if `pageSize == 0`.
* A positive `order` will result in an ascending page order. A negative
* `order` will result in a descending page order.
*
* @param modelName the corresponding model name
* @param page the page number (must satisfy `>= 0`)
* @param pageSize the page size (must satisfy `>= 0`)
   * @param order the page order
   * @param showDeleted if true, list soft-deleted instances instead of live ones
   * @return a [[models.Page]] of [[models.Instance]]s.
*/
def index(modelName: String, page: Int, pageSize: Int, order: Int, showDeleted: Boolean): Option[(Model.Rich, Page[Instance])] =
ModelCache.modelByName(modelName, min = ModelStatus.Private).map { case rModel@(model, attrs) =>
require(page >= 0, "Invalid page number")
require(pageSize >= 0, "Invalid page size")
val deletedClause = if(showDeleted) "HAS(inst.deleted)" else "NOT HAS(inst.deleted)"
val columns = "eId" +: attrs.map("_" + _.name)
      val column = try { columns(math.abs(order)-1) } catch { case _: IndexOutOfBoundsException => columns.head }
val ordering = s"ORDER BY inst.`$column`? ${if(order < 0) "DESC" else "ASC"}"
val pagination = if(pageSize == 0) "" else s"SKIP ${pageSize*page} LIMIT $pageSize"
Neo4j.withTx { implicit service =>
val insts = Cypher(s"""
START model = node({modelId})
MATCH model <-[:EXTENDS*0..]- () -[:LAST]-> inst -[?:HAS_AUTHOR]-> profile
WHERE $deletedClause
RETURN inst, profile
$ordering $pagination"""
).on("modelId" -> model.id.get
).as(Instance.withAuthor("inst", "profile", rModel) *)
(rModel, Page(insts, page, pageSize, order, count(model.id.get, showDeleted)))
}
}
private def createOutgoing(modelName: String, source: RichNode)(targets: (Attribute, List[Long])*)(implicit service: org.neo4j.graphdb.GraphDatabaseService) =
for((attr, eIds) <- targets) {
Cypher(s"""
START targetModel = node({targetId}), sourceNode = node({sourceId})
MATCH targetModel <-[:EXTENDS*0..]- () -[:LAST]-> targetNode
WHERE targetNode.eId IN {eIds} AND NOT HAS(targetNode.deleted)
CREATE sourceNode -[rel:`_${attr.name}` { cascadeIn: ${attr.cascadeIn.getOrElse(false)}, cascadeOut: ${attr.cascadeOut.getOrElse(false)} } ]-> targetNode"""
).on(
"targetId" -> attr.relTarget.get,
"sourceId" -> source.id,
"eIds" -> eIds
).execute()
}
/**
* Creates a [[models.Instance]].
*
   * @param rModel the corresponding [[models.Model.Rich]]
   * @param newInst the new [[models.Instance]]
   * @param link optional `(modelName, eId, attributeName)` of an existing
   *        instance from which an incoming relation to the new one is created
   * @return the instance's `EID`.
*/
def create(rModel: Model.Rich, newInst: Instance, link: Option[(String, Long, String)] = None)(implicit request: RequestHeader): Long =
Neo4j.withTx { implicit service =>
val (model, attrs) = rModel
/* create the new instance */
val modelNode = ModelCache.nodeById(model.id.get).get
val eId = Model.nextEId()
val (rels, noRels) = attrs.partition(_.datatype == Datatype.Relation)
val newInstNode = RichNode.create(
"eId" -> eId)(
newInst.values.filterKeys(noRels.map(_.name).contains(_)).toSeq:_*
)
Profile.setAuthor(newInstNode.id)
modelNode --> "INIT" --> newInstNode
(modelNode --> "LAST" !--> newInstNode).set("version" -> 0)
/* update the relational structure */
/* create incoming relation (if link is provided) */
val data = for {
(srcModel, srcEId, attrName) <- link
(rModel, qInst) <- Instance.findByEId(srcModel, srcEId)
attr <- rModel._2.find(_.name == attrName)
} yield (rModel, qInst, attr)
data match {
case Some((rModel, (model, inst), attr)) =>
Cypher(s"""
START srcNode = node({sourceId}), tgtNode = node({targetId})
CREATE srcNode -[:`_${attr.name}` { cascadeIn: ${attr.cascadeIn.getOrElse(false)}, cascadeOut: ${attr.cascadeOut.getOrElse(false)} } ]-> tgtNode"""
).on(
"sourceId" -> inst.id.get,
"targetId" -> newInstNode.id
).execute()
case None => if(link.isDefined) {
Logger.info(s"Could not find valid database match for link pattern ${link.get}")
}
}
/* create outgoing relations */
val targets = rels.map { case attr =>
attr -> newInst.values(attr.name).getOrElse(List.empty).asInstanceOf[List[Long]]
}
createOutgoing(model.name, newInstNode)(targets:_*)
eId
}
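  /* Call sketch (added, illustrative): creating an instance that is linked from an
   * existing one. The model and attribute names are hypothetical:
   *
   *   val eId = create(rModel, newInst, link = Some(("Order", 42L, "items")))
   *
   * The link triple names the source model, the source instance's EID and the
   * relation attribute that should point at the new instance.
   */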
def update(rModel: Model.Rich, oldInst: Instance, newInst: Instance)(implicit request: RequestHeader) = Neo4j.withTx { implicit service =>
val (model, attrs) = rModel
/* update the instance */
val (modelNode ~ oldInstNode ~ version) = Cypher("""
START inst = node({instId})
MATCH model -[rel:LAST]-> inst
WHERE NOT HAS(inst.deleted)
WITH model, inst, rel, rel.version AS version
DELETE rel
RETURN model, inst, version"""
).on("instId" -> oldInst.id.get
).as(get[RichNode]("model") ~ get[RichNode]("inst") ~ long("version") single)
val (rels, noRels) = attrs.partition(_.datatype == Datatype.Relation)
val newInstNode = RichNode.create(
"eId" -> oldInst.eId, "created" -> toLong(oldInst.created))(
newInst.values.filterKeys(noRels.map(_.name).contains(_)).toSeq:_*
)
oldInstNode --> "NEXT" --> newInstNode
(modelNode --> "LAST" !--> newInstNode).set("version" -> (version + 1))
Profile.setAuthor(newInstNode.id)
/* update the relational structure */
/* copy incoming relations */
for(rel <- oldInstNode.getIncoming("INIT", "NEXT", "LAST")) {
rel --> newInstNode
}
/* create outgoing relations */
val targets = rels.map { case attr =>
(attr, newInst.values(attr.name).getOrElse(List.empty).asInstanceOf[List[Long]])
}
createOutgoing(model.name, newInstNode)(targets:_*)
}
def batchDelete(rModel: Model.Rich, eIds: Seq[Long]): Int = Neo4j.withTx { implicit service =>
val now = System.currentTimeMillis
def deleteByIds(ids: Seq[Long]): Int = {
// two queries seem to be faster than one...
val t1 = Cypher("""
START inst = node({instIds})
SET inst.deleted = {deleted}
MATCH inst -[rel?]-> target <-[:LAST]- ()
WHERE rel.cascadeOut! = true AND NOT HAS(target.deleted)
RETURN ID(target)"""
).on(
"instIds" -> ids,
"deleted" -> now
).as((scalar[Long] ?) *).flatten
val t2 = Cypher("""
START inst = node({instIds})
SET inst.deleted = {deleted}
MATCH inst <-[rel?]- target <-[:LAST]- ()
WHERE rel.cascadeIn! = true AND NOT HAS(target.deleted)
RETURN ID(target)"""
).on(
"instIds" -> ids,
"deleted" -> now
).as((scalar[Long] ?) *).flatten
val targets = (t1 ++ t2).distinct
if(targets.length > 0) ids.length + deleteByIds(targets) else ids.length
}
val targets = Cypher("""
START model = node({modelId})
MATCH model <-[:EXTENDS*0..]- () -[:LAST]-> inst
WHERE inst.eId IN {eIds} AND NOT HAS(inst.deleted)
RETURN ID(inst)"""
).on(
"modelId" -> rModel._1.id.get,
"eIds" -> eIds
).as(scalar[Long] *)
deleteByIds(targets)
}
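  /* Added note (illustrative): `deleteByIds` stamps the given nodes with a `deleted`
   * timestamp, collects the not-yet-deleted neighbours reachable over relations
   * flagged `cascadeOut` (outgoing) or `cascadeIn` (incoming), and recurses on them.
   * So deleting an `Order` whose `items` relation cascades outwards also deletes its
   * `LineItem`s (model names hypothetical). The result counts all deleted nodes.
   */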
def batchRestore(rModel: Model.Rich, eIds: Seq[Long])(implicit request: RequestHeader): Int = Neo4j.withTx { implicit service =>
def restoreByIds(ids: Seq[Long], deleted: Long): Int = {
val targets = Cypher("""
START inst = node({instIds})
MATCH () -[:LAST]-> inst -[?]- target <-[:LAST]- ()
WHERE inst.deleted = {deleted}
DELETE inst.deleted
WITH target
WHERE target.deleted! = {deleted}
RETURN DISTINCT(ID(target))"""
).on(
"instIds" -> ids,
"deleted" -> deleted
).as((scalar[Long] ?) *).flatten
if(targets.length > 0) ids.length + restoreByIds(targets, deleted) else ids.length
}
val targets = Cypher("""
START model = node({modelId})
MATCH model <-[:EXTENDS*0..]- () -[:LAST]-> inst
WHERE inst.eId IN {eIds} AND HAS(inst.deleted)
RETURN ID(inst) AS id, inst.deleted AS deleted"""
).on(
"modelId" -> rModel._1.id.get,
"eIds" -> eIds
).as(long("id") ~ long("deleted") *)
targets.map { case (id ~ deleted) =>
restoreByIds(Seq(id), deleted)
} sum
}
def restoreVersion(rModel: Model.Rich, inst: Instance)(implicit request: RequestHeader) = Neo4j.withTx { implicit service =>
val now = System.currentTimeMillis
val restored = Neo4j.getNodeById(inst.id.get).clone("INIT", "NEXT", "LAST", "HAS_AUTHOR")
Cypher(s"""
START model = node({modelId}), restored = node({instId})
MATCH model <-[:EXTENDS*0..]- () -[rel:LAST]-> last
WHERE last.eId = {eId}
SET restored.modified = ${now}
CREATE model -[:LAST {version: ROUND(rel.version+1)}]-> restored <-[:NEXT]- last //ROUND: cypher bug workaround
DELETE rel"""
).on(
"modelId" -> rModel._1.id.get,
"instId" -> restored.id,
"eId" -> inst.eId
).execute()
Profile.setAuthor(restored.id)
}
} | fynnfeldpausch/frame | app/models/Instance.scala | Scala | mit | 14,732 |
import sbt._
import Keys._
import AndroidKeys._
object General {
val settings = Defaults.defaultSettings ++ Seq (
name := "SimpleRadio",
version := "0.1",
versionCode := 0,
scalaVersion := "2.10.0",
platformName in Android := "android-14",
resolvers += (
"Edge System Design Repository" at "http://jvmrepo.edgesysdesign.com/"
),
libraryDependencies ++= Seq(
"com.edgesysdesign" %% "frequency" % "master",
"org.scalatest" % "scalatest_2.10.0-RC3" % "1.8-B1" % "test",
"org.scaloid" % "scaloid" % "1.1_8_2.10"
),
javacOptions ++= Seq("-source", "1.6", "-target", "1.6"),
scalacOptions ++= Seq(
"-language:implicitConversions",
"-deprecation",
"-feature")
)
val proguardSettings = Seq (
useProguard in Android := true,
proguardOption in Android := """
-keep class scala.Function1
-keep class scala.collection.SeqLike { public protected *; }
"""
)
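  // Added note: further -keep rules can be appended to proguardOption in the same
  // way, e.g. "-keep class com.example.** { *; }" to exempt a whole package from
  // shrinking (package name hypothetical).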
lazy val fullAndroidSettings =
General.settings ++
AndroidProject.androidSettings ++
TypedResources.settings ++
proguardSettings ++
AndroidManifestGenerator.settings ++
AndroidMarketPublish.settings ++ Seq (
keyalias in Android := "change-me"
)
}
object AndroidBuild extends Build {
lazy val main = Project (
"SimpleRadio",
file("."),
settings = General.fullAndroidSettings
)
lazy val tests = Project (
"tests",
file("tests"),
settings = General.settings ++
AndroidTest.androidSettings ++
General.proguardSettings ++ Seq (
name := "SimpleRadioTests"
)
) dependsOn main
}
| edge-sys-design/simple-radio | project/Build.scala | Scala | gpl-2.0 | 1,650 |
/*
* Copyright (C) 2011-2013 exsplay
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package exsplay.tools
import java.io.File
import scalax.io._
import scala.util.Try
import sys.process._
/**
* User: vdubs
* Date: 05.08.13
* Time: 13:44
*/
object ON {
  private[this] val shutdownTriggerFile = "SHUTDOWN_TRIGGER"
def shutdown( action: => Unit ) = {
new Thread(
new Runnable() {
def run() {
          Resource.fromFile(shutdownTriggerFile).write("Remove this file for shutdown.")(Codec.UTF8)
          while (new File(shutdownTriggerFile).isFile) {
Try(Thread.sleep(5 * 1000L))
}
println("Performing shutdown.")
action
}
}
    ).start()
}
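  /* Usage sketch (added, illustrative):
   *
   *   ON.shutdown {
   *     server.stop() // `server` stands for whatever resource needs cleanup
   *   }
   *
   * The registered action runs once the SHUTDOWN_TRIGGER file is deleted.
   */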
} | wertlex/exsplay | exsplay/src/main/scala/exsplay/tools/ON.scala | Scala | apache-2.0 | 1,239 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.javaapi
import java.nio.ByteBuffer
import kafka.cluster.BrokerEndPoint
class GroupCoordinatorResponse(private val underlying: kafka.api.GroupCoordinatorResponse) {
def error = underlying.error
def errorCode = error.code
def coordinator: BrokerEndPoint = {
import kafka.javaapi.Implicits._
underlying.coordinatorOpt
}
override def equals(other: Any) = canEqual(other) && {
val otherConsumerMetadataResponse = other.asInstanceOf[kafka.javaapi.GroupCoordinatorResponse]
this.underlying.equals(otherConsumerMetadataResponse.underlying)
}
def canEqual(other: Any) = other.isInstanceOf[kafka.javaapi.GroupCoordinatorResponse]
override def hashCode = underlying.hashCode
override def toString = underlying.toString
}
object GroupCoordinatorResponse {
def readFrom(buffer: ByteBuffer) = new GroupCoordinatorResponse(kafka.api.GroupCoordinatorResponse.readFrom(buffer))
}
| ijuma/kafka | core/src/main/scala/kafka/javaapi/GroupCoordinatorResponse.scala | Scala | apache-2.0 | 1,727 |
import play.api._
import play.api.mvc._
import play.api.mvc.Results._
import scala.concurrent.Future
import play.libs.Akka
import akka.actor.Props
import cc.mewa.channels.ChannelManagerActor
import play.api.Play
import actors.ConnectionManagerActor
import cc.mewa.api.ChannelApp.AppEvent
import cc.mewa.channels.EventProxy
object Global extends GlobalSettings {
override def onStart(app: Application) {
    val authUrl = Play.current.configuration.getString("auth.url")
    val channelManager = Akka.system.actorOf(ChannelManagerActor.props(authUrl), "channel-manager")
val connectionManager = Akka.system.actorOf(Props[ConnectionManagerActor], "connection-manager")
// Register eventProxy to send events to remote applications
val eventProxy = Akka.system.actorOf(Props[EventProxy], "event-proxy")
Akka.system.eventStream.subscribe(eventProxy, classOf[AppEvent])
}
} | AnthillTech/mewa | app/Global.scala | Scala | bsd-2-clause | 903 |
package mimir.util
import com.typesafe.scalalogging.LazyLogging
import mimir.algebra._
object TextUtils extends LazyLogging {
def parsePrimitive(t: Type, s: String): PrimitiveValue =
{
t match {
case TInt() => IntPrimitive(java.lang.Long.parseLong(s))
case TFloat() => FloatPrimitive(java.lang.Double.parseDouble(s))
case TDate() => parseDate(s)
case TTimestamp() => parseTimestamp(s)
case TInterval() => parseInterval(s)
case TString() => StringPrimitive(s)
      case TBool() =>
        s.toUpperCase match {
          case "YES" | "TRUE" | "1" => BoolPrimitive(true)
          case "NO" | "FALSE" | "0" => BoolPrimitive(false)
          // any other string falls through and raises a MatchError
        }
case TRowId() => RowIdPrimitive(s)
case TType() => TypePrimitive(Type.fromString(s))
case TAny() => throw new RAException("Can't cast string to TAny")
case TUser(t) => parsePrimitive(TypeRegistry.baseType(t), s)
}
}
//val dateRegexp = """(\\d+)[\\\\\\/-](\\d+)[\\\\\\/-](\\d+)""".r
//val timestampRegexp = """(\\d+)[\\\\\\/-](\\d+)[\\\\\\/-](\\d+) (\\d+):(\\d+):(\\d+|\\d+[.]\\d*)""".r
val dateRegexp = "(\\\\d+)-(\\\\d+)-(\\\\d+)".r
val timestampRegexp = "(\\\\d+)-(\\\\d+)-(\\\\d+) (\\\\d+):(\\\\d+):(\\\\d+|\\\\d+[.]\\\\d*)".r
val intervalRegexp = "P(\\\\d+)Y(\\\\d+)M(\\\\d+)W(\\\\d+)DT(\\\\d+)H(\\\\d+)M(\\\\d+|\\\\d+[.]\\\\d*)S".r
def parseDate(s: String): PrimitiveValue =
{
logger.trace(s"Parse Date: '$s'")
s match {
case dateRegexp(y, m, d) =>
logger.trace(s" -> $y-$m-$d -> ${DatePrimitive(y.toInt, m.toInt, d.toInt)}")
DatePrimitive(y.toInt, m.toInt, d.toInt)
case _ => NullPrimitive()
}
}
def parseTimestamp(s: String): PrimitiveValue =
{
logger.trace(s"Parse Timestamp: '$s'")
s match {
case timestampRegexp(yr, mo, da, hr, mi, se) =>
val seconds = se.toDouble
TimestampPrimitive(yr.toInt, mo.toInt, da.toInt, hr.toInt, mi.toInt, seconds.toInt, (seconds*1000).toInt % 1000)
case _ => NullPrimitive()
}
}
def parseInterval(s: String): PrimitiveValue =
{
logger.trace(s"Parse Interval: '$s'")
s match {
case intervalRegexp(y, m, w, d, hh, mm, se) =>
val seconds = se.toDouble
IntervalPrimitive(new org.joda.time.Period(y.toInt, m.toInt, w.toInt, d.toInt, hh.toInt, mm.toInt, seconds.toInt, (seconds * 1000).toInt % 1000))
case _ => NullPrimitive()
}
}
object Levenshtein {
    // Smallest of the three candidate costs (deletion, insertion, substitution).
    def minimum(i1: Int, i2: Int, i3: Int) = scala.math.min(scala.math.min(i1, i2), i3)

    // Classic dynamic-programming edit distance over a (s2.length+1) x (s1.length+1) table.
    def distance(s1: String, s2: String) = {
      val dist = Array.tabulate(s2.length + 1, s1.length + 1) { (j, i) =>
        if (j == 0) i else if (i == 0) j else 0
      }
      for (j <- 1 to s2.length; i <- 1 to s1.length)
        dist(j)(i) =
          if (s2(j - 1) == s1(i - 1)) dist(j - 1)(i - 1)
          else minimum(dist(j - 1)(i) + 1, dist(j)(i - 1) + 1, dist(j - 1)(i - 1) + 1)
      dist(s2.length)(s1.length)
    }
}
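  // Added example: Levenshtein.distance("kitten", "sitting") == 3
  // (substitute 'k' -> 's', substitute 'e' -> 'i', insert 'g').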
}
| UBOdin/mimir | src/main/scala/mimir/util/TextUtils.scala | Scala | apache-2.0 | 2,877 |
package scutil.gui.geom
import minitest._
object SgSpanTest extends SimpleTestSuite {
test("SgSpan should intersect nothing when empty") {
assertEquals(
SgSpan.startEnd(1, 1) intersects SgSpan.startEnd(0, 2),
false
)
}
test("SgSpan should intersect nothing when empty") {
assertEquals(
SgSpan.startEnd(0, 2) intersects SgSpan.startEnd(1, 1),
false
)
}
test("SgSpan should intersect itself") {
assertEquals(
SgSpan.startEnd(0, 1) intersects SgSpan.startEnd(0, 1),
true
)
}
test("SgSpan should not intersect left to left") {
assertEquals(
SgSpan.startEnd(0, 1) intersects SgSpan.startEnd(2, 4),
false
)
}
test("SgSpan should not intersect left to start") {
assertEquals(
SgSpan.startEnd(0, 2) intersects SgSpan.startEnd(2, 4),
false
)
}
test("SgSpan should intersect left to inside") {
assertEquals(
SgSpan.startEnd(0, 3) intersects SgSpan.startEnd(2, 4),
true
)
}
test("SgSpan should intersect left to end") {
assertEquals(
SgSpan.startEnd(0, 4) intersects SgSpan.startEnd(2, 4),
true
)
}
test("SgSpan should intersect left to right") {
assertEquals(
SgSpan.startEnd(0, 5) intersects SgSpan.startEnd(2, 4),
true
)
}
test("SgSpan should intersect start to inside") {
assertEquals(
SgSpan.startEnd(2, 3) intersects SgSpan.startEnd(2, 4),
true
)
}
test("SgSpan should intersect start to end") {
assertEquals(
SgSpan.startEnd(2, 4) intersects SgSpan.startEnd(2, 4),
true
)
}
test("SgSpan should intersect start to right") {
assertEquals(
SgSpan.startEnd(2, 5) intersects SgSpan.startEnd(2, 4),
true
)
}
test("SgSpan should intersect inside to end") {
assertEquals(
SgSpan.startEnd(3, 4) intersects SgSpan.startEnd(2, 4),
true
)
}
test("SgSpan should intersect inside to right") {
assertEquals(
SgSpan.startEnd(3, 5) intersects SgSpan.startEnd(2, 4),
true
)
}
test("SgSpan should not intersect end to right") {
assertEquals(
SgSpan.startEnd(4, 5) intersects SgSpan.startEnd(2, 4),
false
)
}
test("SgSpan should not intersect right to right") {
assertEquals(
SgSpan.startEnd(5, 6) intersects SgSpan.startEnd(2, 4),
false
)
}
//------------------------------------------------------------------------------
test("SgSpan should intersect nothing when empty") {
assertEquals(
SgSpan.startEnd(1, 1) intersect SgSpan.startEnd(0, 2),
None
)
}
test("SgSpan should intersect nothing when empty") {
assertEquals(
SgSpan.startEnd(0, 2) intersect SgSpan.startEnd(1, 1),
None
)
}
test("SgSpan should intersect itself") {
assertEquals(
SgSpan.startEnd(0, 1) intersect SgSpan.startEnd(0, 1),
Some(SgSpan.startEnd(0, 1))
)
}
test("SgSpan should not intersect left to left") {
assertEquals(
SgSpan.startEnd(0, 1) intersect SgSpan.startEnd(2, 4),
None
)
}
test("SgSpan should not intersect left to start") {
assertEquals(
SgSpan.startEnd(0, 2) intersect SgSpan.startEnd(2, 4),
None
)
}
test("SgSpan should intersect left to inside") {
assertEquals(
SgSpan.startEnd(0, 3) intersect SgSpan.startEnd(2, 4),
Some(SgSpan.startEnd(2,3))
)
}
test("SgSpan should intersect left to end") {
assertEquals(
SgSpan.startEnd(0, 4) intersect SgSpan.startEnd(2, 4),
Some(SgSpan.startEnd(2,4))
)
}
test("SgSpan should intersect left to right") {
assertEquals(
SgSpan.startEnd(0, 5) intersect SgSpan.startEnd(2, 4),
Some(SgSpan.startEnd(2,4))
)
}
test("SgSpan should intersect start to inside") {
assertEquals(
SgSpan.startEnd(2, 3) intersect SgSpan.startEnd(2, 4),
Some(SgSpan.startEnd(2,3))
)
}
test("SgSpan should intersect start to end") {
assertEquals(
SgSpan.startEnd(2, 4) intersect SgSpan.startEnd(2, 4),
Some(SgSpan.startEnd(2,4))
)
}
test("SgSpan should intersect start to right") {
assertEquals(
SgSpan.startEnd(2, 5) intersect SgSpan.startEnd(2, 4),
Some(SgSpan.startEnd(2,4))
)
}
test("SgSpan should intersect inside to end") {
assertEquals(
SgSpan.startEnd(3, 4) intersect SgSpan.startEnd(2, 4),
Some(SgSpan.startEnd(3,4))
)
}
test("SgSpan should intersect inside to right") {
assertEquals(
SgSpan.startEnd(3, 5) intersect SgSpan.startEnd(2, 4),
Some(SgSpan.startEnd(3,4))
)
}
test("SgSpan should not intersect end to right") {
assertEquals(
SgSpan.startEnd(4, 5) intersect SgSpan.startEnd(2, 4),
None
)
}
test("SgSpan should not intersect right to right") {
assertEquals(
SgSpan.startEnd(5, 6) intersect SgSpan.startEnd(2, 4),
None
)
}
}
| ritschwumm/scutil | modules/gui/src/test/scala/scutil/gui/geom/SgSpanTest.scala | Scala | bsd-2-clause | 4,588 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.utils
import org.apache.flink.api.dag.Transformation
import org.apache.flink.table.api.TableException
import org.apache.flink.table.data.{GenericRowData, RowData}
import org.apache.flink.table.planner.codegen.CodeGenUtils.{DEFAULT_INPUT1_TERM, GENERIC_ROW}
import org.apache.flink.table.planner.codegen.OperatorCodeGenerator.generateCollect
import org.apache.flink.table.planner.codegen.{CodeGenUtils, CodeGeneratorContext, ExprCodeGenerator, OperatorCodeGenerator}
import org.apache.flink.table.planner.plan.nodes.exec.utils.ExecNodeUtil
import org.apache.flink.table.runtime.operators.CodeGenOperatorFactory
import org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromDataTypeToLogicalType
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo
import org.apache.flink.table.sources.TableSource
import org.apache.flink.table.types.DataType
import org.apache.flink.table.types.logical.RowType
import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo
import org.apache.calcite.rel.core.TableScan
import org.apache.calcite.rex.RexNode
import java.util
import scala.collection.JavaConversions._
/**
* Util for [[TableScan]]s.
*/
object ScanUtil {
  def hasTimeAttributeField(indexes: Array[Int]): Boolean =
    indexes.contains(TimeIndicatorTypeInfo.ROWTIME_STREAM_MARKER) ||
      indexes.contains(TimeIndicatorTypeInfo.ROWTIME_BATCH_MARKER) ||
      indexes.contains(TimeIndicatorTypeInfo.PROCTIME_STREAM_MARKER) ||
      indexes.contains(TimeIndicatorTypeInfo.PROCTIME_BATCH_MARKER)
private[flink] def needsConversion(source: TableSource[_]): Boolean = {
needsConversion(source.getProducedDataType)
}
def needsConversion(dataType: DataType): Boolean =
fromDataTypeToLogicalType(dataType) match {
case _: RowType => !CodeGenUtils.isInternalClass(dataType)
case _ => true
}
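  // Added note: a produced data type backed by an external row class (e.g. a POJO)
  // needs conversion, while a RowType already backed by an internal class does not;
  // non-row types are wrapped into a GenericRowData by convertToInternalRow below.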
def convertToInternalRow(
ctx: CodeGeneratorContext,
input: Transformation[Any],
fieldIndexes: Array[Int],
inputType: DataType,
outputRowType: RowType,
qualifiedName: util.List[String],
nameFormatter: (String, String) => String,
descriptionFormatter: String => String,
rowtimeExpr: Option[RexNode] = None,
beforeConvert: String = "",
afterConvert: String = ""): Transformation[RowData] = {
// conversion
val convertName = "SourceConversion"
// type convert
val inputTerm = DEFAULT_INPUT1_TERM
val internalInType = fromDataTypeToLogicalType(inputType)
val (inputTermConverter, inputRowType) = {
val convertFunc = CodeGenUtils.genToInternalConverter(ctx, inputType)
internalInType match {
case rt: RowType => (convertFunc, rt)
case _ => ((record: String) => s"$GENERIC_ROW.of(${convertFunc(record)})",
RowType.of(internalInType))
}
}
val processCode =
if ((inputRowType.getChildren == outputRowType.getChildren) &&
(inputRowType.getFieldNames == outputRowType.getFieldNames) &&
!hasTimeAttributeField(fieldIndexes)) {
s"${generateCollect(inputTerm)}"
} else {
// field index change (pojo) or has time attribute field
val conversion = new ExprCodeGenerator(ctx, false)
.bindInput(inputRowType, inputTerm = inputTerm, inputFieldMapping = Some(fieldIndexes))
.generateConverterResultExpression(
outputRowType, classOf[GenericRowData], rowtimeExpression = rowtimeExpr)
s"""
|$beforeConvert
|${conversion.code}
|${generateCollect(conversion.resultTerm)}
|$afterConvert
|""".stripMargin
}
val generatedOperator = OperatorCodeGenerator.generateOneInputStreamOperator[Any, RowData](
ctx,
convertName,
processCode,
outputRowType,
converter = inputTermConverter)
val substituteStreamOperator = new CodeGenOperatorFactory[RowData](generatedOperator)
val description = descriptionFormatter(getOperatorDescription(qualifiedName, outputRowType))
    val name = nameFormatter(description, "SourceConversion")
ExecNodeUtil.createOneInputTransformation(
input.asInstanceOf[Transformation[RowData]],
name,
description,
substituteStreamOperator,
InternalTypeInfo.of(outputRowType),
input.getParallelism,
0)
}
/**
   * @param qualifiedName qualified name for table
   * @param rowType the produced row type of the scan
   */
private[flink] def getOperatorDescription(
qualifiedName: Seq[String], rowType: RowType): String = {
val tableQualifiedName = qualifiedName.mkString(".")
val fieldNames = rowType.getFieldNames.mkString(", ")
s"SourceConversion(table=[$tableQualifiedName], fields=[$fieldNames])"
}
/**
* Returns the field indices of primary key in given fields.
*/
def getPrimaryKeyIndices(
fieldNames: util.List[String],
keyFields: util.List[String]): Array[Int] = {
    // we must use the output field names of the scan node instead of the original schema
    // to calculate the primary key indices, because projections may have been pushed
    // down into the scan node
keyFields.map { k =>
val index = fieldNames.indexOf(k)
if (index < 0) {
// primary key shouldn't be pruned, otherwise it's a bug
throw new TableException(
s"Can't find primary key field $k in the input fields $fieldNames. " +
s"This is a bug, please file an issue.")
}
index
}.toArray
}
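  // Added example: getPrimaryKeyIndices(fieldNames = ["b", "a", "c"],
  // keyFields = ["a", "c"]) returns Array(1, 2).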
}
| lincoln-lil/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/utils/ScanUtil.scala | Scala | apache-2.0 | 6,292 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.pulsar.sink
import java.util
import java.util.UUID
import com.datamountaineer.streamreactor.connect.errors.ErrorPolicyEnum
import com.datamountaineer.streamreactor.connect.pulsar.config.{PulsarConfigConstants, PulsarSinkConfig, PulsarSinkSettings}
import com.datamountaineer.streamreactor.connect.utils.{JarManifest, ProgressCounter}
import com.typesafe.scalalogging.slf4j.StrictLogging
import org.apache.kafka.clients.consumer.OffsetAndMetadata
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.connect.sink.{SinkRecord, SinkTask}
import scala.collection.JavaConversions._
/**
* Created by [email protected] on 27/08/2017.
* stream-reactor
*/
class PulsarSinkTask extends SinkTask with StrictLogging {
private val progressCounter = new ProgressCounter
private var enableProgress: Boolean = false
private var writer: Option[PulsarWriter] = None
private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
private var name = ""
private var settings : Option[PulsarSinkSettings] = None
override def start(props: util.Map[String, String]): Unit = {
logger.info(scala.io.Source.fromInputStream(getClass.getResourceAsStream("/pulsar-sink-ascii.txt")).mkString + s" v $version")
logger.info(manifest.printManifest())
PulsarSinkConfig.config.parse(props)
val sinkConfig = new PulsarSinkConfig(props)
enableProgress = sinkConfig.getBoolean(PulsarConfigConstants.PROGRESS_COUNTER_ENABLED)
settings = Some(PulsarSinkSettings(sinkConfig))
    // if the error policy is RETRY, set the retry interval
if (settings.get.errorPolicy.equals(ErrorPolicyEnum.RETRY)) {
context.timeout(sinkConfig.getInt(PulsarConfigConstants.ERROR_RETRY_INTERVAL).toLong)
}
name = props.getOrDefault("name", s"kafka-connect-pulsar-sink-${UUID.randomUUID().toString}")
writer = Some(PulsarWriter(name, settings.get))
}
override def put(records: util.Collection[SinkRecord]): Unit = {
require(writer.nonEmpty, "Writer is not set!")
val seq = records.toVector
writer.foreach(w => w.write(records.toSet))
if (enableProgress) {
progressCounter.update(seq)
}
}
/**
* Clean up writer
**/
override def stop(): Unit = {
logger.info("Stopping Pulsar sink.")
writer.foreach(w => w.close)
progressCounter.empty
}
override def flush(map: util.Map[TopicPartition, OffsetAndMetadata]): Unit = {
require(writer.nonEmpty, "Writer is not set!")
writer.foreach(w => w.flush)
}
override def version: String = manifest.version()
}
| CodeSmell/stream-reactor | kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarSinkTask.scala | Scala | apache-2.0 | 3,224 |
package org.bowlerframework.squeryl.dao
import org.squeryl.{Table, KeyedEntity}
import org.bowlerframework.squeryl.SquerylDao
import org.squeryl.PrimitiveTypeMode._
/**
* Created by IntelliJ IDEA.
* User: wfaler
* Date: 30/01/2011
* Time: 15:40
*/
class IntKeyedDao[T <: KeyedEntity[Int]](theTable: Table[T])(implicit m : scala.Predef.Manifest[T]) extends SquerylDao[T, Int](theTable){
def findById(id: Int): Option[T] = {
    try {
      Some(table.where(f => f.id === id).single)
    } catch {
      case _: Exception => None
    }
}
override def create(entity: T) = {
    com.recursivity.commons.bean.BeanUtils.setProperty(entity.getClass, entity, "id", 0L)
super.create(entity)
}
override def findAll(offset: Int = 0, results: Int = Integer.MAX_VALUE) = from(table)(a => select(a) orderBy(a.id asc)).page(offset, results).toList
} | rkpandey/Bowler | squeryl-mapper/src/main/scala/org/bowlerframework/squeryl/dao/IntKeyedDao.scala | Scala | bsd-3-clause | 922 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.catalog
import java.net.URI
import java.util.Locale
import java.util.concurrent.Callable
import java.util.concurrent.TimeUnit
import javax.annotation.concurrent.GuardedBy
import scala.collection.mutable
import scala.util.{Failure, Success, Try}
import com.google.common.cache.{Cache, CacheBuilder}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst._
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder
import org.apache.spark.sql.catalyst.expressions.{Alias, Expression, ExpressionInfo, ImplicitCastInputTypes, UpCast}
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParserInterface}
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project, SubqueryAlias, View}
import org.apache.spark.sql.catalyst.util.{CharVarcharUtils, StringUtils}
import org.apache.spark.sql.connector.catalog.CatalogManager
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.StaticSQLConf.GLOBAL_TEMP_DATABASE
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.util.{CaseInsensitiveStringMap, PartitioningUtils}
import org.apache.spark.util.Utils
object SessionCatalog {
val DEFAULT_DATABASE = "default"
}
/**
* An internal catalog that is used by a Spark Session. This internal catalog serves as a
* proxy to the underlying metastore (e.g. Hive Metastore) and it also manages temporary
* views and functions of the Spark Session that it belongs to.
*
* This class must be thread-safe.
*/
class SessionCatalog(
externalCatalogBuilder: () => ExternalCatalog,
globalTempViewManagerBuilder: () => GlobalTempViewManager,
functionRegistry: FunctionRegistry,
tableFunctionRegistry: TableFunctionRegistry,
hadoopConf: Configuration,
parser: ParserInterface,
functionResourceLoader: FunctionResourceLoader,
cacheSize: Int = SQLConf.get.tableRelationCacheSize,
cacheTTL: Long = SQLConf.get.metadataCacheTTL) extends SQLConfHelper with Logging {
import SessionCatalog._
import CatalogTypes.TablePartitionSpec
// For testing only.
def this(
externalCatalog: ExternalCatalog,
functionRegistry: FunctionRegistry,
tableFunctionRegistry: TableFunctionRegistry,
conf: SQLConf) = {
this(
() => externalCatalog,
() => new GlobalTempViewManager(conf.getConf(GLOBAL_TEMP_DATABASE)),
functionRegistry,
tableFunctionRegistry,
new Configuration(),
new CatalystSqlParser(),
DummyFunctionResourceLoader,
conf.tableRelationCacheSize,
conf.metadataCacheTTL)
}
// For testing only.
def this(
externalCatalog: ExternalCatalog,
functionRegistry: FunctionRegistry,
conf: SQLConf) = {
this(externalCatalog, functionRegistry, new SimpleTableFunctionRegistry, conf)
}
// For testing only.
def this(
externalCatalog: ExternalCatalog,
functionRegistry: FunctionRegistry,
tableFunctionRegistry: TableFunctionRegistry) = {
this(externalCatalog, functionRegistry, tableFunctionRegistry, SQLConf.get)
}
// For testing only.
def this(externalCatalog: ExternalCatalog, functionRegistry: FunctionRegistry) = {
this(externalCatalog, functionRegistry, SQLConf.get)
}
// For testing only.
def this(externalCatalog: ExternalCatalog) = {
this(externalCatalog, new SimpleFunctionRegistry)
}
lazy val externalCatalog = externalCatalogBuilder()
lazy val globalTempViewManager = globalTempViewManagerBuilder()
/** List of temporary views, mapping from table name to their logical plan. */
@GuardedBy("this")
protected val tempViews = new mutable.HashMap[String, TemporaryViewRelation]
// Note: we track current database here because certain operations do not explicitly
// specify the database (e.g. DROP TABLE my_table). In these cases we must first
// check whether the temporary view or function exists, then, if not, operate on
// the corresponding item in the current database.
@GuardedBy("this")
protected var currentDb: String = formatDatabaseName(DEFAULT_DATABASE)
private val validNameFormat = "([\\w_]+)".r
/**
* Checks if the given name conforms the Hive standard ("[a-zA-Z_0-9]+"),
* i.e. if this name only contains characters, numbers, and _.
*
* This method is intended to have the same behavior of
* org.apache.hadoop.hive.metastore.MetaStoreUtils.validateName.
*/
private def validateName(name: String): Unit = {
if (!validNameFormat.pattern.matcher(name).matches()) {
throw QueryCompilationErrors.invalidNameForTableOrDatabaseError(name)
}
}
/**
* Format table name, taking into account case sensitivity.
*/
protected[this] def formatTableName(name: String): String = {
if (conf.caseSensitiveAnalysis) name else name.toLowerCase(Locale.ROOT)
}
/**
* Format database name, taking into account case sensitivity.
*/
protected[this] def formatDatabaseName(name: String): String = {
if (conf.caseSensitiveAnalysis) name else name.toLowerCase(Locale.ROOT)
}
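  // Added example: with spark.sql.caseSensitive = false (the default),
  // formatTableName("MyTable") == "mytable"; with it set to true the name is kept as-is.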
private val tableRelationCache: Cache[QualifiedTableName, LogicalPlan] = {
var builder = CacheBuilder.newBuilder()
.maximumSize(cacheSize)
if (cacheTTL > 0) {
builder = builder.expireAfterWrite(cacheTTL, TimeUnit.SECONDS)
}
builder.build[QualifiedTableName, LogicalPlan]()
}
/** This method provides a way to get a cached plan. */
def getCachedPlan(t: QualifiedTableName, c: Callable[LogicalPlan]): LogicalPlan = {
tableRelationCache.get(t, c)
}
/** This method provides a way to get a cached plan if the key exists. */
def getCachedTable(key: QualifiedTableName): LogicalPlan = {
tableRelationCache.getIfPresent(key)
}
/** This method provides a way to cache a plan. */
def cacheTable(t: QualifiedTableName, l: LogicalPlan): Unit = {
tableRelationCache.put(t, l)
}
/** This method provides a way to invalidate a cached plan. */
def invalidateCachedTable(key: QualifiedTableName): Unit = {
tableRelationCache.invalidate(key)
}
/** This method discards any cached table relation plans for the given table identifier. */
def invalidateCachedTable(name: TableIdentifier): Unit = {
val dbName = formatDatabaseName(name.database.getOrElse(currentDb))
val tableName = formatTableName(name.table)
invalidateCachedTable(QualifiedTableName(dbName, tableName))
}
/** This method provides a way to invalidate all the cached plans. */
def invalidateAllCachedTables(): Unit = {
tableRelationCache.invalidateAll()
}
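  /* Usage sketch (added, illustrative) for the relation cache above: callers resolve
   * a table once and memoize the plan, e.g.
   *
   *   val key = QualifiedTableName("db", "tbl")          // names hypothetical
   *   val plan = getCachedPlan(key, () => resolve(key))  // `resolve` is a placeholder
   *
   * and call invalidateCachedTable(key) after DDL that changes the table.
   */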
/**
   * This method is used to make the given path qualified before we store it in
   * the underlying external catalog, so that a path without a scheme is resolved
   * against the current default FileSystem once and does not change if the
   * default FileSystem changes later.
*/
private def makeQualifiedPath(path: URI): URI = {
val hadoopPath = new Path(path)
val fs = hadoopPath.getFileSystem(hadoopConf)
fs.makeQualified(hadoopPath).toUri
}
private def requireDbExists(db: String): Unit = {
if (!databaseExists(db)) {
throw new NoSuchDatabaseException(db)
}
}
private def requireTableExists(name: TableIdentifier): Unit = {
if (!tableExists(name)) {
val db = name.database.getOrElse(currentDb)
throw new NoSuchTableException(db = db, table = name.table)
}
}
private def requireTableNotExists(name: TableIdentifier): Unit = {
if (tableExists(name)) {
val db = name.database.getOrElse(currentDb)
throw new TableAlreadyExistsException(db = db, table = name.table)
}
}
// ----------------------------------------------------------------------------
// Databases
// ----------------------------------------------------------------------------
// All methods in this category interact directly with the underlying catalog.
// ----------------------------------------------------------------------------
def createDatabase(dbDefinition: CatalogDatabase, ignoreIfExists: Boolean): Unit = {
val dbName = formatDatabaseName(dbDefinition.name)
if (dbName == globalTempViewManager.database) {
throw QueryCompilationErrors.cannotCreateDatabaseWithSameNameAsPreservedDatabaseError(
globalTempViewManager.database)
}
validateName(dbName)
externalCatalog.createDatabase(
dbDefinition.copy(name = dbName, locationUri = makeQualifiedDBPath(dbDefinition.locationUri)),
ignoreIfExists)
}
private def makeQualifiedDBPath(locationUri: URI): URI = {
if (locationUri.isAbsolute) {
locationUri
} else {
val fullPath = new Path(conf.warehousePath, CatalogUtils.URIToString(locationUri))
makeQualifiedPath(fullPath.toUri)
}
}
def dropDatabase(db: String, ignoreIfNotExists: Boolean, cascade: Boolean): Unit = {
val dbName = formatDatabaseName(db)
if (dbName == DEFAULT_DATABASE) {
throw QueryCompilationErrors.cannotDropDefaultDatabaseError
}
if (!ignoreIfNotExists) {
requireDbExists(dbName)
}
if (cascade && databaseExists(dbName)) {
listTables(dbName).foreach { t =>
invalidateCachedTable(QualifiedTableName(dbName, t.table))
}
}
externalCatalog.dropDatabase(dbName, ignoreIfNotExists, cascade)
}
def alterDatabase(dbDefinition: CatalogDatabase): Unit = {
val dbName = formatDatabaseName(dbDefinition.name)
requireDbExists(dbName)
externalCatalog.alterDatabase(dbDefinition.copy(
name = dbName, locationUri = makeQualifiedDBPath(dbDefinition.locationUri)))
}
def getDatabaseMetadata(db: String): CatalogDatabase = {
val dbName = formatDatabaseName(db)
requireDbExists(dbName)
externalCatalog.getDatabase(dbName)
}
def databaseExists(db: String): Boolean = {
val dbName = formatDatabaseName(db)
externalCatalog.databaseExists(dbName)
}
def listDatabases(): Seq[String] = {
externalCatalog.listDatabases()
}
def listDatabases(pattern: String): Seq[String] = {
externalCatalog.listDatabases(pattern)
}
def getCurrentDatabase: String = synchronized { currentDb }
def setCurrentDatabase(db: String): Unit = {
val dbName = formatDatabaseName(db)
if (dbName == globalTempViewManager.database) {
throw QueryCompilationErrors.cannotUsePreservedDatabaseAsCurrentDatabaseError(
globalTempViewManager.database)
}
requireDbExists(dbName)
synchronized { currentDb = dbName }
}
/**
* Get the path for creating a non-default database when database location is not provided
* by users.
*/
def getDefaultDBPath(db: String): URI = {
CatalogUtils.stringToURI(formatDatabaseName(db) + ".db")
}
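  // Added example: with case-insensitive analysis, getDefaultDBPath("Sales") yields
  // the relative URI "sales.db", which callers qualify against the warehouse location.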
// ----------------------------------------------------------------------------
// Tables
// ----------------------------------------------------------------------------
// There are two kinds of tables, temporary views and metastore tables.
// Temporary views are isolated across sessions and do not belong to any
// particular database. Metastore tables can be used across multiple
// sessions as their metadata is persisted in the underlying catalog.
// ----------------------------------------------------------------------------
// ----------------------------------------------------
// | Methods that interact with metastore tables only |
// ----------------------------------------------------
/**
* Create a metastore table in the database specified in `tableDefinition`.
* If no such database is specified, create it in the current database.
*/
def createTable(
tableDefinition: CatalogTable,
ignoreIfExists: Boolean,
validateLocation: Boolean = true): Unit = {
val isExternal = tableDefinition.tableType == CatalogTableType.EXTERNAL
if (isExternal && tableDefinition.storage.locationUri.isEmpty) {
throw QueryCompilationErrors.createExternalTableWithoutLocationError
}
val db = formatDatabaseName(tableDefinition.identifier.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableDefinition.identifier.table)
val tableIdentifier = TableIdentifier(table, Some(db))
validateName(table)
val newTableDefinition = if (tableDefinition.storage.locationUri.isDefined
&& !tableDefinition.storage.locationUri.get.isAbsolute) {
// make the location of the table qualified.
val qualifiedTableLocation =
makeQualifiedTablePath(tableDefinition.storage.locationUri.get, db)
tableDefinition.copy(
storage = tableDefinition.storage.copy(locationUri = Some(qualifiedTableLocation)),
identifier = tableIdentifier)
} else {
tableDefinition.copy(identifier = tableIdentifier)
}
requireDbExists(db)
if (tableExists(newTableDefinition.identifier)) {
if (!ignoreIfExists) {
throw new TableAlreadyExistsException(db = db, table = table)
}
} else if (validateLocation) {
validateTableLocation(newTableDefinition)
}
externalCatalog.createTable(newTableDefinition, ignoreIfExists)
}
def validateTableLocation(table: CatalogTable): Unit = {
// SPARK-19724: the default location of a managed table should be non-existent or empty.
if (table.tableType == CatalogTableType.MANAGED) {
val tableLocation =
new Path(table.storage.locationUri.getOrElse(defaultTablePath(table.identifier)))
val fs = tableLocation.getFileSystem(hadoopConf)
if (fs.exists(tableLocation) && fs.listStatus(tableLocation).nonEmpty) {
throw QueryCompilationErrors.cannotOperateManagedTableWithExistingLocationError(
"create", table.identifier, tableLocation)
}
}
}
private def makeQualifiedTablePath(locationUri: URI, database: String): URI = {
if (locationUri.isAbsolute) {
locationUri
} else {
val dbName = formatDatabaseName(database)
val dbLocation = makeQualifiedDBPath(getDatabaseMetadata(dbName).locationUri)
new Path(new Path(dbLocation), CatalogUtils.URIToString(locationUri)).toUri
}
}
/**
* Alter the metadata of an existing metastore table identified by `tableDefinition`.
*
* If no database is specified in `tableDefinition`, assume the table is in the
* current database.
*
* Note: If the underlying implementation does not support altering a certain field,
* this becomes a no-op.
*/
def alterTable(tableDefinition: CatalogTable): Unit = {
val db = formatDatabaseName(tableDefinition.identifier.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableDefinition.identifier.table)
val tableIdentifier = TableIdentifier(table, Some(db))
requireDbExists(db)
requireTableExists(tableIdentifier)
val newTableDefinition = if (tableDefinition.storage.locationUri.isDefined
&& !tableDefinition.storage.locationUri.get.isAbsolute) {
// make the location of the table qualified.
val qualifiedTableLocation =
makeQualifiedTablePath(tableDefinition.storage.locationUri.get, db)
tableDefinition.copy(
storage = tableDefinition.storage.copy(locationUri = Some(qualifiedTableLocation)),
identifier = tableIdentifier)
} else {
tableDefinition.copy(identifier = tableIdentifier)
}
externalCatalog.alterTable(newTableDefinition)
}
/**
* Alter the data schema of a table identified by the provided table identifier. The new data
* schema should not have conflict column names with the existing partition columns, and should
* still contain all the existing data columns.
*
* @param identifier TableIdentifier
* @param newDataSchema Updated data schema to be used for the table
*/
def alterTableDataSchema(
identifier: TableIdentifier,
newDataSchema: StructType): Unit = {
val db = formatDatabaseName(identifier.database.getOrElse(getCurrentDatabase))
val table = formatTableName(identifier.table)
val tableIdentifier = TableIdentifier(table, Some(db))
requireDbExists(db)
requireTableExists(tableIdentifier)
val catalogTable = externalCatalog.getTable(db, table)
val oldDataSchema = catalogTable.dataSchema
// not supporting dropping columns yet
val nonExistentColumnNames =
oldDataSchema.map(_.name).filterNot(columnNameResolved(newDataSchema, _))
if (nonExistentColumnNames.nonEmpty) {
throw QueryCompilationErrors.dropNonExistentColumnsNotSupportedError(nonExistentColumnNames)
}
externalCatalog.alterTableDataSchema(db, table, newDataSchema)
}
private def columnNameResolved(schema: StructType, colName: String): Boolean = {
schema.fields.map(_.name).exists(conf.resolver(_, colName))
}
/**
* Alter Spark's statistics of an existing metastore table identified by the provided table
* identifier.
*/
def alterTableStats(identifier: TableIdentifier, newStats: Option[CatalogStatistics]): Unit = {
val db = formatDatabaseName(identifier.database.getOrElse(getCurrentDatabase))
val table = formatTableName(identifier.table)
val tableIdentifier = TableIdentifier(table, Some(db))
requireDbExists(db)
requireTableExists(tableIdentifier)
externalCatalog.alterTableStats(db, table, newStats)
// Invalidate the table relation cache
refreshTable(identifier)
}
/**
* Return whether a table/view with the specified name exists. If no database is specified, check
* with current database.
*/
def tableExists(name: TableIdentifier): Boolean = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
val table = formatTableName(name.table)
externalCatalog.tableExists(db, table)
}
/**
* Retrieve the metadata of an existing permanent table/view. If no database is specified,
* assume the table/view is in the current database.
* We replace char/varchar with "annotated" string type in the table schema, as the query
* engine doesn't support char/varchar yet.
*/
@throws[NoSuchDatabaseException]
@throws[NoSuchTableException]
def getTableMetadata(name: TableIdentifier): CatalogTable = {
val t = getTableRawMetadata(name)
t.copy(schema = CharVarcharUtils.replaceCharVarcharWithStringInSchema(t.schema))
}
/**
* Retrieve the metadata of an existing permanent table/view. If no database is specified,
* assume the table/view is in the current database.
*/
@throws[NoSuchDatabaseException]
@throws[NoSuchTableException]
def getTableRawMetadata(name: TableIdentifier): CatalogTable = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
val table = formatTableName(name.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Some(db)))
externalCatalog.getTable(db, table)
}
/**
* Retrieve all metadata of existing permanent tables/views. If no database is specified,
* assume the table/view is in the current database.
   * Only the tables/views that belong to the same database and can be retrieved are returned.
* For example, if none of the requested tables could be retrieved, an empty list is returned.
* There is no guarantee of ordering of the returned tables.
*/
@throws[NoSuchDatabaseException]
def getTablesByName(names: Seq[TableIdentifier]): Seq[CatalogTable] = {
if (names.nonEmpty) {
val dbs = names.map(_.database.getOrElse(getCurrentDatabase))
if (dbs.distinct.size != 1) {
val tables = names.map(name => formatTableName(name.table))
val qualifiedTableNames = dbs.zip(tables).map { case (d, t) => QualifiedTableName(d, t)}
throw QueryCompilationErrors.cannotRetrieveTableOrViewNotInSameDatabaseError(
qualifiedTableNames)
}
val db = formatDatabaseName(dbs.head)
requireDbExists(db)
val tables = names.map(name => formatTableName(name.table))
externalCatalog.getTablesByName(db, tables)
} else {
Seq.empty
}
}
/**
* Load files stored in given path into an existing metastore table.
* If no database is specified, assume the table is in the current database.
* If the specified table is not found in the database then a [[NoSuchTableException]] is thrown.
*/
def loadTable(
name: TableIdentifier,
loadPath: String,
isOverwrite: Boolean,
isSrcLocal: Boolean): Unit = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
val table = formatTableName(name.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Some(db)))
externalCatalog.loadTable(db, table, loadPath, isOverwrite, isSrcLocal)
}
/**
* Load files stored in given path into the partition of an existing metastore table.
* If no database is specified, assume the table is in the current database.
* If the specified table is not found in the database then a [[NoSuchTableException]] is thrown.
*/
def loadPartition(
name: TableIdentifier,
loadPath: String,
spec: TablePartitionSpec,
isOverwrite: Boolean,
inheritTableSpecs: Boolean,
isSrcLocal: Boolean): Unit = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
val table = formatTableName(name.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Some(db)))
requireNonEmptyValueInPartitionSpec(Seq(spec))
externalCatalog.loadPartition(
db, table, loadPath, spec, isOverwrite, inheritTableSpecs, isSrcLocal)
}
def defaultTablePath(tableIdent: TableIdentifier): URI = {
val dbName = formatDatabaseName(tableIdent.database.getOrElse(getCurrentDatabase))
val dbLocation = getDatabaseMetadata(dbName).locationUri
new Path(new Path(dbLocation), formatTableName(tableIdent.table)).toUri
}
// ----------------------------------------------
// | Methods that interact with temp views only |
// ----------------------------------------------
/**
* Create a local temporary view.
*/
def createTempView(
name: String,
viewDefinition: TemporaryViewRelation,
overrideIfExists: Boolean): Unit = synchronized {
val table = formatTableName(name)
if (tempViews.contains(table) && !overrideIfExists) {
throw new TempTableAlreadyExistsException(name)
}
tempViews.put(table, viewDefinition)
}
/**
* Create a global temporary view.
*/
def createGlobalTempView(
name: String,
viewDefinition: TemporaryViewRelation,
overrideIfExists: Boolean): Unit = {
globalTempViewManager.create(formatTableName(name), viewDefinition, overrideIfExists)
}
/**
* Alter the definition of a local/global temp view matching the given name, returns true if a
* temp view is matched and altered, false otherwise.
*/
def alterTempViewDefinition(
name: TableIdentifier,
viewDefinition: TemporaryViewRelation): Boolean = synchronized {
val viewName = formatTableName(name.table)
if (name.database.isEmpty) {
if (tempViews.contains(viewName)) {
createTempView(viewName, viewDefinition, overrideIfExists = true)
true
} else {
false
}
} else if (formatDatabaseName(name.database.get) == globalTempViewManager.database) {
globalTempViewManager.update(viewName, viewDefinition)
} else {
false
}
}
/**
* Return a local temporary view exactly as it was stored.
*/
def getRawTempView(name: String): Option[TemporaryViewRelation] = synchronized {
tempViews.get(formatTableName(name))
}
/**
* Generate a [[View]] operator from the temporary view stored.
*/
def getTempView(name: String): Option[View] = synchronized {
getRawTempView(name).map(getTempViewPlan)
}
def getTempViewNames(): Seq[String] = synchronized {
tempViews.keySet.toSeq
}
/**
* Return a global temporary view exactly as it was stored.
*/
def getRawGlobalTempView(name: String): Option[TemporaryViewRelation] = {
globalTempViewManager.get(formatTableName(name))
}
/**
* Generate a [[View]] operator from the global temporary view stored.
*/
def getGlobalTempView(name: String): Option[View] = {
getRawGlobalTempView(name).map(getTempViewPlan)
}
/**
* Drop a local temporary view.
*
* Returns true if this view is dropped successfully, false otherwise.
*/
def dropTempView(name: String): Boolean = synchronized {
tempViews.remove(formatTableName(name)).isDefined
}
/**
* Drop a global temporary view.
*
* Returns true if this view is dropped successfully, false otherwise.
*/
def dropGlobalTempView(name: String): Boolean = {
globalTempViewManager.remove(formatTableName(name))
}
// -------------------------------------------------------------
// | Methods that interact with temporary and metastore tables |
// -------------------------------------------------------------
/**
* Retrieve the metadata of an existing temporary view or permanent table/view.
*
* If a database is specified in `name`, this will return the metadata of table/view in that
* database.
* If no database is specified, this will first attempt to get the metadata of a temporary view
* with the same name, then, if that does not exist, return the metadata of table/view in the
* current database.
*/
def getTempViewOrPermanentTableMetadata(name: TableIdentifier): CatalogTable = synchronized {
val table = formatTableName(name.table)
if (name.database.isEmpty) {
tempViews.get(table).map(_.tableMeta).getOrElse(getTableMetadata(name))
} else if (formatDatabaseName(name.database.get) == globalTempViewManager.database) {
globalTempViewManager.get(table).map(_.tableMeta)
.getOrElse(throw new NoSuchTableException(globalTempViewManager.database, table))
} else {
getTableMetadata(name)
}
}
/**
* Rename a table.
*
* If a database is specified in `oldName`, this will rename the table in that database.
* If no database is specified, this will first attempt to rename a temporary view with
* the same name, then, if that does not exist, rename the table in the current database.
*
* This assumes the database specified in `newName` matches the one in `oldName`.
*/
def renameTable(oldName: TableIdentifier, newName: TableIdentifier): Unit = synchronized {
val db = formatDatabaseName(oldName.database.getOrElse(currentDb))
newName.database.map(formatDatabaseName).foreach { newDb =>
if (db != newDb) {
throw QueryCompilationErrors.renameTableSourceAndDestinationMismatchError(db, newDb)
}
}
val oldTableName = formatTableName(oldName.table)
val newTableName = formatTableName(newName.table)
if (db == globalTempViewManager.database) {
globalTempViewManager.rename(oldTableName, newTableName)
} else {
requireDbExists(db)
if (oldName.database.isDefined || !tempViews.contains(oldTableName)) {
requireTableExists(TableIdentifier(oldTableName, Some(db)))
requireTableNotExists(TableIdentifier(newTableName, Some(db)))
validateName(newTableName)
validateNewLocationOfRename(oldName, newName)
externalCatalog.renameTable(db, oldTableName, newTableName)
} else {
if (newName.database.isDefined) {
throw QueryCompilationErrors.cannotRenameTempViewWithDatabaseSpecifiedError(
oldName, newName)
}
if (tempViews.contains(newTableName)) {
throw QueryCompilationErrors.cannotRenameTempViewToExistingTableError(
oldName, newName)
}
val table = tempViews(oldTableName)
tempViews.remove(oldTableName)
tempViews.put(newTableName, table)
}
}
}
/**
* Drop a table.
*
* If a database is specified in `name`, this will drop the table from that database.
* If no database is specified, this will first attempt to drop a temporary view with
* the same name, then, if that does not exist, drop the table from the current database.
*/
def dropTable(
name: TableIdentifier,
ignoreIfNotExists: Boolean,
purge: Boolean): Unit = synchronized {
val db = formatDatabaseName(name.database.getOrElse(currentDb))
val table = formatTableName(name.table)
if (db == globalTempViewManager.database) {
val viewExists = globalTempViewManager.remove(table)
if (!viewExists && !ignoreIfNotExists) {
throw new NoSuchTableException(globalTempViewManager.database, table)
}
} else {
if (name.database.isDefined || !tempViews.contains(table)) {
requireDbExists(db)
        // Drop the table if it exists; when it does not, throw unless ignoreIfNotExists
        // is set. The external catalog is called with ignoreIfNotExists = true because
        // the existence check has already been performed here.
if (tableExists(TableIdentifier(table, Option(db)))) {
externalCatalog.dropTable(db, table, ignoreIfNotExists = true, purge = purge)
} else if (!ignoreIfNotExists) {
throw new NoSuchTableException(db = db, table = table)
}
} else {
tempViews.remove(table)
}
}
}
/**
* Return a [[LogicalPlan]] that represents the given table or view.
*
* If a database is specified in `name`, this will return the table/view from that database.
* If no database is specified, this will first attempt to return a temporary view with
* the same name, then, if that does not exist, return the table/view from the current database.
*
* Note that, the global temp view database is also valid here, this will return the global temp
* view matching the given name.
*
* If the relation is a view, we generate a [[View]] operator from the view description, and
* wrap the logical plan in a [[SubqueryAlias]] which will track the name of the view.
* [[SubqueryAlias]] will also keep track of the name and database(optional) of the table/view
*
* @param name The name of the table/view that we look up.
*/
def lookupRelation(name: TableIdentifier): LogicalPlan = {
synchronized {
val db = formatDatabaseName(name.database.getOrElse(currentDb))
val table = formatTableName(name.table)
if (db == globalTempViewManager.database) {
globalTempViewManager.get(table).map { viewDef =>
SubqueryAlias(table, db, getTempViewPlan(viewDef))
}.getOrElse(throw new NoSuchTableException(db, table))
} else if (name.database.isDefined || !tempViews.contains(table)) {
val metadata = externalCatalog.getTable(db, table)
getRelation(metadata)
} else {
SubqueryAlias(table, getTempViewPlan(tempViews(table)))
}
}
}
def getRelation(
metadata: CatalogTable,
options: CaseInsensitiveStringMap = CaseInsensitiveStringMap.empty()): LogicalPlan = {
val name = metadata.identifier
val db = formatDatabaseName(name.database.getOrElse(currentDb))
val table = formatTableName(name.table)
val multiParts = Seq(CatalogManager.SESSION_CATALOG_NAME, db, table)
if (metadata.tableType == CatalogTableType.VIEW) {
// The relation is a view, so we wrap the relation by:
// 1. Add a [[View]] operator over the relation to keep track of the view desc;
// 2. Wrap the logical plan in a [[SubqueryAlias]] which tracks the name of the view.
SubqueryAlias(multiParts, fromCatalogTable(metadata, isTempView = false))
} else {
SubqueryAlias(multiParts, UnresolvedCatalogRelation(metadata, options))
}
}
private def getTempViewPlan(viewInfo: TemporaryViewRelation): View = viewInfo.plan match {
case Some(p) => View(desc = viewInfo.tableMeta, isTempView = true, child = p)
case None => fromCatalogTable(viewInfo.tableMeta, isTempView = true)
}
private def buildViewDDL(metadata: CatalogTable, isTempView: Boolean): Option[String] = {
if (isTempView) {
None
} else {
val viewName = metadata.identifier.unquotedString
val viewText = metadata.viewText.get
val userSpecifiedColumns =
if (metadata.schema.fieldNames.toSeq == metadata.viewQueryColumnNames) {
""
} else {
s"(${metadata.schema.fieldNames.mkString(", ")})"
}
Some(s"CREATE OR REPLACE VIEW $viewName $userSpecifiedColumns AS $viewText")
}
}
private def fromCatalogTable(metadata: CatalogTable, isTempView: Boolean): View = {
val viewText = metadata.viewText.getOrElse {
throw new IllegalStateException("Invalid view without text.")
}
val viewConfigs = metadata.viewSQLConfigs
val parsedPlan = SQLConf.withExistingConf(View.effectiveSQLConf(viewConfigs, isTempView)) {
parser.parsePlan(viewText)
}
val viewColumnNames = if (metadata.viewQueryColumnNames.isEmpty) {
      // For views created before Spark 2.2.0, the view text is already fully qualified, and the
      // plan output is the same as the view output.
metadata.schema.fieldNames.toSeq
} else {
assert(metadata.viewQueryColumnNames.length == metadata.schema.length)
metadata.viewQueryColumnNames
}
// For view queries like `SELECT * FROM t`, the schema of the referenced table/view may
// change after the view has been created. We need to add an extra SELECT to pick the columns
// according to the recorded column names (to get the correct view column ordering and omit
// the extra columns that we don't require), with UpCast (to make sure the type change is
// safe) and Alias (to respect user-specified view column names) according to the view schema
// in the catalog.
// Note that, the column names may have duplication, e.g. `CREATE VIEW v(x, y) AS
// SELECT 1 col, 2 col`. We need to make sure that the matching attributes have the same
// number of duplications, and pick the corresponding attribute by ordinal.
val viewConf = View.effectiveSQLConf(metadata.viewSQLConfigs, isTempView)
val normalizeColName: String => String = if (viewConf.caseSensitiveAnalysis) {
identity
} else {
_.toLowerCase(Locale.ROOT)
}
val nameToCounts = viewColumnNames.groupBy(normalizeColName).mapValues(_.length)
val nameToCurrentOrdinal = scala.collection.mutable.HashMap.empty[String, Int]
val viewDDL = buildViewDDL(metadata, isTempView)
val projectList = viewColumnNames.zip(metadata.schema).map { case (name, field) =>
val normalizedName = normalizeColName(name)
val count = nameToCounts(normalizedName)
val ordinal = nameToCurrentOrdinal.getOrElse(normalizedName, 0)
nameToCurrentOrdinal(normalizedName) = ordinal + 1
val col = GetViewColumnByNameAndOrdinal(
metadata.identifier.toString, name, ordinal, count, viewDDL)
Alias(UpCast(col, field.dataType), field.name)(explicitMetadata = Some(field.metadata))
}
View(desc = metadata, isTempView = isTempView, child = Project(projectList, parsedPlan))
}
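  // Illustrative sketch of the ordinal bookkeeping above (hypothetical view): for
  // `CREATE VIEW v(x, y) AS SELECT 1 col, 2 col`, viewColumnNames is Seq("col", "col"), so
  // nameToCounts("col") == 2 and the projection becomes
  // GetViewColumnByNameAndOrdinal(v, "col", ordinal = 0, count = 2, ...) aliased to `x` and
  // GetViewColumnByNameAndOrdinal(v, "col", ordinal = 1, count = 2, ...) aliased to `y`.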
def lookupTempView(table: String): Option[SubqueryAlias] = {
val formattedTable = formatTableName(table)
getTempView(formattedTable).map { view =>
SubqueryAlias(formattedTable, view)
}
}
def lookupGlobalTempView(db: String, table: String): Option[SubqueryAlias] = {
val formattedDB = formatDatabaseName(db)
if (formattedDB == globalTempViewManager.database) {
val formattedTable = formatTableName(table)
getGlobalTempView(formattedTable).map { view =>
SubqueryAlias(formattedTable, formattedDB, view)
}
} else {
None
}
}
/**
* Return whether the given name parts belong to a temporary or global temporary view.
*/
def isTempView(nameParts: Seq[String]): Boolean = {
if (nameParts.length > 2) return false
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
isTempView(nameParts.asTableIdentifier)
}
def lookupTempView(name: TableIdentifier): Option[View] = {
val tableName = formatTableName(name.table)
if (name.database.isEmpty) {
tempViews.get(tableName).map(getTempViewPlan)
} else if (formatDatabaseName(name.database.get) == globalTempViewManager.database) {
globalTempViewManager.get(tableName).map(getTempViewPlan)
} else {
None
}
}
/**
* Return whether a table with the specified name is a temporary view.
*
* Note: The temporary view cache is checked only when database is not
* explicitly specified.
*/
def isTempView(name: TableIdentifier): Boolean = synchronized {
lookupTempView(name).isDefined
}
def isView(nameParts: Seq[String]): Boolean = {
nameParts.length <= 2 && {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
val ident = nameParts.asTableIdentifier
try {
getTempViewOrPermanentTableMetadata(ident).tableType == CatalogTableType.VIEW
} catch {
case _: NoSuchTableException => false
case _: NoSuchDatabaseException => false
case _: NoSuchNamespaceException => false
}
}
}
/**
* List all tables in the specified database, including local temporary views.
*
* Note that, if the specified database is global temporary view database, we will list global
* temporary views.
*/
def listTables(db: String): Seq[TableIdentifier] = listTables(db, "*")
/**
* List all matching tables in the specified database, including local temporary views.
*
* Note that, if the specified database is global temporary view database, we will list global
* temporary views.
*/
def listTables(db: String, pattern: String): Seq[TableIdentifier] = listTables(db, pattern, true)
/**
* List all matching tables in the specified database, including local temporary views
* if includeLocalTempViews is enabled.
*
* Note that, if the specified database is global temporary view database, we will list global
* temporary views.
*/
def listTables(
db: String,
pattern: String,
includeLocalTempViews: Boolean): Seq[TableIdentifier] = {
val dbName = formatDatabaseName(db)
val dbTables = if (dbName == globalTempViewManager.database) {
globalTempViewManager.listViewNames(pattern).map { name =>
TableIdentifier(name, Some(globalTempViewManager.database))
}
} else {
requireDbExists(dbName)
externalCatalog.listTables(dbName, pattern).map { name =>
TableIdentifier(name, Some(dbName))
}
}
if (includeLocalTempViews) {
dbTables ++ listLocalTempViews(pattern)
} else {
dbTables
}
}
/**
* List all matching views in the specified database, including local temporary views.
*/
def listViews(db: String, pattern: String): Seq[TableIdentifier] = {
val dbName = formatDatabaseName(db)
val dbViews = if (dbName == globalTempViewManager.database) {
globalTempViewManager.listViewNames(pattern).map { name =>
TableIdentifier(name, Some(globalTempViewManager.database))
}
} else {
requireDbExists(dbName)
externalCatalog.listViews(dbName, pattern).map { name =>
TableIdentifier(name, Some(dbName))
}
}
dbViews ++ listLocalTempViews(pattern)
}
/**
* List all matching local temporary views.
*/
def listLocalTempViews(pattern: String): Seq[TableIdentifier] = {
synchronized {
StringUtils.filterPattern(tempViews.keys.toSeq, pattern).map { name =>
TableIdentifier(name)
}
}
}
/**
* Refresh table entries in structures maintained by the session catalog such as:
* - The map of temporary or global temporary view names to their logical plans
* - The relation cache which maps table identifiers to their logical plans
*
* For temp views, it refreshes their logical plans, and as a consequence of that it can refresh
* the file indexes of the base relations (`HadoopFsRelation` for instance) used in the views.
* The method still keeps the views in the internal lists of session catalog.
*
* For tables/views, it removes their entries from the relation cache.
*
   * The method is supposed to be used in the following situations:
   *   1. The logical plan of a table/view was changed, and cached table/view data is cleared
   *      explicitly. For example, like in `AlterTableRenameCommand` which re-caches the table
   *      itself. Otherwise, if you need to refresh cached data, consider using
   *      `CatalogImpl.refreshTable()`.
   *   2. A table/view doesn't exist, and we only need to remove its entry in the relation cache
   *      since the cached data is invalidated explicitly like in `DropTableCommand`, which
   *      uncaches table/view data itself.
* 3. Meta-data (such as file indexes) of any relation used in a temporary view should be
* updated.
*/
def refreshTable(name: TableIdentifier): Unit = synchronized {
lookupTempView(name).map(_.refresh).getOrElse {
val dbName = formatDatabaseName(name.database.getOrElse(currentDb))
val tableName = formatTableName(name.table)
val qualifiedTableName = QualifiedTableName(dbName, tableName)
tableRelationCache.invalidate(qualifiedTableName)
}
}
/**
* Drop all existing temporary views.
* For testing only.
*/
def clearTempTables(): Unit = synchronized {
tempViews.clear()
}
// ----------------------------------------------------------------------------
// Partitions
// ----------------------------------------------------------------------------
// All methods in this category interact directly with the underlying catalog.
// These methods are concerned with only metastore tables.
// ----------------------------------------------------------------------------
// TODO: We need to figure out how these methods interact with our data source
// tables. For such tables, we do not store values of partitioning columns in
// the metastore. For now, partition values of a data source table will be
// automatically discovered when we load the table.
/**
* Create partitions in an existing table, assuming it exists.
* If no database is specified, assume the table is in the current database.
*/
def createPartitions(
tableName: TableIdentifier,
parts: Seq[CatalogTablePartition],
ignoreIfExists: Boolean): Unit = {
val db = formatDatabaseName(tableName.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableName.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Option(db)))
requireExactMatchedPartitionSpec(parts.map(_.spec), getTableMetadata(tableName))
requireNonEmptyValueInPartitionSpec(parts.map(_.spec))
externalCatalog.createPartitions(
db, table, partitionWithQualifiedPath(tableName, parts), ignoreIfExists)
}
/**
* Drop partitions from a table, assuming they exist.
* If no database is specified, assume the table is in the current database.
*/
def dropPartitions(
tableName: TableIdentifier,
specs: Seq[TablePartitionSpec],
ignoreIfNotExists: Boolean,
purge: Boolean,
retainData: Boolean): Unit = {
val db = formatDatabaseName(tableName.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableName.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Option(db)))
requirePartialMatchedPartitionSpec(specs, getTableMetadata(tableName))
requireNonEmptyValueInPartitionSpec(specs)
externalCatalog.dropPartitions(db, table, specs, ignoreIfNotExists, purge, retainData)
}
/**
* Override the specs of one or many existing table partitions, assuming they exist.
*
* This assumes index i of `specs` corresponds to index i of `newSpecs`.
* If no database is specified, assume the table is in the current database.
*/
def renamePartitions(
tableName: TableIdentifier,
specs: Seq[TablePartitionSpec],
newSpecs: Seq[TablePartitionSpec]): Unit = {
val tableMetadata = getTableMetadata(tableName)
val db = formatDatabaseName(tableName.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableName.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Option(db)))
requireExactMatchedPartitionSpec(specs, tableMetadata)
requireExactMatchedPartitionSpec(newSpecs, tableMetadata)
requireNonEmptyValueInPartitionSpec(specs)
requireNonEmptyValueInPartitionSpec(newSpecs)
externalCatalog.renamePartitions(db, table, specs, newSpecs)
}
/**
   * Alter one or many table partitions whose specs match those specified in `parts`,
* assuming the partitions exist.
*
* If no database is specified, assume the table is in the current database.
*
* Note: If the underlying implementation does not support altering a certain field,
* this becomes a no-op.
*/
def alterPartitions(tableName: TableIdentifier, parts: Seq[CatalogTablePartition]): Unit = {
val db = formatDatabaseName(tableName.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableName.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Option(db)))
requireExactMatchedPartitionSpec(parts.map(_.spec), getTableMetadata(tableName))
requireNonEmptyValueInPartitionSpec(parts.map(_.spec))
externalCatalog.alterPartitions(db, table, partitionWithQualifiedPath(tableName, parts))
}
/**
* Retrieve the metadata of a table partition, assuming it exists.
* If no database is specified, assume the table is in the current database.
*/
def getPartition(tableName: TableIdentifier, spec: TablePartitionSpec): CatalogTablePartition = {
val db = formatDatabaseName(tableName.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableName.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Option(db)))
requireExactMatchedPartitionSpec(Seq(spec), getTableMetadata(tableName))
requireNonEmptyValueInPartitionSpec(Seq(spec))
externalCatalog.getPartition(db, table, spec)
}
/**
* List the names of all partitions that belong to the specified table, assuming it exists.
*
* A partial partition spec may optionally be provided to filter the partitions returned.
* For instance, if there exist partitions (a='1', b='2'), (a='1', b='3') and (a='2', b='4'),
* then a partial spec of (a='1') will return the first two only.
*/
def listPartitionNames(
tableName: TableIdentifier,
partialSpec: Option[TablePartitionSpec] = None): Seq[String] = {
val db = formatDatabaseName(tableName.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableName.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Option(db)))
partialSpec.foreach { spec =>
requirePartialMatchedPartitionSpec(Seq(spec), getTableMetadata(tableName))
requireNonEmptyValueInPartitionSpec(Seq(spec))
}
externalCatalog.listPartitionNames(db, table, partialSpec)
}
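  // Usage sketch (hypothetical table): with partitions (a='1', b='2'), (a='1', b='3') and
  // (a='2', b='4'), listPartitionNames(ident, Some(Map("a" -> "1"))) returns only the first
  // two names, e.g. Seq("a=1/b=2", "a=1/b=3") in the Hive-style name encoding.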
/**
* List the metadata of all partitions that belong to the specified table, assuming it exists.
*
* A partial partition spec may optionally be provided to filter the partitions returned.
* For instance, if there exist partitions (a='1', b='2'), (a='1', b='3') and (a='2', b='4'),
* then a partial spec of (a='1') will return the first two only.
*/
def listPartitions(
tableName: TableIdentifier,
partialSpec: Option[TablePartitionSpec] = None): Seq[CatalogTablePartition] = {
val db = formatDatabaseName(tableName.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableName.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Option(db)))
partialSpec.foreach { spec =>
requirePartialMatchedPartitionSpec(Seq(spec), getTableMetadata(tableName))
requireNonEmptyValueInPartitionSpec(Seq(spec))
}
externalCatalog.listPartitions(db, table, partialSpec)
}
/**
* List the metadata of partitions that belong to the specified table, assuming it exists, that
* satisfy the given partition-pruning predicate expressions.
*/
def listPartitionsByFilter(
tableName: TableIdentifier,
predicates: Seq[Expression]): Seq[CatalogTablePartition] = {
val db = formatDatabaseName(tableName.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableName.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Option(db)))
externalCatalog.listPartitionsByFilter(db, table, predicates, conf.sessionLocalTimeZone)
}
/**
* Verify if the input partition spec has any empty value.
*/
private def requireNonEmptyValueInPartitionSpec(specs: Seq[TablePartitionSpec]): Unit = {
specs.foreach { s =>
if (s.values.exists(v => v != null && v.isEmpty)) {
val spec = s.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
throw QueryCompilationErrors.invalidPartitionSpecError(
s"The spec ($spec) contains an empty partition column value")
}
}
}
/**
   * Verify if the input partition spec exactly matches the existing defined partition spec.
   * The columns must be the same but the order may differ.
*/
private def requireExactMatchedPartitionSpec(
specs: Seq[TablePartitionSpec],
table: CatalogTable): Unit = {
specs.foreach { spec =>
PartitioningUtils.requireExactMatchedPartitionSpec(
table.identifier.toString,
spec,
table.partitionColumnNames)
}
}
/**
   * Verify if the input partition spec partially matches the existing defined partition spec.
   * That is, the columns of the partition spec should be part of the defined partition spec.
*/
private def requirePartialMatchedPartitionSpec(
specs: Seq[TablePartitionSpec],
table: CatalogTable): Unit = {
val defined = table.partitionColumnNames
specs.foreach { s =>
if (!s.keys.forall(defined.contains)) {
throw QueryCompilationErrors.invalidPartitionSpecError(
s"The spec (${s.keys.mkString(", ")}) must be contained " +
s"within the partition spec (${table.partitionColumnNames.mkString(", ")}) defined " +
s"in table '${table.identifier}'")
}
}
}
/**
* Make the partition path qualified.
* If the partition path is relative, e.g. 'paris', it will be qualified with
* parent path using table location, e.g. 'file:/warehouse/table/paris'
*/
private def partitionWithQualifiedPath(
tableIdentifier: TableIdentifier,
parts: Seq[CatalogTablePartition]): Seq[CatalogTablePartition] = {
lazy val tbl = getTableMetadata(tableIdentifier)
parts.map { part =>
if (part.storage.locationUri.isDefined && !part.storage.locationUri.get.isAbsolute) {
val partPath = new Path(new Path(tbl.location), new Path(part.storage.locationUri.get))
val qualifiedPartPath = makeQualifiedPath(CatalogUtils.stringToURI(partPath.toString))
part.copy(storage = part.storage.copy(locationUri = Some(qualifiedPartPath)))
} else part
}
}
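  // Example from the doc above (hypothetical locations): a relative partition location
  // 'paris' on a table stored at 'file:/warehouse/table' is qualified to
  // 'file:/warehouse/table/paris'; absolute locations pass through unchanged.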
// ----------------------------------------------------------------------------
// Functions
// ----------------------------------------------------------------------------
// There are two kinds of functions, temporary functions and metastore
// functions (permanent UDFs). Temporary functions are isolated across
// sessions. Metastore functions can be used across multiple sessions as
// their metadata is persisted in the underlying catalog.
// ----------------------------------------------------------------------------
// -------------------------------------------------------
// | Methods that interact with metastore functions only |
// -------------------------------------------------------
/**
* Create a function in the database specified in `funcDefinition`.
* If no such database is specified, create it in the current database.
*
   * @param ignoreIfExists When true, ignore if a function with the specified name already
   *                       exists in the specified database.
*/
def createFunction(funcDefinition: CatalogFunction, ignoreIfExists: Boolean): Unit = {
val db = formatDatabaseName(funcDefinition.identifier.database.getOrElse(getCurrentDatabase))
requireDbExists(db)
val identifier = FunctionIdentifier(funcDefinition.identifier.funcName, Some(db))
val newFuncDefinition = funcDefinition.copy(identifier = identifier)
if (!functionExists(identifier)) {
externalCatalog.createFunction(db, newFuncDefinition)
} else if (!ignoreIfExists) {
throw new FunctionAlreadyExistsException(db = db, func = identifier.toString)
}
}
/**
* Drop a metastore function.
* If no database is specified, assume the function is in the current database.
*/
def dropFunction(name: FunctionIdentifier, ignoreIfNotExists: Boolean): Unit = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
requireDbExists(db)
val identifier = name.copy(database = Some(db))
if (functionExists(identifier)) {
if (functionRegistry.functionExists(identifier)) {
// If we have loaded this function into the FunctionRegistry,
// also drop it from there.
// For a permanent function, because we loaded it to the FunctionRegistry
// when it's first used, we also need to drop it from the FunctionRegistry.
functionRegistry.dropFunction(identifier)
}
externalCatalog.dropFunction(db, name.funcName)
} else if (!ignoreIfNotExists) {
throw new NoSuchPermanentFunctionException(db = db, func = identifier.toString)
}
}
/**
   * Overwrite a metastore function in the database specified in `funcDefinition`.
* If no database is specified, assume the function is in the current database.
*/
def alterFunction(funcDefinition: CatalogFunction): Unit = {
val db = formatDatabaseName(funcDefinition.identifier.database.getOrElse(getCurrentDatabase))
requireDbExists(db)
val identifier = FunctionIdentifier(funcDefinition.identifier.funcName, Some(db))
val newFuncDefinition = funcDefinition.copy(identifier = identifier)
if (functionExists(identifier)) {
if (functionRegistry.functionExists(identifier)) {
// If we have loaded this function into the FunctionRegistry,
// also drop it from there.
// For a permanent function, because we loaded it to the FunctionRegistry
// when it's first used, we also need to drop it from the FunctionRegistry.
functionRegistry.dropFunction(identifier)
}
externalCatalog.alterFunction(db, newFuncDefinition)
} else {
throw new NoSuchPermanentFunctionException(db = db, func = identifier.toString)
}
}
/**
* Retrieve the metadata of a metastore function.
*
* If a database is specified in `name`, this will return the function in that database.
* If no database is specified, this will return the function in the current database.
*/
def getFunctionMetadata(name: FunctionIdentifier): CatalogFunction = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
requireDbExists(db)
externalCatalog.getFunction(db, name.funcName)
}
/**
* Check if the function with the specified name exists
*/
def functionExists(name: FunctionIdentifier): Boolean = {
functionRegistry.functionExists(name) || tableFunctionRegistry.functionExists(name) || {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
requireDbExists(db)
externalCatalog.functionExists(db, name.funcName)
}
}
// ----------------------------------------------------------------
// | Methods that interact with temporary and metastore functions |
// ----------------------------------------------------------------
/**
* Constructs a [[FunctionBuilder]] based on the provided class that represents a function.
*/
private def makeFunctionBuilder(name: String, functionClassName: String): FunctionBuilder = {
val clazz = Utils.classForName(functionClassName)
(input: Seq[Expression]) => makeFunctionExpression(name, clazz, input)
}
/**
   * Constructs an [[Expression]] based on the provided class that represents a function.
*
* This performs reflection to decide what type of [[Expression]] to return in the builder.
*/
protected def makeFunctionExpression(
name: String,
clazz: Class[_],
input: Seq[Expression]): Expression = {
// Unfortunately we need to use reflection here because UserDefinedAggregateFunction
// and ScalaUDAF are defined in sql/core module.
val clsForUDAF =
Utils.classForName("org.apache.spark.sql.expressions.UserDefinedAggregateFunction")
if (clsForUDAF.isAssignableFrom(clazz)) {
val cls = Utils.classForName("org.apache.spark.sql.execution.aggregate.ScalaUDAF")
val e = cls.getConstructor(
classOf[Seq[Expression]], clsForUDAF, classOf[Int], classOf[Int], classOf[Option[String]])
.newInstance(
input,
clazz.getConstructor().newInstance().asInstanceOf[Object],
Int.box(1),
Int.box(1),
Some(name))
.asInstanceOf[ImplicitCastInputTypes]
// Check input argument size
if (e.inputTypes.size != input.size) {
throw QueryCompilationErrors.invalidFunctionArgumentsError(
name, e.inputTypes.size.toString, input.size)
}
e
} else {
throw QueryCompilationErrors.noHandlerForUDAFError(clazz.getCanonicalName)
}
}
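  // Sketch of the reflective construction above: for a hypothetical user class
  // `class MyUdaf extends UserDefinedAggregateFunction`, the builder effectively evaluates
  // ScalaUDAF(input, new MyUdaf, 1, 1, Some(name)) through the five-argument constructor and
  // then checks that e.inputTypes.size matches the number of arguments supplied.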
/**
* Loads resources such as JARs and Files for a function. Every resource is represented
* by a tuple (resource type, resource uri).
*/
def loadFunctionResources(resources: Seq[FunctionResource]): Unit = {
resources.foreach(functionResourceLoader.loadResource)
}
/**
* Registers a temporary or permanent function into a session-specific [[FunctionRegistry]]
*/
def registerFunction(
funcDefinition: CatalogFunction,
overrideIfExists: Boolean,
functionBuilder: Option[FunctionBuilder] = None): Unit = {
val func = funcDefinition.identifier
if (functionRegistry.functionExists(func) && !overrideIfExists) {
throw QueryCompilationErrors.functionAlreadyExistsError(func)
}
val info = new ExpressionInfo(
funcDefinition.className,
func.database.orNull,
func.funcName,
null,
"",
"",
"",
"",
"",
"",
"hive")
val builder =
functionBuilder.getOrElse {
val className = funcDefinition.className
if (!Utils.classIsLoadable(className)) {
throw QueryCompilationErrors.cannotLoadClassWhenRegisteringFunctionError(className, func)
}
makeFunctionBuilder(func.unquotedString, className)
}
functionRegistry.registerFunction(func, info, builder)
}
/**
* Unregister a temporary or permanent function from a session-specific [[FunctionRegistry]]
* Return true if function exists.
*/
def unregisterFunction(name: FunctionIdentifier): Boolean = {
functionRegistry.dropFunction(name)
}
/**
* Drop a temporary function.
*/
def dropTempFunction(name: String, ignoreIfNotExists: Boolean): Unit = {
if (!functionRegistry.dropFunction(FunctionIdentifier(name)) &&
!tableFunctionRegistry.dropFunction(FunctionIdentifier(name)) &&
!ignoreIfNotExists) {
throw new NoSuchTempFunctionException(name)
}
}
/**
   * Returns whether the given function is a temporary function. Returns false if it does not exist.
*/
def isTemporaryFunction(name: FunctionIdentifier): Boolean = {
// copied from HiveSessionCatalog
val hiveFunctions = Seq("histogram_numeric")
// A temporary function is a function that has been registered in functionRegistry
// without a database name, and is neither a built-in function nor a Hive function
name.database.isEmpty &&
(functionRegistry.functionExists(name) || tableFunctionRegistry.functionExists(name)) &&
!FunctionRegistry.builtin.functionExists(name) &&
!TableFunctionRegistry.builtin.functionExists(name) &&
!hiveFunctions.contains(name.funcName.toLowerCase(Locale.ROOT))
}
def isTempFunction(name: String): Boolean = {
isTemporaryFunction(FunctionIdentifier(name))
}
/**
* Return whether this function has been registered in the function registry of the current
   * session. Returns false if it does not exist.
*/
def isRegisteredFunction(name: FunctionIdentifier): Boolean = {
functionRegistry.functionExists(name) || tableFunctionRegistry.functionExists(name)
}
/**
   * Returns whether the given function is a persistent function. Returns false if it does not exist.
*/
def isPersistentFunction(name: FunctionIdentifier): Boolean = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
databaseExists(db) && externalCatalog.functionExists(db, name.funcName)
}
protected[sql] def failFunctionLookup(
name: FunctionIdentifier, cause: Option[Throwable] = None): Nothing = {
throw new NoSuchFunctionException(
db = name.database.getOrElse(getCurrentDatabase), func = name.funcName, cause)
}
/**
* Look up the [[ExpressionInfo]] associated with the specified function, assuming it exists.
*/
def lookupFunctionInfo(name: FunctionIdentifier): ExpressionInfo = synchronized {
// TODO: just make function registry take in FunctionIdentifier instead of duplicating this
val database = name.database.orElse(Some(currentDb)).map(formatDatabaseName)
val qualifiedName = name.copy(database = database)
functionRegistry.lookupFunction(name)
.orElse(functionRegistry.lookupFunction(qualifiedName))
.orElse(tableFunctionRegistry.lookupFunction(name))
.getOrElse {
val db = qualifiedName.database.get
requireDbExists(db)
if (externalCatalog.functionExists(db, name.funcName)) {
val metadata = externalCatalog.getFunction(db, name.funcName)
new ExpressionInfo(
metadata.className,
qualifiedName.database.orNull,
qualifiedName.identifier,
null,
"",
"",
"",
"",
"",
"",
"hive")
} else {
failFunctionLookup(name)
}
}
}
/**
* Look up a specific function, assuming it exists.
*
* For a temporary function or a permanent function that has been loaded,
* this method will simply lookup the function through the
* FunctionRegistry and create an expression based on the builder.
*
* For a permanent function that has not been loaded, we will first fetch its metadata
* from the underlying external catalog. Then, we will load all resources associated
* with this function (i.e. jars and files). Finally, we create a function builder
* based on the function class and put the builder into the FunctionRegistry.
* The name of this function in the FunctionRegistry will be `databaseName.functionName`.
*/
private def lookupFunction[T](
name: FunctionIdentifier,
children: Seq[Expression],
registry: FunctionRegistryBase[T]): T = synchronized {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.MultipartIdentifierHelper
// Note: the implementation of this function is a little bit convoluted.
// We probably shouldn't use a single FunctionRegistry to register all three kinds of functions
// (built-in, temp, and external).
if (name.database.isEmpty && registry.functionExists(name)) {
val referredTempFunctionNames = AnalysisContext.get.referredTempFunctionNames
val isResolvingView = AnalysisContext.get.catalogAndNamespace.nonEmpty
// Lookup the function as a temporary or a built-in function (i.e. without database) and
// 1. if we are not resolving view, we don't care about the function type and just return it.
// 2. if we are resolving view, only return a temp function if it's referred by this view.
if (!isResolvingView ||
!isTemporaryFunction(name) ||
referredTempFunctionNames.contains(name.funcName)) {
// This function has been already loaded into the function registry.
return registry.lookupFunction(name, children)
}
}
// Get the database from AnalysisContext if it's defined, otherwise, use current database
val currentDatabase = AnalysisContext.get.catalogAndNamespace match {
case Seq() => getCurrentDatabase
case Seq(_, db) => db
case Seq(catalog, namespace @ _*) =>
throw new IllegalStateException(s"[BUG] unexpected v2 catalog: $catalog, and " +
s"namespace: ${namespace.quoted} in v1 function lookup")
}
// If the name itself is not qualified, add the current database to it.
val database = formatDatabaseName(name.database.getOrElse(currentDatabase))
val qualifiedName = name.copy(database = Some(database))
if (registry.functionExists(qualifiedName)) {
// This function has been already loaded into the function registry.
// Unlike the above block, we find this function by using the qualified name.
return registry.lookupFunction(qualifiedName, children)
}
// The function has not been loaded to the function registry, which means
// that the function is a permanent function (if it actually has been registered
// in the metastore). We need to first put the function in the FunctionRegistry.
// TODO: why not just check whether the function exists first?
val catalogFunction = try {
externalCatalog.getFunction(database, name.funcName)
} catch {
case _: AnalysisException => failFunctionLookup(name)
}
loadFunctionResources(catalogFunction.resources)
    // Please note that qualifiedName is provided by the user. However,
    // catalogFunction.identifier.unquotedString is returned by the underlying
    // catalog. So, it is possible that qualifiedName is not exactly the same as
    // catalogFunction.identifier.unquotedString (the difference is in case-sensitivity).
    // Here, we preserve the input from the user.
registerFunction(catalogFunction.copy(identifier = qualifiedName), overrideIfExists = false)
// Now, we need to create the Expression.
registry.lookupFunction(qualifiedName, children)
}
/**
* Return an [[Expression]] that represents the specified function, assuming it exists.
*/
def lookupFunction(name: FunctionIdentifier, children: Seq[Expression]): Expression = {
lookupFunction[Expression](name, children, functionRegistry)
}
/**
* Return a [[LogicalPlan]] that represents the specified function, assuming it exists.
*/
def lookupTableFunction(name: FunctionIdentifier, children: Seq[Expression]): LogicalPlan = {
lookupFunction[LogicalPlan](name, children, tableFunctionRegistry)
}
/**
* List all functions in the specified database, including temporary functions. This
* returns the function identifier and the scope in which it was defined (system or user
* defined).
*/
def listFunctions(db: String): Seq[(FunctionIdentifier, String)] = listFunctions(db, "*")
/**
* List all matching functions in the specified database, including temporary functions. This
* returns the function identifier and the scope in which it was defined (system or user
* defined).
*/
def listFunctions(db: String, pattern: String): Seq[(FunctionIdentifier, String)] = {
val dbName = formatDatabaseName(db)
requireDbExists(dbName)
val dbFunctions = externalCatalog.listFunctions(dbName, pattern).map { f =>
FunctionIdentifier(f, Some(dbName)) }
val loadedFunctions = StringUtils
.filterPattern(
(functionRegistry.listFunction() ++ tableFunctionRegistry.listFunction())
.map(_.unquotedString), pattern).map { f =>
// In functionRegistry, function names are stored as an unquoted format.
Try(parser.parseFunctionIdentifier(f)) match {
case Success(e) => e
case Failure(_) =>
// The names of some built-in functions are not parsable by our parser, e.g., %
FunctionIdentifier(f)
}
}
val functions = dbFunctions ++ loadedFunctions
// The session catalog caches some persistent functions in the FunctionRegistry
// so there can be duplicates.
functions.map {
case f if FunctionRegistry.functionSet.contains(f) => (f, "SYSTEM")
case f if TableFunctionRegistry.functionSet.contains(f) => (f, "SYSTEM")
case f => (f, "USER")
}.distinct
}
// -----------------
// | Other methods |
// -----------------
/**
* Drop all existing databases (except "default"), tables, partitions and functions,
* and set the current database to "default".
*
* This is mainly used for tests.
*/
def reset(): Unit = synchronized {
setCurrentDatabase(DEFAULT_DATABASE)
externalCatalog.setCurrentDatabase(DEFAULT_DATABASE)
listDatabases().filter(_ != DEFAULT_DATABASE).foreach { db =>
dropDatabase(db, ignoreIfNotExists = false, cascade = true)
}
listTables(DEFAULT_DATABASE).foreach { table =>
dropTable(table, ignoreIfNotExists = false, purge = false)
}
listFunctions(DEFAULT_DATABASE).map(_._1).foreach { func =>
if (func.database.isDefined) {
dropFunction(func, ignoreIfNotExists = false)
} else {
dropTempFunction(func.funcName, ignoreIfNotExists = false)
}
}
clearTempTables()
globalTempViewManager.clear()
functionRegistry.clear()
tableFunctionRegistry.clear()
tableRelationCache.invalidateAll()
// restore built-in functions
FunctionRegistry.builtin.listFunction().foreach { f =>
val expressionInfo = FunctionRegistry.builtin.lookupFunction(f)
val functionBuilder = FunctionRegistry.builtin.lookupFunctionBuilder(f)
require(expressionInfo.isDefined, s"built-in function '$f' is missing expression info")
require(functionBuilder.isDefined, s"built-in function '$f' is missing function builder")
functionRegistry.registerFunction(f, expressionInfo.get, functionBuilder.get)
}
// restore built-in table functions
TableFunctionRegistry.builtin.listFunction().foreach { f =>
val expressionInfo = TableFunctionRegistry.builtin.lookupFunction(f)
val functionBuilder = TableFunctionRegistry.builtin.lookupFunctionBuilder(f)
require(expressionInfo.isDefined, s"built-in function '$f' is missing expression info")
require(functionBuilder.isDefined, s"built-in function '$f' is missing function builder")
tableFunctionRegistry.registerFunction(f, expressionInfo.get, functionBuilder.get)
}
}
/**
* Copy the current state of the catalog to another catalog.
*
* This function is synchronized on this [[SessionCatalog]] (the source) to make sure the copied
* state is consistent. The target [[SessionCatalog]] is not synchronized, and should not be
* because the target [[SessionCatalog]] should not be published at this point. The caller must
* synchronize on the target if this assumption does not hold.
*/
private[sql] def copyStateTo(target: SessionCatalog): Unit = synchronized {
target.currentDb = currentDb
// copy over temporary views
tempViews.foreach(kv => target.tempViews.put(kv._1, kv._2))
}
/**
   * Validate the new location before renaming a managed table; the new location must not
   * already exist.
*/
private def validateNewLocationOfRename(
oldName: TableIdentifier,
newName: TableIdentifier): Unit = {
val oldTable = getTableMetadata(oldName)
if (oldTable.tableType == CatalogTableType.MANAGED) {
val databaseLocation =
externalCatalog.getDatabase(oldName.database.getOrElse(currentDb)).locationUri
val newTableLocation = new Path(new Path(databaseLocation), formatTableName(newName.table))
val fs = newTableLocation.getFileSystem(hadoopConf)
if (fs.exists(newTableLocation)) {
throw QueryCompilationErrors.cannotOperateManagedTableWithExistingLocationError(
"rename", oldName, newTableLocation)
}
}
}
}
| chuckchen/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala | Scala | apache-2.0 | 73,410 |
package term.project.SentimentalStats
import scala.io.Source
/**
* Handles queries to sportsdatabase.com
*
* Author: David Prichard
* Last Modified: 4-19-2016
*/
object SportsDatabase {
val baseURL = "http://api.sportsdatabase.com/"
val queryFormat = "/query.json?output=json&sdql="
val apiKey = "&api_key=guest"
/**
* Query api.sportsdatabase.com and parse the results.
*/
def getRecord(team: Team): Record = {
val request = getQueryURL(team)
val response = Source.fromURL(request).mkString
JsonParser.parseSportsDBRecord(response)
}
/**
* Construct the URL that queries the database.
*/
private def getQueryURL(team: Team): String = {
val queryBody = escapeForURL(
team.league.sport.metrics.mkString(",")
+ "@team="
+ team.name
+ " and season="
+ team.seasonYear
)
( baseURL
+ team.league.name
+ queryFormat
+ queryBody
+ apiKey
)
}
/**
   * Percent-encode a raw "SDQL" query for use in a URL.
*/
private def escapeForURL(raw: String): String = {
object Encoding {
val comma = "%2C"
val at = "%40"
val equals = "%3D"
val space = "%20"
}
raw.replace(",", Encoding.comma)
.replace("@", Encoding.at)
.replace("=", Encoding.equals)
.replace(" ", Encoding.space)
}
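  // Illustrative (hypothetical query): escapeForURL("wins@team=Bulls and season=2015")
  // yields "wins%40team%3DBulls%20and%20season%3D2015".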
} | 4102/Sentimental-Stats | src/main/scala/SportsDatabase.scala | Scala | gpl-3.0 | 1,364 |
package de.tu_berlin.formic.datastructure.linear.server
import de.tu_berlin.formic.common.datastructure.DataStructureName
import upickle.default._
/**
* @author Ronny Bräunlich
*/
class DoubleListDataStructureFactory extends LinearServerDataStructureFactory[Double] {
override val name: DataStructureName = DoubleListDataStructureFactory.name
}
object DoubleListDataStructureFactory {
val name = DataStructureName("DoubleList")
}
| rbraeunlich/formic | linear/jvm/src/main/scala/de/tu_berlin/formic/datastructure/linear/server/DoubleListDataStructureFactory.scala | Scala | apache-2.0 | 442 |
package scavenger
import scala.concurrent.Future
import scala.concurrent.ExecutionContext
import scavenger.categories.formalccc
/** A distinguished type of morphism that chooses the
* first component of a product type.
*
* @since 2.1
* @author Andrey Tyukin
*/
case class Fst[X, Y]() extends AtomicAlgorithm[(X, Y), X] {
def identifier = formalccc.Fst
def apply(xy: (X, Y), ctx: Context): Future[X] = {
import ctx.executionContext
val (x, y) = xy
Future{ x }
}
def difficulty = Cheap
}
/** A distinguished type of morphism that chooses the
* second component of a product type.
*/
case class Snd[X, Y]() extends AtomicAlgorithm[(X, Y), Y] {
def identifier = formalccc.Snd
def apply(xy: (X, Y), ctx: Context): Future[Y] = {
import ctx.executionContext
val (x, y) = xy
Future{ y }
}
def difficulty = Cheap
}
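// Sketch (assuming the surrounding Context/Algorithm machinery): the projections recover the
// components of a paired computation, e.g. Fst[Int, String]().apply((42, "x"), ctx) eventually
// yields Future(42) and Snd[Int, String]().apply((42, "x"), ctx) yields Future("x").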
| tyukiand/scavenger_2_x | src/main/scala/scavenger/Projections.scala | Scala | gpl-3.0 | 864 |
package org.oneugene.log.akka
import akka.actor.{Actor, ActorRef, Props}
import akka.event.{Logging, LoggingAdapter}
import org.oneugene.log.PropertyChange
import org.oneugene.log.model.User
import org.oneugene.log.play.{NoChangesRecord, PropertyChangeRecord}
import scala.collection.mutable.ArrayBuffer
/**
 * Contains the user object and allows it to be modified.
 *
 * @param changeNotifications actor notified of change events
 * @param user initial state
*/
class UserActor(val changeNotifications: ActorRef, var user: User) extends Actor {
val log: LoggingAdapter = Logging(context.system, this)
val changelog: ArrayBuffer[PropertyChange[_]] = ArrayBuffer.empty[PropertyChange[_]]
override def receive: PartialFunction[Any, Unit] = {
case PropertyChangeRecord(changedValue, change) if changedValue.isInstanceOf[User] =>
user = changedValue.asInstanceOf[User]
changelog += change
changeNotifications ! change
log.info(s"Changed user to $user")
case NoChangesRecord =>
case UserActor.GetUser =>
sender ! user
}
}
object UserActor {
def props(changeNotifications: ActorRef, user: User): Props =
Props(new UserActor(changeNotifications, user))
case class GetUser()
}
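// Wiring sketch (hypothetical names):
//   val userRef = system.actorOf(UserActor.props(notifierRef, initialUser))
// Every PropertyChangeRecord sent to userRef swaps in the changed User, appends the change to
// the actor-local changelog and forwards it to notifierRef; UserActor.GetUser replies to the
// sender with the current User.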
| oneugene/evgeniy-portfolio | changelog-demo/src/main/scala/org/oneugene/log/akka/UserActor.scala | Scala | mit | 1,251 |
package lib
import models.IatiDataSource
import reactivemongo.bson._
import models.IatiDataSource._
import play.api.libs.ws.WS
import play.api.libs.iteratee.Enumerator
import concurrent.ExecutionContext.Implicits.global
import reactivemongo.bson.handlers.DefaultBSONHandlers.DefaultBSONDocumentWriter
import reactivemongo.bson.handlers.DefaultBSONHandlers.DefaultBSONReaderHandler
import play.api.libs.json.{JsNumber, JsArray}
import reactivemongo.bson.BSONBoolean
import reactivemongo.bson.BSONString
import play.api.Logger
import traits.SourceSelector
import reactivemongo.api.DefaultDB
import com.google.inject.Inject
import scala.concurrent.Await
import scala.concurrent.duration.Duration
class IatiDataSourceSelector @Inject()(database: DefaultDB) extends SourceSelector {
private val datasources = database.collection("iati-datasources")
private val logger = Logger.logger
def get(sourceType: String, activeOnly: Boolean) = {
val query = BSONDocument(Seq(
Some("sourceType" -> BSONString(sourceType)),
if(activeOnly)
Some("active" -> BSONBoolean(true))
else
None
).flatten: _*)
logger.debug(s"Querying DB for ${query.elements.map(e => e.name + "=" + e.value.toString).mkString(",")}")
datasources.find(query).toList
}
def activate(sourceType: String, ids: String*) {
datasources.update(
BSONDocument(
"sourceType" -> BSONString(sourceType)
),
BSONDocument(
"$set" -> BSONDocument("active" -> BSONBoolean(false))
),
upsert = false,
multi = true
) onComplete { case _ =>
datasources.update(
BSONDocument(
"_id" -> BSONDocument(
"$in" -> BSONArray(ids.map(id => BSONObjectID(id)): _*)
)
),
BSONDocument(
"$set" -> BSONDocument("active" -> BSONBoolean(true))
),
upsert = false,
multi = true
)
}
}
/**
   * The load process is a bit complex. First of all we need to extract all
   * the current organisations and get all the active URLs. Then we hit the
   * IATI Registry endpoint and parse all the provider files. Any of the
   * currently active URLs are marked as active again. The DB collection is
   * then dropped and recreated.
* @return
*/
def load(sourceType: String) = {
datasources.find(
BSONDocument("sourceType" -> BSONString(sourceType))
).toList.map(_.filter(_.active).map(_.url)).flatMap { list =>
// drop the data sources first
Await.ready(datasources.remove(
BSONDocument("sourceType" -> BSONString(sourceType)),
firstMatchOnly = false
), Duration.Inf)
// the IATI registry will only ever return 999 elements and ignore the limit value
// we then need to bash it with more requests and page the entries
WS.url(s"http://www.iatiregistry.org/api/search/dataset?extras_filetype=$sourceType").get.map { result =>
val count = (result.json \\ "count").as[JsNumber].value.toInt
val pages = (count/999.0).ceil.toInt
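        // e.g. a count of 2500 gives pages = ceil(2500 / 999.0) = 3, i.e. offsets 0, 999, 1998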
        // define paging as a loopable range of offset windows
0.to(pages-1).foreach { offsetWindow =>
val offset = offsetWindow * 999
val url = s"http://www.iatiregistry.org/api/search/dataset?extras_filetype=$sourceType&all_fields=1&limit=999&offset=$offset"
          val response = Await.result(WS.url(url).get, Duration.Inf)
val results = (response.json \\ "results").as[JsArray].value
val orgs = results.flatMap { json =>
val downloadUrl = (json \\ "res_url")(0).asOpt[String]
val title = (json \\ "title").as[String]
if(downloadUrl.isEmpty) {
None
} else {
Some(IatiDataSource(None, sourceType, title, downloadUrl.get, list.contains(downloadUrl.get)))
}
}
datasources.insert(Enumerator(orgs: _*), orgs.size)
}
}
}
}
}
| DFID/aid-platform-beta | src/platform/modules/admin/app/lib/IatiDataSourceSelector.scala | Scala | mit | 3,941 |
package picasso.model.integer
import picasso.utils._
sealed abstract class Expression
case class Plus(l: Expression, r: Expression) extends Expression
case class Minus(l: Expression, r: Expression) extends Expression
case class Constant(i: Int) extends Expression {
override def toString = i.toString
}
case class Variable(name: String) extends Expression {
override def toString = name
}
object Expression {
def priority(e: Expression): Int = e match {
case Plus(_,_) => 10
case Minus(_,_) => 15
case Constant(_) | Variable(_) => 20
}
def needParenthesis(currentPriority: Int, e: Expression): String = {
if (priority(e) < currentPriority) "(" + print(e) + ")"
else print(e)
}
def print(e: Expression): String = e match {
case Plus(l,r) => needParenthesis(priority(e), l) + " + " + needParenthesis(priority(e), r)
    case Minus(l,r) => needParenthesis(priority(e), l) + " - " + needParenthesis(priority(e), r)
case Constant(c) => c.toString
case Variable(v) => v
}
def variables(e: Expression): Set[Variable] = e match {
case Plus(l,r) => variables(l) ++ variables(r)
case Minus(l,r) => variables(l) ++ variables(r)
case Constant(_) => Set()
case v @ Variable(_) => Set(v)
}
def getTerms(e: Expression): List[Expression] = e match {
case Plus(l,r) => getTerms(l) ::: getTerms(r)
//case Minus(l,r) => getTerms(l) ::: getTerms(r)
case cstOrVar => List(cstOrVar)
}
def getPosTerms(e: Expression): List[Expression] = e match {
case Plus(l,r) => getPosTerms(l) ::: getPosTerms(r)
case Minus(l,r) => getPosTerms(l) ::: getNegTerms(r)
case cstOrVar => List(cstOrVar)
}
def getNegTerms(e: Expression): List[Expression] = e match {
case Plus(l,r) => getNegTerms(l) ::: getNegTerms(r)
case Minus(l,r) => getNegTerms(l) ::: getPosTerms(r)
case cstOrVar => List(cstOrVar)
}
def getPosNegTerms(e: Expression): (List[Expression], List[Expression]) = e match {
case Plus(l,r) =>
val (p1, n1) = getPosNegTerms(l)
val (p2, n2) = getPosNegTerms(r)
(p1 ::: p2, n1 ::: n2)
case Minus(l,r) =>
val (p1, n1) = getPosNegTerms(l)
val (p2, n2) = getPosNegTerms(r)
(p1 ::: n2, n1 ::: p2)
case cstOrVar => (List(cstOrVar), Nil)
}
def decomposePosNeg(e: Expression): (List[Variable], List[Variable], Constant) = {
val (pos, neg) = getPosNegTerms(e)
val (pVars, pCst) = ( (List[Variable](), 0) /: pos)( (acc, p) => p match {
case v @ Variable(_) => (v :: acc._1, acc._2)
case Constant(c) => (acc._1, acc._2 + c)
case other => Logger.logAndThrow("integer.AST", LogError, "expected Variable or Constant, found: " + other)
})
val (nVars, nCst) = ( (List[Variable](), 0) /: neg)( (acc, p) => p match {
case v @ Variable(_) => (v :: acc._1, acc._2)
case Constant(c) => (acc._1, acc._2 + c)
case other => Logger.logAndThrow("integer.AST", LogError, "expected Variable or Constant, found: " + other)
})
(pVars, nVars, Constant(pCst - nCst))
}
  /** Decomposes a sum into its variables (positive polarity only) and its constant term. */
def decompose(e: Expression): (List[Variable], Constant) = {
val pos = getTerms(e)
val (posVar, posCst) = pos.partition{
case Variable(_) => true
case Constant(_) => false
case other => Logger.logAndThrow("integer.AST", LogError, "expected Variable or Constant, found: " + other)
}
val posVar2: List[Variable] = posVar.map{
case v @ Variable(_) => v
case other => Logger.logAndThrow("integer.AST", LogError, "expected Variable, found: " + other)
}
val cst = (0 /: posCst)( (acc, c) => c match {
case Constant(value) => acc + value
case other => Logger.logAndThrow("integer.AST", LogError, "expected Constant, found: " + other)
})
(posVar2, Constant(cst))
}
  // Returns a vector of coefficients (indexed by variable) and a constant term.
def decomposeVector(e: Expression, idxMap: Map[Variable, Int]): (Array[Int], Int) = {
val coeffArray = Array.ofDim[Int](idxMap.size)
var constantTerm = 0
val (pos, neg) = getPosNegTerms(e)
pos.foreach{
case v @ Variable(_) => coeffArray(idxMap(v)) += 1
case Constant(c) => constantTerm += c
case other => Logger.logAndThrow("integer.AST", LogError, "expected Variable or Constant, found: " + other)
}
neg.foreach{
case v @ Variable(_) => coeffArray(idxMap(v)) -= 1
case Constant(c) => constantTerm -= c
case other => Logger.logAndThrow("integer.AST", LogError, "expected Variable or Constant, found: " + other)
}
(coeffArray, constantTerm)
}
def decomposeVector(e: Expression, vars: Seq[Variable]): (Array[Int], Int) = {
val idxMap = vars.zipWithIndex.toMap //bad when there is a lot of variables (10k) ...
decomposeVector(e, idxMap)
}
def recompose(pos: List[Variable], neg: List[Variable], cst: Constant): Expression = {
if (pos.isEmpty) {
((cst: Expression) /: neg)(Minus(_, _))
} else {
val posTerm = (pos: List[Expression]).reduceLeft(Plus(_, _))
val negTerm = (posTerm /: neg)(Minus(_, _))
if (cst.i == 0) negTerm
else Plus(negTerm, cst)
}
}
def recompose(pos: List[Variable], cst: Constant): Expression = {
recompose(pos, Nil, cst)
}
def recomposeVector(coeffs: Seq[Int], cst: Int, vars: Seq[Variable]): Expression = {
    // negative coefficients are legal here: they are emitted as subtractions below
val pos = for (i <- 0 until coeffs.length; j <- 0 until coeffs(i)) yield vars(i)
val neg = for (i <- 0 until coeffs.length; j <- coeffs(i) until 0) yield vars(i)
recompose(pos.toList, neg.toList, Constant(cst))
}
def simplify(e: Expression): Expression = {
val vars = Expression.variables(e).toSeq
val (p, c) = decomposeVector(e, vars)
recomposeVector(p,c,vars)
}
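  // Worked example: simplify(Plus(Variable("x"), Minus(Constant(2), Variable("x")))) collects
  // a zero coefficient for x and a constant term of 2, so it recomposes to Constant(2).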
//TODO lazyCopier
def alpha(e: Expression, subst: Map[Variable,Expression]): Expression = e match {
case Plus(l,r) => Plus(alpha(l, subst), alpha(r, subst))
case Minus(l,r) => Minus(alpha(l, subst), alpha(r, subst))
case c @ Constant(_) => c
case v @ Variable(_) => subst.getOrElse(v, v)
}
}
abstract class Condition
case class Eq(l: Expression, r: Expression) extends Condition
case class Lt(l: Expression, r: Expression) extends Condition
case class Leq(l: Expression, r: Expression) extends Condition
case class And(lst: List[Condition]) extends Condition
case class Or(lst: List[Condition]) extends Condition
case class Not(c: Condition) extends Condition
case class Literal(b: Boolean) extends Condition
object Condition {
def priority(e: Condition): Int = e match {
case Eq(_,_) | Lt(_,_) | Leq(_,_) => 30
case And(_) => 11
case Or(_) => 10
case Not(_) => 20
case Literal(_) => 30
}
def needParenthesis(currentPriority: Int, e: Condition): String = {
if (priority(e) < currentPriority) "(" + print(e) + ")"
else print(e)
}
def print(e: Condition): String = e match {
case Eq(l,r) => l + " == " + r
case Lt(l,r) => l + " < " + r
case Leq(l,r) => l + " <= " + r
case And(lst) => lst.map(needParenthesis(priority(e), _)).mkString(" && ")
case Or(lst) => lst.map(needParenthesis(priority(e), _)).mkString(" || ")
case Not(c) => "!" + needParenthesis(priority(e), c)
case Literal(b) => b.toString
}
def variables(c: Condition): Set[Variable] = c match {
case Eq(l,r) => Expression.variables(l) ++ Expression.variables(r)
case Lt(l,r) => Expression.variables(l) ++ Expression.variables(r)
case Leq(l,r) => Expression.variables(l) ++ Expression.variables(r)
case And(lst) => (Set[Variable]() /: lst)(_ ++ variables(_))
case Or(lst) => (Set[Variable]() /: lst)(_ ++ variables(_))
case Not(c) => variables(c)
case Literal(_) => Set()
}
def nnf(c: Condition): Condition = {
def process(c: Condition, negate: Boolean): Condition = c match {
case e @ Eq(_,_) => if (negate) Not(e) else e
case Lt(l,r) => if (negate) Leq(r,l) else Lt(l,r)
case Leq(l,r) => if (negate) Lt(r,l) else Leq(l,r)
case And(lst) =>
val lst2 = lst map (process(_, negate))
if (negate) Or(lst2) else And(lst2)
case Or(lst) =>
val lst2 = lst map (process(_, negate))
if (negate) And(lst2) else Or(lst2)
case Not(c) => process(c, !negate)
case Literal(b) => if (negate) Literal(!b) else Literal(b)
}
process(c, false)
}
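  // Examples: nnf(Not(Lt(x, y))) gives Leq(y, x), and by De Morgan
  // nnf(Not(And(List(a, b)))) gives Or(List(nnf(Not(a)), nnf(Not(b)))).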
def simplify(c: Condition): Condition = c match {
case l @ Literal(_) => l
case e @ Eq(Constant(c1), Constant(c2)) => if (c1 == c2) Literal(true) else Literal(false)
case e @ Eq(e1, e2) => if (e1 == e2) Literal(true) else e
case e @ Leq(Constant(c1), Constant(c2)) => if (c1 <= c2) Literal(true) else Literal(false)
case e @ Leq(e1, e2) => if (e1 == e2) Literal(true) else e
case e @ Lt(Constant(c1), Constant(c2)) => if (c1 < c2) Literal(true) else Literal(false)
case e @ Lt(e1, e2) => if (e1 == e2) Literal(false) else e
case And(lst) =>
val lst2 = lst.view.map(simplify)
val lst3 = lst2 flatMap getTopLevelClauses
val lst4 = lst3 filterNot (_ == Literal(true))
if (lst4.isEmpty) Literal(true)
else if (lst4 contains Literal(false)) Literal(false)
else And(lst4.toList) //TODO remove duplicates ?
case Or(lst) =>
val lst2 = lst.view.map(simplify)
val lst3 = lst2 flatMap getTopLevelDisj
val lst4 = lst3 filterNot (_ == Literal(false))
if (lst4.isEmpty) Literal(false)
else if (lst4 contains Literal(true)) Literal(true)
else Or(lst4.toList) //TODO remove duplicates ?
case Not(c1) =>
nnf(Not(simplify(c1)))
}
//TODO lazyCopier
def alpha(c: Condition, subst: Map[Variable,Expression]): Condition = c match {
case Eq(l,r) => Eq(Expression.alpha(l, subst), Expression.alpha(r, subst))
case Lt(l,r) => Lt(Expression.alpha(l, subst), Expression.alpha(r, subst))
case Leq(l,r) => Leq(Expression.alpha(l, subst), Expression.alpha(r, subst))
case And(lst) => And(lst.map(alpha(_, subst)))
case Or(lst) => Or(lst.map(alpha(_, subst)))
case Not(c) => Not(alpha(c, subst))
case l @ Literal(_) => l
}
def getTopLevelClauses(c: Condition): Seq[Condition] = c match {
case And(lst) => lst flatMap getTopLevelClauses
case other => Seq(other)
}
def getTopLevelDisj(c: Condition): Seq[Condition] = c match {
case Or(lst) => lst flatMap getTopLevelDisj
case other => Seq(other)
}
def getLowerBounds(guard: Condition): Map[Variable, Int] = {
def process(c: Condition): Seq[(Variable, Int)] = c match {
case Eq(v @ Variable(_), Constant(c)) => Seq(v -> c)
case Eq(Constant(c), v @ Variable(_)) => Seq(v -> c)
case Leq(Constant(c), v @ Variable(_)) => Seq(v -> c)
case Lt(Constant(c), v @ Variable(_)) => Seq(v -> (c+1))
case And(lst) => lst flatMap process
case _ => Seq()
}
(Map[Variable, Int]() /: process(guard))( (acc, lb) => {
if (acc contains lb._1) acc + (lb._1 -> math.max(acc(lb._1), lb._2))
else acc + lb
})
}
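  // Example: getLowerBounds(And(List(Leq(Constant(0), x), Lt(Constant(2), x)))) keeps the
  // strongest bound per variable and yields Map(x -> 3).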
def getUpperBounds(guard: Condition): Map[Variable, Int] = {
def process(c: Condition): Seq[(Variable, Int)] = c match {
case Eq(v @ Variable(_), Constant(c)) => Seq(v -> c)
case Eq(Constant(c), v @ Variable(_)) => Seq(v -> c)
case Leq(v @ Variable(_), Constant(c)) => Seq(v -> c)
case Lt(v @ Variable(_), Constant(c)) => Seq(v -> (c-1))
case And(lst) => lst flatMap process
case _ => Seq()
}
(Map[Variable, Int]() /: process(guard))( (acc, lb) => {
if (acc contains lb._1) acc + (lb._1 -> math.min(acc(lb._1), lb._2))
else acc + lb
})
}
}
| dzufferey/picasso | core/src/main/scala/picasso/model/integer/AST.scala | Scala | bsd-2-clause | 11,784 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.views.application.assets.insurancePolicy
import iht.controllers.application.assets.insurancePolicy.routes
import iht.forms.ApplicationForms._
import iht.models.application.assets.InsurancePolicy
import iht.testhelpers.{CommonBuilder, TestHelper}
import iht.views.application.{CancelComponent, YesNoQuestionViewBehaviour}
import iht.views.html.application.asset.insurancePolicy.insurance_policy_details_in_trust
import play.api.data.Form
import play.twirl.api.HtmlFormat.Appendable
class InsurancePolicyDetailsInTrustViewTest extends YesNoQuestionViewBehaviour[InsurancePolicy] {
lazy val regDetails = CommonBuilder.buildRegistrationDetails1
lazy val deceasedName = regDetails.deceasedDetails.fold("")(x => x.name)
lazy val insurancePolicyDetailsInTrustView: insurance_policy_details_in_trust = app.injector.instanceOf[insurance_policy_details_in_trust]
override def pageTitle = messagesApi("page.iht.application.insurance.policies.section4.title", deceasedName)
override def browserTitle = messagesApi("page.iht.application.insurance.policies.section4.browserTitle")
override def guidance = noGuidance
override def formTarget = Some(routes.InsurancePolicyDetailsInTrustController.onSubmit())
override def form: Form[InsurancePolicy] = insurancePolicyInTrustForm
override def formToView: Form[InsurancePolicy] => Appendable =
form => insurancePolicyDetailsInTrustView(form, regDetails)
override def cancelComponent = Some(CancelComponent(routes.InsurancePolicyOverviewController.onPageLoad(),
messagesApi("site.link.return.insurance.policies"),
TestHelper.InsurancePlacedInTrustYesNoID
))
"InsurancePolicyDetailsInTrustViewTest" must {
behave like yesNoQuestion
}
}
| hmrc/iht-frontend | test/iht/views/application/assets/insurancePolicy/InsurancePolicyDetailsInTrustViewTest.scala | Scala | apache-2.0 | 2,335 |
/**
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.actor
import akka.dispatch.sysmsg._
import akka.dispatch.{ UnboundedMessageQueueSemantics, RequiresMessageQueue }
import akka.routing._
import akka.event._
import akka.util.{ Switch, Helpers }
/**
* @note IMPLEMENT IN SCALA.JS
*
import akka.japi.Util.immutableSeq
*/
import akka.util.Collections.EmptyImmutableSeq
import scala.util.{ Success, Failure }
import scala.util.control.NonFatal
import java.util.concurrent.atomic.AtomicLong
import scala.concurrent.{ ExecutionContext, ExecutionContextExecutor, Future, Promise }
import scala.annotation.implicitNotFound
import akka.ConfigurationException
import akka.dispatch.{MessageDispatcher, Mailboxes}
/**
* Interface for all ActorRef providers to implement.
*/
trait ActorRefProvider {
/**
* Reference to the supervisor of guardian and systemGuardian; this is
* exposed so that the ActorSystemImpl can use it as lookupRoot, i.e.
* for anchoring absolute actor look-ups.
*/
def rootGuardian: InternalActorRef
/**
* Reference to the supervisor of guardian and systemGuardian at the specified address;
* this is exposed so that the ActorRefFactory can use it as lookupRoot, i.e.
* for anchoring absolute actor selections.
*/
def rootGuardianAt(address: Address): ActorRef
/**
* Reference to the supervisor used for all top-level user actors.
*/
def guardian: LocalActorRef
/**
* Reference to the supervisor used for all top-level system actors.
*/
def systemGuardian: LocalActorRef
/**
* Dead letter destination for this provider.
*/
def deadLetters: ActorRef
/**
* The root path for all actors within this actor system, not including any remote address information.
*/
def rootPath: ActorPath
/**
* The Settings associated with this ActorRefProvider
*/
def settings: ActorSystem.Settings
/**
* Initialization of an ActorRefProvider happens in two steps: first
* construction of the object with settings, eventStream, etc.
* and then—when the ActorSystem is constructed—the second phase during
* which actors may be created (e.g. the guardians).
*/
def init(system: ActorSystemImpl): Unit
/**
* The Deployer associated with this ActorRefProvider
*/
/**
* @note IMPLEMENT IN SCALA.JS
* Deployer is NOT implemented
*
* def deployer: Deployer
*/
/**
* Generates and returns a unique actor path below “/temp”.
*/
def tempPath(): ActorPath
/**
* Returns the actor reference representing the “/temp” path.
*/
def tempContainer: InternalActorRef
/**
* Registers an actorRef at a path returned by tempPath(); do NOT pass in any other path.
*/
def registerTempActor(actorRef: InternalActorRef, path: ActorPath): Unit
/**
* Unregister a temporary actor from the “/temp” path (i.e. obtained from tempPath()); do NOT pass in any other path.
*/
def unregisterTempActor(path: ActorPath): Unit
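  // Intended life-cycle (a sketch of how e.g. the ask pattern uses this):
  // allocate a path with tempPath(), registerTempActor(ref, path) so replies
  // can reach the temporary actor, then unregisterTempActor(path) when done.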
/**
* Actor factory with create-only semantics: will create an actor as
* described by props with the given supervisor and path (may be different
* in case of remote supervision). If systemService is true, deployment is
* bypassed (local-only). If ``Some(deploy)`` is passed in, it should be
* regarded as taking precedence over the nominally applicable settings,
* but it should be overridable from external configuration; the lookup of
* the latter can be suppressed by setting ``lookupDeploy`` to ``false``.
*/
def actorOf(
system: ActorSystemImpl,
props: Props,
supervisor: InternalActorRef,
path: ActorPath,
systemService: Boolean,
deploy: Option[Deploy],
lookupDeploy: Boolean,
async: Boolean): InternalActorRef
/**
* Create actor reference for a specified local or remote path. If no such
* actor exists, it will be (equivalent to) a dead letter reference.
*/
@deprecated("use actorSelection instead of actorFor", "2.2")
def actorFor(path: ActorPath): InternalActorRef
/**
* Create actor reference for a specified local or remote path, which will
* be parsed using java.net.URI. If no such actor exists, it will be
* (equivalent to) a dead letter reference. If `s` is a relative URI, resolve
* it relative to the given ref.
*/
@deprecated("use actorSelection instead of actorFor", "2.2")
def actorFor(ref: InternalActorRef, s: String): InternalActorRef
/**
* Create actor reference for the specified child path starting at the
* given starting point. This method always returns an actor which is “logically local”,
* i.e. it cannot be used to obtain a reference to an actor which is not
* physically or logically attached to this actor system.
*/
@deprecated("use actorSelection instead of actorFor", "2.2")
def actorFor(ref: InternalActorRef, p: Iterable[String]): InternalActorRef
/**
* Create actor reference for a specified path. If no such
* actor exists, it will be (equivalent to) a dead letter reference.
*/
def resolveActorRef(path: String): ActorRef
/**
* Create actor reference for a specified path. If no such
* actor exists, it will be (equivalent to) a dead letter reference.
*/
def resolveActorRef(path: ActorPath): ActorRef
/**
* This Future is completed upon termination of this ActorRefProvider, which
* is usually initiated by stopping the guardian via ActorSystem.stop().
*/
def terminationFuture: Future[Unit]
/**
* Obtain the address which is to be used within sender references when
* sending to the given other address or none if the other address cannot be
* reached from this system (i.e. no means of communication known; no
* attempt is made to verify actual reachability).
*/
def getExternalAddressFor(addr: Address): Option[Address]
/**
* Obtain the external address of the default transport.
*/
def getDefaultAddress: Address
}
/**
* Interface implemented by ActorSystem and ActorContext, the only two places
* from which you can get fresh actors.
*/
@implicitNotFound("implicit ActorRefFactory required: if outside of an Actor you need an implicit ActorSystem, inside of an actor this should be the implicit ActorContext")
trait ActorRefFactory {
/**
* INTERNAL API
*/
protected def systemImpl: ActorSystemImpl
/**
* INTERNAL API
*/
protected def provider: ActorRefProvider
/**
* Returns the default MessageDispatcher associated with this ActorRefFactory
*/
implicit def dispatcher: ExecutionContextExecutor
/**
* Father of all children created by this interface.
*
* INTERNAL API
*/
protected def guardian: InternalActorRef
/**
* INTERNAL API
*/
protected def lookupRoot: InternalActorRef
/**
* Create new actor as child of this context and give it an automatically
* generated name (currently similar to base64-encoded integer count,
* reversed and with “$” prepended, may change in the future).
*
* See [[akka.actor.Props]] for details on how to obtain a `Props` object.
*
* @throws akka.ConfigurationException if deployment, dispatcher
* or mailbox configuration is wrong
*/
def actorOf(props: Props): ActorRef
/**
* Create new actor as child of this context with the given name, which must
* not be null, empty or start with “$”. If the given name is already in use,
* an `InvalidActorNameException` is thrown.
*
* See [[akka.actor.Props]] for details on how to obtain a `Props` object.
* @throws akka.actor.InvalidActorNameException if the given name is
* invalid or already in use
* @throws akka.ConfigurationException if deployment, dispatcher
* or mailbox configuration is wrong
*/
def actorOf(props: Props, name: String): ActorRef
/**
* Look-up an actor by path; if it does not exist, returns a reference to
* the dead-letter mailbox of the [[akka.actor.ActorSystem]]. If the path
   * points to an actor which is not local, no attempt is made during this
* call to verify that the actor it represents does exist or is alive; use
* `watch(ref)` to be notified of the target’s termination, which is also
* signaled if the queried path cannot be resolved.
*/
@deprecated("use actorSelection instead of actorFor", "2.2")
def actorFor(path: ActorPath): ActorRef = provider.actorFor(path)
/**
* Look-up an actor by path represented as string.
*
* Absolute URIs like `akka://appname/user/actorA` are looked up as described
   * for look-ups by `actorFor(ActorPath)`.
*
* Relative URIs like `/service/actorA/childB` are looked up relative to the
* root path of the [[akka.actor.ActorSystem]] containing this factory and as
   * described for look-ups by `actorFor(Iterable[String])`.
*
* Relative URIs like `myChild/grandChild` or `../myBrother` are looked up
* relative to the current context as described for look-ups by
   * `actorFor(Iterable[String])`
*/
@deprecated("use actorSelection instead of actorFor", "2.2")
def actorFor(path: String): ActorRef = provider.actorFor(lookupRoot, path)
/**
* Look-up an actor by applying the given path elements, starting from the
* current context, where `".."` signifies the parent of an actor.
*
* Example:
* {{{
* class MyActor extends Actor {
* def receive = {
* case msg =>
* ...
* val target = context.actorFor(Seq("..", "myBrother", "myNephew"))
* ...
* }
* }
* }}}
*
* For maximum performance use a collection with efficient head & tail operations.
*/
@deprecated("use actorSelection instead of actorFor", "2.2")
def actorFor(path: Iterable[String]): ActorRef = provider.actorFor(lookupRoot, path)
/**
* Java API: Look-up an actor by applying the given path elements, starting from the
* current context, where `".."` signifies the parent of an actor.
*
* Example:
* {{{
* public class MyActor extends UntypedActor {
* public void onReceive(Object msg) throws Exception {
* ...
* final List<String> path = new ArrayList<String>();
* path.add("..");
* path.add("myBrother");
* path.add("myNephew");
* final ActorRef target = getContext().actorFor(path);
* ...
* }
* }
* }}}
*
* For maximum performance use a collection with efficient head & tail operations.
*/
/**
* @note IMPLEMENT IN SCALA.JS
*
@deprecated("use actorSelection instead of actorFor", "2.2")
def actorFor(path: java.lang.Iterable[String]): ActorRef = provider.actorFor(lookupRoot, immutableSeq(path))
*/
/**
* Construct an [[akka.actor.ActorSelection]] from the given path, which is
* parsed for wildcards (these are replaced by regular expressions
* internally). No attempt is made to verify the existence of any part of
* the supplied path, it is recommended to send a message and gather the
* replies in order to resolve the matching set of actors.
*/
def actorSelection(path: String): ActorSelection = path match {
case RelativeActorPath(elems) ⇒
if (elems.isEmpty) ActorSelection(provider.deadLetters, "")
else if (elems.head.isEmpty) ActorSelection(provider.rootGuardian, elems.tail)
else ActorSelection(lookupRoot, elems)
case ActorPathExtractor(address, elems) ⇒
ActorSelection(provider.rootGuardianAt(address), elems)
case _ ⇒
ActorSelection(provider.deadLetters, "")
}
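  // For example (hypothetical actor names): actorSelection("/user/service-*/worker")
  // selects every `worker` child of any top-level actor whose name starts with
  // "service-"; no existence check happens when the selection is constructed.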
/**
* Construct an [[akka.actor.ActorSelection]] from the given path, which is
* parsed for wildcards (these are replaced by regular expressions
* internally). No attempt is made to verify the existence of any part of
* the supplied path, it is recommended to send a message and gather the
* replies in order to resolve the matching set of actors.
*/
def actorSelection(path: ActorPath): ActorSelection =
ActorSelection(provider.rootGuardianAt(path.address), path.elements)
/**
* Stop the actor pointed to by the given [[akka.actor.ActorRef]]; this is
* an asynchronous operation, i.e. involves a message send.
*/
def stop(actor: ActorRef): Unit
}
/**
* Internal Akka use only, used in implementation of system.stop(child).
*/
private[akka] case class StopChild(child: ActorRef)
/**
* INTERNAL API
*/
private[akka] object SystemGuardian {
/**
* For the purpose of orderly shutdown it's possible
* to register interest in the termination of systemGuardian
   * and receive a notification [[akka.actor.SystemGuardian.TerminationHook]]
   * before systemGuardian is stopped. The registered hook is supposed
   * to reply with [[akka.actor.SystemGuardian.TerminationHookDone]] and the
* systemGuardian will not stop until all registered hooks have replied.
*/
case object RegisterTerminationHook
case object TerminationHook
case object TerminationHookDone
}
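// A minimal sketch (not part of Akka) of an actor taking part in the
// termination-hook protocol documented above; how `systemGuardian` is
// obtained and what `cleanUp()` does are assumptions for illustration:
//   class MyTerminationHook(systemGuardian: ActorRef) extends Actor {
//     override def preStart(): Unit = systemGuardian ! SystemGuardian.RegisterTerminationHook
//     def receive = {
//       case SystemGuardian.TerminationHook =>
//         cleanUp() // application-specific shutdown work
//         sender() ! SystemGuardian.TerminationHookDone
//     }
//   }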
private[akka] object LocalActorRefProvider {
/*
* Root and user guardian
*/
private class Guardian(override val supervisorStrategy: SupervisorStrategy) extends Actor
with RequiresMessageQueue[UnboundedMessageQueueSemantics] {
def receive = {
case Terminated(_) ⇒ context.stop(self)
case StopChild(child) ⇒ context.stop(child)
case m ⇒ context.system.deadLetters forward DeadLetter(m, sender(), self)
}
// guardian MUST NOT lose its children during restart
override def preRestart(cause: Throwable, msg: Option[Any]) {}
}
/**
* System guardian
*/
private class SystemGuardian(override val supervisorStrategy: SupervisorStrategy, val guardian: ActorRef)
extends Actor with RequiresMessageQueue[UnboundedMessageQueueSemantics] {
import SystemGuardian._
var terminationHooks = Set.empty[ActorRef]
def receive = {
case Terminated(`guardian`) ⇒
// time for the systemGuardian to stop, but first notify all the
// termination hooks, they will reply with TerminationHookDone
// and when all are done the systemGuardian is stopped
context.become(terminating)
terminationHooks foreach { _ ! TerminationHook }
stopWhenAllTerminationHooksDone()
case Terminated(a) ⇒
// a registered, and watched termination hook terminated before
// termination process of guardian has started
terminationHooks -= a
case StopChild(child) ⇒ context.stop(child)
case RegisterTerminationHook if sender() != context.system.deadLetters ⇒
terminationHooks += sender()
context watch sender()
case m ⇒ context.system.deadLetters forward DeadLetter(m, sender(), self)
}
def terminating: Receive = {
case Terminated(a) ⇒ stopWhenAllTerminationHooksDone(a)
case TerminationHookDone ⇒ stopWhenAllTerminationHooksDone(sender())
case m ⇒ context.system.deadLetters forward DeadLetter(m, sender(), self)
}
def stopWhenAllTerminationHooksDone(remove: ActorRef): Unit = {
terminationHooks -= remove
stopWhenAllTerminationHooksDone()
}
def stopWhenAllTerminationHooksDone(): Unit =
if (terminationHooks.isEmpty) {
context.system.eventStream.stopDefaultLoggers(context.system)
context.stop(self)
}
// guardian MUST NOT lose its children during restart
override def preRestart(cause: Throwable, msg: Option[Any]) {}
}
}
/**
* Local ActorRef provider.
*
* INTERNAL API!
*
* Depending on this class is not supported, only the [[ActorRefProvider]] interface is supported.
*/
@scala.scalajs.js.annotation.JSExport
/*private[akka]*/ class LocalActorRefProvider /*private[akka]*/ (
_systemName: String,
override val settings: ActorSystem.Settings,
val eventStream: EventStream,
/**
* @note IMPLEMENT IN SCALA.JS
*
val dynamicAccess: DynamicAccess,
override val deployer: Deployer,
*/
_deadLetters: Option[ActorPath ⇒ InternalActorRef])
extends ActorRefProvider {
// this is the constructor needed for reflectively instantiating the provider
def this(_systemName: String,
settings: ActorSystem.Settings,
eventStream: EventStream
/** @note IMPLEMENT IN SCALA.JS , dynamicAccess: DynamicAccess */) =
this(_systemName,
settings,
eventStream,
/**
* @note IMPLEMENT IN SCALA.JS
* Deployer not implemented
* dynamicAccess,
* new Deployer(settings, dynamicAccess),
*/
None)
override val rootPath: ActorPath = RootActorPath(Address("akka", _systemName))
private[akka] val log: LoggingAdapter = Logging(eventStream, "LocalActorRefProvider(" + rootPath.address + ")")
override val deadLetters: InternalActorRef =
_deadLetters.getOrElse((p: ActorPath) ⇒ new DeadLetterActorRef(this, p, eventStream)).apply(rootPath / "deadLetters")
/*
* generate name for temporary actor refs
*/
private val tempNumber = new AtomicLong
private def tempName() = Helpers.base64(tempNumber.getAndIncrement())
private val tempNode = rootPath / "temp"
override def tempPath(): ActorPath = tempNode / tempName()
/**
* Top-level anchor for the supervision hierarchy of this actor system. Will
   * receive only Supervise/DeathWatchNotification system messages or Failed messages.
*/
private[akka] val theOneWhoWalksTheBubblesOfSpaceTime: InternalActorRef = new MinimalActorRef {
val stopped = new Switch(false)
@volatile
var causeOfTermination: Option[Throwable] = None
val path = rootPath / "bubble-walker"
def provider: ActorRefProvider = LocalActorRefProvider.this
override def stop(): Unit = stopped switchOn { terminationPromise.complete(causeOfTermination.map(Failure(_)).getOrElse(Success(()))) }
@deprecated("Use context.watch(actor) and receive Terminated(actor)", "2.2") override def isTerminated: Boolean = stopped.isOn
override def !(message: Any)(implicit sender: ActorRef = Actor.noSender): Unit = stopped.ifOff(message match {
case null ⇒ throw new InvalidMessageException("Message is null")
case _ ⇒ log.error(s"$this received unexpected message [$message]")
})
override def sendSystemMessage(message: SystemMessage): Unit = stopped ifOff {
message match {
case Failed(child, ex, _) ⇒
log.error(ex, s"guardian $child failed, shutting down!")
causeOfTermination = Some(ex)
child.asInstanceOf[InternalActorRef].stop()
case Supervise(_, _) ⇒ // TODO register child in some map to keep track of it and enable shutdown after all dead
case _: DeathWatchNotification ⇒ stop()
case _ ⇒ log.error(s"$this received unexpected system message [$message]")
}
}
}
/*
* The problem is that ActorRefs need a reference to the ActorSystem to
* provide their service. Hence they cannot be created while the
* constructors of ActorSystem and ActorRefProvider are still running.
* The solution is to split out that last part into an init() method,
* but it also requires these references to be @volatile and lazy.
*/
@volatile
private var system: ActorSystemImpl = _
lazy val terminationPromise: Promise[Unit] = Promise[Unit]()
def terminationFuture: Future[Unit] = terminationPromise.future
@volatile
private var extraNames: Map[String, InternalActorRef] = Map()
/**
* Higher-level providers (or extensions) might want to register new synthetic
* top-level paths for doing special stuff. This is the way to do just that.
* Just be careful to complete all this before ActorSystem.start() finishes,
* or before you start your own auto-spawned actors.
*/
def registerExtraNames(_extras: Map[String, InternalActorRef]): Unit = extraNames ++= _extras
/**
* @note IMPLEMENT IN SCALA.JS
*
private def guardianSupervisorStrategyConfigurator =
dynamicAccess.createInstanceFor[SupervisorStrategyConfigurator](settings.SupervisorStrategyClass, EmptyImmutableSeq).get
*/
/**
   * Overridable supervision strategy to be used by the root (“/”) guardian.
*/
protected def rootGuardianStrategy: SupervisorStrategy = OneForOneStrategy() {
case ex ⇒
log.error(ex, "guardian failed, shutting down system")
SupervisorStrategy.Stop
}
/**
* Overridable supervision strategy to be used by the “/user” guardian.
*/
/**
* @note IMPLEMENT IN SCALA.JS
*
protected def guardianStrategy: SupervisorStrategy = guardianSupervisorStrategyConfigurator.create()
*/
protected def guardianStrategy: SupervisorStrategy = SupervisorStrategy.defaultStrategy
/**
   * Overridable supervision strategy to be used by the “/system” guardian.
*/
protected def systemGuardianStrategy: SupervisorStrategy = SupervisorStrategy.defaultStrategy
private lazy val defaultDispatcher = system.dispatchers.defaultGlobalDispatcher
private lazy val defaultMailbox = system.mailboxes.lookup(Mailboxes.DefaultMailboxId)
//val mailboxes: Mailboxes = new Mailboxes(deadLetters)
override lazy val rootGuardian: LocalActorRef =
new LocalActorRef(
system,
/**
* @note IMPLEMENT IN SCALA.JS
*
Props(classOf[LocalActorRefProvider.Guardian], rootGuardianStrategy),
*/
Props(new LocalActorRefProvider.Guardian(rootGuardianStrategy)),
defaultDispatcher,
defaultMailbox,
theOneWhoWalksTheBubblesOfSpaceTime,
rootPath) {
override def getParent: InternalActorRef = this
override def getSingleChild(name: String): InternalActorRef = name match {
case "temp" ⇒ tempContainer
case "deadLetters" ⇒ deadLetters
case other ⇒ extraNames.get(other).getOrElse(super.getSingleChild(other))
}
}
override def rootGuardianAt(address: Address): ActorRef =
if (address == rootPath.address) rootGuardian
else deadLetters
override lazy val guardian: LocalActorRef = {
val cell = rootGuardian.underlying
cell.reserveChild("user")
/**
* @note IMPLEMENT IN SCALA.JS
*
val ref = new LocalActorRef(system, Props(classOf[LocalActorRefProvider.Guardian], guardianStrategy),
defaultDispatcher, defaultMailbox, rootGuardian, rootPath / "user")
*/
val ref = new LocalActorRef(system, Props(new LocalActorRefProvider.Guardian(guardianStrategy)),
defaultDispatcher, defaultMailbox, rootGuardian, rootPath / "user")
cell.initChild(ref)
ref.start()
ref
}
override lazy val systemGuardian: LocalActorRef = {
val cell = rootGuardian.underlying
cell.reserveChild("system")
/**
* @note IMPLEMENT IN SCALA.JS
*
val ref = new LocalActorRef(
system, Props(classOf[LocalActorRefProvider.SystemGuardian], systemGuardianStrategy, guardian),
defaultDispatcher, defaultMailbox, rootGuardian, rootPath / "system")
*/
val ref = new LocalActorRef(
system, Props( new LocalActorRefProvider.SystemGuardian(systemGuardianStrategy, guardian)),
defaultDispatcher, defaultMailbox, rootGuardian, rootPath / "system")
cell.initChild(ref)
ref.start()
ref
}
lazy val tempContainer = new VirtualPathContainer(system.provider, tempNode, rootGuardian, log)
def registerTempActor(actorRef: InternalActorRef, path: ActorPath): Unit = {
assert(path.parent eq tempNode, "cannot registerTempActor() with anything not obtained from tempPath()")
tempContainer.addChild(path.name, actorRef)
}
def unregisterTempActor(path: ActorPath): Unit = {
assert(path.parent eq tempNode, "cannot unregisterTempActor() with anything not obtained from tempPath()")
tempContainer.removeChild(path.name)
}
def init(_system: ActorSystemImpl) {
system = _system
rootGuardian.start()
// chain death watchers so that killing guardian stops the application
systemGuardian.sendSystemMessage(Watch(guardian, systemGuardian))
rootGuardian.sendSystemMessage(Watch(systemGuardian, rootGuardian))
eventStream.startDefaultLoggers(_system)
}
@deprecated("use actorSelection instead of actorFor", "2.2")
override def actorFor(ref: InternalActorRef, path: String): InternalActorRef = path match {
case RelativeActorPath(elems) ⇒
if (elems.isEmpty) {
log.debug("look-up of empty path string [{}] fails (per definition)", path)
deadLetters
} else if (elems.head.isEmpty) actorFor(rootGuardian, elems.tail)
else actorFor(ref, elems)
case ActorPathExtractor(address, elems) if address == rootPath.address ⇒ actorFor(rootGuardian, elems)
case _ ⇒
log.debug("look-up of unknown path [{}] failed", path)
deadLetters
}
@deprecated("use actorSelection instead of actorFor", "2.2")
override def actorFor(path: ActorPath): InternalActorRef =
if (path.root == rootPath) actorFor(rootGuardian, path.elements)
else {
log.debug("look-up of foreign ActorPath [{}] failed", path)
deadLetters
}
@deprecated("use actorSelection instead of actorFor", "2.2")
override def actorFor(ref: InternalActorRef, path: Iterable[String]): InternalActorRef =
if (path.isEmpty) {
log.debug("look-up of empty path sequence fails (per definition)")
deadLetters
} else ref.getChild(path.iterator) match {
case Nobody ⇒
log.debug("look-up of path sequence [/{}] failed", path.mkString("/"))
new EmptyLocalActorRef(system.provider, ref.path / path, eventStream)
case x ⇒ x
}
def resolveActorRef(path: String): ActorRef = path match {
case ActorPathExtractor(address, elems) if address == rootPath.address ⇒ resolveActorRef(rootGuardian, elems)
case _ ⇒
log.debug("resolve of unknown path [{}] failed", path)
deadLetters
}
def resolveActorRef(path: ActorPath): ActorRef = {
if (path.root == rootPath) resolveActorRef(rootGuardian, path.elements)
else {
log.debug("resolve of foreign ActorPath [{}] failed", path)
deadLetters
}
}
/**
* INTERNAL API
*/
private[akka] def resolveActorRef(ref: InternalActorRef, pathElements: Iterable[String]): InternalActorRef =
if (pathElements.isEmpty) {
log.debug("resolve of empty path sequence fails (per definition)")
deadLetters
} else ref.getChild(pathElements.iterator) match {
case Nobody ⇒
log.debug("resolve of path sequence [/{}] failed", pathElements.mkString("/"))
new EmptyLocalActorRef(system.provider, ref.path / pathElements, eventStream)
case x ⇒ x
}
def actorOf(system: ActorSystemImpl, props: Props, supervisor: InternalActorRef, path: ActorPath,
systemService: Boolean, deploy: Option[Deploy], lookupDeploy: Boolean, async: Boolean): InternalActorRef = {
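    // Two code paths below: with NoRouter the Props are materialized directly
    // into a Local/RepointableActorRef; with a router configured, a
    // RoutedActorRef is built whose routees are created later by the router
    // itself via context.actorOf().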
props.deploy.routerConfig match {
case NoRouter ⇒
if (settings.DebugRouterMisconfiguration) {
/** @note IMPLEMENT IN SCALA.JS
deployer.lookup(path) foreach { d ⇒
if (d.routerConfig != NoRouter)
log.warning("Configuration says that [{}] should be a router, but code disagrees. Remove the config or add a routerConfig to its Props.", path)
}
*/
}
val props2 = props
/** @note IMPLEMENT IN SCALA.JS
val props2 =
// mailbox and dispatcher defined in deploy should override props
(if (lookupDeploy) deployer.lookup(path) else deploy) match {
case Some(d) ⇒
(d.dispatcher, d.mailbox) match {
case (Deploy.NoDispatcherGiven, Deploy.NoMailboxGiven) ⇒ props
case (dsp, Deploy.NoMailboxGiven) ⇒ props.withDispatcher(dsp)
case (Deploy.NoMailboxGiven, mbx) ⇒ props.withMailbox(mbx)
case (dsp, mbx) ⇒ props.withDispatcher(dsp).withMailbox(mbx)
}
case _ ⇒ props // no deployment config found
}
if (!system.dispatchers.hasDispatcher(props2.dispatcher))
throw new ConfigurationException(s"Dispatcher [${props2.dispatcher}] not configured for path $path")
*/
try {
// @note IMPLEMENT IN SCALA.JS val dispatcher = system.dispatchers.lookup(props2.dispatcher)
val dispatcher = system.dispatchers.lookup(akka.dispatch.Dispatchers.DefaultDispatcherId)
val mailboxType = system.mailboxes.getMailboxType(props2, dispatcher.configurator.config)
if (async) new RepointableActorRef(system, props2, dispatcher, mailboxType, supervisor, path).initialize(async)
else new LocalActorRef(system, props2, dispatcher, mailboxType, supervisor, path)
} catch {
case NonFatal(e) ⇒
throw new ConfigurationException(
s"configuration problem while creating [$path] with dispatcher [${props2.dispatcher}] and mailbox [${props2.mailbox}]", e)
}
case router ⇒
val lookup = None // @note IMPLEMENT IN SCALA.JS if (lookupDeploy) deployer.lookup(path) else None
val fromProps = Iterator(props.deploy.copy(routerConfig = props.deploy.routerConfig withFallback router))
val d = fromProps ++ deploy.iterator ++ lookup.iterator reduce ((a, b) ⇒ b withFallback a)
val p = props.withRouter(d.routerConfig)
/*if (!system.dispatchers.hasDispatcher(p.dispatcher))
throw new ConfigurationException(s"Dispatcher [${p.dispatcher}] not configured for routees of $path")
if (!system.dispatchers.hasDispatcher(d.routerConfig.routerDispatcher))
throw new ConfigurationException(s"Dispatcher [${p.dispatcher}] not configured for router of $path")*/
val routerProps = Props(p.deploy.copy(dispatcher = p.routerConfig.routerDispatcher),
classOf[/** @note IMPLEMENT IN SCALA.JS RoutedActorCell.*/RouterActorCreator], Vector(p.routerConfig))
val routeeProps = p.withRouter(NoRouter)
try {
val routerDispatcher = system.dispatchers.lookup(p.routerConfig.routerDispatcher)
val routerMailbox = system.mailboxes.getMailboxType(routerProps, routerDispatcher.configurator.config)
// routers use context.actorOf() to create the routees, which does not allow us to pass
// these through, but obtain them here for early verification
val routeeDispatcher = system.dispatchers.lookup(p.dispatcher)
val routeeMailbox = system.mailboxes.getMailboxType(routeeProps, routeeDispatcher.configurator.config)
new RoutedActorRef(system, routerProps, routerDispatcher, routerMailbox, routeeProps, supervisor, path).initialize(async)
} catch {
case NonFatal(e) ⇒ throw new ConfigurationException(
s"configuration problem while creating [$path] with router dispatcher [${routerProps.dispatcher}] and mailbox [${routerProps.mailbox}] " +
s"and routee dispatcher [${routeeProps.dispatcher}] and mailbox [${routeeProps.mailbox}]", e)
}
}
}
def getExternalAddressFor(addr: Address): Option[Address] = if (addr == rootPath.address) Some(addr) else None
def getDefaultAddress: Address = rootPath.address
}
| jmnarloch/akka.js | akka-js-actor/js/src/main/scala/akka/actor/ActorRefProvider.scala | Scala | bsd-3-clause | 32,982 |
package com.github.tminglei.slickpg
import java.util.concurrent.Executors
import org.json4s._
import org.scalatest.FunSuite
import slick.jdbc.GetResult
import scala.concurrent.{ExecutionContext, Await}
import scala.concurrent.duration._
class PgJson4sSupportSuite extends FunSuite {
implicit val testExecContext = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(4))
import slick.driver.PostgresDriver
object MyPostgresDriver extends PostgresDriver
with PgJson4sSupport
with array.PgArrayJdbcTypes {
/// for json support
override val pgjson = "jsonb"
type DOCType = text.Document
override val jsonMethods = org.json4s.native.JsonMethods
override val api = new API with JsonImplicits {
implicit val strListTypeMapper = new SimpleArrayJdbcType[String]("text").to(_.toList)
implicit val json4sJsonArrayTypeMapper =
new AdvancedArrayJdbcType[JValue](pgjson,
(s) => utils.SimpleArrayUtils.fromString[JValue](jsonMethods.parse(_))(s).orNull,
(v) => utils.SimpleArrayUtils.mkString[JValue](j=>jsonMethods.compact(jsonMethods.render(j)))(v)
).to(_.toList)
}
val plainAPI = new API with Json4sJsonPlainImplicits
}
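  // With a driver assembled as above, JValue becomes a first-class column type;
  // a hypothetical query can then filter on json fields directly, e.g.
  //   JsonTests.filter(_.json.+>>("a") === "101")
  // (the lifted test below exercises the full operator set).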
///
import MyPostgresDriver.api._
import MyPostgresDriver.jsonMethods._
val db = Database.forURL(url = utils.dbUrl, driver = "org.postgresql.Driver")
case class JsonBean(id: Long, json: JValue, jsons: List[JValue])
class JsonTestTable(tag: Tag) extends Table[JsonBean](tag, "JsonTest1") {
def id = column[Long]("id", O.AutoInc, O.PrimaryKey)
def json = column[JValue]("json")
def jsons = column[List[JValue]]("jsons")
def * = (id, json, jsons) <> (JsonBean.tupled, JsonBean.unapply)
}
val JsonTests = TableQuery[JsonTestTable]
//------------------------------------------------------------------------------
val testRec1 = JsonBean(33L, parse(""" { "a":101, "b":"aaa", "c":[3,4,5,9] } """), List(parse(""" { "a":101, "b":"aaa", "c":[3,4,5,9] } """)))
val testRec2 = JsonBean(35L, parse(""" [ {"a":"v1","b":2}, {"a":"v5","b":3} ] """), List(parse(""" [ {"a":"v1","b":2}, {"a":"v5","b":3} ] """)))
val testRec3 = JsonBean(37L, parse(""" ["a", "b"] """), List(parse("{\"level\":\"DAILY_LEVEL\",\"start\":{\"year\":2013,\"month\":1,\"day\":1},\"end\":{\"year\":2016,\"month\":1,\"day\":1}}")))
test("Json4s Lifted support") {
val json1 = parse(""" {"a":"v1","b":2} """)
val json2 = parse(""" {"a":"v5","b":3} """)
Await.result(db.run(
DBIO.seq(
JsonTests.schema create,
///
JsonTests forceInsertAll List(testRec1, testRec2, testRec3)
).andThen(
DBIO.seq(
JsonTests.filter(_.id === testRec2.id.bind).map(_.json).result.head.map(
r => assert(JArray(List(json1,json2)) === r)
),
JsonTests.to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3) === r)
),
// null return
JsonTests.filter(_.json.+>>("a") === "101").map(_.json.+>("d")).result.head.map(
r => assert(JNull === r)
),
// ->>/->
JsonTests.filter(_.json.+>>("a") === "101").map(_.json.+>>("c")).result.head.map(
r => assert("[3,4,5,9]" === r.replace(" ", ""))
),
JsonTests.filter(_.json.+>>("a") === "101".bind).map(_.json.+>("c")).result.head.map(
r => assert(JArray(List(JInt(3), JInt(4), JInt(5), JInt(9))) === r)
),
JsonTests.filter(_.id === testRec2.id).map(_.json.~>(1)).result.head.map(
r => assert(json2 === r)
),
JsonTests.filter(_.id === testRec2.id).map(_.json.~>>(1)).result.head.map(
r => assert("""{"a":"v5","b":3}""" === r.replace(" ", ""))
),
// #>>/#>
JsonTests.filter(_.id === testRec1.id).map(_.json.#>(List("c"))).result.head.map(
r => assert(parse("[3,4,5,9]") === r)
),
JsonTests.filter(_.json.#>>(List("a")) === "101").result.head.map(
r => assert(testRec1 === r)
),
// {}_array_length
JsonTests.filter(_.id === testRec2.id).map(_.json.arrayLength).result.head.map(
r => assert(2 === r)
),
// {}_array_elements
JsonTests.filter(_.id === testRec2.id).map(_.json.arrayElements).to[List].result.map(
r => assert(List(json1, json2) === r)
),
JsonTests.filter(_.id === testRec2.id).map(_.json.arrayElements).result.head.map(
r => assert(json1 === r)
),
// {}_array_elements_text
JsonTests.filter(_.id === testRec2.id).map(_.json.arrayElementsText).result.head.map(
r => assert(compact(render(json1)).replace(" ", "") === r.replace(" ", ""))
),
// {}_object_keys
JsonTests.filter(_.id === testRec1.id).map(_.json.objectKeys).to[List].result.map(
r => assert(List("a","b","c") === r)
),
JsonTests.filter(_.id === testRec1.id).map(_.json.objectKeys).result.head.map(
r => assert("a" === r)
),
// @>
JsonTests.filter(_.json @> parse(""" {"b":"aaa"} """)).result.head.map(
r => assert(33L === r.id)
),
JsonTests.filter(_.json @> parse(""" [{"a":"v5"}] """)).result.head.map(
r => assert(35L === r.id)
),
// <@
JsonTests.filter(parse(""" {"b":"aaa"} """) <@: _.json).result.head.map(
r => assert(33L === r.id)
),
// {}_typeof
JsonTests.filter(_.id === testRec1.id).map(_.json.+>("a").jsonType).result.head.map(
r => assert("number" === r.toLowerCase)
),
// ?
JsonTests.filter(_.json ?? "b".bind).to[List].result.map(
r => assert(List(testRec1, testRec3) === r)
),
// ?|
JsonTests.filter(_.json ?| List("a", "c").bind).to[List].result.map(
r => assert(List(testRec1, testRec3) === r)
),
// ?&
JsonTests.filter(_.json ?& List("a", "c").bind).to[List].result.map(
r => assert(List(testRec1) === r)
),
// ||
JsonTests.filter(_.id === 33L).map(_.json || parse(""" {"d":"test"} """)).result.head.map(
r => assert(""" {"a": 101, "b": "aaa", "c": [3, 4, 5, 9], "d": "test"} """.replace(" ", "") === compact(render(r)))
),
// -
JsonTests.filter(_.id === 33L).map(_.json - "c".bind).result.head.map(
r => assert(""" {"a": 101, "b": "aaa"} """.replace(" ", "") === compact(render(r)))
),
// #-
JsonTests.filter(_.id === 33L).map(_.json #- List("c")).result.head.map(
r => assert(""" {"a": 101, "b": "aaa"} """.replace(" ", "") === compact(render(r)))
),
        // set (jsonb_set)
JsonTests.filter(_.id === 33L).map(_.json.set(List("c"), parse(""" [1] """).bind)).result.head.map(
r => assert(""" {"a": 101, "b": "aaa", "c": [1]} """.replace(" ", "") === compact(render(r)))
)
)
).andFinally(
JsonTests.schema drop
).transactionally
), Duration.Inf)
}
//------------------------------------------------------------------------------
case class JsonBean1(id: Long, json: JValue)
test("Json4s Plain SQL support") {
import MyPostgresDriver.plainAPI._
implicit val getJsonBeanResult = GetResult(r => JsonBean1(r.nextLong(), r.nextJson()))
val b = JsonBean1(34L, parse(""" { "a":101, "b":"aaa", "c":[3,4,5,9] } """))
Await.result(db.run(
DBIO.seq(
sqlu"""create table JsonTest1(
id int8 not null primary key,
json #${MyPostgresDriver.pgjson} not null)
""",
///
sqlu""" insert into JsonTest1 values(${b.id}, ${b.json}) """,
sql""" select * from JsonTest1 where id = ${b.id} """.as[JsonBean1].head.map(
r => assert(b === r)
),
///
sqlu"drop table if exists JsonTest1 cascade"
).transactionally
), Duration.Inf)
}
}
| vikraman/slick-pg | addons/json4s/src/test/scala/com/github/tminglei/slickpg/PgJson4sSupportSuite.scala | Scala | bsd-2-clause | 8,215 |
/*
* Copyright (c) 2014, Brook 'redattack34' Heisler
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the ModularRayguns team nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.castlebravostudios.rayguns.items.misc
import com.castlebravostudios.rayguns.mod.ModularRayguns
import net.minecraft.item.Item
object Tier3GainMedium extends Item {
setCreativeTab(ModularRayguns.raygunsTab)
setUnlocalizedName("rayguns.Tier3GainMedium")
setTextureName("rayguns:gain_medium_t3")
} | Redattack34/ModularRayguns | src/main/scala/com/castlebravostudios/rayguns/items/misc/Tier3GainMedium.scala | Scala | bsd-3-clause | 2,099 |
/*
* Copyright (c) 2015-2017 Toby Weston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package s4j.scala.chapter06
object NamedArgs {
swap(3, 1) // (1, 3)
swap(first = 3, second = 1) // (1, 3)
swap(second = 1, first = 3) // (1, 3)
def swap(first: Int, second: Int) = (second, first)
}
| tobyweston/learn-scala-java-devs | src/main/scala/s4j/scala/chapter06/NamedArgs.scala | Scala | apache-2.0 | 837 |
object Test {
case object Bob { override def equals(other: Any) = true }
class Bob2 {
override def equals(other: Any) = true
}
val Bob2 = new Bob2
def f0(x: Any) = x match { case Bob2 => Bob2 }
def f1(x: Any) = x match { case Bob => Bob }
// def f2(x: Any): Bob.type = x match { case x @ Bob => x } // should not type check
def main(args: Array[String]): Unit = {
assert(f0(Bob2) eq Bob2)
assert(f0(0) eq Bob2)
assert(f0(Nil) eq Bob2)
assert(f1(Bob) eq Bob)
assert(f1(0) eq Bob)
assert(f1(Nil) eq Bob)
// assert(f2(Bob) eq Bob)
// assert(f2(0) eq Bob)
// assert(f2(Nil) eq Bob)
}
}
| som-snytt/dotty | tests/run/i1463.scala | Scala | apache-2.0 | 645 |
/*
* Copyright 2001-2012 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.concurrent
import org.scalatest._
import exceptions.{TestFailedDueToTimeoutException, TestFailedException, TestPendingException}
import org.scalatest.exceptions.StackDepthExceptionHelper.getStackDepthFun
import org.scalatest.Suite.anExceptionThatShouldCauseAnAbort
import scala.annotation.tailrec
import time.{Nanosecond, Span, Nanoseconds}
// TODO describe backoff algo
/**
* Trait that provides the <code>eventually</code> construct, which periodically retries executing
* a passed by-name parameter, until it either succeeds or the configured timeout has been surpassed.
*
* <p>
* The by-name parameter "succeeds" if it returns a result. It "fails" if it throws any exception that
 * would normally cause a test to fail. (These are any exceptions except <a href="../exceptions/TestPendingException.html"><code>TestPendingException</code></a> and
* <code>Error</code>s listed in the
* <a href="Suite.html#errorHandling">Treatment of <code>java.lang.Error</code>s</a> section of the
* documentation of trait <code>Suite</code>.)
* </p>
*
* <p>
* For example, the following invocation of <code>eventually</code> would succeed (not throw an exception):
* </p>
*
* <pre class="stHighlight">
* val xs = 1 to 125
* val it = xs.iterator
* eventually { it.next should be (3) }
* </pre>
*
* <p>
* However, because the default timeout is 150 milliseconds, the following invocation of
* <code>eventually</code> would ultimately produce a <code>TestFailedDueToTimeoutException</code>:
* </p>
*
* <a name="secondExample"></a>
* <pre class="stHighlight">
* val xs = 1 to 125
* val it = xs.iterator
* eventually { Thread.sleep(50); it.next should be (110) }
* </pre>
*
* <p>
* Assuming the default configuration parameters, a <code>timeout</code> of 150 milliseconds and an <code>interval</code> of 15 milliseconds,
* were passed implicitly to <code>eventually</code>, the detail message of the thrown
* <a href="../exceptions/TestFailedDueToTimeoutException.html"><code>TestFailedDueToTimeoutException</code></a> would look like:
* </p>
*
* <p>
* <code>The code passed to eventually never returned normally. Attempted 2 times over 166.682 milliseconds. Last failure message: 2 was not equal to 110.</code>
* </p>
*
* <p>
* The cause of the thrown <code>TestFailedDueToTimeoutException</code> will be the exception most recently thrown by the block of code passed to eventually. (In
 * the previous example, the cause would be the <code>TestFailedException</code> with the detail message <code>2 was not equal to 110</code>.)
* </p>
*
* <a name="patienceConfig"></a><h2>Configuration of <code>eventually</code></h2>
*
* <p>
* The <code>eventually</code> methods of this trait can be flexibly configured.
* The two configuration parameters for <code>eventually</code> along with their
* default values and meanings are described in the following table:
* </p>
*
* <table style="border-collapse: collapse; border: 1px solid black">
* <tr>
* <th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black">
* <strong>Configuration Parameter</strong>
* </th>
* <th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black">
* <strong>Default Value</strong>
* </th>
* <th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black">
* <strong>Meaning</strong>
* </th>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* <code>timeout</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* <code>scaled(150 milliseconds)</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* the maximum amount of time to allow unsuccessful attempts before giving up and throwing <code>TestFailedException</code>
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* <code>interval</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* <code>scaled(15 milliseconds)</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* the amount of time to sleep between each attempt
* </td>
* </tr>
* </table>
*
* <p>
* The default values of both timeout and interval are passed to the <code>scaled</code> method, inherited
* from <a href="ScaledTimeSpans.html"><code>ScaledTimeSpans</code></a>, so that the defaults can be scaled up
* or down together with other scaled time spans. See the documentation for trait <a href="ScaledTimeSpans.html"><code>ScaledTimeSpans</code></a>
* for more information.
* </p>
*
* <p>
* The <code>eventually</code> methods of trait <code>Eventually</code> each take an <code>PatienceConfig</code>
* object as an implicit parameter. This object provides values for the two configuration parameters. (These configuration parameters
* are called "patience" because they determine how <em>patient</em> tests will be with asynchronous operations: how long
* they will tolerate failures before giving up and how long they will wait before checking again after a failure.) Trait
* <code>Eventually</code> provides an implicit <code>val</code> named <code>patienceConfig</code> with each
* configuration parameter set to its default value.
* If you want to set one or more configuration parameters to a different value for all invocations of
* <code>eventually</code> in a suite you can override this
* val (or hide it, for example, if you are importing the members of the <code>Eventually</code> companion object rather
* than mixing in the trait). For example, if
* you always want the default <code>timeout</code> to be 2 seconds and the default <code>interval</code> to be 5 milliseconds, you
* can override <code>patienceConfig</code>, like this:
*
* <pre class="stHighlight">
* implicit override val patienceConfig =
* PatienceConfig(timeout = scaled(Span(2, Seconds)), interval = scaled(Span(5, Millis)))
* </pre>
*
* <p>
* Or, hide it by declaring a variable of the same name in whatever scope you want the changed values to be in effect:
* </p>
*
* <pre class="stHighlight">
* implicit val patienceConfig =
* PatienceConfig(timeout = scaled(Span(2, Seconds)), interval = scaled(Span(5, Millis)))
* </pre>
*
* <p>
* Passing your new default values to <code>scaled</code> is optional, but a good idea because it allows them to
* be easily scaled if run on a slower or faster system.
* </p>
*
* <p>
* In addition to taking a <code>PatienceConfig</code> object as an implicit parameter, the <code>eventually</code> methods of trait
* <code>Eventually</code> include overloaded forms that take one or two <code>PatienceConfigParam</code>
* objects that you can use to override the values provided by the implicit <code>PatienceConfig</code> for a single <code>eventually</code>
 * invocation. For example, if you want to set <code>timeout</code> to 5 seconds for just one particular <code>eventually</code> invocation,
* you can do so like this:
* </p>
*
* <pre class="stHighlight">
* eventually (timeout(Span(5, Seconds))) { Thread.sleep(10); it.next should be (110) }
* </pre>
*
* <p>
* This invocation of <code>eventually</code> will use 5 seconds for the <code>timeout</code> and whatever value is specified by the
* implicitly passed <code>PatienceConfig</code> object for the <code>interval</code> configuration parameter.
* If you want to set both configuration parameters in this way, just list them separated by commas:
* </p>
*
* <pre class="stHighlight">
* eventually (timeout(Span(5, Seconds)), interval(Span(5, Millis))) { it.next should be (110) }
* </pre>
*
* <p>
* You can also import or mix in the members of <a href="../time/SpanSugar.html"><code>SpanSugar</code></a> if
* you want a more concise DSL for expressing time spans:
* </p>
*
* <pre class="stHighlight">
* eventually (timeout(5 seconds), interval(5 millis)) { it.next should be (110) }
* </pre>
*
* <p>
* Note that ScalaTest will not scale any time span that is not explicitly passed to <code>scaled</code> to make
* the meaning of the code as obvious as possible. Thus
* if you ask for "<code>timeout(5 seconds)</code>" you will get exactly that: a timeout of five seconds. If you want such explicitly
 * given values to be scaled, you must pass them to <code>scaled</code> explicitly like this:
* </p>
*
* <pre class="stHighlight">
* eventually (timeout(scaled(5 seconds))) { it.next should be (110) }
* </pre>
*
* <p>
* The previous code says more clearly that the timeout will be five seconds, unless scaled higher or lower by the <code>scaled</code> method.
* </p>
*
* <a name="simpleBackoff"></a><h2>Simple backoff algorithm</h2>
*
* <p>
* The <code>eventually</code> methods employ a very simple backoff algorithm to try and maximize the speed of tests. If an asynchronous operation
* completes quickly, a smaller interval will yield a faster test. But if an asynchronous operation takes a while, a small interval will keep the CPU
* busy repeatedly checking and rechecking a not-ready operation, to some extent taking CPU cycles away from other processes that could proceed. To
* strike the right balance between these design tradeoffs, the <code>eventually</code> methods will check more frequently during the initial interval.
* </p>
*
* </p>
* Rather than sleeping an entire interval if the initial attempt fails, <code>eventually</code> will only sleep 1/10 of the configured interval. It
* will continue sleeping only 1/10 of the configured interval until the configured interval has passed, after which it sleeps the configured interval
* between attempts. Here's an example in which the timeout is set equal to the interval:
* </p>
*
* <pre class="stHighlight">
* val xs = 1 to 125
* val it = xs.iterator
* eventually(timeout(100 milliseconds), interval(100 milliseconds)) { it.next should be (110) }
* </pre>
*
* <p>
* Even though this call to <code>eventually</code> will time out after only one interval, approximately, the error message will likely report that more
* than one (and less than ten) attempts were made:
* </p>
*
 * <p>
 * <code>The code passed to eventually never returned normally. Attempted 6 times over 100.485 milliseconds. Last failure message: 6 was not equal to 110.</code>
 * </p>
*
* <p>
* Note that if the initial attempt takes longer than the configured interval to complete, <code>eventually</code> will never sleep for
* a 1/10 interval. You can observe this behavior in the <a href="#secondExample">second example</a> above in which the first statement in the block of code passed to <code>eventually</code>
* was <code>Thread.sleep(50)</code>.
* </p>
*
* <a name="patienceConfig"></a><h2>Usage note: <code>Eventually</code> intended primarily for integration testing</h2>
*
* <p>
* Although the default timeouts of trait <code>Eventually</code> are tuned for unit testing, the use of <code>Eventually</code> in unit tests is
* a choice you should question. Usually during unit testing you'll want to mock out subsystems that would require <code>Eventually</code>, such as
* network services with varying and unpredictable response times. This will allow your unit tests to run as fast as possible while still testing
 * the focused bits of behavior they are designed to test.
 * </p>
 *
* <p>
* Nevertheless, because sometimes it will make sense to use <code>Eventually</code> in unit tests (and
* because it is destined to happen anyway even when it isn't the best choice), <code>Eventually</code> by default uses
* timeouts tuned for unit tests: Calls to <code>eventually</code> are more likely to succeed on fast development machines, and if a call does time out,
* it will do so quickly so the unit tests can move on.
* </p>
*
* <p>
* When you are using <code>Eventually</code> for integration testing, therefore, the default timeout and interval may be too small. A
* good way to override them is by mixing in trait <a href="IntegrationPatience.html"><code>IntegrationPatience</code></a> or a similar trait of your
* own making. Here's an example:
* </p>
*
* <pre class="stHighlight">
* class ExampleSpec extends FeatureSpec with Eventually with IntegrationPatience {
* // Your integration tests here...
* }
* </pre>
*
* <p>
 * Trait <code>IntegrationPatience</code> increases the default timeout from 150 milliseconds to 15 seconds, and the default
 * interval from 15 milliseconds to 150 milliseconds. If need be, you can fine-tune the timeout and interval by
* specifying a <a href="../tools/Runner$#timeSpanScaleFactor">time span scale factor</a> when you
* run your tests.
* </p>
*
* @author Bill Venners
* @author Chua Chee Seng
*/
trait Eventually extends PatienceConfiguration {
/**
* Invokes the passed by-name parameter repeatedly until it either succeeds, or a configured maximum
* amount of time has passed, sleeping a configured interval between attempts.
*
* <p>
* The by-name parameter "succeeds" if it returns a result. It "fails" if it throws any exception that
   * would normally cause a test to fail. (These are any exceptions except <a href="../exceptions/TestPendingException.html"><code>TestPendingException</code></a> and
* <code>Error</code>s listed in the
* <a href="Suite.html#errorHandling">Treatment of <code>java.lang.Error</code>s</a> section of the
* documentation of trait <code>Suite</code>.)
* </p>
*
* <p>
* The maximum amount of time in milliseconds to tolerate unsuccessful attempts before giving up and throwing
* <code>TestFailedException</code> is configured by the value contained in the passed
* <code>timeout</code> parameter.
* The interval to sleep between attempts is configured by the value contained in the passed
* <code>interval</code> parameter.
* </p>
*
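   * <p>
   * For example (a sketch, reusing an iterator <code>it</code> like the one in the examples above):
   * </p>
   *
   * <pre class="stHighlight">
   * eventually (timeout(Span(2, Seconds)), interval(Span(10, Millis))) { it.next should be (3) }
   * </pre>
   *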
* @param timeout the <code>Timeout</code> configuration parameter
* @param interval the <code>Interval</code> configuration parameter
* @param fun the by-name parameter to repeatedly invoke
* @return the result of invoking the <code>fun</code> by-name parameter, the first time it succeeds
*/
def eventually[T](timeout: Timeout, interval: Interval)(fun: => T): T =
eventually(fun)(PatienceConfig(timeout.value, interval.value))
/**
* Invokes the passed by-name parameter repeatedly until it either succeeds, or a configured maximum
* amount of time has passed, sleeping a configured interval between attempts.
*
* <p>
* The by-name parameter "succeeds" if it returns a result. It "fails" if it throws any exception that
   * would normally cause a test to fail. (These are any exceptions except <a href="../exceptions/TestPendingException.html"><code>TestPendingException</code></a> and
* <code>Error</code>s listed in the
* <a href="Suite.html#errorHandling">Treatment of <code>java.lang.Error</code>s</a> section of the
* documentation of trait <code>Suite</code>.)
* </p>
*
* <p>
* The maximum amount of time in milliseconds to tolerate unsuccessful attempts before giving up and throwing
* <code>TestFailedException</code> is configured by the value contained in the passed
* <code>timeout</code> parameter.
* The interval to sleep between attempts is configured by the <code>interval</code> field of
* the <code>PatienceConfig</code> passed implicitly as the last parameter.
* </p>
*
* @param timeout the <code>Timeout</code> configuration parameter
* @param fun the by-name parameter to repeatedly invoke
* @param config the <code>PatienceConfig</code> object containing the (unused) <code>timeout</code> and
* (used) <code>interval</code> parameters
* @return the result of invoking the <code>fun</code> by-name parameter, the first time it succeeds
*/
def eventually[T](timeout: Timeout)(fun: => T)(implicit config: PatienceConfig): T =
eventually(fun)(PatienceConfig(timeout.value, config.interval))
/**
* Invokes the passed by-name parameter repeatedly until it either succeeds, or a configured maximum
* amount of time has passed, sleeping a configured interval between attempts.
*
* <p>
* The by-name parameter "succeeds" if it returns a result. It "fails" if it throws any exception that
   * would normally cause a test to fail. (These are any exceptions except <a href="../exceptions/TestPendingException.html"><code>TestPendingException</code></a> and
* <code>Error</code>s listed in the
* <a href="Suite.html#errorHandling">Treatment of <code>java.lang.Error</code>s</a> section of the
* documentation of trait <code>Suite</code>.)
* </p>
*
* <p>
* The maximum amount of time in milliseconds to tolerate unsuccessful attempts before giving up is configured by the <code>timeout</code> field of
* the <code>PatienceConfig</code> passed implicitly as the last parameter.
* The interval to sleep between attempts is configured by the value contained in the passed
* <code>interval</code> parameter.
* </p>
*
* @param interval the <code>Interval</code> configuration parameter
* @param fun the by-name parameter to repeatedly invoke
* @param config the <code>PatienceConfig</code> object containing the (used) <code>timeout</code> and
* (unused) <code>interval</code> parameters
* @return the result of invoking the <code>fun</code> by-name parameter, the first time it succeeds
*/
def eventually[T](interval: Interval)(fun: => T)(implicit config: PatienceConfig): T =
eventually(fun)(PatienceConfig(config.timeout, interval.value))
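  // Sketch: overriding only the interval keeps the timeout from the implicit PatienceConfig:
  //   eventually(interval(Span(10, Millis))) { assert(it.next == 3) }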
/**
* Invokes the passed by-name parameter repeatedly until it either succeeds, or a configured maximum
* amount of time has passed, sleeping a configured interval between attempts.
*
* <p>
* The by-name parameter "succeeds" if it returns a result. It "fails" if it throws any exception that
 * would normally cause a test to fail. (These are any exceptions except <a href="TestPendingException.html"><code>TestPendingException</code></a> and
* <code>Error</code>s listed in the
* <a href="Suite.html#errorHandling">Treatment of <code>java.lang.Error</code>s</a> section of the
* documentation of trait <code>Suite</code>.)
* </p>
*
* <p>
* The maximum amount of time in milliseconds to tolerate unsuccessful attempts before giving up is configured by the <code>timeout</code> field of
* the <code>PatienceConfig</code> passed implicitly as the last parameter.
* The interval to sleep between attempts is configured by the <code>interval</code> field of
* the <code>PatienceConfig</code> passed implicitly as the last parameter.
* </p>
*
* @param fun the by-name parameter to repeatedly invoke
* @param config the <code>PatienceConfig</code> object containing the <code>timeout</code> and
* <code>interval</code> parameters
* @return the result of invoking the <code>fun</code> by-name parameter, the first time it succeeds
*/
def eventually[T](fun: => T)(implicit config: PatienceConfig): T = {
val startNanos = System.nanoTime
def makeAValiantAttempt(): Either[Throwable, T] = {
try {
Right(fun)
}
catch {
case tpe: TestPendingException => throw tpe
case e: Throwable if !anExceptionThatShouldCauseAnAbort(e) => Left(e)
}
}
val initialInterval = Span(config.interval.totalNanos * 0.1, Nanoseconds) // config.interval scaledBy 0.1
@tailrec
def tryTryAgain(attempt: Int): T = {
val timeout = config.timeout
val interval = config.interval
makeAValiantAttempt() match {
case Right(result) => result
case Left(e) =>
val duration = System.nanoTime - startNanos
if (duration < timeout.totalNanos) {
          if (duration < interval.totalNanos) // For the first interval, we wake up every 1/10 of the interval. This is mainly for optimization purposes.
Thread.sleep(initialInterval.millisPart, initialInterval.nanosPart)
else
Thread.sleep(interval.millisPart, interval.nanosPart)
}
else {
val durationSpan = Span(1, Nanosecond) scaledBy duration // Use scaledBy to get pretty units
def msg =
if (e.getMessage == null)
Resources("didNotEventuallySucceed", attempt.toString, durationSpan.prettyString)
else
Resources("didNotEventuallySucceedBecause", attempt.toString, durationSpan.prettyString, e.getMessage)
throw new TestFailedDueToTimeoutException(
sde => Some(msg),
Some(e),
getStackDepthFun("Eventually.scala", "eventually"),
None,
config.timeout
)
}
tryTryAgain(attempt + 1)
}
}
tryTryAgain(1)
}
}
/**
* Companion object that facilitates the importing of <code>Eventually</code> members as
* an alternative to mixing in the trait. One use case is to import <code>Eventually</code>'s members so you can use
* them in the Scala interpreter:
*
* <pre class="stREPL">
* $ scala -cp scalatest-1.8.jar
* Welcome to Scala version 2.9.1.final (Java HotSpot(TM) 64-Bit Server VM, Java 1.6.0_29).
* Type in expressions to have them evaluated.
* Type :help for more information.
*
* scala> import org.scalatest._
* import org.scalatest._
*
* scala> import matchers.ShouldMatchers._
* import matchers.ShouldMatchers._
*
* scala> import concurrent.Eventually._
* import concurrent.Eventually._
*
* scala> val xs = 1 to 125
* xs: scala.collection.immutable.Range.Inclusive = Range(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, ..., 125)
*
* scala> val it = xs.iterator
* it: Iterator[Int] = non-empty iterator
*
* scala> eventually { it.next should be (3) }
*
* scala> eventually { Thread.sleep(999); it.next should be (3) }
* org.scalatest.TestFailedException: The code passed to eventually never returned normally.
* Attempted 2 times, sleeping 10 milliseconds between each attempt.
* at org.scalatest.Eventually$class.tryTryAgain$1(Eventually.scala:313)
* at org.scalatest.Eventually$class.eventually(Eventually.scala:322)
* ...
* </pre>
*/
object Eventually extends Eventually
| svn2github/scalatest | src/main/scala/org/scalatest/concurrent/Eventually.scala | Scala | apache-2.0 | 23,109 |
package controllers.web
import play.api._
import play.api.mvc._
import play.api.Play.current
import play.api.data.Form
import play.api.data.Forms._
import play.api.data.validation.Constraints._
import play.api.i18n.Messages
import models._
import utils.silhouette._
import utils.silhouette.Implicits._
import com.mohiva.play.silhouette.core.{SignUpEvent, LoginEvent, LogoutEvent}
import com.mohiva.play.silhouette.core.providers.Credentials
import com.mohiva.play.silhouette.core.exceptions.{AuthenticationException, AccessDeniedException}
import utils.Constraints._
import utils.web.Mailer
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits._
object Auth extends SilhouetteWebController {
// SIGN UP
val signUpForm = Form(
mapping(
"id" -> ignored(None: Option[Long]),
"email" -> email.verifying(maxLength(250), userUnique),
"emailConfirmed" -> ignored(false),
"password" -> nonEmptyText.verifying(minLength(6)),
"nick" -> nonEmptyText,
"firstName" -> nonEmptyText,
"lastName" -> nonEmptyText
)(User.apply)(User.unapply)
)
/**
   * Starts the sign-up mechanism. It shows a form that the user has to fill in and submit.
*/
def startSignUp = UserAwareAction.async { implicit request =>
Future.successful( request.identity match {
case Some(user) => Redirect(routes.Application.index)
case None => Ok(views.html.web.auth.signUp(signUpForm))
})
}
/**
   * Handles the form submitted by the user. The user and their hashed password are saved, and an email is sent with a link to confirm the email address.
*/
def handleStartSignUp = Action.async { implicit request =>
signUpForm.bindFromRequest.fold(
formWithErrors => Future.successful(BadRequest(views.html.web.auth.signUp(formWithErrors))),
user => {
User.save(user)
val authInfo = passwordHasher.hash(user.password)
authInfoService.save(user.email, authInfo)
val token = TokenUser(user.email, isSignUp = true)
tokenService.create(token)
Mailer.welcome(user, link = routes.Auth.signUp(token.id).absoluteURL())
Future.successful(Ok(views.html.web.auth.almostSignedUp(user)))
}
)
}
/**
   * Confirms the user's email address based on the token and authenticates them.
*/
def signUp (tokenId: String) = Action.async { implicit request =>
tokenService.retrieve(tokenId).flatMap {
case Some(token) if (token.isSignUp && !token.isExpired) => {
User.findByEmail(token.email).flatMap {
case Some(user) => {
authenticatorService.create(user).flatMap { authenticator =>
if (!user.emailConfirmed) {
User.save(user.copy(emailConfirmed = true))
eventBus.publish(SignUpEvent(user, request, request2lang))
}
eventBus.publish(LoginEvent(user, request, request2lang))
tokenService.consume(tokenId)
authenticatorService.init(authenticator, Future.successful(Ok(views.html.web.auth.signedUp(user))))
}
}
case None => Future.failed(new AuthenticationException("Couldn't find user"))
}
}
case Some(token) => {
tokenService.consume(tokenId)
notFoundDefault
}
case None => notFoundDefault
}
}
// SIGN IN
val signInForm = Form(
mapping(
"identifier" -> email,
"password" -> nonEmptyText
)(Credentials.apply)(Credentials.unapply)
)
/**
* Starts the sign in mechanism. It shows the login form.
*/
def signIn = UserAwareAction.async { implicit request =>
Future.successful( request.identity match {
case Some(user) => Redirect(routes.Application.index)
case None => Ok(views.html.web.auth.signIn(signInForm))
})
}
/**
   * Authenticates the user based on their email and password.
*/
def authenticate = Action.async { implicit request =>
signInForm.bindFromRequest.fold(
formWithErrors => Future.successful(BadRequest(views.html.web.auth.signIn(formWithErrors))),
credentials => {
credentialsProvider.authenticate(credentials).flatMap { loginInfo =>
identityService.retrieve(loginInfo).flatMap {
case Some(user) => authenticatorService.create(user).flatMap { authenticator =>
eventBus.publish(LoginEvent(user, request, request2lang))
authenticatorService.init(authenticator, Future.successful(Redirect(routes.Application.index)))
}
case None => Future.failed(new AuthenticationException("Couldn't find user"))
}
}.recoverWith {
case e: AccessDeniedException => Future.successful(Redirect(routes.Auth.signIn).flashing("error" -> Messages("access.credentials.incorrect")))
}.recoverWith(exceptionHandler)
}
)
}
// SIGN OUT
/**
* Signs out the user
*/
def signOut = SecuredAction.async { implicit request =>
eventBus.publish(LogoutEvent(request.identity, request, request2lang))
authenticatorService.retrieve.flatMap {
case Some(authenticator) => authenticatorService.discard(authenticator, Future.successful(Redirect(routes.Application.index)))
case None => Future.failed(new AuthenticationException("Couldn't find authenticator"))
}
}
// FORGOT PASSWORD
val emailForm = Form(single("email" -> email.verifying(userExists)))
/**
   * Starts the reset-password mechanism if the user has forgotten their password. It shows a form to enter their email address.
*/
def forgotPassword = UserAwareAction.async { implicit request =>
Future.successful( request.identity match {
case Some(user) => Redirect(routes.Application.index)
case None => Ok(views.html.web.auth.forgotPassword(emailForm))
})
}
/**
* Sends an email to the user with a link to reset the password
*/
def handleForgotPassword = Action.async { implicit request =>
emailForm.bindFromRequest.fold(
formWithErrors => Future.successful(BadRequest(views.html.web.auth.forgotPassword(formWithErrors))),
email => {
val token = TokenUser(email, isSignUp = false)
tokenService.create(token)
Mailer.forgotPassword(email, link = routes.Auth.resetPassword(token.id).absoluteURL())
Future.successful(Ok(views.html.web.auth.forgotPasswordSent(email)))
}
)
}
val passwordsForm = Form(tuple(
"password1" -> nonEmptyText(minLength = 6),
"password2" -> nonEmptyText
) verifying(Messages("passwords.not.equal"), passwords => passwords._2 == passwords._1 ))
/**
   * Confirms the user's link based on the token and shows them a form to reset the password.
*/
def resetPassword (tokenId: String) = Action.async { implicit request =>
tokenService.retrieve(tokenId).flatMap {
case Some(token) if (!token.isSignUp && !token.isExpired) => {
Future.successful(Ok(views.html.web.auth.resetPassword(tokenId, passwordsForm)))
}
case Some(token) => {
tokenService.consume(tokenId)
notFoundDefault
}
case None => notFoundDefault
}
}
/**
* Saves the new password and authenticates the user
*/
def handleResetPassword (tokenId: String) = Action.async { implicit request =>
passwordsForm.bindFromRequest.fold(
formWithErrors => Future.successful(BadRequest(views.html.web.auth.resetPassword(tokenId, formWithErrors))),
passwords => {
tokenService.retrieve(tokenId).flatMap {
case Some(token) if (!token.isSignUp && !token.isExpired) => {
User.findByEmail(token.email).flatMap {
case Some(user) => {
val authInfo = passwordHasher.hash(passwords._1)
authInfoService.save(token.email, authInfo)
authenticatorService.create(user).flatMap { authenticator =>
eventBus.publish(LoginEvent(user, request, request2lang))
tokenService.consume(tokenId)
authenticatorService.init(authenticator, Future.successful(Ok(views.html.web.auth.resetedPassword(user))))
}
}
case None => Future.failed(new AuthenticationException("Couldn't find user"))
}
}
case Some(token) => {
tokenService.consume(tokenId)
notFoundDefault
}
case None => notFoundDefault
}
}
)
}
def notFoundDefault (implicit request: RequestHeader) =
Future.successful(NotFound(views.html.web.errors.onHandlerNotFound(request)))
} | vtapadia/crickit | modules/web/app/controllers/web/Auth.scala | Scala | apache-2.0 | 8,073 |
package org.bitcoins.testkit.wallet
import org.bitcoins.commons.config.AppConfig
import org.bitcoins.core.api.chain.ChainQueryApi
import org.bitcoins.core.api.node.NodeApi
import org.bitcoins.core.currency.Satoshis
import org.bitcoins.core.protocol.dlc.models.ContractOraclePair
import org.bitcoins.dlc.wallet.{DLCAppConfig, DLCWallet}
import org.bitcoins.rpc.client.common.BitcoindRpcClient
import org.bitcoins.core.protocol.dlc.models.SingleContractInfo
import org.bitcoins.server.BitcoinSAppConfig
import org.bitcoins.testkit.BitcoinSTestAppConfig
import org.bitcoins.testkit.wallet.DLCWalletUtil.InitializedDLCWallet
import org.bitcoins.testkit.wallet.FundWalletUtil.FundedDLCWallet
import org.bitcoins.wallet.config.WalletAppConfig
import org.scalatest.FutureOutcome
import scala.concurrent.Future
trait BitcoinSDualWalletTest extends BitcoinSWalletTest {
import BitcoinSWalletTest._
implicit protected def config2: BitcoinSAppConfig =
BitcoinSTestAppConfig.getSpvTestConfig()
implicit protected def wallet2AppConfig: WalletAppConfig = {
config2.walletConf
}
implicit protected def dlc2AppConfig: DLCAppConfig = {
config2.dlcConf
}
override def beforeAll(): Unit = {
AppConfig.throwIfDefaultDatadir(getFreshConfig.walletConf)
AppConfig.throwIfDefaultDatadir(config2.walletConf)
AppConfig.throwIfDefaultDatadir(getFreshConfig.dlcConf)
AppConfig.throwIfDefaultDatadir(config2.dlcConf)
super.beforeAll()
}
  /** Creates two segwit wallets that are funded with some bitcoin. These wallets are NOT
   * peered with a bitcoind, so the funds in the wallets are not tied to an
   * underlying blockchain.
   */
def withDualFundedDLCWallets(test: OneArgAsyncTest): FutureOutcome = {
makeDependentFixture(
build = () =>
for {
walletA <-
FundWalletUtil.createFundedDLCWallet(nodeApi,
chainQueryApi,
getBIP39PasswordOpt(),
Some(segwitWalletConf))
walletB <- FundWalletUtil.createFundedDLCWallet(
nodeApi,
chainQueryApi,
getBIP39PasswordOpt(),
Some(segwitWalletConf))(config2, system)
} yield (walletA, walletB),
destroy = { fundedWallets: (FundedDLCWallet, FundedDLCWallet) =>
for {
_ <- destroyDLCWallet(fundedWallets._1.wallet)
_ <- destroyDLCWallet(fundedWallets._2.wallet)
} yield ()
}
)(test)
}
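  /* Wiring sketch (hypothetical suite using this fixture):
   *   override type FixtureParam = (FundedDLCWallet, FundedDLCWallet)
   *   override def withFixture(test: OneArgAsyncTest): FutureOutcome =
   *     withDualFundedDLCWallets(test)
   */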
/** Dual funded DLC wallets that are backed by a bitcoind node */
def withDualFundedDLCWallets(
test: OneArgAsyncTest,
bitcoind: BitcoindRpcClient): FutureOutcome = {
makeDependentFixture(
build = () => {
createDualFundedDLCWallet(nodeApi = bitcoind, chainQueryApi = bitcoind)
},
destroy = { fundedWallets: (FundedDLCWallet, FundedDLCWallet) =>
destroyDLCWallets(dlcWallet1 = fundedWallets._1.wallet,
dlcWallet2 = fundedWallets._2.wallet)
}
)(test)
}
private def createDualFundedDLCWallet(
nodeApi: NodeApi,
chainQueryApi: ChainQueryApi): Future[
(FundedDLCWallet, FundedDLCWallet)] = {
val walletAF = FundWalletUtil.createFundedDLCWallet(
nodeApi = nodeApi,
chainQueryApi = chainQueryApi,
bip39PasswordOpt = getBIP39PasswordOpt(),
extraConfig = Some(segwitWalletConf))
val walletBF = FundWalletUtil.createFundedDLCWallet(
nodeApi,
chainQueryApi,
getBIP39PasswordOpt(),
Some(segwitWalletConf))(config2, system)
for {
walletA <- walletAF
walletB <- walletBF
} yield (walletA, walletB)
}
private def destroyDLCWallets(
dlcWallet1: DLCWallet,
dlcWallet2: DLCWallet): Future[Unit] = {
val destroy1F = destroyDLCWallet(dlcWallet1)
val destroy2F = destroyDLCWallet(dlcWallet2)
for {
_ <- destroy1F
_ <- destroy2F
} yield ()
}
/** Creates 2 funded segwit wallets that have a DLC initiated */
def withDualDLCWallets(
test: OneArgAsyncTest,
contractOraclePair: ContractOraclePair): FutureOutcome = {
makeDependentFixture(
build = () => {
createDualWalletsWithDLC(contractOraclePair = contractOraclePair,
nodeApi = nodeApi,
chainQueryApi = chainQueryApi)
},
destroy = { dlcWallets: (InitializedDLCWallet, InitializedDLCWallet) =>
destroyDLCWallets(dlcWallet1 = dlcWallets._1.wallet,
dlcWallet2 = dlcWallets._2.wallet)
}
)(test)
}
def withDualDLCWallets(
test: OneArgAsyncTest,
contractOraclePair: ContractOraclePair,
bitcoind: BitcoindRpcClient): FutureOutcome = {
makeDependentFixture(
build = () => {
createDualWalletsWithDLC(contractOraclePair = contractOraclePair,
bitcoind = bitcoind)
},
destroy = { dlcWallets: (InitializedDLCWallet, InitializedDLCWallet) =>
destroyDLCWallets(dlcWallet1 = dlcWallets._1.wallet,
dlcWallet2 = dlcWallets._2.wallet)
}
)(test)
}
private def createDualWalletsWithDLC(
contractOraclePair: ContractOraclePair,
bitcoind: BitcoindRpcClient): Future[
(InitializedDLCWallet, InitializedDLCWallet)] = {
for {
walletA <- FundWalletUtil.createFundedDLCWalletWithBitcoind(
bitcoind,
getBIP39PasswordOpt(),
Some(segwitWalletConf))
walletB <- FundWalletUtil.createFundedDLCWalletWithBitcoind(
bitcoind = bitcoind,
bip39PasswordOpt = getBIP39PasswordOpt(),
extraConfig = Some(segwitWalletConf))(config2, system)
amt = expectedDefaultAmt / Satoshis(2)
contractInfo = SingleContractInfo(amt.satoshis, contractOraclePair)
(dlcWalletA, dlcWalletB) <-
DLCWalletUtil.initDLC(walletA, walletB, contractInfo)
} yield (dlcWalletA, dlcWalletB)
}
private def createDualWalletsWithDLC(
contractOraclePair: ContractOraclePair,
nodeApi: NodeApi,
chainQueryApi: ChainQueryApi): Future[
(InitializedDLCWallet, InitializedDLCWallet)] = {
for {
walletA <- FundWalletUtil.createFundedDLCWallet(
nodeApi = nodeApi,
chainQueryApi = chainQueryApi,
bip39PasswordOpt = getBIP39PasswordOpt(),
extraConfig = Some(segwitWalletConf))
walletB <- FundWalletUtil.createFundedDLCWallet(
nodeApi = nodeApi,
chainQueryApi = chainQueryApi,
bip39PasswordOpt = getBIP39PasswordOpt(),
extraConfig = Some(segwitWalletConf))(config2, system)
amt = expectedDefaultAmt / Satoshis(2)
contractInfo = SingleContractInfo(amt.satoshis, contractOraclePair)
(dlcWalletA, dlcWalletB) <-
DLCWalletUtil.initDLC(walletA, walletB, contractInfo)
} yield (dlcWalletA, dlcWalletB)
}
}
| bitcoin-s/bitcoin-s | testkit/src/main/scala/org/bitcoins/testkit/wallet/BitcoinSDualWalletTest.scala | Scala | mit | 6,975 |
package org.orbeon.oxf.fr.relational
sealed trait Version
case object Unspecified extends Version
case object Next extends Version
case class Specific (version: Int) extends Version
case class ForDocument(documentId: String) extends Version
object Version {
val OrbeonForDocumentId = "Orbeon-For-Document-Id"
val OrbeonFormDefinitionVersion = "Orbeon-Form-Definition-Version"
val OrbeonForDocumentIdLower = OrbeonForDocumentId.toLowerCase
val OrbeonFormDefinitionVersionLower = OrbeonFormDefinitionVersion.toLowerCase
def apply(documentId: Option[String], version: Option[String]): Version =
documentId match {
case Some(id) ⇒ ForDocument(id)
case None ⇒
version match {
case None ⇒ Unspecified
case Some("next") ⇒ Next
case Some(v) ⇒ Specific(v.toInt)
}
}
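  // Resolution examples (sketch): a documentId always wins over an explicit version.
  //   Version(None, None)              == Unspecified
  //   Version(None, Some("next"))      == Next
  //   Version(None, Some("3"))         == Specific(3)
  //   Version(Some("doc42"), Some("3")) == ForDocument("doc42")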
} | wesley1001/orbeon-forms | src/main/scala/org/orbeon/oxf/fr/relational/Version.scala | Scala | lgpl-2.1 | 958 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600j.v3
import org.joda.time.LocalDate
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600.v3.retriever.CT600BoxRetriever
case class J25A(value: Option[LocalDate]) extends SchemeDateBox{
override def validate(boxRetriever: CT600BoxRetriever): Set[CtValidation] =
validateSchemeDate(boxRetriever.retrieveJ20(), boxRetriever.retrieveJ20A(), boxRetriever.retrieveJ25())
}
| keithhall/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600j/v3/J25A.scala | Scala | apache-2.0 | 1,013 |
package net.fwbrasil.radon.util
import scala.collection._
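/** Marker trait for values that can be stored in an [[ExclusiveThreadLocal]]. It records
 *  the single thread (if any) that the value is currently bound to. */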
trait ExclusiveThreadLocalItem {
private[util] var boundThread: Option[Thread] = None
private[util] def setBoundThread(thread: Option[Thread]) =
boundThread = thread
}
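/** A ThreadLocal variant whose stored item may be bound to at most one thread at a time:
 *  `set` throws an IllegalStateException if the item is already bound to another thread.
 *
 *  Usage sketch (hypothetical item type):
 *    val local = new ExclusiveThreadLocal[MyItem]
 *    local.set(Some(item))   // binds `item` to the current thread
 *    local.get               // Some(item), on this thread only
 *    local.clean(Some(item)) // releases the binding
 */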
class ExclusiveThreadLocal[T <: ExclusiveThreadLocalItem] {
val underlying = new ThreadLocal[Option[T]]
def get: Option[T] = {
val underlyingGet = underlying.get
if (underlyingGet == null)
None
else
underlyingGet
}
def set(value: Option[T]) = {
require(value != null && value.isDefined)
val item = value.get
val currentThread = Thread.currentThread
item.synchronized {
val actualBoundThread = item.boundThread
if (actualBoundThread.isDefined && currentThread != actualBoundThread.get)
throw new IllegalStateException(
"ExclusiveThreadLocal: value is bound to another thread.")
underlying.set(value)
item.setBoundThread(Some(currentThread))
}
}
def clean(value: Option[T]) = {
underlying.set(None)
if (value.isDefined)
value.get.setBoundThread(None)
}
} | fwbrasil/radon-stm | src/main/scala/net/fwbrasil/radon/util/ExclusiveThreadLocal.scala | Scala | lgpl-2.1 | 1,272 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
case class AC128C(value: Option[Int]) extends CtBoxIdentifier(name = "Tangible assets - Motor vehicles - depreciation at POA START")
with CtOptionalInteger
with Input
with ValidatableBox[Frs102AccountsBoxRetriever]
with Validators {
override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] = {
collectErrors(
validateMoney(value, min = 0)
)
}
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC128C.scala | Scala | apache-2.0 | 1,154 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Steven Blundy, Mark Harrah, David MacIver, Mikko Peltonen
*/
package sbt
import scala.collection.immutable.TreeSet
private trait RunCompleteAction extends NotNull
private case class Exit(code: Int) extends RunCompleteAction
private object Reload extends RunCompleteAction
/** This class is the entry point for sbt. If it is given any arguments, it interprets them
* as actions, executes the corresponding actions, and exits. If there were no arguments provided,
* sbt enters interactive mode.*/
object Main
{
/** The entry point for sbt. If arguments are specified, they are interpreted as actions, executed,
* and then the program terminates. If no arguments are specified, the program enters interactive
* mode. Call run if you need to run sbt in the same JVM.*/
def main(args: Array[String])
{
val exitCode = run(args)
if(exitCode == RebootExitCode)
{
println("Rebooting is not supported when the sbt loader is not used.")
println("Please manually restart sbt.")
}
System.exit(exitCode)
}
val RebootExitCode = -1
val NormalExitCode = 0
val SetupErrorExitCode = 1
val SetupDeclinedExitCode = 2
val LoadErrorExitCode = 3
val UsageErrorExitCode = 4
val BuildErrorExitCode = 5
def run(args: Array[String]): Int =
{
val startTime = System.currentTimeMillis
Project.loadProject match
{
case err: LoadSetupError =>
println("\n" + err.message)
ExitHooks.runExitHooks(Project.bootLogger)
SetupErrorExitCode
case LoadSetupDeclined =>
ExitHooks.runExitHooks(Project.bootLogger)
SetupDeclinedExitCode
case err: LoadError =>
{
val log = Project.bootLogger
println(err.message)
ExitHooks.runExitHooks(log)
// Because this is an error that can probably be corrected, prompt user to try again.
val line =
try { SimpleReader.readLine("\n Hit enter to retry or 'exit' to quit: ") }
catch
{
case e =>
log.trace(e)
log.error(e.toString)
None
}
line match
{
case Some(l) => if(!isTerminateAction(l)) run(args) else NormalExitCode
case None => LoadErrorExitCode
}
}
case success: LoadSuccess =>
{
import success.project
val doNext: RunCompleteAction =
// in interactive mode, fill all undefined properties
if(args.length > 0 || fillUndefinedProjectProperties(project.projectClosure.toList.reverse))
startProject(project, args, startTime)
else
new Exit(NormalExitCode)
ExitHooks.runExitHooks(project.log)
doNext match
{
case Reload => run(args)
case x: Exit => x.code
}
}
}
}
private def startProject(project: Project, args: Array[String], startTime: Long): RunCompleteAction =
{
project.log.info("Building project " + project.name + " " + project.version.toString + " using " + project.getClass.getName)
val scalaVersionOpt = ScalaVersion.current orElse project.scalaVersion.get
for(sbtVersion <- project.sbtVersion.get; scalaVersion <- scalaVersionOpt if !sbtVersion.isEmpty && !scalaVersion.isEmpty)
project.log.info(" with sbt " + sbtVersion + " and Scala " + scalaVersion)
args match
{
case Array() =>
CrossBuild.load() match
{
case None =>
project.log.info("No actions specified, interactive session started. Execute 'help' for more information.")
val doNext = interactive(project)
printTime(project, startTime, "session")
doNext
case Some(cross) =>
crossBuildNext(project, cross)
new Exit(RebootExitCode)
}
case CrossBuild(action) =>
val exitCode =
CrossBuild.load() match
{
case None => if(startCrossBuild(project, action)) RebootExitCode else BuildErrorExitCode
case Some(cross) => if(crossBuildNext(project, cross)) RebootExitCode else NormalExitCode
}
new Exit(exitCode)
case _ =>
val source = args.toList.elements
def nextCommand = if(source.hasNext) Right(source.next) else Left(new Exit(NormalExitCode) )
val result = loop(project, project, p => nextCommand, false)
result match
{
case Exit(NormalExitCode) => project.log.success("Build completed successfully.")
case Exit(_) => project.log.error("Error during build.")
case _ => ()
}
printTime(project, startTime, "build")
result
}
}
private def crossBuildNext(project: Project, cross: CrossBuild) =
{
val setScalaVersion = (newVersion: String) => { System.setProperty(ScalaVersion.LiveKey, newVersion); () }
val complete =
if(handleAction(project, cross.command))
cross.versionComplete(setScalaVersion)
else
cross.error(setScalaVersion)
if(complete)
printTime(project, cross.startTime, "cross-build")
!complete
}
/** The name of the command that loads a console with access to the current project through the variable 'project'.*/
val ProjectConsoleAction = "console-project"
/** The name of the command that shows the current project and logging level of that project.*/
val ShowCurrent = "current"
/** The name of the command that shows all available actions.*/
val ShowActions = "actions"
/** The name of the command that sets the currently active project.*/
val ProjectAction = "project"
/** The name of the command that shows all available projects.*/
val ShowProjectsAction = "projects"
/** The list of lowercase command names that may be used to terminate the program.*/
val TerminateActions: Iterable[String] = "exit" :: "quit" :: Nil
/** The name of the command that sets the value of the property given as its argument.*/
val SetAction = "set"
/** The name of the command that gets the value of the property given as its argument.*/
val GetAction = "get"
/** The name of the command that displays the help message. */
val HelpAction = "help"
/** The command for rebooting sbt. Requires sbt to have been launched by the loader.*/
val RebootCommand = "reboot"
/** The name of the command that reloads a project. This is useful for when the project definition has changed. */
val ReloadAction = "reload"
/** The name of the command that toggles logging stacktraces. */
val TraceCommand = "trace"
/** The name of the command that compiles all sources continuously when they are modified. */
val ContinuousCompileCommand = "cc"
/** The prefix used to identify a request to execute the remaining input on source changes.*/
val ContinuousExecutePrefix = "~"
/** The prefix used to identify a request to execute the remaining input across multiple Scala versions.*/
val CrossBuildPrefix = "+"
/** Error message for when the user tries to prefix an action with CrossBuildPrefix but the loader is not used.*/
val CrossBuildUnsupported = "Cross-building is not supported when the loader is not used."
/** The number of seconds between polling by the continuous compile command.*/
val ContinuousCompilePollDelaySeconds = 1
/** The list of all available commands at the interactive prompt in addition to the tasks defined
* by a project.*/
protected def interactiveCommands: Iterable[String] = basicCommands.toList ++ logLevels.toList
/** The list of logging levels.*/
private def logLevels: Iterable[String] = TreeSet.empty[String] ++ Level.levels.map(_.toString)
/** The list of all interactive commands other than logging level.*/
private def basicCommands: Iterable[String] = TreeSet(ShowProjectsAction, ShowActions, ShowCurrent, HelpAction,
RebootCommand, ReloadAction, TraceCommand, ContinuousCompileCommand, ProjectConsoleAction)
/** Enters interactive mode for the given root project. It uses JLine for tab completion and
* history. It returns normally when the user terminates or reloads the interactive session. That is,
* it does not call System.exit to quit.
**/
private def interactive(baseProject: Project): RunCompleteAction =
{
val projectNames = baseProject.projectClosure.map(_.name)
val prefixes = ContinuousExecutePrefix :: CrossBuildPrefix :: Nil
val completors = new Completors(ProjectAction, projectNames, interactiveCommands, List(GetAction, SetAction), prefixes)
val reader = new JLineReader(baseProject.historyPath, completors, baseProject.log)
def updateTaskCompletions(project: Project)
{
val methodCompletions = for( (name, method) <- project.methods) yield (name, method.completions)
reader.setVariableCompletions(project.taskNames, project.propertyNames, methodCompletions)
}
def readCommand(currentProject: Project) =
{
updateTaskCompletions(currentProject) // this is done before every command because the completions could change due to the action previously invoked
reader.readLine("> ").toRight(new Exit(NormalExitCode))
}
loop(baseProject, baseProject, readCommand, true)
}
/** Prompts the user for the next command using 'currentProject' as context.
* If the command indicates that the user wishes to terminate or reload the session,
* the function returns the appropriate value.
* Otherwise, the command is handled and this function is called again
* (tail recursively) to prompt for the next command. */
private def loop(baseProject: Project, currentProject: Project, nextCommand: Project => Either[RunCompleteAction, String], isInteractive: Boolean): RunCompleteAction =
nextCommand(currentProject).right.flatMap{ line => process(baseProject, currentProject, line, isInteractive) } match
{
case Left(complete) => complete
case Right(project) => loop(baseProject, project, nextCommand, isInteractive)
}
private def process(baseProject: Project, currentProject: Project, line: String, isInteractive: Boolean): Either[RunCompleteAction, Project] =
{
def keepCurrent(success: Boolean) = if(success) Right(currentProject) else Left(new Exit(BuildErrorExitCode) )
def interactiveKeepCurrent(success: Boolean) = keepCurrent(success || isInteractive)
def keep(u: Unit) = Right(currentProject)
val trimmed = line.trim
if(trimmed.isEmpty)
Right(currentProject)
else if(isTerminateAction(trimmed))
Left(new Exit(NormalExitCode))
else if(ReloadAction == trimmed)
Left(Reload)
else if(RebootCommand == trimmed)
{
if(!isInteractive) currentProject.log.warn("'reboot' does not pick up changes to 'scala.version' in batch mode.")
System.setProperty(ScalaVersion.LiveKey, "")
Left(new Exit(RebootExitCode))
}
else if(trimmed.startsWith(CrossBuildPrefix))
{
if(startCrossBuild(currentProject, trimmed.substring(CrossBuildPrefix.length).trim))
Left(new Exit(RebootExitCode))
else
Right(currentProject)
}
else if(trimmed.startsWith(ProjectAction + " "))
{
val projectName = trimmed.substring(ProjectAction.length + 1)
baseProject.projectClosure.find(_.name == projectName) match
{
case Some(newProject) =>
printProject("Set current project to ", newProject)
Right(newProject)
case None =>
currentProject.log.error("Invalid project name '" + projectName + "' (type 'projects' to list available projects).")
keepCurrent(isInteractive)
}
}
else if(trimmed == HelpAction)
keep(displayHelp(isInteractive))
else if(trimmed == ShowProjectsAction)
keep(baseProject.projectClosure.foreach(listProject))
else if(trimmed.startsWith(SetAction + " "))
interactiveKeepCurrent( setProperty(currentProject, trimmed.substring(SetAction.length + 1)) )
else if(trimmed.startsWith(GetAction + " "))
interactiveKeepCurrent( getProperty(currentProject, trimmed.substring(GetAction.length + 1)) )
else if(trimmed == ProjectConsoleAction)
interactiveKeepCurrent(showResult(Run.projectConsole(currentProject), currentProject.log))
else
interactiveKeepCurrent( handleCommand(currentProject, trimmed) )
}
	private def printCmd(name: String, desc: String) = Console.println("\t" + name + ": " + desc)
val BatchHelpHeader = "You may execute any project action or method or one of the commands described below."
val InteractiveHelpHeader = "You may execute any project action or one of the commands described below. Only one action " +
"may be executed at a time in interactive mode and is entered by name, as it would be at the command line." +
" Also, tab completion is available."
private def displayHelp(isInteractive: Boolean)
{
Console.println(if(isInteractive) InteractiveHelpHeader else BatchHelpHeader)
Console.println("Available Commands:")
printCmd("<action name>", "Executes the project specified action.")
printCmd("<method name> <parameter>*", "Executes the project specified method.")
printCmd(ContinuousExecutePrefix + " <command>", "Executes the project specified action or method whenever source files change.")
printCmd(CrossBuildPrefix + " <command>", "Executes the project specified action or method for all versions of Scala defined in crossScalaVersions.")
printCmd(ShowActions, "Shows all available actions.")
printCmd(RebootCommand, "Changes to scala.version or sbt.version are processed and the project definition is reloaded.")
printCmd(HelpAction, "Displays this help message.")
printCmd(ReloadAction, "Reloads sbt, recompiling modified project definitions if necessary.")
printCmd(ShowCurrent, "Shows the current project and logging level of that project.")
printCmd(Level.levels.mkString(", "), "Set logging for the current project to the specified level.")
printCmd(TraceCommand, "Toggles whether logging stack traces is enabled.")
printCmd(ProjectAction + " <project name>", "Sets the currently active project.")
printCmd(ShowProjectsAction, "Shows all available projects.")
printCmd(TerminateActions.elements.mkString(", "), "Terminates the build.")
printCmd(SetAction + " <property> <value>", "Sets the value of the property given as its argument.")
printCmd(GetAction + " <property>", "Gets the value of the property given as its argument.")
printCmd(ProjectConsoleAction, "Enters the Scala interpreter with the current project bound to the variable 'current' and all members imported.")
}
private def listProject(p: Project) = printProject("\t", p)
private def printProject(prefix: String, p: Project): Unit =
Console.println(prefix + p.name + " " + p.version)
private def startCrossBuild(project: Project, action: String) =
{
checkBooted && checkAction(project, action) &&
{
val againstScalaVersions = project.crossScalaVersions
val versionsDefined = !againstScalaVersions.isEmpty
if(versionsDefined)
CrossBuild(againstScalaVersions, action, System.currentTimeMillis)
else
Console.println("Project does not declare any Scala versions to cross-build against.")
versionsDefined
}
}
private def checkBooted =
Project.booted ||
{
Console.println(CrossBuildUnsupported)
false
}
/** Handles the given command string provided at the command line. Returns false if there was an error*/
private def handleCommand(project: Project, command: String): Boolean =
{
command match
{
case GetAction => getArgumentError(project.log)
case SetAction => setArgumentError(project.log)
case ProjectAction => setProjectError(project.log)
case ShowCurrent =>
printProject("Current project is ", project)
Console.println("Current log level is " + project.log.getLevel)
printTraceEnabled(project)
true
case ShowActions => showActions(project); true
case TraceCommand => toggleTrace(project); true
case Level(level) => setLevel(project, level); true
case ContinuousCompileCommand => compileContinuously(project)
case action if action.startsWith(ContinuousExecutePrefix) => executeContinuously(project, action.substring(ContinuousExecutePrefix.length).trim)
case action => handleAction(project, action)
}
}
private def showActions(project: Project): Unit = Console.println(project.taskAndMethodList)
// returns true if it succeeded
private def handleAction(project: Project, action: String): Boolean =
{
def show(result: Option[String]): Boolean = showResult(result, project.log)
val startTime = System.currentTimeMillis
val result = withAction(project, action)( (name, params) => show(project.call(name, params)))( name => show(project.act(name)))
printTime(project, startTime, "")
result
}
// returns true if it succeeded
private def showResult(result: Option[String], log: Logger): Boolean =
{
result match
{
case Some(errorMessage) => log.error(errorMessage); false
case None => log.success("Successful."); true
}
}
// true if the action exists
private def checkAction(project: Project, actionString: String): Boolean =
withAction(project, actionString)( (n,p) => true)( n => true)
private def withAction(project: Project, actionString: String)(ifMethod: (String, Array[String]) => Boolean)(ifAction: String => Boolean): Boolean =
{
def didNotExist(taskType: String, name: String) =
{
project.log.error("No " + taskType + " named '" + name + "' exists.")
project.log.info("Execute 'help' for a list of commands or 'actions' for a list of available project actions and methods.")
false
}
impl.CommandParser.parse(actionString) match
{
case Left(errMsg) => project.log.error(errMsg); false
case Right((name, parameters)) =>
if(project.methods.contains(name))
ifMethod(name, parameters.toArray)
else if(!parameters.isEmpty)
didNotExist("method", name)
else if(project.deepTasks.contains(name))
ifAction(name)
else
didNotExist("action", name)
}
}
/** Toggles whether stack traces are enabled.*/
private def toggleTrace(project: Project)
{
val newValue = !project.log.traceEnabled
project.projectClosure.foreach(_.log.enableTrace(newValue))
printTraceEnabled(project)
}
private def printTraceEnabled(project: Project)
{
Console.println("Stack traces are " + (if(project.log.traceEnabled) "enabled" else "disabled"))
}
/** Sets the logging level on the given project.*/
private def setLevel(project: Project, level: Level.Value)
{
project.projectClosure.foreach(_.log.setLevel(level))
Console.println("Set log level to " + project.log.getLevel)
}
/** Prints the elapsed time to the given project's log using the given
* initial time and the label 's'.*/
private def printTime(project: Project, startTime: Long, s: String)
{
val endTime = System.currentTimeMillis()
project.log.info("")
val ss = if(s.isEmpty) "" else s + " "
project.log.info("Total " + ss + "time: " + (endTime - startTime + 500) / 1000 + " s")
}
/** Provides a partial message describing why the given property is undefined. */
private def undefinedMessage(property: Project#UserProperty[_]): String =
{
property.resolve match
{
case vu: UndefinedValue => " is not defined."
case e: ResolutionException => " has invalid value: " + e.toString
case _ => ""
}
}
/** Prompts the user for the value of undefined properties. 'first' is true if this is the first time
* that the current property has been prompted.*/
private def fillUndefinedProperties(project: Project, properties: List[(String, Project#Property[_])], first: Boolean): Boolean =
{
properties match
{
case (name, variable) :: tail =>
{
val shouldAdvanceOrQuit =
variable match
{
case property: Project#UserProperty[_] =>
if(first)
project.log.error(" Property '" + name + "' " + undefinedMessage(property))
for(newValue <- SimpleReader.readLine(" Enter new value for " + name + " : ")) yield
{
try
{
property.setStringValue(newValue)
true
}
catch
{
case e =>
project.log.error("Invalid value: " + e.getMessage)
false
}
}
case _ => Some(true)
}
shouldAdvanceOrQuit match
{
case Some(shouldAdvance) => fillUndefinedProperties(project, if(shouldAdvance) tail else properties, shouldAdvance)
case None => false
}
}
case Nil => true
}
}
/** Iterates over the undefined properties in the given projects, prompting the user for the value of each undefined
* property.*/
private def fillUndefinedProjectProperties(projects: List[Project]): Boolean =
{
projects match
{
case project :: remaining =>
val uninitialized = project.uninitializedProperties.toList
if(uninitialized.isEmpty)
fillUndefinedProjectProperties(remaining)
else
{
project.log.error("Project in " + project.info.projectDirectory.getAbsolutePath + " has undefined properties.")
val result = fillUndefinedProperties(project, uninitialized, true) && fillUndefinedProjectProperties(remaining)
project.saveEnvironment()
result
}
case Nil => true
}
}
/** Prints the value of the property with the given name in the given project. */
private def getProperty(project: Project, propertyName: String): Boolean =
{
if(propertyName.isEmpty)
{
project.log.error("No property name specified.")
false
}
else
{
project.getPropertyNamed(propertyName) match
{
case Some(property) =>
property.resolve match
{
case u: UndefinedValue => project.log.error("Value of property '" + propertyName + "' is undefined."); false
case ResolutionException(m, e) => project.log.error(m); false
case DefinedValue(value, isInherited, isDefault) => Console.println(value.toString); true
}
case None =>
val value = System.getProperty(propertyName)
if(value == null)
project.log.error("No property named '" + propertyName + "' is defined.")
else
Console.println(value)
value != null
}
}
}
/** Separates the space separated property name/value pair and stores the value in the user-defined property
* with the given name in the given project. If no such property exists, the value is stored in a system
* property. */
private def setProperty(project: Project, propertyNameAndValue: String): Boolean =
{
val m = """(\S+)(\s+\S.*)?""".r.pattern.matcher(propertyNameAndValue)
if(m.matches())
{
val name = m.group(1)
val newValue =
{
val v = m.group(2)
if(v == null) "" else v.trim
}
def notePending(changed: String): Unit = Console.println(" Build will use " + changed + newValue + " after running 'reboot' command or restarting sbt.")
project.getPropertyNamed(name) match
{
case Some(property) =>
{
try
{
property.setStringValue(newValue)
property match
{
case project.scalaVersion => notePending("Scala ")
case project.sbtVersion => notePending("sbt ")
case _ => Console.println(" Set property '" + name + "' = '" + newValue + "'")
}
true
}
catch { case e =>
project.log.error("Error setting property '" + name + "' in " + project.environmentLabel + ": " + e.toString)
false
}
finally { project.saveEnvironment().foreach(msg => project.log.error("Error saving environment: " + msg)) }
}
case None =>
{
System.setProperty(name, newValue)
project.log.info(" Set system property '" + name + "' = '" + newValue + "'")
true
}
}
}
else
setArgumentError(project.log)
}
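	/* Interactive examples (sketch):
	 *   > set version 1.1          (updates the project property and saves the environment)
	 *   > set my.prop hello        (falls back to a JVM system property if no project property matches)
	 */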
private def compileContinuously(project: Project) = executeContinuously(project, "test-compile")
private def executeContinuously(project: Project, action: String) =
{
def shouldTerminate: Boolean = (System.in.available > 0) && (project.terminateWatch(System.in.read()) || shouldTerminate)
val actionValid = checkAction(project, action)
if(actionValid)
{
SourceModificationWatch.watchUntil(project, ContinuousCompilePollDelaySeconds)(shouldTerminate)
{
handleAction(project, action)
Console.println("Waiting for source changes... (press enter to interrupt)")
}
while (System.in.available() > 0) System.in.read()
}
actionValid
}
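	/* Sketch: at the prompt, `~ test-compile` (or the `cc` shortcut) re-runs the action whenever
	 * a watched source changes, polling every ContinuousCompilePollDelaySeconds; pressing enter stops it. */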
private def isTerminateAction(s: String) = TerminateActions.elements.contains(s.toLowerCase)
private def setArgumentError(log: Logger) = logError(log)("Invalid arguments for 'set': expected property name and new value.")
private def getArgumentError(log: Logger) = logError(log)("Invalid arguments for 'get': expected property name.")
private def setProjectError(log: Logger) = logError(log)("Invalid arguments for 'project': expected project name.")
private def logError(log: Logger)(s: String) = { log.error(s); false }
}
private class CrossBuild(val remainingScalaVersions: Set[String], val command: String, val startTime: Long)
{
def error(setScalaVersion: String => Unit) = clearScalaVersion(setScalaVersion)
private def clearScalaVersion(setScalaVersion: String => Unit) =
{
CrossBuild.clear()
setScalaVersion("")
true
}
def versionComplete(setScalaVersion: String => Unit) =
{
val remaining = remainingScalaVersions - ScalaVersion.currentString
if(remaining.isEmpty)
clearScalaVersion(setScalaVersion)
else
{
CrossBuild.setProperties(remaining, command, startTime.toString)
setScalaVersion(remaining.toSeq.first)
false
}
}
}
private object CrossBuild
{
private val RemainingScalaVersionsKey = "sbt.remaining.scala.versions"
private val CrossCommandKey = "sbt.cross.build.command"
private val StartTimeKey = "sbt.cross.start.time"
private def setProperties(remainingScalaVersions: Set[String], command: String, startTime: String)
{
System.setProperty(RemainingScalaVersionsKey, remainingScalaVersions.mkString(" "))
System.setProperty(CrossCommandKey, command)
System.setProperty(StartTimeKey, startTime)
}
private def getProperty(key: String) =
{
val value = System.getProperty(key)
if(value == null)
""
else
value.trim
}
private def clear() { setProperties(Set.empty, "", "") }
def load() =
{
val command = getProperty(CrossCommandKey)
val remaining = getProperty(RemainingScalaVersionsKey)
val startTime = getProperty(StartTimeKey)
if(command.isEmpty || remaining.isEmpty || startTime.isEmpty)
None
else
Some(new CrossBuild(Set(remaining.split(" ") : _*), command, startTime.toLong))
}
def apply(remainingScalaVersions: Set[String], command: String, startTime: Long) =
{
setProperties(remainingScalaVersions, command, startTime.toString)
new CrossBuild(remainingScalaVersions, command, startTime)
}
import Main.CrossBuildPrefix
def unapply(s: String): Option[String] =
{
val trimmed = s.trim
if(trimmed.startsWith(CrossBuildPrefix))
Some(trimmed.substring(CrossBuildPrefix.length).trim)
else
None
}
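	// Extractor examples (sketch): CrossBuild.unapply("+ compile") == Some("compile");
	// CrossBuild.unapply("compile") == None.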
def unapply(s: Array[String]): Option[String] =
s match
{
case Array(CrossBuild(crossBuildAction)) => Some(crossBuildAction)
case _ => None
}
} | matheshar/simple-build-tool | src/main/scala/sbt/Main.scala | Scala | bsd-3-clause | 26,513 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.crossdata.catalog.persistent
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.CatalystConf
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.crossdata.catalog.XDCatalog.{CrossdataIndex, CrossdataTable, ViewIdentifierNormalized}
import org.apache.spark.sql.crossdata.catalog.interfaces.{XDCatalogCommon, XDPersistentCatalog}
import org.apache.spark.sql.crossdata.catalog.{IndexIdentifierNormalized, TableIdentifierNormalized}
import org.apache.spark.sql.crossdata.util.CreateRelationUtil
import scala.collection.mutable
/**
* PersistentCatalog aims to provide a mechanism to persist the
* [[org.apache.spark.sql.catalyst.analysis.Catalog]] metadata.
*/
abstract class PersistentCatalogWithCache(catalystConf: CatalystConf) extends XDPersistentCatalog
with Serializable {
import CreateRelationUtil._
val tableCache: mutable.Map[TableIdentifierNormalized, LogicalPlan] = mutable.Map.empty
val viewCache: mutable.Map[TableIdentifierNormalized, LogicalPlan] = mutable.Map.empty
val indexCache: mutable.Map[TableIdentifierNormalized, CrossdataIndex] = mutable.Map.empty
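  /* Lookup order used by `relation` below (sketch): the in-memory table cache, then the view
   * cache, then persisted table metadata (re-cached on a hit), and finally persisted view SQL,
   * which is re-parsed through the SQLContext and cached. */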
override final def relation(relationIdentifier: TableIdentifierNormalized)(implicit sqlContext: SQLContext): Option[LogicalPlan] =
(tableCache get relationIdentifier) orElse (viewCache get relationIdentifier) orElse {
logInfo(s"PersistentCatalog: Looking up table ${relationIdentifier.unquotedString}")
lookupTable(relationIdentifier) map { crossdataTable =>
val table: LogicalPlan = createLogicalRelation(sqlContext, crossdataTable)
tableCache.put(relationIdentifier, table)
table
}
} orElse {
log.debug(s"Table Not Found: ${relationIdentifier.unquotedString}")
lookupView(relationIdentifier).map { sqlView =>
val viewPlan: LogicalPlan = sqlContext.sql(sqlView).logicalPlan
viewCache.put(relationIdentifier, viewPlan)
viewPlan
}
}
override final def refreshCache(tableIdent: ViewIdentifierNormalized): Unit = tableCache clear
override final def saveView(viewIdentifier: ViewIdentifierNormalized, plan: LogicalPlan, sqlText: String)(implicit sqlContext:SQLContext): Unit = {
import XDCatalogCommon._
def checkPlan(plan: LogicalPlan): Unit = {
plan collect {
case UnresolvedRelation(tIdent, _) => tIdent
} foreach { tIdent =>
if (relation(tIdent.normalize(catalystConf))(sqlContext).isEmpty) {
throw new RuntimeException("Views only can be created with a previously persisted table")
}
}
}
checkPlan(plan)
if (relation(viewIdentifier)(sqlContext).isDefined) {
val msg = s"The view ${viewIdentifier.unquotedString} already exists"
logWarning(msg)
throw new UnsupportedOperationException(msg)
} else {
logInfo(s"Persisting view ${viewIdentifier.unquotedString}")
persistViewMetadata(viewIdentifier, sqlText)
viewCache.put(viewIdentifier, plan)
}
}
override final def saveTable(crossdataTable: CrossdataTable, table: LogicalPlan)(implicit sqlContext:SQLContext): Unit = {
val tableIdentifier = crossdataTable.tableIdentifier
if (relation(tableIdentifier)(sqlContext).isDefined) {
logWarning(s"The table $tableIdentifier already exists")
throw new UnsupportedOperationException(s"The table $tableIdentifier already exists")
} else {
logInfo(s"Persisting table ${crossdataTable.tableIdentifier.table}")
persistTableMetadata(crossdataTable.copy(schema = Option(table.schema)))
tableCache.put(tableIdentifier, table)
}
}
override final def saveIndex(crossdataIndex: CrossdataIndex): Unit = {
val indexIdentifier = crossdataIndex.indexIdentifier
if(lookupIndex(indexIdentifier).isDefined) {
logWarning(s"The index $indexIdentifier already exists")
throw new UnsupportedOperationException(s"The index $indexIdentifier already exists")
} else {
logInfo(s"Persisting index ${crossdataIndex.indexIdentifier}")
persistIndexMetadata(crossdataIndex)
indexCache.put(crossdataIndex.tableIdentifier, crossdataIndex)
}
}
override final def dropTable(tableIdentifier: TableIdentifierNormalized): Unit = {
tableCache remove tableIdentifier
dropTableMetadata(tableIdentifier)
dropIndexesFromTable(tableIdentifier)
}
override final def dropView(viewIdentifier: ViewIdentifierNormalized): Unit = {
viewCache remove viewIdentifier
dropViewMetadata(viewIdentifier)
}
override final def dropIndexesFromTable(tableIdentifier: TableIdentifierNormalized): Unit = {
indexCache remove tableIdentifier
dropIndexMetadata(tableIdentifier)
}
override final def dropIndex(indexIdentifer: IndexIdentifierNormalized): Unit = {
val found: Option[(TableIdentifierNormalized, CrossdataIndex)] = indexCache find { case(key,value) => value.indexIdentifier == indexIdentifer}
if(found.isDefined) indexCache remove found.get._1
dropIndexMetadata(indexIdentifer)
}
override final def tableHasIndex(tableIdentifier: TableIdentifierNormalized): Boolean =
indexCache.contains(tableIdentifier)
override final def dropAllViews(): Unit = {
viewCache.clear
dropAllViewsMetadata()
}
override final def dropAllTables(): Unit = {
tableCache.clear
dropAllTablesMetadata()
}
override final def dropAllIndexes(): Unit = {
indexCache.clear
dropAllIndexesMetadata()
}
protected def schemaNotFound() = throw new RuntimeException("the schema must be non empty")
//New Methods
def lookupView(viewIdentifier: ViewIdentifierNormalized): Option[String]
def persistTableMetadata(crossdataTable: CrossdataTable): Unit
def persistViewMetadata(tableIdentifier: TableIdentifierNormalized, sqlText: String): Unit
def persistIndexMetadata(crossdataIndex: CrossdataIndex): Unit
def dropTableMetadata(tableIdentifier: TableIdentifierNormalized): Unit
def dropViewMetadata(viewIdentifier: ViewIdentifierNormalized): Unit
def dropIndexMetadata(indexIdentifier: IndexIdentifierNormalized): Unit
def dropIndexMetadata(tableIdentifier: TableIdentifierNormalized): Unit
def dropAllViewsMetadata(): Unit
def dropAllTablesMetadata(): Unit
def dropAllIndexesMetadata(): Unit
} | jjlopezm/crossdata | core/src/main/scala/org/apache/spark/sql/crossdata/catalog/persistent/PersistentCatalogWithCache.scala | Scala | apache-2.0 | 7,038 |
package documentation
import org.qirx.littlespec.Specification
import testUtils.PostToApplication
import testUtils.ApiExampleSpecification
import play.api.libs.json.Json.obj
import play.api.libs.json.Json.arr
import testUtils.PutToApplication
import testUtils.GetFromApplication
import testUtils.PatchToApplication
class _02_04_Patch_Failures extends Specification with ApiExampleSpecification {
val privateApiPrefix = "/api/private"
withApiExampleIntroduction(apiPrefix = privateApiPrefix) { app =>
val PATCH = new PatchToApplication(app, privateApiPrefix)
val POST = new PostToApplication(app, privateApiPrefix)
"For these examples we make sure an article exists" - example {
val (status, body) = POST(obj("title" -> "Article 1")) at "/article"
status is 201
body is obj(
"id" -> "article_1"
)
}
"Invalid id" - example {
val (status, body) = PATCH("/article/not_existent") using obj()
status is 404
body is obj(
"status" -> 404,
"error" -> "notFound"
)
}
"Invalid instance" - example {
val article = obj(
"title" -> 0,
"body" -> "no json",
"tags" -> "not an array",
"date" -> "is generated",
"publishDate" -> "invalid date"
)
val (status, body) = PATCH("/article/article_1") using article
status is 422
body is obj(
"status" -> 422,
"propertyErrors" -> arr(
obj(
"id" -> "label",
"name" -> "title",
"error" -> "invalidType"
),
obj(
"id" -> "rich_content",
"name" -> "body",
"error" -> "invalidType"
),
obj(
"id" -> "tag",
"name" -> "tags",
"error" -> "invalidType"
),
obj(
"id" -> "date",
"name" -> "date",
"error" -> "generated"
),
obj(
"id" -> "date",
"name" -> "publishDate",
"messageKey" -> "invalidDate",
"message" -> "The value `invalid date` is not a valid date"
)
)
)
}
"Malformed json" - example {
val (status, body) = PATCH("/article/article_1") using "no json"
status is 400
body is obj(
"status" -> 400,
"error" -> "badRequest"
)
}
"Non exsistent endpoint" - example {
val (status, body) = PATCH("/non_existing/article_1") using obj()
status is 404
body is obj(
"status" -> 404,
"error" -> "notFound"
)
}
"Non exsistent document" - example {
val (status, body) = PATCH("/article/non_existing") using obj()
status is 404
body is obj(
"status" -> 404,
"error" -> "notFound"
)
}
"Wrong document json" - example {
val (status, body) = PATCH("/article/article_1") using arr()
status is 422
body is obj(
"status" -> 422,
"error" -> "jsonObjectExpected"
)
}
"Wrong path" - example {
val (status, body) = PATCH("/article/article_1/non_existing") using obj()
status is 404
body is obj(
"status" -> 404,
"error" -> "notFound"
)
}
}
} | EECOLOR/play-cms | cms/src/test/scala/documentation/_02_04_Patch_Failures.scala | Scala | mit | 3,304 |
import scala.tools.nsc.interpreter.IMain
object Test extends dotty.runtime.LegacyApp {
val engine = new IMain.Factory getScriptEngine()
engine.asInstanceOf[IMain].settings.usejavacp.value = true
engine put ("n", 10)
engine eval "1 to n.asInstanceOf[Int] foreach print"
}
| yusuke2255/dotty | tests/pending/run/t7843-jsr223-service.scala | Scala | bsd-3-clause | 280 |
package org.scalatra.test
import scala.util.DynamicVariable
import java.net.URLEncoder.encode
import dispatch._
import org.apache.http.{HttpEntity, HttpResponse}
trait DispatchClient extends Client {
type Response = SimpleResponse
def baseUrl: String
lazy val http: Http = Http
def submit[A](
method: String,
uri: String,
queryParams: Iterable[(String, String)] = Map.empty,
headers: Map[String, String] = Map.empty,
body: String = null)(f: => A): A =
{
var req = url(baseUrl + uri) <<? queryParams <:< headers
    Option(body) foreach { str => req <<<= str }
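    // fold every response header into a name -> values multimap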
def headerMap(res: HttpResponse) =
(Map[String, Seq[String]]().withDefaultValue(Seq()) /: res.getAllHeaders) {
(m, h) => m + (h.getName -> (m(h.getName) :+ h.getValue))
}
val res = http x (req as_str) { case (status, res, _, body) =>
SimpleResponse(status, headerMap(res), body())
}
withResponse(res) { f }
}
def status = response.status
def headers = response.headers
def body = response.body
}
| kuochaoyi/scalatra | test/src/main/scala/org/scalatra/test/DispatchClient.scala | Scala | bsd-2-clause | 1,049 |
package com.mesosphere.cosmos.error
import com.mesosphere.universe
import io.circe.Encoder
import io.circe.JsonObject
import io.circe.generic.semiauto.deriveEncoder
final case class ServiceMarathonTemplateNotFound(
packageName: String,
packageVersion: universe.v3.model.Version
) extends CosmosError {
override def data: Option[JsonObject] = CosmosError.deriveData(this)
override def message: String = {
s"Package: [$packageName] version: [$packageVersion] does not have a Marathon " +
"template defined and can not be rendered"
}
}
object ServiceMarathonTemplateNotFound {
implicit val encoder: Encoder[ServiceMarathonTemplateNotFound] = deriveEncoder
}
| dcos/cosmos | cosmos-common/src/main/scala/com/mesosphere/cosmos/error/ServiceMarathonTemplateNotFound.scala | Scala | apache-2.0 | 678 |
package chapter13
import chapter7.Nonblocking.Par
sealed trait Console[A] {
def toPar: Par[A]
def toThunk: () => A
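  // Note: this flatMap is eager - it forces the effect via toThunk before
  // passing the result to f, so it is not a lazy/trampolined bind.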
def flatMap[B](f: A => Console[B]): Console[B] = {
val thunk = this.toThunk
val value = thunk()
f(value)
}
}
object Console {
case class PrintLine(line: String) extends Console[Unit] {
def toPar = Par.lazyUnit(println(line))
def toThunk = () => println(line)
}
case object ReadLine extends Console[Option[String]] {
def toPar = Par.lazyUnit(run)
def toThunk = () => run
def run: Option[String] =
try Some(scala.io.StdIn.readLine())
catch {
case e: Exception => None
}
}
type ConsoleIO[A] = Free[Console, A]
def readLn: ConsoleIO[Option[String]] = Suspend(ReadLine)
def printLn(line: String): ConsoleIO[Unit] = Suspend(PrintLine(line))
}
| RaphMad/FPInScala | src/main/scala/chapter13/Console.scala | Scala | mit | 841 |
package com.gettyimages.spray.swagger
import com.wordnik.swagger.annotations._
import javax.ws.rs.Path
import spray.routing.HttpService
import spray.httpx.Json4sSupport
@Api(value = "/dict", description = "This is a dictionary api.")
trait DictHttpService extends HttpService with Json4sSupport {
var dict: Map[String, String] = Map[String, String]()
@ApiOperation(value = "Add dictionary entry.", notes = "Will a new entry to the dictionary, indexed by key, with an optional expiration value.", httpMethod = "POST")
@ApiImplicitParams(Array(
new ApiImplicitParam(name = "entry", value = "Key/Value pair of dictionary entry, with optional expiration time.", required = true, dataType = "DictEntry", paramType = "body")
))
@ApiResponses(Array(
new ApiResponse(code = 400, message = "Client Error")
))
def createRoute = post {
path("/dict") {
entity(as[DictEntry]) { e =>
dict += e.key -> e.value
complete("ok")
}
}
}
@ApiOperation(value = "Find entry by key.", notes = "Will look up the dictionary entry for the provided key.", response = classOf[DictEntry], httpMethod = "GET", nickname = "someothername")
@ApiImplicitParams(Array(
new ApiImplicitParam(name = "key", value = "Keyword for the dictionary entry.", required = true, dataType = "String", paramType = "path")
))
@ApiResponses(Array(
new ApiResponse(code = 404, message = "Dictionary does not exist.")
))
def readRoute = get {
path("/dict" / Segment) { key =>
complete(dict(key))
}
}
}
| devsprint/akka-http-swagger | src/test/scala/com/gettyimages/spray/swagger/samples/DictHttpService.scala | Scala | apache-2.0 | 1,549 |
/*
This file is part of Intake24.
Copyright 2015, 2016 Newcastle University.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package security
import javax.inject.{Inject, Singleton}
import com.mohiva.play.silhouette.api.LoginInfo
import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository
import com.mohiva.play.silhouette.api.util.PasswordInfo
import play.api.Logger
import uk.ac.ncl.openlab.intake24.errors.RecordNotFound
import uk.ac.ncl.openlab.intake24.services.systemdb.admin.UserAdminService
import scala.concurrent.{ExecutionContext, Future}
import scala.reflect.ClassTag
@Singleton
class AuthInfoServiceImpl @Inject()(userAdminService: UserAdminService,
implicit val executionContext: ExecutionContext) extends AuthInfoRepository {
def find[T](loginInfo: LoginInfo)(implicit tag: ClassTag[T]): Future[Option[T]] = Future {
val databaseResult = loginInfo.providerID match {
case SurveyAliasProvider.ID => userAdminService.getUserPasswordByAlias(SurveyAliasUtils.fromString(loginInfo.providerKey))
case EmailProvider.ID => userAdminService.getUserPasswordByEmail(loginInfo.providerKey)
case x => throw new RuntimeException(s"Auth info provider $x not supported")
}
databaseResult match {
case Right(password) => Some(PasswordInfo(password.hasher, password.hashBase64, Some(password.saltBase64)).asInstanceOf[T])
case Left(RecordNotFound(_)) => None
case Left(e) => throw e.exception
}
}
def save[T](loginInfo: LoginInfo, authInfo: T): Future[T] = ???
def add[T](loginInfo: LoginInfo, authInfo: T): Future[T] = ???
def remove[T](loginInfo: LoginInfo)(implicit tag: scala.reflect.ClassTag[T]): Future[Unit] = ???
def update[T](loginInfo: LoginInfo, authInfo: T): scala.concurrent.Future[T] = ???
} | digitalinteraction/intake24 | ApiPlayServer/app/security/AuthInfoServiceImpl.scala | Scala | apache-2.0 | 2,305 |
/*
* @author Philip Stutz
* @author Mihaela Verman
*
* Copyright 2013 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.factory.mapper
import com.signalcollect.interfaces.MapperFactory
import com.signalcollect.interfaces.VertexToWorkerMapper
import com.signalcollect.messaging.DefaultVertexToWorkerMapper
/**
* Default random hash partitioning mapper.
* Has good load balancing but poor locality.
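 *
 * A small usage sketch (argument values are illustrative only):
 * {{{
 *   val mapper = (new DefaultMapperFactory[Int]).createInstance(numberOfNodes = 2, workersPerNode = 4)
 * }}}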
*/
class DefaultMapperFactory[@specialized(Int, Long) Id] extends MapperFactory[Id] {
def createInstance(numberOfNodes: Int, workersPerNode: Int): VertexToWorkerMapper[Id] =
new DefaultVertexToWorkerMapper[Id](numberOfNodes, workersPerNode)
override def toString = "DefaultMapperFactory"
}
| danihegglin/DynDCO | src/main/scala/com/signalcollect/factory/mapper/DefaultMapperFactory.scala | Scala | apache-2.0 | 1,284 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.jdbc.connection
import java.sql.{Connection, Driver}
import java.util.Properties
import org.apache.spark.internal.Logging
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
/**
 * Connection provider which opens connections toward various databases (a database-specific
 * instance is needed). If Kerberos authentication is required, then it is the provider's
 * responsibility to set all the parameters.
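 *
 * A minimal usage sketch (`driver` and `options` are assumed to be in scope):
 * {{{
 *   val provider = ConnectionProvider.create(driver, options)
 *   val connection = provider.getConnection()
 * }}}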
*/
private[jdbc] trait ConnectionProvider {
/**
* Additional properties for data connection (Data source property takes precedence).
*/
def getAdditionalProperties(): Properties = new Properties()
/**
* Opens connection toward the database.
*/
def getConnection(): Connection
}
private[jdbc] object ConnectionProvider extends Logging {
def create(driver: Driver, options: JDBCOptions): ConnectionProvider = {
if (options.keytab == null || options.principal == null) {
logDebug("No authentication configuration found, using basic connection provider")
new BasicConnectionProvider(driver, options)
} else {
logDebug("Authentication configuration found, using database specific connection provider")
options.driverClass match {
case PostgresConnectionProvider.driverClass =>
logDebug("Postgres connection provider found")
new PostgresConnectionProvider(driver, options)
case MariaDBConnectionProvider.driverClass =>
logDebug("MariaDB connection provider found")
new MariaDBConnectionProvider(driver, options)
case DB2ConnectionProvider.driverClass =>
logDebug("DB2 connection provider found")
new DB2ConnectionProvider(driver, options)
case MSSQLConnectionProvider.driverClass =>
logDebug("MS SQL connection provider found")
new MSSQLConnectionProvider(driver, options)
case _ =>
throw new IllegalArgumentException(s"Driver ${options.driverClass} does not support " +
"Kerberos authentication")
}
}
}
}
| ConeyLiu/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProvider.scala | Scala | apache-2.0 | 2,881 |
/*
* StructuredBPTest.scala
* Test of structure belief propagation algorithm.
*
* Created By: Avi Pfeffer ([email protected])
* Creation Date: March 1, 2015
*
* Copyright 2015 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email [email protected] for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.test.algorithm.structured.strategy
import org.scalatest.{ WordSpec, Matchers }
import com.cra.figaro.language._
import com.cra.figaro.library.compound.If
import com.cra.figaro.algorithm.structured.algorithm.structured.StructuredBP
import com.cra.figaro.algorithm.lazyfactored.ValueSet._
import com.cra.figaro.language.Element.toBooleanElement
import com.cra.figaro.algorithm.structured.algorithm.structured.StructuredMPEBP
class StructuredBPTest extends WordSpec with Matchers {
"Executing a recursive structured BP solver strategy" when {
"given a flat model with an atomic flip without evidence" should {
"produce the correct answer" in {
Universe.createNew()
val e2 = Flip(0.6)
val e3 = Apply(e2, (b: Boolean) => b)
StructuredBP.probability(e3, true) should be(0.6 +- 0.000000001)
}
}
"given a flat model with a compound Flip without evidence" should {
"produce the correct answer" in {
Universe.createNew()
val e1 = Select(0.25 -> 0.3, 0.25 -> 0.5, 0.25 -> 0.7, 0.25 -> 0.9)
val e2 = Flip(e1)
val e3 = Apply(e2, (b: Boolean) => b)
StructuredBP.probability(e3, true) should be(0.6 +- 0.000000001)
}
}
"given a flat model with evidence" should {
"produce the correct answer" in {
Universe.createNew()
val e1 = Select(0.25 -> 0.3, 0.25 -> 0.5, 0.25 -> 0.7, 0.25 -> 0.9)
val e2 = Flip(e1)
val e3 = Apply(e2, (b: Boolean) => b)
e3.observe(true)
StructuredBP.probability(e1, 0.3) should be(0.125 +- 0.000000001)
}
}
"given a model with multiple targets and no evidence" should {
"produce the correct probability over both targets" in {
Universe.createNew()
val e1 = Select(0.25 -> 0.3, 0.25 -> 0.5, 0.25 -> 0.7, 0.25 -> 0.9)
val e2 = Flip(e1)
val e3 = Apply(e2, (b: Boolean) => b)
val alg = StructuredBP(100, e2, e3)
alg.start()
alg.probability(e2, true) should be(0.6 +- 0.000000001)
alg.probability(e3, true) should equal(0.6 +- 0.000000001)
}
}
"given a model with multiple targets with evidence" should {
"produce the correct probability over both targets" in {
Universe.createNew()
val e1 = Select(0.25 -> 0.3, 0.25 -> 0.5, 0.25 -> 0.7, 0.25 -> 0.9)
val e2 = Flip(e1)
val e3 = Apply(e2, (b: Boolean) => b)
e3.observe(true)
val alg = StructuredBP(100, e2, e1)
alg.start()
alg.probability(e2, true) should equal(1.0)
alg.probability(e1, 0.3) should be(0.125 +- 0.000000001)
}
}
"given a one-level nested model without evidence" should {
"produce the correct answer" in {
Universe.createNew()
val e1 = Select(0.25 -> 0.3, 0.25 -> 0.5, 0.25 -> 0.7, 0.25 -> 0.9)
val e2 = Flip(e1)
val e3 = If(e2, Constant(true), Constant(false))
val alg = StructuredBP(100, e3)
alg.start()
alg.probability(e3, true) should be(0.6 +- 0.000000001)
}
}
"given a one-level nested model with nested evidence" should {
"produce the correct answer" in {
Universe.createNew()
val e1 = Select(0.25 -> 0.3, 0.25 -> 0.5, 0.25 -> 0.7, 0.25 -> 0.9)
val e2 = Flip(e1)
val e3 = If(e2, { val e = Flip(0.5); e.observe(true); e }, Constant(false))
val alg = StructuredBP(100, e3)
alg.start()
alg.probability(e3, true) should be(0.6 +- 0.000000001)
}
}
"given a two-level nested model" should {
"produce the correct answer" in {
Universe.createNew()
val e1 = Select(0.25 -> 0.3, 0.25 -> 0.5, 0.25 -> 0.7, 0.25 -> 0.9)
val e2 = Flip(e1)
val e3 = If(e2, If(Flip(0.9), Constant(true), Constant(false)), Constant(false))
val alg = StructuredBP(100, e3)
alg.start()
alg.probability(e3, true) should be((0.6 * 0.9) +- 0.000000001)
}
}
"expanding an element with two different arguments" should {
"expand both the arguments" in {
Universe.createNew()
val e1 = Flip(0.4)
val e2 = Flip(0.3)
val e3 = e1 && e2
StructuredBP.probability(e3, true) should be(0.12 +- 0.000000001)
}
}
"expanding an argument that is used more than once" should {
"only expand the argument once" in {
var count = 0
Universe.createNew()
val e1 = Apply(Constant(true), (b: Boolean) => { count += 1; 5 })
val e2 = e1 === e1
StructuredBP.probability(e2, true) should equal(1.0)
count should equal(1)
// Note that this should now only expand once since Apply Maps have been added to Components
}
}
// The below test is loopy so BP's answer can't be predicted easily
// "expanding an argument that needs another argument later expanded" should {
// "create values for the ancestor argument first" in {
// Universe.createNew()
// val e1 = Flip(0.4)
// val e2 = If(e1, Constant(1), Constant(2))
// val e3 = Apply(e2, e1, (i: Int, b: Boolean) => if (b) i + 1 else i + 2)
// // e3 is 2 iff e1 is true, because then e2 is 1
// StructuredBP.probability(e3, 2) should be (0.4 +- 0.000000001)
// }
// }
"solving a problem with a reused nested subproblem" should {
"only process the nested subproblem once" in {
var count = 0
val f = (p: Boolean) => {
count += 1
Constant(p)
}
val e1 = Chain(Flip(0.5), f)
val e2 = Chain(Flip(0.4), f)
val e3 = e1 && e2
StructuredBP.probability(e3, true) should be((0.5 * 0.4) +- 0.000000001)
count should equal(2) // One each for p = true and p = false, but only expanded once
}
}
"given a problem with unneeded elements in the universe" should {
"not process the unneeded elements" in {
var count = 0
val e1 = Apply(Constant(1), (i: Int) => { count += 1; 5 })
val e2 = Flip(0.5)
StructuredBP.probability(e2, true) should equal(0.5)
count should equal(0)
}
}
}
"MPE BP" when {
"given a flat model without evidence should produce the right answer" in {
Universe.createNew()
val e1 = Select(0.75 -> 0.2, 0.25 -> 0.3)
val e2 = Flip(e1)
val e3 = Flip(e1)
val e4 = e2 === e3
val alg = StructuredMPEBP(20)
alg.start
// p(e1=.2,e2=T,e3=T,e4=T) = 0.75 * 0.2 * 0.2 = .03
// p(e1=.2,e2=F,e3=F,e4=T) = 0.75 * 0.8 * 0.8 = .48
// p(e1=.3,e2=T,e3=T,e4=T) = 0.25 * 0.3 * 0.3 = .0225
// p(e1=.3,e2=F,e3=F,e4=T) = 0.25 * 0.7 * 0.7 = .1225
// p(e1=.2,e2=T,e3=F,e4=F) = 0.75 * 0.2 * 0.8 = .12
// p(e1=.2,e2=F,e3=T,e4=F) = 0.75 * 0.8 * 0.2 = .12
// p(e1=.3,e2=T,e3=F,e4=F) = 0.25 * 0.3 * 0.7 = .0525
// p(e1=.3,e2=F,e3=T,e4=F) = 0.25 * 0.7 * 0.3 = .0525
// MPE: e1=.2,e2=F,e3=F,e4=T
alg.mostLikelyValue(e1) should be(0.2 +- 0.0000001)
alg.mostLikelyValue(e2) should equal(false)
alg.mostLikelyValue(e3) should equal(false)
alg.mostLikelyValue(e4) should equal(true)
alg.kill
}
"given a flat model with evidence should produce the right answer" in {
Universe.createNew()
val e1 = Select(0.5 -> 0.2, 0.5 -> 0.3)
e1.addConstraint((d: Double) => if (d < 0.25) 3.0 else 1.0)
val e2 = Flip(e1)
val e3 = Flip(e1)
val e4 = e2 === e3
e4.observe(true)
val alg = StructuredMPEBP(20)
alg.start
// p(e1=.2,e2=T,e3=T,e4=T) = 0.75 * 0.2 * 0.2 = .03
// p(e1=.2,e2=F,e3=F,e4=T) = 0.75 * 0.8 * 0.8 = .48
// p(e1=.3,e2=T,e3=T,e4=T) = 0.25 * 0.3 * 0.3 = .0225
// p(e1=.3,e2=F,e3=F,e4=T) = 0.25 * 0.7 * 0.7 = .1225
// MPE: e1=.2,e2=F,e3=F,e4=T
alg.mostLikelyValue(e1) should be(0.2 +- 0.0000001)
alg.mostLikelyValue(e2) should equal(false)
alg.mostLikelyValue(e3) should equal(false)
alg.mostLikelyValue(e4) should equal(true)
alg.kill
}
"given a structured model with evidence should produce the right answer" in {
Universe.createNew()
val e1 = Flip(0.5)
e1.setConstraint((b: Boolean) => if (b) 3.0; else 1.0)
val e2 = Chain(e1, (b: Boolean) => {
if (b) Flip(0.4) || Flip(0.2)
else Flip(0.9) || Flip(0.2)
})
val e3 = If(e1, Flip(0.52), Flip(0.4))
val e4 = e2 === e3
e4.observe(true)
// p(e1=T,e2=T,f1=T,f2=T,e3=T) = 0.75 * 0.4 * 0.2 * 0.52 = .0312
// p(e1=T,e2=T,f1=T,f2=F,e3=T) = 0.75 * 0.4 * 0.8 * 0.52 = .1248
// p(e1=T,e2=T,f1=F,f2=T,e3=T) = 0.75 * 0.6 * 0.2 * 0.52 = .0468
// p(e1=T,e2=F,f1=F,f2=F,e3=F) = 0.75 * 0.6 * 0.8 * 0.48 = .1728
// p(e1=F,e2=T,f1=T,f2=T,e3=T) = 0.25 * 0.9 * 0.2 * 0.4 = .018
// p(e1=F,e2=T,f1=T,f2=F,e3=T) = 0.25 * 0.9 * 0.8 * 0.4 = .072
// p(e1=F,e2=T,f1=F,f2=T,e3=T) = 0.25 * 0.1 * 0.2 * 0.4 = .002
// p(e1=F,e2=F,f1=F,f2=F,e3=F) = 0.25 * 0.1 * 0.8 * 0.6 = .012
// MPE: e1=T,e2=F,e3=F,e4=T
val alg = StructuredMPEBP(20)
alg.start
alg.mostLikelyValue(e1) should equal(true)
alg.mostLikelyValue(e2) should equal(false)
alg.mostLikelyValue(e3) should equal(false)
alg.mostLikelyValue(e4) should equal(true)
alg.kill
}
}
}
| scottcb/figaro | Figaro/src/test/scala/com/cra/figaro/test/algorithm/structured/strategy/StructuredBPTest.scala | Scala | bsd-3-clause | 9,820 |
package org.datacleaner.result.html
import org.junit.{Assert, Test}
import org.scalatestplus.junit.AssertionsForJUnit
class FlotChartLocatorTest extends AssertionsForJUnit {
@Test
def testBasicStuff = {
Assert.assertEquals("http://cdnjs.cloudflare.com/ajax/libs/flot/0.8.3/jquery.flot.min.js", FlotChartLocator.getFlotBaseUrl);
FlotChartLocator.setFlotHome("../bar/");
Assert.assertEquals("../bar/jquery.flot.min.js", FlotChartLocator.getFlotBaseUrl);
FlotChartLocator.setFlotHome(null)
Assert.assertEquals("http://cdnjs.cloudflare.com/ajax/libs/flot/0.8.3/jquery.flot.min.js", FlotChartLocator.getFlotBaseUrl);
}
}
| datacleaner/DataCleaner | components/html-rendering/src/test/scala/org/datacleaner/result/html/FlotChartLocatorTest.scala | Scala | lgpl-3.0 | 665 |
package tests
import so.eval.{ EvaluationRequest, Router }
import org.scalatest.{ BeforeAndAfter, FunSpec, Inside, ParallelTestExecution }
import org.scalatest.matchers.ShouldMatchers
class Base64
extends FunSpec
with ShouldMatchers
with Inside
with BeforeAndAfter
with ParallelTestExecution {
describe("Base 64 handling") {
it("should encode output files correctly") {
val res = Router.route(
"ruby",
"`echo 'foobar' > output/foo`; puts 123").get.evaluate
res.get.outputFiles.get.head._2 should be("Zm9vYmFyCg==")
}
it("should decode input files correctly") {
val req = EvaluationRequest(
"puts File.read('foo')",
files = Some(Map("foo" -> "Zm9vYmFyCg==")))
val res = Router.route("ruby", req).get.evaluate
res.get.stdout.trim should be("foobar")
}
}
}
| eval-so/minibcs | src/test/scala/Base64.scala | Scala | apache-2.0 | 850 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.kafka.tools.status
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.kafka.tools.{ConsumerKDSConnectionParams, KafkaDataStoreCommand}
import org.locationtech.geomesa.kafka10.KafkaUtils10
import org.locationtech.geomesa.tools.{Command, RequiredTypeNameParam}
import scala.collection.JavaConversions._
import scala.util.control.NonFatal
class KafkaDescribeSchemaCommand extends KafkaDataStoreCommand {
override val name = "get-schema"
override val params = new KafkaDescribeSchemaParams
def execute(): Unit = withDataStore { (ds) =>
Command.user.info(s"Describing attributes of feature type '${params.featureName}' at zkPath '${params.zkPath}'...")
try {
val sft = ds.getSchema(params.featureName)
val sb = new StringBuilder()
sft.getAttributeDescriptors.foreach { attr =>
sb.clear()
val name = attr.getLocalName
// TypeName
sb.append(name)
sb.append(": ")
sb.append(attr.getType.getBinding.getSimpleName)
if (sft.getGeometryDescriptor == attr) sb.append(" (Default geometry)")
        if (attr.getDefaultValue != null) sb.append("- Default Value: ").append(attr.getDefaultValue)
Command.output.info(sb.toString())
}
val userData = sft.getUserData
if (!userData.isEmpty) {
Command.user.info("\\nUser data:")
userData.foreach { case (key, value) => Command.user.info(s" $key: $value") }
}
Command.user.info("\\nFetching Kafka topic metadata...")
val zkUtils = KafkaUtils10.createZkUtils(params.zookeepers, Int.MaxValue, Int.MaxValue)
try {
val topicName = zkUtils.zkClient.readData[String](ds.getTopicPath(params.featureName))
val topicMetadata = zkUtils.fetchTopicMetadataFromZk(topicName)
Command.user.info(s"Topic: ${topicMetadata.topicName} Number of partitions: ${topicMetadata.numberOfPartitions}")
} finally {
zkUtils.close()
}
} catch {
case npe: NullPointerException =>
Command.user.error(s"Error: feature '${params.featureName}' not found. Check arguments...", npe)
case e: Exception =>
Command.user.error(s"Error describing feature '${params.featureName}': " + e.getMessage, e)
case NonFatal(e) =>
Command.user.warn(s"Non fatal error encountered describing feature '${params.featureName}': ", e)
}
}
}
@Parameters(commandDescription = "Describe the attributes of a given feature in GeoMesa")
class KafkaDescribeSchemaParams extends ConsumerKDSConnectionParams with RequiredTypeNameParam
| nagavallia/geomesa | geomesa-kafka/geomesa-kafka-tools/geomesa-kafka-10-tools/src/main/scala/org/locationtech/geomesa/kafka/tools/status/KafkaDescribeSchemaCommand.scala | Scala | apache-2.0 | 3,070 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package synthesis
package rules.unused
import purescala.Constructors._
import purescala.Expressions.{Equals, Expr, FunctionInvocation}
import purescala.Extractors.TopLevelAnds
case object SelectiveInlining extends Rule("Sel. Inlining") {
def instantiateOn(implicit hctx: SearchContext, p: Problem): Traversable[RuleInstantiation] = {
val TopLevelAnds(exprs) = p.phi
val eqfuncalls = exprs.collect{
case eq @ Equals(FunctionInvocation(fd, args), e) =>
((fd, e), args, eq : Expr)
case eq @ Equals(e, FunctionInvocation(fd, args)) =>
((fd, e), args, eq : Expr)
}
val candidates = eqfuncalls.groupBy(_._1).filter(_._2.size > 1)
if (candidates.nonEmpty) {
var newExprs = exprs
for (cands <- candidates.values) {
val cand = cands.take(2)
val toRemove = cand.map(_._3).toSet
val argss = cand.map(_._2)
val args = argss(0) zip argss(1)
newExprs ++= args.map{ case (l, r) => Equals(l, r) }
newExprs = newExprs.filterNot(toRemove)
}
val sub = p.copy(phi = andJoin(newExprs))
Some(decomp(List(sub), forward, s"Inlining ${candidates.keySet.map(_._1.id).mkString(", ")}"))
} else {
None
}
}
}
| regb/leon | src/main/scala/leon/synthesis/rules/unused/SelectiveInlining.scala | Scala | gpl-3.0 | 1,294 |
/*
* Copyright 2014 – 2015 Paul Horn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.redis.commands
import rx.redis._
import rx.redis.serialization._
class ConnectionCommandsSpec extends CommandsSuite {
test("PING") {
val ping = Ping
sers(ping, "PING")
ser(ping, cmd"PING")
}
test("ECHO") {
val echo = Echo("foobar")
sers(echo, "ECHO", "\\u0000\\u0000\\u0000\\u0006foobar")
ser(echo, cmd"ECHO \\u0000\\u0000\\u0000\\u0006foobar")
}
}
| knutwalker/rx-redis | tests/src/test/scala/rx/redis/commands/ConnectionCommandsSpec.scala | Scala | apache-2.0 | 991 |
package skuber
import org.specs2.mutable.Specification // for unit-style testing
import LabelSelector.dsl._
/**
* @author David O'Riordan
*/
class LabelSelectorSpec extends Specification {
"A label selector can be constructed" >> {
"from a label existence requirement" >> {
val sel = LabelSelector("production")
sel.requirements.size mustEqual 1
sel.requirements(0) mustEqual LabelSelector.ExistsRequirement("production")
}
}
"A label selector can be constructed" >> {
"from a label equality requirement" >> {
val sel = LabelSelector("env" is "production")
sel.requirements.size mustEqual 1
sel.requirements(0) mustEqual LabelSelector.IsEqualRequirement("env", "production")
}
}
"A label selector can be constructed" >> {
"from a label inequality requirement" >> {
val sel = LabelSelector("env" isNot "production")
sel.requirements.size mustEqual 1
sel.requirements(0) mustEqual LabelSelector.IsNotEqualRequirement("env", "production")
}
}
"A label selector can be constructed" >> {
"from a 'In' set requirement" >> {
val sel = LabelSelector("env" isIn List("production", "staging"))
sel.requirements.size mustEqual 1
sel.requirements(0) mustEqual LabelSelector.InRequirement("env", List("production", "staging"))
}
}
"A label selector can be constructed" >> {
"from a 'NotIn' set requirement" >> {
val sel = LabelSelector("env" isNotIn List("production", "staging"))
sel.requirements.size mustEqual 1
sel.requirements(0) mustEqual LabelSelector.NotInRequirement("env", List("production", "staging"))
}
}
"A label selector can be constructed" >> {
"from a mixed equality and set based requirement" >> {
val sel = LabelSelector("tier" is "frontend", "env" isNotIn List("production", "staging"))
sel.requirements.size mustEqual 2
sel.requirements(0) mustEqual LabelSelector.IsEqualRequirement("tier", "frontend")
sel.requirements(1) mustEqual LabelSelector.NotInRequirement("env", List("production", "staging"))
}
}
"A label selector can be constructed" >> {
"from multiple requirements" >> {
val sel = LabelSelector(
"tier" is "frontend",
"release" doesNotExist,
"env" isNotIn List("production", "staging"))
sel.requirements.size mustEqual 3
sel.requirements(0) mustEqual LabelSelector.IsEqualRequirement("tier", "frontend")
sel.requirements(1) mustEqual LabelSelector.NotExistsRequirement("release")
sel.requirements(2) mustEqual LabelSelector.NotInRequirement("env", List("production", "staging"))
}
}
}
| minatjanster/skuber | client/src/test/scala/skuber/model/LabelSelectorSpec.scala | Scala | apache-2.0 | 2,670 |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.core.db.types
import io.doolse.simpledba.jdbc.SizedIso
case class String255(value: String) extends AnyVal
case class String20(value: String) extends AnyVal
case class String40(value: String) extends AnyVal
object String255 {
implicit def stringIso: SizedIso[String255, String] = SizedIso(255, _.value, String255.apply)
implicit def fromString(s: String): String255 = String255(s)
implicit def fromStringOpt(s: String): Option[String255] = Option(s).map(String255.apply)
}
object String20 {
implicit def string20Iso: SizedIso[String20, String] = SizedIso(20, _.value, String20.apply)
implicit def fromString(s: String): String20 = String20(s)
implicit def toString(s20: String20): String = s20.value
}
object String40 {
implicit def string20Iso: SizedIso[String40, String] = SizedIso(40, _.value, String40.apply)
implicit def fromString(s: String): String40 = String40(s)
}
| equella/Equella | Source/Plugins/Core/com.equella.serverbase/scalasrc/com/tle/core/db/types/SizedStrings.scala | Scala | apache-2.0 | 1,755 |
object Test {
case class Bar[A](a: A)
trait Toto[A, B]
////////////////////////////////////////////////
// PoC of controlled KindPolymorphism in Scala
//
// The idea is NOT to provide universal kind-polymorphism that would be a bad idea anyway
// but to bring a "controlled" kind-polymorphism relying on accepted kinds defined by typeclass implicits
// Thus, kind-polymorphism is strictly scoped to your domain and is what you expect to be, nothing else.
//
// `Ykind-polymorphism` flag aims at deferring just a bit Scalac type inference when encountering AnyKind higher bounds
// without losing any strictness in the final typing.
// `<: AnyKind` type-bound is purely technicaland totally eliminated after erasure. There is not type associated to it.
//
// Here are code-samples that work now:
// - basic kind polymorphism controlled by implicits
// - Kindness proofs based on typeclasses (specially SameKind)
// - Kind-Polymorphic list (on type & value) (2 different implementations)
// - Some weird cases we don't want the compiler to authorize
////////////////////////////////////////////////
// Basic Kind polymorphism sample
trait Foo[T <: AnyKind] { type Out ; def id(t: Out): Out = t }
object Foo {
implicit def foo0[T]: Foo[T] { type Out = T } = new Foo[T] { type Out = T }
implicit def foo1[T[_]]: Foo[T] { type Out = T[Any] } = new Foo[T] { type Out = T[Any] }
implicit def foo2[T[_, _]]: Foo[T] { type Out = T[Any, Any] } = new Foo[T] { type Out = T[Any, Any] }
}
def foo[T <: AnyKind](implicit f: Foo[T]): f.type = f
foo[Int].id(23)
foo[List].id(List[Any](1, 2, 3))
foo[Map].id(Map[Any, Any](1 -> "toto", 2 -> "tata", 3 -> "tutu"))
////////////////////////////////////////////////
// Is a type M Kinded as you want ?
trait Kinded[M <: AnyKind] { type Out <: AnyKind }
object Kinded {
type Aux[M <: AnyKind, Out0 <: AnyKind] = Kinded[M] { type Out = Out0 }
implicit def kinded0[M]: Aux[M, M] = new Kinded[M] { type Out = M }
implicit def kinded1[M[_]]: Aux[M, M] = new Kinded[M] { type Out[t] = M[t] }
implicit def kinded2[M[_, _]]: Aux[M, M] = new Kinded[M] { type Out[t, u] = M[t, u] }
}
implicitly[Kinded.Aux[Int, Int]]
implicitly[Kinded.Aux[List, List]]
implicitly[Kinded.Aux[Map, Map]]
////////////////////////////////////////////////
// Extract Kind from a type
trait Kinder[MA] { type M <: AnyKind; type Args <: HList }
object Kinder extends KinderLowerImplicits {
type Aux[MA, M0 <: AnyKind, Args0 <: HList] = Kinder[MA] { type M = M0; type Args = Args0 }
implicit def kinder2[M0[_, _], A0, B0]: Kinder.Aux[M0[A0, B0], M0, A0 :: B0 :: HNil] = new Kinder[M0[A0, B0]] { type M[t, u] = M0[t, u]; type Args = A0 :: B0 :: HNil }
implicit def kinder1[M0[_], A0]: Kinder.Aux[M0[A0], M0, A0 :: HNil] = new Kinder[M0[A0]] { type M[t] = M0[t]; type Args = A0 :: HNil }
}
trait KinderLowerImplicits {
implicit def kinder0[A]: Kinder.Aux[A, A, HNil] = new Kinder[A] { type M = A; type Args = HNil }
}
////////////////////////////////////////////////
//IsoKindness Test
trait SameKind[M <: AnyKind, M2 <: AnyKind]
object SameKind {
implicit def sameKind0[A, B]: SameKind[A, B] = new {}
implicit def sameKind01[M1[_], M2[_]]: SameKind[M1, M2] = new {}
implicit def sameKind02[M1[_, _], M2[_, _]]: SameKind[M1, M2] = new {}
}
def sameKind[M1 <: AnyKind, M2 <: AnyKind](implicit sameKind: SameKind[M1, M2]) = sameKind
sameKind[Int, String] // OK
sameKind[List, Bar] // OK
sameKind[Map, Toto] // OK
// sameKind[List, String] // KO
// sameKind[Map, List] // KO
// sameKind[Map, Boolean] // KO
////////////////////////////////////////////////
// Kind-Polymorphic List style
// Classic Heterogenous List used in KindPolymorphic List
sealed trait HList
final case class ::[+H, +T <: HList](head : H, tail : T) extends HList
sealed trait HNil extends HList
case object HNil extends HNil
object New {
// The Kind Polymorphic List
sealed trait KPList
sealed trait KPNil extends KPList
case object KPNil extends KPNil {
def :::[H, M <: AnyKind, HL <: HList](h:H)(implicit kinder: Kinder.Aux[H, M, HL]) =
New.:::(h, KPNil)
}
sealed case class :::[H, T <: KPList, M <: AnyKind, HL0 <: HList](
head: H
, tail: T
)(implicit val kinder: Kinder.Aux[H, M, HL0]) extends KPList
final case class KPListOps[L <: KPList](l : L) {
def :::[H, M <: AnyKind, HL <: HList](h:H)(implicit kinder: Kinder.Aux[H, M, HL]) =
New.:::(h, l)
}
implicit def kplistOps[L <: KPList](l: L): KPListOps[L] = new KPListOps(l)
val kl = Bar(5) ::: "toto" ::: List(1, 2, 3) ::: Map("toto" -> 1L, "tata" -> 2L) ::: KPNil
val h: Bar[Int] = kl.head
val h2: String = kl.tail.head
val h3: List[Int] = kl.tail.tail.head
val h4: Map[String, Long] = kl.tail.tail.tail.head
}
////////////////////////////////////////////////
// SPECIAL CASES
// def foo0[F <: AnyKind]: F = null.asInstanceOf[F] // error: F cannot be used as a value type
// val i = foo0[Int]
// val li = foo0[List[Int]]
// foo0[List] // KO -> neg
// val l = foo0[List] // KO -> neg
// def foo1[F <: AnyKind, A <: AnyKind]: F[A] = ??? // KO
// def foo2: AnyKind = ??? // KO
// Older implementation Kind-Polymorphic List but I prefer the one above
object Old {
// The Kind Polymorphic List
sealed trait KPList
sealed trait KPNil extends KPList
case object KPNil extends KPNil
sealed trait :::[H <: AnyKind, T <: KPList] extends KPList
trait KPCons[M <: AnyKind, T <: KPList] extends :::[M, T] {
type HL <: HList
type H
def head: H
def tail: T
}
object KPCons {
type Aux[M <: AnyKind, T <: KPList, H0, HL0 <: HList] = KPCons[M, T] { type H = H0; type HL = HL0 }
// Polymorphic
trait Apply[M <: AnyKind, A <: HList] { type Out }
object Apply {
type Aux[M <: AnyKind, A <: HList, Out0] = Apply[M, A] { type Out = Out0 }
implicit def apply0[M]: Aux[M, HNil, M] = new Apply[M, HNil] { type Out = M }
implicit def apply1[M[_], A]: Aux[M, A :: HNil, M[A]] = new Apply[M, A :: HNil] { type Out = M[A] }
implicit def apply2[M[_, _], A, B]: Aux[M, A :: B :: HNil, M[A, B]] = new Apply[M, A :: B :: HNil] { type Out = M[A, B] }
}
trait Unapply[M <: AnyKind, O] { type Out <: HList }
object Unapply {
type Aux[M <: AnyKind, O, Out0 <: HList] = Unapply[M, O] { type Out = Out0 }
implicit def unapply0[M]: Aux[M, M, HNil] = new Unapply[M, M] { type Out = HNil }
implicit def unapply1[M[_], A0]: Unapply.Aux[M, M[A0], A0 :: HNil] = new Unapply[M, M[A0]] { type Out = A0 :: HNil }
implicit def unapply2[M[_, _], A0, B0]: Aux[M, M[A0, B0], A0 :: B0 :: HNil] = new Unapply[M, M[A0, B0]] { type Out = A0 :: B0 :: HNil }
}
// the list builder
trait KPConsBuilder[M <: AnyKind] {
def apply[H0, HL0 <: HList, T <: KPList](head0: H0, tail0: T)(implicit unap: Unapply.Aux[M, H0, HL0]): KPCons.Aux[M, T, H0, HL0] = new KPCons[M, T] {
type HL = HL0
type H = H0
val head: H = head0
val tail: T = tail0
}
}
def apply[M <: AnyKind] = new KPConsBuilder[M] {}
}
// Let's create some kind-polymorphic list
val kl =
KPCons[Bar](
Bar(5)
, KPCons[String](
"toto"
, KPCons[List](
List(1, 2, 3)
, KPCons[Map](
Map("toto" -> 1L, "tata" -> 2L)
, KPNil
)
)
)
)
val h: Bar[Int] = kl.head
val h2: String = kl.tail.head
val h3: List[Int] = kl.tail.tail.head
val h4: Map[String, Long] = kl.tail.tail.tail.head
}
} | som-snytt/dotty | tests/pos/anykind.scala | Scala | apache-2.0 | 7,888 |
/*
* The MIT License (MIT)
* <p>
* Copyright (c) 2017-2020
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.techcode.streamy.xymon.util.parser
import com.google.common.base.CharMatcher
import io.techcode.streamy.util.json._
import io.techcode.streamy.util.parser.{ByteStringParser, CharMatchers, ParseException}
import io.techcode.streamy.xymon.component.XymonTransformer
object XymonParser {
private[parser] val GroupNameMatcher: CharMatcher = CharMatchers.AlphaNum.or(CharMatchers.PrintUsAscii)
private[parser] val HostNameMatcher: CharMatcher = CharMatchers.Alpha.or(CharMatcher.is(',')).precomputed()
private[parser] val TestNameMatcher: CharMatcher = CharMatcher.noneOf(". ").precomputed()
def parser(conf: XymonTransformer.Parser.Config): ByteStringParser[Json] = new XymonParser(conf)
}
/**
 * Parser helpers containing various shortcuts for character matching.
*/
private[parser] trait ParserHelpers {
this: ByteStringParser[Json] =>
@inline def sp(): Boolean = ch(' ')
@inline def plus(): Boolean = ch('+')
@inline def slash(): Boolean = ch('/')
@inline def colon(): Boolean = ch(':')
@inline def dot(): Boolean = ch('.')
}
/**
* Xymon parser for the status command. The message syntax is as follows:
* status[+LIFETIME][/group:GROUP] HOSTNAME.TESTNAME COLOR <additional_text>
* For more information on the parameters, see the xymon man page
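 * An example (hypothetical) message: "status+10m/group:web www,example,com.disk green all good"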
*/
private[parser] class XymonParser(config: XymonTransformer.Parser.Config)
extends ByteStringParser[Json] with ParserHelpers {
private val binding = config.binding
private implicit val builder: JsObjectBuilder = Json.objectBuilder()
def run(): Json = {
if (root()) {
builder.result()
} else {
throw new ParseException(s"Unexpected input at index ${_cursor}")
}
}
override def root(): Boolean =
str(XymonTransformer.Id.Status) &&
lifetimeAndGroup() && sp() &&
hostAndService() && sp() &&
color() &&
additionalText() &&
eoi()
def lifetimeAndGroup(): Boolean =
lifetime() && group()
def lifetime(): Boolean =
optional(
plus() &&
capture(binding.lifetime, duration() && optional(durationUnit())) { (key, value) =>
// Unsafe can be use because duration is validate
builder.bind(key, JsString.fromByteStringUnsafe(value))
}
)
def duration(): Boolean = oneOrMore(CharMatchers.Digit)
def durationUnit(): Boolean = times(1, CharMatchers.LowerAlpha)
def group(): Boolean =
optional(
slash() &&
oneOrMore(CharMatchers.LowerAlpha) &&
colon() &&
capture(binding.group, oneOrMore(XymonParser.GroupNameMatcher)) { (key, value) =>
        // Unsafe can be used because group is validated
builder.bind(key, JsString.fromByteStringUnsafe(value))
}
)
def hostAndService(): Boolean =
host() && dot() && service()
def host(): Boolean =
capture(binding.host, oneOrMore(XymonParser.HostNameMatcher)) { (key, value) =>
      // Unsafe can be used because host is validated
builder.bind(key, JsString.fromByteStringUnsafe(value))
}
def service(): Boolean =
capture(binding.service, oneOrMore(XymonParser.TestNameMatcher)) { (key, value) =>
      // Unsafe can be used because service is validated
builder.bind(key, JsString.fromByteStringUnsafe(value))
}
def color(): Boolean =
capture(binding.color, oneOrMore(CharMatchers.LowerAlpha)) { (key, value) =>
      // Unsafe can be used because color is validated
builder.bind(key, JsString.fromByteStringUnsafe(value))
}
def additionalText(): Boolean =
optional(
sp() &&
capture(binding.message, any()) { (key, value) =>
        // Unsafe can be used because message is validated
builder.bind(key, JsString.fromByteStringUnsafe(value))
}
)
override def cleanup(): Unit = {
super.cleanup()
builder.clear()
}
}
| amannocci/streamy | plugin-xymon/src/main/scala/io/techcode/streamy/xymon/util/parser/XymonParser.scala | Scala | mit | 4,994 |
package calc
object Calc4 extends App {
val perms = (1 to 4).permutations
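  // right-associative power tower: compute(IndexedSeq(a, b, c, d)) == a ^ (b ^ (c ^ d))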
def compute(perm: IndexedSeq[Int]): Double = math.pow(perm(0), math.pow(perm(1), math.pow(perm(2), perm(3))))
println(perms.count(compute(_) == 1))
}
| ebowman/calc | src/main/scala/calc/Calc4.scala | Scala | unlicense | 234 |
package purespark.examples
import purespark.GraphX._
import purespark.Prelude._
/**
* Computes shortest paths to the given set of landmark vertices, returning a graph where each
* vertex attribute is a map containing the shortest-path distance to each reachable landmark.
*/
object ShortestPaths {
/** Stores a map from the vertex id of a landmark to the distance to that landmark. */
type SPMap = Map[VertexId, Int]
private def makeMap (x: (VertexId, Int)*) = Map(x: _*)
private def incrementMap (spmap: SPMap): SPMap = spmap.map { case (v, d) => v -> (d + 1)}
private def addMaps (spmap1: SPMap, spmap2: SPMap): SPMap =
(spmap1.keySet ++ spmap2.keySet).map {
k => k -> scala.math.min(spmap1.getOrElse(k, Int.MaxValue), spmap2.getOrElse(k, Int.MaxValue))
}.toMap
/**
* Computes shortest paths to the given set of landmark vertices.
*
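   * A sketch of a call (mirroring the example object below; `graphRDD` is a
   * hypothetical GraphRDD value):
   * {{{
   *   val result = ShortestPaths.run(graphRDD, Seq(2, 7, 8, 10))
   * }}}
   *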
* @tparam E the edge attribute type (not used in the computation)
*
* @param graph the graph for which to compute the shortest paths
* @param landmarks the list of landmark vertex ids. Shortest paths will be computed to each
* landmark.
*
* @return a graph where each vertex attribute is a map containing the shortest-path distance to
* each reachable landmark vertex.
*/
def run[V, E] (graph: GraphRDD[V, E], landmarks: Seq[VertexId]): GraphRDD[SPMap, E] = {
val spGraph = GraphRDD(
mapVertices(graph.vertexRDD) { v =>
if (landmarks.contains(v.id)) makeMap(v.id -> 0) else makeMap()
},
graph.edgeRDD)
val initialMessage = makeMap()
def sendMessage (edge: EdgeTriplet[SPMap, _]): List[Vertex[SPMap]] = {
val newAttr = incrementMap(edge.dstAttr)
if (edge.srcAttr != addMaps(newAttr, edge.srcAttr))
List(Vertex(edge.srcId, newAttr))
else
List.empty
}
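    // Pregel: each vertex merges incoming distance maps via addMaps; sendMessage
    // propagates an incremented map from an edge's destination back to its source.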
Pregel(spGraph)(initialMessage)((v, msg) => addMaps(v.attr, msg))(sendMessage)(addMaps)
}
}
object ShortestPathsExample extends App {
// convert from an RDD of tuples to an edge RDD
private def fromTuplesToEdges (edges: RDD[(VertexId, VertexId)]): EdgeRDD[Null] =
map(edges)(map(_) { case (s, t) => Edge(s, t, null)})
def run = {
// an edge RDD with three partitions
val edges: EdgeRDD[Null] = fromTuplesToEdges(List(
List((1, 2), (3, 2), (4, 5), (6, 8)),
List((5, 7), (8, 9), (10, 9), (9, 6)),
List((9, 10))
))
val vertices: VertexRDD[Null] = List(List.range(1, 11).map(Vertex(_, null)))
val result = ShortestPaths.run(GraphRDD(vertices, edges), Seq(2, 7, 8, 10))
println()
println("Finding shortest paths to landmarks:")
result.vertexRDD.flatten.foreach(println)
}
ShortestPathsExample.run
} | ericpony/scala-examples | PureSpark/examples/ShortestPaths.scala | Scala | mit | 2,803 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.electro
import squants._
import squants.space.Length
/**
* @author garyKeorkunian
* @since 0.1
*
* @param value value in [[squants.electro.SiemensPerMeter]]
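 *
 * A small usage sketch (illustrative only):
 * {{{
 *   import squants.electro.ConductivityConversions._
 *   import squants.space.Meters
 *
 *   val sigma = 5.siemensPerMeter
 *   val conductance = sigma * Meters(2) // Siemens(10)
 * }}}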
*/
final class Conductivity private (val value: Double, val unit: ConductivityUnit)
extends Quantity[Conductivity] {
def dimension = Conductivity
def *(that: Length): ElectricalConductance = Siemens(this.toSiemensPerMeter * that.toMeters)
def toSiemensPerMeter = to(SiemensPerMeter)
def inOhmMeters = OhmMeters(1d / toSiemensPerMeter)
}
object Conductivity extends Dimension[Conductivity] {
private[electro] def apply[A](n: A, unit: ConductivityUnit)(implicit num: Numeric[A]) = new Conductivity(num.toDouble(n), unit)
def apply(value: Any) = parse(value)
def name = "Conductivity"
def primaryUnit = SiemensPerMeter
def siUnit = SiemensPerMeter
def units = Set(SiemensPerMeter)
}
trait ConductivityUnit extends UnitOfMeasure[Conductivity] with UnitConverter {
def apply[A](n: A)(implicit num: Numeric[A]) = Conductivity(n, this)
}
object SiemensPerMeter extends ConductivityUnit with PrimaryUnit with SiUnit {
val symbol = "S/m"
}
object ConductivityConversions {
lazy val siemenPerMeter = SiemensPerMeter(1)
implicit class ConductivityConversions[A](n: A)(implicit num: Numeric[A]) {
def siemensPerMeter = SiemensPerMeter(n)
}
implicit object ConductivityNumeric extends AbstractQuantityNumeric[Conductivity](Conductivity.primaryUnit)
}
| typelevel/squants | shared/src/main/scala/squants/electro/Conductivity.scala | Scala | apache-2.0 | 1,996 |
package org.backuity.clist
import org.backuity.clist.Command.ParseContext
import org.backuity.clist.util.Read
import org.backuity.matchete.JunitMatchers
import org.junit.Test
class CommandTest extends JunitMatchers {
@Test
def parseContextValidateShouldOnlyRemoveOneArgument(): Unit = {
val arg = CliOptionalArgument(classOf[String], "", "", None, "")(implicitly[Read[String]])
new ParseContext(List(arg), Set.empty, List("arg")).validate(arg, "arg").remainingArgs must_== List()
new ParseContext(List(arg), Set.empty, List("arg", "arg")).validate(arg, "arg").remainingArgs must_== List("arg")
new ParseContext(List(arg), Set.empty, List("hey", "arg", "arg")).validate(arg, "arg").remainingArgs must_== List("hey", "arg")
new ParseContext(List(arg), Set.empty, List("hey", "arg")).validate(arg, "arg").remainingArgs must_== List("hey")
new ParseContext(List(arg), Set.empty, List("hey", "arg", "arg", "hoy")).validate(arg, "arg").remainingArgs must_== List("hey", "arg", "hoy")
}
}
| backuity/clist | core/src/test/scala/org/backuity/clist/CommandTest.scala | Scala | apache-2.0 | 1,016 |
package com.insweat.hssd.lib
import com.insweat.hssd.lib.util._
import scala.collection.immutable.HashMap
import com.insweat.hssd.lib.essence.ValueData
import java.text.SimpleDateFormat
import java.text.ParseException
import com.insweat.hssd.lib.essence.SimpleThypeLike
package object constraints {
private val builtin = List(
BoolConstraint,
NumberConstraint,
DateTimeConstraint,
NotNull,
RangedInt,
RangedLong,
RangedFloat,
RangedDouble,
RangedDateTime,
Regexed
)
private var _factories: Map[String, ConstraintFactory] = {
HashMap((builtin map {e => e.name -> e}): _*)
}
private val dateTimeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
dateTimeFmt.setLenient(false)
private object NotNull extends Constraint with ConstraintFactory {
val name = "com.insweat.hssd.constraints.notNull"
override def apply(vd: ValueData, value: Any): Option[String] = {
if(vd.entryNode.isLeaf
&& vd.element.thype.isInstanceOf[SimpleThypeLike]) {
value match {
case null => Some("Value cannot be null.")
case _ => None
}
}
else None
}
override def apply(attribs: Map[String, String]): Constraint = this
}
private object BoolConstraint
extends Constraint
with ConstraintFactory {
val name = "com.insweat.hssd.constraints.boolConstraint"
override def apply(vd: ValueData, value: Any): Option[String] = {
if(value.isInstanceOf[Boolean]) None
else if(value == None) None
else Some(s"${ss(value)} is not a Boolean.")
}
override def apply(attribs: Map[String, String]): Constraint = this
}
private object NumberConstraint extends Constraint with ConstraintFactory {
val name = "com.insweat.hssd.constraints.numberConstraint"
override def apply(vd: ValueData, value: Any): Option[String] = {
if(value == None) None
else {
val d = Convert.toDouble(value)
if(!d.isNaN) None else Some(s"${ss(value)} is not a Number.")
}
}
override def apply(attribs: Map[String, String]): Constraint = this
}
private object DateTimeConstraint extends Constraint with ConstraintFactory {
val name = "com.insweat.hssd.constraints.dateTimeConstraint"
override def apply(vd: ValueData, value: Any): Option[String] = {
value match {
case null => None
case None => None
case s: String => {
if(s.startsWith("+") &&
s.length() >= 2 &&
Character.isDigit(s.charAt(1))) {
try {
val i = Integer.parseInt(s.substring(1))
if(i < 0) {
mkError(value)
}
else {
None
}
}
catch {
case e: Exception => mkError(value)
}
}
else {
try {
toUnixTime(s)
None
}
catch {
case e : ParseException => mkError(value)
}
}
}
}
}
private def mkError(value: Any) =
Some(s"${ss(value)} is not a valid date-time string")
override def apply(attribs: Map[String, String]): Constraint = this
}
def register(cf: ConstraintFactory) {
if(_factories.contains(cf.name)) {
throw new ConstraintSetupError(
s"A constraint named ${cf.name} already exists.")
}
_factories += cf.name -> cf
}
def apply(name: String): Constraint = apply(name, Map.empty)
def apply(name: String, attribs: Map[String, String]) = {
_factories.get(name) match {
case Some(factory) => factory(attribs)
case None =>
throw new NoSuchElementException(s"No constraint named $name")
}
}
def toUnixTime(s: String): Long = dateTimeFmt.parse(s).getTime / 1000
}
| insweat/hssd | com.insweat.hssd.lib/src/com/insweat/hssd/lib/constraints/package.scala | Scala | lgpl-3.0 | 4,787 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin.common
import com.twitter.zipkin.Constants
import java.nio.ByteBuffer
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class SpanTest extends FunSuite {
val annotationValue = "NONSENSE"
val expectedAnnotation = Annotation(1, annotationValue, Some(Endpoint(1, 2, "service")))
val expectedSpan = Span(12345, "methodcall", 666, None,
List(expectedAnnotation), Nil)
val annotation1 = Annotation(1, "value1", Some(Endpoint(1, 2, "service")))
val annotation2 = Annotation(2, "value2", Some(Endpoint(3, 4, "Service"))) // upper case service name
val annotation3 = Annotation(3, "value3", Some(Endpoint(5, 6, "service")))
val binaryAnnotation1 = BinaryAnnotation("key1", ByteBuffer.wrap("value1".getBytes), AnnotationType.String, Some(Endpoint(1, 2, "service1")))
val binaryAnnotation2 = BinaryAnnotation("key2", ByteBuffer.wrap("value2".getBytes), AnnotationType.String, Some(Endpoint(3, 4, "service2")))
val spanWith3Annotations = Span(12345, "methodcall", 666, None,
List(annotation1, annotation2, annotation3), Nil)
val spanWith2BinaryAnnotations = Span(12345, "methodcall", 666, None,
Nil, List(binaryAnnotation1, binaryAnnotation2))
test("serviceNames is lowercase") {
val names = spanWith3Annotations.serviceNames
assert(names.size === 1)
assert(names.toSeq(0) === "service")
}
test("serviceNames") {
val map = expectedSpan.getAnnotationsAsMap
val actualAnnotation = map.get(annotationValue).get
assert(expectedAnnotation === actualAnnotation)
}
test("merge two span parts") {
val ann1 = Annotation(1, "value1", Some(Endpoint(1, 2, "service")))
val ann2 = Annotation(2, "value2", Some(Endpoint(3, 4, "service")))
val span1 = Span(12345, "", 666, None, List(ann1), Nil, true)
val span2 = Span(12345, "methodcall", 666, None, List(ann2), Nil, false)
val expectedSpan = Span(12345, "methodcall", 666, None, List(ann1, ann2), Nil, true)
val actualSpan = span1.mergeSpan(span2)
assert(actualSpan === expectedSpan)
}
test("merge span with Unknown span name with known span name") {
val span1 = Span(1, "Unknown", 2, None, List(), Seq())
val span2 = Span(1, "get", 2, None, List(), Seq())
assert(span1.mergeSpan(span2).name === "get")
assert(span2.mergeSpan(span1).name === "get")
}
test("return the first annotation") {
assert(spanWith3Annotations.firstAnnotation.get === annotation1)
}
test("return the last annotation") {
assert(spanWith3Annotations.lastAnnotation.get === annotation3)
}
test("know this is not a client side span") {
val spanSr = Span(1, "n", 2, None, List(Annotation(1, Constants.ServerRecv, None)), Nil)
assert(!spanSr.isClientSide)
}
test("get duration") {
assert(spanWith3Annotations.duration === Some(2))
}
test("don't get duration duration when there are no annotations") {
val span = Span(1, "n", 2, None, List(), Nil)
assert(span.duration === None)
}
test("validate span") {
val cs = Annotation(1, Constants.ClientSend, None)
val sr = Annotation(2, Constants.ServerRecv, None)
val ss = Annotation(3, Constants.ServerSend, None)
val cr = Annotation(4, Constants.ClientRecv, None)
val cs2 = Annotation(5, Constants.ClientSend, None)
val s1 = Span(1, "i", 123, None, List(cs, sr, ss, cr), Nil)
assert(s1.isValid)
val s3 = Span(1, "i", 123, None, List(cs, sr, ss, cr, cs2), Nil)
assert(!s3.isValid)
}
test("get binary annotation") {
assert(spanWith2BinaryAnnotations.getBinaryAnnotation("key1") === Some(binaryAnnotation1))
assert(spanWith2BinaryAnnotations.getBinaryAnnotation("NoExitingKey") === None)
}
}
| wyzssw/zipkin | zipkin-common/src/test/scala/com/twitter/zipkin/common/SpanTest.scala | Scala | apache-2.0 | 4,416 |