code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5–1M)
---|---|---|---|---|---|
/*
*************************************************************************************
* Copyright 2013 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.rule.category
import com.normation.rudder.domain.RudderDit
import com.normation.ldap.sdk.LDAPConnectionProvider
import com.normation.rudder.repository.ldap.LDAPEntityMapper
import com.normation.utils.ScalaReadWriteLock
import net.liftweb.common._
import com.normation.ldap.sdk.RoLDAPConnection
import com.unboundid.ldap.sdk.Filter._
import com.normation.ldap.sdk.BuildFilter._
import com.unboundid.ldap.sdk.DN
import com.normation.rudder.domain.RudderLDAPConstants._
import com.normation.inventory.ldap.core.LDAPConstants._
import com.normation.ldap.sdk.LDAPEntry
import scala.collection.immutable.SortedMap
import com.normation.ldap.sdk._
import com.normation.utils.Control.{boxSequence, sequence}
import com.normation.utils.Utils
import com.normation.utils.StringUuidGenerator
import com.normation.eventlog.ModificationId
import com.normation.eventlog.EventActor
import com.normation.ldap.ldif.LDIFNoopChangeRecord
import com.normation.rudder.services.user.PersonIdentService
import com.normation.rudder.repository.GitRuleArchiver
import com.unboundid.ldap.sdk.LDAPException
import com.unboundid.ldap.sdk.ResultCode
import org.joda.time.DateTime
import org.joda.time.format.ISODateTimeFormat
/**
* Ordering for a List[RuleCategoryId].
* The list MUST start with the root!
*/
object RuleCategoryOrdering extends Ordering[List[RuleCategoryId]] {
type ID = RuleCategoryId
override def compare(x:List[ID],y:List[ID]) = {
Utils.recTreeStringOrderingCompare(x.map( _.value ), y.map( _.value ))
}
}
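// A minimal usage sketch of the ordering above, assuming RuleCategoryId is a
// simple wrapper around a string `value` (as its use elsewhere suggests):
//
//   val root  = RuleCategoryId("rootRuleCategory")
//   val paths = List(
//     List(root, RuleCategoryId("b")),
//     List(root, RuleCategoryId("a"))
//   )
//   paths.sorted(RuleCategoryOrdering)
//   // => List(List(root, "a"), List(root, "b")), i.e. lexicographic on id values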
class RoLDAPRuleCategoryRepository(
val rudderDit : RudderDit
, val ldap : LDAPConnectionProvider[RoLDAPConnection]
, val mapper : LDAPEntityMapper
, val categoryMutex : ScalaReadWriteLock //that's a scala-level mutex to have some kind of consistency with LDAP
) extends RoRuleCategoryRepository with Loggable {
repo =>
/**
* Get category with given Id
*/
def get(id:RuleCategoryId) : Box[RuleCategory] = {
for {
con <- ldap
entry <- getCategoryEntry(con, id) ?~! s"Entry with ID '${id.value}' was not found"
category <- mapper.entry2RuleCategory(entry) ?~! s"Error when transforming LDAP entry ${entry} into a rule category"
} yield {
category
}
}
/**
* Retrieve the category entry for the given ID, with the given connection
* Used to get the ldap dn
*/
def getCategoryEntry(con:RoLDAPConnection, id:RuleCategoryId, attributes:String*) : Box[LDAPEntry] = {
val categoryEntries = categoryMutex.readLock {
con.searchSub(rudderDit.RULECATEGORY.dn, EQ(A_RULE_CATEGORY_UUID, id.value), attributes:_*)
}
categoryEntries.size match {
case 0 => Empty
case 1 => Full(categoryEntries(0))
case _ =>
val categoryDN = categoryEntries.map( _.dn).mkString("; ")
Failure(s"Error, the directory contains multiple occurrence of group category with id ${id.value}. DN: ${categoryDN}")
}
}
/**
* get Root category
*/
override def getRootCategory(): Box[RuleCategory] = {
val catAttributes = Seq(A_OC, A_RULE_CATEGORY_UUID, A_NAME, A_RULE_TARGET, A_DESCRIPTION, A_IS_ENABLED, A_IS_SYSTEM)
(for {
con <- ldap
entries = categoryMutex.readLock { con.searchSub(rudderDit.RULECATEGORY.dn, IS(OC_RULE_CATEGORY), catAttributes:_*) }
// look for sub categories
categories <- sequence(entries){ entry =>
mapper.entry2RuleCategory(entry).map(c => (entry.dn, c)) ?~! s"Error when mapping from an LDAP entry to a RuleCategory: ${entry}"
}
rootCategory <- buildHierarchy(rudderDit.RULECATEGORY.dn, categories.toList)
} yield {
rootCategory
})
}
/**
* Build the hierarchy defined by the list of categories, filling children.
* The starting point is given by the root id.
*/
private[this] def buildHierarchy(rootDn: DN, categories: List[(DN, RuleCategory)]): Box[RuleCategory] = {
def getChildren(parentDn: DN): List[RuleCategory] = categories.collect { case (dn, r) if(dn.getParent == parentDn) =>
val cc = getChildren(dn)
r.copy(childs = cc)
}
for {
root <- Box(categories.find( _._1 == rootDn)) ?~! s"The category with id '${rootDn}' was not found in the backend but is referenced by other categories"
} yield {
root._2.copy(childs = getChildren(rootDn))
}
}
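// A worked sketch of the recursion above, with hypothetical DNs:
// given rootDn = "ou=Rules" and categories
//   ("ou=Rules", root)
//   ("ruleCategoryId=c1,ou=Rules", c1)
//   ("ruleCategoryId=c2,ruleCategoryId=c1,ou=Rules", c2)
// getChildren(rootDn) keeps c1 (its DN's parent is rootDn), recurses to attach
// c2 under c1, and yields root.copy(childs = List(c1.copy(childs = List(c2)))).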
}
class WoLDAPRuleCategoryRepository(
roruleCategoryRepo : RoLDAPRuleCategoryRepository
, ldap : LDAPConnectionProvider[RwLDAPConnection]
, uuidGen : StringUuidGenerator
, gitArchiver : GitRuleCategoryArchiver
, personIdentService : PersonIdentService
, autoExportOnModify : Boolean
) extends WoRuleCategoryRepository with Loggable {
repo =>
import roruleCategoryRepo.{ldap => roLdap, _}
/**
* Check if a category exists with the given name
*/
private[this] def categoryExists(
con : RoLDAPConnection
, name : String
, parentDn : DN
) : Boolean = {
con.searchOne(parentDn, AND(IS(OC_RULE_CATEGORY), EQ(A_NAME, name)), A_RULE_CATEGORY_UUID).size match {
case 0 => false
case 1 => true
case _ =>
logger.error(s"More than one Rule Category has ${name} name under ${parentDn}")
true
}
}
/**
* Check if a category exists with the given name
*/
private[this] def categoryExists(
con : RoLDAPConnection
, name : String
, parentDn : DN
, currentId : RuleCategoryId
) : Boolean = {
con.searchOne(parentDn, AND(NOT(EQ(A_RULE_CATEGORY_UUID, currentId.value)), AND(IS(OC_RULE_CATEGORY), EQ(A_NAME, name))), A_RULE_CATEGORY_UUID).size match {
case 0 => false
case 1 => true
case _ =>
logger.error(s"More than one Rule Category has ${name} name under ${parentDn}")
true
}
}
/**
* Return the list of parents for that category, from the root category
*/
private[this] def getParents(id:RuleCategoryId) : Box[List[RuleCategory]] = {
for {
root <- getRootCategory
parents <- root.findParents(id)
} yield {
parents
}
}
/**
* Add that category into the given parent category
* Fails if the parent category does not exist or
* if it already contains that category.
*
* @return the new category.
*/
override def create (
that : RuleCategory
, into : RuleCategoryId
, modId : ModificationId
, actor : EventActor
, reason : Option[String]
): Box[RuleCategory] = {
for {
con <- ldap
parentCategoryEntry <- getCategoryEntry(con, into, "1.1") ?~! s"The parent category '${into.value}' was not found, cannot add"
canAddByName <- if (categoryExists(con, that.name, parentCategoryEntry.dn)) {
Failure(s"Cannot create the Node Group Category with name '${that.name}' : a category with the same name exists at the same level")
} else {
Full("OK, can add")
}
categoryEntry = mapper.ruleCategory2ldap(that,parentCategoryEntry.dn)
result <- categoryMutex.writeLock { con.save(categoryEntry, removeMissingAttributes = true) }
autoArchive <- if(autoExportOnModify && !result.isInstanceOf[LDIFNoopChangeRecord] && !that.isSystem) {
for {
parents <- getParents(that.id)
commiter <- personIdentService.getPersonIdentOrDefault(actor.name)
archive <- gitArchiver.archiveRuleCategory(that,parents.map( _.id), Some(modId,commiter, reason))
} yield {
archive
}
} else Full("ok")
newCategory <- get(that.id) ?~! s"The newly created category '${that.id.value}' was not found"
} yield {
newCategory
}
}
/**
* Update and move an existing category
*/
override def updateAndMove(
category : RuleCategory
, containerId : RuleCategoryId
, modId : ModificationId
, actor : EventActor
, reason : Option[String]
) : Box[RuleCategory] = {
repo.synchronized { for {
con <- ldap
oldParents <- if(autoExportOnModify) {
getParents(category.id)
} else Full(Nil)
oldCategoryEntry <- getCategoryEntry(con, category.id, "1.1") ?~! s"Entry with ID '${category.id.value}' was not found"
newParent <- getCategoryEntry(con, containerId, "1.1") ?~! s"Parent entry with ID '${containerId.value}' was not found"
canAddByName <- if (categoryExists(con, category.name, newParent.dn, category.id)) {
Failure(s"Cannot update the Node Group Category with name ${category.name} : a category with the same name exists at the same level")
} else {
Full("OK")
}
categoryEntry = mapper.ruleCategory2ldap(category,newParent.dn)
moved <- if (newParent.dn == oldCategoryEntry.dn.getParent) {
Full(LDIFNoopChangeRecord(oldCategoryEntry.dn))
} else {
categoryMutex.writeLock { con.move(oldCategoryEntry.dn, newParent.dn) }
}
result <- categoryMutex.writeLock { con.save(categoryEntry, removeMissingAttributes = true) }
updated <- get(category.id)
autoArchive <- (moved, result) match {
case (_:LDIFNoopChangeRecord, _:LDIFNoopChangeRecord) => Full("OK, nothing to archive")
case _ if(autoExportOnModify && !updated.isSystem) =>
(for {
parents <- getParents(updated.id)
commiter <- personIdentService.getPersonIdentOrDefault(actor.name)
moved <- gitArchiver.moveRuleCategory(updated, oldParents.map( _.id), parents.map( _.id), Some(modId,commiter, reason))
} yield {
moved
}) ?~! "Error when trying to automaticallyarchive the category move or update"
case _ => Full("ok")
}
} yield {
updated
} }
}
/**
* Delete the category.
* If no category with such an id exists, it is a success.
* If checkEmpty is set to true, the deletion is performed only if
* the category is empty (otherwise, the category and its children are deleted).
* @param that the id of the category to delete
* @param checkEmpty if true, only an empty category can be deleted
* @return
* - Full(category id) for a success
* - Failure(with error message) if an error happened.
*/
override def delete(
that : RuleCategoryId
, modId : ModificationId
, actor : EventActor
, reason : Option[String]
, checkEmpty : Boolean = true
) : Box[RuleCategoryId] = {
for {
con <- ldap
deleted <- {
getCategoryEntry(con, that) match {
case Full(entry) =>
for {
parents <- if(autoExportOnModify) {
getParents(that)
} else Full(Nil)
ok <- try {
categoryMutex.writeLock { con.delete(entry.dn, recurse = !checkEmpty) ?~! s"Error when trying to delete category with ID '${that.value}'" }
} catch {
case e:LDAPException if(e.getResultCode == ResultCode.NOT_ALLOWED_ON_NONLEAF) =>
Failure("Can not delete a non empty category")
case e:Exception =>
Failure(s"Exception when trying to delete category with ID '${that.value}'", Full(e), Empty)
}
category <- mapper.entry2RuleCategory(entry)
autoArchive <- (if(autoExportOnModify && ok.size > 0 && !category.isSystem) {
for {
commiter <- personIdentService.getPersonIdentOrDefault(actor.name)
archive <- gitArchiver.deleteRuleCategory(that,parents.map( _.id), Some(modId, commiter, reason))
} yield {
archive
}
} else Full("ok") ) ?~! "Error when trying to archive automatically the category deletion"
} yield {
that
}
case Empty => Full(that)
case f:Failure => f
}
}
} yield {
deleted
}
}
}
| armeniaca/rudder | rudder-core/src/main/scala/com/normation/rudder/rule/category/LDAPRuleCategoryRepository.scala | Scala | gpl-3.0 | 14,853 |
package lila.tournament
import akka.actor._
import akka.pattern.{ ask, pipe }
import actorApi._
import lila.game.actorApi.FinishGame
import lila.hub.actorApi.map.Ask
import lila.hub.actorApi.WithUserIds
import makeTimeout.short
private[tournament] final class Organizer(
api: TournamentApi,
reminder: ActorRef,
isOnline: String => Boolean,
socketHub: ActorRef) extends Actor {
context.system.lilaBus.subscribe(self, 'finishGame, 'adjustCheater, 'adjustBooster)
def receive = {
case AllCreatedTournaments => TournamentRepo allCreated 30 foreach {
_ foreach { tour =>
tour.schedule match {
case None => PlayerRepo count tour.id foreach {
case 0 => api wipe tour
case nb if tour.hasWaitedEnough =>
if (nb >= Tournament.minPlayers) api start tour
else api wipe tour
case _ =>
}
case Some(schedule) if tour.hasWaitedEnough => api start tour
case _ => ejectLeavers(tour)
}
}
}
case StartedTournaments => TournamentRepo.started foreach {
_ foreach { tour =>
PlayerRepo activeUserIds tour.id foreach { activeUserIds =>
if (tour.secondsToFinish == 0) api finish tour
else if (!tour.scheduled && activeUserIds.size < 2) api finish tour
else if (!tour.isAlmostFinished) startPairing(tour, activeUserIds)
reminder ! RemindTournament(tour, activeUserIds)
}
}
}
case FinishGame(game, _, _) => api finishGame game
case lila.hub.actorApi.mod.MarkCheater(userId) => api ejectLame userId
case lila.hub.actorApi.mod.MarkBooster(userId) => api ejectLame userId
case lila.hub.actorApi.round.Berserk(gameId, userId) => api.berserk(gameId, userId)
}
private def ejectLeavers(tour: Tournament) =
PlayerRepo userIds tour.id foreach {
_ filterNot isOnline foreach { api.withdraw(tour.id, _) }
}
private def startPairing(tour: Tournament, activeUserIds: List[String]) =
getWaitingUsers(tour) zip PairingRepo.playingUserIds(tour) foreach {
case (waitingUsers, playingUserIds) =>
val users = waitingUsers intersect activeUserIds diff playingUserIds
tour.system.pairingSystem.createPairings(tour, users) onSuccess {
case (pairings, events) => pairings.toNel foreach { api.makePairings(tour, _, events) }
}
}
private def getWaitingUsers(tour: Tournament): Fu[WaitingUsers] =
socketHub ? Ask(tour.id, GetWaitingUsers) mapTo manifest[WaitingUsers]
}
| Happy0/lila | modules/tournament/src/main/Organizer.scala | Scala | mit | 2,631 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.cds.spark.samples
import scala.collection.mutable._
import org.apache.commons.lang3.StringEscapeUtils
import org.apache.log4j.Level
import org.apache.log4j.Logger
import org.apache.spark.Accumulator
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming._
import org.apache.spark.streaming.dstream._
import org.http4s._
import org.http4s.Http4s._
import org.http4s.Status._
import org.http4s.client.Client
import org.http4s.client.blaze.PooledHttp1Client
import org.http4s.headers.Authorization
import com.ibm.couchdb._
import scalaz._
import scalaz.concurrent.Task
import twitter4j.Status
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.apache.spark.sql.DataFrame
import org.apache.spark.rdd.RDD
import org.apache.spark.rdd.EmptyRDD
import com.google.common.base.CharMatcher
import scala.math.BigDecimal
import com.ibm.cds.spark.samples.config.DemoConfig
import com.ibm.cds.spark.samples.ToneAnalyzer.ToneCategory
import org.apache.spark.Logging
/**
* @author dtaieb
*/
object StreamingTwitter extends Logging {
var ssc: StreamingContext = null
var sqlContext: SQLContext = null
var workingRDD: RDD[Row] = null
var schemaTweets : StructType = null
val logger: Logger = Logger.getLogger( "com.ibm.cds.spark.samples.StreamingTwitter" )
//main method invoked when running as a standalone Spark Application
def main(args: Array[String]) {
val conf = new SparkConf().setAppName("Spark Streaming Twitter Demo")
val sc = new SparkContext(conf)
startTwitterStreaming(sc, Seconds(10));
}
//Hold configuration key/value pairs
val config = new DemoConfig
//Wrapper api for Notebook access
def setConfig(key:String, value:String){
config.setConfig(key, value)
}
def startTwitterStreaming( sc: SparkContext, stopAfter: Duration = Seconds(0) ){
println("Starting twitter stream 0.3");
if ( ssc != null ){
println("Twitter Stream already running");
println("Please use stopTwitterStreaming() first and try again");
return;
}
if ( !config.validateConfiguration(DemoConfig.CHECKPOINT_DIR_KEY) ){
println("Unable to validate config")
return;
}
Logger.getLogger("org.apache.spark").setLevel(Level.OFF)
workingRDD = sc.emptyRDD
//Broadcast the config to each worker node
val broadcastVar = sc.broadcast(config.toImmutableMap)
var canStopTwitterStream = true
var batchesProcessed=0
ssc = new StreamingContext( sc, Seconds(5) )
ssc.addStreamingListener( new StreamingListener )
try{
sqlContext = new SQLContext(sc)
val keys = config.getConfig("tweets.key").split(",");
val firstFilter = config.getConfig("filter.first");
val secondFilter = config.getConfig("filter.second");
val filters = Array(firstFilter, secondFilter);
val stream = org.apache.spark.streaming.twitter.TwitterUtils.createStream( ssc, None, filters );
if ( schemaTweets == null ){
val schemaString = "id_str author fav_count retweet_count screen_name date lang text hashtags urls lat:double long:double"
schemaTweets =
StructType(
schemaString.split(" ").map(
fieldName => {
val ar = fieldName.split(":")
StructField(
ar.lift(0).get,
ar.lift(1).getOrElse("string") match{
case "int" => IntegerType
case "double" => DoubleType
case _ => StringType
},
true)
}
).union(
ToneAnalyzer.sentimentFactors.map( f => StructField( f._1, DoubleType )).toArray[StructField]
)
)
}
val tweets = stream.filter {
t =>
val tags = t.getText.split(" ").map(_.toLowerCase)
tags.contains("tutorial") ||
tags.contains("practical") ||
tags.contains("explained") ||
tags.contains("building") ||
tags.contains("learn") ||
tags.contains("comprehensive") ||
tags.contains("popular") ||
tags.contains("step") ||
tags.contains("debug") ||
tags.contains("debugging") ||
tags.contains("things") ||
tags.contains("making") ||
tags.contains("step-by-step") ||
tags.contains("guide") ||
tags.contains("build") ||
tags.contains("built") ||
tags.contains("make") ||
tags.contains("how") ||
tags.contains("popular") ||
tags.contains("learning") ||
tags.contains("learnt") ||
tags.contains("tips") ||
tags.contains("understanding") ||
tags.contains("modern")
val links = t.getURLEntities
links.length > 0
}
lazy val client = PooledHttp1Client()
val rowTweets = tweets.map(status=> {
val sentiment = ToneAnalyzer.computeSentiment( client, status, broadcastVar )
var colValues = Array[Any](
status.getId.toString,
status.getUser.getName, //author
status.getFavoriteCount.toString,
status.getRetweetCount.toString,
status.getUser.getScreenName,
status.getCreatedAt.toString, //date
status.getUser.getLang, //Lang
status.getText, //text
status.getHashtagEntities().map(_.getText()).mkString(" "),
status.getURLEntities().map(_.getText()).mkString(" "),
Option(status.getGeoLocation).map{ _.getLatitude}.getOrElse(0.0), //lat
Option(status.getGeoLocation).map{_.getLongitude}.getOrElse(0.0) //long
//exception
)
var scoreMap : Map[String, Double] = Map()
if ( sentiment != null ){
for( toneCategory <- Option(sentiment.tone_categories).getOrElse( Seq() )){
for ( tone <- Option( toneCategory.tones ).getOrElse( Seq() ) ){
scoreMap.put( tone.tone_id, tone.score )
}
}
}
colValues = colValues ++ ToneAnalyzer.sentimentFactors.map { f => (BigDecimal(scoreMap.get(f._2).getOrElse(0.0)).setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble) * 100.0 }
//Return [Row, (sentiment, status)]
(Row(colValues.toArray:_*),(sentiment, status))
})
rowTweets.foreachRDD( rdd => {
if(batchesProcessed==0){
canStopTwitterStream=false
}
try{
if( rdd.count > 0 ){
batchesProcessed += 1
workingRDD = sc.parallelize( rdd.map( t => t._1 ).collect()).union( workingRDD )
val saveToCloudant = broadcastVar.value.get("cloudant.save").get.toBoolean
if ( saveToCloudant ){
rdd.foreachPartition { iterator =>
var db: CouchDbApi = null;
val couch = CouchDb( broadcastVar.value.get("cloudant.hostName").get,
broadcastVar.value.get("cloudant.port").get.toInt,
broadcastVar.value.get("cloudant.https").get.toBoolean,
broadcastVar.value.get("cloudant.username").get,
broadcastVar.value.get("cloudant.password").get
);
val dbName = "spark-streaming-twitter"
couch.dbs.get(dbName).attemptRun match{
case -\/(e) => logger.trace("Couch Database does not exist, creating it now"); couch.dbs.create(dbName).run
case \/-(a) => println("Connected to cloudant db " + dbName )
}
val typeMapping = TypeMapping(classOf[ToneAnalyzer.Tweet] -> "Tweet")
db = couch.db(dbName, typeMapping)
iterator.foreach( t => {
saveTweetToCloudant( client, db, t._2._2, t._2._1 )
}
)
}
}
}
}catch{
case e: InterruptedException=>//Ignore
case e: Exception => logError(e.getMessage, e )
}finally{
canStopTwitterStream = true
}
})
}catch{
case e : Exception => logError(e.getMessage, e )
return
}
ssc.start()
println("Twitter stream started javascript + filter");
println(config.getConfig("filter.first"));
println(config.getConfig("filter.second"));
println("Tweets are collected real-time and analyzed")
println("To stop the streaming and start interacting with the data use: StreamingTwitter.stopTwitterStreaming")
if ( !stopAfter.isZero ){
//Automatically stop it after 10s
new Thread( new Runnable {
var displayMessage = true;
def run(){
Thread.sleep( stopAfter.milliseconds )
var loop = true
while(loop){
if (canStopTwitterStream){
stopTwitterStreaming
loop = false
}else{
if ( displayMessage ){
displayMessage = false
println("Received directive to stop twitter Stream: Waiting for already received tweets to be processed...")
}
Thread.sleep(5000L)
}
}
}
}).start
}
}
def saveTweetToCloudant(client: Client, db: CouchDbApi, status:Status, sentiment: ToneAnalyzer.Sentiment) : Status = {
if ( db != null){
logger.trace("Creating new Tweet in Couch Database " + status.getText())
val task:Task[Res.DocOk] = db.docs.create(
ToneAnalyzer.Tweet(
status.getUser().getName,
status.getCreatedAt().toString(),
status.getUser().getLang(),
status.getText(),
ToneAnalyzer.Geo(
Option(status.getGeoLocation).map{ _.getLatitude}.getOrElse(0.0),
Option(status.getGeoLocation).map{_.getLongitude}.getOrElse(0.0)
),
sentiment
)
)
// Execute the actions and process the result
task.attemptRun match {
case -\/(e) => logError(e.getMessage, e );
case \/-(a) => logger.trace("Successfully created new Tweet in Couch Database " + status.getText() )
}
}
status
}
def createTwitterDataFrames(sc: SparkContext) : (SQLContext, DataFrame) = {
if ( workingRDD.count <= 0 ){
println("No data receive. Please start the Twitter stream again to collect data")
return null
}
try{
val df = sqlContext.createDataFrame( workingRDD, schemaTweets )
df.registerTempTable("tweets")
println("A new table named tweets with " + df.count() + " records has been correctly created and can be accessed through the SQLContext variable")
println("Here's the schema for tweets")
df.printSchema()
(sqlContext, df)
}catch{
case e: Exception => {logError(e.getMessage, e ); return null}
}
}
def stopTwitterStreaming(){
if ( ssc == null){
println("No Twitter stream to stop");
return;
}
println("Stopping Twitter stream. Please wait this may take a while")
ssc.stop(stopSparkContext = false, stopGracefully = false)
ssc = null
println("Twitter stream stopped");
println( "You can now create a sqlContext and DataFrame with " + workingRDD.count + " Tweets created. Sample usage: ")
println("val (sqlContext, df) = com.ibm.cds.spark.samples.StreamingTwitter.createTwitterDataFrames(sc)")
println("df.printSchema")
println("sqlContext.sql(\\"select author, text from tweets\\").show")
}
}
| maxday/spark.samples | streaming-twitter/src/main/scala/com/ibm/cds/spark/samples/StreamingTwitter.scala | Scala | apache-2.0 | 12,475 |
package org.littlewings.tweetbot.job
import javax.enterprise.inject.spi.{Bean, CDI}
import javax.servlet.annotation.WebListener
import javax.servlet.{ServletContextEvent, ServletContextListener}
import org.apache.deltaspike.scheduler.spi.Scheduler
import org.littlewings.tweetbot.LoggerSupport
import org.quartz.Job
import scala.collection.JavaConverters._
@WebListener
class JobActivator extends ServletContextListener with LoggerSupport {
override def contextInitialized(sce: ServletContextEvent): Unit = {
val scheduler = CDI.current.select(classOf[Scheduler[Job]]).get
val beanManager = CDI.current.getBeanManager
val jobBeans =
beanManager
.getBeans(classOf[Job])
.asInstanceOf[java.util.Set[Bean[_ <: Job]]]
.asScala
logger.info("found JobBeans size = {}", jobBeans.size)
jobBeans.foreach { jobBean =>
logger.info("register JobClass = {}", jobBean)
scheduler.registerNewJob(jobBean.getBeanClass.asInstanceOf[Class[Job]])
}
}
override def contextDestroyed(sce: ServletContextEvent): Unit = ()
}
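// A minimal sketch of a Job bean this listener would discover and register,
// assuming DeltaSpike's @Scheduled annotation supplies the cron trigger (the
// class name and schedule below are illustrative):
//
// import org.apache.deltaspike.scheduler.api.Scheduled
// import org.quartz.JobExecutionContext
//
// @Scheduled(cronExpression = "0 0/10 * * * ?")
// class SampleTweetJob extends Job with LoggerSupport {
//   override def execute(context: JobExecutionContext): Unit =
//     logger.info("SampleTweetJob tick")
// }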
| kazuhira-r/tweet-bot | src/main/scala/org/littlewings/tweetbot/job/JobActivator.scala | Scala | mit | 1,080 |
/* Copyright 2009-2021 EPFL, Lausanne */
package stainless
package utils
import inox.utils._
import java.io.OutputStream
import scala.reflect._
import stainless.termination.{TerminationReport => TR}
class StainlessSerializer(override val trees: ast.Trees, serializeProducts: Boolean = false)
extends InoxSerializer(trees, serializeProducts) {
import trees._
final inline def stainlessClassSerializer[T: ClassTag](inline id: Int): (Class[_], Serializer[T]) =
classTag[T].runtimeClass -> stainlessClassSerializerMacro[T](this, id).asInstanceOf[Serializer[T]]
/** An extension to the set of registered classes in the `InoxSerializer`,
 * covering the additional tree types and flags that occur within Stainless programs.
*
* The new identifiers in the mapping range from 120 to 172.
*
* NEXT ID: 173
*/
override protected def classSerializers: Map[Class[_], Serializer[_]] =
super.classSerializers ++ Map(
stainlessClassSerializer[NoTree] (120),
stainlessClassSerializer[Error] (121),
stainlessClassSerializer[Require] (122),
stainlessClassSerializer[Annotated] (123),
stainlessClassSerializer[Ensuring] (124),
stainlessClassSerializer[Assert] (125),
stainlessClassSerializer[MatchExpr] (126),
stainlessClassSerializer[MatchCase] (127),
stainlessClassSerializer[WildcardPattern] (128),
stainlessClassSerializer[ADTPattern] (129),
stainlessClassSerializer[TuplePattern] (130),
stainlessClassSerializer[LiteralPattern[Any]](131),
stainlessClassSerializer[UnapplyPattern] (132),
stainlessClassSerializer[FiniteArray] (133),
stainlessClassSerializer[LargeArray] (134),
stainlessClassSerializer[ArraySelect] (135),
stainlessClassSerializer[ArrayUpdated] (136),
stainlessClassSerializer[ArrayLength] (137),
stainlessClassSerializer[SizedADT] (154),
stainlessClassSerializer[Passes] (158),
stainlessClassSerializer[Max] (160),
// Stainless ast Types
stainlessClassSerializer[ArrayType] (138),
stainlessClassSerializer[RecursiveType](152),
stainlessClassSerializer[ValueType] (153),
stainlessClassSerializer[AnnotatedType](157),
// Stainless Flags
stainlessClassSerializer[Extern.type] (139),
stainlessClassSerializer[Opaque.type] (140),
stainlessClassSerializer[DropVCs.type] (141),
stainlessClassSerializer[Library.type] (168),
stainlessClassSerializer[Derived] (142),
stainlessClassSerializer[IsField] (143),
stainlessClassSerializer[IsUnapply] (144),
stainlessClassSerializer[ClassParamInit] (170),
stainlessClassSerializer[DropConjunct.type](171),
stainlessClassSerializer[SplitVC.type] (172),
stainlessClassSerializer[TerminationStatus] (161),
stainlessClassSerializer[TR.Unknown.type] (162),
stainlessClassSerializer[TR.Terminating.type] (163),
stainlessClassSerializer[TR.NonTerminating.type] (164),
mappingSerializer[SymbolIdentifier](145)
(id => (id.globalId, id.id, id.symbol.path, id.symbol.id))
(p => new SymbolIdentifier(new Identifier(p._3.last, p._1, p._2), new Symbol(p._3, p._4))),
stainlessClassSerializer[PartialEval.type] (146),
stainlessClassSerializer[Law.type] (150),
stainlessClassSerializer[Ghost.type] (147),
stainlessClassSerializer[Private.type] (148),
stainlessClassSerializer[Final.type] (149),
stainlessClassSerializer[Decreases] (151),
stainlessClassSerializer[Erasable.type] (155),
stainlessClassSerializer[IndexedAt] (156),
stainlessClassSerializer[Wrapping.type] (159),
stainlessClassSerializer[Synthetic.type] (165),
stainlessClassSerializer[InlineInvariant.type](166),
stainlessClassSerializer[Lazy.type] (167),
stainlessClassSerializer[Template.type] (169),
)
}
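// A hedged sketch of how one more tree type would typically be registered in
// the map above (`MyNewExpr` is illustrative, not a real Stainless tree; ids
// must stay globally unique, which is what the "NEXT ID" comments track):
//
//   stainlessClassSerializer[MyNewExpr](173),  // then bump NEXT ID to 174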
class XLangSerializer(override val trees: extraction.xlang.Trees, serializeProducts: Boolean = false)
extends StainlessSerializer(trees, serializeProducts) {
import trees._
/** An extension to the set of registered classes in the `StainlessSerializer`,
 * covering the extraction (xlang) tree types that occur within Stainless programs.
*
* The new identifiers in the mapping range from 180 to 260.
*
* NEXT ID: 261
*/
override protected def classSerializers: Map[Class[_], Serializer[_]] =
super.classSerializers ++ Map(
// Termination trees
stainlessClassSerializer[Decreases](180),
// Induction trees
stainlessClassSerializer[TraceInduct.type] (244),
stainlessClassSerializer[Induct.type] (258),
// Inlining trees
stainlessClassSerializer[Inline.type] (181),
stainlessClassSerializer[InlineOnce.type](228),
// Inner-function trees
stainlessClassSerializer[LocalFunDef](183),
stainlessClassSerializer[LetRec] (184),
stainlessClassSerializer[ApplyLetRec](185),
stainlessClassSerializer[Outer] (186),
stainlessClassSerializer[Inner] (187),
// Imperative trees
stainlessClassSerializer[Block] (188),
stainlessClassSerializer[LetVar] (189),
stainlessClassSerializer[Assignment] (190),
stainlessClassSerializer[FieldAssignment] (191),
stainlessClassSerializer[While] (192),
stainlessClassSerializer[ArrayUpdate] (193),
stainlessClassSerializer[Old] (194),
stainlessClassSerializer[BoolBitwiseAnd] (195),
stainlessClassSerializer[BoolBitwiseOr] (196),
stainlessClassSerializer[BoolBitwiseXor] (197),
stainlessClassSerializer[IsVar.type] (198),
stainlessClassSerializer[IsMutable.type] (199),
stainlessClassSerializer[IsPure.type] (230),
stainlessClassSerializer[Snapshot] (239),
stainlessClassSerializer[MutableMapType] (248),
stainlessClassSerializer[MutableMapWithDefault] (249),
stainlessClassSerializer[MutableMapApply] (250),
stainlessClassSerializer[MutableMapUpdate] (251),
stainlessClassSerializer[MutableMapUpdated] (252),
stainlessClassSerializer[MutableMapDuplicate] (253),
stainlessClassSerializer[Swap] (259),
stainlessClassSerializer[FreshCopy] (260),
stainlessClassSerializer[Reads] (182),
stainlessClassSerializer[Modifies] (210),
// Object-oriented trees
stainlessClassSerializer[ClassConstructor] (200),
stainlessClassSerializer[ClassSelector] (201),
stainlessClassSerializer[IsInstanceOf] (202),
stainlessClassSerializer[AsInstanceOf] (203),
stainlessClassSerializer[ClassPattern] (204),
stainlessClassSerializer[InstanceOfPattern](205),
stainlessClassSerializer[ClassType] (206),
stainlessClassSerializer[AnyType] (207),
stainlessClassSerializer[NothingType] (208),
// `UnionType` and `IntersectionType` are package-private to `oo`
stainlessClassSerializer[TypeBounds] (209),
stainlessClassSerializer[ClassDef] (222),
stainlessClassSerializer[IsInvariant.type] (223),
stainlessClassSerializer[IsAbstract.type] (224),
stainlessClassSerializer[IsSealed.type] (225),
stainlessClassSerializer[Bounds] (226),
stainlessClassSerializer[Variance] (227),
stainlessClassSerializer[IsCaseObject.type](229),
stainlessClassSerializer[TypeSelect] (237),
stainlessClassSerializer[TypeApply] (254),
stainlessClassSerializer[TypeDef] (255),
stainlessClassSerializer[IsTypeMemberOf] (246),
// Inner classes trees
stainlessClassSerializer[LetClass] (232),
stainlessClassSerializer[LocalClassDef] (233),
stainlessClassSerializer[LocalMethodDef] (234),
stainlessClassSerializer[LocalMethodInvocation](240),
stainlessClassSerializer[LocalClassConstructor](235),
stainlessClassSerializer[LocalClassSelector] (241),
stainlessClassSerializer[LocalClassType] (236),
stainlessClassSerializer[LocalThis] (242),
stainlessClassSerializer[LocalTypeDef] (247),
// Throwing trees
stainlessClassSerializer[Throwing](211),
stainlessClassSerializer[Throw] (212),
stainlessClassSerializer[Try] (213),
stainlessClassSerializer[Return] (257),
// Methods trees
stainlessClassSerializer[This] (214),
stainlessClassSerializer[Super] (215),
stainlessClassSerializer[MethodInvocation](216),
stainlessClassSerializer[IsMethodOf] (217),
stainlessClassSerializer[IsAccessor] (231),
stainlessClassSerializer[ValueClass.type] (243),
// XLang trees
stainlessClassSerializer[Ignore.type] (218),
stainlessClassSerializer[FieldDefPosition](245),
stainlessClassSerializer[Import] (219),
stainlessClassSerializer[UnitDef] (220),
stainlessClassSerializer[ModuleDef] (221),
stainlessClassSerializer[UnknownType] (238),
stainlessClassSerializer[StrictBV.type] (256),
)
}
object Serializer {
def apply(t: ast.Trees, serializeProducts: Boolean = false): Serializer { val trees: t.type } =
(t match {
case xt: extraction.xlang.Trees => new XLangSerializer(xt)
case _ => new StainlessSerializer(t)
}).asInstanceOf[Serializer { val trees: t.type }]
}
| epfl-lara/stainless | core/src/main/scala/stainless/utils/Serialization.scala | Scala | apache-2.0 | 9,967 |
package honor
/**
* @author ponkotuy
* Date: 15/05/05.
*/
object HasUsers extends HonorCategory {
override def category: Int = 17
override def comment: String = "提督人数突破記念" // "Commemorating admiral-count milestones"
override def approved(memberId: Long, db: HonorCache): List[String] = List("提督500人突破", "提督1000人突破") // "500 admirals reached", "1000 admirals reached"
}
| b-wind/MyFleetGirls | server/app/honor/HasUsers.scala | Scala | mit | 322 |
/*
* Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.maven
import javax.inject.Inject
import org.apache.maven.execution.MavenSession
import scala.beans.BeanProperty
/**
* Internal goal, invoked by other Lagom mojos that work with multiple projects at once, to read plugin configuration
* for a project and set up the projects context values.
*/
class ConfigureMojo @Inject() (session: MavenSession) extends LagomAbstractMojo {
@BeanProperty
var lagomService: Boolean = _
@BeanProperty
var playService: Boolean = _
override def execute(): Unit = {
LagomKeys.LagomService.put(session.getCurrentProject, lagomService)
LagomKeys.PlayService.put(session.getCurrentProject, playService)
}
}
| edouardKaiser/lagom | dev/maven-plugin/src/main/scala/com/lightbend/lagom/maven/ConfigureMojo.scala | Scala | apache-2.0 | 767 |
/*
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.extensions.iterativebatch.compiler
package graph
import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.concurrent.{ ExecutionContext, Future }
import scala.runtime.BoxedUnit
import org.objectweb.asm.{ Opcodes, Type }
import org.objectweb.asm.signature.SignatureVisitor
import com.asakusafw.lang.compiler.extension.directio.DirectFileIoModels
import com.asakusafw.lang.compiler.model.graph.{ ExternalOutput, MarkerOperator }
import com.asakusafw.lang.compiler.planning.{ Plan, Planning, SubPlan }
import com.asakusafw.spark.compiler.graph.Instantiator
import com.asakusafw.spark.compiler.`package`._
import com.asakusafw.spark.compiler.planning._
import com.asakusafw.spark.compiler.util.SparkIdioms._
import com.asakusafw.spark.runtime.{ JobContext, RoundContext }
import com.asakusafw.spark.runtime.graph._
import com.asakusafw.spark.tools.asm._
import com.asakusafw.spark.tools.asm.MethodBuilder._
import com.asakusafw.spark.tools.asm4s._
import com.asakusafw.utils.graph.Graphs
import com.asakusafw.spark.extensions.iterativebatch.compiler.spi.RoundAwareNodeCompiler
import com.asakusafw.spark.extensions.iterativebatch.runtime.graph._
class IterativeJobClassBuilder(
plan: Plan)(
implicit context: IterativeJobCompiler.Context)
extends ClassBuilder(
Type.getType(s"L${GeneratedClassPackageInternalName}/${context.flowId}/graph/IterativeJob;"),
classOf[IterativeJob].asType) {
private val directOutputs = collectDirectOutputs(plan.getElements.toSet[SubPlan])
private def useDirectOut: Boolean = directOutputs.nonEmpty
private val subplans = Graphs.sortPostOrder(Planning.toDependencyGraph(plan)).toSeq.zipWithIndex
private val subplanToIdx = subplans.toMap
override def defFields(fieldDef: FieldDef): Unit = {
fieldDef.newField(
Opcodes.ACC_PRIVATE | Opcodes.ACC_FINAL,
"jobContext",
classOf[JobContext].asType)
fieldDef.newField(
Opcodes.ACC_PRIVATE | Opcodes.ACC_FINAL,
"nodes",
classOf[Seq[Node]].asType,
new TypeSignatureBuilder()
.newClassType(classOf[Seq[_]].asType) {
_.newTypeArgument(SignatureVisitor.INSTANCEOF, classOf[Node].asType)
})
if (useDirectOut) {
fieldDef.newField(
Opcodes.ACC_PRIVATE | Opcodes.ACC_FINAL,
"commit",
classOf[DirectOutputCommitForIterative].asType)
}
}
override def defConstructors(ctorDef: ConstructorDef): Unit = {
ctorDef.newInit(Seq(
classOf[JobContext].asType)) { implicit mb =>
val thisVar :: jobContextVar :: _ = mb.argVars
thisVar.push().invokeInit(
superType,
jobContextVar.push())
thisVar.push().putField("jobContext", jobContextVar.push())
val nodesVar = pushNewArray(classOf[Node].asType, subplans.size).store()
val broadcastsVar = pushObject(mutable.Map)
.invokeV("empty", classOf[mutable.Map[BroadcastId, Broadcast[_]]].asType)
.store()
subplans.foreach {
case (_, i) =>
thisVar.push().invokeV(s"node${i}", nodesVar.push(), broadcastsVar.push())
}
thisVar.push().putField(
"nodes", pushObject(Predef)
.invokeV(
"wrapRefArray",
classOf[mutable.WrappedArray[_]].asType,
nodesVar.push().asType(classOf[Array[AnyRef]].asType))
.asType(classOf[Seq[_]].asType))
if (useDirectOut) {
val setupVar = thisVar.push()
.invokeV("setup", classOf[DirectOutputSetupForIterative].asType)
.store()
val preparesVar = buildSet { builder =>
(0 until directOutputs.size).foreach { i =>
builder += thisVar.push()
.invokeV(
s"prepare${i}",
classOf[DirectOutputPrepareForIterative[_]].asType,
setupVar.push())
}
}.store()
val commitVar = thisVar.push()
.invokeV("commit", classOf[DirectOutputCommitForIterative].asType, preparesVar.push())
.store()
thisVar.push().putField(
"commit",
commitVar.push().asType(classOf[DirectOutputCommitForIterative].asType))
}
}
}
override def defMethods(methodDef: MethodDef): Unit = {
super.defMethods(methodDef)
methodDef.newMethod("nodes", classOf[Seq[Node]].asType, Seq.empty,
new MethodSignatureBuilder()
.newReturnType {
_.newClassType(classOf[Seq[_]].asType) {
_.newTypeArgument(SignatureVisitor.INSTANCEOF, classOf[Node].asType)
}
}) { implicit mb =>
val thisVar :: _ = mb.argVars
`return`(thisVar.push().getField("nodes", classOf[Seq[_]].asType))
}
methodDef.newMethod("doCommit", classOf[Future[Unit]].asType,
Seq(
classOf[RoundContext].asType,
classOf[Seq[RoundContext]].asType,
classOf[ExecutionContext].asType),
new MethodSignatureBuilder()
.newParameterType(classOf[RoundContext].asType)
.newParameterType {
_.newClassType(classOf[Seq[_]].asType) {
_.newTypeArgument(SignatureVisitor.INSTANCEOF, classOf[RoundContext].asType)
}
}
.newParameterType(classOf[ExecutionContext].asType)
.newReturnType {
_.newClassType(classOf[Future[_]].asType) {
_.newTypeArgument(SignatureVisitor.INSTANCEOF, classOf[BoxedUnit].asType)
}
}) { implicit mb =>
val thisVar :: originVar :: rcsVar :: ecVar :: _ = mb.argVars
if (useDirectOut) {
`return`(
thisVar.push().getField("commit", classOf[DirectOutputCommitForIterative].asType)
.invokeV(
"perform",
classOf[Future[Unit]].asType,
originVar.push(), rcsVar.push(), ecVar.push()))
} else {
`return`(
pushObject(Future)
.invokeV("successful", classOf[Future[_]].asType,
getStatic(classOf[BoxedUnit].asType, "UNIT", classOf[BoxedUnit].asType)
.asType(classOf[AnyRef].asType)))
}
}
subplans.foreach {
case (subplan, i) =>
methodDef.newMethod(
Opcodes.ACC_PRIVATE,
s"node${i}",
Seq(classOf[Array[Node]].asType, classOf[mutable.Map[BroadcastId, Broadcast[_]]].asType))(
defNodeMethod(subplan, i)(_))
}
if (useDirectOut) {
methodDef.newMethod(
Opcodes.ACC_PRIVATE,
"setup",
classOf[DirectOutputSetupForIterative].asType,
Seq.empty) { implicit mb =>
val thisVar :: _ = mb.argVars
val t = DirectOutputSetupForIterativeCompiler.compile(directOutputs.map(_._2))
val setup = pushNew(t)
setup.dup().invokeInit(thisVar.push().getField("jobContext", classOf[JobContext].asType))
`return`(setup)
}
directOutputs.toSeq.map(_._1).sortBy(subplanToIdx).zipWithIndex.foreach {
case (subplan, i) =>
methodDef.newMethod(
Opcodes.ACC_PRIVATE,
s"prepare${i}",
classOf[DirectOutputPrepareForIterative[_]].asType,
Seq(classOf[DirectOutputSetupForIterative].asType)) { implicit mb =>
val thisVar :: setupVar :: _ = mb.argVars
val t = DirectOutputPrepareForIterativeCompiler
.compile(subplan)(context.nodeCompilerContext)
val prepare = pushNew(t)
prepare.dup().invokeInit(
setupVar.push().asType(classOf[IterativeAction[_]].asType),
applySeq(
thisVar.push().getField("nodes", classOf[Seq[_]].asType),
ldc(subplanToIdx(subplan)))
.cast(classOf[DirectOutputPrepareEachForIterative[_]].asType),
thisVar.push().getField("jobContext", classOf[JobContext].asType))
`return`(prepare)
}
}
methodDef.newMethod(
Opcodes.ACC_PRIVATE,
"commit",
classOf[DirectOutputCommitForIterative].asType,
Seq(classOf[Set[DirectOutputPrepareForIterative[_]]].asType)) { implicit mb =>
val thisVar :: preparesVar :: _ = mb.argVars
val t = DirectOutputCommitForIterativeCompiler.compile(directOutputs.map(_._2))
val commit = pushNew(t)
commit.dup().invokeInit(
preparesVar.push(),
thisVar.push().getField("jobContext", classOf[JobContext].asType))
`return`(commit)
}
}
}
private def collectDirectOutputs(subplans: Set[SubPlan]): Set[(SubPlan, ExternalOutput)] = {
if (context.options.useOutputDirect) {
for {
subplan <- subplans
subPlanInfo = subplan.getAttribute(classOf[SubPlanInfo])
primaryOperator = subPlanInfo.getPrimaryOperator
if primaryOperator.isInstanceOf[ExternalOutput]
operator = primaryOperator.asInstanceOf[ExternalOutput]
info <- Option(operator.getInfo)
if DirectFileIoModels.isSupported(info)
} yield {
subplan -> operator
}
} else {
Set.empty
}
}
private def defNodeMethod(
subplan: SubPlan, i: Int)(
implicit mb: MethodBuilder): Unit = {
val thisVar :: nodesVar :: allBroadcastsVar :: _ = mb.argVars
val jobContextVar = thisVar.push().getField("jobContext", classOf[JobContext].asType).store()
val broadcastsVar =
buildMap { builder =>
for {
subPlanInput <- subplan.getInputs
inputInfo <- Option(subPlanInput.getAttribute(classOf[SubPlanInputInfo]))
if inputInfo.getInputType == SubPlanInputInfo.InputType.BROADCAST
broadcastInfo <- Option(subPlanInput.getAttribute(classOf[BroadcastInfo]))
} {
val prevSubPlanOutputs = subPlanInput.getOpposites
if (prevSubPlanOutputs.size == 1) {
val prevSubPlanOperator = prevSubPlanOutputs.head.getOperator
builder += (
context.broadcastIds.getField(subPlanInput.getOperator),
applyMap(
allBroadcastsVar.push(),
context.broadcastIds.getField(prevSubPlanOperator))
.cast(classOf[Broadcast[_]].asType))
} else {
val marker = subPlanInput.getOperator
val iterativeInfo = IterativeInfo.get(subPlanInput)
builder += (
context.broadcastIds.getField(marker),
newBroadcast(
marker,
subplan,
broadcastInfo,
iterativeInfo)(
() => buildSeq { builder =>
prevSubPlanOutputs.foreach { subPlanOutput =>
builder += tuple2(
nodesVar.push().aload(ldc(subplanToIdx(subPlanOutput.getOwner))),
context.branchKeys.getField(subPlanOutput.getOperator))
}
},
jobContextVar.push))
}
}
}.store()
val compiler = RoundAwareNodeCompiler.get(subplan)(context.nodeCompilerContext)
val nodeType = compiler.compile(subplan)(context.nodeCompilerContext)
val instantiator = compiler.instantiator
val nodeVar = instantiator.newInstance(
nodeType,
subplan,
subplanToIdx)(
Instantiator.Vars(jobContextVar, nodesVar, broadcastsVar))(
implicitly, context.instantiatorCompilerContext)
nodesVar.push().astore(ldc(i), nodeVar.push())
for {
subPlanOutput <- subplan.getOutputs
outputInfo <- Option(subPlanOutput.getAttribute(classOf[SubPlanOutputInfo]))
if outputInfo.getOutputType == SubPlanOutputInfo.OutputType.BROADCAST
broadcastInfo <- Option(subPlanOutput.getAttribute(classOf[BroadcastInfo]))
if subPlanOutput.getOpposites.exists(_.getOpposites.size == 1)
} {
val marker = subPlanOutput.getOperator
val iterativeInfo = IterativeInfo.get(subPlanOutput)
addToMap(
allBroadcastsVar.push(),
context.broadcastIds.getField(marker),
newBroadcast(
marker,
subplan,
broadcastInfo,
iterativeInfo)(
() => buildSeq { builder =>
builder += tuple2(
nodeVar.push().asType(classOf[Source].asType),
context.branchKeys.getField(marker))
},
jobContextVar.push))
}
`return`()
}
private def newBroadcast(
marker: MarkerOperator,
subplan: SubPlan,
broadcastInfo: BroadcastInfo,
iterativeInfo: IterativeInfo)(
nodes: () => Stack,
jobContext: () => Stack)(
implicit mb: MethodBuilder): Stack = {
val dataModelRef = marker.getInput.dataModelRef
val group = broadcastInfo.getFormatInfo
val broadcast = iterativeInfo.getRecomputeKind match {
case IterativeInfo.RecomputeKind.ALWAYS =>
pushNew(classOf[MapBroadcastAlways].asType)
case IterativeInfo.RecomputeKind.PARAMETER =>
pushNew(classOf[MapBroadcastByParameter].asType)
case IterativeInfo.RecomputeKind.NEVER =>
pushNew(classOf[MapBroadcastOnce].asType)
}
val label = Seq(
Option(subplan.getAttribute(classOf[SubPlanInfo]))
.flatMap(info => Option(info.getLabel)),
Option(subplan.getAttribute(classOf[NameInfo]))
.map(_.getName))
.flatten match {
case Seq() => "N/A"
case s: Seq[String] => s.mkString(":")
}
broadcast.dup()
val arguments = Seq.newBuilder[Stack]
arguments += nodes()
arguments += option(
sortOrdering(
dataModelRef.groupingTypes(group.getGrouping),
dataModelRef.orderingTypes(group.getOrdering)))
arguments += groupingOrdering(dataModelRef.groupingTypes(group.getGrouping))
arguments += partitioner(ldc(1))
arguments += ldc(label)
if (iterativeInfo.getRecomputeKind == IterativeInfo.RecomputeKind.PARAMETER) {
arguments +=
buildSet { builder =>
iterativeInfo.getParameters.foreach { parameter =>
builder += ldc(parameter)
}
}
}
arguments += jobContext()
broadcast.invokeInit(arguments.result: _*)
broadcast
}
}
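// Conceptually, each generated node${i} method expands to something like the
// following hand-written Scala (names are illustrative; the real code is
// emitted as bytecode through the ASM builders above):
//
//   private def node3(nodes: Array[Node], allBroadcasts: mutable.Map[BroadcastId, Broadcast[_]]): Unit = {
//     val broadcasts = ... // resolve this subplan's BROADCAST inputs from allBroadcasts,
//                          // or build new MapBroadcast* instances over upstream nodes
//     nodes(3) = new CompiledSubPlan3(broadcasts, jobContext) // compiled node for subplan 3
//     // then publish this subplan's own BROADCAST outputs back into allBroadcasts
//   }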
| asakusafw/asakusafw-spark | extensions/iterativebatch/compiler/core/src/main/scala/com/asakusafw/spark/extensions/iterativebatch/compiler/graph/IterativeJobClassBuilder.scala | Scala | apache-2.0 | 14,850 |
package kvstore
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.FunSuite
import akka.actor.ActorSystem
import akka.testkit.{ TestProbe, TestKit, ImplicitSender }
import scala.concurrent.duration._
import scala.util.Random
import scala.util.control.NonFatal
import kvstore.Arbiter.{ JoinedSecondary, Join }
import kvstore.Persistence.{ Persisted, Persist }
class Step2_SecondarySpec extends TestKit(ActorSystem("Step2SecondarySpec"))
with FunSuite
with BeforeAndAfterAll
with ShouldMatchers
with ImplicitSender
with Tools {
override def afterAll(): Unit = {
system.shutdown()
}
test("Case 1: Secondary (in isolation) should properly register itself to the provided Arbiter") {
val arbiter = TestProbe()
val secondary = system.actorOf(Replica.props(arbiter.ref, Persistence.props(flaky = false)), "case1-secondary")
arbiter.expectMsg(Join)
}
test("Case 2: Secondary (in isolation) must handle Snapshots") {
import Replicator._
val arbiter = TestProbe()
val replicator = TestProbe()
val secondary = system.actorOf(Replica.props(arbiter.ref, Persistence.props(flaky = false)), "case2-secondary")
val client = session(secondary)
arbiter.expectMsg(Join)
arbiter.send(secondary, JoinedSecondary)
client.get("k1") should be === None
replicator.send(secondary, Snapshot("k1", None, 0L))
replicator.expectMsg(SnapshotAck("k1", 0L))
client.get("k1") should be === None
replicator.send(secondary, Snapshot("k1", Some("v1"), 1L))
replicator.expectMsg(SnapshotAck("k1", 1L))
client.get("k1") should be === Some("v1")
replicator.send(secondary, Snapshot("k1", None, 2L))
replicator.expectMsg(SnapshotAck("k1", 2L))
client.get("k1") should be === None
}
test("Case 3: Secondary should drop and immediately ack snapshots with older sequence numbers") {
import Replicator._
val arbiter = TestProbe()
val replicator = TestProbe()
val secondary = system.actorOf(Replica.props(arbiter.ref, Persistence.props(flaky = false)), "case3-secondary")
val client = session(secondary)
arbiter.expectMsg(Join)
arbiter.send(secondary, JoinedSecondary)
client.get("k1") should be === None
replicator.send(secondary, Snapshot("k1", Some("v1"), 0L))
replicator.expectMsg(SnapshotAck("k1", 0L))
client.get("k1") should be === Some("v1")
replicator.send(secondary, Snapshot("k1", None, 0L))
replicator.expectMsg(SnapshotAck("k1", 0L))
client.get("k1") should be === Some("v1")
replicator.send(secondary, Snapshot("k1", Some("v2"), 1L))
replicator.expectMsg(SnapshotAck("k1", 1L))
client.get("k1") should be === Some("v2")
replicator.send(secondary, Snapshot("k1", None, 0L))
replicator.expectMsg(SnapshotAck("k1", 0L))
client.get("k1") should be === Some("v2")
}
test("Case 4: Secondary should drop snapshots with future sequence numbers") {
import Replicator._
val arbiter = TestProbe()
val replicator = TestProbe()
val secondary = system.actorOf(Replica.props(arbiter.ref, Persistence.props(flaky = false)), "case4-secondary")
val client = session(secondary)
arbiter.expectMsg(Join)
arbiter.send(secondary, JoinedSecondary)
client.get("k1") should be === None
replicator.send(secondary, Snapshot("k1", Some("v1"), 1L))
replicator.expectNoMsg(300.milliseconds)
client.get("k1") should be === None
replicator.send(secondary, Snapshot("k1", Some("v2"), 0L))
replicator.expectMsg(SnapshotAck("k1", 0L))
client.get("k1") should be === Some("v2")
}
} | M4573R/playground-notes | principles-of-reactive-programming/kvstore/src/test/scala/kvstore/Step2_SecondarySpec.scala | Scala | mit | 3,785 |
package scala.meta.internal.semanticdb.scalac
import org.scalameta.unreachable
import scala.{meta => m}
import scala.meta.internal.inputs._
import scala.meta.internal.{semanticdb => s}
trait DiagnosticOps { self: SemanticdbOps =>
implicit class XtensionCompilationUnitDiagnostics(unit: g.CompilationUnit) {
def reportedDiagnostics(mstarts: collection.Map[Int, m.Name]): List[s.Diagnostic] = {
unit.hijackedDiagnostics.map { case (gpos, gseverity, text) =>
val mpos: m.Position = {
// NOTE: The caret in unused import warnings points to Importee.pos, but
// the message position start/end point to the enclosing Import.pos.
// See https://github.com/scalameta/scalameta/issues/839
if (text == "Unused import") {
mstarts.get(gpos.point) match {
case Some(name) => name.pos
case None =>
if (unit.source.content(gpos.point) == '_') // Importee.Wildcard()
m.Position.Range(gpos.source.toInput, gpos.point, gpos.point + 1)
else gpos.toMeta
}
} else gpos.toMeta
}
val sseverity = gseverity match {
case 0 => s.Diagnostic.Severity.INFORMATION
case 1 => s.Diagnostic.Severity.WARNING
case 2 => s.Diagnostic.Severity.ERROR
case _ => unreachable
}
s.Diagnostic(Some(mpos.toRange), sseverity, text)
}
}
}
}
| scalameta/scalameta | semanticdb/scalac/library/src/main/scala/scala/meta/internal/semanticdb/scalac/DiagnosticOps.scala | Scala | bsd-3-clause | 1,453 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.tutorial
import cc.factorie._
import cc.factorie.app.nlp._
import cc.factorie.app.nlp.ner._
import cc.factorie.infer.InferByBPChain
import cc.factorie.model.{DotTemplateWithStatistics1, DotTemplateWithStatistics2, Parameters, TemplateModel}
import cc.factorie.optimize.{LikelihoodExample, Trainer}
import cc.factorie.variable.{BinaryFeatureVectorVariable, CategoricalVectorDomain, HammingObjective}
/**
* An example of a linear-chain CRF NER system that manually defines the model.
*
* For an example using actual factorie infrastructure see app.nlp.ner.ConllChainNer
*/
object ChainNERExample {
object TokenFeaturesDomain extends CategoricalVectorDomain[String]
class TokenFeatures(val token:Token) extends BinaryFeatureVectorVariable[String] {
def domain = TokenFeaturesDomain
}
val model = new TemplateModel with Parameters {
addTemplates(
// Bias term on each individual label
new DotTemplateWithStatistics1[BioConllNerTag] {
//def statisticsDomains = Tuple1(Conll2003NerDomain)
val weights = Weights(new la.DenseTensor1(BioConllNerDomain.size))
},
// Factor between label and observed token
new DotTemplateWithStatistics2[BioConllNerTag,TokenFeatures] {
//def statisticsDomains = ((Conll2003NerDomain, TokenFeaturesDomain))
val weights = Weights(new la.DenseTensor2(BioConllNerDomain.size, TokenFeaturesDomain.dimensionSize))
def unroll1(label: BioConllNerTag) = Factor(label, label.token.attr[TokenFeatures])
def unroll2(tf: TokenFeatures) = Factor(tf.token.attr[BioConllNerTag], tf)
},
// Transition factors between two successive labels
new DotTemplateWithStatistics2[BioConllNerTag, BioConllNerTag] {
//def statisticsDomains = ((Conll2003NerDomain, Conll2003NerDomain))
val weights = Weights(new la.DenseTensor2(BioConllNerDomain.size, BioConllNerDomain.size))
def unroll1(label: BioConllNerTag) = if (label.token.hasPrev) Factor(label.token.prev.attr[BioConllNerTag], label) else Nil
def unroll2(label: BioConllNerTag) = if (label.token.hasNext) Factor(label, label.token.next.attr[BioConllNerTag]) else Nil
}
)
}
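// The three templates above implement the usual linear-chain CRF scoring; for
// tokens x with features f and labeling y, the score is (sketch of the math):
//
//   score(y, x) = sum_i [ bias(y_i) + w_emit . phi(y_i, f(x_i)) + w_trans(y_{i-1}, y_i) ]
//
// Training below maximizes the conditional likelihood of this score, with
// InferByBPChain providing exact inference on the chain.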
def main(args:Array[String]): Unit = {
implicit val random = new scala.util.Random(0)
if (args.length != 2) throw new Error("Usage: ChainNER1 trainfile testfile")
val trainDocuments = load.LoadConll2003.fromFilename(args(0))
val testDocuments = load.LoadConll2003.fromFilename(args(1))
for (document <- trainDocuments ++ testDocuments; token <- document.tokens) {
val features = new TokenFeatures(token)
features += "W="+token.string
features += "SHAPE="+cc.factorie.app.strings.stringShape(token.string, 2)
token.attr += features
}
val trainLabelsSentences: Seq[Seq[LabeledBioConllNerTag]] = trainDocuments.map(_.tokens.toSeq.map(_.attr[LabeledBioConllNerTag]))
val testLabelsSentences: Seq[Seq[LabeledBioConllNerTag]] = testDocuments.map(_.tokens.toSeq.map(_.attr[LabeledBioConllNerTag]))
assert(!testLabelsSentences.contains(null))
// Train and test
println("*** Starting training (#sentences=%d)".format(trainDocuments.map(_.sentences.size).sum))
val start = System.currentTimeMillis
val examples = trainLabelsSentences.map(s => new LikelihoodExample(s, model, InferByBPChain))
Trainer.batchTrain(model.parameters, examples)
println("*** Starting inference (#sentences=%d)".format(testDocuments.map(_.sentences.size).sum))
testLabelsSentences.foreach {
variables => cc.factorie.infer.BP.inferChainMax(variables, model).setToMaximize(null)
}
println("test token accuracy=" + HammingObjective.accuracy(testLabelsSentences.flatten))
println("Total training took " + (System.currentTimeMillis - start) / 1000.0 + " seconds")
}
}
| patverga/factorie | src/main/scala/cc/factorie/tutorial/ChainNERExample.scala | Scala | apache-2.0 | 4,604 |
package play
import scala.language.implicitConversions
import play.api.Mode
import play.api.mvc._
import play.api.mvc.Results.NotFound
import play.core.Router
object navigator {
type Out = Handler
sealed trait PathElem
case class Static(name: String) extends PathElem {
override def toString = name
}
case object * extends PathElem
case object ** extends PathElem
trait Resources[T, Out] {
def index(): Out
def `new`(): Out
def create(): Out
def show(id: T): Out
def edit(id: T): Out
def update(id: T): Out
def delete(id: T): Out
}
trait PlayNavigator {
val self = this
def routesList = _routesList.toList
val _routesList = new collection.mutable.ListBuffer[Route[_]]
def addRoute[R <: Route[_]](route: R) = {
_routesList += route
route
}
def redirect(url: String, status: Int = controllers.Default.SEE_OTHER) = () => Action { controllers.Default.Redirect(url, status) }
def documentation = _documentation
lazy val _documentation = routesList.map { route =>
val (parts, _) = ((List[String](), route.args) /: route.routeDef.elems){
case ((res, x :: xs), *) => (res :+ ("[" + x + "]"), xs)
case ((res, xs), e) => (res :+ e.toString, xs)
}
(route.routeDef.method.toString, parts.mkString("/", "/", "") + route.routeDef.extString, route.args.mkString("(", ", ", ")"))
}
def onRouteRequest(request: RequestHeader) = {
routes.lift(request)
}
def onHandlerNotFound(request: RequestHeader) = {
NotFound(play.api.Play.maybeApplication.map {
case app if app.mode == Mode.Dev => views.html.defaultpages.devNotFound.f
case app => views.html.defaultpages.notFound.f
}.getOrElse(views.html.defaultpages.devNotFound.f)(request, Some(router)))
}
def _routes = new PartialFunction[RequestHeader, Handler] {
      private var _lastHandler: () => Handler = null // XXX: mutable shared state; assumes isDefinedAt is invoked immediately before apply for the same request
def apply(req: RequestHeader) = _lastHandler()
def isDefinedAt(req: RequestHeader) = {
routesList.view.map(_.unapply(req)).collectFirst { case Some(e) => e }.map { r =>
          _lastHandler = r // XXX: cache the matched handler so apply does not re-scan the route list
r
}.isDefined
}
}
def routes = _routes
// Provider for Play's 404 dev page
// This object is used ONLY for displaying routes documentation
val router = new play.core.Router.Routes {
def documentation = (("###", "play-navigator routes", "") +: _documentation) ++ play.api.Play.maybeApplication.flatMap(_.routes.map(r => ("###", "play standard routes (in conf/routes file)", "") +: r.documentation)).getOrElse(Nil)
def routes = _routes
def prefix = ""
def setPrefix(prefix: String) {}
}
sealed trait Method {
def on[R](routeDef: RouteDef[R]): R = routeDef.withMethod(this)
def matches(s: String) = this.toString == s
}
val root = RouteDef0(ANY, Nil)
implicit def stringToRouteDef0(name: String) = RouteDef0(ANY, Static(name) :: Nil)
implicit def asterixToRoutePath1(ast: *.type) = RouteDef1(ANY, ast :: Nil)
implicit def stringToStatic(name: String) = Static(name)
case object ANY extends Method {
override def matches(s: String) = true
}
// http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html
case object OPTIONS extends Method
case object GET extends Method
case object HEAD extends Method
case object POST extends Method
case object PUT extends Method
case object DELETE extends Method
case object TRACE extends Method
case object CONNECT extends Method
// trait BasicRoutePath {
// def parts: List[PathElem]
// def method: Method
// def ext: Option[String]
// def variableIndices = parts.zipWithIndex.collect { case (e,i) if e == * => i }
// def length = parts.length
// override def toString = method.toString + "\t/" + parts.mkString("/") + extString
// def extString = ext.map { "." + _ } getOrElse ""
// }
sealed trait Route[RD] {
def routeDef: RouteDef[RD]
def unapply(req: RequestHeader): Option[() => Out]
def basic(req: RequestHeader) = {
lazy val extMatched = (for { extA <- routeDef.ext; extB <- extractExt(req.path)._2 } yield extA == extB) getOrElse true
routeDef.method.matches(req.method) && extMatched
}
def splitPath(path: String) = extractExt(path)._1.split("/").dropWhile(_ == "").toList
def extractExt(path: String) = {
routeDef.ext.map { _ =>
path.reverse.split("\\.", 2).map(_.reverse).toList match {
case x :: p :: Nil => (p, Some(x))
case p :: Nil => (p, None)
case _ => ("/", None)
}
}.getOrElse((path, None))
}
def args: List[Manifest[_]]
}
case class Route0(routeDef: RouteDef0, f0: () => Out) extends Route[RouteDef0] {
def apply(ext: Option[String] = routeDef.ext) = Call(routeDef.method.toString, PathMatcher0(routeDef.elems, ext)())
def unapply(req: RequestHeader): Option[() => Out] =
if(basic(req)) PathMatcher0.unapply(routeDef.elems, splitPath(req.path), f0) else None
def args = Nil
}
sealed trait RouteDef[Self] {
def withMethod(method: Method): Self
def method: Method
def elems: List[PathElem]
def ext: Option[String]
def extString = ext map { "." + _ } getOrElse ""
}
case class RouteDef0(method: Method, elems: List[PathElem], ext: Option[String] = None) extends RouteDef[RouteDef0] {
def /(static: Static) = RouteDef0(method, elems :+ static)
def /(p: PathElem) = RouteDef1(method, elems :+ p)
def to(f0: () => Out) = addRoute(Route0(this.copy(elems = currentNamespace ::: elems), f0))
def withMethod(method: Method) = RouteDef0(method, elems)
def as(ext: String) = RouteDef0(method, elems, Some(ext))
def -->[M <: PlayModule](module: PlayNavigator => M) = withNamespace(elems.collect { case s @ Static(_) => s }){
module(PlayNavigator.this)
}
}
object PathMatcher0 {
def apply(elems: List[PathElem], ext: Option[String])(): String = elems.mkString("/", "/", ext.map { "." + _ } getOrElse "")
def unapply(elems: List[PathElem], parts: List[String], handler: () => Out): Option[() => Out] = (elems, parts) match {
case (Nil, Nil) => Some(handler)
case (Static(x) :: xs, y :: ys) if x == y => unapply(xs, ys, handler)
case _ => None
}
}
case class RouteDef1(method: Method, elems: List[PathElem], ext: Option[String] = None) extends RouteDef[RouteDef1]{
def /(static: Static) = RouteDef1(method, elems :+ static)
def /(p: PathElem) = RouteDef2(method, elems :+ p)
def to[A: PathParam : Manifest](f1: (A) => Out) = addRoute(Route1(this.copy(elems = currentNamespace ::: elems), f1))
def withMethod(method: Method) = RouteDef1(method, elems)
def as(ext: String) = RouteDef1(method, elems, Some(ext))
}
case class Route1[A: PathParam : Manifest](routeDef: RouteDef1, f1: (A) => Out) extends Route[RouteDef1] {
def apply(a: A, ext: Option[String] = routeDef.ext) = Call(routeDef.method.toString, PathMatcher1(routeDef.elems, ext)(a))
def unapply(req: RequestHeader): Option[() => Out] =
if(basic(req)) PathMatcher1.unapply(routeDef.elems, splitPath(req.path), f1) else None
def args = List(implicitly[Manifest[A]])
}
object PathMatcher1 {
def apply[A](elems: List[PathElem], ext: Option[String], prefix: List[PathElem] = Nil)(a: A)(implicit ppa: PathParam[A]): String = elems match {
case Static(x) :: rest => apply(rest, ext, prefix :+ Static(x))(a)
case (* | **) :: rest => PathMatcher0(prefix ::: Static(ppa(a)) :: rest, ext)()
case _ => PathMatcher0(elems, ext)()
}
def unapply[A](elems: List[PathElem], parts: List[String], handler: (A) => Out)(implicit ppa: PathParam[A]): Option[() => Out] = (elems, parts) match {
case (Static(x) :: xs, y :: ys) if x == y => unapply(xs, ys, handler)
case (* :: xs, ppa(a) :: ys) => PathMatcher0.unapply(xs, ys, () => handler(a))
case (** :: xs, ys) => ppa.unapply(ys.mkString("/")).map { a => () => handler(a) }
case _ => None
}
}
case class RouteDef2(method: Method, elems: List[PathElem], ext: Option[String] = None) extends RouteDef[RouteDef2]{
def /(static: Static) = RouteDef2(method, elems :+ static)
def /(p: PathElem) = RouteDef3(method, elems :+ p)
def to[A: PathParam : Manifest, B: PathParam : Manifest](f2: (A, B) => Out) = addRoute(Route2(this.copy(elems = currentNamespace ::: elems), f2))
def withMethod(method: Method) = RouteDef2(method, elems)
def as(ext: String) = RouteDef2(method, elems, Some(ext))
}
case class Route2[A: PathParam : Manifest, B: PathParam : Manifest](routeDef: RouteDef2, f2: (A, B) => Out) extends Route[RouteDef2] {
def apply(a: A, b: B, ext: Option[String] = routeDef.ext) = Call(routeDef.method.toString, PathMatcher2(routeDef.elems, ext)(a, b))
def unapply(req: RequestHeader): Option[() => Out] =
if(basic(req)) PathMatcher2.unapply(routeDef.elems, splitPath(req.path), f2) else None
def args = List(implicitly[Manifest[A]], implicitly[Manifest[B]])
}
object PathMatcher2 {
def apply[A, B](elems: List[PathElem], ext: Option[String], prefix: List[PathElem] = Nil)(a: A, b: B)(implicit ppa: PathParam[A], ppb: PathParam[B]): String = elems match {
case Static(x) :: rest => apply(rest, ext, prefix :+ Static(x))(a, b)
case (* | **) :: rest => PathMatcher1(prefix ::: Static(ppa(a)) :: rest, ext)(b)
case _ => PathMatcher0(elems, ext)()
}
def unapply[A, B](elems: List[PathElem], parts: List[String], handler: (A, B) => Out)(implicit ppa: PathParam[A], ppb: PathParam[B]): Option[() => Out] = (elems, parts) match {
case (Static(x) :: xs, y :: ys) if x == y => unapply(xs, ys, handler)
case (* :: xs, ppa(a) :: ys) => PathMatcher1.unapply(xs, ys, (b: B) => handler(a, b))
case _ => None
}
}
case class RouteDef3(method: Method, elems: List[PathElem], ext: Option[String] = None) extends RouteDef[RouteDef3]{
def /(static: Static) = RouteDef3(method, elems :+ static)
def /(p: PathElem) = RouteDef4(method, elems :+ p)
def to[A: PathParam : Manifest, B: PathParam : Manifest, C: PathParam : Manifest](f3: (A, B, C) => Out) = addRoute(Route3(this.copy(elems = currentNamespace ::: elems), f3))
def withMethod(method: Method) = RouteDef3(method, elems)
def as(ext: String) = RouteDef3(method, elems, Some(ext))
}
case class Route3[A: PathParam : Manifest, B: PathParam : Manifest, C: PathParam : Manifest](routeDef: RouteDef3, f3: (A, B, C) => Out) extends Route[RouteDef3] {
def apply(a: A, b: B, c: C, ext: Option[String] = routeDef.ext) = Call(routeDef.method.toString, PathMatcher3(routeDef.elems, ext)(a, b, c))
def unapply(req: RequestHeader): Option[() => Out] =
if(basic(req)) PathMatcher3.unapply(routeDef.elems, splitPath(req.path), f3) else None
def args = List(implicitly[Manifest[A]], implicitly[Manifest[B]], implicitly[Manifest[C]])
}
object PathMatcher3 {
def apply[A, B, C](elems: List[PathElem], ext: Option[String], prefix: List[PathElem] = Nil)(a: A, b: B, c: C)(implicit ppa: PathParam[A], ppb: PathParam[B], ppc: PathParam[C]): String = elems match {
case Static(x) :: rest => apply(rest, ext, prefix :+ Static(x))(a, b, c)
case (* | **) :: rest => PathMatcher2(prefix ::: Static(ppa(a)) :: rest, ext)(b, c)
case _ => PathMatcher0(elems, ext)()
}
def unapply[A, B, C](elems: List[PathElem], parts: List[String], handler: (A, B, C) => Out)(implicit ppa: PathParam[A], ppb: PathParam[B], ppc: PathParam[C]): Option[() => Out] = (elems, parts) match {
case (Static(x) :: xs, y :: ys) if x == y => unapply(xs, ys, handler)
case (* :: xs, ppa(a) :: ys) => PathMatcher2.unapply(xs, ys, (b: B, c: C) => handler(a, b, c))
case _ => None
}
}
case class RouteDef4(method: Method, elems: List[PathElem], ext: Option[String] = None) extends RouteDef[RouteDef4]{
def /(static: Static) = RouteDef4(method, elems :+ static)
def /(p: PathElem) = RouteDef5(method, elems :+ p)
def to[A: PathParam : Manifest, B: PathParam : Manifest, C: PathParam : Manifest, D: PathParam : Manifest](f4: (A, B, C, D) => Out) = addRoute(Route4(this.copy(elems = currentNamespace ::: elems), f4))
def withMethod(method: Method) = RouteDef4(method, elems)
def as(ext: String) = RouteDef4(method, elems, Some(ext))
}
case class Route4[A: PathParam : Manifest, B: PathParam : Manifest, C: PathParam : Manifest, D: PathParam : Manifest](routeDef: RouteDef4, f4: (A, B, C, D) => Out) extends Route[RouteDef4] {
def apply(a: A, b: B, c: C, d: D, ext: Option[String] = routeDef.ext) = Call(routeDef.method.toString, PathMatcher4(routeDef.elems, ext)(a, b, c, d))
def unapply(req: RequestHeader): Option[() => Out] =
if(basic(req)) PathMatcher4.unapply(routeDef.elems, splitPath(req.path), f4) else None
def args = List(implicitly[Manifest[A]], implicitly[Manifest[B]], implicitly[Manifest[C]], implicitly[Manifest[D]])
}
object PathMatcher4 {
def apply[A, B, C, D](elems: List[PathElem], ext: Option[String], prefix: List[PathElem] = Nil)(a: A, b: B, c: C, d: D)(implicit ppa: PathParam[A], ppb: PathParam[B], ppc: PathParam[C], ppd: PathParam[D]): String = elems match {
case Static(x) :: rest => apply(rest, ext, prefix :+ Static(x))(a, b, c, d)
case (* | **) :: rest => PathMatcher3(prefix ::: Static(ppa(a)) :: rest, ext)(b, c, d)
case _ => PathMatcher0(elems, ext)()
}
def unapply[A, B, C, D](elems: List[PathElem], parts: List[String], handler: (A, B, C, D) => Out)(implicit ppa: PathParam[A], ppb: PathParam[B], ppc: PathParam[C], ppd: PathParam[D]): Option[() => Out] = (elems, parts) match {
case (Static(x) :: xs, y :: ys) if x == y => unapply(xs, ys, handler)
case (* :: xs, ppa(a) :: ys) => PathMatcher3.unapply(xs, ys, (b: B, c: C, d: D) => handler(a, b, c, d))
case _ => None
}
}
case class RouteDef5(method: Method, elems: List[PathElem], ext: Option[String] = None) extends RouteDef[RouteDef5]{
def /(static: Static) = RouteDef5(method, elems :+ static)
def /(p: PathElem) = RouteDef6(method, elems :+ p)
def to[A: PathParam : Manifest, B: PathParam : Manifest, C: PathParam : Manifest, D: PathParam : Manifest, E: PathParam : Manifest](f5: (A, B, C, D, E) => Out) = addRoute(Route5(this.copy(elems = currentNamespace ::: elems), f5))
def withMethod(method: Method) = RouteDef5(method, elems)
def as(ext: String) = RouteDef5(method, elems, Some(ext))
}
case class Route5[A: PathParam : Manifest, B: PathParam : Manifest, C: PathParam : Manifest, D: PathParam : Manifest, E: PathParam : Manifest](routeDef: RouteDef5, f5: (A, B, C, D, E) => Out) extends Route[RouteDef5] {
def apply(a: A, b: B, c: C, d: D, e: E, ext: Option[String] = routeDef.ext) = Call(routeDef.method.toString, PathMatcher5(routeDef.elems, ext)(a, b, c, d, e))
def unapply(req: RequestHeader): Option[() => Out] =
if(basic(req)) PathMatcher5.unapply(routeDef.elems, splitPath(req.path), f5) else None
def args = List(implicitly[Manifest[A]], implicitly[Manifest[B]], implicitly[Manifest[C]], implicitly[Manifest[D]], implicitly[Manifest[E]])
}
object PathMatcher5 {
def apply[A, B, C, D, E](elems: List[PathElem], ext: Option[String], prefix: List[PathElem] = Nil)(a: A, b: B, c: C, d: D, e: E)(implicit ppa: PathParam[A], ppb: PathParam[B], ppc: PathParam[C], ppd: PathParam[D], ppe: PathParam[E]): String = elems match {
case Static(x) :: rest => apply(rest, ext, prefix :+ Static(x))(a, b, c, d, e)
case (* | **) :: rest => PathMatcher4(prefix ::: Static(ppa(a)) :: rest, ext)(b, c, d, e)
case _ => PathMatcher0(elems, ext)()
}
def unapply[A, B, C, D, E](elems: List[PathElem], parts: List[String], handler: (A, B, C, D, E) => Out)(implicit ppa: PathParam[A], ppb: PathParam[B], ppc: PathParam[C], ppd: PathParam[D], ppe: PathParam[E]): Option[() => Out] = (elems, parts) match {
case (Static(x) :: xs, y :: ys) if x == y => unapply(xs, ys, handler)
case (* :: xs, ppa(a) :: ys) => PathMatcher4.unapply(xs, ys, (b: B, c: C, d: D, e: E) => handler(a, b, c, d, e))
case _ => None
}
}
case class RouteDef6(method: Method, elems: List[PathElem], ext: Option[String] = None) extends RouteDef[RouteDef6]{
def /(static: Static) = RouteDef6(method, elems :+ static)
// def /(p: PathElem) = RouteDef7(method, elems :+ p)
def to[A: PathParam : Manifest, B: PathParam : Manifest, C: PathParam : Manifest, D: PathParam : Manifest, E: PathParam : Manifest, F: PathParam : Manifest](f6: (A, B, C, D, E, F) => Out) = addRoute(Route6(this.copy(elems = currentNamespace ::: elems), f6))
def withMethod(method: Method) = RouteDef6(method, elems)
def as(ext: String) = RouteDef6(method, elems, Some(ext))
}
case class Route6[A: PathParam : Manifest, B: PathParam : Manifest, C: PathParam : Manifest, D: PathParam : Manifest, E: PathParam : Manifest, F: PathParam : Manifest](routeDef: RouteDef6, f6: (A, B, C, D, E, F) => Out) extends Route[RouteDef6] {
def apply(a: A, b: B, c: C, d: D, e: E, f: F, ext: Option[String] = routeDef.ext) = Call(routeDef.method.toString, PathMatcher6(routeDef.elems, ext)(a, b, c, d, e, f))
def unapply(req: RequestHeader): Option[() => Out] =
if(basic(req)) PathMatcher6.unapply(routeDef.elems, splitPath(req.path), f6) else None
def args = List(implicitly[Manifest[A]], implicitly[Manifest[B]], implicitly[Manifest[C]], implicitly[Manifest[D]], implicitly[Manifest[E]], implicitly[Manifest[F]])
}
object PathMatcher6 {
def apply[A, B, C, D, E, F](elems: List[PathElem], ext: Option[String], prefix: List[PathElem] = Nil)(a: A, b: B, c: C, d: D, e: E, f: F)(implicit ppa: PathParam[A], ppb: PathParam[B], ppc: PathParam[C], ppd: PathParam[D], ppe: PathParam[E], ppf: PathParam[F]): String = elems match {
case Static(x) :: rest => apply(rest, ext, prefix :+ Static(x))(a, b, c, d, e, f)
case (* | **) :: rest => PathMatcher5(prefix ::: Static(ppa(a)) :: rest, ext)(b, c, d, e, f)
case _ => PathMatcher0(elems, ext)()
}
def unapply[A, B, C, D, E, F](elems: List[PathElem], parts: List[String], handler: (A, B, C, D, E, F) => Out)(implicit ppa: PathParam[A], ppb: PathParam[B], ppc: PathParam[C], ppd: PathParam[D], ppe: PathParam[E], ppf: PathParam[F]): Option[() => Out] = (elems, parts) match {
case (Static(x) :: xs, y :: ys) if x == y => unapply(xs, ys, handler)
case (* :: xs, ppa(a) :: ys) => PathMatcher5.unapply(xs, ys, (b: B, c: C, d: D, e: E, f: F) => handler(a, b, c, d, e, f))
case _ => None
}
}
trait PathParam[T]{
def apply(t: T): String
def unapply(s: String): Option[T]
}
def silent[T](f: => T) = try { Some(f) } catch { case _: Throwable => None }
implicit val IntPathParam: PathParam[Int] = new PathParam[Int] {
def apply(i: Int) = i.toString
def unapply(s: String) = silent(s.toInt)
}
implicit val LongPathParam: PathParam[Long] = new PathParam[Long] {
def apply(l: Long) = l.toString
def unapply(s: String) = silent(s.toLong)
}
implicit val DoublePathParam: PathParam[Double] = new PathParam[Double] {
def apply(d: Double) = d.toString
def unapply(s: String) = silent(s.toDouble)
}
implicit val FloatPathParam: PathParam[Float] = new PathParam[Float] {
def apply(f: Float) = f.toString
def unapply(s: String) = silent(s.toFloat)
}
implicit val StringPathParam: PathParam[String] = new PathParam[String] {
def apply(s: String) = s
def unapply(s: String) = Some(s)
}
implicit val BooleanPathParam: PathParam[Boolean] = new PathParam[Boolean] {
def apply(b: Boolean) = b.toString
def unapply(s: String) = s.toLowerCase match {
case "1" | "true" | "yes" => Some(true)
case "0" | "false" | "no" => Some(false)
case _ => None
}
}
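
  // Illustrative addition (not part of the original API): user code can
  // register PathParam instances for further types in exactly the same way,
  // e.g. UUID path segments backed by java.util.UUID.
  implicit val UuidPathParam: PathParam[java.util.UUID] = new PathParam[java.util.UUID] {
    def apply(u: java.util.UUID) = u.toString
    def unapply(s: String) = silent(java.util.UUID.fromString(s))
  }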
trait ResourcesRouting[T] {
val index: Route0
val `new`: Route0
val create: Route0
val show: Route1[T]
val edit: Route1[T]
val update: Route1[T]
val delete: Route1[T]
}
// resources
def resources[T : PathParam : Manifest](name: String, controller: Resources[T, Out]) = new ResourcesRouting[T] {
val index = GET on name to controller.index
val `new` = GET on name / "new" to controller.`new`
val create = POST on name to controller.create
val show = GET on name / * to controller.show
val edit = GET on name / * / "edit" to controller.edit
val update = PUT on name / * to controller.update
val delete = DELETE on name / * to controller.delete
}
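
  // Illustrative sketch (controller name hypothetical): given an
  //   object Users extends PlayResourcesController[Long] { ... }
  // wiring it in with
  //   val users = resources("users", Users)
  // yields the seven conventional REST routes:
  //   GET    /users        -> Users.index
  //   GET    /users/new    -> Users.`new`
  //   POST   /users        -> Users.create
  //   GET    /users/*      -> Users.show(id)
  //   GET    /users/*/edit -> Users.edit(id)
  //   PUT    /users/*      -> Users.update(id)
  //   DELETE /users/*      -> Users.delete(id)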
// namespace
protected val namespaceStack = new collection.mutable.Stack[Static]
def currentNamespace = namespaceStack.toList.reverse
def namespace(path: Static)(f: => Unit) = {
namespaceStack push path
f
namespaceStack.pop
}
def withNamespace[T](path: List[Static])(f: => T) = {
path.foreach { p => namespaceStack push p }
val r = f
path.foreach { p => namespaceStack.pop }
r
}
class Namespace(path: Static) extends DelayedInit {
def delayedInit(body: => Unit) = namespace(path)(body)
}
}
class PlayModule(parent: PlayNavigator) extends PlayNavigator with DelayedInit {
def delayedInit(body: => Unit) = withNamespace(parent.currentNamespace)(body)
}
trait PlayResourcesController[T] extends Resources[T, Handler] with Controller
}
| teamon/play-navigator | src/main/scala/play/navigator/PlayNavigator.scala | Scala | mit | 22,229 |
package com.github.mgoeminne.sitar.parser
import scala.util.parsing.combinator.JavaTokenParsers
trait CitationParser extends JavaTokenParsers
{
def citation: Parser[Citation]
}
| mgoeminne/sitar | src/main/scala/com/github/mgoeminne/sitar/parser/CitationParser.scala | Scala | mit | 181 |
package io.youi.http
import io.youi.http.content.Content
import io.youi.http.cookie.RequestCookie
import io.youi.net.{IP, URL}
case class HttpRequest(method: HttpMethod = HttpMethod.Get,
source: IP = IP.LocalHost,
url: URL = URL(),
headers: Headers = Headers.empty,
content: Option[Content] = None,
timestamp: Long = System.currentTimeMillis()) {
lazy val cookies: List[RequestCookie] = Headers.Request.`Cookie`.value(headers)
def withHeader(header: Header): HttpRequest = copy(headers = headers.withHeader(header))
def withHeader(key: String, value: String): HttpRequest = copy(headers = headers.withHeader(key, value))
def withContent(content: Content): HttpRequest = copy(content = Some(content))
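  // X-Forwarded-For may carry a comma-separated proxy chain; only the first
  // entry (the originating client) is kept, e.g. "203.0.113.7, 70.41.3.18"
  // yields 203.0.113.7. Falls back to `source` when the header is absent.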
def originalSource: IP = headers.first(Headers.Request.`X-Forwarded-For`).map {
case s if s.indexOf(',') != -1 => s.substring(0, s.indexOf(','))
case s => s
}.map(IP.apply).getOrElse(source)
} | outr/youi | core/shared/src/main/scala/io/youi/http/HttpRequest.scala | Scala | mit | 1,029 |
package qq
import qq.cc.Parser
import qq.data.{Definition, FilterAST, Program}
class ParserTest extends QQSyncTestSuite {
import qq.data.QQDSL._
"parse plain dots" in {
Parser.dot.parse(".").get.value.shouldBe(())
}
"parse selections" - {
"select key" in {
Parser.fullPath.parse(".key").get.value shouldBe Vector(selectKey("key"))
Parser.fullPath.parse(".viewUrl").get.value shouldBe Vector(selectKey("viewUrl"))
}
"select index" in {
Parser.selectIndex.parse("1").get.value shouldBe selectIndex(1)
Parser.selectIndex.parse("-1").get.value shouldBe selectIndex(-1)
}
}
"parse single path components" in {
Parser.pathComponent.parse("[\"filter \"]").get.value shouldBe Vector(selectKey("filter "))
}
"parse full paths" in {
Parser.fullPath.parse(".").get.value shouldBe Vector.empty
Parser.fullPath.parse(".[]").get.value shouldBe Vector(collectResults)
Parser.fullPath.parse(".key").get.value shouldBe Vector(selectKey("key"))
Parser.fullPath.parse(".[1]").get.value shouldBe Vector(selectIndex(1))
Parser.fullPath.parse(".[-1]").get.value shouldBe Vector(selectIndex(-1))
Parser.fullPath.parse(".[1][]").get.value shouldBe Vector(selectIndex(1), collectResults)
Parser.fullPath.parse(".key[]").get.value shouldBe Vector(selectKey("key"), collectResults)
Parser.fullPath.parse(""".key.otherkey.1.[1][].[1:3].["this key"]""").get.value shouldBe
Vector(selectKey("key"), selectKey("otherkey"), selectKey("1"),
selectIndex(1), collectResults, selectRange(1, 3), selectKey("this key"))
}
"parse path getters" in {
Parser.filter.parse(".key").get.value shouldBe getPathS(selectKey("key"))
}
"parse path setters" in {
Parser.filter.parse(".key = 1").get.value shouldBe setPath(Vector(selectKey("key")), constNumber(1))
}
"parse path modifiers" in {
Parser.filter.parse(".key |= . + 1").get.value shouldBe modifyPath(Vector(selectKey("key")), add(id, constNumber(1)))
}
"parse called filters" in {
Parser.callFilter.parse("test").get.value shouldBe call("test", Vector.empty)
Parser.callFilter.parse("test(.)").get.value shouldBe call("test", Vector(id))
Parser.callFilter.parse("test(.;.)").get.value shouldBe call("test", Vector(id, id))
}
"parse piped filters" in {
Parser.filter.parse(".key | .dang").get.value shouldBe
(getPathS(selectKey("key")) | selectKey("dang"))
Parser.filter.parse(".key | .dang | .hey").get.value shouldBe
(getPathS(selectKey("key")) | getPathS(selectKey("dang")) | selectKey("hey"))
Parser.filter.parse("(.key) | (.dang)").get.value shouldBe
(getPathS(selectKey("key")) | selectKey("dang"))
Parser.filter.parse("(.key) | (dang)").get.value shouldBe
(getPathS(selectKey("key")) | call("dang"))
}
"parse ensequenced filters" in {
Parser.filter.parse(".key, .dang").get.value shouldBe
ensequence(selectKey("key"), selectKey("dang"))
}
"parse enlisted filters" in {
Parser.enlistedFilter.parse("[.key, .dang]").get.value shouldBe
enlist(ensequence(selectKey("key"), selectKey("dang")))
}
"parse definitions" in {
Parser.definition.parse("def id: .;").get.value shouldBe
Definition("id", Vector.empty, id)
Parser.definition.parse("def id(f): .;").get.value shouldBe
Definition("id", Vector("f"), id)
Parser.definition.parse("def id(f; s): .;").get.value shouldBe
Definition("id", Vector("f", "s"), id)
}
"parse constants" - {
"integers" in {
Parser.filter.parse("1").get.value shouldBe constNumber(1)
Parser.filter.parse("4").get.value shouldBe constNumber(4)
}
"strings" in {
Parser.filter.parse(""""hello"""").get.value shouldBe constString("hello")
}
}
"parse math operators" in {
Parser.filter.parse(". + .").get.value shouldBe add(id, id)
Parser.filter.parse(". - .").get.value shouldBe subtract(id, id)
Parser.filter.parse(". * .").get.value shouldBe multiply(id, id)
Parser.filter.parse(". / .").get.value shouldBe divide(id, id)
Parser.filter.parse(". % .").get.value shouldBe modulo(id, id)
}
"parse enjected pairs" - {
"string key" in (Parser.enjectPair.parse("hello: id").get.value shouldBe Left("hello") -> call("id"))
"escaped string key" in (Parser.enjectPair.parse("\"hello\": id").get.value shouldBe Left("hello") -> call("id"))
"filter key" in (Parser.enjectPair.parse("(hello): id").get.value shouldBe Right(call("hello")) -> call("id"))
"sugar" in (Parser.enjectPair.parse("user").get.value shouldBe Left("user") -> getPathS(selectKey("user")))
}
"parse enjected filters" in {
Parser.filter.parse("{ sugar, user: \"user\", title: .titles[] }").get.value shouldBe
enject(
Vector(
Left("sugar") -> getPathS(selectKey("sugar")),
Left("user") -> constString("user"),
Left("title") -> getPath(Vector(selectKey("titles"), collectResults))
)
)
}
"parse full programs" - {
"with just a body" in (Parser.program.parse("id").get.value shouldBe
Program[FilterAST](Vector.empty[Definition[FilterAST]], call("id")))
"with small definitions" in (Parser.program.parse("def id: .; id").get.value shouldBe
Program[FilterAST](Vector(Definition[FilterAST]("id", Vector.empty, id)), call("id")))
"with whitespace at the start" in (Parser.program.parse(" .").get.value shouldBe
Program[FilterAST](Vector.empty[Definition[FilterAST]], id))
"with whitespace at the end" in (Parser.program.parse(". ").get.value shouldBe
Program[FilterAST](Vector.empty[Definition[FilterAST]], id))
}
"parse string literals" in {
Parser.escapedStringLiteral.parse(""""hello"""").get.value shouldBe "hello"
}
"parse string literal filters" in {
Parser.filter.parse(""""hello"""").get.value shouldBe constString("hello")
Parser.filter.parse(""""\\"""").get.value shouldBe constString("\\")
}
"parse enjected filter regression" in {
Parser.program.parse("{user, (.titleName[]): .titles[]}").get.value
// should just succeed
}
"precedence" in {
Parser.filter.parse("""., . | . + . * .""").get.value shouldBe ensequence(id, compose(id, add(id, multiply(id, id))))
Parser.filter.parse(""". * . + . | ., .""").get.value shouldBe ensequence(compose(add(multiply(id, id), id), id), id)
}
"dereference variables" - {
"literal parsing" in (Parser.dereference.parse("$hello").get.value shouldBe deref("hello"))
"in a filter call" in (Parser.filter.parse("f($hello)").get.value shouldBe call("f", Vector(deref("hello"))))
}
"as bindings" - {
"plain binding" in (Parser.asBinding.parse("$d as . in .").get.value shouldBe asBinding("d", id, id))
"in a filter" in (Parser.filter.parse(". | $d as . in .").get.value shouldBe compose(id, asBinding("d", id, id)))
}
}
| edmundnoble/slate | qq/shared/src/test/scala/qq/ParserTest.scala | Scala | mit | 6,890 |
package cs4r.labs.learningscala.otherexercises
/**
* Created by cs4r on 28/01/17.
*/
package object implicits {
class Greeter(s: String) {
def sayHello: String = "hola " + s + "!"
}
implicit def stringToGreeter(s: String) = new Greeter(s)
}
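
// Illustrative usage (not part of the original exercise): with the implicit
// conversion in scope, any String gains `sayHello`.
//
//   import cs4r.labs.learningscala.otherexercises.implicits._
//   "mundo".sayHello // "hola mundo!"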
| Cs4r/LearningScala | src/main/scala/cs4r/labs/learningscala/otherexercises/implicits.scala | Scala | gpl-3.0 | 259 |
package com.paytmlabs.akka.cluster.discovery
import scala.collection.JavaConverters.asJavaCollectionConverter
import scala.collection.JavaConverters.asScalaBufferConverter
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.actor.Cancellable
import akka.actor.Props
import akka.cluster.Cluster
import scala.collection.immutable.SortedSet
import scala.util.Try
import com.paytmlabs.akka.commons.util.ConfigUtils.RichConfig
object SelfJoinActor {
def props = Props(classOf[SelfJoinActor])
val provider = "self"
}
class SelfJoinActor extends Actor with ActorLogging {
import SelfJoinActor._
import context.dispatcher
import com.paytmlabs.akka.cluster.discovery.SeedActor._
val conf = context.system.settings.config.getConfig("akka.cluster")
val discoveryConf = conf.getConfig("discovery")
val interval = discoveryConf.getFiniteDuration("search-interval")
var timer: Option[Cancellable] = None
val eventStream = context.system.eventStream
val cluster = Cluster(context.system)
eventStream.subscribe(self, DiscoverCluster.getClass)
cluster.registerOnMemberUp {
log.info("Cluster is UP")
timer.foreach(_.cancel)
eventStream.unsubscribe(self)
context.stop(self)
}
override def preStart(): Unit = {
log.info(s"Starting SelfJoinActor")
}
override def postStop(): Unit = {
log.info(s"${this.getClass.getSimpleName} stopped successfully")
}
override def receive = {
case Tick =>
eventStream.publish(JoinSelf)
case DiscoverCluster =>
if (timer.isEmpty) {
log.info("Starting SelfJoin discovery ...")
timer = Some(context.system.scheduler.schedule(interval, interval, self, Tick))
}
}
}
| PaytmLabs/akka-batteries | discovery/core/src/main/scala/com/paytmlabs/akka/cluster/discovery/SelfJoinActor.scala | Scala | apache-2.0 | 1,707 |
object test:
import language.experimental.saferExceptions
import java.io.IOException
class Failure extends Exception
def bar(x: Int): Int throws Failure | IOException =
x match
case 1 => throw AssertionError()
case 2 => throw Failure() // ok
case 3 => throw java.io.IOException() // ok
case 4 => throw Exception() // error
case 5 => throw Throwable() // ok: Throwable is treated as unchecked
case _ => 0
def foo(x: Int): Int throws Exception = bar(x)
def baz(x: Int): Int throws Failure = bar(x) // error
| dotty-staging/dotty | tests/neg/saferExceptions.scala | Scala | apache-2.0 | 601 |
package org.jetbrains.plugins.scala.findUsages.factory
import scala.meta.inputs.Input.{File, LabeledString}
import scala.meta.inputs.{Input, Position}
import scala.meta.io.{AbsolutePath, Classpath, Sourcepath}
import scala.meta.semantic.Database
/** An object providing an API that relies on semanticdb to find implicit usages.
  * It is meant to be loaded with an isolated classloader, so its API only operates on plain Java objects.
*/
object SemanticDbUsagesProvider {
def occurrences(db: Database, needle: String): Array[(Position, String)] = {
db.sugars.toArray.collect {
case (pos: Position, sugar: String) if sugar.contains(needle) => (pos, sugar)
}
}
private def getPath(input: Input): Option[String] = input match {
case File(path, _) => Some(path.toString)
case LabeledString(path, _) => Some(path)
case _ => None // TODO: some cases might be missing
}
def findUsages(target: String, classpath: Array[String], sourcepath: String): Array[Array[AnyRef]] = {
val database = Database.load(
Classpath(classpath.map(AbsolutePath.apply)),
Sourcepath(sourcepath)
)
for {
(pos, sugar) <- occurrences(database, target)
path <- getPath(pos.input)
} yield Array(path.asInstanceOf[AnyRef], Int.box(pos.start.offset), sugar)
}
}
| loskutov/intellij-scala | src/org/jetbrains/plugins/scala/findUsages/factory/SemanticDbUsagesProvider.scala | Scala | apache-2.0 | 1,339 |
package ch20_linked_hash_map
class Node[K, V](var key: Option[K], var data: Option[V], var prev: Option[Node[K, V]], var next: Option[Node[K, V]],
var hNext: Option[Node[K, V]]) {
def this(key: Option[K], data: Option[V]) = this(key, data, None, None, None)
}
/**
  * LRU cache - https://leetcode.com/problems/lru-cache/ ; see the unit tests in LRUCacheTest
*
* @author [email protected]
*/
class LRUCache[K, V](var head: Node[K, V], var tail: Node[K, V], var table: Array[Node[K, V]],
capacity: Int = 1000, var elementCount: Int = 0) {
head.next = Some(tail)
tail.prev = Some(head)
def this(capacity: Int) = this(new Node(None, None), new Node(None, None), new Array[Node[K, V]](capacity), capacity)
def get(key: K): Option[V] = {
val index = indexFor(key.hashCode())
var hNode = table(index)
if (hNode == null) {
None
} else {
while (!hNode.key.get.equals(key) && hNode.hNext.isDefined) {
hNode = hNode.hNext.get
}
if (hNode.key.get.equals(key)) {
//move this to the end of the linked list
moveHNodeToTail(hNode)
hNode.data
} else {
None
}
}
}
  //put data into the linked hash map
  //1: check whether the key already exists in the linked list
  //2: if it does not exist, append it to the linked list
  //3: if it exists, move it to the tail of the linked list
  //4: return the old value if there was one
def put(key: K, value: V): Option[V] = {
if (elementCount == capacity) {
deleteLRUElement()
}
val node = new Node(Some(key), Some(value))
val index = indexFor(key.hashCode())
var hNode = table(index)
var result: Option[V] = None
if (hNode == null) {
      //the bucket is empty: append the new node to the tail of the linked list
node.prev = tail.prev
node.next = Some(tail)
tail.prev.get.next = Some(node)
tail.prev = Some(node)
table(index) = node
elementCount += 1
} else {
      //another key hashes to the same bucket
      //walk the bucket chain looking for a matching key
while (!hNode.key.get.equals(key) && hNode.hNext.isDefined) {
hNode = hNode.hNext.get
}
if (hNode.key.get.equals(key)) {
        //the key already exists: remember the old value
result = hNode.data
hNode.data = Some(value)
//move the node to the tail of the linked list
moveHNodeToTail(hNode)
        //the hNext pointer stays untouched
} else {
        //the key is not present in this bucket
        //append the new node to the tail of the linked list
node.prev = tail.prev
node.next = Some(tail)
tail.prev.get.next = Some(node)
tail.prev = Some(node)
        //append it to the tail of this bucket's hNext chain
        //iterate to the end of the chain
while (hNode.hNext.isDefined) {
hNode = hNode.hNext.get
}
hNode.hNext = Some(node)
elementCount += 1
}
}
result
}
private[this] def moveHNodeToTail(hNode: Node[K, V]) = {
hNode.prev.get.next = hNode.next
hNode.next.get.prev = hNode.prev
hNode.prev = tail.prev
hNode.next = Some(tail)
tail.prev.get.next = Some(hNode)
tail.prev = Some(hNode)
}
private[this] def deleteLRUElement(): Unit = {
//cache is full, start to delete element from the head
val node = head.next.get
//delete it from head
node.next.get.prev = Some(head)
head.next = node.next
//deal with hNext in the table
val index = indexFor(node.key.get.hashCode())
var hNode = table(index)
    //case 1: the evicted node is the first element of its bucket
    if (hNode.key.get.equals(node.key.get)) {
      hNode.hNext match {
        case Some(n) => table(index) = n
        case None => table(index) = null
      }
    } else {
      //case 2: the node is further down the bucket:
      //walk the hNext chain (not the recency list) to find it
      var hNodePrev = hNode
      hNode = hNode.hNext.get
      while (!hNode.key.get.equals(node.key.get)) {
        hNodePrev = hNode
        hNode = hNode.hNext.get
      }
      //hNodePrev now precedes hNode in the bucket chain: unlink hNode
      hNodePrev.hNext = hNode.hNext
    }
elementCount -= 1
}
private[this] def indexFor(hash: Int): Int = {
hash % table.length
}
}
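
/**
 * A small usage sketch (hypothetical; not from the original test suite)
 * showing eviction order with a capacity of 2.
 */
object LRUCacheDemo extends App {
  val cache = new LRUCache[Int, String](2)
  cache.put(1, "one")
  cache.put(2, "two")
  cache.get(1) // touches key 1, so key 2 becomes the least recently used
  cache.put(3, "three") // capacity reached: evicts key 2
  assert(cache.get(2).isEmpty)
  assert(cache.get(1).contains("one"))
  assert(cache.get(3).contains("three"))
}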
| wangzheng0822/algo | scala/src/main/scala/ch20_linked_hash_map/LRUCache.scala | Scala | apache-2.0 | 4,382 |
package com.lorandszakacs.util.list
import com.lorandszakacs.util.math.Identity
import scala.annotation.tailrec
/**
*
* @author Lorand Szakacs, [email protected]
* @since 12 Jul 2017
*
*/
object ListUtilFunctions extends ListUtilFunctions
trait ListUtilFunctions {
implicit class BuffedList[T](thisList: List[T]) {
/**
     * Replaces elements of [[thisList]] that identify with elements of ``that``,
     * leaving all others unchanged.
*/
def replace(that: List[T])(implicit id: Identity[T]): List[T] = {
if (that.isEmpty) {
thisList
}
else {
thisList.map { e =>
val tr = that.find(l => id.identifiesAs(l, e))
tr match {
case None => e
case Some(replacement) => replacement
}
}
}
}
def addOrReplace(that: List[T])(implicit id: Identity[T]): List[T] = {
if (that.isEmpty) {
thisList
}
else {
val (toReplace, toAdd) = that.partition(p => thisList.exists(e => id.identifiesAs(e, p)))
val replaced = this.replace(toReplace)
replaced ++ toAdd
}
}
def distinctById(implicit id: Identity[T]): List[T] = {
@tailrec
def accumulateUnique(left: List[T], acc: List[T]): List[T] = {
if (left.isEmpty) {
acc
}
else {
val head = left.head
val tail = left.tail
if (acc.exists(e => id.identifiesAs(head, e))) {
accumulateUnique(tail, acc)
}
else {
accumulateUnique(tail, acc :+ head)
}
}
}
accumulateUnique(thisList, List.empty[T])
}
}
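
  // Illustrative behavior sketch (hypothetical types; Identity's definition
  // lives elsewhere, and only identifiesAs(a, b): Boolean is used above):
  //
  //   case class User(id: Int, name: String)
  //   // given an Identity[User] that identifies users by id:
  //   List(User(1, "a"), User(2, "b")).replace(List(User(1, "A")))
  //     // -> List(User(1, "A"), User(2, "b"))
  //   List(User(1, "a"), User(1, "b")).distinctById
  //     // -> List(User(1, "a"))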
}
| lorandszakacs/sg-downloader | util/src/main/scala/com/lorandszakacs/util/list/ListUtilFunctions.scala | Scala | apache-2.0 | 1,717 |
package ar.edu.unq.tip.qsim.ui
/**
* Copyright 2014 Tatiana Molinari.
* Copyright 2014 Susana Rosito
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
import java.awt.Color
import scala.collection.JavaConversions.asScalaBuffer
import scala.collection.mutable.Map
import org.apache.commons.lang.StringUtils
import org.uqbar.arena.Application
import org.uqbar.arena.actions.MessageSend
import org.uqbar.arena.bindings.ObservableProperty
import org.uqbar.arena.layout.{ ColumnLayout, HorizontalLayout, VerticalLayout }
import org.uqbar.arena.widgets._
import org.uqbar.arena.widgets.tables.Column
import org.uqbar.arena.widgets.tables.Table
import org.uqbar.arena.windows.{ Dialog, Window, WindowOwner }
import org.uqbar.commons.utils.{ Observable, ReflectionUtils, When }
import ar.edu.unq.tpi.qsim.model.State._
import ar.edu.unq.tpi.qsim.model._
import ar.edu.unq.tpi.qsim.utils._
import com.uqbar.poo.aop.InitializerBehavior
class PuertosWindow(owner: WindowOwner, model: SimuladorAppmodel) extends Dialog[SimuladorAppmodel](owner, model) {
override def createFormPanel(mainPanel: Panel) = {
this.setTitle("Qsim - Puertos")
var form = new Panel(mainPanel)
form.setLayout(new VerticalLayout())
crearPanelDePuertos(form)
agregarBoton(form)
}
def agregarBoton(parent: Panel) {
var form = new Panel(parent)
form.setLayout(new HorizontalLayout())
new Label(form).setWidth(273)
val editable = new Button(form)
.setCaption("Editable")
.onClick(new MessageSend(model, "cambiarEdicion"))
}
def crearPanelDePuertos(parent: Panel) {
var panelForm = new Panel(parent)
var contador = 0
panelForm.setLayout(new ColumnLayout(4))
model.sim.busIO.puertos.celdas.foreach(puerto ⇒ {
val puertosPanel = new Panel(panelForm, puerto)
puertosPanel.setLayout(new ColumnLayout(2))
new Label(puertosPanel).setText(Util.toHex4(65520 + contador) + ":")
contador = contador + 1
val text = new TextBox(puertosPanel)
text.bindEnabled(new ObservableProperty(this.getModelObject(), "enabled"))
text.bindValueToProperty("value.hex")
text.withFilter(new TextFilter() {
def accept(event: TextInputEvent): Boolean = {
event.getPotentialTextResult().matches("[A-F0-9]{0,4}")
}
})
// text.bindBackground("state").setModelToView(new Transformer[Type, Color]() {
// def transform(element: Type) = element match {
// case NONE ⇒ Color.WHITE
// case PROGRAM ⇒ Color.LIGHT_GRAY
// case STORE ⇒ Color.BLUE
// case FECH_DECODE ⇒ Color.GREEN
// case EXECUTED ⇒ Color.CYAN
// case _ ⇒ null
// }
// })
})
}
} | molinarirosito/QSim_UI | src/main/scala/ar/edu/unq/tip/qsim/ui/PuertosWindow.scala | Scala | gpl-3.0 | 3,311 |
package com.github.tonivade.buildtiful
import org.apache.ivy.core.settings.IvySettings
import java.io.File
import org.apache.ivy.Ivy
import org.apache.ivy.plugins.resolver.IBiblioResolver
import org.apache.ivy.core.module.descriptor.ModuleDescriptor
import org.apache.ivy.core.retrieve.RetrieveOptions
import org.apache.ivy.core.resolve.ResolveOptions
import org.apache.ivy.core.module.id.ModuleRevisionId
import org.apache.ivy.core.module.descriptor.DefaultModuleDescriptor
import org.apache.ivy.core.report.ResolveReport
import org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor
import org.apache.ivy.core.module.descriptor.DependencyDescriptor
import Config._
object IvyTasks {
val ivy: Ivy = {
val ivySettings = new IvySettings()
ivySettings.setDefaultCache(cache)
val resolver = new IBiblioResolver()
resolver.setM2compatible(true)
resolver.setUsepoms(true)
resolver.setName("central")
ivySettings.addResolver(resolver)
ivySettings.setDefaultResolver(resolver.getName())
Ivy.newInstance(ivySettings)
}
def ivyDownload(build: Build): Task = {
() => module(build).map(resolve(_)).map(retrieve(_))
}
def module(build: Build) : Option[ModuleDescriptor] = {
val module = DefaultModuleDescriptor.newDefaultInstance(
ModuleRevisionId.newInstance(
build.project.groupId,
build.project.artifactId,
build.project.version
)
)
val dependencies =
createDependencies(module, build.dependencies.compile) ++
createDependencies(module, build.dependencies.test)
for {
dep <- dependencies
} yield module.addDependency(dep)
Some(module)
}
def createDependencies(module: ModuleDescriptor, deps: Seq[String]) : Seq[DependencyDescriptor] = {
deps.map(dep => {
val dependency = dep.split(":")
val revisionId = ModuleRevisionId.newInstance(dependency(0), dependency(1), dependency(2))
val dependencyDescriptor = new DefaultDependencyDescriptor(module, revisionId, false, false, false)
dependencyDescriptor.addDependencyConfiguration("default", "master")
dependencyDescriptor
})
}
def resolve(module: ModuleDescriptor) : ResolveReport = {
val options = new ResolveOptions()
options.setTransitive(true)
options.setDownload(true)
val report = ivy.resolve(module, options)
if (report.hasError()) {
// TODO: do not throw exception
throw new RuntimeException(report.getAllProblemMessages().toString())
}
report
}
def retrieve(resolveReport: ResolveReport) {
val module = resolveReport.getModuleDescriptor()
val options = new RetrieveOptions().setConfs(Array[String]("default"))
ivy.retrieve(
module.getModuleRevisionId(),
libs.getAbsolutePath() + "/[artifact](-[classifier]).[ext]",
options)
}
} | tonivade/buildtiful | src/main/scala/IvyTasks.scala | Scala | mit | 2,942 |
package com.pygmalios.reactiveinflux.spark.extensions
import com.pygmalios.reactiveinflux.spark.{DStreamExtensions, _}
import com.pygmalios.reactiveinflux.{PointNoTime, ReactiveInfluxDbName}
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.DStream
import scala.concurrent.duration.Duration
class PointDStreamExtensions[+T <: PointNoTime](dstream: DStream[T]) extends DStreamExtensions[T] {
override def saveToInflux()(implicit reactiveInfluxDbParams: ReactiveInfluxDbName,
awaitAtMost: Duration): Unit = {
dstream.foreachRDD { rdd: RDD[T] =>
rdd.saveToInflux()
}
}
}
| pygmalios/reactiveinflux-spark | src/main/scala/com/pygmalios/reactiveinflux/spark/extensions/PointDStreamExtensions.scala | Scala | apache-2.0 | 643 |
package com.twitter.finagle
import com.twitter.conversions.time._
import com.twitter.finagle.stats.{DefaultStatsReceiver, StatsReceiver}
import com.twitter.finagle.util._
import com.twitter.util._
import com.google.common.cache.{Cache, CacheBuilder}
import java.net.{InetAddress, SocketAddress, UnknownHostException}
import java.security.{PrivilegedAction, Security}
import java.util.concurrent.TimeUnit.SECONDS
import java.util.logging.{Level, Logger}
/**
* Indicates that a [[com.twitter.finagle.Resolver]] was not found for the
* given `scheme`.
*
* Resolvers are discovered via Finagle's [[com.twitter.finagle.util.LoadService]]
* mechanism. These exceptions typically suggest that there are no libraries
* on the classpath that define a Resolver for the given scheme.
*/
class ResolverNotFoundException(scheme: String)
extends Exception(
"Resolver not found for scheme \\"%s\\". Please add the jar containing this resolver to your classpath".format(scheme))
/**
* Indicates that multiple [[com.twitter.finagle.Resolver Resolvers]] were
* discovered for given `scheme`.
*
* Resolvers are discovered via Finagle's [[com.twitter.finagle.util.LoadService]]
* mechanism. These exceptions typically suggest that there are multiple
* libraries on the classpath with conflicting scheme definitions.
*/
class MultipleResolversPerSchemeException(resolvers: Map[String, Seq[Resolver]])
extends NoStacktrace
{
override def getMessage = {
val msgs = resolvers map { case (scheme, rs) =>
"%s=(%s)".format(scheme, rs.map(_.getClass.getName).mkString(", "))
} mkString(" ")
"Multiple resolvers defined: %s".format(msgs)
}
}
/**
* Indicates that a destination name string passed to a
* [[com.twitter.finagle.Resolver]] was invalid according to the destination
* name grammar [1].
*
* [1] http://twitter.github.io/finagle/guide/Names.html
*/
class ResolverAddressInvalid(addr: String)
extends Exception("Resolver address \\"%s\\" is not valid".format(addr))
/**
* A resolver binds a name, represented by a string, to a
* variable address. Resolvers have an associated scheme
* which is used for lookup so that names may be resolved
* in a global context.
*
* These are loaded by Finagle through the
* [[com.twitter.finagle.util.LoadService service loading mechanism]]. Thus, in
* order to implement a new resolver, a class implementing `Resolver` with a
* 0-arg constructor must be registered in a file named
* `META-INF/services/com.twitter.finagle.Resolver` included in the classpath; see
* Oracle's
* [[http://docs.oracle.com/javase/6/docs/api/java/util/ServiceLoader.html ServiceLoader]]
* documentation for further details.
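 *
 * A minimal custom resolver might look like this (illustrative sketch only):
 * {{{
 * class FixedResolver extends Resolver {
 *   val scheme = "fixed"
 *   def bind(arg: String) = Var.value(Addr.Bound(new java.net.InetSocketAddress(arg, 80)))
 * }
 * }}}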
*/
trait Resolver {
val scheme: String
def bind(arg: String): Var[Addr]
@deprecated("Use Resolver.bind", "6.7.x")
final def resolve(name: String): Try[Group[SocketAddress]] =
bind(name) match {
case Var.Sampled(Addr.Failed(e)) => Throw(e)
case va => Return(Group.fromVarAddr(va))
}
}
/**
* An abstract class version of Resolver for java compatibility.
*/
abstract class AbstractResolver extends Resolver
/**
* Resolver for inet scheme.
*
* The Var is refreshed after each TTL timeout, set from "networkaddress.cache.ttl",
* a Java Security Property. If "networkaddress.cache.ttl" is not set or set to
* a non-positive value, the Var is static and no future resolution is attempted.
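 *
 * For example (illustrative; the property must be set before this resolver
 * is constructed):
 * {{{
 * java.security.Security.setProperty("networkaddress.cache.ttl", "60")
 * }}}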
*/
object InetResolver {
def apply(): Resolver = apply(DefaultStatsReceiver)
def apply(statsReceiver: StatsReceiver): Resolver =
new InetResolver(statsReceiver.scope("inet").scope("dns"))
}
private[finagle] class InetResolver(statsReceiver: StatsReceiver) extends Resolver {
import InetSocketAddressUtil._
private[this] val CACHE_SIZE = 16000L
val scheme = "inet"
private[this] val latencyStat = statsReceiver.stat("lookup_ms")
private[this] val successes = statsReceiver.counter("successes")
private[this] val failures = statsReceiver.counter("failures")
private val log = Logger.getLogger(getClass.getName)
private val ttlOption = {
val t = Try(Option(java.security.AccessController.doPrivileged(
new PrivilegedAction[String] {
override def run(): String = Security.getProperty("networkaddress.cache.ttl")
}
)) map { s => s.toInt })
t match {
case Return(Some(value)) =>
if (value <= 0) {
log.log(Level.INFO,
"networkaddress.cache.ttl is set as non-positive value, DNS cache refresh turned off")
None
} else {
val duration = value.seconds
log.log(Level.CONFIG, "networkaddress.cache.ttl found to be %s".format(duration) +
" will refresh DNS every %s.".format(duration))
Some(duration)
}
case Return(None) =>
log.log(Level.INFO, "networkaddress.cache.ttl is not set, DNS cache refresh turned off")
None
case Throw(exc: NumberFormatException) =>
log.log(Level.WARNING,
"networkaddress.cache.ttl is set as non-number, DNS cache refresh turned off", exc)
None
case Throw(exc) =>
log.log(Level.WARNING, "Unexpected Exception is thrown when getting " +
"networkaddress.cache.ttl, DNS cache refresh turned off", exc)
None
}
}
private val timer = DefaultTimer.twitter
private[this] val addressCacheBuilder =
CacheBuilder.newBuilder().maximumSize(CACHE_SIZE)
private[this] val addressCache: Cache[String, Seq[InetAddress]] = ttlOption match {
case Some(t) => addressCacheBuilder.expireAfterWrite(t.inSeconds, SECONDS).build()
case None => addressCacheBuilder.build()
}
def bindWeightedHostPortsToAddr(hosts: Seq[WeightedHostPort]): Var[Addr] = {
def toAddr(whp: Seq[WeightedHostPort]): Future[Addr] = {
val elapsed = Stopwatch.start()
resolveWeightedHostPorts(whp, addressCache) map { addrs: Seq[SocketAddress] =>
Addr.Bound(addrs.toSet)
} onSuccess { _ =>
successes.incr()
latencyStat.add(elapsed().inMilliseconds)
} onFailure { _ =>
failures.incr()
} rescue {
case exc: UnknownHostException => Future.value(Addr.Neg: Addr)
case NonFatal(exc) => Future.value(Addr.Failed(exc): Addr)
}
}
Var.async(Addr.Pending: Addr) { u =>
toAddr(hosts) onSuccess { u() = _ }
ttlOption match {
case Some(ttl) =>
val updater = new Updater[Unit] {
val one = Seq(())
// Just perform one update at a time.
protected def preprocess(elems: Seq[Unit]) = one
protected def handle(unit: Unit) {
// This always runs in a thread pool; it's okay to block.
u() = Await.result(toAddr(hosts))
}
}
timer.schedule(ttl.fromNow, ttl) {
FuturePool.unboundedPool(updater())
}
case None =>
Closable.nop
}
}
}
/**
* Binds to the specified hostnames, and refreshes the DNS information periodically.
*/
def bind(hosts: String): Var[Addr] = Try(parseHostPorts(hosts)) match {
case Return(hp) =>
val whp = hp collect { case (host, port) =>
(host, port, 1D)
}
bindWeightedHostPortsToAddr(whp)
case Throw(exc) =>
Var.value(Addr.Failed(exc))
}
}
object NegResolver extends Resolver {
val scheme = "neg"
def bind(arg: String) = Var.value(Addr.Neg)
}
object NilResolver extends Resolver {
val scheme = "nil"
def bind(arg: String) = Var.value(Addr.Bound())
}
object FailResolver extends Resolver {
val scheme = "fail"
def bind(arg: String) = Var.value(Addr.Failed(new Exception(arg)))
}
private[finagle] abstract class BaseResolver(f: () => Seq[Resolver]) {
private[this] val inetResolver = InetResolver()
private[this] lazy val resolvers = {
val rs = f()
val log = Logger.getLogger(getClass.getName)
val resolvers = Seq(inetResolver, NegResolver, NilResolver, FailResolver) ++ rs
val dups = resolvers
.groupBy(_.scheme)
.filter { case (_, rs) => rs.size > 1 }
if (dups.size > 0) throw new MultipleResolversPerSchemeException(dups)
for (r <- resolvers)
log.info("Resolver[%s] = %s(%s)".format(r.scheme, r.getClass.getName, r))
resolvers
}
def get[T <: Resolver](clazz: Class[T]): Option[T] =
resolvers find { _.getClass isAssignableFrom clazz } map { _.asInstanceOf[T] }
private[this] sealed trait Token
private[this] case class El(e: String) extends Token
private[this] object Eq extends Token
private[this] object Bang extends Token
private[this] def delex(ts: Seq[Token]) =
ts map {
case El(e) => e
case Bang => "!"
case Eq => "="
} mkString ""
private[this] def lex(s: String) = {
s.foldLeft(List[Token]()) {
case (ts, '=') => Eq :: ts
case (ts, '!') => Bang :: ts
case (El(s) :: ts, c) => El(s+c) :: ts
case (ts, c) => El(""+c) :: ts
}
}.reverse
/**
* Resolve a group from an address, a string. Resolve uses
* `Resolver`s to do this. These are loaded via the Java
* [[http://docs.oracle.com/javase/6/docs/api/java/util/ServiceLoader.html ServiceLoader]]
* mechanism. The default resolver is "inet", resolving DNS
* name/port pairs.
*
* Target names have a simple grammar: The name of the resolver
* precedes the name of the address to be resolved, separated by
* an exclamation mark ("bang"). For example: inet!twitter.com:80
* resolves the name "twitter.com:80" using the "inet" resolver. If no
* resolver name is present, the inet resolver is used.
*
* Names resolved by this mechanism are also a
* [[com.twitter.finagle.LabelledGroup]]. By default, this name is
 * simply the `addr` string, but it can be overridden by prefixing
* a name separated by an equals sign from the rest of the addr.
* For example, the addr "www=inet!google.com:80" resolves
* "google.com:80" with the inet resolver, but the returned group's
* [[com.twitter.finagle.LabelledGroup]] name is "www".
*/
@deprecated("Use Resolver.eval", "6.7.x")
def resolve(addr: String): Try[Group[SocketAddress]] =
Try { eval(addr) } flatMap {
case Name.Path(_) =>
Throw(new IllegalArgumentException("Resolver.resolve does not support logical names"))
    case bound @ Name.Bound(_) =>
Return(NameGroup(bound))
}
/**
* Parse and evaluate the argument into a Name. Eval parses
* a simple grammar: a scheme is followed by a bang, followed
* by an argument:
* name := scheme ! arg
* The scheme is looked up from registered Resolvers, and the
* argument is passed in.
*
   * When `name` begins with the character '/' it is interpreted to be
   * a logical name whose interpretation is subject to a
* [[com.twitter.finagle.Dtab Dtab]].
*
   * Eval throws exceptions upon failure to parse the name or
   * when scheme lookup fails. Since names are late bound,
* binding failures are deferred.
*
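   * For example (illustrative):
   * {{{
   * val bound: Name = Resolver.eval("inet!twitter.com:80")
   * val logical: Name = Resolver.eval("/s/foo/bar") // subject to Dtab interpretation
   * }}}
   *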
* @see [[Resolvers.eval]] for Java support
*/
def eval(name: String): Name =
if (name startsWith "/") Name(name)
else {
val (resolver, arg) = lex(name) match {
case (Eq :: _) | (Bang :: _) =>
throw new ResolverAddressInvalid(name)
case El(scheme) :: Bang :: name =>
resolvers.find(_.scheme == scheme) match {
case Some(resolver) => (resolver, delex(name))
case None => throw new ResolverNotFoundException(scheme)
}
case ts => (inetResolver, delex(ts))
}
Name.Bound(resolver.bind(arg), name)
}
/**
* Parse and evaluate the argument into a (Name, label: String) tuple.
* Arguments are parsed with the same grammar as in `eval`. If a label is not
* provided (i.e. no "label=<addr>"), then the empty string is returned.
*
* @see [[Resolvers.evalLabeled]] for Java support
*/
def evalLabeled(addr: String): (Name, String) = {
val (label, rest) = lex(addr) match {
case El(n) :: Eq :: rest => (n, rest)
case rest => ("", rest)
}
(eval(delex(rest)), label)
}
}
/**
* The default [[Resolver]] used by Finagle.
*
* @see [[Resolvers]] for Java support.
*/
object Resolver extends BaseResolver(() => LoadService[Resolver]())
/**
* Java APIs for [[Resolver]].
*/
object Resolvers {
/**
* @see [[Resolver.eval]]
*/
def eval(name: String): Name =
Resolver.eval(name)
/**
* @see [[Resolver.evalLabeled]]
*/
def evalLabeled(addr: String): (Name, String) =
Resolver.evalLabeled(addr)
}
| folone/finagle | finagle-core/src/main/scala/com/twitter/finagle/Resolver.scala | Scala | apache-2.0 | 12,591 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.features.bindings
import scala.collection.JavaConverters._
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.k8s.{KubernetesConf, KubernetesDriverSpecificConf, SparkPod}
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.submit.PythonMainAppResource
class JavaDriverFeatureStepSuite extends SparkFunSuite {
test("Java Step modifies container correctly") {
val baseDriverPod = SparkPod.initialPod()
val sparkConf = new SparkConf(false)
val kubernetesConf = KubernetesConf(
sparkConf,
KubernetesDriverSpecificConf(
Some(PythonMainAppResource("local:///main.jar")),
"test-class",
"java-runner",
Seq("5 7")),
appResourceNamePrefix = "",
appId = "",
roleLabels = Map.empty,
roleAnnotations = Map.empty,
roleSecretNamesToMountPaths = Map.empty,
roleSecretEnvNamesToKeyRefs = Map.empty,
roleEnvs = Map.empty,
sparkFiles = Seq.empty[String])
val step = new JavaDriverFeatureStep(kubernetesConf)
val driverPod = step.configurePod(baseDriverPod).pod
val driverContainerwithJavaStep = step.configurePod(baseDriverPod).container
assert(driverContainerwithJavaStep.getArgs.size === 7)
val args = driverContainerwithJavaStep
.getArgs.asScala
assert(args === List(
"driver",
"--properties-file", SPARK_CONF_PATH,
"--class", "test-class",
"spark-internal", "5 7"))
}
}
| debugger87/spark | resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/bindings/JavaDriverFeatureStepSuite.scala | Scala | apache-2.0 | 2,321 |
package es.weso.rbe
import es.weso.rdf.nodes._
import es.weso.rdf._
import es.weso.utils.Debugging
/**
* RDF as graphs
*/
case class RDFAsGraph(rdf: RDFReader)
extends Graph[IRI,RDFNode] with Debugging {
def in: RDFNode => Seq[(IRI, RDFNode)] = { n =>
val in = rdf.triplesWithObject(n).map(t => (t.pred,t.subj)).toSeq
debugStep(s"In of $n = $in")
in
}
def out: RDFNode => Seq[(IRI, RDFNode)] = { n =>
debugStep(s"Calculating out of $n")
val out = rdf.triplesWithSubject(n).map(t => {
(t.pred,t.obj)
}).toSeq
debugStep(s"Out of $n = $out")
out
}
def nodes: Seq[RDFNode] = {
//TODO: extend to predicates and objects?
rdf.subjects().toSeq
}
def triples: Seq[(RDFNode, IRI, RDFNode)] = {
rdf.rdfTriples.map(t => (t.subj,t.pred,t.obj)).toSeq
}
}
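// Hedged note (not in the original source): viewed as a labeled directed graph,
// `in(n)` lists the incoming edges of n as (predicate, subject) pairs, while
// `out(n)` lists its outgoing edges as (predicate, object) pairs.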
| labra/ShExcala | src/main/scala/es/weso/rbe/RDFAsGraph.scala | Scala | mit | 822 |
package lila.game
private[game] final class GameJs(path: String, useCache: Boolean) {
def unsigned: String = useCache.fold(cached, readFromSource)
val placeholder = "--ranph--"
def sign(token: String) = unsigned.replace(placeholder, token)
private lazy val cached: String = readFromSource
private def readFromSource = {
val source = scala.io.Source fromFile path
source.mkString ~ { _ => source.close }
}
}
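// Hedged usage sketch (not in the original source): illustrates the intended
// sign/placeholder flow. The file path and token below are assumptions.
private[game] object GameJsExample {
  def main(args: Array[String]): Unit = {
    val js = new GameJs(path = "public/compiled/game.js", useCache = true)
    // sign() replaces every occurrence of the "--ranph--" placeholder with the token.
    val signed = js.sign("csrf-token-123")
    println(signed.length)
  }
}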
| Enigmahack/lila | modules/game/src/main/GameJs.scala | Scala | mit | 433 |
package com.timushev.sbt.updates
import sbt.Keys._
import com.timushev.sbt.updates.UpdatesKeys._
trait UpdatesPluginTasks {
def dependencyUpdatesDataTask =
(projectID, libraryDependencies, ivySbt, scalaVersion, scalaBinaryVersion, dependencyUpdatesExclusions, dependencyAllowPreRelease, streams)
.map(Reporter.dependencyUpdatesData)
def dependencyUpdatesTask =
(projectID, dependencyUpdatesData, dependencyUpdatesFailBuild, streams)
.map(Reporter.displayDependencyUpdates)
def writeDependencyUpdatesReportTask =
(projectID, dependencyUpdatesData, dependencyUpdatesReportFile, streams)
.map(Reporter.writeDependencyUpdatesReport)
}
| beni55/sbt-updates | src/main/scala/com/timushev/sbt/updates/UpdatesPluginTasks.scala | Scala | bsd-3-clause | 673 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.expression.proxies.primitives.operations.bitwise
import org.junit.Test
import org.scalaide.debug.internal.expression.Names.Java
import org.scalaide.debug.internal.expression.BaseIntegrationTest
import org.scalaide.debug.internal.expression.BaseIntegrationTestCompanion
import org.scalaide.debug.internal.expression.TestValues
class BitwiseShiftLeftWithZerosTest extends BaseIntegrationTest(BitwiseShiftLeftWithZerosTest) {
import TestValues.ValuesTestCase._
@Test
def `byte << sth`(): Unit = {
eval("byte << byte2", byte << byte2, Java.primitives.int)
eval("byte << short2", byte << short2, Java.primitives.int)
eval("byte << char2", byte << char2, Java.primitives.int)
eval("byte << int2", byte << int2, Java.primitives.int)
eval("byte << long2", byte << long2, Java.primitives.int)
expectReflectiveCompilationError("byte << float")
expectReflectiveCompilationError("byte << double")
}
@Test
def `short << sth`(): Unit = {
eval("short << byte2", short << byte2, Java.primitives.int)
eval("short << short2", short << short2, Java.primitives.int)
eval("short << char2", short << char2, Java.primitives.int)
eval("short << int2", short << int2, Java.primitives.int)
eval("short << long2", short << long2, Java.primitives.int)
expectReflectiveCompilationError("short << float")
expectReflectiveCompilationError("short << double")
}
@Test
def `char << sth`(): Unit = {
eval("char << byte2", char << byte2, Java.primitives.int)
eval("char << short2", char << short2, Java.primitives.int)
eval("char << char2", char << char2, Java.primitives.int)
eval("char << int2", char << int2, Java.primitives.int)
eval("char << long2", char << long2, Java.primitives.int)
expectReflectiveCompilationError("char << float")
expectReflectiveCompilationError("char << double")
}
@Test
def `int << sth`(): Unit = {
eval("int << byte2", int << byte2, Java.primitives.int)
eval("int << short2", int << short2, Java.primitives.int)
eval("int << char", int << char, Java.primitives.int)
eval("int << int2", int << int2, Java.primitives.int)
eval("int << long2", int << long2, Java.primitives.int)
expectReflectiveCompilationError("int << float")
expectReflectiveCompilationError("int << double")
}
@Test
def `long << sth`(): Unit = {
eval("long << byte2", long << byte2, Java.primitives.long)
eval("long << short2", long << short2, Java.primitives.long)
eval("long << char", long << char, Java.primitives.long)
eval("long << int2", long << int2, Java.primitives.long)
eval("long << long2", long << long2, Java.primitives.long)
expectReflectiveCompilationError("long << float")
expectReflectiveCompilationError("long << double")
}
@Test
def `float << sth`(): Unit = {
expectReflectiveCompilationError("float << byte2")
expectReflectiveCompilationError("float << short2")
expectReflectiveCompilationError("float << char")
expectReflectiveCompilationError("float << int2")
expectReflectiveCompilationError("float << long2")
expectReflectiveCompilationError("float << float2")
expectReflectiveCompilationError("float << double")
}
@Test
def `double << sth`(): Unit = {
expectReflectiveCompilationError("double << byte2")
expectReflectiveCompilationError("double << short2")
expectReflectiveCompilationError("double << char")
expectReflectiveCompilationError("double << int2")
expectReflectiveCompilationError("double << long2")
expectReflectiveCompilationError("double << float")
expectReflectiveCompilationError("double << double2")
}
@Test
def `'c' << 2L`(): Unit = eval("'c' << 2L", 'c' << 2L, Java.primitives.int)
@Test
def `1 << 2L`(): Unit = eval("1 << 2L", 1 << 2L, Java.primitives.int)
}
object BitwiseShiftLeftWithZerosTest extends BaseIntegrationTestCompanion
| scala-ide/scala-ide | org.scala-ide.sdt.debug.expression.tests/src/org/scalaide/debug/internal/expression/proxies/primitives/operations/bitwise/BitwiseShiftLeftWithZerosTest.scala | Scala | bsd-3-clause | 4,000 |
/**
 * FILE: LoadUserList.scala
 * PATH: /Codice/sgad/servertier/src/main/scala/sgad/servertier/businesslogic/operations
 * CREATION DATE: 3 March 2014
 * AUTHOR: ProTech
 * EMAIL: [email protected]
 *
 * This file is the property of the ProTech group and is released under the Apache v2 license.
 *
 * CHANGE LOG:
 * 2014-03-03 - Class created - Biancucci Maurizio
 */
package sgad.servertier.businesslogic.operations
import sgad.servertier.dataaccess.data.userdata.UserData
import sgad.servertier.dataaccess.databaseaccess.databasemanager.DataBaseManager
/**
 * Class handling the operation that selects the users a given user can interact with.
 */
class LoadUserList extends Operation{
	/**
	 * Computes the users a given user can interact with.
	 * @param userData Data of the user the operation is performed on.
	 * @param data Data accompanying the operation request.
	 * @param loginAuthorization Authorization to handle login requests. Defaults to false.
	 * @param registrationAuthorization Authorization to handle registration requests. Defaults to false.
	 * @param userAuthorization Authorization to handle user requests. Defaults to false.
	 * @param internalAuthorization Authorization to handle internal requests. Defaults to false.
	 * @return The string to return to the client.
	 */
def execute(userData: UserData, data: String, loginAuthorization: Boolean, registrationAuthorization: Boolean,
userAuthorization: Boolean, internalAuthorization: Boolean): String = {
if(userAuthorization)
{
try{
val mapData = decodeData(data)
				// The value of this if expression is the method's result
if(userData.getAuthenticationData.getAuthenticationString == mapData("authentication"))
{
val userList = DataBaseManager.loadRandomUsers(userData.getAuthenticationData.getUser, 5)
var answer = new StringBuilder("{ data: [")
var first = true
userList.foreach( (user: String) => {
if(!first)
answer ++= ","
else
first = false
answer ++= "\""+ user +"\"" })
(answer ++= "]}").toString()
}
else
"{data: false, authentication: false}"
}
catch {
case _: NoSuchElementException => return "{data: false, parameters: false }"
}
}
else
"{data: false, unauthorized: true}"
}
}
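// Hedged summary derived from the code above — the possible response shapes are:
//   success:            { data: ["user1","user2", ...] }
//   failed auth:        {data: false, authentication: false}
//   missing parameters: {data: false, parameters: false }
//   unauthorized:       {data: false, unauthorized: true}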
| protechunipd/SGAD | Codice/sgad/servertier/src/main/scala/sgad/servertier/businesslogic/operations/LoadUserList.scala | Scala | apache-2.0 | 2,356 |
trait TestTrait
trait TestTrait2
class Test(y: Int, yy: String)(implicit zx: Double) extends T(5) with TestTrait with TestTrait2{
class B { def ggg(y: String) = y }
val r = "ttt"
val t = "rrr"
val bbb = new B
bbb ggg r
}
class T(x: Int)
| VladimirNik/tasty | exttests/tests/classInh/Test.scala | Scala | bsd-3-clause | 247 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import org.apache.kafka.common.config.SslConfigs
import org.apache.kafka.common.protocol.SecurityProtocol
import org.junit.Before
class SslEndToEndAuthorizationTest extends EndToEndAuthorizationTest {
override protected def securityProtocol = SecurityProtocol.SSL
this.serverConfig.setProperty(SslConfigs.SSL_CLIENT_AUTH_CONFIG, "required")
override val clientPrincipal = "O=A client,CN=localhost"
override val kafkaPrincipal = "O=A server,CN=localhost"
@Before
override def setUp {
startSasl(jaasSections(List.empty, None, ZkSasl))
super.setUp
}
}
| wangcy6/storm_app | frame/kafka-0.11.0/kafka-0.11.0.1-src/core/src/test/scala/integration/kafka/api/SslEndToEndAuthorizationTest.scala | Scala | apache-2.0 | 1,410 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.utils.T
import org.scalatest.{FlatSpec, Matchers}
@com.intel.analytics.bigdl.tags.Parallel
class ConcatTableSpec extends FlatSpec with Matchers {
"A ConcateTable" should "return right output and grad" in {
val ct = new ConcatTable[Float]()
ct.add(new Identity[Float]())
ct.add(new Identity[Float]())
val input = T(Tensor[Float](
Storage(Array(1f, 2, 3))),
T(
Tensor[Float](Storage(Array(4f, 3, 2, 1)))
)
)
val output = ct.forward(input)
output should be (T(input, input))
val gradOutput1 = T(
Tensor(Storage[Float](Array(0.1f, 0.2f, 0.3f))),
T(
Tensor(Storage[Float](Array(0.4f, 0.3f, 0.2f, 0.1f)))
)
)
val gradOutput = T(gradOutput1, gradOutput1)
val gradInput = ct.updateGradInput(input, gradOutput)
ct.accGradParameters(input, gradOutput)
gradInput should be (T(
Tensor(Storage[Float](Array(0.2f, 0.4f, 0.6f))),
T(
Tensor(Storage[Float](Array(0.8f, 0.6f, 0.4f, 0.2f)))
)
))
}
"ConcatTable" should "work properly after clearState()" in {
val model = Sequential[Float]()
model.add(ConcatTable().add(Identity()).add(Identity()))
model.add(ParallelTable().add(Reshape(Array(3, 2))).add(Reshape(Array(3, 2))))
model.add(ConcatTable().add(Identity()))
val input = Tensor[Float](2, 3)
model.forward(input)
model.backward(input, model.output)
model.clearState()
model.modules(2).clearState()
val input2 = Tensor[Float](2, 3)
model.forward(input2)
model.backward(input2, model.output)
}
}
| psyyz10/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/ConcatTableSpec.scala | Scala | apache-2.0 | 2,366 |
package lila.tournament
import chess.Color
import lila.game.{ Game, PovRef, IdGenerator }
import org.joda.time.DateTime
case class Pairing(
id: String, // game Id
tourId: String,
status: chess.Status,
user1: String,
user2: String,
winner: Option[String],
turns: Option[Int],
berserk1: Int,
berserk2: Int) {
def gameId = id
def users = List(user1, user2)
def usersPair = user1 -> user2
def contains(user: String): Boolean = user1 == user || user2 == user
def contains(u1: String, u2: String): Boolean = contains(u1) && contains(u2)
def notContains(user: String) = !contains(user)
def opponentOf(userId: String) =
if (userId == user1) user2.some
else if (userId == user2) user1.some
else none
def finished = status >= chess.Status.Mate
def playing = !finished
def quickFinish = finished && turns.??(20 >)
def quickDraw = draw && turns.??(20 >)
def notSoQuickFinish = finished && turns.??(14 <=)
def wonBy(user: String): Boolean = winner.??(user ==)
def lostBy(user: String): Boolean = winner.??(user !=)
def draw: Boolean = finished && winner.isEmpty
def colorOf(userId: String): Option[Color] =
if (userId == user1) Color.White.some
else if (userId == user2) Color.Black.some
else none
def berserkOf(userId: String): Int =
if (userId == user1) berserk1
else if (userId == user2) berserk2
else 0
def validBerserkOf(userId: String): Int =
notSoQuickFinish ?? berserkOf(userId)
def povRef(userId: String): Option[PovRef] =
colorOf(userId) map { PovRef(gameId, _) }
def similar(other: Pairing) = other.contains(user1, user2)
}
private[tournament] object Pairing {
case class LastOpponents(hash: Map[String, String]) extends AnyVal
def apply(tourId: String, u1: String, u2: String): Pairing = new Pairing(
id = IdGenerator.game,
tourId = tourId,
status = chess.Status.Created,
user1 = u1,
user2 = u2,
winner = none,
turns = none,
berserk1 = 0,
berserk2 = 0)
case class Prep(tourId: String, user1: String, user2: String) {
def toPairing(firstGetsWhite: Boolean) =
if (firstGetsWhite) Pairing(tourId, user1, user2)
else Pairing(tourId, user2, user1)
}
def prep(tour: Tournament, ps: (Player, Player)) = Pairing.Prep(tour.id, ps._1.userId, ps._2.userId)
def prep(tour: Tournament, u1: String, u2: String) = Pairing.Prep(tour.id, u1, u2)
def prep(tour: Tournament, p1: Player, p2: Player) = Pairing.Prep(tour.id, p1.userId, p2.userId)
}
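// Hedged usage sketch (illustrative only): exercises the pure helpers on a freshly
// created Pairing. The tournament and user ids are assumptions; Pairing.apply
// draws a fresh game id from IdGenerator.
object PairingExample {
  def main(args: Array[String]): Unit = {
    val pairing = Pairing("tour-id", "alice", "bob")
    assert(pairing.contains("alice", "bob"))
    assert(pairing.colorOf("alice").contains(Color.White)) // user1 plays White
    assert(pairing.opponentOf("bob").contains("alice"))
    assert(pairing.playing) // status is Created, i.e. not finished
  }
}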
| clarkerubber/lila | modules/tournament/src/main/Pairing.scala | Scala | agpl-3.0 | 2,532 |
// @GENERATOR:play-routes-compiler
// @SOURCE:/media/ajay/D/my_projects/play-silhouette-angular-seed-2/conf/routes
// @DATE:Mon Sep 28 10:26:16 IST 2015
import play.api.mvc.{ QueryStringBindable, PathBindable, Call, JavascriptLiteral }
import play.core.routing.{ HandlerDef, ReverseRouteContext, queryString, dynamicString }
import _root_.controllers.Assets.Asset
// @LINE:7
package com.tuplejump.playYeoman {
// @LINE:7
class ReverseYeoman(_prefix: => String) {
def _defaultPrefix: String = {
if (_prefix.endsWith("/")) "" else "/"
}
// @LINE:7
def index(): Call = {
import ReverseRouteContext.empty
Call("GET", _prefix)
}
}
} | ajaygeorge91/play-silhouette-neo4j-angular-seed | target/scala-2.11/routes/main/com/tuplejump/playYeoman/ReverseRoutes.scala | Scala | apache-2.0 | 686 |
// Copyright: 2010 - 2016 Rory Graves, Sam Halliday
// License: http://www.apache.org/licenses/LICENSE-2.0
package com.acme
object F@foo@oo {
def ba@bar@r(a@a@: Int): Int = 2
}
| rorygraves/pcplod | pcplod/src/test/resources/com/acme/foo.scala | Scala | apache-2.0 | 181 |
package io.chymyst.jc
import io.chymyst.jc.Macros.{getName, rawTree}
import io.chymyst.test.LogSpec
import org.scalatest.BeforeAndAfterEach
import io.chymyst.test.Common._
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
class MacrosSpec extends LogSpec with BeforeAndAfterEach {
val warmupTimeMs = 200L
var tp0: Pool = _
def waitSome(): Unit = Thread.sleep(warmupTimeMs)
override def beforeEach(): Unit = {
tp0 = FixedPool(4)
}
override def afterEach(): Unit = {
tp0.shutdownNow()
}
behavior of "reaction sha1"
it should "compute different reaction sha1 for different conditions" in {
val a = m[Int]
val b = m[Int]
val reaction1 = go { case b(x) if x < 0 ⇒ }
val reaction2 = go { case a(x) if x < 0 ⇒ }
val reaction3 = go { case a(x) if x > 0 ⇒ }
reaction1.info.sha1 should not equal reaction2.info.sha1
reaction2.info.sha1 should not equal reaction3.info.sha1
reaction3.info.sha1 should not equal reaction1.info.sha1
}
it should "compute the same reaction sha1 regardless of molecule order" in {
val a = m[Int]
val b = m[Int]
val reaction1 = go { case a(x) + b(y) if x < 0 ⇒ }
val reaction2 = go { case b(y) + a(x) if x < 0 ⇒ }
val reaction3 = go { case b(y) + a(x) if x < 0 => }
reaction1.info.sha1 shouldEqual reaction2.info.sha1
reaction1.info.sha1 shouldEqual reaction3.info.sha1
}
behavior of "reaction site"
it should "track whether molecule emitters are bound" in {
val a = new M[Unit]("a123")
val b = new M[Unit]("b")
val c = new M[Unit]("")
a.toString shouldEqual "a123"
b.toString shouldEqual "b"
c.toString shouldEqual "<no name>"
a.isBound shouldEqual false
b.isBound shouldEqual false
c.isBound shouldEqual false
site(go { case a(_) + c(_) => b() })
a.isBound shouldEqual true
b.isBound shouldEqual false
c.isBound shouldEqual true
val expectedReaction = "<no name> + a123 → ..."
// These methods are private to the package!
a.emittingReactions shouldEqual Set()
b.emittingReactions.size shouldEqual 1
b.emittingReactions.map(_.toString) shouldEqual Set(expectedReaction)
c.emittingReactions shouldEqual Set()
a.consumingReactions.length shouldEqual 1
a.consumingReactions.head.toString shouldEqual expectedReaction
b.consumingReactions shouldEqual Array()
c.consumingReactions shouldEqual a.consumingReactions
}
behavior of "macros for defining new molecule emitters"
it should "fail to compute correct names when molecule emitters are defined together" in {
val (counter, fetch) = (m[Int], b[Unit, String])
counter.name shouldEqual fetch.name
counter.name should fullyMatch regex "x\\\\$[0-9]+"
}
it should "compute correct names and classes for molecule emitters" in {
val a = m[Option[(Int, Int, Map[String, Boolean])]] // complicated type
a.isInstanceOf[M[_]] shouldEqual true
a.toString shouldEqual "a"
val s = b[Map[(Boolean, Unit), Seq[Int]], Option[List[(Int, Option[Map[Int, String]])]]] // complicated type
s.isInstanceOf[B[_, _]] shouldEqual true
s.toString shouldEqual "s/B"
}
it should "create an emitter of class M[Unit] for m[Unit]" in {
val a = m[Unit]
a.isInstanceOf[M[Unit]] shouldEqual true
}
it should "create an emitter of class B[Int, Unit] for b[Int, Unit]" in {
val a = b[Int, Unit]
a.isInstanceOf[B[Int, Unit]] shouldEqual true
}
it should "create an emitter of class B[Unit, Int] for b[Unit, Int]" in {
val a = b[Unit, Int]
a.isInstanceOf[B[Unit, Int]] shouldEqual true
}
it should "create an emitter of class B[Unit, Unit] for b[Unit, Unit]" in {
val a = b[Unit, Unit]
a.isInstanceOf[B[Unit, Unit]] shouldEqual true
}
behavior of "macros for inspecting a reaction body"
it should "correctly sort input molecules with compound values and Option" in {
val bb = m[(Int, Option[Int])]
val reaction = go { case bb((1, Some(2))) + bb((0, None)) => }
reaction.info.toString shouldEqual "bb((0,None)) + bb((1,Some(2))) → "
}
it should "correctly sort input molecules with compound values" in {
val bb = m[(Int, Int)]
val reaction = go { case bb((1, 2)) + bb((0, 3)) + bb((4, _)) => }
reaction.info.toString shouldEqual "bb((0,3)) + bb((1,2)) + bb(?) → "
}
it should "inspect reaction body with default clause that declares a static molecule" in {
val a = m[Int]
val reaction = go { case _ => a(123) }
reaction.info.inputs shouldEqual Nil
reaction.info.guardPresence.noCrossGuards shouldEqual true
reaction.info.outputs shouldEqual List(OutputMoleculeInfo(a, ConstOutputPattern(123), List()))
}
it should "inspect reaction body containing local molecule emitters" in {
val a = m[Int]
val reaction =
go { case a(x) =>
val q = m[Int]
val s = m[Unit]
go { case q(_) + s(_) => }
q(0)
}
reaction.info.inputs.toList should matchPattern { case List(InputMoleculeInfo(`a`, 0, SimpleVarInput('x, _), `simpleVarXSha1`, _)) => }
reaction.info.outputs shouldEqual List()
}
it should "inspect reaction body with embedded site" in {
val a = m[Int]
val bb = m[Int]
val f = b[Unit, Int]
site(tp0)(
go { case f(_, r) + bb(x) => r(x) },
go { case a(x) =>
val p = m[Int]
site(tp0)(go { case p(y) => bb(y) })
p(x + 1)
}
)
a(1)
f.timeout()(1000 millis) shouldEqual Some(2)
}
it should "inspect reaction body with embedded site and go" in {
val a = m[Int]
val bb = m[Int]
val f = b[Unit, Int]
site(tp0)(
go { case f(_, r) + bb(x) => r(x) },
go { case a(x) =>
val p = m[Int]
site(tp0)(go { case p(y) => bb(y) })
p(x + 1)
}
)
a(1)
f.timeout()(1000 millis) shouldEqual Some(2)
}
val simpleVarXSha1 = ""
val constantNoneSha1 = "6EEF6648406C333A4035CD5E60D0BF2ECF2606D7"
val wildcardSha1 = ""
val constantZeroSha1 = "8227489534FBEA1F404CAAEC9F4CCAEEB9EF2DC1"
val constantOneSha1 = "356A192B7913B04C54574D18C28D46E6395428AB"
it should "inspect a two-molecule reaction body with None" in {
val a = m[Int]
val bb = m[Option[Int]]
val result = go { case a(x) + bb(None) => bb(None) }
(result.info.inputs.toList match {
case List(
InputMoleculeInfo(`a`, 0, SimpleVarInput('x, _), sha_a, Symbol("Int")),
InputMoleculeInfo(`bb`, 1, ConstInputPattern(None), sha_bb, Symbol("Option[Int]"))
) =>
sha_a shouldEqual simpleVarXSha1
sha_bb shouldEqual constantNoneSha1
true
case _ => false
}) shouldEqual true
result.info.outputs shouldEqual List(OutputMoleculeInfo(bb, ConstOutputPattern(None), List()))
result.info.guardPresence shouldEqual GuardAbsent
result.info.sha1 shouldEqual "C10342E86F1AEB8992D97883B15773F4A2DBCF1F"
}
val ax_qq_reaction_sha1 = "E2D62113017684CECF8542301354A82BF5BB5EC3"
it should "inspect a two-molecule reaction body" in {
val a = m[Int]
val qq = m[Unit]
val result = go { case a(x) + qq(_) => qq() }
(result.info.inputs.toList match {
case List(
InputMoleculeInfo(`a`, 0, SimpleVarInput('x, _), `simpleVarXSha1`, 'Int),
InputMoleculeInfo(`qq`, 1, WildcardInput, sha_qq, 'Unit)
) =>
sha_qq shouldEqual wildcardSha1
true
case _ => false
}) shouldEqual true
result.info.outputs shouldEqual List(OutputMoleculeInfo(qq, ConstOutputPattern(()), List()))
result.info.guardPresence shouldEqual AllMatchersAreTrivial
result.info.sha1 shouldEqual ax_qq_reaction_sha1
}
it should "compute reaction sha1 independently of input molecule order" in {
val a = m[Int]
val qq = m[Unit]
val result = go { case a(x) + qq(_) => qq() }
result.info.sha1 shouldEqual ax_qq_reaction_sha1
// This reaction is different only in the order of input molecules, so its sha1 must be the same.
val result2 = go { case qq(_) + a(x) => qq() }
result2.info.sha1 shouldEqual ax_qq_reaction_sha1
}
it should "compile reaction with blocking molecule inside a non-blocking molecule with warnings" in {
val a = m[Int]
val c = m[Int]
val f = b[Unit, Int]
val status = site(go { case a(x) + c(_) + f(_, r) ⇒ c(f() + 1); r(x) })
status shouldEqual WarningsAndErrors(List("Possible deadlock: molecule f/B may deadlock due to outputs of {a(x) + c(_) + f/B(_) → f/B() + c(?)}", "Possible deadlock: molecule (f/B) may deadlock due to (c) among the outputs of {a(x) + c(_) + f/B(_) → f/B() + c(?)}"), Nil, "Site{a + c + f/B → ...}")
}
it should "compute reaction sha1 independently of guard order" in {
val a = m[Int]
val result = go { case a(x) + a(y) if x > 1 && y > 1 => a(x + y) }
// This reaction is different only in the order of guards, so its sha1 must be the same.
val result2 = go { case a(x) + a(y) if y > 1 && x > 1 => a(x + y) }
result.info.sha1 shouldEqual result2.info.sha1
}
it should "inspect a reaction body with another molecule and extra code" in {
val a = m[Int]
val qqq = m[String]
object testWithApply {
def apply(x: Int): Int = x + 1
}
val result = go {
case a(_) + a(x) + a(1) =>
a(x + 1)
if (x > 0) a(testWithApply(123))
println(x)
qqq("")
}
(result.info.inputs.toList match {
case List(
InputMoleculeInfo(`a`, 0, WildcardInput, `wildcardSha1`, 'Int),
InputMoleculeInfo(`a`, 1, SimpleVarInput('x, _), `simpleVarXSha1`, 'Int),
InputMoleculeInfo(`a`, 2, ConstInputPattern(1), sha_a, 'Int)
) =>
sha_a shouldEqual constantOneSha1
true
case _ => false
}) shouldEqual true
result.info.outputs.toList should matchPattern { case List(
OutputMoleculeInfo(`a`, OtherOutputPattern, List(NotLastBlock(_))),
OutputMoleculeInfo(`a`, OtherOutputPattern, List(NotLastBlock(_), ChooserBlock(_, 0, 2))),
OutputMoleculeInfo(`qqq`, ConstOutputPattern(""), List())
) =>
}
result.info.guardPresence shouldEqual GuardAbsent
}
it should "inspect reaction body with embedded reaction" in {
val a = m[Int]
val qq = m[Unit]
val result = go { case a(x) => go { case qq(_) => a(0) }; qq() }
result.info.inputs.toList should matchPattern {
case List(InputMoleculeInfo(`a`, 0, SimpleVarInput('x, _), `simpleVarXSha1`, _)) =>
}
result.info.outputs shouldEqual List(OutputMoleculeInfo(qq, ConstOutputPattern(()), List()))
result.info.guardPresence shouldEqual AllMatchersAreTrivial
}
it should "inspect a very complicated reaction input pattern" in {
val a = m[Int]
val c = m[Unit]
val qq = m[Unit]
val s = b[Unit, Int]
val bb = m[(Int, Option[Int])]
// reaction contains all kinds of pattern-matching constructions
val result = go {
case a(p) + a(y) + a(1) + c(()) + c(_) + bb((0, None)) + bb((1, Some(2))) + bb((1, z)) + bb((_, None)) + bb((t, Some(q))) + s(_, r) => s(); a(p + 1); qq(); r(p)
}
result.info.inputs.toList should matchPattern {
case List(
InputMoleculeInfo(`a`, 0, SimpleVarInput('p, _), _, Symbol("Int")),
InputMoleculeInfo(`a`, 1, SimpleVarInput('y, _), _, 'Int),
InputMoleculeInfo(`a`, 2, ConstInputPattern(1), _, _),
InputMoleculeInfo(`c`, 3, WildcardInput, _, Symbol("Unit")),
InputMoleculeInfo(`c`, 4, WildcardInput, _, 'Unit),
InputMoleculeInfo(`bb`, 5, ConstInputPattern((0, None)), _, Symbol("(Int, Option[Int])")),
InputMoleculeInfo(`bb`, 6, ConstInputPattern((1, Some(2))), _, _),
InputMoleculeInfo(`bb`, 7, OtherInputPattern(_, List('z), false), _, _),
InputMoleculeInfo(`bb`, 8, OtherInputPattern(_, List(), false), _, _),
InputMoleculeInfo(`bb`, 9, OtherInputPattern(_, List('t, 'q), false), _, _),
InputMoleculeInfo(`s`, 10, WildcardInput, _, 'Unit)
) ⇒
}
result.info.outputs.toList should matchPattern { case List(
OutputMoleculeInfo(`s`, ConstOutputPattern(()), _),
OutputMoleculeInfo(`a`, OtherOutputPattern, _),
OutputMoleculeInfo(`qq`, ConstOutputPattern(()), _)
) ⇒
}
result.info.outputs.toList.map(_.environments) should matchPattern { case List(
List(NotLastBlock(_)),
List(NotLastBlock(_)),
List(NotLastBlock(_))
) ⇒
}
result.info.toString shouldEqual "a(1) + a(p) + a(y) + bb((0,None)) + bb((1,Some(2))) + bb(?z) + bb(?) + bb(?t,q) + c(_) + c(_) + s/B(_) → s/B() + a(?) + qq()"
}
it should "not fail to define a reaction with correct inputs with non-default pattern-matching in the middle of reaction" in {
val a = m[Option[Int]]
val b = m[Unit]
val c = m[Unit]
site(tp0)(go { case b(_) + a(Some(x)) + c(_) => })
a.logSite shouldEqual "Site{a + b + c → ...}\\nNo molecules"
}
it should "define a reaction with correct inputs with default pattern-matching in the middle of reaction" in {
val a = m[Option[Int]]
val b = m[Unit]
val c = m[Unit]
site(tp0)(go { case b(_) + a(None) + c(_) => })
a.logSite shouldEqual "Site{a + b + c → ...}\\nNo molecules"
}
it should "define a reaction with correct inputs with non-simple default pattern-matching in the middle of reaction" in {
val a = m[Seq[Int]]
val b = m[Unit]
val c = m[Unit]
site(go { case b(_) + a(List()) + c(_) => })
a.logSite shouldEqual "Site{a + b + c → ...}\\nNo molecules"
}
it should "not fail to define a simple reaction with correct inputs with empty option pattern-matching at start of reaction" in {
val a = m[Option[Int]]
val b = m[Unit]
val c = m[Unit]
site(tp0)(go { case a(None) + b(_) + c(_) => })
a.logSite shouldEqual "Site{a + b + c → ...}\\nNo molecules"
}
it should "define a reaction with correct inputs with empty option pattern-matching at start of reaction" in {
val a = m[Option[Int]]
val b = m[Unit]
val c = m[Unit]
site(tp0)(go { case a(None) + b(_) + c(_) => })
a.logSite shouldEqual "Site{a + b + c → ...}\\nNo molecules"
}
it should "define a reaction with correct inputs with non-default pattern-matching at start of reaction" in {
val a = m[Option[Int]]
val b = m[Unit]
val c = m[Unit]
site(tp0)(go { case a(Some(x)) + b(_) + c(_) => })
a.logSite shouldEqual "Site{a + b + c → ...}\\nNo molecules"
}
it should "run reactions correctly with non-default pattern-matching at start of reaction" in {
val a = m[Option[Int]]
val f = b[Unit, Int]
site(tp0)(go { case a(Some(x)) + f(_, r) => r(x) })
a(Some(1))
waitSome()
waitSome()
a.logSite shouldEqual "Site{a + f/B → ...}\\nMolecules: a/P(Some(1))"
f.timeout()(2.second) shouldEqual Some(1)
a.logSite shouldEqual "Site{a + f/B → ...}\\nNo molecules"
}
it should "not run a reaction whose static guard is false" in {
val a = m[Option[Int]]
val f = b[Unit, Int]
val n = 1
site(tp0)(go { case a(Some(x)) + f(_, r) if n < 1 => r(x) })
a(Some(1))
waitSome()
waitSome()
a.logSite shouldEqual "Site{a + f/B → ...}\\nMolecules: a/P(Some(1))"
f.timeout()(2.second) shouldEqual None
waitSome() // Removal of blocking molecule upon timeout is now asynchronous.
waitSome()
a.logSite shouldEqual "Site{a + f/B → ...}\\nMolecules: a/P(Some(1))"
}
it should "not run a reaction whose cross-molecule guard is false" in {
val a = m[Option[Int]]
val f = b[Int, Int]
val n = 2
site(tp0)(go { case a(Some(x)) + f(y, r) if x < y + n => r(x) })
a(Some(10))
waitSome()
waitSome()
a.logSite shouldEqual "Site{a + f/B → ...}\\nMolecules: a(Some(10))"
f.timeout(0)(2.second) shouldEqual None
waitSome()
waitSome()
a.logSite shouldEqual "Site{a + f/B → ...}\\nMolecules: a(Some(10))"
}
it should "run a reaction whose cross-molecule guard is true" in {
val a = m[Option[Int]]
val f = b[Int, Int]
val n = 2
site(tp0)(go { case a(Some(x)) + f(y, r) if x < y + n => r(x) })
a(Some(1))
waitSome()
waitSome()
a.logSite shouldEqual "Site{a + f/B → ...}\\nMolecules: a(Some(1))"
f.timeout(0)(2.second) shouldEqual Some(1)
a.logSite shouldEqual "Site{a + f/B → ...}\\nNo molecules"
}
it should "run a reaction with cross-molecule guards and some independent molecules" in {
val a = m[Option[Int]]
val f = b[Int, Int]
val c = m[Int]
val n = 2
site(tp0)(go { case a(Some(x)) + c(z) + f(y, r) if x < y + n => r(x + z) })
a(Some(1))
c(123)
waitSome()
waitSome()
a.logSite shouldEqual "Site{a + c + f/B → ...}\\nMolecules: a(Some(1)) + c/P(123)"
f.timeout(0)(2.second) shouldEqual Some(124)
a.logSite shouldEqual "Site{a + c + f/B → ...}\\nNo molecules"
}
it should "define a reaction with correct inputs with constant non-default pattern-matching at start of reaction" in {
val a = m[Int]
val b = m[Unit]
val c = m[Unit]
site(tp0)(go { case a(1) + b(_) + c(_) => })
a.logSite shouldEqual "Site{a + b + c → ...}\\nNo molecules"
}
it should "define a reaction with correct inputs with constant default option pattern-matching at start of reaction" in {
val a = m[Option[Int]]
val b = m[Unit]
val c = m[Unit]
site(tp0)(go { case a(None) + b(_) + c(_) => })
a.logSite shouldEqual "Site{a + b + c → ...}\\nNo molecules"
}
it should "determine constant input and output patterns correctly" in {
val a = m[Option[Int]]
val b = m[String]
val c = m[(Int, Int)]
val d = m[Unit]
val e = m[Either[Option[Int], String]]
val r = go { case a(Some(1)) + b("xyz") + d(()) + c((2, 3)) + e(Left(Some(1))) + e(Right("input")) =>
a(Some(2)); e(Left(Some(2))); e(Right("output"))
}
r.info.inputs.toList should matchPattern {
case List(
InputMoleculeInfo(`a`, 0, ConstInputPattern(Some(1)), _, Symbol("Option[Int]")),
InputMoleculeInfo(`b`, 1, ConstInputPattern("xyz"), _, 'String),
InputMoleculeInfo(`d`, 2, WildcardInput, _, _),
InputMoleculeInfo(`c`, 3, ConstInputPattern((2, 3)), _, Symbol("(Int, Int)")),
InputMoleculeInfo(`e`, 4, ConstInputPattern(Left(Some(1))), _, Symbol("scala.util.Either[Option[Int],String]")),
InputMoleculeInfo(`e`, 5, ConstInputPattern(Right("input")), _, _)
) =>
}
r.info.outputs.toList should matchPattern { case List(
OutputMoleculeInfo(`a`, ConstOutputPattern(Some(2)), List(NotLastBlock(_))),
OutputMoleculeInfo(`e`, ConstOutputPattern(Left(Some(2))), List(NotLastBlock(_))),
OutputMoleculeInfo(`e`, ConstOutputPattern(Right("output")), List())
) ⇒
}
r.info.guardPresence shouldEqual GuardAbsent
r.info.sha1 shouldEqual "092BC1D2E16ECF2AC24374BC00EFB8BE1B5190F8"
}
it should "detect output molecules with constant values" in {
val c = m[Int]
val bb = m[(Int, Int)]
val bbb = m[Int]
val cc = m[Option[Int]]
val r1 = go { case bbb(x) => c(x); bb((1, 2)); bb((3, x)) }
val r2 = go { case bbb(_) + c(_) => bbb(0) }
val r3 = go { case bbb(x) + c(_) + c(_) => bbb(1); c(x); bbb(2); cc(None); cc(Some(1)) }
r1.info.outputs.toList should matchPattern { case List(
OutputMoleculeInfo(`c`, OtherOutputPattern, List(NotLastBlock(_))),
OutputMoleculeInfo(`bb`, ConstOutputPattern((1, 2)), List(NotLastBlock(_))),
OutputMoleculeInfo(`bb`, OtherOutputPattern, List())
) ⇒
}
r2.info.outputs shouldEqual List(OutputMoleculeInfo(bbb, ConstOutputPattern(0), List()))
r3.info.outputs.toList should matchPattern { case List(
OutputMoleculeInfo(`bbb`, ConstOutputPattern(1), List(NotLastBlock(_))),
OutputMoleculeInfo(`c`, OtherOutputPattern, List(NotLastBlock(_))),
OutputMoleculeInfo(`bbb`, ConstOutputPattern(2), List(NotLastBlock(_))),
OutputMoleculeInfo(`cc`, ConstOutputPattern(None), List(NotLastBlock(_))),
OutputMoleculeInfo(`cc`, ConstOutputPattern(Some(1)), List())
) ⇒
}
}
it should "compute input pattern variables correctly" in {
val a = m[Int]
val bb = m[(Int, Int, Option[Int], (Int, Option[Int]))]
val c = m[Unit]
val result = go { case a(1 | 2) + c(()) + bb(p@(ytt, 1, None, (s, Some(t)))) => }
result.info.inputs.toList should matchPattern {
case List(
InputMoleculeInfo(`a`, 0, OtherInputPattern(_, List(), false), _, _),
InputMoleculeInfo(`c`, 1, WildcardInput, _, _),
InputMoleculeInfo(`bb`, 2, OtherInputPattern(_, List('p, 'ytt, 's, 't), false), _, Symbol("(Int, Int, Option[Int], (Int, Option[Int]))"))
) =>
}
result.info.toString shouldEqual "a(?) + bb(?p,ytt,s,t) + c(_) → "
}
it should "create partial functions for matching from reaction body" in {
val aa = m[Option[Int]]
val bb = m[(Int, Option[Int])]
val result = go { case aa(Some(x)) + bb((0, None)) => aa(Some(x + 1)) }
result.info.outputs shouldEqual List(OutputMoleculeInfo(aa, OtherOutputPattern, List()))
val pat_aa = result.info.inputs.head
pat_aa.molecule shouldEqual aa
val pat_bb = result.info.inputs(1)
pat_bb.molecule shouldEqual bb
(pat_aa.flag match {
case OtherInputPattern(matcher, vars, false) =>
matcher.isDefinedAt(Some(1)) shouldEqual true
matcher.isDefinedAt(None) shouldEqual false
vars shouldEqual List('x)
true
case _ => false
}) shouldEqual true
pat_bb.flag shouldEqual ConstInputPattern((0, None))
}
behavior of "output environment computation"
it should "ignore + and some other functions" in {
val a = m[Unit]
val c = m[Unit]
val f = b[Unit, String]
val r = go { case c(_) =>
a() + a()
a()
Some(a())
List(a(), a(), a())
Left(a())
Right(a())
(a(), a(), a())
Symbol(f.timeout()(1.second).get)
val x = f()
if (f() == x) ()
f() match {
case "" => true
}
}
r.info.outputs.map(_.molecule) shouldEqual List(a, a, a, a, a, a, a, a, a, a, a, a, f, f, f, f)
r.info.outputs.map(_.flag).distinct shouldEqual List(ConstOutputPattern(()))
    r.info.outputs.map(_.environments).forall(_.forall(_.notLastBlock)) shouldBe true
}
it should "detect f.timeout()()" in {
val a = m[Unit]
val f = b[Unit, Unit]
val r = go { case a(_) => Some(f.timeout()(1.second).get).foreach(_ => ()) }
r.info.outputs.toList should matchPattern { case List(
OutputMoleculeInfo(`f`, ConstOutputPattern(()), _)
) ⇒
}
}
it should "detect f().map()" in {
val a = m[Unit]
val f = b[Unit, List[Int]]
val r = go { case a(_) => f().foreach(_ => ()) }
r.info.outputs.toList should matchPattern { case List(
OutputMoleculeInfo(`f`, ConstOutputPattern(()), _)
) ⇒
}
r.info.outputs(0).environments should matchPattern { case List(NotLastBlock(1)) ⇒ }
}
it should "detect molecules emitted in if-then-else blocks" in {
val a = m[Int]
val c = m[Unit]
val d = m[Unit]
val r = go { case a(x) => if (x > 0) c() else d() }
r.info.outputs(0).environments should matchPattern { case List(ChooserBlock(_, 0, 2)) => }
r.info.outputs(1).environments should matchPattern { case List(ChooserBlock(_, 1, 2)) => }
r.info.outputs(0).environments(0).id shouldEqual r.info.outputs(1).environments(0).id
}
it should "detect molecules emitted in several if-then-else blocks" in {
val a = m[Int]
val c = m[Unit]
val d = m[Unit]
val r = go { case a(x) => if (x > 0) c() else d(); if (x < 0) c() else d() }
r.info.outputs(0).environments should matchPattern { case List(NotLastBlock(_), ChooserBlock(_, 0, 2)) => }
r.info.outputs(1).environments should matchPattern { case List(NotLastBlock(_), ChooserBlock(_, 1, 2)) => }
r.info.outputs(2).environments should matchPattern { case List(ChooserBlock(_, 0, 2)) => }
r.info.outputs(3).environments should matchPattern { case List(ChooserBlock(_, 1, 2)) => }
r.info.outputs(0).environments(0).id shouldEqual r.info.outputs(1).environments(0).id
r.info.outputs(0).environments(1).id shouldEqual r.info.outputs(1).environments(1).id
r.info.outputs(2).environments(0).id shouldEqual r.info.outputs(3).environments(0).id
}
it should "detect molecules emitted in foreach blocks" in {
val a = m[Int]
val c = m[Int]
val r = go { case a(x) => if (x > 0) (1 to 10).foreach(i => c(i)) }
r.info.outputs(0).environments should matchPattern {
case List(ChooserBlock(_, 0, 2), FuncBlock(_, "scala.collection.immutable.Range.foreach"), FuncLambda(_)) =>
}
}
it should "detect molecules emitted in foreach blocks with short apply syntax" in {
val a = m[Int]
val c = m[Int]
val r = go { case a(x) => if (x > 0) (1 to 10).foreach(c) }
r.info.outputs(0).environments should matchPattern {
case List(ChooserBlock(_, 0, 2), FuncBlock(_, "scala.collection.immutable.Range.foreach")) =>
}
}
it should "detect molecules emitted in map blocks" in {
val a = m[Int]
val c = m[Int]
val r = go { case c(x) => if (x > 0) (1 to 10).map { i => a(i); 1 } }
r.info.outputs(0).environments should matchPattern {
case List(ChooserBlock(_, 0, 2), FuncBlock(_, "scala.collection.TraversableLike.map"), FuncLambda(_), NotLastBlock(_)) =>
}
}
it should "detect molecules emitted in map blocks with short syntax" in {
val a = m[Int]
val c = m[Int]
val r = go { case a(x) => (1 to 10).map(c).forall(_ => true) }
r.info.outputs(0).environments should matchPattern {
case List(NotLastBlock(_), FuncBlock(_, "scala.collection.TraversableLike.map")) =>
}
}
it should "detect molecules emitted in arguments of other molecules" in {
val a = m[Int]
val c = b[Int, Int]
val r = go { case a(x) => a(if (x > 0) c(x) else c(x + 1)) }
r.info.outputs(0).environments should matchPattern { case List(NotLastBlock(_), ChooserBlock(_, 0, 2)) ⇒ }
r.info.outputs(1).environments should matchPattern { case List(NotLastBlock(_), ChooserBlock(_, 1, 2)) ⇒ }
r.info.outputs(0).environments(1).id shouldEqual r.info.outputs(1).environments(1).id
r.info.outputs(2).environments shouldEqual List()
}
it should "detect molecules emitted in custom apply()" in {
val a = m[Int]
val c = m[Int]
val r = go { case a(x) => FuncLambda {
c(x)
1
}
}
r.info.outputs(0).environments should matchPattern {
case List(FuncBlock(1, "io.chymyst.jc.FuncLambda.apply"), NotLastBlock(2)) =>
}
}
it should "detect molecules emitted in user-defined methods" in {
val a = m[Int]
val c = m[Int]
def f(x: Unit): Int = 1
val r = go { case a(x) => c(if (x > 0) f(c(x))
else {
c(x)
2
})
}
r.info.outputs(0).environments should matchPattern {
case List(NotLastBlock(_), ChooserBlock(_, 0, 2), FuncBlock(_, "io.chymyst.jc.MacrosSpec.f")) =>
}
}
it should "detect molecules emitted in user-defined methods within reaction scope" in {
val a = m[Int]
val c = m[Int]
val r = go { case a(x) =>
def f(x: Unit): Int = 1
c(if (x > 0) f(c(x))
else {
c(x)
2
})
}
r.info.outputs(0).environments should matchPattern {
case List(NotLastBlock(_), ChooserBlock(x, 0, 2), FuncBlock(y, "io.chymyst.jc.MacrosSpec.$anonfun.f")) if y > x =>
}
}
it should "detect molecules emitted in while loops" in {
val a = m[Int]
val c = m[Int]
val r = go { case a(x) => if (x > 0)
while ( {
c(x)
true
}) {
c(x)
}
}
r.info.outputs(0).environments should matchPattern { case List(ChooserBlock(_, 0, 2), AtLeastOneEmitted(_, "condition of while"), NotLastBlock(_)) => }
r.info.outputs(1).environments should matchPattern { case List(ChooserBlock(_, 0, 2), FuncBlock(_, "while")) => }
}
it should "detect molecules emitted in do-while loops" in {
val a = m[Int]
val c = m[Int]
val r = go { case a(x) => if (x > 0)
do {
c(x)
} while (x > 0)
}
r.info.outputs(0).environments should matchPattern { case List(ChooserBlock(_, 0, 2), AtLeastOneEmitted(_, "do while")) => }
}
it should "detect molecules emitted in match-case blocks with nested if-then-else" in {
val a = m[Int]
val c = m[Unit]
val d = m[Unit]
val r = go { case a(x) =>
x match {
case 0 => c(); if (x > 0) c()
case 1 => d()
case 2 => c(); if (x > 0) d() else c()
}
}
r.info.outputs(0).environments should matchPattern { case List(ChooserBlock(_, 0, 3), NotLastBlock(_)) => }
r.info.outputs(1).environments should matchPattern { case List(ChooserBlock(_, 0, 3), ChooserBlock(_, 0, 2)) => }
r.info.outputs(2).environments should matchPattern { case List(ChooserBlock(_, 1, 3)) => }
r.info.outputs(3).environments should matchPattern { case List(ChooserBlock(_, 2, 3), NotLastBlock(_)) => }
r.info.outputs(4).environments should matchPattern { case List(ChooserBlock(_, 2, 3), ChooserBlock(_, 0, 2)) => }
r.info.outputs(5).molecule shouldEqual c
r.info.outputs(5).environments should matchPattern { case List(ChooserBlock(_, 2, 3), ChooserBlock(_, 1, 2)) => }
}
it should "detect molecules emitted in anonymous functions" in {
val a = m[Int]
val c = m[Unit]
val r = go { case a(x) =>
val pf: Int => Unit = { x => c() }
pf(0)
}
r.info.outputs(0).environments should matchPattern { case List(NotLastBlock(_), FuncLambda(_)) => }
}
it should "not detect molecules emitted via assignment" in {
val a = m[Int]
val c = m[Unit]
val r = go { case a(x) =>
val c2 = c
c2()
}
r.info.outputs.length shouldEqual 0
}
it should "not detect molecules emitted via argument of emitter type" in {
val a = m[M[Unit]]
val r = go { case a(c) =>
c()
}
r.info.outputs.length shouldEqual 0
}
it should "detect molecules emitted in val blocks" in {
val a = m[Unit]
val c = m[Unit]
val r = go { case a(_) =>
val x = {
println("abc")
c()
0
}
x + 1
}
r.info.outputs.length shouldEqual 1
r.info.outputs(0).environments should matchPattern { case List(NotLastBlock(_), NotLastBlock(_)) ⇒ }
}
it should "detect molecules emitted in partial functions" in {
val a = m[Int]
val c = m[Unit]
val r = go { case a(x) =>
val pf: PartialFunction[Int, Unit] = {
case 123 => c()
}
pf(0)
}
r.info.outputs(0).environments should matchPattern { case List(NotLastBlock(1), FuncLambda(2), ChooserBlock(3, 0, 1)) => }
}
behavior of "output value computation"
it should "compute outputs with shrinkage and NotLastBlock()" in {
val c = b[Int, Int]
val d = m[Unit]
val reaction = go {
case c(x, r) + d(_) => if (x == 1) {
d()
r(0)
} else {
d()
r(1)
}
}
reaction.info.outputs(0).environments should matchPattern { case List(ChooserBlock(_, 0, 2), NotLastBlock(_)) ⇒ }
reaction.info.outputs(1).environments should matchPattern { case List(ChooserBlock(_, 1, 2), NotLastBlock(_)) ⇒ }
reaction.info.shrunkOutputs.length shouldEqual 1
reaction.info.shrunkOutputs(0).environments should matchPattern { case List(NotLastBlock(_)) ⇒ }
reaction.info.toString shouldEqual "c/B(x) + d(_) → d()"
}
it should "compute outputs for an inline reaction" in {
val thrown = intercept[Exception] {
val a = m[Int]
site(
go { case a(1) => a(1) }
)
a.consumingReactions.map(_.info.outputs) shouldEqual Array(Array(OutputMoleculeInfo(a, ConstOutputPattern(1), List())))
}
thrown.getMessage shouldEqual "In Site{a → ...}: Unavoidable livelock: reaction {a(1) → a(1)}"
}
it should "compute inputs and outputs for an inline nested reaction" in {
val a = m[Int]
site(
go {
case a(1) =>
val c = m[Int]
site(go { case c(_) => })
c(2)
a(2)
}
)
a.emittingReactions.size shouldEqual 1
a.consumingReactions.length shouldEqual 1
a.consumingReactions.map(_.info.outputs).head shouldEqual List(OutputMoleculeInfo(a, ConstOutputPattern(2), List()))
a.consumingReactions.map(_.info.inputs).head shouldEqual List(InputMoleculeInfo(a, 0, ConstInputPattern(1), constantOneSha1, 'Int))
a.emittingReactions.map(_.info.outputs).head shouldEqual List(OutputMoleculeInfo(a, ConstOutputPattern(2), List()))
a.emittingReactions.map(_.info.inputs).head shouldEqual List(InputMoleculeInfo(a, 0, ConstInputPattern(1), constantOneSha1, Symbol("Int")))
}
it should "compute outputs for an inline nested reaction" in {
val thrown = intercept[Exception] {
val a = m[Int]
site(
go {
case a(1) =>
val c = m[Int]
site(go { case c(_) => })
c(2)
a(1)
}
)
}
thrown.getMessage shouldEqual "In Site{a → ...}: Unavoidable livelock: reaction {a(1) → a(1)}"
}
it should "compute outputs in the correct order for a reaction with no livelock" in {
val a = m[Int]
val b = m[Int]
site(
go { case a(2) => b(2); a(1); b(1) }
)
a.consumingReactions.length shouldEqual 1
val infos = a.consumingReactions.map(_.info.outputs).head.toList
infos should matchPattern { case List(
OutputMoleculeInfo(`b`, ConstOutputPattern(2), List(NotLastBlock(_))),
OutputMoleculeInfo(`a`, ConstOutputPattern(1), List(NotLastBlock(_))),
OutputMoleculeInfo(`b`, ConstOutputPattern(1), List())
) ⇒
}
}
it should "recognize nested emissions of non-blocking molecules in the correct order" in {
val a = m[Int]
val c = m[Int]
val d = m[Boolean]
site(
go { case a(x) + d(_) => c({
a(1)
2
})
}
)
a.isBound shouldEqual true
c.isBound shouldEqual false
val reaction = a.consumingReactions.head
c.emittingReactions.head shouldEqual reaction
a.emittingReactions.head shouldEqual reaction
reaction.info.inputs.toList should matchPattern {
case List(InputMoleculeInfo(`a`, 0, SimpleVarInput('x, _), `simpleVarXSha1`, 'Int), InputMoleculeInfo(`d`, 1, WildcardInput, `wildcardSha1`, 'Boolean)) =>
}
reaction.info.outputs.toList should matchPattern { case List(
OutputMoleculeInfo(`a`, ConstOutputPattern(1), _),
OutputMoleculeInfo(`c`, OtherOutputPattern, _)
) ⇒
}
reaction.info.outputs.map(_.environments).toList should matchPattern { case List(
List(NotLastBlock(_), NotLastBlock(_)),
List()
) ⇒
}
}
it should "recognize nested emissions of blocking molecules and reply values" in {
val a = b[Int, Int]
val c = m[Int]
val d = m[Unit]
site(
go { case d(_) => c(a(1)) },
go { case a(x, r) => d(r(x)) }
)
a.isBound shouldEqual true
c.isBound shouldEqual false
d.isBound shouldEqual true
val reaction1 = d.consumingReactions.head
a.emittingReactions.head shouldEqual reaction1
c.emittingReactions.head shouldEqual reaction1
val reaction2 = a.consumingReactions.head
d.emittingReactions.head shouldEqual reaction2
reaction1.info.inputs.toList shouldEqual List(InputMoleculeInfo(d, 0, WildcardInput, wildcardSha1, 'Unit))
reaction1.info.outputs.toList should matchPattern { case List(
OutputMoleculeInfo(`a`, ConstOutputPattern(1), List(NotLastBlock(_))),
OutputMoleculeInfo(`c`, OtherOutputPattern, List())
) ⇒
}
reaction2.info.inputs.toList should matchPattern {
case List(InputMoleculeInfo(`a`, 0, SimpleVarInput('x, _), `simpleVarXSha1`, _)) =>
}
reaction2.info.outputs shouldEqual List(OutputMoleculeInfo(d, OtherOutputPattern, List()))
}
behavior of "output environment shrinkage"
it should "detect simple constant due to perfect if-then-else shrinkage" in {
val a = m[Int]
val r = go { case a(1) => if (true) a(1) else a(1) } // This livelock cannot be detected at compile time because it can't evaluate constants.
r.info.shrunkOutputs shouldEqual Array(OutputMoleculeInfo(a, ConstOutputPattern(1), Nil))
}
it should "detect simple constant due to perfect if-then-else shrinkage within val block" in {
val a = m[Int]
    val r = go { case a(1) =>
      val x = {
        if (true) a(1) else a(1)
      }
      x
    } // This livelock cannot be detected at compile time because it can't evaluate constants.
r.info.shrunkOutputs shouldEqual List(OutputMoleculeInfo(a, ConstOutputPattern(1), List(NotLastBlock(1))))
}
it should "detect other pattern due to non-perfect if-then-else shrinkage" in {
val a = m[Int]
val r = go { case a(1) => if (true) a(1) else a(2) }
r.info.shrunkOutputs shouldEqual Array(OutputMoleculeInfo(a, OtherOutputPattern, Nil))
}
behavior of "auxiliary functions"
it should "find expression trees for constant values" in {
rawTree(1) shouldEqual "Literal(Constant(1))"
rawTree(None) shouldEqual "Select(Ident(scala), scala.None)"
(Set(
"Apply(TypeApply(Select(Select(Ident(scala), scala.Some), TermName(\\"apply\\")), List(TypeTree())), List(Literal(Constant(1))))"
) contains rawTree(Some(1))) shouldEqual true
}
it should "find expression trees for matchers" in {
rawTree(Some(1) match { case Some(1) => }) shouldEqual "Match(Apply(TypeApply(Select(Select(Ident(scala), scala.Some), TermName(\\"apply\\")), List(TypeTree())), List(Literal(Constant(1)))), List(CaseDef(Apply(TypeTree().setOriginal(Select(Ident(scala), scala.Some)), List(Literal(Constant(1)))), EmptyTree, Literal(Constant(())))))"
}
it should "find enclosing symbol names with correct scopes" in {
val x = getName
x shouldEqual "x"
val y = {
val z = getName
(z, getName)
}
y shouldEqual (("z", "y"))
val (y1, y2) = {
val z = getName
(z, getName)
}
y1 shouldEqual "z"
y2 should fullyMatch regex "x\\\\$[0-9]+"
}
behavior of "errors while emitting static molecules"
/* This functionality is not useful: it's running a reaction body manually.
it should "refuse to emit static molecule if reaction runs on a non-reaction thread" in {
val dIncorrectStaticMol = m[Unit]
val e = m[Unit]
val r1 = go { case dIncorrectStaticMol(_) + e(_) => dIncorrectStaticMol(); 123 }
site(tp0)(
r1,
go { case _ => dIncorrectStaticMol() }
)
val inputs = new InputMoleculeList(2)
inputs(0) = MolValue(())
inputs(1) = MolValue(())
the[Exception] thrownBy {
r1.body.apply((inputs.length - 1, inputs)) shouldEqual 123 // Reaction ran on a non-reaction thread (i.e. on this thread) and attempted to emit the static molecule.
} should have message s"In Site{${dIncorrectStaticMol.name} + e → ...}: Refusing to emit static molecule ${dIncorrectStaticMol.name}() because this thread does not run a chemical reaction"
waitSome()
e.logSite shouldEqual s"Site{${dIncorrectStaticMol.name} + e → ...}\\nMolecules: ${dIncorrectStaticMol.name}/P()"
}
*/
it should "refuse to emit static molecule manually from non-reaction thread" in {
val dIncorrectStaticMol = m[Unit]
val e = m[Unit]
val r1 = go { case dIncorrectStaticMol(_) + e(_) => dIncorrectStaticMol(); 123 }
site(tp0)(
r1,
go { case _ => dIncorrectStaticMol() }
)
the[Exception] thrownBy {
dIncorrectStaticMol() shouldEqual (()) // User code attempted to emit the static molecule.
} should have message s"Error: static molecule ${dIncorrectStaticMol.name}(()) cannot be emitted non-statically"
waitSome()
e.logSite shouldEqual s"Site{${dIncorrectStaticMol.name} + e → ...}\\nMolecules: ${dIncorrectStaticMol.name}/P()"
}
it should "refuse to emit static molecule from a reaction that did not consume it when this cannot be determined statically" in {
val c = new M[Unit]("c")
val dIncorrectStaticMol = m[Unit]
val e = new M[M[Unit]]("e")
val memLog = new MemoryLogger
tp0.reporter = new ErrorReporter(memLog)
site(tp0)(
go { case e(s) => s() },
go { case dIncorrectStaticMol(_) + c(_) => dIncorrectStaticMol() },
go { case _ => dIncorrectStaticMol() }
)
e(dIncorrectStaticMol)
waitSome()
e.logSite shouldEqual s"Site{c + ${dIncorrectStaticMol.name} → ...; e → ...}\\nMolecules: ${dIncorrectStaticMol.name}/P()"
globalLogHas(memLog, "cannot be emitted", s"In Site{c + dIncorrectStaticMol → ...; e → ...}: Reaction {e(s) → } with inputs [e/P(dIncorrectStaticMol)] produced an exception internal to Chymyst Core. Retry run was not scheduled. Message: Error: static molecule dIncorrectStaticMol(()) cannot be emitted non-statically")
}
}
| Chymyst/chymyst-core | core/src/test/scala/io/chymyst/jc/MacrosSpec.scala | Scala | apache-2.0 | 41,073 |
package uk.gov.digital.ho.proving.financialstatus.domain
import java.time.LocalDate
import java.time.temporal.ChronoUnit.DAYS
import org.springframework.beans.factory.annotation.{Autowired, Value}
import org.springframework.stereotype.Service
import uk.gov.digital.ho.proving.financialstatus.bank.{BarclaysBankService, DailyBalances}
import scala.util.Try
@Service
class AccountStatusChecker @Autowired()(barclaysBankService: BarclaysBankService,
@Value("${daily-balance.days-to-check}") val numberConsecutiveDays1: Int) {
def areDatesConsecutive(dailyBalances: DailyBalances, numberConsecutiveDays: Long): Boolean = {
val dates = dailyBalances.balanceRecords.map {
_.date
}.sortWith((date1, date2) => date1.isBefore(date2))
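    // Pairwise sliding window over the sorted dates: every adjacent pair must be
    // exactly one day apart for the whole range to count as consecutive.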
val consecutive = dates.sliding(2).map { case Seq(d1, d2) => d1.plusDays(1).isEqual(d2) }.toVector
consecutive.forall(_ == true)
}
def checkDailyBalancesAreAboveMinimum(account: Account, fromDate: LocalDate, toDate: LocalDate,
threshold: BigDecimal, dob: LocalDate, userId: String): Try[DailyBalanceCheck] = {
val numberConsecutiveDays = DAYS.between(fromDate, toDate) + 1 // Inclusive of last day
Try {
val dailyBalances = barclaysBankService.fetchAccountDailyBalances(account, fromDate, toDate, dob, userId)
if (dailyBalances.balanceRecords.length < numberConsecutiveDays) {
DailyBalanceCheck(dailyBalances.accountHolderName,
fromDate,
toDate,
threshold,
pass = false,
Some(BalanceCheckFailure(recordCount = Some(dailyBalances.balanceRecords.length))))
} else {
val minimumBalance = dailyBalances.balanceRecords.minBy(_.balance)
val thresholdPassed = dailyBalances.balanceRecords.length == numberConsecutiveDays &&
areDatesConsecutive(dailyBalances, numberConsecutiveDays) && minimumBalance.balance >= threshold
if (minimumBalance.balance < threshold) {
DailyBalanceCheck(dailyBalances.accountHolderName, fromDate, toDate, threshold, thresholdPassed,
Some(BalanceCheckFailure(Option(minimumBalance.date), Option(minimumBalance.balance))))
} else {
DailyBalanceCheck(dailyBalances.accountHolderName, fromDate, toDate, threshold, thresholdPassed)
}
}
}
}
def parameters: String = {
s"""
       | ---------- External parameter values ----------
""".stripMargin
}
}
| UKHomeOffice/pttg-fs-api | src/main/scala/uk/gov/digital/ho/proving/financialstatus/domain/AccountStatusChecker.scala | Scala | mit | 2,622 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.streaming.examples.kafka.wordcount
import com.twitter.bijection.Injection
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest._
import org.scalatest.mock.MockitoSugar
import org.apache.gearpump.Message
import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.streaming.task.TaskContext
class SplitSpec extends FlatSpec with Matchers with MockitoSugar {
it should "split should split the text and deliver to next task" in {
val taskContext = mock[TaskContext]
val split = new Split(taskContext, UserConfig.empty)
val msg = "this is a test message"
split.onNext(Message(Injection[String, Array[Byte]](msg)))
verify(taskContext, times(msg.split(" ").length)).output(anyObject[Message])
}
}
| manuzhang/incubator-gearpump | examples/streaming/kafka/src/test/scala/org/apache/gearpump/streaming/examples/kafka/wordcount/SplitSpec.scala | Scala | apache-2.0 | 1,595 |
/**
* Copyright 2016, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.workflowexecutor.communication.message.global
import spray.httpx.SprayJsonSupport
import spray.json.DefaultJsonProtocol
case class PoisonPill()
trait PoisonPillJsonProtocol extends DefaultJsonProtocol with SprayJsonSupport {
implicit val poisonPillFormat = jsonFormat0(PoisonPill)
}
object PoisonPillJsonProtocol extends PoisonPillJsonProtocol
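// Illustrative sketch (hypothetical object, not part of the protocol): a zero-field
// case class marshalled with jsonFormat0 serializes to an empty JSON object.
object PoisonPillSketch extends App {
  import spray.json._
  import PoisonPillJsonProtocol._
  println(PoisonPill().toJson.compactPrint) // prints {}
}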
| deepsense-io/seahorse-workflow-executor | workflowexecutormqprotocol/src/main/scala/io/deepsense/workflowexecutor/communication/message/global/PoisonPill.scala | Scala | apache-2.0 | 970 |
package model
import skinny.orm.SkinnyJoinTable
case class ProgrammerSkill(programmerId: Long, skillId: Long)
object ProgrammerSkill extends SkinnyJoinTable[ProgrammerSkill] {
override val defaultAlias = createAlias("ps")
}
| BlackPrincess/skinny-framework | example/src/main/scala/model/ProgrammerSkill.scala | Scala | mit | 230 |
package com.twitter.algebird.benchmark
import com.twitter.algebird._
import com.twitter.bijection._
import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
import scala.util.Random
object QTreeBenchmark {
@State(Scope.Benchmark)
class QTreeState {
var qtreeUnit: QTreeSemigroup[Unit] = _
var qtreeLong: QTreeSemigroup[Long] = _
var qtreeDouble: QTreeSemigroup[Double] = _
@Param(Array("5", "10", "12"))
var depthK: Int = 0
@Param(Array("100", "10000"))
var numElements: Int = 0
var inputDataUnit: Seq[QTree[Unit]] = _
var inputDataLong: Seq[QTree[Long]] = _
var inputDataDouble: Seq[QTree[Double]] = _
@Setup(Level.Trial)
def setup(): Unit = {
qtreeUnit = new QTreeSemigroup[Unit](depthK)
qtreeLong = new QTreeSemigroup[Long](depthK)
qtreeDouble = new QTreeSemigroup[Double](depthK)
inputDataUnit = {
val rng = new Random("qtree".hashCode)
(0L until numElements).map { _ =>
QTree((rng.nextInt(1000).toLong, ()))
}
}
inputDataLong = {
val rng = new Random("qtree".hashCode)
(0L until numElements).map { _ =>
QTree(rng.nextInt(1000).toLong)
}
}
inputDataDouble = {
val rng = new Random("qtree".hashCode)
(0L until numElements).map { _ =>
QTree(rng.nextInt(1000).toDouble)
}
}
}
}
}
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
class QTreeBenchmark {
import QTreeBenchmark._
@Benchmark
def timeSumOptionUnit(state: QTreeState) = {
state.qtreeUnit.sumOption(state.inputDataUnit)
}
@Benchmark
def timeSumOptionLong(state: QTreeState) = {
state.qtreeLong.sumOption(state.inputDataLong)
}
@Benchmark
def timeSumOptionDouble(state: QTreeState) = {
state.qtreeDouble.sumOption(state.inputDataDouble)
}
@Benchmark
def timePlusUnit(state: QTreeState): QTree[Unit] = {
state.inputDataUnit.tail.reduce(state.qtreeUnit.plus)
}
@Benchmark
def timePlusLong(state: QTreeState): QTree[Long] = {
state.inputDataLong.tail.reduce(state.qtreeLong.plus)
}
@Benchmark
def timePlusDouble(state: QTreeState): QTree[Double] = {
state.inputDataDouble.tail.reduce(state.qtreeDouble.plus)
}
}
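// Illustrative sketch (hypothetical object, not part of the benchmark): building a
// QTree with the semigroup under test and querying approximate quantile bounds.
object QTreeUsageSketch extends App {
  val semigroup = new QTreeSemigroup[Long](6)
  val tree = (1L to 100L).map(QTree(_)).reduce(semigroup.plus)
  val (lower, upper) = tree.quantileBounds(0.5) // bounds bracketing the median
  println(s"median lies in [$lower, $upper]")
}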
| sid-kap/algebird | algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeBenchmark.scala | Scala | apache-2.0 | 2,312 |
package com.recursivity.commons.bean
/**
* Created by IntelliJ IDEA.
* User: wfaler
* Date: Mar 25, 2010
* Time: 11:36:46 PM
* To change this template use File | Settings | File Templates.
*/
class JavaBooleanTransformer extends StringValueTransformer[java.lang.Boolean] {
  def toValue(from: String): Option[java.lang.Boolean] = from match {
    case "true"  => Some(java.lang.Boolean.TRUE)
    case "false" => Some(java.lang.Boolean.FALSE)
    case _       => None // also covers null input
  }
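  // Illustrative usage (not part of the original file):
  //   new JavaBooleanTransformer().toValue("true")  == Some(java.lang.Boolean.TRUE)
  //   new JavaBooleanTransformer().toValue("maybe") == None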
}
| rkpandey/recursivity-commons | src/main/scala/com/recursivity/commons/bean/JavaBooleanTransformer.scala | Scala | bsd-3-clause | 564 |
/*
* Copyright (C) 2017 LREN CHUV for Human Brain Project
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ch.chuv.lren.woken.service
import cats.Id
import cats.data.{ NonEmptyList, Validated }
import cats.effect.{ Effect, Resource, Sync }
import cats.syntax.validated._
import ch.chuv.lren.woken.core.features.FeaturesQuery
import ch.chuv.lren.woken.core.model.database.{ FeaturesTableDescription, TableColumn }
import ch.chuv.lren.woken.dao.{
FeaturesRepository,
FeaturesTableRepository,
PrefillExtendedFeaturesTable
}
import ch.chuv.lren.woken.messages.datasets.{ DatasetId, TableId }
import ch.chuv.lren.woken.core.fp.runNow
import ch.chuv.lren.woken.cromwell.core.ConfigUtil.Validation
import ch.chuv.lren.woken.messages.query.{ UserFeedback, UserFeedbacks }
import ch.chuv.lren.woken.messages.query.filters.FilterRule
import ch.chuv.lren.woken.messages.variables.VariableMetaData
import spray.json.JsObject
import sup.HealthCheck
import scala.collection.mutable
import scala.language.higherKinds
object FeaturesService {
def apply[F[_]: Effect](repo: FeaturesRepository[F]): FeaturesService[F] =
new FeaturesServiceImpl[F](repo)
}
trait FeaturesService[F[_]] {
def featuresTable(table: TableId): Validation[FeaturesTableService[F]]
def healthCheck: HealthCheck[F, Id]
}
trait FeaturesTableService[F[_]] {
def table: FeaturesTableDescription
/**
* Total number of rows in the table
*
* @return number of rows
*/
def count: F[Int]
/**
* Number of rows belonging to the dataset.
*
* @param dataset The dataset used to filter rows
* @return the number of rows in the dataset, 0 if dataset is not associated with the table
*/
def count(dataset: DatasetId): F[Int]
/**
* Number of rows matching the filters.
*
* @param filters The filters used to filter rows
* @return the number of rows in the dataset matching the filters, or the total number of rows if there are no filters
*/
def count(filters: Option[FilterRule]): F[Int]
/**
* Number of rows grouped by a reference column
*
* @return a map containing the number of rows for each value of the group by column
*/
def countGroupBy(groupByColumn: TableColumn, filters: Option[FilterRule]): F[Map[String, Int]]
/**
* Returns the list of datasets effectively used by a query
*
* @param filters The filters used to filter rows
* @return a set of dataset ids
*/
def datasets(filters: Option[FilterRule]): F[Set[DatasetId]]
type Headers = List[TableColumn]
def features(query: FeaturesQuery): F[(Headers, Stream[JsObject])]
/**
* Validate the fields in the actual table against their metadata
*
* @param variables Full list of variables for the table as defined in the metadata
*/
def validateFields(
variables: List[VariableMetaData]
): F[Validated[NonEmptyList[(VariableMetaData, UserFeedback)], UserFeedbacks]]
def createExtendedFeaturesTable(
filters: Option[FilterRule],
newFeatures: List[TableColumn],
otherColumns: List[TableColumn],
prefills: List[PrefillExtendedFeaturesTable],
extendedTableNumber: Int
): Validation[Resource[F, FeaturesTableService[F]]]
}
class FeaturesServiceImpl[F[_]: Effect](repository: FeaturesRepository[F])
extends FeaturesService[F] {
private val featuresTableCache: mutable.Map[TableId, FeaturesTableService[F]] =
new mutable.WeakHashMap[TableId, FeaturesTableService[F]]()
def featuresTable(table: TableId): Validation[FeaturesTableService[F]] =
featuresTableCache
.get(table)
.orElse {
runNow(repository.featuresTable(table))
.map { featuresTable =>
val service = new FeaturesTableServiceImpl(featuresTable)
val _ = featuresTableCache.put(table, service)
service
}
}
.fold(
s"Table ${table.name} cannot be found or has not been configured in the configuration for database ${repository.database}"
.invalidNel[FeaturesTableService[F]]
) { s: FeaturesTableService[F] =>
s.validNel[String]
}
override def healthCheck: HealthCheck[F, Id] = repository.healthCheck
}
class FeaturesTableServiceImpl[F[_]: Sync](repository: FeaturesTableRepository[F])
extends FeaturesTableService[F] {
override def table: FeaturesTableDescription = repository.table
def count: F[Int] = repository.count
def count(dataset: DatasetId): F[Int] = repository.count(dataset)
def count(filters: Option[FilterRule]): F[Int] = repository.count(filters)
/**
* Number of rows grouped by a reference column
*
* @return a map containing the number of rows for each value of the group by column
*/
override def countGroupBy(groupByColumn: TableColumn,
filters: Option[FilterRule]): F[Map[String, Int]] =
repository.countGroupBy(groupByColumn, filters)
def features(query: FeaturesQuery): F[(Headers, Stream[JsObject])] = repository.features(query)
override def datasets(filters: Option[FilterRule]): F[Set[DatasetId]] =
repository.datasets(filters)
/**
* Validate the fields in the actual table against their metadata
*
* @param variables Full list of variables for the table as defined in the metadata
*/
override def validateFields(
variables: List[VariableMetaData]
): F[Validated[NonEmptyList[(VariableMetaData, UserFeedback)], UserFeedbacks]] =
repository.validateFields(variables)
override def createExtendedFeaturesTable(
filters: Option[FilterRule],
newFeatures: List[TableColumn],
otherColumns: List[TableColumn],
prefills: List[PrefillExtendedFeaturesTable],
extendedTableNumber: Int
): Validation[Resource[F, FeaturesTableService[F]]] =
repository
.createExtendedFeaturesTable(filters,
newFeatures,
otherColumns,
prefills,
extendedTableNumber)
.map(
_.flatMap { extendedTable =>
Resource.liftF(
Sync[F]
.delay(new FeaturesTableServiceImpl(extendedTable): FeaturesTableService[F])
)
}
)
}
| HBPSP8Repo/workflow | src/main/scala/ch/chuv/lren/woken/service/FeaturesService.scala | Scala | apache-2.0 | 6,924 |
package org.jetbrains.plugins.scala.actions
import com.intellij.openapi.actionSystem._
import org.jetbrains.plugins.scala.components.HighlightingAdvisor
import com.intellij.openapi.project.Project
/**
* User: Alexander Podkhalyuzin
* Date: 27.01.2010
*/
class ToggleTypeAwareHighlightingAction extends AnAction {
def actionPerformed(e: AnActionEvent) {
CommonDataKeys.PROJECT.getData(e.getDataContext) match {
case project: Project => HighlightingAdvisor.getInstance(project).toggle()
case _ =>
}
}
}
| consulo/consulo-scala | src/org/jetbrains/plugins/scala/actions/ToggleTypeAwareHighlightingAction.scala | Scala | apache-2.0 | 530 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.io.File
import java.net.URI
import scala.collection.mutable
import scala.language.reflectiveCalls
import org.apache.hadoop.fs.{BlockLocation, FileStatus, LocatedFileStatus, Path, RawLocalFileSystem}
import org.apache.spark.metrics.source.HiveCatalogMetrics
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.util.{KnownSizeEstimation, SizeEstimator}
class FileIndexSuite extends SharedSQLContext {
test("InMemoryFileIndex: leaf files are qualified paths") {
withTempDir { dir =>
val file = new File(dir, "text.txt")
stringToFile(file, "text")
val path = new Path(file.getCanonicalPath)
val catalog = new InMemoryFileIndex(spark, Seq(path), Map.empty, None) {
def leafFilePaths: Seq[Path] = leafFiles.keys.toSeq
def leafDirPaths: Seq[Path] = leafDirToChildrenFiles.keys.toSeq
}
assert(catalog.leafFilePaths.forall(p => p.toString.startsWith("file:/")))
assert(catalog.leafDirPaths.forall(p => p.toString.startsWith("file:/")))
}
}
test("SPARK-26188: don't infer data types of partition columns if user specifies schema") {
withTempDir { dir =>
val partitionDirectory = new File(dir, "a=4d")
partitionDirectory.mkdir()
val file = new File(partitionDirectory, "text.txt")
stringToFile(file, "text")
val path = new Path(dir.getCanonicalPath)
val schema = StructType(Seq(StructField("a", StringType, false)))
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
val partitionValues = fileIndex.partitionSpec().partitions.map(_.values)
assert(partitionValues.length == 1 && partitionValues(0).numFields == 1 &&
partitionValues(0).getString(0) == "4d")
}
}
test("SPARK-26990: use user specified field names if possible") {
withTempDir { dir =>
val partitionDirectory = new File(dir, "a=foo")
partitionDirectory.mkdir()
val file = new File(partitionDirectory, "text.txt")
stringToFile(file, "text")
val path = new Path(dir.getCanonicalPath)
val schema = StructType(Seq(StructField("A", StringType, false)))
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
assert(fileIndex.partitionSchema.length == 1 && fileIndex.partitionSchema.head.name == "A")
}
}
}
test("SPARK-26230: if case sensitive, validate partitions with original column names") {
withTempDir { dir =>
val partitionDirectory = new File(dir, "a=1")
partitionDirectory.mkdir()
val file = new File(partitionDirectory, "text.txt")
stringToFile(file, "text")
val partitionDirectory2 = new File(dir, "A=2")
partitionDirectory2.mkdir()
val file2 = new File(partitionDirectory2, "text.txt")
stringToFile(file2, "text")
val path = new Path(dir.getCanonicalPath)
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, None)
val partitionValues = fileIndex.partitionSpec().partitions.map(_.values)
assert(partitionValues.length == 2)
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
val msg = intercept[AssertionError] {
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, None)
fileIndex.partitionSpec()
}.getMessage
assert(msg.contains("Conflicting partition column names detected"))
assert("Partition column name list #[0-1]: A".r.findFirstIn(msg).isDefined)
assert("Partition column name list #[0-1]: a".r.findFirstIn(msg).isDefined)
}
}
}
test("SPARK-26263: Throw exception when partition value can't be casted to user-specified type") {
withTempDir { dir =>
val partitionDirectory = new File(dir, "a=foo")
partitionDirectory.mkdir()
val file = new File(partitionDirectory, "text.txt")
stringToFile(file, "text")
val path = new Path(dir.getCanonicalPath)
val schema = StructType(Seq(StructField("a", IntegerType, false)))
withSQLConf(SQLConf.VALIDATE_PARTITION_COLUMNS.key -> "true") {
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
val msg = intercept[RuntimeException] {
fileIndex.partitionSpec()
}.getMessage
assert(msg == "Failed to cast value `foo` to `IntegerType` for partition column `a`")
}
withSQLConf(SQLConf.VALIDATE_PARTITION_COLUMNS.key -> "false") {
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
val partitionValues = fileIndex.partitionSpec().partitions.map(_.values)
assert(partitionValues.length == 1 && partitionValues(0).numFields == 1 &&
partitionValues(0).isNullAt(0))
}
}
}
test("InMemoryFileIndex: input paths are converted to qualified paths") {
withTempDir { dir =>
val file = new File(dir, "text.txt")
stringToFile(file, "text")
val unqualifiedDirPath = new Path(dir.getCanonicalPath)
val unqualifiedFilePath = new Path(file.getCanonicalPath)
require(!unqualifiedDirPath.toString.contains("file:"))
require(!unqualifiedFilePath.toString.contains("file:"))
val fs = unqualifiedDirPath.getFileSystem(spark.sessionState.newHadoopConf())
val qualifiedFilePath = fs.makeQualified(new Path(file.getCanonicalPath))
require(qualifiedFilePath.toString.startsWith("file:"))
val catalog1 = new InMemoryFileIndex(
spark, Seq(unqualifiedDirPath), Map.empty, None)
assert(catalog1.allFiles.map(_.getPath) === Seq(qualifiedFilePath))
val catalog2 = new InMemoryFileIndex(
spark, Seq(unqualifiedFilePath), Map.empty, None)
assert(catalog2.allFiles.map(_.getPath) === Seq(qualifiedFilePath))
}
}
test("InMemoryFileIndex: folders that don't exist don't throw exceptions") {
withTempDir { dir =>
val deletedFolder = new File(dir, "deleted")
assert(!deletedFolder.exists())
val catalog1 = new InMemoryFileIndex(
spark, Seq(new Path(deletedFolder.getCanonicalPath)), Map.empty, None)
// doesn't throw an exception
assert(catalog1.listLeafFiles(catalog1.rootPaths).isEmpty)
}
}
test("PartitioningAwareFileIndex listing parallelized with many top level dirs") {
for ((scale, expectedNumPar) <- Seq((10, 0), (50, 1))) {
withTempDir { dir =>
val topLevelDirs = (1 to scale).map { i =>
val tmp = new File(dir, s"foo=$i.txt")
tmp.mkdir()
new Path(tmp.getCanonicalPath)
}
HiveCatalogMetrics.reset()
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == 0)
new InMemoryFileIndex(spark, topLevelDirs, Map.empty, None)
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == expectedNumPar)
}
}
}
test("PartitioningAwareFileIndex listing parallelized with large child dirs") {
for ((scale, expectedNumPar) <- Seq((10, 0), (50, 1))) {
withTempDir { dir =>
for (i <- 1 to scale) {
new File(dir, s"foo=$i.txt").mkdir()
}
HiveCatalogMetrics.reset()
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == 0)
new InMemoryFileIndex(spark, Seq(new Path(dir.getCanonicalPath)), Map.empty, None)
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == expectedNumPar)
}
}
}
test("PartitioningAwareFileIndex listing parallelized with large, deeply nested child dirs") {
for ((scale, expectedNumPar) <- Seq((10, 0), (50, 4))) {
withTempDir { dir =>
for (i <- 1 to 2) {
val subdirA = new File(dir, s"a=$i")
subdirA.mkdir()
for (j <- 1 to 2) {
val subdirB = new File(subdirA, s"b=$j")
subdirB.mkdir()
for (k <- 1 to scale) {
new File(subdirB, s"foo=$k.txt").mkdir()
}
}
}
HiveCatalogMetrics.reset()
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == 0)
new InMemoryFileIndex(spark, Seq(new Path(dir.getCanonicalPath)), Map.empty, None)
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == expectedNumPar)
}
}
}
test("InMemoryFileIndex - file filtering") {
assert(!InMemoryFileIndex.shouldFilterOut("abcd"))
assert(InMemoryFileIndex.shouldFilterOut(".ab"))
assert(InMemoryFileIndex.shouldFilterOut("_cd"))
assert(!InMemoryFileIndex.shouldFilterOut("_metadata"))
assert(!InMemoryFileIndex.shouldFilterOut("_common_metadata"))
assert(InMemoryFileIndex.shouldFilterOut("_ab_metadata"))
assert(InMemoryFileIndex.shouldFilterOut("_cd_common_metadata"))
assert(InMemoryFileIndex.shouldFilterOut("a._COPYING_"))
}
test("SPARK-17613 - PartitioningAwareFileIndex: base path w/o '/' at end") {
class MockCatalog(
override val rootPaths: Seq[Path])
extends PartitioningAwareFileIndex(spark, Map.empty, None) {
override def refresh(): Unit = {}
override def leafFiles: mutable.LinkedHashMap[Path, FileStatus] = mutable.LinkedHashMap(
new Path("mockFs://some-bucket/file1.json") -> new FileStatus()
)
override def leafDirToChildrenFiles: Map[Path, Array[FileStatus]] = Map(
new Path("mockFs://some-bucket/") -> Array(new FileStatus())
)
override def partitionSpec(): PartitionSpec = {
PartitionSpec.emptySpec
}
}
withSQLConf(
"fs.mockFs.impl" -> classOf[FakeParentPathFileSystem].getName,
"fs.mockFs.impl.disable.cache" -> "true") {
val pathWithSlash = new Path("mockFs://some-bucket/")
assert(pathWithSlash.getParent === null)
val pathWithoutSlash = new Path("mockFs://some-bucket")
assert(pathWithoutSlash.getParent === null)
val catalog1 = new MockCatalog(Seq(pathWithSlash))
val catalog2 = new MockCatalog(Seq(pathWithoutSlash))
assert(catalog1.allFiles().nonEmpty)
assert(catalog2.allFiles().nonEmpty)
}
}
test("InMemoryFileIndex with empty rootPaths when PARALLEL_PARTITION_DISCOVERY_THRESHOLD" +
"is a nonpositive number") {
withSQLConf(SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD.key -> "0") {
new InMemoryFileIndex(spark, Seq.empty, Map.empty, None)
}
val e = intercept[IllegalArgumentException] {
withSQLConf(SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD.key -> "-1") {
new InMemoryFileIndex(spark, Seq.empty, Map.empty, None)
}
}.getMessage
assert(e.contains("The maximum number of paths allowed for listing files at " +
"driver side must not be negative"))
}
test("refresh for InMemoryFileIndex with FileStatusCache") {
withTempDir { dir =>
val fileStatusCache = FileStatusCache.getOrCreate(spark)
val dirPath = new Path(dir.getAbsolutePath)
val fs = dirPath.getFileSystem(spark.sessionState.newHadoopConf())
val catalog =
new InMemoryFileIndex(spark, Seq(dirPath), Map.empty, None, fileStatusCache) {
def leafFilePaths: Seq[Path] = leafFiles.keys.toSeq
def leafDirPaths: Seq[Path] = leafDirToChildrenFiles.keys.toSeq
}
val file = new File(dir, "text.txt")
stringToFile(file, "text")
assert(catalog.leafDirPaths.isEmpty)
assert(catalog.leafFilePaths.isEmpty)
catalog.refresh()
assert(catalog.leafFilePaths.size == 1)
assert(catalog.leafFilePaths.head == fs.makeQualified(new Path(file.getAbsolutePath)))
assert(catalog.leafDirPaths.size == 1)
assert(catalog.leafDirPaths.head == fs.makeQualified(dirPath))
}
}
test("SPARK-20280 - FileStatusCache with a partition with very many files") {
/* fake the size, otherwise we need to allocate 2GB of data to trigger this bug */
class MyFileStatus extends FileStatus with KnownSizeEstimation {
override def estimatedSize: Long = 1000 * 1000 * 1000
}
    /* files * MyFileStatus.estimatedSize should overflow to a negative Int,
     * so keep the total between 2bn and 4bn
     */
    val files = (1 to 3).map(_ => new MyFileStatus())
val fileStatusCache = FileStatusCache.getOrCreate(spark)
fileStatusCache.putLeafFiles(new Path("/tmp", "abc"), files.toArray)
}
test("SPARK-20367 - properly unescape column names in inferPartitioning") {
withTempPath { path =>
val colToUnescape = "Column/#%'?"
spark
.range(1)
.select(col("id").as(colToUnescape), col("id"))
.write.partitionBy(colToUnescape).parquet(path.getAbsolutePath)
assert(spark.read.parquet(path.getAbsolutePath).schema.exists(_.name == colToUnescape))
}
}
test("SPARK-25062 - InMemoryFileIndex stores BlockLocation objects no matter what subclass " +
"the FS returns") {
withSQLConf("fs.file.impl" -> classOf[SpecialBlockLocationFileSystem].getName) {
withTempDir { dir =>
val file = new File(dir, "text.txt")
stringToFile(file, "text")
val inMemoryFileIndex = new InMemoryFileIndex(
spark, Seq(new Path(file.getCanonicalPath)), Map.empty, None) {
def leafFileStatuses = leafFiles.values
}
val blockLocations = inMemoryFileIndex.leafFileStatuses.flatMap(
_.asInstanceOf[LocatedFileStatus].getBlockLocations)
assert(blockLocations.forall(_.getClass == classOf[BlockLocation]))
}
}
}
}
class FakeParentPathFileSystem extends RawLocalFileSystem {
override def getScheme: String = "mockFs"
override def getUri: URI = {
URI.create("mockFs://some-bucket")
}
}
class SpecialBlockLocationFileSystem extends RawLocalFileSystem {
class SpecialBlockLocation(
names: Array[String],
hosts: Array[String],
offset: Long,
length: Long)
extends BlockLocation(names, hosts, offset, length)
override def getFileBlockLocations(
file: FileStatus,
start: Long,
len: Long): Array[BlockLocation] = {
Array(new SpecialBlockLocation(Array("dummy"), Array("dummy"), 0L, file.getLen))
}
}
| yanboliang/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileIndexSuite.scala | Scala | apache-2.0 | 15,337 |
package lila.user
import org.joda.time.DateTime
case class Plan(
months: Int,
active: Boolean,
since: Option[DateTime]
) {
def incMonths =
copy(
months = months + 1,
active = true,
since = since orElse DateTime.now.some
)
def disable = copy(active = false)
def enable =
copy(
active = true,
months = months atLeast 1,
since = since orElse DateTime.now.some
)
def isEmpty = months == 0
def nonEmpty = !isEmpty option this
def sinceDate = since | DateTime.now
}
object Plan {
val empty = Plan(0, active = false, none)
def start = Plan(1, active = true, DateTime.now.some)
import lila.db.dsl._
private[user] val planBSONHandler = reactivemongo.api.bson.Macros.handler[Plan]
}
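// Illustrative sketch (hypothetical object, not part of the module): the month
// counter only grows, and disabling a plan never erases accumulated months.
object PlanSketch extends App {
  val p = Plan.start                  // 1 month, active, since now
  println(p.incMonths.months)         // 2
  println(p.incMonths.disable.active) // false
  println(p.incMonths.disable.months) // 2
}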
| luanlv/lila | modules/user/src/main/Plan.scala | Scala | mit | 768 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.parquet
import java.util.{HashMap => JHashMap}
import org.apache.hadoop.conf.Configuration
import parquet.column.ParquetProperties
import parquet.hadoop.ParquetOutputFormat
import parquet.hadoop.api.ReadSupport.ReadContext
import parquet.hadoop.api.{ReadSupport, WriteSupport}
import parquet.io.api._
import parquet.schema.MessageType
import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.expressions.{Attribute, Row}
import org.apache.spark.sql.types._
/**
* A `parquet.io.api.RecordMaterializer` for Rows.
*
 * @param root The root group converter for the record.
*/
private[parquet] class RowRecordMaterializer(root: CatalystConverter)
extends RecordMaterializer[Row] {
def this(parquetSchema: MessageType, attributes: Seq[Attribute]) =
this(CatalystConverter.createRootConverter(parquetSchema, attributes))
override def getCurrentRecord: Row = root.getCurrentRecord
override def getRootConverter: GroupConverter = root.asInstanceOf[GroupConverter]
}
/**
* A `parquet.hadoop.api.ReadSupport` for Row objects.
*/
private[parquet] class RowReadSupport extends ReadSupport[Row] with Logging {
override def prepareForRead(
conf: Configuration,
stringMap: java.util.Map[String, String],
fileSchema: MessageType,
readContext: ReadContext): RecordMaterializer[Row] = {
log.debug(s"preparing for read with Parquet file schema $fileSchema")
// Note: this very much imitates AvroParquet
val parquetSchema = readContext.getRequestedSchema
var schema: Seq[Attribute] = null
if (readContext.getReadSupportMetadata != null) {
// first try to find the read schema inside the metadata (can result from projections)
if (
readContext
.getReadSupportMetadata
.get(RowReadSupport.SPARK_ROW_REQUESTED_SCHEMA) != null) {
schema = ParquetTypesConverter.convertFromString(
readContext.getReadSupportMetadata.get(RowReadSupport.SPARK_ROW_REQUESTED_SCHEMA))
} else {
// if unavailable, try the schema that was read originally from the file or provided
// during the creation of the Parquet relation
if (readContext.getReadSupportMetadata.get(RowReadSupport.SPARK_METADATA_KEY) != null) {
schema = ParquetTypesConverter.convertFromString(
readContext.getReadSupportMetadata.get(RowReadSupport.SPARK_METADATA_KEY))
}
}
}
// if both unavailable, fall back to deducing the schema from the given Parquet schema
// TODO: Why it can be null?
if (schema == null) {
log.debug("falling back to Parquet read schema")
schema = ParquetTypesConverter.convertToAttributes(
parquetSchema, false, true)
}
log.debug(s"list of attributes that will be read: $schema")
new RowRecordMaterializer(parquetSchema, schema)
}
override def init(
configuration: Configuration,
keyValueMetaData: java.util.Map[String, String],
fileSchema: MessageType): ReadContext = {
var parquetSchema = fileSchema
val metadata = new JHashMap[String, String]()
val requestedAttributes = RowReadSupport.getRequestedSchema(configuration)
if (requestedAttributes != null) {
// If the parquet file is thrift derived, there is a good chance that
// it will have the thrift class in metadata.
val isThriftDerived = keyValueMetaData.keySet().contains("thrift.class")
parquetSchema = ParquetTypesConverter
.convertFromAttributes(requestedAttributes, isThriftDerived)
metadata.put(
RowReadSupport.SPARK_ROW_REQUESTED_SCHEMA,
ParquetTypesConverter.convertToString(requestedAttributes))
}
val origAttributesStr: String = configuration.get(RowWriteSupport.SPARK_ROW_SCHEMA)
if (origAttributesStr != null) {
metadata.put(RowReadSupport.SPARK_METADATA_KEY, origAttributesStr)
}
new ReadSupport.ReadContext(parquetSchema, metadata)
}
}
private[parquet] object RowReadSupport {
val SPARK_ROW_REQUESTED_SCHEMA = "org.apache.spark.sql.parquet.row.requested_schema"
val SPARK_METADATA_KEY = "org.apache.spark.sql.parquet.row.metadata"
private def getRequestedSchema(configuration: Configuration): Seq[Attribute] = {
val schemaString = configuration.get(RowReadSupport.SPARK_ROW_REQUESTED_SCHEMA)
if (schemaString == null) null else ParquetTypesConverter.convertFromString(schemaString)
}
}
/**
* A `parquet.hadoop.api.WriteSupport` for Row objects.
*/
private[parquet] class RowWriteSupport extends WriteSupport[Row] with Logging {
private[parquet] var writer: RecordConsumer = null
private[parquet] var attributes: Array[Attribute] = null
override def init(configuration: Configuration): WriteSupport.WriteContext = {
val origAttributesStr: String = configuration.get(RowWriteSupport.SPARK_ROW_SCHEMA)
val metadata = new JHashMap[String, String]()
metadata.put(RowReadSupport.SPARK_METADATA_KEY, origAttributesStr)
if (attributes == null) {
attributes = ParquetTypesConverter.convertFromString(origAttributesStr).toArray
}
log.debug(s"write support initialized for requested schema $attributes")
ParquetRelation.enableLogForwarding()
new WriteSupport.WriteContext(ParquetTypesConverter.convertFromAttributes(attributes), metadata)
}
override def prepareForWrite(recordConsumer: RecordConsumer): Unit = {
writer = recordConsumer
log.debug(s"preparing for write with schema $attributes")
}
override def write(record: Row): Unit = {
val attributesSize = attributes.size
if (attributesSize > record.size) {
throw new IndexOutOfBoundsException(
s"Trying to write more fields than contained in row ($attributesSize > ${record.size})")
}
var index = 0
writer.startMessage()
while(index < attributesSize) {
// null values indicate optional fields but we do not check currently
if (record(index) != null) {
writer.startField(attributes(index).name, index)
writeValue(attributes(index).dataType, record(index))
writer.endField(attributes(index).name, index)
}
index = index + 1
}
writer.endMessage()
}
private[parquet] def writeValue(schema: DataType, value: Any): Unit = {
if (value != null) {
schema match {
case t: UserDefinedType[_] => writeValue(t.sqlType, value)
case t @ ArrayType(_, _) => writeArray(
t,
value.asInstanceOf[CatalystConverter.ArrayScalaType[_]])
case t @ MapType(_, _, _) => writeMap(
t,
value.asInstanceOf[CatalystConverter.MapScalaType[_, _]])
case t @ StructType(_) => writeStruct(
t,
value.asInstanceOf[CatalystConverter.StructScalaType[_]])
case _ => writePrimitive(schema.asInstanceOf[AtomicType], value)
}
}
}
private[parquet] def writePrimitive(schema: DataType, value: Any): Unit = {
if (value != null) {
schema match {
case StringType => writer.addBinary(
Binary.fromByteArray(value.asInstanceOf[UTF8String].getBytes))
case BinaryType => writer.addBinary(
Binary.fromByteArray(value.asInstanceOf[Array[Byte]]))
case IntegerType => writer.addInteger(value.asInstanceOf[Int])
case ShortType => writer.addInteger(value.asInstanceOf[Short])
case LongType => writer.addLong(value.asInstanceOf[Long])
case TimestampType => writeTimestamp(value.asInstanceOf[java.sql.Timestamp])
case ByteType => writer.addInteger(value.asInstanceOf[Byte])
case DoubleType => writer.addDouble(value.asInstanceOf[Double])
case FloatType => writer.addFloat(value.asInstanceOf[Float])
case BooleanType => writer.addBoolean(value.asInstanceOf[Boolean])
case DateType => writer.addInteger(value.asInstanceOf[Int])
case d: DecimalType =>
if (d.precisionInfo == None || d.precisionInfo.get.precision > 18) {
sys.error(s"Unsupported datatype $d, cannot write to consumer")
}
writeDecimal(value.asInstanceOf[Decimal], d.precisionInfo.get.precision)
case _ => sys.error(s"Do not know how to writer $schema to consumer")
}
}
}
private[parquet] def writeStruct(
schema: StructType,
struct: CatalystConverter.StructScalaType[_]): Unit = {
if (struct != null) {
val fields = schema.fields.toArray
writer.startGroup()
var i = 0
while(i < fields.size) {
if (struct(i) != null) {
writer.startField(fields(i).name, i)
writeValue(fields(i).dataType, struct(i))
writer.endField(fields(i).name, i)
}
i = i + 1
}
writer.endGroup()
}
}
private[parquet] def writeArray(
schema: ArrayType,
array: CatalystConverter.ArrayScalaType[_]): Unit = {
val elementType = schema.elementType
writer.startGroup()
if (array.size > 0) {
if (schema.containsNull) {
writer.startField(CatalystConverter.ARRAY_CONTAINS_NULL_BAG_SCHEMA_NAME, 0)
var i = 0
while (i < array.size) {
writer.startGroup()
if (array(i) != null) {
writer.startField(CatalystConverter.ARRAY_ELEMENTS_SCHEMA_NAME, 0)
writeValue(elementType, array(i))
writer.endField(CatalystConverter.ARRAY_ELEMENTS_SCHEMA_NAME, 0)
}
writer.endGroup()
i = i + 1
}
writer.endField(CatalystConverter.ARRAY_CONTAINS_NULL_BAG_SCHEMA_NAME, 0)
} else {
writer.startField(CatalystConverter.ARRAY_ELEMENTS_SCHEMA_NAME, 0)
var i = 0
while (i < array.size) {
writeValue(elementType, array(i))
i = i + 1
}
writer.endField(CatalystConverter.ARRAY_ELEMENTS_SCHEMA_NAME, 0)
}
}
writer.endGroup()
}
private[parquet] def writeMap(
schema: MapType,
map: CatalystConverter.MapScalaType[_, _]): Unit = {
writer.startGroup()
if (map.size > 0) {
writer.startField(CatalystConverter.MAP_SCHEMA_NAME, 0)
for ((key, value) <- map) {
writer.startGroup()
writer.startField(CatalystConverter.MAP_KEY_SCHEMA_NAME, 0)
writeValue(schema.keyType, key)
writer.endField(CatalystConverter.MAP_KEY_SCHEMA_NAME, 0)
if (value != null) {
writer.startField(CatalystConverter.MAP_VALUE_SCHEMA_NAME, 1)
writeValue(schema.valueType, value)
writer.endField(CatalystConverter.MAP_VALUE_SCHEMA_NAME, 1)
}
writer.endGroup()
}
writer.endField(CatalystConverter.MAP_SCHEMA_NAME, 0)
}
writer.endGroup()
}
// Scratch array used to write decimals as fixed-length binary
private val scratchBytes = new Array[Byte](8)
private[parquet] def writeDecimal(decimal: Decimal, precision: Int): Unit = {
val numBytes = ParquetTypesConverter.BYTES_FOR_PRECISION(precision)
val unscaledLong = decimal.toUnscaledLong
var i = 0
var shift = 8 * (numBytes - 1)
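    // Pack the unscaled value big-endian: the most significant byte of the
    // numBytes-wide two's-complement representation lands in scratchBytes(0).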
while (i < numBytes) {
scratchBytes(i) = (unscaledLong >> shift).toByte
i += 1
shift -= 8
}
writer.addBinary(Binary.fromByteArray(scratchBytes, 0, numBytes))
}
private[parquet] def writeTimestamp(ts: java.sql.Timestamp): Unit = {
val binaryNanoTime = CatalystTimestampConverter.convertFromTimestamp(ts)
writer.addBinary(binaryNanoTime)
}
}
// Optimized for non-nested rows
private[parquet] class MutableRowWriteSupport extends RowWriteSupport {
override def write(record: Row): Unit = {
val attributesSize = attributes.size
if (attributesSize > record.size) {
throw new IndexOutOfBoundsException(
s"Trying to write more fields than contained in row ($attributesSize > ${record.size})")
}
var index = 0
writer.startMessage()
while(index < attributesSize) {
// null values indicate optional fields but we do not check currently
if (record(index) != null && record(index) != Nil) {
writer.startField(attributes(index).name, index)
consumeType(attributes(index).dataType, record, index)
writer.endField(attributes(index).name, index)
}
index = index + 1
}
writer.endMessage()
}
private def consumeType(
ctype: DataType,
record: Row,
index: Int): Unit = {
ctype match {
case StringType => writer.addBinary(
Binary.fromByteArray(record(index).asInstanceOf[UTF8String].getBytes))
case BinaryType => writer.addBinary(
Binary.fromByteArray(record(index).asInstanceOf[Array[Byte]]))
case IntegerType => writer.addInteger(record.getInt(index))
case ShortType => writer.addInteger(record.getShort(index))
case LongType => writer.addLong(record.getLong(index))
case ByteType => writer.addInteger(record.getByte(index))
case DoubleType => writer.addDouble(record.getDouble(index))
case FloatType => writer.addFloat(record.getFloat(index))
case BooleanType => writer.addBoolean(record.getBoolean(index))
case DateType => writer.addInteger(record.getInt(index))
case TimestampType => writeTimestamp(record(index).asInstanceOf[java.sql.Timestamp])
case d: DecimalType =>
if (d.precisionInfo == None || d.precisionInfo.get.precision > 18) {
sys.error(s"Unsupported datatype $d, cannot write to consumer")
}
writeDecimal(record(index).asInstanceOf[Decimal], d.precisionInfo.get.precision)
case _ => sys.error(s"Unsupported datatype $ctype, cannot write to consumer")
}
}
}
private[parquet] object RowWriteSupport {
val SPARK_ROW_SCHEMA: String = "org.apache.spark.sql.parquet.row.attributes"
def getSchema(configuration: Configuration): Seq[Attribute] = {
val schemaString = configuration.get(RowWriteSupport.SPARK_ROW_SCHEMA)
if (schemaString == null) {
throw new RuntimeException("Missing schema!")
}
ParquetTypesConverter.convertFromString(schemaString)
}
def setSchema(schema: Seq[Attribute], configuration: Configuration) {
val encoded = ParquetTypesConverter.convertToString(schema)
configuration.set(SPARK_ROW_SCHEMA, encoded)
configuration.set(
ParquetOutputFormat.WRITER_VERSION,
ParquetProperties.WriterVersion.PARQUET_1_0.toString)
}
}
| andrewor14/iolap | sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableSupport.scala | Scala | apache-2.0 | 15,151 |
/* Copyright 2009-2015 EPFL, Lausanne */
import leon._
import leon.lang._
import leon.annotation._
object Numerical {
def power(base: BigInt, p: BigInt): BigInt = {
require(p >= BigInt(0))
if (p == BigInt(0)) {
BigInt(1)
} else if (p%BigInt(2) == BigInt(0)) {
power(base*base, p/BigInt(2))
} else {
base*power(base, p-BigInt(1))
}
} ensuring {
res => ((base, p), res) passes {
case (_, BigInt(0)) => BigInt(1)
case (b, BigInt(1)) => b
case (BigInt(2), BigInt(7)) => BigInt(128)
case (BigInt(2), BigInt(10)) => BigInt(1024)
}
}
def gcd(a: BigInt, b: BigInt): BigInt = {
require(a > BigInt(0) && b > BigInt(0));
if (a == b) {
BigInt(1) // fixme: should be a
} else if (a > b) {
gcd(a-b, b)
} else {
gcd(a, b-a)
}
} ensuring {
res => (a%res == BigInt(0)) && (b%res == BigInt(0)) && (((a,b), res) passes {
case (BigInt(120), BigInt(24)) => BigInt(12)
case (BigInt(5), BigInt(7)) => BigInt(1)
case (BigInt(5), BigInt(5)) => BigInt(5)
})
}
}
| epfl-lara/leon | testcases/repair/Numerical/Numerical2.scala | Scala | gpl-3.0 | 1,092 |
package de.fosd.typechef.crefactor.evaluation.evalcases.openSSL
import de.fosd.typechef.crefactor.evaluation.Refactor
object OpenSSLRefactorEvaluation extends OpenSSLEvaluation with Refactor { }
| joliebig/Morpheus | src/main/scala/de/fosd/typechef/crefactor/evaluation/evalcases/openSSL/OpenSSLRefactorEvaluation.scala | Scala | lgpl-3.0 | 197 |
/**
* (C) Copyright IBM Corp. 2015, 2016
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.ibm.spark.netezza
import java.sql.Connection
import org.apache.spark.Partition
import org.slf4j.LoggerFactory
import scala.collection.mutable.ArrayBuffer
/**
* Instructions on how to partition the table among workers.
*/
private[netezza] case class PartitioningInfo(column: Option[String],
lowerBound: Option[String],
upperBound: Option[String],
numPartitions: Int)
/**
* Get information about data slices.
*/
private[netezza] object NetezzaInputFormat {
private val log = LoggerFactory.getLogger(getClass)
def getParitionPredicate(start: Int, end: Int): String = {
s"DATASLICEID BETWEEN $start AND $end";
}
/**
* Get number of data slices configured in the database.
*
* @param conn connection to the database.
* @return number of of data slices
*/
def getNumberDataSlices(conn: Connection): Integer = {
// query to get maximum number of data slices in the database.
val query = "select max(ds_id) from _v_dslice"
val stmt = conn.prepareStatement(query)
try {
val rs = stmt.executeQuery()
try {
val numberDataSlices = if (rs.next) rs.getInt(1) else 0
if (numberDataSlices == 0) {
// there should always be some data slices with netezza
throw new Exception("No data slice ids returned.");
}
return numberDataSlices
} finally {
rs.close()
}
} finally {
stmt.close
}
}
/**
   * Get partitions mapping to the data slices in the database.
*/
def getDataSlicePartition(conn: Connection, numPartitions: Int): Array[Partition] = {
val numberDataSlices = getNumberDataSlices(conn)
if (numPartitions <= 1 || numberDataSlices <= 1) {
Array[Partition](NetezzaPartition(null, 0))
} else {
val ans = new ArrayBuffer[Partition]()
var partitionIndex = 0
      // if there are at least as many partitions as data slices, assign one data slice per partition
if (numberDataSlices <= numPartitions) {
// one data slice per mapper
for (sliceId <- 1 to numberDataSlices) {
ans += NetezzaPartition(getParitionPredicate(sliceId, sliceId), partitionIndex)
partitionIndex = partitionIndex + 1
}
} else {
        val slicesPerPartition: Int = {
          val slices = Math.floor(numberDataSlices / numPartitions).toInt
          if (slices * numPartitions < numberDataSlices) slices + 1 else slices
        }
        var start = 1
        var end = slicesPerPartition
        for (index <- 1 to numPartitions if (start <= numberDataSlices)) {
          ans += NetezzaPartition(getParitionPredicate(start, end), partitionIndex)
          partitionIndex = partitionIndex + 1
          start = end + 1
          end = if ((start + slicesPerPartition - 1) > numberDataSlices) {
            numberDataSlices
          } else {
            start + slicesPerPartition - 1
          }
}
}
ans.toArray
}
}
/**
* Get column partitions based on the user specified column.
*/
def getColumnPartitions(conn: Connection, table: String,
partitionInfo: PartitioningInfo): Array[Partition] = {
if (partitionInfo.numPartitions <= 1 || !partitionInfo.column.isDefined) {
Array[Partition](NetezzaPartition(null, 0))
} else {
val (min, max) = NetezzaJdbcUtils.getPartitionColumnBoundaryValues(conn, table, partitionInfo)
getIntegerColumnPartition(partitionInfo.column, min, max, partitionInfo.numPartitions)
}
}
/**
* Given a partitioning schematic (a column of integral type, a number of
* partitions, and upper and lower bounds on the column's value), generate
* WHERE clauses for each partition so that each row in the table appears
* exactly once. The parameters minValue and maxValue are advisory in that
* incorrect values may cause the partitioning to be poor, but no data
* will fail to be represented.
*
* Null value predicate is added to the first partition where clause to include
* the rows with null value for the partitions column.
*/
def getIntegerColumnPartition(partitioncolumn: Option[String],
lowerBound: Long,
upperBound: Long,
userSpecifiedNumPartitions: Int): Array[Partition] = {
require(upperBound > lowerBound,
"lower bound of partitioning column is larger than the upper " +
s"bound. Lower bound: $lowerBound; Upper bound: $upperBound")
val numPartitions =
if ((upperBound - lowerBound) >= userSpecifiedNumPartitions) {
userSpecifiedNumPartitions
} else {
log.warn("The number of partitions is reduced because the specified number of " +
"partitions is less than the difference between upper bound and lower bound. " +
s"Updated number of partitions: ${upperBound - lowerBound}; Input number of " +
s"partitions: ${userSpecifiedNumPartitions}; Lower bound: $lowerBound; " +
s"Upper bound: $upperBound.")
upperBound - lowerBound
}
if (numPartitions <= 1 || lowerBound == upperBound) {
return Array[Partition](NetezzaPartition(null, 0))
}
val column = partitioncolumn.get
// Overflow and silliness can happen if you subtract then divide.
// Here we get a little roundoff, but that's (hopefully) OK.
val stride: Long = upperBound / numPartitions - lowerBound / numPartitions
var i: Int = 0
var currentValue: Long = lowerBound
var ans = new ArrayBuffer[Partition]()
while (i < numPartitions) {
val lBound = if (i != 0) s"$column >= $currentValue" else null
currentValue += stride
val uBound = if (i != numPartitions - 1) s"$column < $currentValue" else null
val whereClause =
if (uBound == null) {
lBound
} else if (lBound == null) {
s"$uBound or $column is null"
} else {
s"$lBound AND $uBound"
}
ans += NetezzaPartition(whereClause, i)
i = i + 1
}
ans.toArray
}
}
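// Illustrative sketch (hypothetical object and column name, not part of the
// connector): partitioning ids 0..100 on column "id" into 4 ranges.
object ColumnPartitionSketch extends App {
  val parts = NetezzaInputFormat.getIntegerColumnPartition(Some("id"), 0L, 100L, 4)
  parts.foreach(println)
  // First partition:   "id < 25 or id is null"
  // Middle partitions: "id >= 25 AND id < 50", "id >= 50 AND id < 75"
  // Last partition:    "id >= 75"
}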
| SparkTC/spark-netezza | src/main/scala/com/ibm/spark/netezza/NetezzaInputFormat.scala | Scala | apache-2.0 | 6,840 |
package com.blogspot.nhu313.tictactoe
class Rules {
  def isGameOver(board: Board): Boolean = {
    winner(board).isDefined || board.isFull
  }
  def winner(board: Board): Option[Marker] = {
    val winningLine = sets(board).find(line =>
      line.forall(marker => marker != Marker.NONE && marker == line.head)
    )
    winningLine.map(_.head)
  }
private def sets(board: Board): Array[Array[Marker]] = {
board.rows ++ board.columns ++ board.diagonals
}
}
| nhu313/tic_tac_toe_scala | src/main/scala/tictactoe/Rules.scala | Scala | apache-2.0 | 533 |
import com.datastax.spark.connector.cql.CassandraConnector
import org.apache.spark.SparkConf
import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest._
class ResponseTimeTest extends FlatSpec with BeforeAndAfter with GivenWhenThen with Matchers {
private val master = "local[2]"
private val appName = "spark-streaming-twitter"
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
.set("spark.cassandra.connection.host", "localhost")
before {
CassandraConnector(conf).withSessionDo { session =>
session.execute("CREATE KEYSPACE IF NOT EXISTS twitter_streaming WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor': 1}")
session.execute(
"""
CREATE TABLE IF NOT EXISTS twitter_streaming.tweets (
body text,
user_id bigint,
user_screen_name text,
lang text,
created_at timestamp,
favorite_count int,
retweet_count int,
tweet_id bigint,
user_mentions list<text>,
reply_id bigint,
response_time text,
hashtags list<text>,
urls list<text>,
PRIMARY KEY (body, tweet_id, user_id, user_screen_name)
)"""
)
session.execute("CREATE INDEX IF NOT EXISTS ON twitter_streaming.tweets(tweet_id);")
// Insert a mock question tweet for Sosh_fr
session.execute("INSERT INTO twitter_streaming.tweets (body, user_id, user_screen_name, lang, created_at, favorite_count, retweet_count, tweet_id, user_mentions, reply_id, response_time, hashtags, urls) VALUES('@Sosh_fr Ceci est un test', 63721, 'user', 'fr', '2015-11-30 18:23:49+0100', 0, 0, 3258473, ['Sosh_fr'], null, null, [''], [''])")
}
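// Illustrative usage sketch (comments only; the concrete effect type `IO` and
// `someTableId` are assumptions for illustration, not part of this file):
//
//   val service: FeaturesService[IO] = FeaturesService(featuresRepository)
//   service.featuresTable(someTableId) match {
//     case Valid(tableService) => tableService.count // an IO[Int]
//     case Invalid(errors)     => // table unknown or not configured
//   }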
}
after {
CassandraConnector(conf).withSessionDo { session =>
session.execute("DROP KEYSPACE twitter_streaming;")
}
}
"Response time" should "be counted" in {
Given("a tweet response")
val responseTime = ResponseTime.getResponseTime(conf, 3258473, new DateTime(2015, 11, 30, 18, 28, 12, DateTimeZone.forID("Europe/Paris")))
When("a corresponding question is found")
Then("the date difference should match")
println(responseTime)
responseTime should be ("00:04:23")
}
}
| t3g7/spark-streaming-twitter | src/test/scala/ResponseTimeTest.scala | Scala | apache-2.0 | 2,260 |
import java.io.{FileOutputStream, FileInputStream}
import scala.tools.asm.{ClassWriter, Opcodes, ClassReader}
import scala.tools.asm.tree.{InsnNode, ClassNode}
import scala.tools.nsc.backend.jvm.AsmUtils
import scala.tools.partest.DirectTest
import scala.collection.JavaConverters._
/**
* Test that the ICodeReader does not crash if the bytecode of a method has unreachable code.
*/
object Test extends DirectTest {
def code: String = ???
def show(): Unit = {
// The bytecode of f will be modified using ASM by `addDeadCode`
val aCode =
"""
|package p
|class A {
| @inline final def f = 1
|}
""".stripMargin
val bCode =
"""
|package p
|class B {
| def g = (new A()).f
|}
""".stripMargin
compileString(newCompiler("-usejavacp"))(aCode)
addDeadCode()
// If inlining fails, the compiler will issue an inliner warning that is not present in the
// check file
compileString(newCompiler("-usejavacp", "-opt:l:classpath"))(bCode)
}
def readClass(file: String) = {
val cnode = new ClassNode()
val is = new FileInputStream(file)
val reader = new ClassReader(is)
reader.accept(cnode, 0)
is.close()
cnode
}
def writeClass(file: String, cnode: ClassNode): Unit = {
val writer = new ClassWriter(0)
cnode.accept(writer)
val os = new FileOutputStream(file)
os.write(writer.toByteArray)
os.close()
}
def addDeadCode() {
val file = (testOutput / "p" / "A.class").path
val cnode = readClass(file)
val method = cnode.methods.asScala.find(_.name == "f").head
AsmUtils.traceMethod(method)
val insns = method.instructions
val it = insns.iterator()
while (it.hasNext) {
val in = it.next()
if (in.getOpcode == Opcodes.IRETURN) {
// Insert an ATHROW before the IRETURN. The IRETURN will then be dead code.
// The ICodeReader should not crash if there's dead code.
insns.insert(in.getPrevious, new InsnNode(Opcodes.ATHROW))
}
}
AsmUtils.traceMethod(method)
writeClass(file, cnode)
}
}
| felixmulder/scala | test/files/run/icode-reader-dead-code.scala | Scala | bsd-3-clause | 2,142 |
package com.seanshubin.uptodate.logic
import java.nio.file.{Path, Paths}
import com.seanshubin.devon.domain.DevonMarshaller
class ConfigurationValidatorImpl(fileSystem: FileSystem,
devonMarshaller: DevonMarshaller) extends ConfigurationValidator {
private val sample = new Configuration(
pomFileName = "pom.xml",
directoryNamesToSkip = Set("target"),
directoriesToSearch = Seq(Paths.get(".")),
mavenRepositories = Seq(
"http://repo.maven.apache.org/maven2",
"http://onejar-maven-plugin.googlecode.com/svn/mavenrepo",
"http://oss.sonatype.org/content/groups/scala-tools"),
doNotUpgradeFrom = Set(GroupAndArtifact("groupIdToIgnore", "artifactIdToIgnore")),
doNotUpgradeTo = Set(GroupArtifactVersion("groupIdToIgnore", "artifactIdToIgnore", "1.2.3")),
automaticallyUpgrade = true,
reportDirectory = Paths.get("generated", "sample", "report"),
cacheDirectory = Paths.get("generated", "cache"),
cacheExpire = "5 days"
)
override def validate(commandLineArguments: Seq[String]): Either[Seq[String], Configuration] = {
if (commandLineArguments.size < 1) {
Left(Seq("at least one command line argument required"))
} else if (commandLineArguments.size > 1) {
Left(Seq("no more than one command line argument allowed"))
} else {
validateFile(Paths.get(commandLineArguments(0)))
}
}
private def validateFile(path: Path): Either[Seq[String], Configuration] = {
if (fileSystem.fileExists(path)) {
val text = fileSystem.loadString(path)
validateText(path, text)
} else {
Left(Seq(s"file '$path' does not exist"))
}
}
private def validateText(path: Path, text: String): Either[Seq[String], Configuration] = {
try {
val devon = devonMarshaller.fromString(text)
val configuration = devonMarshaller.toValue(devon, classOf[Configuration])
Right(configuration)
} catch {
case ex: Exception =>
val sampleDevon = devonMarshaller.fromValue(sample)
val sampleConfigString = devonMarshaller.toPretty(sampleDevon)
Left(Seq(
s"Unable to read json from '$path': ${ex.getMessage}",
"A valid configuration might look something like this:") ++
sampleConfigString
)
}
}
}
| SeanShubin/up-to-date | logic/src/main/scala/com/seanshubin/uptodate/logic/ConfigurationValidatorImpl.scala | Scala | unlicense | 2,315 |
package concrete.constraint.linear
import concrete.constraint.AdviseCount
import concrete.{Assignment, BooleanDomain, Problem, Variable}
import org.scalatest.{FlatSpec, Matchers}
class LinearNeTest extends FlatSpec with Matchers {
"LinearNe" should "not filter" in {
val x_2095 = new Variable("X2095", BooleanDomain.UNKNOWNBoolean)
val x_2055 = new Variable("X2055", BooleanDomain.UNKNOWNBoolean)
val ctr = new LinearNe(2, Array(2, 1), Array(x_2095, x_2055))
val pb = Problem(x_2095, x_2055)
pb.addConstraint(ctr)
ctr.register(new AdviseCount)
val res = pb.initState
.andThen { ps =>
ctr.eventAll(ps)
ctr.revise(ps)
}
.andThen(_.assign(x_2055, 1))
.andThen { ps =>
ctr.event(ps, Assignment, 1)
ctr.revise(ps)
}
res.dom(x_2095) shouldBe BooleanDomain.UNKNOWNBoolean
}
it should "filter" in {
val x_2095 = new Variable("X2095", BooleanDomain.UNKNOWNBoolean)
val x_2055 = new Variable("X2055", BooleanDomain.UNKNOWNBoolean)
val ctr = new LinearNe(2, Array(2, 1), Array(x_2095, x_2055))
val pb = Problem(x_2095, x_2055)
pb.addConstraint(ctr)
ctr.register(new AdviseCount)
val res = pb.initState
      .andThen { ps =>
        ctr.eventAll(ps)
        ctr.revise(ps)
      }
      .andThen(_.assign(x_2055, 0))
      .andThen { ps =>
        ctr.event(ps, Assignment, 1)
        ctr.revise(ps)
      }
.toState
res.dom(x_2095) shouldBe BooleanDomain.FALSE
}
}
| concrete-cp/concrete | src/test/scala/concrete/constraint/linear/LinearNeTest.scala | Scala | lgpl-2.1 | 1,499 |
/*
* Copyright (c) 2012 Yahoo! Inc. All rights reserved. Licensed under the
* Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying LICENSE file.
*/
package com.yahoo.scalops.dsl.types
// Used for the loop environment
trait CompositeType extends Type[Any] {
}
| markusweimer/ScalOps | src/main/scala/com/yahoo/scalops/dsl/types/CompositeType.scala | Scala | apache-2.0 | 745 |
package forcomp
import common._
object Anagrams {
/** A word is simply a `String`. */
type Word = String
/** A sentence is a `List` of words. */
type Sentence = List[Word]
/** `Occurrences` is a `List` of pairs of characters and positive integers saying
* how often the character appears.
* This list is sorted alphabetically w.r.t. to the character in each pair.
* All characters in the occurrence list are lowercase.
*
* Any list of pairs of lowercase characters and their frequency which is not sorted
* is **not** an occurrence list.
*
* Note: If the frequency of some character is zero, then that character should not be
* in the list.
*/
type Occurrences = List[(Char, Int)]
/** The dictionary is simply a sequence of words.
* It is predefined and obtained as a sequence using the utility method `loadDictionary`.
*/
val dictionary: List[Word] = loadDictionary
  /** Converts the word into its character occurrence list.
*
* Note: the uppercase and lowercase version of the character are treated as the
* same character, and are represented as a lowercase character in the occurrence list.
*/
  def lt(t1: (Char, Int), t2: (Char, Int)): Boolean = t1._1 < t2._1
  def add(t1: Int, t2: (Char, Int)): Int = t1 + t2._2
  def wordOccurrences(w: Word): Occurrences =
    w.toLowerCase().toList.sorted.groupBy(x => x).mapValues(x => x.length).toList.sortWith(lt)
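  // A quick illustration (not from the original source):
  //   wordOccurrences("Robert") == List(('b', 1), ('e', 1), ('o', 1), ('r', 2), ('t', 1))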
  def getTotal(list: List[(Char, Int)]): Int = list.foldLeft(0)(add)
  def concat(b: (Occurrences, String), a: List[String]): List[String] = a ::: List(b._2)
  def getSecond(list: List[(Occurrences, Word)]): List[Word] =
    list.foldRight(List(""))(concat) filterNot (_ == "")
  def getfirst(t1: (Char, Int)): Char = t1._1
/** Converts a sentence into its character occurrence list. */
  def sentenceOccurrences(s: Sentence): Occurrences =
    (s.flatMap(word => wordOccurrences(word)).groupBy(_._1).mapValues(getTotal).toList).sortWith(lt)
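  // A quick illustration (not from the original source):
  //   sentenceOccurrences(List("ab", "ba")) == List(('a', 2), ('b', 2))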
/** The `dictionaryByOccurrences` is a `Map` from different occurrences to a sequence of all
* the words that have that occurrence count.
* This map serves as an easy way to obtain all the anagrams of a word given its occurrence list.
*
* For example, the word "eat" has the following character occurrence list:
*
* `List(('a', 1), ('e', 1), ('t', 1))`
*
* Incidentally, so do the words "ate" and "tea".
*
* This means that the `dictionaryByOccurrences` map will contain an entry:
*
* List(('a', 1), ('e', 1), ('t', 1)) -> Seq("ate", "eat", "tea")
*
*/
  lazy val dictionaryByOccurrences: Map[Occurrences, List[Word]] = {
    val occList = for (word <- loadDictionary; occ = wordOccurrences(word)) yield (occ, word)
    occList.groupBy(_._1).mapValues(getSecond).withDefaultValue(List())
  }
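  // `withDefaultValue(List())` makes lookups for occurrence lists that match no dictionary
  // word return Nil instead of throwing; `sentenceAnagramsOccurrence` relies on this when it
  // maps `dictionaryByOccurrences` over all combinations.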
/** Returns all the anagrams of a given word. */
  def wordAnagrams(word: Word): List[Word] = {
    val lower = word.toLowerCase
    (for {
      (_, words) <- dictionaryByOccurrences
      if words.exists(_.toLowerCase == lower)
    } yield words).flatten.toList
  }
/** Returns the list of all subsets of the occurrence list.
* This includes the occurrence itself, i.e. `List(('k', 1), ('o', 1))`
* is a subset of `List(('k', 1), ('o', 1))`.
* It also include the empty subset `List()`.
*
* Example: the subsets of the occurrence list `List(('a', 2), ('b', 2))` are:
*
* List(
* List(),
* List(('a', 1)),
* List(('a', 2)),
* List(('b', 1)),
* List(('a', 1), ('b', 1)),
* List(('a', 2), ('b', 1)),
* List(('b', 2)),
* List(('a', 1), ('b', 2)),
* List(('a', 2), ('b', 2))
* )
*
* Note that the order of the occurrence list subsets does not matter -- the subsets
* in the example above could have been displayed in some other order.
*/
  def combinations(occurrences: Occurrences): List[Occurrences] = {
    if (occurrences.isEmpty) List(List())
    else {
      val (char, maxCount) = occurrences.head
      (for {
        rest <- combinations(occurrences.tail)
        count <- 1 to maxCount
        pair = (char, count)
      } yield List(rest, pair :: rest, List(pair))).flatten.distinct
    }
  }
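  // The generator above yields each subset several times (e.g. `rest` on its own and
  // `List(pair)` both reproduce smaller subsets), which is why `.distinct` is required.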
  // Per-pair step of `subtract` below: None means the pair is fully consumed by `y`.
  def sub(a: (Char, Int), y: Occurrences): Option[(Char, Int)] = {
    val p = y.filter(o => o._1 == a._1)
    if (p.isEmpty) Some(a)
    else if (a._2 > p.head._2) Some((a._1, a._2 - p.head._2))
    else None
  }
/** Subtracts occurrence list `y` from occurrence list `x`.
*
* The precondition is that the occurrence list `y` is a subset of
* the occurrence list `x` -- any character appearing in `y` must
* appear in `x`, and its frequency in `y` must be smaller or equal
* than its frequency in `x`.
*
* Note: the resulting value is an occurrence - meaning it is sorted
* and has no zero-entries.
*/
  def subtract(x: Occurrences, y: Occurrences): Occurrences =
    for {
      a <- x
      p = y.filter(o => o._1 == a._1)
      if p.isEmpty || a._2 > p.head._2
    } yield if (p.isEmpty) a else (a._1, a._2 - p.head._2)
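  // A quick illustration (not from the original source):
  //   subtract(List(('a', 2), ('b', 1)), List(('a', 1))) == List(('a', 1), ('b', 1))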
  def matchChar(v1: (Char, Int), v2: Char): Boolean = v1._1 == v2
  def isWordSafe(total: Occurrences, wordOccurrences: Occurrences): Boolean = {
    val list = for {
      (wordChar, count) <- wordOccurrences
      filtered = total.filter(matchChar(_, wordChar))
    } yield !filtered.isEmpty
    !list.contains(false)
  }
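  // Note: `isWordSafe` only checks that every character of the word occurs somewhere in
  // `total`; it does not compare multiplicities, and it is not used by `sentenceAnagrams`.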
/** Returns a list of all anagram sentences of the given sentence.
*
* An anagram of a sentence is formed by taking the occurrences of all the characters of
* all the words in the sentence, and producing all possible combinations of words with those characters,
* such that the words have to be from the dictionary.
*
* The number of words in the sentence and its anagrams does not have to correspond.
* For example, the sentence `List("I", "love", "you")` is an anagram of the sentence `List("You", "olive")`.
*
* Also, two sentences with the same words but in a different order are considered two different anagrams.
* For example, sentences `List("You", "olive")` and `List("olive", "you")` are different anagrams of
* `List("I", "love", "you")`.
*
* Here is a full example of a sentence `List("Yes", "man")` and its anagrams for our dictionary:
*
* List(
* List(en, as, my),
* List(en, my, as),
* List(man, yes),
* List(men, say),
* List(as, en, my),
* List(as, my, en),
* List(sane, my),
* List(Sean, my),
* List(my, en, as),
* List(my, as, en),
* List(my, sane),
* List(my, Sean),
* List(say, men),
* List(yes, man)
* )
*
* The different sentences do not have to be output in the order shown above - any order is fine as long as
* all the anagrams are there. Every returned word has to exist in the dictionary.
*
* Note: in case that the words of the sentence are in the dictionary, then the sentence is the anagram of itself,
* so it has to be returned in this list.
*
* Note: There is only one anagram of an empty sentence.
*/
  def sentenceAnagrams(sentence: Sentence): List[Sentence] =
    sentenceAnagramsOccurrence(sentenceOccurrences(sentence))

  def appendListBeforeSentence(lista: List[String], b: List[Sentence]): List[Sentence] =
    for {
      word <- lista
      singleSentence <- b
    } yield word :: singleSentence

  def sentenceAnagramsOccurrence(sentenceOccurrences: Occurrences): List[Sentence] = {
    if (sentenceOccurrences.isEmpty) List(List())
    else {
      for {
        wordList <- combinations(sentenceOccurrences).map(dictionaryByOccurrences).filterNot(_.isEmpty)
        rest = sentenceAnagramsOccurrence(subtract(sentenceOccurrences, wordOccurrences(wordList.head)))
        tail <- rest
        word <- wordList
      } yield word :: tail
    }
  }
}
| samsol/FunProg-scala | forcomp/src/main/scala/forcomp/Anagrams.scala | Scala | agpl-3.0 | 8,577 |
package scala.scalajs.runtime
import scala.scalajs.js
/** Information about link-time configuration of Scala.js. */
@js.native
trait LinkingInfo extends js.Object {
/** Semantics configuration. */
val semantics: LinkingInfo.Semantics = js.native
/** Whether we are assuming ECMAScript 6 support or not. */
val assumingES6: Boolean = js.native
}
object LinkingInfo {
/** Semantics configuration. */
@js.native
trait Semantics extends js.Object {
/** Compliance level of asInstanceOfs. */
val asInstanceOfs: Int = js.native
/** Compliance level of moduleInit. */
val moduleInit: Int = js.native
/** Whether floats have strict semantics. */
val strictFloats: Boolean = js.native
}
object Semantics {
final val Compliant = 0
final val Fatal = 1
final val Unchecked = 2
}
}
| CapeSepias/scala-js | library/src/main/scala/scala/scalajs/runtime/LinkingInfo.scala | Scala | bsd-3-clause | 832 |
package com.twitter.finagle.util
import com.twitter.finagle.{Stack, StackBuilder, Stackable, param, stack}
import com.twitter.util.registry.{Entry, GlobalRegistry, SimpleRegistry}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
case class TestParam(p1: Int) {
def mk() = (this, TestParam.param)
}
object TestParam {
implicit val param = Stack.Param(TestParam(1))
}
case class TestParam2(p2: Int) {
def mk() = (this, TestParam2.param)
}
object TestParam2 {
implicit val param = Stack.Param(TestParam2(1))
}
@RunWith(classOf[JUnitRunner])
class StackRegistryTest extends FunSuite {
val headRole = Stack.Role("head")
val nameRole = Stack.Role("name")
val param1 = TestParam(999)
def newStack() = {
val stack = new StackBuilder(Stack.Leaf(new Stack.Head {
def role: Stack.Role = headRole
def description: String = "the head!!"
def parameters: Seq[Stack.Param[_]] = Seq(TestParam2.param)
}, List(1, 2, 3, 4)))
val stackable: Stackable[List[Int]] = new Stack.Module1[TestParam, List[Int]] {
def make(p: TestParam, l: List[Int]): List[Int] = p.p1 :: l
val description: String = "description"
val role: Stack.Role = nameRole
}
stack.push(stackable)
stack.result
}
test("StackRegistry registryPrefix includes expected keys") {
new StackRegistry {
def registryName: String = "reg_name"
// we run the test inside a subclass to have access to the protected
// `registryPrefix` method.
private val params = Stack.Params.empty +
param.Label("a_label") +
param.ProtocolLibrary("a_protocol_lib")
private val entry = StackRegistry.Entry("an_addr", stack.nilStack, params)
private val prefix = registryPrefix(entry)
assert(prefix ==
Seq("reg_name", "a_protocol_lib", "a_label", "an_addr"))
}
}
test("StackRegistry should register stacks and params properly") {
val reg = new StackRegistry { def registryName: String = "test" }
val stk = newStack()
val params = Stack.Params.empty + param1 + param.Label("foo") + param.ProtocolLibrary("qux")
val simple = new SimpleRegistry()
GlobalRegistry.withRegistry(simple) {
reg.register("bar", stk, params)
val expected = {
Set(
Entry(Seq("test", "qux", "foo", "bar", "name", "p1"), "999"),
Entry(Seq("test", "qux", "foo", "bar", "head", "p2"), "1")
)
}
assert(GlobalRegistry.get.toSet == expected)
}
}
test("StackRegistry should unregister stacks and params properly") {
val reg = new StackRegistry { def registryName: String = "test" }
val stk = newStack()
val params = Stack.Params.empty + param1 + param.Label("foo") + param.ProtocolLibrary("qux")
val simple = new SimpleRegistry()
GlobalRegistry.withRegistry(simple) {
reg.register("bar", stk, params)
val expected = {
Set(
Entry(Seq("test", "qux", "foo", "bar", "name", "p1"), "999"),
Entry(Seq("test", "qux", "foo", "bar", "head", "p2"), "1")
)
}
assert(GlobalRegistry.get.toSet == expected)
reg.unregister("bar", stk, params)
assert(GlobalRegistry.get.toSet.isEmpty)
}
}
test("StackRegistry keeps track of the number of GlobalRegistry entries it enters") {
val reg = new StackRegistry { def registryName: String = "test" }
val stk = newStack()
val params = Stack.Params.empty + param1 + param.Label("foo") + param.ProtocolLibrary("qux")
val simple = new SimpleRegistry()
GlobalRegistry.withRegistry(simple) {
reg.register("bar", stk, params)
assert(GlobalRegistry.get.size == reg.size)
reg.unregister("bar", stk, params)
assert(GlobalRegistry.get.size == reg.size)
}
}
test("Duplicates are tracked") {
val reg = new StackRegistry { def registryName: String = "test" }
val stk = newStack()
val name = "aname"
reg.register("addr1", stk, Stack.Params.empty + param.Label(name))
assert(reg.registeredDuplicates.isEmpty)
reg.register("addr2", stk, Stack.Params.empty + param.Label(name))
assert(reg.registeredDuplicates.size == 1)
reg.register("addr3", stk, Stack.Params.empty + param.Label("somethingelse"))
assert(reg.registeredDuplicates.size == 1)
}
}
| spockz/finagle | finagle-core/src/test/scala/com/twitter/finagle/util/StackRegistryTest.scala | Scala | apache-2.0 | 4,341 |
package fpscala.c03
import fpscala.datastructures.{List => FpList, Nil => FpNil}
import org.scalatest.{FlatSpec, Matchers}
class Exercise06Spec extends FlatSpec with Matchers {
"append" should "add new elements to the end of a List" in {
Exercise06.append(6, FpList(1, 2, 3)) should equal(FpList(1, 2, 3, 6))
Exercise06.append(6, FpNil) should equal(FpList(6))
}
"init" should "drop the last element of a list" in {
Exercise06.init(FpList(1, 2, 3)) should equal(FpList(1, 2))
Exercise06.init(FpNil) should equal(FpNil)
}
}
| willtaylor/fpscala | src/test/scala/fpscala/c03/Exercise06Spec.scala | Scala | gpl-3.0 | 552 |
import scala.util.Random.nextInt
import scala.sys.error
object Test extends App {
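  // Each method below deliberately contains code after an early `return` that can never
  // execute; the checks at the bottom assert that the reachable path always yields 42.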
def unreachableNormalExit: Int = {
return 42
0
}
def unreachableIf: Int = {
return 42
if (nextInt() % 2 == 0)
0
else
1
}
def unreachableIfBranches: Int = {
if (nextInt() % 2 == 0)
return 42
else
return 42
return 0
}
def unreachableOneLegIf: Int = {
if (nextInt() % 2 == 0)
return 42
return 42
}
def unreachableLeftBranch: Int = {
val result = if (nextInt() % 2 == 0)
return 42
else
42
return result
}
def unreachableRightBranch: Int = {
val result = if (nextInt() % 2 == 0)
42
else
return 42
return result
}
def unreachableTryCatchFinally: Int = {
return 42
try {
return 0
} catch {
case x: Throwable => return 1
} finally {
return 2
}
return 3
}
def unreachableAfterTry: Int = {
try {
return 42
} catch {
case x: Throwable => return 2
}
return 3
}
def unreachableAfterCatch: Int = {
try {
error("haha")
} catch {
case x: Throwable => return 42
}
return 3
}
def unreachableAfterFinally: Int = {
try {
return 1
} catch {
case x: Throwable => return 2
} finally {
return 42
}
return 3
}
def unreachableSwitch: Int = {
return 42
val x = nextInt() % 2
x match {
case 0 => return 0
case 1 => return 1
case -1 => return 2
}
3
}
def unreachableAfterSwitch: Int = {
val x = nextInt() % 2
x match {
case 0 => return 42
case 1 => return 41 + x
case -1 => return 43 + x
}
2
}
def check(f: Int) = assert(f == 42, s"Expected 42 but got $f")
check(unreachableNormalExit)
check(unreachableIf)
check(unreachableIfBranches)
check(unreachableOneLegIf)
check(unreachableLeftBranch)
check(unreachableRightBranch)
check(unreachableTryCatchFinally)
check(unreachableAfterTry)
check(unreachableAfterCatch)
check(unreachableAfterFinally)
check(unreachableSwitch)
check(unreachableAfterSwitch)
}
| lampepfl/dotty | tests/run/unreachable.scala | Scala | apache-2.0 | 2,171 |
package org.openurp.edu.eams
import org.beangle.commons.inject.bind.AbstractBindModule
import org.openurp.edu.base.Course.web.action.CourseAction
import org.openurp.edu.base.Course.web.action.CourseSearchAction
import org.openurp.edu.eams.teach.lesson.helper.LessonSearchHelper
import org.openurp.edu.eams.teach.web.action.code.ManageAction
class TeachWebModule extends AbstractBindModule {
protected override def doBinding() {
bind(classOf[org.openurp.edu.eams.web.action.api.CourseAction])
bind(classOf[org.openurp.edu.eams.web.action.api.AdminclassAction])
bind(classOf[org.openurp.edu.eams.web.action.api.NormalclassAction])
bind(classOf[org.openurp.edu.eams.web.action.api.ProgramAction])
bind(classOf[CourseAction])
bind(classOf[CourseSearchAction])
bind("lessonSearchHelper", classOf[LessonSearchHelper])
bind(classOf[ManageAction])
}
}
| openurp/edu-eams-webapp | web/src/main/scala/org/openurp/edu/eams/TeachWebModule.scala | Scala | gpl-3.0 | 885 |
package scala.meta.internal.semanticdb.scalac
import java.io._
import java.net.URI
import scala.compat.Platform.EOL
import scala.tools.nsc.Phase
import scala.tools.nsc.plugins.PluginComponent
import scala.util.control.NonFatal
import scala.meta.internal.{semanticdb => s}
trait SemanticdbPipeline extends SemanticdbOps { self: SemanticdbPlugin =>
implicit class XtensionURI(uri: URI) { def toFile: File = new File(uri) }
implicit class XtensionUnit(unit: g.CompilationUnit) {
def isIgnored: Boolean = {
val matchesExtension = {
val fileName = unit.source.file.name
fileName.endsWith(".scala") || fileName.endsWith(".sc")
}
val matchesFilter = {
Option(unit.source.file)
.flatMap(f => Option(f.file))
.map(f => config.fileFilter.matches(f.getAbsolutePath))
.getOrElse(true)
}
!matchesExtension || !matchesFilter
}
}
def handleCrash(unit: Option[g.CompilationUnit]): PartialFunction[Throwable, Unit] = {
case NonFatal(ex) =>
val writer = new StringWriter()
val culprit = unit.map(unit => " for " + unit.source.file.path).getOrElse("")
writer.write(s"failed to generate semanticdb$culprit:$EOL")
ex.printStackTrace(new PrintWriter(writer))
val msg = writer.toString
import scala.meta.internal.semanticdb.scalac.FailureMode._
config.failures match {
case Error => global.reporter.error(g.NoPosition, msg)
case Warning => global.reporter.warning(g.NoPosition, msg)
case Info => global.reporter.info(g.NoPosition, msg, force = true)
case Ignore => // do nothing.
}
}
object SemanticdbTyperComponent extends PluginComponent {
val global: SemanticdbPipeline.this.global.type = SemanticdbPipeline.this.global
val runsAfter = List("typer")
override val runsRightAfter = Some("typer")
val phaseName = "semanticdb-typer"
override val description = "compute and persist SemanticDB after typer"
def newPhase(_prev: Phase) = new ComputeSemanticdbPhase(_prev)
class ComputeSemanticdbPhase(prev: Phase) extends StdPhase(prev) {
override def apply(unit: g.CompilationUnit): Unit = {
try {
if (unit.isIgnored) return
validateCompilerState()
val sdoc = unit.toTextDocument
sdoc.save(config.targetroot)
} catch handleCrash(Some(unit))
}
private def synchronizeSourcesAndSemanticdbFiles(): Unit = {
RemoveOrphanSemanticdbFiles.process(config)
}
private def synchronizeSourcesAndSemanticdbIndex(): Unit = {
index.save(config.targetroot, config.sourceroot)
}
override def run(): Unit = {
try {
timestampComputeStarted = System.nanoTime()
super.run()
synchronizeSourcesAndSemanticdbFiles()
synchronizeSourcesAndSemanticdbIndex()
timestampComputeFinished = System.nanoTime()
idCache.clear()
symbolCache.clear()
} catch handleCrash(None)
}
}
}
object SemanticdbJvmComponent extends PluginComponent {
val global: SemanticdbPipeline.this.global.type = SemanticdbPipeline.this.global
val runsAfter = List("jvm")
override val runsRightAfter = Some("jvm")
val phaseName = "semanticdb-jvm"
override val description =
"compute and persist additional SemanticDB messages created after typer"
def newPhase(_prev: Phase) = new PersistSemanticdbPhase(_prev)
class PersistSemanticdbPhase(prev: Phase) extends StdPhase(prev) {
override def apply(unit: g.CompilationUnit): Unit = {
if (unit.isIgnored) return
try {
if (config.diagnostics.isOn) {
val diagnostics = unit.reportedDiagnostics(Map.empty)
if (diagnostics.nonEmpty) {
val sdoc = s.TextDocument(
schema = s.Schema.SEMANTICDB4,
uri = unit.source.toUri,
language = s.Language.SCALA,
diagnostics = diagnostics
)
sdoc.append(config.targetroot)
}
}
} catch handleCrash(Some(unit))
}
override def run(): Unit = {
try {
timestampPersistStarted = System.nanoTime()
super.run()
timestampPersistFinished = System.nanoTime()
reportSemanticdbSummary()
} catch handleCrash(None)
}
}
}
private val timestampPluginCreated = System.nanoTime()
private var timestampComputeStarted = -1L
private var timestampComputeFinished = -1L
private var timestampPersistStarted = -1L
private var timestampPersistFinished = -1L
private def reportSemanticdbSummary(): Unit = {
def createdSemanticdbsMessage = {
val howMany = g.currentRun.units.length
val what = if (howMany == 1) "file" else "files"
var where = config.targetroot.toString
where = where.stripSuffix("/").stripSuffix("/.")
where = where + "/META-INF/semanticdb"
s"Created $howMany semanticdb $what in $where"
}
def performanceOverheadMessage = {
val computeMs = (timestampComputeFinished - timestampComputeStarted) / 1000000
val persistMs = (timestampPersistFinished - timestampPersistStarted) / 1000000
val semanticdbMs = computeMs + persistMs
val totalMs = (timestampPersistFinished - timestampPluginCreated) / 1000000
val overhead = s"$computeMs+$persistMs=${semanticdbMs}ms performance overhead"
val pct = Math.floor(1.0 * semanticdbMs / totalMs * 100).toInt
s"At the cost of $overhead ($pct% of compilation time)"
}
if (config.profiling.isOn) {
println(createdSemanticdbsMessage)
println(performanceOverheadMessage)
}
}
}
| MasseGuillaume/scalameta | semanticdb/scalac/library/src/main/scala/scala/meta/internal/semanticdb/scalac/SemanticdbPipeline.scala | Scala | bsd-3-clause | 5,753 |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util.codec
import _root_.sjsonnew.{ deserializationError, Builder, JsonFormat, Unbuilder }
import xsbti.Position
import java.util.Optional
trait PositionFormats { self: sjsonnew.BasicJsonProtocol =>
implicit lazy val PositionFormat: JsonFormat[Position] = new JsonFormat[Position] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Position = {
jsOpt match {
case Some(js) =>
unbuilder.beginObject(js)
val line0 = unbuilder.readField[Optional[java.lang.Integer]]("line")
val lineContent0 = unbuilder.readField[String]("lineContent")
val offset0 = unbuilder.readField[Optional[java.lang.Integer]]("offset")
val pointer0 = unbuilder.readField[Optional[java.lang.Integer]]("pointer")
val pointerSpace0 = unbuilder.readField[Optional[String]]("pointerSpace")
val sourcePath0 = unbuilder.readField[Optional[String]]("sourcePath")
val sourceFile0 = unbuilder.readField[Optional[java.io.File]]("sourceFile")
val startOffset0 = unbuilder.readField[Optional[java.lang.Integer]]("startOffset")
val endOffset0 = unbuilder.readField[Optional[java.lang.Integer]]("endOffset")
val startLine0 = unbuilder.readField[Optional[java.lang.Integer]]("startLine")
val startColumn0 = unbuilder.readField[Optional[java.lang.Integer]]("startColumn")
val endLine0 = unbuilder.readField[Optional[java.lang.Integer]]("endLine")
val endColumn0 = unbuilder.readField[Optional[java.lang.Integer]]("endColumn")
unbuilder.endObject()
new Position() {
override val line = line0
override val lineContent = lineContent0
override val offset = offset0
override val pointer = pointer0
override val pointerSpace = pointerSpace0
override val sourcePath = sourcePath0
override val sourceFile = sourceFile0
override val startOffset = startOffset0
override val endOffset = endOffset0
override val startLine = startLine0
override val startColumn = startColumn0
override val endLine = endLine0
override val endColumn = endColumn0
}
case None =>
deserializationError("Expected JsObject but found None")
}
}
override def write[J](obj: Position, builder: Builder[J]): Unit = {
builder.beginObject()
builder.addField("line", obj.line)
builder.addField("lineContent", obj.lineContent)
builder.addField("offset", obj.offset)
builder.addField("pointer", obj.pointer)
builder.addField("pointerSpace", obj.pointerSpace)
builder.addField("sourcePath", obj.sourcePath)
builder.addField("sourceFile", obj.sourceFile)
builder.addField("startOffset", obj.startOffset)
builder.addField("endOffset", obj.endOffset)
builder.addField("startLine", obj.startLine)
builder.addField("startColumn", obj.startColumn)
builder.addField("endLine", obj.endLine)
builder.addField("endColumn", obj.endColumn)
builder.endObject()
}
}
}
| sbt/sbt | internal/util-logging/src/main/scala/sbt/internal/util/codec/PositionFormats.scala | Scala | apache-2.0 | 3,320 |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.cosmos.spark
import com.azure.cosmos.CosmosException
import com.azure.cosmos.implementation.HttpConstants
import com.azure.cosmos.implementation.HttpConstants.SubStatusCodes
import reactor.core.scala.publisher.SMono
private object Exceptions {
def isResourceExistsException(statusCode: Int): Boolean = {
statusCode == CosmosConstants.StatusCodes.Conflict
}
def isPreconditionFailedException(statusCode: Int): Boolean = {
statusCode == CosmosConstants.StatusCodes.PreconditionFailed
}
def canBeTransientFailure(statusCode: Int, subStatusCode: Int): Boolean = {
    // TODO: moderakh SDK should only throw 503 and not 410,
    // however due to a bug in the core SDK we currently may throw 410 on write;
    // once that's fixed, remove GONE here
statusCode == CosmosConstants.StatusCodes.Gone ||
statusCode == CosmosConstants.StatusCodes.ServiceUnavailable ||
statusCode == CosmosConstants.StatusCodes.InternalServerError ||
statusCode == CosmosConstants.StatusCodes.Timeout ||
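      // Note: sub-status 1002 on a 404 is understood here to be the Cosmos
      // "read session not available" case (cf. the imported HttpConstants.SubStatusCodes).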
statusCode == CosmosConstants.StatusCodes.NotFound && subStatusCode == 1002
}
def isTimeout(statusCode: Int): Boolean = {
statusCode == CosmosConstants.StatusCodes.Timeout
}
def isNotFoundException(throwable: Throwable): Boolean = {
throwable match {
case cosmosException: CosmosException =>
isNotFoundExceptionCore(cosmosException.getStatusCode, cosmosException.getSubStatusCode)
case _ => false
}
}
def isNotFoundExceptionCore(statusCode: Int, subStatusCode: Int): Boolean = {
statusCode == CosmosConstants.StatusCodes.NotFound &&
subStatusCode == 0
}
}
| Azure/azure-sdk-for-java | sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/Exceptions.scala | Scala | mit | 1,751 |
object p1 {
class B
object B
class C extends java.io.Serializable
object C
type D = DD
object D
}
package object p2 {
class B
object B
class C extends java.io.Serializable
object C
type D = DD
object D
}
class DD extends java.io.Serializable
object Test {
def main(args: Array[String]): Unit = {
    // This is the behaviour that was intended and was unchanged by this commit.
assert(!(p1.B : Object).isInstanceOf[scala.Serializable])
assert(p1.C.isInstanceOf[scala.Serializable])
assert(!(p1.D: Object).isInstanceOf[scala.Serializable])
assert(!(p2.B : Object).isInstanceOf[scala.Serializable])
assert(p2.C.isInstanceOf[scala.Serializable])
// this behaviour was different in 2.12.1 and earlier due to a bug
// in companionSymbolOf
assert(!(p2.D: Object).isInstanceOf[scala.Serializable])
}
}
| martijnhoekstra/scala | test/files/run/SD-290.scala | Scala | apache-2.0 | 864 |
package work.martins.simon.expect.fluent
import work.martins.simon.expect.StringUtils._
import work.martins.simon.expect.{Settings, core}
/**
* @define type Expect
*/
case class Expect[R](command: Seq[String], defaultValue: R, settings: Settings = Settings.fromConfig()) extends Expectable[R] {
def this(command: String, defaultValue: R, settings: Settings) = this(splitBySpaces(command), defaultValue, settings)
def this(command: String, defaultValue: R) = this(command, defaultValue, Settings.fromConfig())
require(command.nonEmpty, "Expect must have a command to run.")
protected val expectableParent: Expect[R] = this
protected var expectBlocks = Seq.empty[ExpectBlock[R]]
override def expect: ExpectBlock[R] = {
val block = ExpectBlock[R](this)
expectBlocks :+= block
block
}
override def addExpectBlock(f: Expect[R] => ExpectBlock[R]): Expect[R] = {
f(this)
this
}
/**
* @return the core.Expect equivalent of this fluent.Expect.
*/
def toCore: core.Expect[R] = new core.Expect[R](command, defaultValue, settings)(expectBlocks.map(_.toCore):_*)
override def toString: String =
s"""Expect:
|\\tCommand: $command
|\\tDefaultValue: $defaultValue
|\\tSettings: $settings
|${expectBlocks.mkString("\\n").indent()}
""".stripMargin
override def equals(other: Any): Boolean = other match {
case that: Expect[R] =>
command == that.command &&
defaultValue == that.defaultValue &&
settings == that.settings &&
expectBlocks == that.expectBlocks
case _ => false
}
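  // hashCode folds over the same four fields compared in equals, keeping the two consistent.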
override def hashCode(): Int = Seq(command, defaultValue, settings, expectBlocks).map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
}
| Lasering/scala-expect | src/main/scala/work/martins/simon/expect/fluent/Expect.scala | Scala | mit | 1,738 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.protocol.v5.handler
import java.io.OutputStream
import java.util.concurrent.atomic.AtomicInteger
import akka.actor._
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.kernel.api.{FactoryMethods, Kernel}
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content._
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.kernel.protocol.v5Test._
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}
import play.api.libs.json.Json
import org.mockito.Mockito._
import org.mockito.Matchers._
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent._
import test.utils.MaxAkkaTestTimeout
class ExecuteRequestHandlerSpec extends TestKit(
ActorSystem(
"ExecuteRequestHandlerSpec",
None,
Some(org.apache.toree.Main.getClass.getClassLoader)
)
) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar
with BeforeAndAfter {
private var mockActorLoader: ActorLoader = _
private var mockFactoryMethods: FactoryMethods = _
private var mockKernel: Kernel = _
private var mockOutputStream: OutputStream = _
private var handlerActor: ActorRef = _
private var kernelMessageRelayProbe: TestProbe = _
private var executeRequestRelayProbe: TestProbe = _
private var statusDispatchProbe: TestProbe = _
before {
mockActorLoader = mock[ActorLoader]
mockFactoryMethods = mock[FactoryMethods]
mockKernel = mock[Kernel]
mockOutputStream = mock[OutputStream]
doReturn(mockFactoryMethods).when(mockKernel)
.factory(any[KernelMessage], any[KMBuilder])
doReturn(mockOutputStream).when(mockFactoryMethods)
.newKernelOutputStream(anyString(), anyBoolean())
// Add our handler and mock interpreter to the actor system
handlerActor = system.actorOf(Props(
classOf[ExecuteRequestHandler],
mockActorLoader,
mockKernel
))
kernelMessageRelayProbe = new TestProbe(system)
when(mockActorLoader.load(SystemActorType.KernelMessageRelay))
.thenReturn(system.actorSelection(kernelMessageRelayProbe.ref.path.toString))
executeRequestRelayProbe = new TestProbe(system)
when(mockActorLoader.load(SystemActorType.ExecuteRequestRelay))
.thenReturn(system.actorSelection(executeRequestRelayProbe.ref.path.toString))
statusDispatchProbe = new TestProbe(system)
when(mockActorLoader.load(SystemActorType.StatusDispatch))
.thenReturn(system.actorSelection(statusDispatchProbe.ref.path.toString))
}
/**
* This method simulates the interpreter passing back an
* execute result and reply.
*/
def replyToHandlerWithOkAndResult() = {
// This stubs the behaviour of the interpreter executing code
val expectedClass = classOf[(ExecuteRequest, KernelMessage, OutputStream)]
executeRequestRelayProbe.expectMsgClass(expectedClass)
executeRequestRelayProbe.reply((
ExecuteReplyOk(1, Some(Payloads()), Some(UserExpressions())),
ExecuteResult(1, Data("text/plain" -> "resulty result"), Metadata())
))
}
def replyToHandlerWithOk() = {
// This stubs the behaviour of the interpreter executing code
val expectedClass = classOf[(ExecuteRequest, KernelMessage, OutputStream)]
executeRequestRelayProbe.expectMsgClass(expectedClass)
executeRequestRelayProbe.reply((
ExecuteReplyOk(1, Some(Payloads()), Some(UserExpressions())),
ExecuteResult(1, Data("text/plain" -> ""), Metadata())
))
}
/**
* This method simulates the interpreter passing back an
* execute result and reply
*/
def replyToHandlerWithErrorAndResult() = {
// This stubs the behaviour of the interpreter executing code
val expectedClass = classOf[(ExecuteRequest, KernelMessage, OutputStream)]
executeRequestRelayProbe.expectMsgClass(expectedClass)
executeRequestRelayProbe.reply((
ExecuteReplyError(1, Some(""), Some(""), Some(Nil)),
ExecuteResult(1, Data("text/plain" -> "resulty result"), Metadata())
))
}
describe("ExecuteRequestHandler( ActorLoader )") {
describe("#receive( KernelMessage ) when interpreter replies") {
it("should send an execute result message if the result is not empty") {
handlerActor ! MockExecuteRequestKernelMessage
replyToHandlerWithOkAndResult()
kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
case KernelMessage(_, _, header, _, _, _) =>
header.msg_type == ExecuteResult.toTypeString
}
}
it("should not send an execute result message if there is no result") {
handlerActor ! MockExecuteRequestKernelMessage
replyToHandlerWithOk()
kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
case KernelMessage(_, _, header, _, _, _) =>
header.msg_type != ExecuteResult.toTypeString
}
}
it("should send an execute reply message") {
handlerActor ! MockExecuteRequestKernelMessage
replyToHandlerWithOkAndResult()
kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
case KernelMessage(_, _, header, _, _, _) =>
header.msg_type == ExecuteResult.toTypeString
}
}
it("should send a status idle message after the reply and result") {
handlerActor ! MockExecuteRequestKernelMessage
replyToHandlerWithOkAndResult()
val msgCount = new AtomicInteger(0)
var statusMsgNum = -1
var statusReceived = false
val f1 = Future {
kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
case KernelMessage(_, _, header, _, _, _) =>
if (header.msg_type == ExecuteResult.toTypeString &&
!statusReceived)
msgCount.incrementAndGet()
else if (header.msg_type == ExecuteReply.toTypeString &&
!statusReceived)
msgCount.incrementAndGet()
statusReceived || (msgCount.get() >= 2)
}
}
val f2 = Future {
statusDispatchProbe.fishForMessage(MaxAkkaTestTimeout) {
case (status, header) =>
          if (status == KernelStatusIdle.toString)
            statusReceived = true
          statusMsgNum = msgCount.get()
          statusReceived || (msgCount.get() >= 2)
}
}
val fs = f1.zip(f2)
Await.ready(fs, 3 * MaxAkkaTestTimeout)
statusMsgNum should equal(2)
}
it("should send an execute input message") {
handlerActor ! MockExecuteRequestKernelMessage
kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
case KernelMessage(_, _, header, _, _, _) =>
header.msg_type == ExecuteInput.toTypeString
}
}
it("should send a message with ids equal to the incoming " +
"KernelMessage's ids") {
handlerActor ! MockExecuteRequestKernelMessage
kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
case KernelMessage(ids, _, _, _, _, _) =>
ids == MockExecuteRequestKernelMessage.ids
}
}
it("should send a message with parent header equal to the incoming " +
"KernelMessage's header") {
handlerActor ! MockExecuteRequestKernelMessage
kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
case KernelMessage(_, _, _, parentHeader, _, _) =>
parentHeader == MockExecuteRequestKernelMessage.header
}
}
// TODO: Investigate if this is still relevant at all
// it("should send a status busy and idle message") {
// handlerActor ! MockExecuteRequestKernelMessage
// replyToHandlerWithOkAndResult()
// var busy = false
// var idle = false
//
// statusDispatchProbe.receiveWhile(100.milliseconds) {
// case Tuple2(status: KernelStatusType, header: Header)=>
// if(status == KernelStatusType.Busy)
// busy = true
// if(status == KernelStatusType.Idle)
// idle = true
// }
//
// idle should be (true)
// busy should be (true)
// }
}
}
// Testing error timeout for interpreter future
describe("ExecuteRequestHandler( ActorLoader )") {
describe("#receive( KernelMessage with bad JSON content )"){
it("should respond with an execute_reply with status error") {
handlerActor ! MockKernelMessageWithBadExecuteRequest
kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
// Only mark as successful if this specific message was received
case KernelMessage(_, _, header, _, _, contentString)
if header.msg_type == ExecuteReply.toTypeString =>
val reply = Json.parse(contentString).as[ExecuteReply]
reply.status == "error"
case _ => false
}
}
it("should send error message to relay") {
handlerActor ! MockKernelMessageWithBadExecuteRequest
kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
// Only mark as successful if this specific message was received
case KernelMessage(_, _, header, _, _, _)
if header.msg_type == ErrorContent.toTypeString => true
case _ => false
}
}
// TODO: Investigate if this is still relevant at all
// it("should send a status idle message") {
// handlerActor ! MockKernelMessageWithBadExecuteRequest
// var busy = false
// var idle = false
//
// statusDispatchProbe.receiveWhile(100.milliseconds) {
// case Tuple2(status: KernelStatusType, header: Header)=>
// if(status == KernelStatusType.Busy)
// busy = true
// if(status == KernelStatusType.Idle)
// idle = true
// }
//
// idle should be (true)
// busy should be (false)
// }
}
}
}
| apache/incubator-toree | kernel/src/test/scala/org/apache/toree/kernel/protocol/v5/handler/ExecuteRequestHandlerSpec.scala | Scala | apache-2.0 | 10,920 |
/*
* AudioNodeImpl.scala
* (Cord)
*
* Copyright (c) 2015-2020 Hanns Holger Rutz.
*
* This software is published under the GNU Lesser General Public License v2.1+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.cord
package impl
trait AudioNodeImpl extends NodeImpl {
node: Node =>
private val dspL = parent.dsp.addListener {
case true => dspStarted()
case false => dspStopped()
}
protected def dspStarted(): Unit
protected def dspStopped(): Unit
  /** Should be called by subclasses after they have finished initialization. */
protected def initDSP(): Unit = if (parent.dsp.active) dspStarted()
override def dispose(): Unit = {
if (parent.dsp.active) dspStopped()
super.dispose()
parent.dsp.removeListener(dspL)
}
}
| Sciss/Cord | src/main/scala/de/sciss/cord/impl/AudioNodeImpl.scala | Scala | lgpl-2.1 | 828 |
package example
import java.util.NoSuchElementException
object Lists extends App {
/**
* This method computes the sum of all elements in the list xs. There are
* multiple techniques that can be used for implementing this method, and
* you will learn during the class.
*
* For this example assignment you can use the following methods in class
* `List`:
*
* - `xs.isEmpty: Boolean` returns `true` if the list `xs` is empty
* - `xs.head: Int` returns the head element of the list `xs`. If the list
* is empty an exception is thrown
   * - `xs.tail: List[Int]` returns the tail of the list `xs`, i.e. the
* list `xs` without its `head` element
*
* ''Hint:'' instead of writing a `for` or `while` loop, think of a recursive
* solution.
*
* @param xs A list of natural numbers
* @return The sum of all elements in `xs`
*/
  def sum(xs: List[Int]): Int = {
    @annotation.tailrec
    def loop(acc: Int, xs: List[Int]): Int = {
      if (xs.isEmpty) acc
      else loop(acc + xs.head, xs.tail)
    }
    loop(0, xs)
  }
/**
* This method returns the largest element in a list of integers. If the
* list `xs` is empty it throws a `java.util.NoSuchElementException`.
*
* You can use the same methods of the class `List` as mentioned above.
*
* ''Hint:'' Again, think of a recursive solution instead of using looping
* constructs. You might need to define an auxiliary method.
*
* @param xs A list of natural numbers
* @return The largest element in `xs`
* @throws java.util.NoSuchElementException if `xs` is an empty list
*/
def max(xs: List[Int]): Int = {
if (xs.isEmpty) throw new NoSuchElementException()
else xs.max
}
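  // A recursive alternative matching the hint in the comment above (a sketch;
  // not used by `max`, which delegates to the standard library):
  private def maxRec(xs: List[Int]): Int = xs match {
    case Nil          => throw new NoSuchElementException()
    case head :: Nil  => head
    case head :: tail => math.max(head, maxRec(tail))
  }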
}
| giovannidoni/Scala-course-1 | example/src/main/scala/example/Lists.scala | Scala | gpl-3.0 | 1,745 |
/*
* Copyright (C) 2016 DANS - Data Archiving and Networked Services ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.multideposit.parser
import java.net.URI
import better.files.File
import nl.knaw.dans.easy.multideposit.PathExplorer.InputPathExplorer
import nl.knaw.dans.easy.multideposit.TestSupportFixture
import org.joda.time.DateTime
class ParserUtilsSpec extends TestSupportFixture {
self =>
private val parser = new ParserUtils with InputPathExplorer {
val multiDepositDir: File = self.multiDepositDir
}
import parser._
"extractExactlyOne" should "find the value for the given rows" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
)
extractExactlyOne(2, Headers.Title, rows).value shouldBe "abc"
}
it should "filter out the blank values" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "ghi", Headers.Description -> "")),
)
extractExactlyOne(2, Headers.Description, rows).value shouldBe "def"
}
it should "fail when the output is empty" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "ghi", Headers.Description -> "jkl")),
)
extractExactlyOne(2, Headers.Creator, rows).invalidValue shouldBe
ParseError(2, "There should be one non-empty value for DC_CREATOR").chained
}
it should "fail when the input contains multiple distinct values for the same columnName" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "ghi", Headers.Description -> "jkl")),
)
extractExactlyOne(2, Headers.Title, rows).invalidValue shouldBe
ParseError(2, "Only one row is allowed to contain a value for the column 'DC_TITLE'. Found: [abc, ghi]").chained
}
it should "succeed when the input contains multiple identical values for the same columnName" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "jkl")),
)
extractExactlyOne(2, Headers.Title, rows).value shouldBe "abc"
}
"extractAtLeastOne" should "find the values for the given rows" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "ghi", Headers.Description -> "jkl")),
)
extractAtLeastOne(2, Headers.Title, rows).value.toList should contain inOrderOnly("abc", "ghi")
}
it should "filter out the blank values" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "ghi", Headers.Description -> "")),
)
extractAtLeastOne(2, Headers.Description, rows).value.toList should contain only "def"
}
it should "fail when the output is empty" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "ghi", Headers.Description -> "jkl")),
)
extractAtLeastOne(2, Headers.Creator, rows).invalidValue shouldBe
ParseError(2, "There should be at least one non-empty value for DC_CREATOR").chained
}
it should "succeed when the input contains multiple identical values for the same columnName" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "jkl")),
)
extractAtLeastOne(2, Headers.Title, rows).value.toList should contain only "abc"
}
"extractAtMostOne" should "find the value for the given rows" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
)
extractAtMostOne(2, Headers.Title, rows).value.value shouldBe "abc"
}
it should "filter out the blank values" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "ghi", Headers.Description -> "")),
)
extractAtMostOne(2, Headers.Description, rows).value.value shouldBe "def"
}
it should "return a None when the output is empty" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "ghi", Headers.Description -> "jkl")),
)
extractAtMostOne(2, Headers.Creator, rows).value shouldBe empty
}
it should "fail when the input contains multiple distinct values for the same columnName" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "ghi", Headers.Description -> "jkl")),
)
extractAtMostOne(2, Headers.Title, rows).invalidValue shouldBe
ParseError(2, "At most one row is allowed to contain a value for the column 'DC_TITLE'. Found: [abc, ghi]").chained
}
it should "succeed when the input contains multiple identical values for the same columnName" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "jkl")),
)
extractAtMostOne(2, Headers.Title, rows).value.value shouldBe "abc"
}
"extractList curried" should "for each row run the given function and collect the results" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(3, Map(Headers.Title -> "ghi", Headers.Description -> "jkl")),
)
extractList(rows)(i => Some(i.rowNum.toValidated)).value should contain inOrderOnly(2, 3)
}
it should "leave out the rows for which the function returns a None" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(3, Map(Headers.Title -> "ghi", Headers.Description -> "jkl")),
)
extractList(rows) {
case DepositRow(rowNum, _) if rowNum % 2 == 0 => Some(rowNum.toValidated)
case _ => None
}.value should contain only 2
}
it should "iterate over all rows and aggregate all errors until the end" in {
val rows = List(
DepositRow(2, Map(Headers.Title -> "abc", Headers.Description -> "def")),
DepositRow(3, Map(Headers.Title -> "ghi", Headers.Description -> "jkl")),
)
extractList(rows)(i => Some(ParseError(i.rowNum, s"foo ${ i.rowNum }").toInvalid))
.invalidValue.toNonEmptyList.toList should contain inOrderOnly(
ParseError(2, "foo 2"),
ParseError(3, "foo 3")
)
}
"checkValidChars" should "succeed with the input value when all characters are valid" in {
checkValidChars("valid-input", 2, Headers.Title).value shouldBe "valid-input"
}
it should "fail when the input contains invalid characters" in {
checkValidChars("#$%", 2, Headers.Title).invalidValue shouldBe
ParseError(2, "The column 'DC_TITLE' contains the following invalid characters: {#, $, %}").chained
}
"date" should "convert the value of the date into the corresponding object" in {
date(2, Headers.Date)("2016-07-30").value shouldBe DateTime.parse("2016-07-30")
}
it should "fail if the value does not represent a date" in {
date(2, Headers.Date)("you can't parse me!").invalidValue shouldBe
ParseError(2, "DCT_DATE value 'you can't parse me!' does not represent a date").chained
}
"url" should "convert the value of a URI into the corresponding object" in {
uri(2, Headers.RelationLink)("http://does.not.exist.dans.knaw.nl/").value shouldBe new URI("http://does.not.exist.dans.knaw.nl/")
}
it should "fail if the value does not represent a URI" in {
uri(2, Headers.RelationLink)("you can't parse me!").invalidValue shouldBe
ParseError(2, "DCX_RELATION_LINK value 'you can't parse me!' is not a valid URI").chained
}
it should "fail if the value does not represent a URI with one of the accepted schemes" in {
uri(2, Headers.RelationLink)("javascript://hello-world").invalidValue shouldBe
ParseError(2, "DCX_RELATION_LINK value 'javascript://hello-world' is a valid URI but doesn't have one of the accepted protocols: {http, https}").chained
}
"missingRequired" should "return a ParseError listing the one missing column" in {
val row = DepositRow(2, Map(Headers.Title -> "1", Headers.Description -> "2", Headers.Subject -> "3", Headers.Temporal -> "4"))
missingRequired(row, Headers.Title, Headers.Description, Headers.Subject, Headers.Temporal, Headers.Language) shouldBe ParseError(2, "Missing value for: DC_LANGUAGE")
}
it should "return a ParseError listing the missing columns" in {
val row = DepositRow(2, Map(Headers.Title -> "1", Headers.Description -> "2", Headers.Subject -> "3", Headers.Temporal -> ""))
missingRequired(row, Headers.Title, Headers.Description, Headers.Subject, Headers.Temporal, Headers.Language) shouldBe ParseError(2, "Missing value(s) for: [DCT_TEMPORAL, DC_LANGUAGE]")
}
it should "throw an IllegalArgumentException if no columns were missing" in {
val row = DepositRow(2, Map(Headers.Title -> "1", Headers.Description -> "2", Headers.Subject -> "3"))
the[IllegalArgumentException] thrownBy missingRequired(row, Headers.Title, Headers.Description, Headers.Subject) should have message "requirement failed: the list of missing elements is supposed to be non-empty"
}
}
| DANS-KNAW/easy-process-sip | src/test/scala/nl.knaw.dans.easy.multideposit/parser/ParserUtilsSpec.scala | Scala | apache-2.0 | 10,237 |
package xscalatest.util
import org.scalatest.Suite
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
trait FutureValues {
this: Suite =>
val defaultAtMost = 10.seconds
implicit class Rendezvous[A](future: Future[A]) {
/**
* Await and return the result of an `future`.
* @see scala.concurrent.Await.result
*/
def rendezvous(atMost: Duration = defaultAtMost): A = Await.result(future, atMost)
}
}
| nokamoto/scalatest-util | src/main/scala/xscalatest/util/FutureValues.scala | Scala | mit | 455 |
package org.hammerlab.guacamole.reads
import org.hammerlab.guacamole.readsets.{SampleId, SampleName}
/**
* An unmapped read. See the [[Read]] trait for field descriptions.
*
*/
case class UnmappedRead(
name: String,
sequence: IndexedSeq[Byte],
baseQualities: IndexedSeq[Byte],
isDuplicate: Boolean,
sampleId: SampleId,
failedVendorQualityChecks: Boolean,
isPaired: Boolean) extends Read {
assert(baseQualities.length == sequence.length)
override val isMapped = false
override def asMappedRead = None
}
| hammerlab/guacamole | src/main/scala/org/hammerlab/guacamole/reads/UnmappedRead.scala | Scala | apache-2.0 | 544 |
/*
* Copyright (C) 2014 GRNET S.A.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gr.grnet.egi
/**
*
*/
package object vmcatcher {
def DEFER(f: ⇒ Unit) = () ⇒ f
def deepScalaToJava[J](t: J): Any = {
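    // Recursively converts nested Scala Maps into java.util.Maps; any other value
    // (including Lists and Seqs) passes through unchanged.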
import scala.collection.JavaConverters._
t match {
      case tMap: Map[_, _] ⇒
        tMap.map { case (k, v) ⇒ deepScalaToJava(k) → deepScalaToJava(v) }.asJava
      case id ⇒ id
}
}
implicit class JsonToStringMap(val json: String) extends AnyVal {
def jsonToStringMap: Map[String, String] = Json.stringMapOfJson(json)
}
}
| grnet/snf-vmcatcher | src/main/scala/gr/grnet/egi/vmcatcher/package.scala | Scala | gpl-3.0 | 1,187 |
import scala.tools.nsc.interpreter.IMain
object Test extends App {
val engine = new IMain.Factory getScriptEngine()
engine.asInstanceOf[IMain].settings.usejavacp.value = true
engine put ("n", 10)
engine eval "1 to n.asInstanceOf[Int] foreach print"
}
| som-snytt/dotty | tests/pending/run/t7843-jsr223-service.scala | Scala | apache-2.0 | 260 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010 Mark Harrah
*/
package xsbt.boot
import scala.collection.immutable.List
import java.io.{File, FileFilter}
import java.net.{URL, URLClassLoader}
object Pre
{
def trimLeading(line: String) =
{
def newStart(i: Int): Int = if(i >= line.length || !Character.isWhitespace(line.charAt(i))) i else newStart(i+1)
line.substring(newStart(0))
}
def isEmpty(line: String) = line.length == 0
def isNonEmpty(line: String) = line.length > 0
def assert(condition: Boolean, msg: => String): Unit = if (!condition) throw new AssertionError(msg)
def assert(condition: Boolean): Unit = assert(condition, "Assertion failed")
def require(condition: Boolean, msg: => String): Unit = if (!condition) throw new IllegalArgumentException(msg)
def error(msg: String): Nothing = throw new BootException(prefixError(msg))
def declined(msg: String): Nothing = throw new BootException(msg)
def prefixError(msg: String): String = "Error during sbt execution: " + msg
def toBoolean(s: String) = java.lang.Boolean.parseBoolean(s)
def toArray[T : ClassManifest](list: List[T]) =
{
val arr = new Array[T](list.length)
def copy(i: Int, rem: List[T]): Unit =
if(i < arr.length)
{
arr(i) = rem.head
copy(i+1, rem.tail)
}
copy(0, list)
arr
}
/* These exist in order to avoid bringing in dependencies on RichInt and ArrayBuffer, among others. */
def concat(a: Array[File], b: Array[File]): Array[File] =
{
val n = new Array[File](a.length + b.length)
java.lang.System.arraycopy(a, 0, n, 0, a.length)
java.lang.System.arraycopy(b, 0, n, a.length, b.length)
n
}
def array(files: File*): Array[File] = toArray(files.toList)
/* Saves creating a closure for default if it has already been evaluated*/
def orElse[T](opt: Option[T], default: T) = if(opt.isDefined) opt.get else default
def wrapNull(a: Array[File]): Array[File] = if(a == null) new Array[File](0) else a
def const[B](b: B): Any => B = _ => b
def strictOr[T](a: Option[T], b: Option[T]): Option[T] = a match { case None => b; case _ => a }
def getOrError[T](a: Option[T], msg: String): T = a match { case None => error(msg); case Some(x) => x }
def orNull[T >: Null](t: Option[T]): T = t match { case None => null; case Some(x) => x }
def getJars(directories: List[File]): Array[File] = toArray(directories.flatMap(directory => wrapNull(directory.listFiles(JarFilter))))
object JarFilter extends FileFilter
{
def accept(file: File) = !file.isDirectory && file.getName.endsWith(".jar")
}
def getMissing(loader: ClassLoader, classes: Iterable[String]): Iterable[String] =
{
def classMissing(c: String) = try { Class.forName(c, false, loader); false } catch { case e: ClassNotFoundException => true }
classes.toList.filter(classMissing)
}
def toURLs(files: Array[File]): Array[URL] = files.map(_.toURI.toURL)
}
| olove/xsbt | launch/src/main/scala/xsbt/boot/Pre.scala | Scala | bsd-3-clause | 2,870 |
package models.dao
import com.google.inject.ImplementedBy
import models.dao.anorm.AnormUserDAO
final case class User(id: Int, email: String, name: String, password: String)
@ImplementedBy(classOf[AnormUserDAO])
trait UserDAO {
def authenticate(email: String, password: String): Option[User]
def findByEmail(email: String): Option[User]
def changePassword(email: String, newPasswd: String): Int
}
| jcranky/lojinha | app/models/dao/User.scala | Scala | gpl-3.0 | 407 |
package dit4c.common
import java.net._
import akka.http.scaladsl._
import akka.stream.scaladsl._
import akka.event.LoggingAdapter
import akka.http.scaladsl.model._
import akka.http.scaladsl.settings.ClientConnectionSettings
import scala.concurrent._
import akka.stream.Client
import akka.http.scaladsl.Http.OutgoingConnection
import akka.http.scaladsl.model.headers.Host
import akka.actor.ActorSystem
import akka.http.scaladsl.Http.OutgoingConnection
import akka.stream.Materializer
import scala.concurrent.duration.FiniteDuration
import akka.http.scaladsl.HttpsConnectionContext
import javax.net.ssl.SSLContext
import akka.stream.EagerClose
object AkkaHttpExtras {
val modernHttpsConnectionContext = new HttpsConnectionContext(
SSLContext.getDefault,
enabledProtocols = Some("TLSv1.2" :: "TLSv1.1" :: Nil))
implicit class Extras(http: HttpExt)(implicit system: ActorSystem) {
def singleResilientRequest(request: HttpRequest,
settings: ClientConnectionSettings,
httpsContext: Option[HttpsConnectionContext],
log: LoggingAdapter)(implicit fm: Materializer): Future[HttpResponse] =
singleResilientRequest(request,
request.uri.authority.host.inetAddresses,
settings, httpsContext, log)
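    // Tries each resolved address in turn: on a StreamTcpException the request is retried
    // against the remaining addresses (see the recoverWith below), and a scheduled timeout
    // bounds the overall wait when the settings define a finite idleTimeout.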
def singleResilientRequest(request: HttpRequest,
addrs: Seq[InetAddress],
settings: ClientConnectionSettings,
httpsContext: Option[HttpsConnectionContext],
log: LoggingAdapter)(implicit fm: Materializer): Future[HttpResponse] = {
implicit val ec = fm.executionContext
val addr::remainingAddrs = addrs
val c = outgoingConnectionImpl(addr, request.uri.authority.port,
None, settings,
httpsContext orElse {
if (request.uri.scheme == "https") Some(http.defaultClientHttpsContext)
else None
}, log)
val p = Promise[HttpResponse]()
      // The fallback to the remaining addresses must be attached to the future
      // before any resolution attempt runs, or the promise could complete first
      // and short-circuit the retries.
val fResult = p.future
.recoverWith {
case e: akka.stream.StreamTcpException if !remainingAddrs.isEmpty =>
log.warning(s"Request to $addr failed. " +
s"Trying remaining ${remainingAddrs.size} addresses.")
singleResilientRequest(request,
remainingAddrs, settings, httpsContext, log)
}
settings.idleTimeout match {
case timeout: FiniteDuration =>
fm.scheduleOnce(timeout, new Runnable() {
override def run() {
p.tryFailure(new TimeoutException(s"No response within $timeout"))
}
})
case _ => // No timeout
}
Source.single(request).via(c)
.runForeach((r) => p.trySuccess(r))
.onFailure({ case e: Throwable => p.tryFailure(e) })
fResult
}
def outgoingConnection(addr: InetAddress, port: Int,
localAddress: Option[InetSocketAddress],
settings: ClientConnectionSettings,
log: LoggingAdapter): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] =
outgoingConnectionImpl(addr, port, localAddress, settings, None, log)
def outgoingConnectionTls(addr: InetAddress, port: Int,
localAddress: Option[InetSocketAddress],
settings: ClientConnectionSettings,
httpsContext: Option[HttpsConnectionContext],
log: LoggingAdapter): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] =
outgoingConnectionImpl(addr, port, localAddress, settings,
httpsContext orElse Some(http.defaultClientHttpsContext), log)
private def outgoingConnectionImpl(addr: InetAddress, port: Int,
localAddress: Option[InetSocketAddress],
settings: ClientConnectionSettings,
httpsContext: Option[HttpsConnectionContext],
log: LoggingAdapter): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = {
val host = addr.getHostName.stripSuffix(".")
val effectivePort = port match {
case 0 if httpsContext.isEmpty => 80
case 0 => 443
case _ => port
}
val layer = {
val hostHeader = effectivePort match {
case 80 if httpsContext.isEmpty => Host(host)
case 443 if httpsContext.isDefined => Host(host)
case _ => Host(host, port)
}
http.clientLayer(hostHeader, settings, log)
}
val tlsStage = httpsContext match {
case Some(hctx) =>
TLS(hctx.sslContext, hctx.firstSession, Client, EagerClose,
hostInfo = Some(host -> effectivePort))
case None => TLSPlacebo()
}
val transportFlow = Tcp().outgoingConnection(
new InetSocketAddress(addr, effectivePort), localAddress,
settings.socketOptions, halfClose = true,
settings.connectingTimeout, settings.idleTimeout)
val tmp = tlsStage.joinMat(transportFlow) { (_, f) =>
import system.dispatcher
f.map { c => OutgoingConnection(c.localAddress, c.remoteAddress) }
}
layer.joinMat(tmp)(Keep.right)
}
}
}
| dit4c/dit4c | dit4c-common/src/main/scala/dit4c/common/AkkaHttpExtras.scala | Scala | mit | 5,105 |
package org.danielnixon.playwarts
object HeadersPartial extends ClassMethodWart(
"play.api.mvc.Headers",
"apply",
"Headers#apply is disabled - use Headers#get instead"
)
| danielnixon/playwarts | core/src/main/scala/org/danielnixon/playwarts/HeadersPartial.scala | Scala | apache-2.0 | 177 |
package controller
import skinny._
import skinny.validator._
import model._
class CompaniesController extends SkinnyResourceWithId[CompanyId] with ApplicationController {
protectFromForgery()
implicit override val scalatraParamsIdTypeConverter = new skinny.TypeConverter[String, CompanyId] {
def apply(s: String): Option[CompanyId] = Option(s).map(model.rawValueToId)
}
override def model = Company
override def resourcesName = "companies"
override def resourceName = "company"
override def createParams = Params(params).withDateTime("updatedAt")
override def createForm = validation(createParams,
paramKey("name") is required & maxLength(64),
paramKey("url") is maxLength(128),
paramKey("updatedAt") is required & dateTimeFormat
)
override def createFormStrongParameters = Seq(
"name" -> ParamType.String,
"url" -> ParamType.String,
"updatedAt" -> ParamType.DateTime)
override def updateParams = Params(params).withDateTime("updatedAt")
override def updateForm = validation(updateParams,
paramKey("id") is required,
paramKey("name") is required & maxLength(64),
paramKey("url") is maxLength(128),
paramKey("updatedAt") is required & dateTimeFormat
)
override def updateFormStrongParameters = Seq(
"name" -> ParamType.String,
"url" -> ParamType.String,
"updatedAt" -> ParamType.DateTime)
}
| BlackPrincess/skinny-framework | example/src/main/scala/controller/CompaniesController.scala | Scala | mit | 1,381 |
package com.scalapenos.myapp.api
import scala.concurrent.duration._
import akka.actor._
import akka.util.Timeout
import akka.pattern.ask
import spray.routing._
import spray.http.StatusCodes._
import scala.concurrent.ExecutionContext
object ApiActor {
def props = Props[ApiActor]
def name = "api"
}
class ApiActor extends HttpServiceActor
with Routes
with ActorContextCreationSupport {
def executionContext:ExecutionContext = context.dispatcher
def receive = runRoute(routes)
}
trait Routes extends HttpService
with ActorCreationSupport {
implicit def executionContext:ExecutionContext
implicit val timeout = Timeout(15 seconds)
val processJob = createChild(ProcessJob.props, ProcessJob.name)
def routes = {
pathPrefix("api") {
path("jobs") {
post {
entity(as[Job]) { job =>
val result = processJob.ask(job).mapTo[JobResult]
complete(OK, result)
}
}
}
}
}
}
trait ExecutionContextSupport {
import scala.concurrent.ExecutionContext
implicit def executionContext: ExecutionContext
}
trait ActorExecutionContextSupport extends ExecutionContextSupport { this: Actor =>
implicit def executionContext = context.dispatcher
}
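/* Indirection over child-actor creation: production code mixes in
 * ActorContextCreationSupport, while tests can supply a stub implementation
 * that returns probes instead of real children. */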
trait ActorCreationSupport {
def createChild(props: Props, name: String): ActorRef
}
trait ActorContextCreationSupport extends ActorCreationSupport {
def context: ActorContext
def createChild(props: Props, name: String): ActorRef = context.actorOf(props, name)
}
object ProcessJob {
def props = Props[ProcessJob]
def name = "process-job"
}
class ProcessJob extends Actor {
def receive = {
    // Echo to the asker so the `ask` in Routes completes; a real implementation
    // would process the Job and reply with a JobResult.
    case msg => sender() ! msg
}
}
| RayRoestenburg/nljug2013 | src/main/scala/com/scalapenos/myapp.api/TestCreateChildrenAfter.scala | Scala | apache-2.0 | 1,723 |
package com.github.ldaniels528.scalascript.extensions
import com.github.ldaniels528.scalascript.Scope
import com.github.ldaniels528.scalascript.core.Provider
import scala.scalajs.js
/**
* \$route is used for deep-linking URLs to controllers and views (HTML partials).
* It watches \$location.url() and tries to map the path to an existing route definition.
* @see [[https://docs.angularjs.org/api/ngRoute/service/\$route]]
*/
@js.native
trait Route extends js.Object {
/**
* Reference to the current route definition.
*/
var current: RouteCurrent = js.native
/**
* Object with all route configuration Objects as its properties.
*/
var routes: js.Dictionary[String] = js.native
/**
* Causes $route service to reload the current route even if $location hasn't changed.
* As a result of that, ngView creates new scope and reinstantiates the controller.
*/
def reload(): Unit = js.native
/**
* Causes $route service to update the current URL, replacing current route parameters with those specified in
* newParams. Provided property names that match the route's path segment definitions will be interpolated into the
* location's path, while remaining properties will be treated as query params.
* @param newParams mapping of URL parameter names to values
*/
def updateParams(newParams: js.Dictionary[String]): Unit = js.native
}
/**
* Reference to the current route definition. The route definition contains:
* <ul>
* <li>controller: The controller constructor as define in route definition</li>
* <li> locals: A map of locals which is used by $controller service for controller instantiation.
* The locals contain the resolved values of the resolve map. Additionally the locals also contain:
* <ul>
* <li>$scope - The current route scope.</li>
* <li>$template - The current route template HTML.</li>
* </ul>
* </li>
* </ul>
*/
@js.native
trait RouteCurrent extends js.Object {
var controller: String = js.native
var locals: js.Dictionary[Any] = js.native
var $scope: Scope = js.native
var $template: String = js.native
}
/**
* Route Provider - Used for configuring routes.
* @see [[https://docs.angularjs.org/api/ngRoute/provider/\$routeProvider]]
*/
@js.native
trait RouteProvider extends Provider[Route] {
/**
* A boolean property indicating if routes defined using this provider should be matched using a
* case insensitive algorithm. Defaults to false.
*/
var caseInsensitiveMatch: Boolean = js.native
/**
* Adds a new route definition to the \$route service.
* @param path Route path (matched against $location.path). If $location.path contains redundant trailing slash
* or is missing one, the route will still match and the $location.path will be updated to add or drop
* the trailing slash to exactly match the route definition.
* @param route Mapping information to be assigned to $route.current on route match.
* @return self
*/
def when(path: String, route: RouteTo): this.type = js.native
/**
* Sets route definition that will be used on route change when no other route definition is matched.
* @param params Mapping information to be assigned to $route.current. If called with a string,
* the value maps to redirectTo.
* @return self
*/
def otherwise(params: RouteTo): this.type = js.native
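  /**
   * Usage sketch (hypothetical module; the `RouteTo` field names are assumed
   * for illustration):
   * {{{
   * module.config { (routeProvider: RouteProvider) =>
   *   routeProvider
   *     .when("/home", RouteTo(templateUrl = "/views/home.html"))
   *     .otherwise(RouteTo(redirectTo = "/home"))
   * }
   * }}}
   */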
} | ldaniels528/scalascript | src/main/scala/com/github/ldaniels528/scalascript/extensions/Route.scala | Scala | apache-2.0 | 3,411 |
/*
* Copyright (c) 2014 Paul Bernard
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Spectrum Finance is based in part on:
* QuantLib. http://quantlib.org/
*
*/
package org.quantintel.ql.time.calendars
import org.quantintel.ql.time.Month._
import org.quantintel.ql.time.Weekday._
import org.quantintel.ql.time.{Impl, Calendar, Date, Western}
object NorwayEnum extends Enumeration {
type NorwayEnum = Value
val NORWAY = Value(1)
def valueOf(market: Int) : NorwayEnum = market match {
case 1 => NORWAY
case _ => throw new Exception("Valid units = 1")
}
}
object Norway {
def apply(): Calendar = {
new Norway()
}
def apply(market: org.quantintel.ql.time.calendars.NorwayEnum.NorwayEnum): Calendar = {
new Norway(market)
}
}
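// e.g. val cal: Calendar = Norway() // or Norway(NorwayEnum.NORWAY)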
/**
*
* Norwegian calendar
* Holidays:
* Saturdays
* Sundays
* Holy Thursday
* Good Friday
* Easter Monday
* Ascension
 * Whit (Pentecost) Monday
* New Year's Day, JANUARY 1st
* May Day, May 1st
 * National Independence Day, May 17th
* Christmas, December 25th
* Boxing Day, December 26th
*
* @author Paul Bernard
*/
class Norway extends Calendar {
impl = new Norway
import org.quantintel.ql.time.calendars.NorwayEnum._
def this(market: org.quantintel.ql.time.calendars.NorwayEnum.NorwayEnum){
    this()
market match {
case NORWAY => impl = new Norway
case _ => throw new Exception("Valid units = 1")
}
}
private class Norway extends Western {
override def name : String = "Norway"
override def isBusinessDay(date: Date): Boolean = {
// standard dependencies
val w: Weekday = date.weekday
val d: Int = date.dayOfMonth
val dd: Int = date.dayOfYear
val m: Month = date.month
val y: Int = date.year
val em: Int = easterMonday(y)
if (isWeekend(w)
|| (dd == em - 4) // Holy Thursday
|| (dd == em - 3) // Good Friday
|| (dd == em) // Easter Monday
|| (dd == em + 38) // Ascension Thursday
|| (dd == em + 49) // Whit Monday
|| (d == 1 && m == JANUARY) // New Year's Day
|| (d == 1 && m == MAY) // May Day
|| (d == 17 && m == MAY) // National Independence Day
|| (d == 25 && m == DECEMBER) // Christmas
|| (d == 26 && m == DECEMBER)) // Boxing Day
false
else true
}
}
}
| pmularien/spectrum-old | financial/src/main/scala/org/quantintel/ql/time/calendars/Norway.scala | Scala | apache-2.0 | 2,871 |
package org.jetbrains.plugins.scala
package lang
package formatting
import psi.api.ScalaFile
import settings.ScalaCodeStyleSettings
import com.intellij.lang.ASTNode
import com.intellij.psi.codeStyle.{CommonCodeStyleSettings, CodeStyleSettings}
import com.intellij.openapi.util.TextRange
import org.jetbrains.plugins.scala.lang.psi.api.expr.xml._
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.formatting.processors._
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.packaging._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import java.util.List
import scaladoc.psi.api.ScDocComment
import psi.api.toplevel.ScEarlyDefinitions
import com.intellij.formatting._
import com.intellij.psi.{TokenType, PsiComment, PsiErrorElement, PsiWhiteSpace}
import psi.api.base.ScLiteral
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
class ScalaBlock (val myParentBlock: ScalaBlock,
protected val myNode: ASTNode,
val myLastNode: ASTNode,
protected var myAlignment: Alignment,
protected var myIndent: Indent,
protected var myWrap: Wrap,
protected val mySettings: CodeStyleSettings)
extends Object with ScalaTokenTypes with Block {
protected var mySubBlocks: List[Block] = null
def getNode = myNode
def getSettings = mySettings
def getCommonSettings = mySettings.getCommonSettings(ScalaFileType.SCALA_LANGUAGE)
def getTextRange =
if (myLastNode == null) myNode.getTextRange
else new TextRange(myNode.getTextRange.getStartOffset, myLastNode.getTextRange.getEndOffset)
def getIndent = myIndent
def getWrap = myWrap
def getAlignment = myAlignment
def isLeaf = isLeaf(myNode)
def isIncomplete = isIncomplete(myNode)
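  /** Indent/alignment for a child block about to be inserted at `newChildIndex`,
    * e.g. when the user presses Enter inside this block. */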
def getChildAttributes(newChildIndex: Int): ChildAttributes = {
val scalaSettings = mySettings.getCustomSettings(classOf[ScalaCodeStyleSettings])
val indentSize = mySettings.getIndentSize(ScalaFileType.SCALA_FILE_TYPE)
val parent = getNode.getPsi
val braceShifted = mySettings.BRACE_STYLE == CommonCodeStyleSettings.NEXT_LINE_SHIFTED
parent match {
case m: ScMatchStmt => {
if (m.caseClauses.length == 0) {
new ChildAttributes(if (braceShifted) Indent.getNoneIndent else Indent.getNormalIndent, null)
} else {
val indent = if (mySettings.INDENT_CASE_FROM_SWITCH) Indent.getSpaceIndent(2 * indentSize)
else Indent.getNormalIndent
new ChildAttributes(indent, null)
}
}
case c: ScCaseClauses => new ChildAttributes(Indent.getNormalIndent, null)
case l: ScLiteral
if l.isMultiLineString && scalaSettings.MULTILINE_STRING_SUPORT != ScalaCodeStyleSettings.MULTILINE_STRING_NONE =>
new ChildAttributes(Indent.getSpaceIndent(3, true), null)
case b: ScBlockExpr if b.lastExpr.exists(_.isInstanceOf[ScFunctionExpr]) =>
var i = getSubBlocks.size() - newChildIndex
val elem = b.lastExpr.get.getNode.getTreePrev
if (elem.getElementType != TokenType.WHITE_SPACE || !elem.getText.contains("\n")) i = 0
val indent = i + (if (!braceShifted) 1 else 0)
new ChildAttributes(Indent.getSpaceIndent(indent * indentSize), null)
case _: ScBlockExpr | _: ScEarlyDefinitions | _: ScTemplateBody | _: ScForStatement | _: ScWhileStmt |
_: ScTryBlock | _: ScCatchBlock =>
new ChildAttributes(if (braceShifted) Indent.getNoneIndent else
if (mySubBlocks.size >= newChildIndex &&
mySubBlocks.get(newChildIndex - 1).isInstanceOf[ScalaBlock] &&
mySubBlocks.get(newChildIndex - 1).asInstanceOf[ScalaBlock].getNode.getElementType == ScalaElementTypes.CASE_CLAUSES)
Indent.getSpaceIndent(2 * indentSize)
else
Indent.getNormalIndent, null)
case p : ScPackaging if p.isExplicit => new ChildAttributes(Indent.getNormalIndent, null)
case _: ScBlock =>
val grandParent = parent.getParent
new ChildAttributes(if (grandParent != null && (grandParent.isInstanceOf[ScCaseClause] || grandParent.isInstanceOf[ScFunctionExpr])) Indent.getNormalIndent
else Indent.getNoneIndent, null)
case _: ScIfStmt => new ChildAttributes(Indent.getNormalIndent(scalaSettings.ALIGN_IF_ELSE),
this.getAlignment)
case x: ScDoStmt => {
if (x.hasExprBody)
new ChildAttributes(Indent.getNoneIndent, null)
else new ChildAttributes(if (mySettings.BRACE_STYLE == CommonCodeStyleSettings.NEXT_LINE_SHIFTED)
Indent.getNoneIndent else Indent.getNormalIndent, null)
}
case _: ScXmlElement => new ChildAttributes(Indent.getNormalIndent, null)
case _: ScalaFile => new ChildAttributes(Indent.getNoneIndent, null)
case _: ScCaseClause => new ChildAttributes(Indent.getNormalIndent, null)
case _: ScExpression | _: ScPattern | _: ScParameters =>
new ChildAttributes(Indent.getContinuationWithoutFirstIndent, this.getAlignment)
case comment: ScDocComment if comment.version > 1 =>
new ChildAttributes(Indent.getSpaceIndent(2), null)
case _: ScDocComment =>
new ChildAttributes(Indent.getSpaceIndent(1), null)
case _ if parent.getNode.getElementType == ScalaTokenTypes.kIF =>
new ChildAttributes(Indent.getNormalIndent, null)
case _ => new ChildAttributes(Indent.getNoneIndent, null)
}
}
def getSpacing(child1: Block, child2: Block) = {
ScalaSpacingProcessor.getSpacing(child1.asInstanceOf[ScalaBlock], child2.asInstanceOf[ScalaBlock])
}
def getSubBlocks(): List[Block] = {
import collection.JavaConversions._
if (mySubBlocks == null) {
mySubBlocks = getDummyBlocks(myNode, myLastNode, this).filterNot {
_.asInstanceOf[ScalaBlock].getNode.getElementType == ScalaTokenTypes.tWHITE_SPACE_IN_LINE
}
}
mySubBlocks
}
def isLeaf(node: ASTNode): Boolean = {
if (myLastNode == null) node.getFirstChildNode == null
else false
}
def isIncomplete(node: ASTNode): Boolean = {
if (node.getPsi.isInstanceOf[PsiErrorElement])
return true
var lastChild = node.getLastChildNode
while (lastChild != null &&
(lastChild.getPsi.isInstanceOf[PsiWhiteSpace] || lastChild.getPsi.isInstanceOf[PsiComment])) {
lastChild = lastChild.getTreePrev
}
if (lastChild == null) {
return false
}
if (lastChild.getPsi.isInstanceOf[PsiErrorElement]) {
return true
}
isIncomplete(lastChild)
}
private var _suggestedWrap: Wrap = null
def suggestedWrap: Wrap = {
if (_suggestedWrap == null) {
val settings = getSettings.getCustomSettings(classOf[ScalaCodeStyleSettings])
_suggestedWrap = ScalaWrapManager.suggestedWrap(this, settings)
}
_suggestedWrap
}
} | consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/formatting/ScalaBlock.scala | Scala | apache-2.0 | 7,020 |
package com.versal.fireotter.tests
object Arithmetic {
def add(x: Int, y: Int): Int = x + y
def subtract(x: Int, y: Int): Int = x - y
def multiply(x: Int, y: Int): Int = x * y
def divide(x: Int, y: Int): Int = x / y
}
class ArithmeticTest extends org.scalatest.FunSuite {
import com.versal.fireotter._
val specs: Traversable[Seq[String]] = csv(resource("arithmetic.csv"))
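  // Assumed CSV columns: test name, operation, comma-separated operands, expected result.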
specs foreach { spec =>
val inputs: Seq[Int] = spec(2).split(",").map(_.toInt)
val output = spec(1) match {
case "add" => Arithmetic.add(inputs(0), inputs(1))
case "subtract" => Arithmetic.subtract(inputs(0), inputs(1))
case "multiply" => Arithmetic.multiply(inputs(0), inputs(1))
case "divide" => Arithmetic.divide(inputs(0), inputs(1))
}
val expectedOutput: Int = spec(3).toInt
test(spec(0)) { assert(output === expectedOutput) }
}
}
| Versal/fireotter | src/test/scala/tests.scala | Scala | bsd-2-clause | 885 |
package lesst
import org.mozilla.javascript.{ NativeArray, ScriptableObject }
case class StyleSheet(src: String, imports: List[String])
class ScriptableStyleSheet extends ScriptableObject {
implicit class NativeArrayWrapper(arr: NativeArray) {
def toList[T](f: AnyRef => T): List[T] =
(arr.getIds map { id: AnyRef =>
f(arr.get(id.asInstanceOf[java.lang.Integer], null))
}).toList
}
// yes. it's mutable. it's also javascript.
var result: StyleSheet = null
override def getClassName() = "StyleSheet"
def jsConstructor(css: String, imports: NativeArray) {
result = StyleSheet(css, imports.toList(_.toString))
}
}
| softprops/lesst | src/main/scala/sheets.scala | Scala | mit | 658 |
package im.tox.antox.data
import java.util
import java.util.ArrayList
import android.content.{ContentValues, Context}
import android.database.sqlite.{SQLiteDatabase, SQLiteOpenHelper}
//remove if not needed
class UserDB(ctx: Context) extends SQLiteOpenHelper(ctx, "userdb", null, 1) {
private var CREATE_TABLE_USERS: String = "CREATE TABLE IF NOT EXISTS users" + " ( _id integer primary key , " +
"username text," +
"password text," +
"nickname text," +
"status text," +
"status_message text);"
override def onCreate(db: SQLiteDatabase) {
db.execSQL(CREATE_TABLE_USERS)
}
override def onUpgrade(db: SQLiteDatabase, oldVersion: Int, newVersion: Int) {
}
def addUser(username: String, password: String) {
val db = this.getWritableDatabase
val values = new ContentValues()
values.put("username", username)
values.put("password", password)
values.put("nickname", username)
values.put("status", "online")
values.put("status_message", "Hey! I'm using Antox")
db.insert("users", null, values)
db.close()
}
def doesUserExist(username: String): Boolean = {
val db = this.getReadableDatabase
val cursor = db.rawQuery("SELECT count(*) FROM users WHERE username='" + username +
"'", null)
cursor.moveToFirst()
val count = cursor.getInt(0)
cursor.close()
db.close()
count > 0
}
def getUserDetails(username: String): Array[String] = {
val details = Array.ofDim[String](4)
val db = this.getReadableDatabase
val query = "SELECT * FROM users WHERE username='" + username + "'"
val cursor = db.rawQuery(query, null)
if (cursor.moveToFirst()) {
//WHY WOULD ANY SANE MAN DO THIS
details(0) = cursor.getString(3) //nickname
details(1) = cursor.getString(2) //password
details(2) = cursor.getString(4) //status
details(3) = cursor.getString(5) //status message
}
cursor.close()
db.close()
details
}
def updateUserDetail(username: String, detail: String, newDetail: String) {
    val db = this.getWritableDatabase
    // The column name cannot be bound as a parameter, so `detail` must be a
    // trusted value; newDetail and username are bound to avoid SQL injection.
    val query = "UPDATE users SET " + detail + "=? WHERE username=?"
    db.execSQL(query, Array[AnyRef](newDetail, username))
db.close()
}
def doUsersExist(): Boolean = {
val db = this.getReadableDatabase
val cursor = db.rawQuery("SELECT count(*) FROM users", null)
cursor.moveToFirst()
val count = cursor.getInt(0)
cursor.close()
db.close()
count > 0
}
def getAllProfiles: util.ArrayList[String] = {
val profiles = new util.ArrayList[String]()
val sqLiteDatabase = this.getReadableDatabase
val query = "SELECT username FROM users"
val cursor = sqLiteDatabase.rawQuery(query, null)
if (cursor.moveToFirst()) {
do {
profiles.add(cursor.getString(0))
} while (cursor.moveToNext())
}
    cursor.close()
    profiles
}
}
| afkgeek/Antox | app/src/main/scala/im/tox/antox/data/UserDB.scala | Scala | gpl-3.0 | 2,884 |
import com.twitter.finagle.Http.Server
import com.twitter.finagle.http.Request
import com.twitter.finagle.stack.nilStack
import com.twitter.finagle.stats.NullStatsReceiver
import com.twitter.finatra.http.filters.HttpResponseFilter
import com.twitter.finatra.http.routing.HttpRouter
import com.twitter.finatra.http.{Controller, HttpServer}
object FinatraBenchmarkServerMain extends FinatraBenchmarkServer
class FinatraBenchmarkServer extends HttpServer {
override def configureHttpServer(server: Server): Server = {
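    // Benchmark tuning: disable compression, drop stats collection, and strip
    // the default Finagle filter stack to minimize per-request overhead.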
server
.withCompressionLevel(0)
.withStatsReceiver(NullStatsReceiver)
.withStack(nilStack)
}
override def configureHttp(router: HttpRouter): Unit = {
router
.filter[HttpResponseFilter[Request]]
.add[FinatraBenchmarkController]
}
}
class FinatraBenchmarkController extends Controller {
get("/plaintext") { request: Request =>
"Hello, World!"
}
get("/json") { request: Request =>
Map("message" -> "Hello, World!")
}
} | actframework/FrameworkBenchmarks | frameworks/Scala/finatra/src/main/scala/Main.scala | Scala | bsd-3-clause | 997 |
package ru.maizy.cheesecake.server.endpointmanager
/**
* Copyright (c) Nikita Kovaliov, maizy.ru, 2016
* See LICENSE.txt for details.
*/
import scala.concurrent.Future
import scala.concurrent.duration._
import akka.actor.{ ActorRef, Props }
import akka.event.LoggingReceive
import akka.pattern.{ ask, pipe }
import ru.maizy.cheesecake.server.bodyparser.BodyParsers
import ru.maizy.cheesecake.server.ExtraInfo
import ru.maizy.cheesecake.server.checker.{ HttpCheck, HttpCheckResult }
import ru.maizy.cheesecake.server.service.{ EndpointStatus, HttpEndpoint }
class HttpEndpointManagerActor(httpCheckerPool: ActorRef, endpoint: HttpEndpoint) extends EndpointManagerActor {
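  // Ask the checker to capture the response body only when body parsers are configured.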
private val parseResponse = endpoint.bodyParsers.isDefined
override protected def check(): Unit = {
val timeout = checkInterval.getOrElse(1.seconds)
val checkFuture: Future[HttpCheckResult] =
(httpCheckerPool ? HttpCheck(endpoint, includeResponse = parseResponse))(timeout).mapTo[HttpCheckResult]
checkFuture pipeTo self
}
def checkResultsHandler: Receive = {
case checkResult: HttpCheckResult =>
context.parent ! EndpointStatus(endpoint, checkResult.copy(extraInfo = parseBody(checkResult)))
}
private def parseBody(checkResult: HttpCheckResult): Option[ExtraInfo] = {
if (!parseResponse || endpoint.bodyParsers.isEmpty) {
None
} else {
checkResult.body.map { body =>
BodyParsers.parse(body, endpoint.bodyParsers.get)
}
}
}
override def receive: Receive = LoggingReceive(checkResultsHandler orElse super.receive)
}
object HttpEndpointManagerActor {
def props(httpCheckerPool: ActorRef, endpoint: HttpEndpoint): Props =
Props(new HttpEndpointManagerActor(httpCheckerPool, endpoint))
}
| maizy/cheesecake | server/src/main/scala/ru/maizy/cheesecake/server/endpointmanager/HttpEndpointManagerActor.scala | Scala | apache-2.0 | 1,757 |
package im.actor.server.user
import akka.pattern.pipe
import im.actor.api.rpc.users.{ User ⇒ ApiUser }
import im.actor.server.api.ApiConversions._
import im.actor.server.util.{ ACLUtils, UserUtils }
import im.actor.server.{ persist ⇒ p }
private[user] trait UserQueriesHandlers {
self: UserProcessor ⇒
import UserQueries._
protected def getAuthIds(state: User): Unit = {
sender() ! GetAuthIdsResponse(state.authIds.toSeq)
}
protected def getApiStruct(state: User, clientUserId: Int, clientAuthId: Long): Unit = {
sender() ! GetApiStructResponse(ApiUser(
id = state.id,
accessHash = ACLUtils.userAccessHash(clientAuthId, state.id, state.accessSalt),
name = state.name,
localName = None,
sex = Some(state.sex),
avatar = state.avatar,
phone = state.phones.headOption.orElse(Some(0)),
isBot = Some(state.isBot),
contactInfo = UserUtils.defaultUserContactRecords(state.phones.toVector, state.emails.toVector),
nick = state.nickname,
about = state.about
))
}
protected def getContactRecords(state: User): Unit = {
sender() ! GetContactRecordsResponse(state.phones, state.emails)
}
protected def checkAccessHash(state: User, senderAuthId: Long, accessHash: Long): Unit =
sender() ! CheckAccessHashResponse(isCorrect = accessHash == ACLUtils.userAccessHash(senderAuthId, userId, state.accessSalt))
}
| darioajr/actor-platform | actor-server/actor-core/src/main/scala/im/actor/server/user/UserQueriesHandlers.scala | Scala | mit | 1,411 |
package systems.adaptix.bling.tags
import org.specs2.mutable.Specification
import scala.collection.mutable
/**
* Created by nkashyap on 5/18/15.
*/
class TagDagSpecification extends Specification {
"A TagDag is instantiated with a tag which represents universal context. This means that every data point is, if not explicitly then implicitly tagged with it." >> {
val tags = new TagDag("lol")
tags.universalTag mustEqual "lol"
}
"TagDag is a subclass of RootedDag, and hence a TagDag instance inherits a root. The universalTag of the TagDag is the label of this root vertex." >> {
val tags = new TagDag("lol")
tags.root.label mustEqual "lol"
}
"TagDags build upon RootedDags by providing a guarantee that each string occurs AT MOST ONCE as a vertex label. To this end, a TagDag provides an interface to vertex and label manipulation." >> {
"The labels of the vertices of a TagDag are stored as keys in its tagVertices member value, which is a mutable mapping from Strings to DagVertices. The value matching each of these tags is the DagVertex in the TagDag which has that tag as its label." >> {
val tags = new TagDag("lol")
tags.tagVertices must haveSize(1)
tags.tagVertices.getOrElse("lol", "") mustEqual tags.root
}
"The \\"hasTag\\" method is a means of testing whether or not a TagDag already contains a vertex with a given tag." >> {
val tags = new TagDag("lol")
tags hasTag "lol" must beTrue
tags hasTag "rofl" must beFalse
}
"It is possible to make assertions regarding the existence of tags." >> {
"\\"assertHasTag\\" throws an IllegalArgumentException if its argument IS NOT a registered tag." >> {
val tags = new TagDag("root")
tags.assertHasTag("root") mustEqual ()
tags.assertHasTag("lol") must throwA[IllegalArgumentException]
}
"\\"assertHasNotTag\\" throws an IllegalArgumentException if its argument IS a registered tag." >> {
val tags = new TagDag("root")
tags.assertHasNotTag("root") must throwA[IllegalArgumentException]
tags.assertHasNotTag("lol") mustEqual ()
}
}
"A new tag may be added to a TagDag via the \\"insertTag\\" method." >> {
"By default, the tag is inserted as a child of the TagDag's root vertex." >> {
val tags = new TagDag("root")
tags.root.children must beEmpty
tags.insertTag("new")
tags.root.children must haveSize(1)
tags.root.children must contain(tags tagVertices "new")
}
"The method also allows the insertion of a tag as a child to a specified Set of parents." >> {
val tags = new TagDag("root")
tags.insertTag("child1")
tags.insertTag("child2")
tags.insertTag("grandchild", Set("child1", "child2"))
val rootChildren = (tags tagVertices "root").children
rootChildren must haveSize(2)
rootChildren must contain(tags tagVertices "child1")
rootChildren must contain(tags tagVertices "child2")
rootChildren must not contain(tags tagVertices "grandchild")
val child1Children = (tags tagVertices "child1").children
child1Children must haveSize(1)
child1Children must contain(tags tagVertices "grandchild")
val child2Children = (tags tagVertices "child2").children
child2Children must haveSize(1)
child2Children must contain(tags tagVertices "grandchild")
}
"The method also allows for specification of a Set of children of the tag to be inserted." >> {
val tags = new TagDag("root")
tags.insertTag("child1")
tags.insertTag("child2", parents = Set("root"), children = Set("child1"))
val rootChildren = (tags tagVertices "root").children
rootChildren must haveSize(2)
rootChildren must contain(tags tagVertices "child1")
rootChildren must contain(tags tagVertices "child2")
val child1Children = (tags tagVertices "child1").children
child1Children must beEmpty
val child2Children = (tags tagVertices "child2").children
child2Children must haveSize(1)
child2Children must contain(tags tagVertices "child1")
}
"Any insertion which ultimately violates acyclicity will result in an IllegalArgumentException and a reversion of state of the TagDag to before the insertion was attempted." >> {
val tags = new TagDag("root")
tags.insertTag("child1")
tags.insertTag("child2", children = Set("root")) must throwA[IllegalArgumentException]
tags.tagVertices must haveSize(2)
val root = tags tagVertices "root"
root.children must haveSize(1)
root.children must contain(tags tagVertices "child1")
}
"The attempted insertion of a tag which already exists will cause the method to throw an IllegalArgumentException." >> {
val tags = new TagDag("root")
tags.insertTag("root") must throwA[IllegalArgumentException]
}
"The method also throws an IllegalArgumentException if one of the specified parents or children is not a registered tag." >> {
val tags = new TagDag("root")
tags.insertTag("lol", Set("fakeTag")) must throwA[IllegalArgumentException]
tags.insertTag("lol", children = Set("fakeTag")) must throwA[IllegalArgumentException]
}
}
"The \\"descendants\\" method returns the tags which are reachable from the given tag." >> {
"The tags are returned in a container of type Seq[String]." >> {
val tags = new TagDag("root")
tags.insertTag("lol")
tags.insertTag("rofl")
tags.insertTag("omg")
tags.insertTag("wtf", Set("omg"))
tags.insertTag("bbq", Set("wtf"))
val descendantsOfOmg = tags descendants "omg"
descendantsOfOmg must haveSize(3)
descendantsOfOmg must contain("omg")
descendantsOfOmg must contain("wtf")
descendantsOfOmg must contain("bbq")
val descendantsOfRoot = tags descendants "root"
descendantsOfRoot must haveSize(6)
descendantsOfRoot(0) mustEqual "root"
descendantsOfRoot must contain("lol")
descendantsOfRoot must contain("rofl")
descendantsOfRoot must contain("omg")
descendantsOfRoot(4) mustEqual "wtf"
descendantsOfRoot(5) mustEqual "bbq"
}
"The method throws an IllegalArgumentException if the originating tag is not registered in the TagDag." >> {
val tags = new TagDag("root")
(tags descendants "fakeTag") must throwA[IllegalArgumentException]
}
}
"The \\"validateUniversality\\" method checks if every registered tag in a TagDag is reachable from the universalTag." >> {
val tags = new TagDag("root")
tags insertTag "child"
tags.validateUniversality must beTrue
tags.root removeChild tags.tagVertices("child")
tags.validateUniversality must beFalse
}
"The \\"link\\" method facilitates edge creation." >> {
"The argument order determines the direction of the edge." >> {
val tags = new TagDag("root")
tags.insertTag("parent")
tags.insertTag("child")
tags.link("parent", "child")
val parent = tags.tagVertices("parent")
val child = tags.tagVertices("child")
tags.root hasChildren Set(parent, child) must beTrue
parent.children must haveSize(1)
parent hasChild child must beTrue
child.children must beEmpty
}
"The method throws an exception if either one of the tags has not been registered in the TagDag." >> {
val tags = new TagDag("root")
tags.insertTag("vertex")
tags.link("fakeParent", "vertex") must throwA[IllegalArgumentException]
tags.link("vertex", "fakeChild") must throwA[IllegalArgumentException]
}
"The method reverts the TagDag to its previous state and throws an IllegalArgumentException if the introduction of the link creates a cycle in the TagDag." >> {
val tags = new TagDag("root")
tags.insertTag("child")
val child = tags tagVertices "child"
tags.link("child", "root") must throwA[IllegalArgumentException]("Linking tags violated acyclicity: child to root")
tags.root.children must haveSize(1)
tags.root.children must contain(child)
child.children must beEmpty
}
}
"The \\"unlink\\" method facilitates edge removal." >> {
"The argument order determines the direction of the edge." >> {
val tags = new TagDag("root")
tags.insertTag("child")
tags.insertTag("sibling", Set("root", "child"))
tags.tagVertices("root").children must contain(tags.tagVertices("sibling"))
tags.tagVertices("child").children must contain(tags.tagVertices("sibling"))
tags.unlink("child", "sibling")
tags.tagVertices("root").children must contain(tags.tagVertices("sibling"))
tags.tagVertices("child").children must not contain(tags.tagVertices("sibling"))
}
"The method throws an IllegalArgumentException if either of its arguments has not been registered in the TagDag." >> {
val tags = new TagDag("root")
tags.unlink("root", "fakeTag") must throwA[IllegalArgumentException]
tags.unlink("fakeTag", "root") must throwA[IllegalArgumentException]
}
"The method gracefully does nothing if, although both its arguments are registered in the TagDag, there is no edge between them in the specified direction." >> {
      val tags = new TagDag("root")
      tags.insertTag("child")
      tags.unlink("child", "root") // no such edge, so this should be a graceful no-op
      tags.root.children must haveSize(1)
      tags.root.children must contain(tags.tagVertices("child"))
}
"The method reverts the TagDag to its previous state and throws an IllegalArgumentException if the specified unlinking causes the child node to become isolated." >> {
val tags = new TagDag("root")
tags.insertTag("child")
tags.unlink("root", "child") must throwA[IllegalArgumentException]
tags.root.children must contain(tags.tagVertices("child"))
}
}
"The \\"groupSiblings\\" method facilitates the creation of a new tag representing the aggregation of some subset of the children of a given contextual tag." >> {
"The new tag is interjected between the contextual tag and its children." >> {
val tags = new TagDag("context")
tags.insertTag("member1")
tags.insertTag("member2")
tags.insertTag("non-member")
tags.groupSiblings("group", Set("member1", "member2"), "context")
Set("member1", "member2", "context").
map( tag => tags.tagVertices(tag) ).
exists( vertex => tags.tagVertices("context").hasChild(vertex) ) must beFalse
Set("group", "non-member").
map( tag => tags.tagVertices(tag) ).
forall( vertex => tags.tagVertices("context").hasChild(vertex) ) must beTrue
Set("member1", "member2").
map( tag => tags.tagVertices(tag) ).
forall( vertex => tags.tagVertices("group").hasChild(vertex) ) must beTrue
Set("context", "non-member", "group").
map( tag => tags.tagVertices(tag) ).
exists( vertex => tags.tagVertices("group").hasChild(vertex) ) must beFalse
tags.tagVertices("context").children must haveSize(2)
tags.tagVertices("group").children must haveSize(2)
}
"If the groupTag has already been registered in the TagDag, the method throws an IllegalArgumentException." >> {
val tags = new TagDag("context")
tags.insertTag("member")
tags.groupSiblings("member", Set("member"), "context") must throwA[IllegalArgumentException]
}
"If the contextTag or one of the memberTags is NOT a registered tag, or if any of the memberTags is not a child of the contextTag, the method throws an IllegalArgumentException." >> {
val tags = new TagDag("context")
tags.insertTag("shallow-member")
tags.insertTag("deep-member", Set("shallow-member"))
tags.groupSiblings("lol", Set("shallow-member"), "fakeTag") must throwA[IllegalArgumentException]("The contextTag and all the memberTags have to be registered.")
tags.groupSiblings("lol", Set("fakeTag"), "context") must throwA[IllegalArgumentException]("The contextTag and all the memberTags have to be registered.")
tags.groupSiblings("lol", Set("deep-member"), "context") must throwA[IllegalArgumentException]("memberTags have to be children of the contextTag.")
}
}
"The \\"pushChild\\" method provides a means to move a newly created tag (which is inserted as a child of the universal tag) down the TagDag to a more suitable location." >> {
"The tag to be moved is passed as the first argument, and its new parent is passed as the second argument." >> {
val tags = new TagDag("omg")
tags.insertTag("wtf")
tags.insertTag("bbq")
tags.root.children must haveSize(2)
tags.tagVertices("wtf").children must beEmpty
tags.pushChild("bbq", "wtf")
tags.root.children must haveSize(1)
tags.root.hasChild(tags.tagVertices("wtf")) must beTrue
tags.root.hasChild(tags.tagVertices("bbq")) must beFalse
tags.tagVertices("wtf").children must haveSize(1)
tags.tagVertices("wtf").hasChild(tags.tagVertices("bbq")) must beTrue
}
"If either argument is not a registered tag, the method throws an IllegalArgumentException." >> {
val tags = new TagDag("omg")
tags.insertTag("wtf")
tags.pushChild("bbq", "wtf") must throwA[IllegalArgumentException]
tags.pushChild("wtf", "fakeTag") must throwA[IllegalArgumentException]
}
}
}
}
| nkashy1/bling | src/test/scala/systems/adaptix/bling/tags/TagDagSpecification.scala | Scala | mit | 13,764 |
package scalax.hash
import org.scalameter.api._
import scodec.bits.ByteVector
object CRC32Regression extends PerformanceTest.OfflineRegressionReport with CRC32 {
val sizes = Gen.range("megabyte")(2,16,2)
val streams = for (size <- sizes) yield
Stream.fill(size)(ByteVector.view(Random.MB))
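  // Each benchmarked run feeds `size` one-megabyte chunks through an incrementally updated hash.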
performance of "CRC32" in {
measure method "update" in {
using(streams) config (
exec.benchRuns -> 10,
exec.independentSamples -> 2,
exec.reinstantiation.frequency -> 1
) in { stream =>
var a = Hash.empty
for (chunk <- stream)
a = a.update(chunk)
a.value
}
}
}
}
| wookietreiber/scala-hash | core/it/scala/CRC32Regression.scala | Scala | bsd-2-clause | 653 |
/*
* Copyright 2014–2020 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.contrib.scalaz
import scalaz._, Scalaz._
/** A version of MonadListen that doesn't extend Monad to avoid ambiguous implicits
* in the presence of multiple "mtl" constraints.
*/
trait MonadListen_[F[_], W] {
def listen[A](fa: F[A]): F[(A, W)]
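  /** Runs `fa`, then rewrites the accumulated log with the `W => W` function it produced. */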
def pass[A](fa: F[(A, W => W)])(implicit B: Bind[F], T: MonadTell_[F, W]): F[A] =
B.bind(listen(fa)) { case ((a, f), w) => T.writer(f(w), a) }
}
object MonadListen_ extends MonadListen_Instances {
def apply[F[_], W](implicit L: MonadListen_[F, W]): MonadListen_[F, W] = L
}
sealed abstract class MonadListen_Instances extends MonadListen_Instances0 {
implicit def eitherTMonadListen[F[_]: Functor, W, E](implicit L: MonadListen_[F, W]): MonadListen_[EitherT[F, E, ?], W] =
new MonadListen_[EitherT[F, E, ?], W] {
def listen[A](fa: EitherT[F, E, A]) =
EitherT(L.listen(fa.run) map { case (d, w) => d strengthR w })
}
implicit def writerTMonadListen[F[_]: Functor, W1, W2](implicit L: MonadListen_[F, W1]): MonadListen_[WriterT[F, W2, ?], W1] =
new MonadListen_[WriterT[F, W2, ?], W1] {
def listen[A](fa: WriterT[F, W2, A]) =
WriterT(L.listen(fa.run) map { case ((w2, a), w1) => (w2, (a, w1)) })
}
implicit def kleisliMonadListen[F[_], R, W](
implicit F: MonadListen_[F, W]): MonadListen_[Kleisli[F, R, ?], W] =
new MonadListen_[Kleisli[F, R, ?], W] {
def listen[A](fa: Kleisli[F, R, A]): Kleisli[F, R, (A, W)] =
fa.mapK(MonadListen_[F, W].listen[A])
}
}
sealed abstract class MonadListen_Instances0 {
implicit def monadListenNoMonad[F[_], W](implicit L: MonadListen[F, W]): MonadListen_[F, W] =
new MonadListen_[F, W] {
def listen[A](fa: F[A]) = L.listen(fa)
}
}
| slamdata/quasar | foundation/src/main/scala/quasar/contrib/scalaz/MonadListen_.scala | Scala | apache-2.0 | 2,340 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster
import java.util.concurrent.Semaphore
import java.util.concurrent.atomic.AtomicBoolean
import scala.concurrent.Future
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.deploy.{ApplicationDescription, Command}
import org.apache.spark.deploy.client.{StandaloneAppClient, StandaloneAppClientListener}
import org.apache.spark.internal.Logging
import org.apache.spark.launcher.{LauncherBackend, SparkAppHandle}
import org.apache.spark.rpc.RpcEndpointAddress
import org.apache.spark.scheduler._
import org.apache.spark.util.Utils
/**
* A [[SchedulerBackend]] implementation for Spark's standalone cluster manager.
*/
private[spark] class StandaloneSchedulerBackend(
scheduler: TaskSchedulerImpl,
sc: SparkContext,
masters: Array[String])
extends CoarseGrainedSchedulerBackend(scheduler, sc.env.rpcEnv)
with StandaloneAppClientListener
with Logging {
private var client: StandaloneAppClient = null
private val stopping = new AtomicBoolean(false)
private val launcherBackend = new LauncherBackend() {
override protected def onStopRequest(): Unit = stop(SparkAppHandle.State.KILLED)
}
@volatile var shutdownCallback: StandaloneSchedulerBackend => Unit = _
@volatile private var appId: String = _
private val registrationBarrier = new Semaphore(0)
private val maxCores = conf.getOption("spark.cores.max").map(_.toInt)
private val totalExpectedCores = maxCores.getOrElse(0)
override def start() {
super.start()
// SPARK-21159. The scheduler backend should only try to connect to the launcher when in client
// mode. In cluster mode, the code that submits the application to the Master needs to connect
// to the launcher instead.
if (sc.deployMode == "client") {
launcherBackend.connect()
}
// The endpoint for executors to talk to us
val driverUrl = RpcEndpointAddress(
sc.conf.get("spark.driver.host"),
sc.conf.get("spark.driver.port").toInt,
CoarseGrainedSchedulerBackend.ENDPOINT_NAME).toString
val args = Seq(
"--driver-url", driverUrl,
"--executor-id", "{{EXECUTOR_ID}}",
"--hostname", "{{HOSTNAME}}",
"--cores", "{{CORES}}",
"--app-id", "{{APP_ID}}",
"--worker-url", "{{WORKER_URL}}")
val extraJavaOpts = sc.conf.getOption("spark.executor.extraJavaOptions")
.map(Utils.splitCommandString).getOrElse(Seq.empty)
val classPathEntries = sc.conf.getOption("spark.executor.extraClassPath")
.map(_.split(java.io.File.pathSeparator).toSeq).getOrElse(Nil)
val libraryPathEntries = sc.conf.getOption("spark.executor.extraLibraryPath")
.map(_.split(java.io.File.pathSeparator).toSeq).getOrElse(Nil)
// When testing, expose the parent class path to the child. This is processed by
// compute-classpath.{cmd,sh} and makes all needed jars available to child processes
// when the assembly is built with the "*-provided" profiles enabled.
val testingClassPath =
if (sys.props.contains("spark.testing")) {
sys.props("java.class.path").split(java.io.File.pathSeparator).toSeq
} else {
Nil
}
// Start executors with a few necessary configs for registering with the scheduler
val sparkJavaOpts = Utils.sparkJavaOpts(conf, SparkConf.isExecutorStartupConf)
val javaOpts = sparkJavaOpts ++ extraJavaOpts
val command = Command("org.apache.spark.executor.CoarseGrainedExecutorBackend",
args, sc.executorEnvs, classPathEntries ++ testingClassPath, libraryPathEntries, javaOpts)
val webUrl = sc.ui.map(_.webUrl).getOrElse("")
val coresPerExecutor = conf.getOption("spark.executor.cores").map(_.toInt)
// If we're using dynamic allocation, set our initial executor limit to 0 for now.
// ExecutorAllocationManager will send the real initial limit to the Master later.
val initialExecutorLimit =
if (Utils.isDynamicAllocationEnabled(conf)) {
Some(0)
} else {
None
}
val appDesc = ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
webUrl, sc.eventLogDir, sc.eventLogCodec, coresPerExecutor, initialExecutorLimit)
client = new StandaloneAppClient(sc.env.rpcEnv, masters, appDesc, this, conf)
client.start()
launcherBackend.setState(SparkAppHandle.State.SUBMITTED)
waitForRegistration()
launcherBackend.setState(SparkAppHandle.State.RUNNING)
}
override def stop(): Unit = {
stop(SparkAppHandle.State.FINISHED)
}
override def connected(appId: String) {
logInfo("Connected to Spark cluster with app ID " + appId)
this.appId = appId
notifyContext()
launcherBackend.setAppId(appId)
}
override def disconnected() {
notifyContext()
if (!stopping.get) {
logWarning("Disconnected from Spark cluster! Waiting for reconnection...")
}
}
override def dead(reason: String) {
notifyContext()
if (!stopping.get) {
launcherBackend.setState(SparkAppHandle.State.KILLED)
logError("Application has been killed. Reason: " + reason)
try {
scheduler.error(reason)
} finally {
// Ensure the application terminates, as we can no longer run jobs.
sc.stopInNewThread()
}
}
}
override def executorAdded(fullId: String, workerId: String, hostPort: String, cores: Int,
memory: Int) {
logInfo("Granted executor ID %s on hostPort %s with %d core(s), %s RAM".format(
fullId, hostPort, cores, Utils.megabytesToString(memory)))
}
override def executorRemoved(
fullId: String, message: String, exitStatus: Option[Int], workerLost: Boolean) {
val reason: ExecutorLossReason = exitStatus match {
case Some(code) => ExecutorExited(code, exitCausedByApp = true, message)
case None => SlaveLost(message, workerLost = workerLost)
}
logInfo("Executor %s removed: %s".format(fullId, message))
removeExecutor(fullId.split("/")(1), reason)
}
override def workerRemoved(workerId: String, host: String, message: String): Unit = {
logInfo("Worker %s removed: %s".format(workerId, message))
removeWorker(workerId, host, message)
}
override def sufficientResourcesRegistered(): Boolean = {
totalCoreCount.get() >= totalExpectedCores * minRegisteredRatio
}
override def applicationId(): String =
Option(appId).getOrElse {
logWarning("Application ID is not initialized yet.")
super.applicationId
}
/**
* Request executors from the Master by specifying the total number desired,
* including existing pending and running executors.
*
* @return whether the request is acknowledged.
*/
protected override def doRequestTotalExecutors(requestedTotal: Int): Future[Boolean] = {
Option(client) match {
case Some(c) => c.requestTotalExecutors(requestedTotal)
case None =>
logWarning("Attempted to request executors before driver fully initialized.")
Future.successful(false)
}
}
/**
* Kill the given list of executors through the Master.
* @return whether the kill request is acknowledged.
*/
protected override def doKillExecutors(executorIds: Seq[String]): Future[Boolean] = {
Option(client) match {
case Some(c) => c.killExecutors(executorIds)
case None =>
logWarning("Attempted to kill executors before driver fully initialized.")
Future.successful(false)
}
}
private def waitForRegistration() = {
registrationBarrier.acquire()
}
private def notifyContext() = {
registrationBarrier.release()
}
private def stop(finalState: SparkAppHandle.State): Unit = {
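    // The CAS makes stop() idempotent: only the first caller runs the shutdown
    // sequence; concurrent or repeated calls are no-ops.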
if (stopping.compareAndSet(false, true)) {
try {
super.stop()
client.stop()
val callback = shutdownCallback
if (callback != null) {
callback(this)
}
} finally {
launcherBackend.setState(finalState)
launcherBackend.close()
}
}
}
}
| akopich/spark | core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala | Scala | apache-2.0 | 8,780 |
package models
import java.util.UUID
import play.api.libs.json.{ Json, OFormat }
case class TwilioFax(
userID: UUID,
claimID: UUID,
claimSubmissionID: UUID,
dateCreated: java.util.Date,
dateUpdated: java.util.Date,
to: String,
from: String,
twilioFaxId: String,
status: String) {
}
object TwilioFax {
implicit val jsonFormat: OFormat[TwilioFax] = Json.format[TwilioFax]
}
| vetafi/vetafi-web | app/models/TwilioFax.scala | Scala | apache-2.0 | 397 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.hbase.tools.stats
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.hbase.data.HBaseDataStore
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand
import org.locationtech.geomesa.tools.CatalogParam
import org.locationtech.geomesa.tools.stats.{StatsHistogramCommand, StatsHistogramParams}
class HBaseStatsHistogramCommand extends StatsHistogramCommand[HBaseDataStore] with HBaseDataStoreCommand {
override val params = new HBaseStatsHistogramParams
}
@Parameters(commandDescription = "View or calculate counts of attribute in a GeoMesa feature type, grouped by sorted values")
class HBaseStatsHistogramParams extends StatsHistogramParams with CatalogParam
| MutahirKazmi/geomesa | geomesa-hbase/geomesa-hbase-tools/src/main/scala/org/locationtech/geomesa/hbase/tools/stats/HBaseStatsHistogramCommand.scala | Scala | apache-2.0 | 1,188 |
package com.sksamuel.elastic4s.requests.searches.aggs
import com.sksamuel.elastic4s.requests.script.Script
import com.sksamuel.elastic4s.requests.searches.aggs.pipeline.PipelineAgg
import com.sksamuel.elastic4s.ext.OptionImplicits._
case class WeightedAvgAggregation(
name: String,
value: Option[WeightedAvgField] = None,
weight: Option[WeightedAvgField] = None,
pipelines: Seq[PipelineAgg] = Nil,
subaggs: Seq[AbstractAggregation] = Nil,
metadata: Map[String, AnyRef] = Map.empty
) extends Aggregation {
type T = WeightedAvgAggregation
def value(value: WeightedAvgField): WeightedAvgAggregation = copy(value = value.some)
def weight(weight: WeightedAvgField): WeightedAvgAggregation = copy(weight = weight.some)
override def subAggregations(aggs: Iterable[AbstractAggregation]): T = copy(subaggs = aggs.toSeq)
override def metadata(map: Map[String, AnyRef]): T = copy(metadata = map)
}
case class WeightedAvgField(field: Option[String] = None, script: Option[Script] = None, missing: Option[AnyRef] = None)
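// Example:
//   WeightedAvgAggregation("avg_grade")
//     .value(WeightedAvgField(field = Some("grade")))
//     .weight(WeightedAvgField(field = Some("weight")))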
| sksamuel/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/aggs/WeightedAvgAggregation.scala | Scala | apache-2.0 | 1,291 |
package com.tristanpenman.chordial.dht
final case class SeedNode(hostname: String, port: Int)
| tristanpenman/chordial | modules/dht/src/main/scala/com/tristanpenman/chordial/dht/SeedNode.scala | Scala | bsd-3-clause | 95 |