| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 5 to 1M | stringlengths 5 to 109 | stringlengths 6 to 208 | stringclasses 1 value | stringclasses 15 values | int64 5 to 1M |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster
import org.apache.hadoop.yarn.api.records.{ApplicationId, YarnApplicationState}
import org.apache.spark.{SparkException, Logging, SparkContext}
import org.apache.spark.deploy.yarn.{Client, ClientArguments}
import org.apache.spark.scheduler.TaskSchedulerImpl
import scala.collection.mutable.ArrayBuffer
private[spark] class YarnClientSchedulerBackend(
scheduler: TaskSchedulerImpl,
sc: SparkContext)
extends CoarseGrainedSchedulerBackend(scheduler, sc.env.actorSystem)
with Logging {
var client: Client = null
var appId: ApplicationId = null
private[spark] def addArg(optionName: String, optionalParam: String, arrayBuf: ArrayBuffer[String]) {
Option(System.getenv(optionalParam)) foreach {
optParam => {
arrayBuf += (optionName, optParam)
}
}
}
override def start() {
super.start()
val userJar = System.getenv("SPARK_YARN_APP_JAR")
if (userJar == null)
throw new SparkException("env SPARK_YARN_APP_JAR is not set")
val driverHost = conf.get("spark.driver.host")
val driverPort = conf.get("spark.driver.port")
val hostport = driverHost + ":" + driverPort
val argsArrayBuf = new ArrayBuffer[String]()
argsArrayBuf += (
"--class", "notused",
"--jar", userJar,
"--args", hostport,
"--master-class", "org.apache.spark.deploy.yarn.WorkerLauncher"
)
// process any optional arguments, use the defaults already defined in ClientArguments
// if things aren't specified
Map("--master-memory" -> "SPARK_MASTER_MEMORY",
"--num-workers" -> "SPARK_WORKER_INSTANCES",
"--worker-memory" -> "SPARK_WORKER_MEMORY",
"--worker-cores" -> "SPARK_WORKER_CORES",
"--queue" -> "SPARK_YARN_QUEUE",
"--name" -> "SPARK_YARN_APP_NAME",
"--files" -> "SPARK_YARN_DIST_FILES",
"--archives" -> "SPARK_YARN_DIST_ARCHIVES")
.foreach { case (optName, optParam) => addArg(optName, optParam, argsArrayBuf) }
logDebug("ClientArguments called with: " + argsArrayBuf)
val args = new ClientArguments(argsArrayBuf.toArray, conf)
client = new Client(args, conf)
appId = client.runApp()
waitForApp()
}
def waitForApp() {
// TODO : need a better way to find out whether the workers are ready or not
// maybe by resource usage report?
while(true) {
val report = client.getApplicationReport(appId)
logInfo("Application report from ASM: \n" +
"\t appMasterRpcPort: " + report.getRpcPort() + "\n" +
"\t appStartTime: " + report.getStartTime() + "\n" +
"\t yarnAppState: " + report.getYarnApplicationState() + "\n"
)
// Ready to go, or already gone.
val state = report.getYarnApplicationState()
if (state == YarnApplicationState.RUNNING) {
return
} else if (state == YarnApplicationState.FINISHED ||
state == YarnApplicationState.FAILED ||
state == YarnApplicationState.KILLED) {
throw new SparkException("Yarn application already ended," +
"might be killed or not able to launch application master.")
}
Thread.sleep(1000)
}
}
override def stop() {
super.stop()
client.stop()
logInfo("Stoped")
}
}
| dotunolafunmiloye/spark | yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala | Scala | apache-2.0 | 4,069 |
package org.akozlov.chapter07
import scalax.collection.Graph
import scalax.collection.edge._
import scalax.collection.GraphPredef._
import scalax.collection.GraphEdge._
import scalax.collection.edge.Implicits._
/**
* An Influence Diagram example based on Chapter 2 from the book
*/
object InfluenceDiagram extends App {
var g = Graph(("'Weather'" ~+> "'Weather Forecast'")("Forecast"), ("'Weather Forecast'" ~+> "'Vacation Activity'")("Decision"), ("'Vacation Activity'" ~+> "'Satisfaction'")("Deterministic"), ("'Weather'" ~+> "'Satisfaction'")("Deterministic"))
println(g.mkString(";"))
println("Directed: " + g.isDirected)
println("Acyclic: " + g.isAcyclic)
g += ("'Satisfaction'" ~+> "'Recommend to a Friend'")("Probabilistic")
println(g.mkString(";"))
println("Directed: " + g.isDirected)
println("Acyclic: " + g.isAcyclic)
println((g get "'Recommend to a Friend'").incoming)
g += ("'Satisfaction'" ~+> "'Weather'")("Cyclic")
println(g.mkString(";"))
println("Directed: " + g.isDirected)
println("Acyclic: " + g.isAcyclic)
g += LDiEdge("'Weather Forecast'", "'Weather'")("Reverse")
println(g.mkString(";"))
println("Directed: " + g.isDirected)
println("Acyclic: " + g.isAcyclic)
println(g.nodes)
println(g.edges)
println((g get "'Weather'").outerNodeTraverser.toList)
println((g get "'Weather'").innerNodeTraverser.toList)
println((g get "'Satisfaction'").outerNodeTraverser.toList)
println((g get "'Satisfaction'").innerNodeTraverser.toList)
println((g get "'Satisfaction'").incoming)
println(g.isMulti)
println(g.isAcyclic)
println(g.isConnected)
val nodes = List(1, 3, 5)
val edges = List(3~1)
val ints = Graph.from(nodes, edges)
println(ints)
println(ints.isMulti)
println(ints.isAcyclic)
println(ints.isConnected)
}
| alexvk/ml-in-scala | chapter07/src/main/scala/InfluenceDiagram.scala | Scala | unlicense | 1,815 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.collection
/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods
* of trait `Iterable`.
*
* @tparam A Element type (e.g. `Int`)
* @tparam CC Collection type constructor (e.g. `List`)
*
* @define coll collection
*/
@SerialVersionUID(3L)
abstract class WithFilter[+A, +CC[_]] extends Serializable {
/** Builds a new collection by applying a function to all elements of the
* `filtered` outer $coll.
*
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
* @return a new $coll resulting from applying
* the given function `f` to each element of the filtered outer $coll
* and collecting the results.
*/
def map[B](f: A => B): CC[B]
/** Builds a new collection by applying a function to all elements of the
* `filtered` outer $coll containing this `WithFilter` instance that satisfy
* predicate `p` and concatenating the results.
*
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
* @return a new $coll resulting from applying
* the given collection-valued function `f` to each element
* of the filtered outer $coll and
* concatenating the results.
*/
def flatMap[B](f: A => IterableOnce[B]): CC[B]
/** Applies a function `f` to all elements of the `filtered` outer $coll.
*
* @param f the function that is applied for its side-effect to every element.
* The result of function `f` is discarded.
*
* @tparam U the type parameter describing the result of function `f`.
* This result will always be ignored. Typically `U` is `Unit`,
* but this is not necessary.
*/
def foreach[U](f: A => U): Unit
/** Further refines the filter for this `filtered` $coll.
*
* @param q the predicate used to test elements.
* @return an object of class `WithFilter`, which supports
* `map`, `flatMap`, `foreach`, and `withFilter` operations.
* All these operations apply to those elements of this $coll which
* also satisfy both `p` and `q` predicates.
*/
def withFilter(q: A => Boolean): WithFilter[A, CC]
}
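// Illustrative usage sketch (not part of the original source): `withFilter` is what
// guards in `for`-comprehensions desugar to, so the two expressions below are
// equivalent; the concrete values are made up for this example.
//
// List(1, 2, 3).withFilter(_ > 1).map(_ * 10) // List(20, 30)
// for (x <- List(1, 2, 3) if x > 1) yield x * 10 // List(20, 30)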
| lrytz/scala | src/library/scala/collection/WithFilter.scala | Scala | apache-2.0 | 2,660 |
package com.pygmalios.reactiveinflux.spark.utils
import com.pygmalios.reactiveinflux.{ReactiveInfluxConfig, ReactiveInfluxDbName}
import com.pygmalios.reactiveinflux.sync.{SyncReactiveInflux, SyncReactiveInfluxDb}
import scala.concurrent.duration.Duration
object Utils {
def withInflux[S](action: (SyncReactiveInfluxDb) => S)
(implicit reactiveInfluxDbName: ReactiveInfluxDbName,
awaitAtMost: Duration): S = {
val syncReactiveInflux = SyncReactiveInflux(ReactiveInfluxConfig(None))
try {
action(syncReactiveInflux.database)
}
finally {
syncReactiveInflux.close()
}
}
}
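// Usage sketch (illustrative only; the database name, timeout and closure body are
// assumptions, not taken from this file):
//
// import scala.concurrent.duration._
// implicit val dbName: ReactiveInfluxDbName = ReactiveInfluxDbName("example_db")
// implicit val awaitAtMost: Duration = 30.seconds
//
// Utils.withInflux { db =>
//   // query or write through the synchronous database handle here
// }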
| pygmalios/reactiveinflux-spark | src/main/scala/com/pygmalios/reactiveinflux/spark/utils/Utils.scala | Scala | apache-2.0 | 647 |
package info.cmlubinski.newslearning.classify.trainingset
import java.security.SecureRandom
import org.apache.commons.codec.binary.Base64
import unfiltered.Cookie
import unfiltered.filter.Plan
import unfiltered.request._
import unfiltered.response._
import info.cmlubinski.newslearning.models.{DB, TrainingSet}
import info.cmlubinski.newslearning.web.Jade
object New extends Plan {
import DB.imports._
object NameParam extends Params.Extract("name",
Params.first ~> Params.nonempty)
def intent = {
case req@GET(Path("/trainingset/new")) & Params(params) =>
Jade(req, "trainingset/new.jade",
"errors" -> params("error").toList)
case req@POST(Path("/trainingset/new") & Params(NameParam(name))) =>
val ts = newSet(name)
SetCookies(Cookie("trainingset", ts.uuid, path=Some("/"))) ~>
Redirect("/classify")
case req@POST(Path("/trainingset/new")) =>
Redirect("/trainingset/new?error=Missing+name+field")
}
def newSet(name:String) = DB.withTransaction{
implicit session =>
val sr = new SecureRandom()
val uuidBytes = new Array[Byte](18)
sr.nextBytes(uuidBytes)
val uuid = Base64.encodeBase64URLSafeString(uuidBytes)
val ts = TrainingSet(0, name.take(100), uuid, "Yes", "No")
DB.trainingSets += ts
ts
}
}
| cmc333333/news-learning | src/main/scala/classify/trainingset/New.scala | Scala | mit | 1,315 |
/*
* Copyright 2018 Dell Inc. or its subsidiaries. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.dellemc.ecs.s3.sample
import com.amazonaws.services.s3.AmazonS3
import com.amazonaws.services.s3.model.BucketVersioningConfiguration
import com.amazonaws.services.s3.model.SetBucketVersioningConfigurationRequest
object _30_EnableVersioning extends BucketAndObjectValidator {
/**
* Run the class.
*
* @param args
*/
def main(args: Array[String]): Unit = {
createVersionedBucket(AWSS3Factory.getS3ClientWithV4Signatures(), AWSS3Factory.S3_VERSIONED_BUCKET)
enableVersioningOnAnExistingBucket(AWSS3Factory.getS3ClientWithV2Signatures(), AWSS3Factory.S3_BUCKET_2)
}
/**
* Check that the bucket does not exist, create it, then set the versioning state.
*
* @param s3Client the client to use
* @param bucketName the bucket to use
*/
def createVersionedBucket(s3Client: AmazonS3, bucketName: String) = {
try {
checkBucketExistence(s3Client, bucketName)
s3Client.createBucket(bucketName)
enableVersioningOnAnExistingBucket(s3Client, bucketName)
} catch { case e: Exception => outputException(e) }
println()
}
/**
* Change the versioning state on a bucket.
*
* @param s3Client the client to use
* @param bucketName the bucket to use
*/
def enableVersioningOnAnExistingBucket(s3Client: AmazonS3, bucketName: String) = {
try {
checkVersioningStatus(s3Client, bucketName)
val bucketVersioningConfiguration: BucketVersioningConfiguration = new BucketVersioningConfiguration(BucketVersioningConfiguration.ENABLED)
val request: SetBucketVersioningConfigurationRequest = new SetBucketVersioningConfigurationRequest(bucketName, bucketVersioningConfiguration)
s3Client.setBucketVersioningConfiguration(request)
checkVersioningStatus(s3Client, bucketName)
} catch { case e: Exception => outputException(e) }
println()
}
/**
* Read and output the versioning status.
*
* @param s3Client the client to use
* @param bucketName the bucket to check
*/
def checkVersioningStatus(s3Client: AmazonS3, bucketName: String) = {
try {
val bucketVersioningConfiguration: BucketVersioningConfiguration = s3Client.getBucketVersioningConfiguration(bucketName)
System.out.println( s"Bucket [$bucketName] has versioning configuration ${bucketVersioningConfiguration.getStatus()}." )
} catch { case e: Exception => outputException(e) }
println()
}
}
| EMCECS/ecs-samples | aws-scala-workshop/src/main/scala/com/dellemc/ecs/s3/sample/_30_EnableVersioning.scala | Scala | apache-2.0 | 3,177 |
package geotrellis.test.singleband.accumulo
import geotrellis.config.Dataset
import geotrellis.raster.Tile
import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.test.AccumuloTest
import geotrellis.test.singleband.load.TemporalS3Load
import geotrellis.util.{S3Support, SparkSupport}
import org.apache.spark.SparkContext
abstract class TemporalS3IngestTest(dataset: Dataset) extends AccumuloTest[TemporalProjectedExtent, SpaceTimeKey, Tile](dataset) with S3Support with TemporalS3Load
object TemporalS3IngestTest {
def apply(implicit dataset: Dataset, _sc: SparkContext) = new TemporalS3IngestTest(dataset) {
@transient implicit val sc = SparkSupport.configureTime(dataset)(_sc)
}
}
| pomadchin/geotrellis-integration-tests | src/main/scala/geotrellis/test/singleband/accumulo/TemporalS3IngestTest.scala | Scala | apache-2.0 | 712 |
package edu.arizona.sista.twitter4food
import java.io.{FileOutputStream, PrintWriter, File}
import twitter4j._
import twitter4j.conf.ConfigurationBuilder
import java.util.Date
import scala.collection.JavaConversions.asScalaBuffer
object ScrapeIndividuals {
val SLEEP = 8000
def dateToString(d: Date): String = if(d == null) "NIL" else d.toString
def placeToString(p: Place): String = {
if (p == null) return "NIL"
val os = new StringBuilder
os.append(c(p.getPlaceType()))
os.append("/" + c(p.getFullName()))
os.append("/" + c(p.getCountryCode()))
os.append("/" + c(p.getBoundingBoxType()))
val gs = p.getBoundingBoxCoordinates
if (gs != null) {
for {
i <- 1 until gs.length
j <- 1 until gs(i).length
} {
os.append("/" + geoLocationToString(gs(i)(j)))
}
}
os.toString
}
def geoLocationToString(g: GeoLocation): String = {
if(g == null) return "NIL"
c(g.getLatitude.toString) + "|" + c(g.getLongitude.toString)
}
def c(s: String): String = {
if(s == null) return "NIL"
if(s.length() == 0) return "NIL"
s.replaceAll("[\\\\t\\\\n\\\\r]+", " ")
}
def mkConfig: ConfigurationBuilder = {
val cb = new ConfigurationBuilder()
cb.setDebugEnabled(false)
val rand = new scala.util.Random()
val cset = rand.nextInt(18) // uniform in 0..17; nextInt() % 18 can be negative, which would skew selection toward the default case
cset match {
case 0 => {
// app name: search4food ***** Twitter4Food
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 1 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 2 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 3 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 4 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 5 => {
// app name: search4food2
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 6 => {
//for DELL computer
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 7 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 8 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 9 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 10 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 11 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 12 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 13 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 14 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 15 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case 16 => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
case _ => {
cb.setOAuthConsumerKey("")
cb.setOAuthConsumerSecret("")
cb.setOAuthAccessToken("")
cb.setOAuthAccessTokenSecret("")
}
}
cb
}
def main(args: Array[String]) = {
val ratedIndividualsFiles = "/work/dane/rated_individuals.csv"
val bufferedSource = scala.io.Source.fromFile(ratedIndividualsFiles)
val userWeight = (for (line <- bufferedSource.getLines) yield {
val Array(userHandle, label) = line.split(",").map(_.trim)
userHandle -> label
}).toMap
bufferedSource.close
val userHandles = userWeight.keys
var number = 0
for(userHandle <- userHandles){
number += 1
val weight = userWeight(userHandle)
println("userHandle=" + userHandle + " weight=" + weight)
println("number=" + number)
val directoryRoot = "/data/nlp/corpora/twitter4food/ratedIndividuals/"
val fileRoot = new File(directoryRoot)
if(!fileRoot.exists()){
fileRoot.mkdir()
}
val directory = directoryRoot + weight
val file = new File(directory)
if(!file.exists()){
file.mkdir()
}
val output = directory + "/" + userHandle + ".txt"
val userfile = new File(output)
if(!userfile.exists()){
val pw = new PrintWriter(new FileOutputStream(output, true))
var cb = mkConfig
println("Getting Tweets: ")
////////////////Right here the program really starts - Gets the user tweets////
var twitter: Twitter = new TwitterFactory(cb.build()).getInstance()
for (i <- 1 to 16) {//get the first i pages of 200 tweets (we expect i*200 tweets), max 3200 total
try {
val paging = new Paging(i, 200)
//200 is the max # of tweets per page
val statuses = twitter.getUserTimeline(userHandle, paging)
if (statuses != null && statuses.size > 0) {
val u = statuses.get(0).getUser()
val uCreatedAt = dateToString(u.getCreatedAt())
for (status <- asScalaBuffer(statuses)) {
pw.println(
"@" + u.getScreenName() + "\\t" +
c(u.getName()) + "\\t" +
c(u.getId().toString) + "\\t" +
c(u.getLocation()) + "\\t" +
c(u.getFollowersCount().toString) + "\\t" +
c(u.getUtcOffset().toString) + "\\t" +
c(u.getTimeZone()) + "\\t" +
c(uCreatedAt.toString) + "\\t" +
c(u.getLang()) + "\\n" +
c(dateToString(status.getCreatedAt())) + "\\t" +
geoLocationToString(status.getGeoLocation()) + "\\t" +
placeToString(status.getPlace()) + "\\n" +
c(status.getText()))
pw.flush()
}
}
Thread.sleep(SLEEP)
} catch {
case e: Exception => {
// If there's an exception, try new credentials...
Thread.sleep(SLEEP)
cb = mkConfig
twitter = new TwitterFactory(cb.build()).getInstance()
val paging = new Paging(i, 200)
//200 is the max # of tweets per page
val statuses = twitter.getUserTimeline(userHandle, paging)
if (statuses != null && statuses.size > 0) {
val u = statuses.get(0).getUser()
val uCreatedAt = dateToString(u.getCreatedAt())
for (status <- asScalaBuffer(statuses)) {
pw.println(
"@" + u.getScreenName() + "\\t" +
c(u.getName()) + "\\t" +
c(u.getId().toString) + "\\t" +
c(u.getLocation()) + "\\t" +
c(u.getFollowersCount().toString) + "\\t" +
c(u.getUtcOffset().toString) + "\\t" +
c(u.getTimeZone()) + "\\t" +
c(uCreatedAt.toString) + "\\t" +
c(u.getLang()) + "\\n" +
c(dateToString(status.getCreatedAt())) + "\\t" +
geoLocationToString(status.getGeoLocation()) + "\\t" +
placeToString(status.getPlace()) + "\\n" +
c(status.getText()))
pw.flush()
}
}
Thread.sleep(SLEEP)
}
}
}
}
}
}
}
| clulab/twitter4food | src/main/scala/edu/arizona/sista/twitter4food/ScrapeIndividuals.scala | Scala | apache-2.0 | 9,202 |
/**
* Majyyka
*
* AspectRegistry.scala
*
* @author Myo-kun
* @license Lesser GNU Public License v3 (http://www.gnu.org/licenses/lgpl.html)
*/
package myokun.mods.majyyka.api
import java.util.LinkedHashMap
object AspectRegistry {
var aspects:LinkedHashMap[String, TAspect] = new LinkedHashMap[String, TAspect]
}
| myoKun345/Majyyka | majyyka_common/myokun/mods/majyyka/api/AspectRegistry.scala | Scala | lgpl-3.0 | 333 |
package org.jetbrains.plugins.scala
package lang.psi.types
import com.intellij.psi.search.LocalSearchScope
import com.intellij.psi.search.searches.ClassInheritorsSearch
import com.intellij.psi.{PsiClass, PsiTypeParameter}
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScTypeParam
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScModifierListOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import org.jetbrains.plugins.scala.lang.psi.types.api.ParameterizedType
import org.jetbrains.plugins.scala.util.ScEquivalenceUtil.areClassesEquivalent
/**
* Nikolay.Tropin
* 2014-04-03
*/
object ComparingUtil {
//this relation is actually symmetric
def isNeverSubClass(clazz1: PsiClass, clazz2: PsiClass): Boolean = {
val classes = Seq(clazz1, clazz2)
val oneFinal = clazz1.isEffectivelyFinal || clazz2.isEffectivelyFinal
val twoNonTraitsOrInterfaces = !classes.exists(_.isInterface)
def inheritorsInSameFile(clazz: PsiClass) = {
import scala.collection.JavaConversions._
ClassInheritorsSearch.search(clazz, new LocalSearchScope(clazz.getContainingFile), true).findAll().collect {
case x: ScTypeDefinition => x
}
}
def sealedAndAllChildrenAreIrreconcilable = {
val areSealed = classes.forall{
case modOwner: ScModifierListOwner => modOwner.hasModifierProperty("sealed")
case _ => false
}
def childrenAreIrreconcilable =
inheritorsInSameFile(clazz1).forall {
c1 => inheritorsInSameFile(clazz2).forall {
c2 => isNeverSubClass(c1, c2)
}
}
areSealed && childrenAreIrreconcilable
}
val areUnrelatedClasses =
!areClassesEquivalent(clazz1, clazz2) && !(clazz1.isInheritor(clazz2, true) || clazz2.isInheritor(clazz1, true))
areUnrelatedClasses && (oneFinal || twoNonTraitsOrInterfaces || sealedAndAllChildrenAreIrreconcilable)
}
def isNeverSubType(tp1: ScType, tp2: ScType, sameType: Boolean = false): Boolean = {
if (tp2.weakConforms(tp1) || tp1.weakConforms(tp2)) return false
val Seq(clazzOpt1, clazzOpt2) =
Seq(tp1, tp2).map(_.tryExtractDesignatorSingleton.extractClass)
if (clazzOpt1.isEmpty || clazzOpt2.isEmpty) return false
val (clazz1, clazz2) = (clazzOpt1.get, clazzOpt2.get)
def isNeverSameType(tp1: ScType, tp2: ScType) = isNeverSubType(tp1, tp2, sameType = true)
def isNeverSubArgs(tps1: Seq[ScType], tps2: Seq[ScType], tparams: Seq[PsiTypeParameter]): Boolean = {
def isNeverSubArg(t1: ScType, t2: ScType, variance: Int) = {
if (variance > 0) isNeverSubType(t2, t1)
else if (variance < 0) isNeverSubType(t1, t2)
else isNeverSameType(t1, t2)
}
def getVariance(tp: PsiTypeParameter) = tp match {
case scParam: ScTypeParam =>
if (scParam.isCovariant) 1
else if (scParam.isContravariant) -1
else 0
case _ => 0
}
tps1.zip(tps2).zip(tparams.map(getVariance)) exists {
case ((t1, t2), vr) => isNeverSubArg(t1, t2, vr)
case _ => false
}
}
def neverSubArgs() = {
(tp1, tp2) match {
case (ParameterizedType(_, args1), ParameterizedType(_, args2)) =>
isNeverSubArgs(args1, args2, clazz2.getTypeParameters)
case _ => false
}
}
isNeverSubClass(clazz1, clazz2) ||
((areClassesEquivalent(clazz1, clazz2) || (!sameType) && clazz1.isInheritor(clazz2, true)) && neverSubArgs())
}
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/types/ComparingUtil.scala | Scala | apache-2.0 | 3,576 |
import Deps._
import sbt.Keys._
import sbt._
import sbtassembly.AssemblyKeys._
import sbtassembly._
object ProjectBuild extends Build {
val extractServer = taskKey[Seq[File]]("Extract infinispan server")
lazy val getSparkVersion = taskKey[Unit]("Get Spark version used")
lazy val getInfinispanVersion = taskKey[Unit]("Get Infinispan version used")
lazy val ServerFolder = "infinispan-server"
lazy val core = (project in file("."))
.settings(commonSettings: _ *)
.settings(Publishing.settings: _*)
.settings(net.virtualvoid.sbt.graph.Plugin.graphSettings: _*)
.settings(
moduleName := "infinispan-spark",
libraryDependencies ++= Seq(sparkCore, sparkStreaming, sparkSQL, sparkHive, hotRodClient, queryDSL, jcip,
junit, scalaTest, scalaDMR, remoteQueryClient, protoStream, infinispanServerZip,
shrinkWrap, infinispanCore, sl4jbridge, log4j),
extractServer := {
val report = update.value
val deps = report.matching(artifactFilter(name = "infinispan-server-build", extension = "zip"))
val zipPath = deps.head.getAbsoluteFile
val destination = (resourceManaged in Test).value
val destinationWithoutVersion = destination / ServerFolder
IO.unzip(zipPath, destination).toSeq
(destination ** "*infinispan-server*").get.head.renameTo(destinationWithoutVersion)
installScalaModule(report, destinationWithoutVersion, scalaVersion.value)
(destinationWithoutVersion ** AllPassFilter).get
},
getSparkVersion := {
println(Versions.sparkVersion)
},
getInfinispanVersion := {
println(Versions.infinispanVersion)
},
resourceGenerators in Test <+= extractServer,
publishArtifact in Test := true,
mappings in(Test, packageBin) ~= (_.filter(!_._1.getPath.contains("infinispan-server")))
).disablePlugins(sbtassembly.AssemblyPlugin, plugins.JUnitXmlReportPlugin).aggregate(LocalProject("examplesTwitter"), LocalProject("examplesSnippets"))
lazy val examplesRoot = project in file("examples")
lazy val examplesSnippets = (project in file("examples/snippets"))
.dependsOn(core)
.settings(commonSettings: _ *)
.settings(
scalacOptions --= Seq("-Ywarn-dead-code","-Ywarn-unused"),
publishLocal := {},
publish := {}
)
lazy val examplesTwitter = (project in file("examples/twitter"))
.dependsOn(core)
.settings(commonSettings: _ *)
.settings(
libraryDependencies ++= Seq(twitterHbc, playJson),
assemblyJarName in assembly := "infinispan-spark-twitter.jar",
assemblyMergeStrategy in assembly := {
case PathList("META-INF", "MANIFEST.MF") => MergeStrategy.discard
case PathList("META-INF", "DEPENDENCIES.txt") => MergeStrategy.discard
case PathList(ps@_*) if ps.last == "UnusedStubClass.class" => MergeStrategy.discard
case PathList("META-INF", "io.netty.versions.properties") => MergeStrategy.discard
case "features.xml" => MergeStrategy.first
case x => val oldStrategy = (assemblyMergeStrategy in assembly).value
oldStrategy(x)
},
publishLocal := {},
publish := {}
)
def commonSettings = Seq(
scalaVersion := "2.11.8",
libraryDependencies ++= Seq(sparkCore, sparkStreaming, sparkSQL),
scalacOptions <++= scalaVersion map { v =>
val baseFlags = Seq("-deprecation", "-encoding", "UTF-8", "-feature", "-unchecked", "-Yno-adapted-args", "-Ywarn-dead-code")
baseFlags ++ Seq("-Xlint:_,-nullary-unit", "-Ywarn-unused", "-Ywarn-unused-import")
},
resolvers ++= Seq(
"Local Maven" at Path.userHome.asFile.toURI.toURL + ".m2/repository",
"JBoss Releases" at "https://repository.jboss.org/nexus/content/repositories/releases/",
"JBoss Snapshots" at "https://repository.jboss.org/nexus/content/repositories/snapshots/"
),
test in assembly := {},
parallelExecution in Test := false,
testOptions in Test += Tests.Argument(TestFrameworks.ScalaTest, "-u", s"${crossTarget.value.getAbsolutePath}/test-reports/", "-o"),
parallelExecution in Global := false
)
def installScalaModule(report: UpdateReport, serverDir: File, version: String): Unit = {
def moduleXML(scalaLibrary: String) = {
<module xmlns="urn:jboss:module:1.3" name="org.scala">
<resources>
<resource-root path={s"$scalaLibrary"}/>
</resources>
<dependencies>
<module name="sun.jdk"/>
</dependencies>
</module>
}
val moduleFile = "module.xml"
val scalaLibrary = report.matching(artifactFilter(name = "scala-library")).head
val moduleDir = serverDir / "modules" / "org" / "scala" / "main"
IO.createDirectory(moduleDir)
IO.write(moduleDir / moduleFile, moduleXML(scalaLibrary.getName).toString())
IO.copyFile(scalaLibrary, moduleDir / scalaLibrary.getName)
}
}
| galderz/infinispan-spark | project/ProjectBuild.scala | Scala | apache-2.0 | 5,293 |
package com.arcusys.valamis.user
import com.arcusys.learn.liferay.LiferayClasses._
import com.arcusys.learn.liferay.services.WebServerServletTokenHelper
import com.liferay.portal.kernel.util.{DigesterUtil, HttpUtil}
import scala.collection.JavaConverters._
import scala.util.Try
package object util {
implicit class UserExtension(val user: LUser) extends AnyVal {
def getPortraitUrl: String = {
val gender = if (user.isMale) "male" else "female"
val portraitId = user.getPortraitId
val token = HttpUtil.encodeURL(DigesterUtil.digest(user.getUserUuid))
val stamp = WebServerServletTokenHelper.getToken(portraitId)
s"/image/user_${gender}_portrait?img_id=$portraitId&img_id_token=$token&t=$stamp"
}
def getPublicUrl: String = {
Try(if (new LGroup(user.getGroup).getPublicLayoutsPageCount > 0) "/web/" + user.getScreenName else "").getOrElse("")
}
def getOrganizationNames: Set[String] = {
user.getOrganizations.asScala.map(org => org.getName).toSet
}
}
}
| arcusys/JSCORM | valamis-core/src/main/scala/com/arcusys/valamis/user/util/util.scala | Scala | gpl-3.0 | 1,029 |
object I0 {
def i1(i2: => Int) = i2
def i3[I4, I5](i6: I4 => I5): (I4 => I5) { def apply(i7: I4): I5 } = i6
val i8 = i3(i1)
}
| som-snytt/dotty | tests/pending/fuzzy/AE-a08bc91c922a8a040c6a8434bad6bc70c32e0614.scala | Scala | apache-2.0 | 132 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.kafka010
import java.{util => ju}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, SparkSession}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.execution.{QueryExecution, SQLExecution}
import org.apache.spark.sql.types.{BinaryType, StringType}
import org.apache.spark.util.Utils
/**
* The [[KafkaWriter]] class is used to write data from a batch query
* or structured streaming query, given by a [[QueryExecution]], to Kafka.
* The data is assumed to have a value column, and an optional topic and key
* columns. If the topic column is missing, then the topic must come from
* the 'topic' configuration option. If the key column is missing, then a
* null valued key field will be added to the
* [[org.apache.kafka.clients.producer.ProducerRecord]].
*/
private[kafka010] object KafkaWriter extends Logging {
val TOPIC_ATTRIBUTE_NAME: String = "topic"
val KEY_ATTRIBUTE_NAME: String = "key"
val VALUE_ATTRIBUTE_NAME: String = "value"
override def toString: String = "KafkaWriter"
def validateQuery(
queryExecution: QueryExecution,
kafkaParameters: ju.Map[String, Object],
topic: Option[String] = None): Unit = {
val schema = queryExecution.analyzed.output
schema.find(_.name == TOPIC_ATTRIBUTE_NAME).getOrElse(
if (topic.isEmpty) {
throw new AnalysisException(s"topic option required when no " +
s"'$TOPIC_ATTRIBUTE_NAME' attribute is present. Use the " +
s"${KafkaSourceProvider.TOPIC_OPTION_KEY} option for setting a topic.")
} else {
Literal(topic.get, StringType)
}
).dataType match {
case StringType => // good
case _ =>
throw new AnalysisException(s"Topic type must be a String")
}
schema.find(_.name == KEY_ATTRIBUTE_NAME).getOrElse(
Literal(null, StringType)
).dataType match {
case StringType | BinaryType => // good
case _ =>
throw new AnalysisException(s"$KEY_ATTRIBUTE_NAME attribute type " +
s"must be a String or BinaryType")
}
schema.find(_.name == VALUE_ATTRIBUTE_NAME).getOrElse(
throw new AnalysisException(s"Required attribute '$VALUE_ATTRIBUTE_NAME' not found")
).dataType match {
case StringType | BinaryType => // good
case _ =>
throw new AnalysisException(s"$VALUE_ATTRIBUTE_NAME attribute type " +
s"must be a String or BinaryType")
}
}
def write(
sparkSession: SparkSession,
queryExecution: QueryExecution,
kafkaParameters: ju.Map[String, Object],
topic: Option[String] = None): Unit = {
val schema = queryExecution.analyzed.output
validateQuery(queryExecution, kafkaParameters, topic)
queryExecution.toRdd.foreachPartition { iter =>
val writeTask = new KafkaWriteTask(kafkaParameters, schema, topic)
Utils.tryWithSafeFinally(block = writeTask.execute(iter))(
finallyBlock = writeTask.close())
}
}
}
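// Caller-side sketch (illustrative; the broker address and topic name are assumptions):
// the input must have a `value` column, plus either a `topic` column or the `topic`
// option, and may carry an optional `key` column.
//
// df.selectExpr("CAST(id AS STRING) AS key", "CAST(payload AS STRING) AS value")
//   .write
//   .format("kafka")
//   .option("kafka.bootstrap.servers", "broker:9092")
//   .option("topic", "events")
//   .save()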
| akopich/spark | external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaWriter.scala | Scala | apache-2.0 | 3,820 |
package pl.pej.trelloilaro.api.requestBuilder.builder.board
import pl.pej.trelloilaro.api.requestBuilder.{AllRequestParam, RequestParam, RequestBuilder}
trait BoardsBuilder { this: RequestBuilder[_] =>
def withBoards(boards: Boards *) = withParams("boards", boards)
}
trait Boards extends RequestParam
object Boards {
case object all extends Boards with AllRequestParam
case object members extends Boards
case object organization extends Boards
case object public extends Boards
case object open extends Boards
case object closed extends Boards
case object pinned extends Boards
case object unpinned extends Boards
case object starred extends Boards
}
| tomaszym/trelloilaro | src/main/scala/pl/pej/trelloilaro/api/requestBuilder/builder/board/Boards.scala | Scala | mit | 678 |
package dsmoq.maintenance.controllers
import scala.util.Failure
import scala.util.Success
import scala.util.Try
import org.scalatra.ActionResult
import org.scalatra.BadRequest
import org.scalatra.InternalServerError
import org.slf4j.MarkerFactory
import com.typesafe.scalalogging.LazyLogging
import dsmoq.maintenance.services.ErrorDetail
import dsmoq.maintenance.services.ServiceException
/**
* Utility for building responses.
*/
object ResponseUtil extends LazyLogging {
/**
* Log marker.
*/
val LOG_MARKER = MarkerFactory.getMarker("MAINTENANCE_RESPONSE_LOG")
/**
* Builds the error-side response for a processing result that may contain an error.
*
* @param result the processing result
* @param errorProc a function that takes the error message and the list of error details and returns the response body
* @return the response
*/
def resultAs(result: Try[ActionResult])(errorProc: (String, Seq[ErrorDetail]) => String): ActionResult = {
result match {
case Success(res) => res
case Failure(e) => {
e match {
case se: ServiceException => {
val content = errorProc(se.getMessage, se.details)
BadRequest(content)
}
case _ => {
logger.error(LOG_MARKER, e.getMessage, e)
val errorDetails = Seq(
ErrorDetail(e.getMessage, e.getStackTrace.map(_.toString))
)
val content = errorProc("内部エラーが発生しました。", errorDetails)
InternalServerError(content)
}
}
}
}
}
}
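// Usage sketch (hypothetical maintenance handler; `service.update` is an assumed call
// returning Try[ActionResult] and `renderErrorPage` an assumed template function, neither
// is defined in this file):
//
// ResponseUtil.resultAs(service.update(params)) { (message, details) =>
//   renderErrorPage(message, details)
// }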
| nkawa/dsmoq | server/maintenance/src/main/scala/dsmoq/maintenance/controllers/ResponseUtil.scala | Scala | apache-2.0 | 1,663 |
package tests.rescala
class DynamicEventTestSuite extends RETests {
allEngines("simple"){ engine => import engine._
val ev1 = Evt[Int]
val v1 = Var(8)
val snapshotEvent = Event {
ev1().map(i => i + v1())
}
val res = snapshotEvent.latest(0)
assert(res.now === 0)
ev1(10)
assert(res.now === 18)
v1.set(7)
assert(res.now === 18)
ev1(10)
assert(res.now === 17)
}
}
| volkc/REScala | Tests/shared/src/test/scala/tests/rescala/DynamicEventTestSuite.scala | Scala | apache-2.0 | 428 |
package simulacrum
import scala.annotation.StaticAnnotation
import scala.language.experimental.macros
import scala.reflect.macros.Context
/**
* Annotation that may be applied to methods on a type that is annotated with `@typeclass`.
*
* Doing so changes the code generation strategy used when generating the syntax ops type.
* Instead of the type class method name being used, the name specified on this annotation is used.
* If `alias` is true, two methods are generated, one with the original name and one with the
* specified name.
*/
class op(name: String, alias: Boolean = false) extends StaticAnnotation
/**
* Annotation that may be applied to methods on a type that is annotated with `@typeclass`.
*
* Doing so results in the method being excluded from the generated syntax ops type.
*/
class noop() extends StaticAnnotation
/**
* Annotation that may be applied to a trait or class of one type parameter to generate
* boilerplate that makes the type class easier to use.
*
* The only type parameter must be either a proper type or a unary type constructor.
* Types of other shapes, like binary type constructors, are not currently supported.
*
* As a result of adding this annotation, the following code is generated in the companion:
* - an implicit summoning method, providing syntax like `MyTypeClass[Type]` as a
* shortcut for `implicitly[MyTypeClass[Type]]`.
* - a trait, named `Ops`, which provides object oriented style forwarding
* methods -- aka, syntax -- for the methods defined directly on the type class.
* - a trait, named `AllOps`, which extends `Ops` and the `Ops` traits for any
* super types.
* - a trait, named `ToMyTypeClassOps`, which provides an implicit conversion
* that enables use of the `Ops` trait.
* - an object, named `ops`, which provides an implicit conversion to the
* `AllOps` trait.
*
* As a result, the ops can be used by either importing `MyTypeClass.ops._` or
* by mixing `MyTypeClass.ToMyTypeClassOps` in to a type.
*/
class typeclass(excludeParents: List[String] = Nil, generateAllOps: Boolean = true) extends StaticAnnotation {
def macroTransform(annottees: Any*): Any = macro TypeClassMacros.generateTypeClass
}
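// Illustrative sketch of the generated boilerplate in use. The `Semigroup` type class
// and the `|+|` alias below are assumptions made up for this example, not definitions
// from this file.
//
// @typeclass trait Semigroup[A] {
//   @op("|+|", alias = true) def append(x: A, y: A): A
// }
//
// implicit val intSemigroup: Semigroup[Int] =
//   new Semigroup[Int] { def append(x: Int, y: Int) = x + y }
//
// import Semigroup.ops._
// Semigroup[Int].append(1, 2) // summoner generated in the companion
// 1 |+| 2                     // syntax from the generated Ops trait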
object TypeClassMacros {
def generateTypeClass(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = {
import c.universe._
def freshName(prefix: String) = c.fresh(prefix)
def TermName(name: String) = newTermName(name)
def TypeName(name: String) = newTypeName(name)
val typeNames = tpnme
def showCode(t: Tree): String = show(t)
def trace(s: => String) = {
// Macro paradise seems to always output info statements, even without -verbose
if (sys.props.get("simulacrum.trace").isDefined) c.info(c.enclosingPosition, s, false)
}
class RewriteTypeName(from: TypeName, to: TypeName) extends Transformer {
override def transform(t: Tree): Tree = t match {
case Ident(name) if name == from => super.transform(Ident(to))
case TypeDef(mods, name, tparams, rhs) if name == from => super.transform(TypeDef(mods, to, tparams, rhs))
case other => super.transform(other)
}
}
case class Arguments(parentsToExclude: Set[TypeName], generateAllOps: Boolean)
val typeClassArguments: Arguments = c.prefix.tree match {
case Apply(_, args) =>
val excludeParents: Set[TypeName] = args.collectFirst { case q"excludeParents = $exclusions" =>
c.eval(c.Expr[List[String]](exclusions)).map { n => TypeName(n) }.toSet
}.getOrElse(Set.empty)
val generateAllOps: Boolean = args.collectFirst { case q"generateAllOps = $gen" =>
c.eval(c.Expr[Boolean](gen))
}.getOrElse(true)
Arguments(excludeParents, generateAllOps)
case other => c.abort(c.enclosingPosition, "not possible - macro invoked on type that does not have @typeclass: " + showRaw(other))
}
def determineOpsMethodName(sourceMethod: DefDef): List[TermName] = {
val suppress = sourceMethod.mods.annotations.collectFirst {
case q"new noop()" => ()
case q"new simulacrum.noop()" => ()
}.isDefined
if (suppress) Nil
else {
def genAlias(alias: String, rest: List[Tree]) = {
val aliasTermName = TermName(reflect.NameTransformer.encode(alias))
rest match {
case Nil =>
List(aliasTermName)
case Literal(Constant(alias: Boolean)) :: _ =>
if (alias) List(sourceMethod.name.toTermName, aliasTermName)
else List(aliasTermName)
case q"alias = ${Literal(Constant(alias: Boolean))}" :: _ =>
if (alias) List(sourceMethod.name.toTermName, aliasTermName)
else List(aliasTermName)
case other =>
List(aliasTermName)
}
}
val overrides = sourceMethod.mods.annotations.collect {
case q"new op(${Literal(Constant(alias: String))}, ..$rest)" => genAlias(alias, rest)
case q"new simulacrum.op(${Literal(Constant(alias: String))}, ..$rest)" => genAlias(alias, rest)
}
if (overrides.isEmpty) List(sourceMethod.name.toTermName) else overrides.flatten
}
}
def filterSimulacrumAnnotations(mods: Modifiers): Modifiers = {
val filteredAnnotations = mods.annotations.filter {
case q"new typeclass(..${_})" => false
case q"new op(..${_})" => false
case q"new noop(..${_})" => false
case q"new simulacrum.${_}(..${_})" => false
case other => true
}
Modifiers(mods.flags, mods.privateWithin, filteredAnnotations)
}
def adaptMethodForProperType(tcInstanceName: TermName, tparamName: Name, method: DefDef): List[DefDef] = {
// Method should only be adapted if the first parameter in the first parameter list
// matches `tparamName`
val TargetTypeName = tparamName
for {
firstParamList <- method.vparamss.headOption.toList
firstParam <- firstParamList.headOption.toList
Ident(TargetTypeName) <- Option(firstParam.tpt).toList
paramssWithoutFirst = {
if (firstParamList.tail.isEmpty) method.vparamss.tail
else firstParamList.tail :: method.vparamss.tail
}
paramNamess: List[List[Tree]] = {
val original = method.vparamss map { _ map { p => Ident(p.name) } }
original.updated(0, original(0).updated(0, q"self"))
}
rhs = paramNamess.foldLeft(Select(Ident(tcInstanceName), method.name): Tree) { (tree, paramNames) =>
Apply(tree, paramNames)
}
name <- determineOpsMethodName(method)
if !method.mods.hasFlag(Flag.OVERRIDE)
} yield DefDef(Modifiers(NoFlags), name, method.tparams, paramssWithoutFirst, method.tpt, rhs)
}
def adaptMethodForAppliedType(tcInstanceName: TermName, tparamName: Name, method: DefDef, liftedTypeArg: TypeDef): List[DefDef] = {
// Method should only be adapted if the first parameter in the first parameter list
// is an F[X] for some (potentially applied) type X
val TargetTypeName = tparamName
(for {
firstParamList <- method.vparamss.headOption.toList
firstParam <- firstParamList.headOption.toList
AppliedTypeTree(Ident(TargetTypeName), arg :: Nil) <- Option(firstParam.tpt).toList
if !method.mods.hasFlag(Flag.OVERRIDE)
} yield {
val typeArgs = method.tparams.map { _.name }.toSet
val simpleArgOpt: Option[Name] = {
def extract(tree: Tree): Option[Name] = tree match {
case Ident(name: TypeName) if typeArgs contains name => Some(name)
case tq"$ctor[..$targs]" =>
targs.foldLeft(None: Option[Name]) { (acc, targ) => extract(targ) }
case other => None
}
extract(arg)
}
simpleArgOpt match {
case None => Nil
case Some(simpleArg) =>
// Rewrites all occurrences of simpleArg to liftedTypeArg.name
val rewriteSimpleArg = new RewriteTypeName(from = simpleArg.toTypeName, to = liftedTypeArg.name)
val (paramssFixed, removeSimpleArgTParam) = {
val withoutFirst = {
if (firstParamList.tail.isEmpty) method.vparamss.tail
else firstParamList.tail :: method.vparamss.tail
}
val withRewrittenFirst = withoutFirst map { _ map { param =>
ValDef(param.mods, param.name, rewriteSimpleArg.transform(param.tpt), rewriteSimpleArg.transform(param.rhs))
}}
if (arg equalsStructure Ident(simpleArg)) {
(withRewrittenFirst, true)
} else {
val typeEqualityType = tq"${liftedTypeArg.name} =:= $arg"
val equalityEvidence = ValDef(Modifiers(Flag.IMPLICIT), TermName(freshName("ev")), typeEqualityType, EmptyTree)
val updatedParamss = {
if (withRewrittenFirst.nonEmpty && withRewrittenFirst.last.head.mods.hasFlag(Flag.IMPLICIT))
withRewrittenFirst.init ++ List(equalityEvidence +: withRewrittenFirst.last)
else {
withRewrittenFirst ++ List(List(equalityEvidence))
}
}
(updatedParamss, false)
}
}
val paramNamess: List[List[Tree]] = {
val original = method.vparamss map { _ map { p => Ident(p.name) } }
val replacement = if (removeSimpleArgTParam) q"self" else q"self.asInstanceOf[${tparamName.toTypeName}[$arg]]"
original.updated(0, original(0).updated(0, replacement))
}
val rhs = paramNamess.foldLeft(Select(Ident(tcInstanceName), method.name): Tree) { (tree, paramNames) =>
Apply(tree, paramNames)
}
val fixedTParams = if (removeSimpleArgTParam) method.tparams.filter { _.name != simpleArg } else method.tparams
determineOpsMethodName(method) map { name =>
// Important: let the return type be inferred here, so the return type doesn't need to be rewritten
q"def $name[..$fixedTParams](...$paramssFixed) = $rhs"
}
}
}).flatten
}
def adaptMethods(typeClass: ClassDef, tcInstanceName: TermName, tparamName: Name, proper: Boolean, liftedTypeArg: Option[TypeDef]): List[DefDef] = {
val typeClassMethods = typeClass.impl.children.collect {
case m: DefDef if !m.mods.hasFlag(Flag.PRIVATE) && !m.mods.hasFlag(Flag.PROTECTED) => m
}
typeClassMethods.flatMap { method =>
val adapted =
if (proper) adaptMethodForProperType(tcInstanceName, tparamName, method)
else adaptMethodForAppliedType(tcInstanceName, tparamName, method, liftedTypeArg.get)
adapted
}
}
def generateOps(typeClass: ClassDef, tcInstanceName: TermName, tparam: TypeDef, proper: Boolean, liftedTypeArg: Option[TypeDef]): (ClassDef, Set[TypeName]) = {
val adaptedMethods = adaptMethods(typeClass, tcInstanceName, tparam.name, proper, liftedTypeArg)
val tparams = List(tparam) ++ liftedTypeArg
val tparamNames = tparams.map { _.name }
val targetType = liftedTypeArg.map(lta => tq"${tparam.name}[${lta.name}]").getOrElse(tq"${tparam.name}")
val opsTrait = q"""trait Ops[..$tparams] {
def $tcInstanceName: ${typeClass.name}[${tparam.name}]
def self: $targetType
..$adaptedMethods
}"""
val reservedTypeNames = adaptedMethods.flatMap(_.tparams.map(_.name)).toSet ++ tparamNames
(opsTrait, reservedTypeNames)
}
def generateAllOps(typeClass: ClassDef, tcInstanceName: TermName, tparam: TypeDef, liftedTypeArg: Option[TypeDef]): ClassDef = {
val tparams = List(tparam) ++ liftedTypeArg
val tparamNames = tparams.map { _.name }
val tcargs = typeClass.mods.annotations.collectFirst {
case q"new typeclass(..${args})" => args
case q"new simulacrum.typeclass(..${args})" => args
}
val typeClassParents: List[TypeName] = typeClass.impl.parents.collect {
case tq"${Ident(parentTypeClassTypeName)}[${_}]" => parentTypeClassTypeName.toTypeName
}
val allOpsParents = typeClassParents collect {
case parent if !(typeClassArguments.parentsToExclude contains parent) =>
tq"${parent.toTermName}.AllOps[..$tparamNames]"
}
val unknownParentExclusions = (typeClassArguments.parentsToExclude -- typeClassParents.toSet).toList.map(_.toString).sorted
if (unknownParentExclusions.nonEmpty) {
c.error(c.enclosingPosition, s"@typeclass excludes unknown parent types: ${unknownParentExclusions.mkString}")
}
q"""trait AllOps[..$tparams] extends Ops[..$tparamNames] with ..$allOpsParents {
def $tcInstanceName: ${typeClass.name}[${tparam.name}]
}"""
}
def generateCompanion(typeClass: ClassDef, tparam: TypeDef, proper: Boolean, comp: Tree) = {
val summoner = q"def apply[$tparam](implicit instance: ${typeClass.name}[${tparam.name}]): ${typeClass.name}[${tparam.name}] = instance"
val liftedTypeArg = if (proper) None else Some {
// We have a TypeClass[F[_ >: L <: U]], so let's create a F[X >: L <: U] for a fresh name X
// For example:
// TypeDef(
// Modifiers(PARAM), TypeName("F"), List(
// TypeDef(Modifiers(PARAM), typeNames.WILDCARD, List(), TypeBoundsTree(Ident(TypeName("Lower")), Ident(TypeName("Upper"))))
// ), TypeBoundsTree(EmptyTree, EmptyTree))
val TypeDef(_, _, tparamtparams, _) = tparam
tparamtparams.find { _.name == typeNames.WILDCARD } match {
case None => c.abort(c.enclosingPosition, "Cannot find a wildcard type in supposed unary type constructor")
case Some(q"$mods type ${_}[..$tpps] = $rhs") =>
val fixedMods = Modifiers(NoFlags, mods.privateWithin, mods.annotations)
val liftedTypeArgName = TypeName(freshName("lta"))
object rewriteWildcard extends Transformer {
override def transform(t: Tree): Tree = t match {
case Ident(typeNames.WILDCARD) => super.transform(Ident(liftedTypeArgName))
case other => super.transform(t)
}
}
rewriteWildcard.transformTypeDefs(List(TypeDef(fixedMods, liftedTypeArgName, tpps, rhs))).head
}
}
val tcInstanceName = TermName("typeClassInstance")
val (opsTrait, opsReservedTParamNames) = generateOps(typeClass, tcInstanceName, tparam, proper, liftedTypeArg)
val allOpsTrait = generateAllOps(typeClass, tcInstanceName, tparam, liftedTypeArg)
def generateOpsImplicitConversion(opsType: TypeName, methodName: TermName) = {
val tparams = List(tparam) ++ liftedTypeArg
val tparamNames = tparams.map(_.name)
val targetType = liftedTypeArg.map(lta => tq"${tparam.name}[${lta.name}]").getOrElse(tq"${tparam.name}")
q"""
implicit def $methodName[..$tparams](target: $targetType)(implicit tc: ${typeClass.name}[${tparam.name}]): $opsType[..$tparamNames] =
new $opsType[..$tparamNames] { val self = target; val $tcInstanceName = tc }
"""
}
val toOpsTrait = {
val toOpsTraitName = TypeName(s"To${typeClass.name}Ops")
val method = generateOpsImplicitConversion(opsTrait.name, TermName(s"to${typeClass.name}Ops"))
q"trait $toOpsTraitName { $method }"
}
val allOpsConversion = {
val method = generateOpsImplicitConversion(TypeName("AllOps"), TermName(s"toAll${typeClass.name}Ops"))
q"object ops { $method }"
}
val opsMembers: List[Tree] = {
val ops = List(opsTrait, toOpsTrait)
val allOps = if (typeClassArguments.generateAllOps) List(allOpsTrait, allOpsConversion) else Nil
ops ++ allOps
}
val q"object $name extends ..$bases { ..$body }" = comp
val companion = q"""
object $name extends ..$bases {
..$body
$summoner
..$opsMembers
}
"""
// Rewrite liftedTypeArg.name to something easier to read
liftedTypeArg.fold(companion: Tree) { lta =>
val potentialNames = ('A' to 'Z').map(ch => TypeName(ch.toString)).toList
val newLiftedTypeArgName = potentialNames.find(nme => !(opsReservedTParamNames contains nme))
newLiftedTypeArgName.fold(companion: Tree) { newName =>
new RewriteTypeName(from = lta.name, to = newName).transform(companion)
}
}
}
def modify(typeClass: ClassDef, companion: Option[ModuleDef]) = {
val (tparam, proper) = typeClass.tparams match {
case hd :: Nil =>
hd.tparams.size match {
case 0 => (hd, true)
case 1 => (hd, false)
case n => c.abort(c.enclosingPosition, "@typeclass may only be applied to types that take a single proper type or type constructor")
}
case other => c.abort(c.enclosingPosition, "@typeclass may only be applied to types that take a single type parameter")
}
val modifiedTypeClass = {
val filteredBody = typeClass.impl.body.map {
case q"$mods def $name[..$tparams](...$vparamss): $tpt = $rhs" =>
q"${filterSimulacrumAnnotations(mods)} def $name[..$tparams](...$vparamss): $tpt = $rhs"
case other => other
}
val filteredImpl = Template(typeClass.impl.parents, typeClass.impl.self, filteredBody)
ClassDef(filterSimulacrumAnnotations(typeClass.mods), typeClass.name, typeClass.tparams, filteredImpl)
}
val modifiedCompanion = generateCompanion(typeClass, tparam, proper, companion match {
case Some(c) => c
case None => q"object ${typeClass.name.toTermName} {}"
})
val result = c.Expr(q"""
$modifiedTypeClass
$modifiedCompanion
""")
trace(s"Generated type class ${typeClass.name}:\n" + showCode(result.tree))
result
}
annottees.map(_.tree) match {
case (typeClass: ClassDef) :: Nil => modify(typeClass, None)
case (typeClass: ClassDef) :: (companion: ModuleDef) :: Nil => modify(typeClass, Some(companion))
case other :: Nil =>
c.abort(c.enclosingPosition, "@typeclass can only be applied to traits or abstract classes that take 1 type parameter which is either a proper type or a type constructor")
}
}
}
| malcolmgreaves/simulacrum | core/src/main/scala_2.10/simulacrum/typeclass.scala | Scala | bsd-3-clause | 18,540 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.ejson
import slamdata.Predef.{Map => SMap, _}
import jawn.{Facade, SimpleFacade, SupportParser}
import matryoshka._
import matryoshka.implicits._
import scalaz.{:<:, Functor}
object jsonParser {
def apply[T, F[_]: Functor]
(implicit T: Corecursive.Aux[T, F], C: Common :<: F, O: Obj :<: F)
: SupportParser[T] =
new SupportParser[T] {
implicit val facade: Facade[T] =
new SimpleFacade[T] {
def jarray(arr: List[T]) = C(Arr(arr)).embed
def jobject(obj: SMap[String, T]) = O(Obj(ListMap(obj.toList: _*))).embed
def jnull() = C(Null[T]()).embed
def jfalse() = C(Bool[T](false)).embed
def jtrue() = C(Bool[T](true)).embed
def jnum(n: String) = C(Dec[T](BigDecimal(n))).embed
def jint(n: String) = C(Dec[T](BigDecimal(n))).embed
def jstring(s: String) = C(Str[T](s)).embed
}
}
}
| jedesah/Quasar | ejson/src/main/scala/quasar/ejson/jsonParser.scala | Scala | apache-2.0 | 1,631 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.apache.spark.SparkEnv
import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.execution.datasources.v2.BatchScanExec
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
class SparkPlanSuite extends QueryTest with SharedSQLContext {
test("SPARK-21619 execution of a canonicalized plan should fail") {
val plan = spark.range(10).queryExecution.executedPlan.canonicalized
intercept[IllegalStateException] { plan.execute() }
intercept[IllegalStateException] { plan.executeCollect() }
intercept[IllegalStateException] { plan.executeCollectPublic() }
intercept[IllegalStateException] { plan.executeToIterator() }
intercept[IllegalStateException] { plan.executeBroadcast() }
intercept[IllegalStateException] { plan.executeTake(1) }
}
test("SPARK-23731 plans should be canonicalizable after being (de)serialized") {
withSQLConf(SQLConf.USE_V1_SOURCE_READER_LIST.key -> "parquet") {
withTempPath { path =>
spark.range(1).write.parquet(path.getAbsolutePath)
val df = spark.read.parquet(path.getAbsolutePath)
val fileSourceScanExec =
df.queryExecution.sparkPlan.collectFirst { case p: FileSourceScanExec => p }.get
val serializer = SparkEnv.get.serializer.newInstance()
val readback =
serializer.deserialize[FileSourceScanExec](serializer.serialize(fileSourceScanExec))
try {
readback.canonicalized
} catch {
case e: Throwable => fail("FileSourceScanExec was not canonicalizable", e)
}
}
}
}
test("SPARK-27418 BatchScanExec should be canonicalizable after being (de)serialized") {
withSQLConf(SQLConf.USE_V1_SOURCE_READER_LIST.key -> "") {
withTempPath { path =>
spark.range(1).write.parquet(path.getAbsolutePath)
val df = spark.read.parquet(path.getAbsolutePath)
val batchScanExec =
df.queryExecution.sparkPlan.collectFirst { case p: BatchScanExec => p }.get
val serializer = SparkEnv.get.serializer.newInstance()
val readback =
serializer.deserialize[BatchScanExec](serializer.serialize(batchScanExec))
try {
readback.canonicalized
} catch {
case e: Throwable => fail("BatchScanExec was not canonicalizable", e)
}
}
}
}
test("SPARK-25357 SparkPlanInfo of FileScan contains nonEmpty metadata") {
withSQLConf(SQLConf.USE_V1_SOURCE_READER_LIST.key -> "parquet") {
withTempPath { path =>
spark.range(5).write.parquet(path.getAbsolutePath)
val f = spark.read.parquet(path.getAbsolutePath)
assert(SparkPlanInfo.fromSparkPlan(f.queryExecution.sparkPlan).metadata.nonEmpty)
}
}
}
}
| aosagie/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/SparkPlanSuite.scala | Scala | apache-2.0 | 3,613 |
/*******************************************************************************
* Copyright (C) 2012 Łukasz Szpakowski.
*
* This library is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package org.lkbgraph.algorithm.spec
import org.scalatest.Spec
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.prop.PropertyChecks
import org.scalacheck.Gen
import org.lkbgraph._
import org.lkbgraph.immutable._
import org.lkbgraph.algorithm._
import scala.util.Random
import org.scalacheck.Arbitrary
trait MinSpanningTreeBehaviors[GG[XV, XX, XE[+XY, +XZ] <: EdgeLike[XY, XZ, XE]] <: base.Graph[XV, XX, XE] with base.GraphLike[XV, XX, XE, _]] extends PropertyChecks with ShouldMatchers
{
this: Spec =>
import org.lkbgraph.spec.GraphParamGen._
import org.lkbgraph.spec.GraphParamGen.GraphGen._
def graphFactory: base.GraphFactory[GG]
def minSpanningTreeStrategy: MinSpanningTreeStrategy
def genMstWUndiEdgesForMst(root: VertexType, vs: Set[VertexType]) =
TreeGen.genEdges[Int, Weighted[Int], UndiEdge](Set(root), vs - root) { (v, u, w) => v ~ u w w } (Arbitrary { Gen.choose(1, 10) })
def genGraphWUndiEdgesForMst(vs: Set[VertexType]) =
Gen.containerOf[Set, UndiEdge[VertexType, Weighted[Int]]](Gen.pick(2, vs).map2(Gen.choose(30, 40)) {
case (Seq(v, u), w) => WUndiEdge[VertexType, Int](v, u, w)
}).map { es => es.map { e => (e.toUnweightedEdge, e) }.toMap.values.toSet }
case class MinSpanningTreeParamData[GP, V, E](ps: Set[GP], vs: Set[V], gEs: Set[E], mstEs: Set[E])
def genMinSpanningTreeParamData(root: VertexType, vs: Set[VertexType]) =
for {
mstEs <- genMstWUndiEdgesForMst(root, vs)
gEs <- genGraphWUndiEdgesForMst(vs).map { es => es.filterNot { e => mstEs.exists { _ ==~ e } } }
} yield MinSpanningTreeParamData(vs.map(V[VertexType]) ++ gEs ++ mstEs, vs, gEs, mstEs.toSet)
def minSpanningTree
{
describe("minSpanningTree") {
it("should return a tree with one vertex for the empty graph with only one vertex") {
forAll(Gen.alphaChar) {
v =>
val g = (graphFactory[VertexType, Weighted[Int], UndiEdge]() ++ Seq(V(v)))
val mst = g.minSpanningTree(minSpanningTreeStrategy)
mst should not be None
mst.get.vertices.toSet should be === Set(v)
mst.get.vertices should have size 1
mst.get.edges.toSet should be === Set()
}
}
it("should return a minimum spanning tree for the graph with one connected component") {
forAll(for(vs <- genVertices; v <- Gen.oneOf(vs.toSeq); d <- genMinSpanningTreeParamData(v, vs)) yield d) {
case MinSpanningTreeParamData(ps, vs, gEs, mstEs) =>
val g = graphFactory() ++ ps
//println("mst1")
val mst = g.minSpanningTree(minSpanningTreeStrategy)
mst should not be None
mst.get.vertices.toSet should be === vs
mst.get.vertices should have size(vs.size)
mst.get.edges.toSet should be === mstEs
mst.get.edges should have size(mstEs.size)
}
}
it("should return none for the graph with two connected components") {
forAll(
for {
vs <- genVertices
v1 <- Gen.oneOf(vs.toSeq)
v2 <- Gen.oneOf((vs - v1).toSeq)
n <- Gen.choose(1, vs.size - 1)
us <- Gen.pick(n, vs.toSeq)
es1 <- genWUndiEdges(vs -- us)
es2 <- genWUndiEdges(us.toSet)
} yield {
val vs1 = (vs -- us)
val vs2 = us.toSet
val ps1 = vs1.map(V[VertexType]).toSet ++ es1
val ps2 = vs2.map(V[VertexType]).toSet ++ es2
(GraphParamData(ps1, vs1, es1), GraphParamData(ps2, vs2, es2))
}
) {
case (GraphParamData(ps1, vs1, es1), GraphParamData(ps2, vs2, es2)) =>
val g = graphFactory() ++ ps1 ++ ps2
g.minSpanningTree(minSpanningTreeStrategy) should be === None
}
}
}
describe("minSpanningTrees") {
it("should return a tree with vertices for the empty graph with vertex") {
forAll(genVertices) {
vs =>
val g = graphFactory[VertexType, Weighted[Int], UndiEdge]() ++ vs.map(V[VertexType])
val msts = g.minSpanningTrees(minSpanningTreeStrategy)
msts.map { _.vertices.toSet } should be ===(vs.map { v => Set(v) })
msts.map { _.vertices.size } should be ===(vs.map { v => 1 })
}
}
it("should return two minimum spanning trees for the graph with two connected components") {
forAll {
for {
vs <- genVertices
us <- Gen.someOf(vs)
v1 <- Gen.oneOf((vs -- us).toSeq)
v2 <- Gen.oneOf(us.toSeq)
d1 <- genMinSpanningTreeParamData(v1, vs -- us)
d2 <- genMinSpanningTreeParamData(v2, us.toSet)
} yield (d1, d2)
} {
case (MinSpanningTreeParamData(ps1, vs1, gEs1, mstEs1), MinSpanningTreeParamData(ps2, vs2, gEs2, mstEs2)) =>
val g = graphFactory() ++ ps1 ++ ps2
//println("msts2")
val msts = g.minSpanningTrees(minSpanningTreeStrategy).toSeq
msts should have size(2)
msts(0).vertices.toSet should(be === vs1 or be === vs2)
msts(0).vertices should(have size(vs1.size) or have size(vs2.size))
msts(0).edges.toSet should(be === mstEs1 or be === mstEs2)
msts(0).edges should(have size(mstEs1.size) or have size(mstEs2.size))
msts(1).vertices.toSet should(be === vs1 or be === vs2)
msts(1).vertices should(have size(vs1.size) or have size(vs2.size))
msts(1).edges.toSet should(be === mstEs1 or be === mstEs2)
msts(1).edges should(have size(mstEs1.size) or have size(mstEs2.size))
          msts(0).vertices should not be msts(1).vertices
}
}
}
}
}
| luckboy/LkbGraph | src/test/org/lkbgraph/algorithm/spec/MinSpanningTreeBehaviors.scala | Scala | lgpl-3.0 | 6,679 |
/*
* Copyright 2014 Alan Rodas Bonjour
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.alanrodas.scaland.cli
/**
* This package defines the classes that will hold
* all the definitions of commands, arguments and flags.
*
 * All these definitions are later used to process the
 * user callbacks. Although you can use the definitions
 * package directly to construct the commands that your
 * application will accept, instances of these classes
 * are usually created internally by the ''builders'';
 * users of this library will typically construct
 * instances through the provided DSL.
*
* If you are planning not to use the DSL, consider starting on
* [[com.alanrodas.scaland.cli.definitions.CommandDefinition]]
* to learn your way around the classes in this package.
*/
package object definitions | alanrodas/scaland | cli/src/main/scala/com/alanrodas/scaland/cli/definitions/package.scala | Scala | apache-2.0 | 1,327 |
package ru.pavkin.todoist.api.core.command
import cats.{FlatMap, Functor}
import ru.pavkin.todoist.api.RawRequest
import ru.pavkin.todoist.api.core.CommandReturns.Aux
import ru.pavkin.todoist.api.core._
import ru.pavkin.todoist.api.core.decoder.SingleCommandResponseDecoder
import ru.pavkin.todoist.api.utils.{Flattener, Produce}
import shapeless.{::, HNil}
class SingleCommandRequestDefinition[F[_], L[_], P[_], C, R, Req, Base]
(requestFactory: RawRequest Produce Req,
executor: RequestExecutor.Aux[Req, L, Base],
flattener: Flattener[F, L, P],
parser: SingleCommandResponseDecoder.Aux[P, C, Base, R])
(command: C)
(implicit val trr: ToRawRequest[C],
override implicit val F: Functor[L])
extends CompositeExecutedRequestDefinition[F, L, P, R, Req, Base](
requestFactory, executor, flattener
) with SingleCommandDefinition[F, P, C, R, Base] {
def toRawRequest: RawRequest = trr.rawRequest(command)
def parse(r: Base): P[R] = parser.parse(command)(r)
def and[CC, RR](otherCommand: CC)
(implicit
FM: FlatMap[P],
tr: ToRawRequest[CC],
cr: Aux[CC, RR],
otherParser: SingleCommandResponseDecoder.Aux[P, CC, Base, RR])
: MultipleCommandDefinition[F, P, CC :: C :: HNil, RR :: R :: HNil, Base] =
new MultipleCommandRequestDefinition[F, L, P, CC :: C :: HNil, RR :: R :: HNil, Req, Base](
requestFactory, executor, flattener, parser.combine(otherParser)
)(otherCommand :: command :: HNil)
}
| vpavkin/todoist-api-scala | core/src/main/scala/ru/pavkin/todoist/api/core/command/SingleCommandRequestDefinition.scala | Scala | mit | 1,533 |
package concurrent
import play.libs.Akka
import scala.concurrent.ExecutionContext
object Pools {
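  // The "play.js-pool" dispatcher is expected to be declared in the application
  // configuration. An illustrative application.conf entry (the executor type and
  // pool size below are assumptions, tune them for the workload):
  //
  //   play.js-pool {
  //     type = Dispatcher
  //     executor = "thread-pool-executor"
  //     thread-pool-executor {
  //       fixed-pool-size = 8
  //     }
  //   }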
implicit val jsPool: ExecutionContext = Akka.system.dispatchers.lookup("play.js-pool")
}
| dribba/play-node-benchmark | app/concurrent/Pools.scala | Scala | mit | 192 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.batch
import org.apache.flink.table.functions.UserDefinedFunction
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.plan.`trait`.{FlinkRelDistribution, FlinkRelDistributionTraitDef}
import org.apache.flink.table.planner.plan.nodes.exec.batch.BatchExecSortAggregate
import org.apache.flink.table.planner.plan.nodes.exec.{ExecEdge, ExecNode}
import org.apache.flink.table.planner.plan.rules.physical.batch.BatchExecJoinRuleBase
import org.apache.flink.table.planner.plan.utils.{FlinkRelOptUtil, RelExplainUtil}
import org.apache.calcite.plan.{RelOptCluster, RelOptRule, RelTraitSet}
import org.apache.calcite.rel.RelDistribution.Type.{HASH_DISTRIBUTED, SINGLETON}
import org.apache.calcite.rel._
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.AggregateCall
import org.apache.calcite.util.{ImmutableIntList, Util}
import java.util
import scala.collection.JavaConversions._
/**
* Batch physical RelNode for (global) sort-based aggregate operator.
*
* @see [[BatchPhysicalGroupAggregateBase]] for more info.
*/
class BatchPhysicalSortAggregate(
cluster: RelOptCluster,
traitSet: RelTraitSet,
inputRel: RelNode,
outputRowType: RelDataType,
inputRowType: RelDataType,
val aggInputRowType: RelDataType,
grouping: Array[Int],
auxGrouping: Array[Int],
aggCallToAggFunction: Seq[(AggregateCall, UserDefinedFunction)],
isMerge: Boolean)
extends BatchPhysicalSortAggregateBase(
cluster,
traitSet,
inputRel,
outputRowType,
grouping,
auxGrouping,
aggCallToAggFunction,
isMerge = isMerge,
isFinal = true) {
override def copy(traitSet: RelTraitSet, inputs: util.List[RelNode]): RelNode = {
new BatchPhysicalSortAggregate(
cluster,
traitSet,
inputs.get(0),
outputRowType,
inputRowType,
aggInputRowType,
grouping,
auxGrouping,
aggCallToAggFunction,
isMerge)
}
override def explainTerms(pw: RelWriter): RelWriter = {
super.explainTerms(pw)
.item("isMerge", isMerge)
.itemIf("groupBy",
RelExplainUtil.fieldToString(grouping, inputRowType), grouping.nonEmpty)
.itemIf("auxGrouping",
RelExplainUtil.fieldToString(auxGrouping, inputRowType), auxGrouping.nonEmpty)
.item("select", RelExplainUtil.groupAggregationToString(
inputRowType,
outputRowType,
grouping,
auxGrouping,
aggCallToAggFunction,
isMerge,
isGlobal = true))
}
override def satisfyTraits(requiredTraitSet: RelTraitSet): Option[RelNode] = {
val requiredDistribution = requiredTraitSet.getTrait(FlinkRelDistributionTraitDef.INSTANCE)
val canSatisfy = requiredDistribution.getType match {
case SINGLETON => grouping.length == 0
case HASH_DISTRIBUTED =>
val shuffleKeys = requiredDistribution.getKeys
val groupKeysList = ImmutableIntList.of(grouping.indices.toArray: _*)
if (requiredDistribution.requireStrict) {
shuffleKeys == groupKeysList
} else if (Util.startsWith(shuffleKeys, groupKeysList)) {
// If required distribution is not strict, Hash[a] can satisfy Hash[a, b].
// so return true if shuffleKeys(Hash[a, b]) start with groupKeys(Hash[a])
true
} else {
// If partialKey is enabled, try to use partial key to satisfy the required distribution
val tableConfig = FlinkRelOptUtil.getTableConfigFromContext(this)
val partialKeyEnabled = tableConfig.getConfiguration.getBoolean(
BatchExecJoinRuleBase.TABLE_OPTIMIZER_SHUFFLE_BY_PARTIAL_KEY_ENABLED)
partialKeyEnabled && groupKeysList.containsAll(shuffleKeys)
}
case _ => false
}
if (!canSatisfy) {
return None
}
val inputRequiredDistribution = requiredDistribution.getType match {
case SINGLETON => requiredDistribution
case HASH_DISTRIBUTED =>
val shuffleKeys = requiredDistribution.getKeys
val groupKeysList = ImmutableIntList.of(grouping.indices.toArray: _*)
if (requiredDistribution.requireStrict) {
FlinkRelDistribution.hash(grouping, requireStrict = true)
} else if (Util.startsWith(shuffleKeys, groupKeysList)) {
// Hash [a] can satisfy Hash[a, b]
FlinkRelDistribution.hash(grouping, requireStrict = false)
} else {
// use partial key to satisfy the required distribution
FlinkRelDistribution.hash(shuffleKeys.map(grouping(_)).toArray, requireStrict = false)
}
}
val providedCollation = if (grouping.length == 0) {
RelCollations.EMPTY
} else {
val providedFieldCollations = grouping.map(FlinkRelOptUtil.ofRelFieldCollation).toList
RelCollations.of(providedFieldCollations)
}
val requiredCollation = requiredTraitSet.getTrait(RelCollationTraitDef.INSTANCE)
val newProvidedTraitSet = if (providedCollation.satisfies(requiredCollation)) {
getTraitSet.replace(requiredDistribution).replace(requiredCollation)
} else {
getTraitSet.replace(requiredDistribution)
}
val newInput = RelOptRule.convert(getInput, inputRequiredDistribution)
Some(copy(newProvidedTraitSet, Seq(newInput)))
}
override def translateToExecNode(): ExecNode[_] = {
new BatchExecSortAggregate(
grouping,
auxGrouping,
getAggCallList.toArray,
FlinkTypeFactory.toLogicalRowType(aggInputRowType),
isMerge,
true, // isFinal is always true
getInputEdge,
FlinkTypeFactory.toLogicalRowType(getRowType),
getRelDetailedDescription
)
}
private def getInputEdge: ExecEdge = {
if (grouping.length == 0) {
ExecEdge.builder().damBehavior(ExecEdge.DamBehavior.END_INPUT).build()
} else {
ExecEdge.DEFAULT
}
}
}
| aljoscha/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/batch/BatchPhysicalSortAggregate.scala | Scala | apache-2.0 | 6,742 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.app
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.control.NonFatal
import io.gatling.app.cli.ArgsParser
import io.gatling.core.config.GatlingConfiguration
import akka.actor.ActorSystem
import ch.qos.logback.classic.LoggerContext
import com.typesafe.scalalogging.StrictLogging
import org.slf4j.LoggerFactory
/**
* Object containing entry point of application
*/
object Gatling extends StrictLogging {
// used by bundle
def main(args: Array[String]): Unit = sys.exit(fromArgs(args, None))
// used by maven archetype
def fromMap(overrides: ConfigOverrides): Int = start(overrides, None)
// used by sbt-test-framework
private[gatling] def fromArgs(args: Array[String], selectedSimulationClass: SelectedSimulationClass): Int =
new ArgsParser(args).parseArguments match {
case Left(overrides) => start(overrides, selectedSimulationClass)
case Right(statusCode) => statusCode.code
}
private def terminateActorSystem(system: ActorSystem, timeout: FiniteDuration): Unit =
try {
val whenTerminated = system.terminate()
Await.result(whenTerminated, timeout)
} catch {
case NonFatal(e) =>
logger.error("Could not terminate ActorSystem", e)
}
private[app] def start(overrides: ConfigOverrides, selectedSimulationClass: SelectedSimulationClass) = {
try {
logger.trace("Starting")
val configuration = GatlingConfiguration.load(overrides)
logger.trace("Configuration loaded")
// start actor system before creating simulation instance, some components might need it (e.g. shutdown hook)
val system = ActorSystem("GatlingSystem", GatlingConfiguration.loadActorSystemConfiguration())
logger.trace("ActorSystem instantiated")
val runResult =
try {
val runner = Runner(system, configuration)
logger.trace("Runner instantiated")
runner.run(selectedSimulationClass)
} finally {
terminateActorSystem(system, 5 seconds)
}
RunResultProcessor(configuration).processRunResult(runResult).code
} finally {
LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext].stop()
}
}
}
| MykolaB/gatling | gatling-app/src/main/scala/io/gatling/app/Gatling.scala | Scala | apache-2.0 | 2,842 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.rdd.{RDD, RDDOperationScope}
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.ui.{UIUtils => SparkUIUtils}
import org.apache.spark.util.ManualClock
/**
* Tests whether scope information is passed from DStream operations to RDDs correctly.
*/
class DStreamScopeSuite
extends SparkFunSuite
with LocalStreamingContext {
override def beforeEach(): Unit = {
super.beforeEach()
val conf = new SparkConf().setMaster("local").setAppName("test")
conf.set("spark.streaming.clock", classOf[ManualClock].getName())
val batchDuration: Duration = Seconds(1)
ssc = new StreamingContext(new SparkContext(conf), batchDuration)
assertPropertiesNotSet()
}
override def afterEach(): Unit = {
try {
assertPropertiesNotSet()
} finally {
super.afterEach()
}
}
test("dstream without scope") {
val dummyStream = new DummyDStream(ssc)
dummyStream.initialize(Time(0))
// This DStream is not instantiated in any scope, so all RDDs
// created by this stream should similarly not have a scope
assert(dummyStream.baseScope === None)
assert(dummyStream.getOrCompute(Time(1000)).get.scope === None)
assert(dummyStream.getOrCompute(Time(2000)).get.scope === None)
assert(dummyStream.getOrCompute(Time(3000)).get.scope === None)
}
test("input dstream without scope") {
val inputStream = new DummyInputDStream(ssc)
inputStream.initialize(Time(0))
val baseScope = inputStream.baseScope.map(RDDOperationScope.fromJson)
val scope1 = inputStream.getOrCompute(Time(1000)).get.scope
val scope2 = inputStream.getOrCompute(Time(2000)).get.scope
val scope3 = inputStream.getOrCompute(Time(3000)).get.scope
    // Input streams are assigned a default base scope named after the stream, so all
    // RDDs created by this stream should have scopes derived from that base scope
assertDefined(baseScope, scope1, scope2, scope3)
assert(baseScope.get.name.startsWith("dummy stream"))
assertScopeCorrect(baseScope.get, scope1.get, 1000)
assertScopeCorrect(baseScope.get, scope2.get, 2000)
assertScopeCorrect(baseScope.get, scope3.get, 3000)
}
test("scoping simple operations") {
val inputStream = new DummyInputDStream(ssc)
val mappedStream = inputStream.map { i => i + 1 }
val filteredStream = mappedStream.filter { i => i % 2 == 0 }
filteredStream.initialize(Time(0))
val mappedScopeBase = mappedStream.baseScope.map(RDDOperationScope.fromJson)
val mappedScope1 = mappedStream.getOrCompute(Time(1000)).get.scope
val mappedScope2 = mappedStream.getOrCompute(Time(2000)).get.scope
val mappedScope3 = mappedStream.getOrCompute(Time(3000)).get.scope
val filteredScopeBase = filteredStream.baseScope.map(RDDOperationScope.fromJson)
val filteredScope1 = filteredStream.getOrCompute(Time(1000)).get.scope
val filteredScope2 = filteredStream.getOrCompute(Time(2000)).get.scope
val filteredScope3 = filteredStream.getOrCompute(Time(3000)).get.scope
// These streams are defined in their respective scopes "map" and "filter", so all
// RDDs created by these streams should inherit the IDs and names of their parent
// DStream's base scopes
assertDefined(mappedScopeBase, mappedScope1, mappedScope2, mappedScope3)
assertDefined(filteredScopeBase, filteredScope1, filteredScope2, filteredScope3)
assert(mappedScopeBase.get.name === "map")
assert(filteredScopeBase.get.name === "filter")
assertScopeCorrect(mappedScopeBase.get, mappedScope1.get, 1000)
assertScopeCorrect(mappedScopeBase.get, mappedScope2.get, 2000)
assertScopeCorrect(mappedScopeBase.get, mappedScope3.get, 3000)
assertScopeCorrect(filteredScopeBase.get, filteredScope1.get, 1000)
assertScopeCorrect(filteredScopeBase.get, filteredScope2.get, 2000)
assertScopeCorrect(filteredScopeBase.get, filteredScope3.get, 3000)
}
test("scoping nested operations") {
val inputStream = new DummyInputDStream(ssc)
    // countByWindow internally uses reduceByWindow, but only countByWindow
    // should appear in scope
val countStream = inputStream.countByWindow(Seconds(10), Seconds(1))
countStream.initialize(Time(0))
val countScopeBase = countStream.baseScope.map(RDDOperationScope.fromJson)
val countScope1 = countStream.getOrCompute(Time(1000)).get.scope
val countScope2 = countStream.getOrCompute(Time(2000)).get.scope
val countScope3 = countStream.getOrCompute(Time(3000)).get.scope
// Assert that all children RDDs inherit the DStream operation name correctly
assertDefined(countScopeBase, countScope1, countScope2, countScope3)
assert(countScopeBase.get.name === "countByWindow")
assertScopeCorrect(countScopeBase.get, countScope1.get, 1000)
assertScopeCorrect(countScopeBase.get, countScope2.get, 2000)
assertScopeCorrect(countScopeBase.get, countScope3.get, 3000)
// All streams except the input stream should share the same scopes as `countStream`
def testStream(stream: DStream[_]): Unit = {
if (stream != inputStream) {
val myScopeBase = stream.baseScope.map(RDDOperationScope.fromJson)
val myScope1 = stream.getOrCompute(Time(1000)).get.scope
val myScope2 = stream.getOrCompute(Time(2000)).get.scope
val myScope3 = stream.getOrCompute(Time(3000)).get.scope
assertDefined(myScopeBase, myScope1, myScope2, myScope3)
assert(myScopeBase === countScopeBase)
assert(myScope1 === countScope1)
assert(myScope2 === countScope2)
assert(myScope3 === countScope3)
// Climb upwards to test the parent streams
stream.dependencies.foreach(testStream)
}
}
testStream(countStream)
}
test("transform should allow RDD operations to be captured in scopes") {
val inputStream = new DummyInputDStream(ssc)
val transformedStream = inputStream.transform { _.map { _ -> 1}.reduceByKey(_ + _) }
transformedStream.initialize(Time(0))
val transformScopeBase = transformedStream.baseScope.map(RDDOperationScope.fromJson)
val transformScope1 = transformedStream.getOrCompute(Time(1000)).get.scope
val transformScope2 = transformedStream.getOrCompute(Time(2000)).get.scope
val transformScope3 = transformedStream.getOrCompute(Time(3000)).get.scope
// Assert that all children RDDs inherit the DStream operation name correctly
assertDefined(transformScopeBase, transformScope1, transformScope2, transformScope3)
assert(transformScopeBase.get.name === "transform")
assertNestedScopeCorrect(transformScope1.get, 1000)
assertNestedScopeCorrect(transformScope2.get, 2000)
assertNestedScopeCorrect(transformScope3.get, 3000)
def assertNestedScopeCorrect(rddScope: RDDOperationScope, batchTime: Long): Unit = {
assert(rddScope.name === "reduceByKey")
assert(rddScope.parent.isDefined)
assertScopeCorrect(transformScopeBase.get, rddScope.parent.get, batchTime)
}
}
test("foreachRDD should allow RDD operations to be captured in scope") {
val inputStream = new DummyInputDStream(ssc)
val generatedRDDs = new ArrayBuffer[RDD[(Int, Int)]]
inputStream.foreachRDD { rdd =>
generatedRDDs += rdd.map { _ -> 1}.reduceByKey(_ + _)
}
val batchCounter = new BatchCounter(ssc)
ssc.start()
val clock = ssc.scheduler.clock.asInstanceOf[ManualClock]
clock.advance(3000)
batchCounter.waitUntilBatchesCompleted(3, 10000)
assert(generatedRDDs.size === 3)
val foreachBaseScope =
ssc.graph.getOutputStreams().head.baseScope.map(RDDOperationScope.fromJson)
assertDefined(foreachBaseScope)
assert(foreachBaseScope.get.name === "foreachRDD")
val rddScopes = generatedRDDs.map { _.scope }.toSeq
assertDefined(rddScopes: _*)
rddScopes.zipWithIndex.foreach { case (rddScope, idx) =>
assert(rddScope.get.name === "reduceByKey")
assert(rddScope.get.parent.isDefined)
assertScopeCorrect(foreachBaseScope.get, rddScope.get.parent.get, (idx + 1) * 1000)
}
}
/** Assert that the RDD operation scope properties are not set in our SparkContext. */
private def assertPropertiesNotSet(): Unit = {
assert(ssc != null)
assert(ssc.sc.getLocalProperty(SparkContext.RDD_SCOPE_KEY) == null)
assert(ssc.sc.getLocalProperty(SparkContext.RDD_SCOPE_NO_OVERRIDE_KEY) == null)
}
/** Assert that the given RDD scope inherits the name and ID of the base scope correctly. */
private def assertScopeCorrect(
baseScope: RDDOperationScope,
rddScope: RDDOperationScope,
batchTime: Long): Unit = {
val (baseScopeId, baseScopeName) = (baseScope.id, baseScope.name)
val formattedBatchTime = SparkUIUtils.formatBatchTime(
batchTime, ssc.graph.batchDuration.milliseconds, showYYYYMMSS = false)
assert(rddScope.id === s"${baseScopeId}_$batchTime")
assert(rddScope.name.replaceAll("\\n", " ") === s"$baseScopeName @ $formattedBatchTime")
assert(rddScope.parent.isEmpty) // There should not be any higher scope
}
/** Assert that all the specified options are defined. */
private def assertDefined[T](options: Option[T]*): Unit = {
options.zipWithIndex.foreach { case (o, i) => assert(o.isDefined, s"Option $i was empty!") }
}
}
| mahak/spark | streaming/src/test/scala/org/apache/spark/streaming/DStreamScopeSuite.scala | Scala | apache-2.0 | 10,157 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.benchmark
import org.apache.spark.SparkConf
import org.apache.spark.benchmark.Benchmark
import org.apache.spark.internal.config.MEMORY_OFFHEAP_ENABLED
import org.apache.spark.memory.{StaticMemoryManager, TaskMemoryManager}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{BoundReference, UnsafeProjection}
import org.apache.spark.sql.execution.joins.LongToUnsafeRowMap
import org.apache.spark.sql.types.LongType
/**
* Benchmark to measure metrics performance at HashedRelation.
* To run this benchmark:
* {{{
* 1. without sbt: bin/spark-submit --class <this class> <spark sql test jar>
* 2. build/sbt "sql/test:runMain <this class>"
* 3. generate result: SPARK_GENERATE_BENCHMARK_FILES=1 build/sbt "sql/test:runMain <this class>"
* Results will be written to "benchmarks/HashedRelationMetricsBenchmark-results.txt".
* }}}
*/
object HashedRelationMetricsBenchmark extends SqlBasedBenchmark {
def benchmarkLongToUnsafeRowMapMetrics(numRows: Int): Unit = {
runBenchmark("LongToUnsafeRowMap metrics") {
val benchmark = new Benchmark("LongToUnsafeRowMap metrics", numRows, output = output)
benchmark.addCase("LongToUnsafeRowMap") { iter =>
val taskMemoryManager = new TaskMemoryManager(
new StaticMemoryManager(
new SparkConf().set(MEMORY_OFFHEAP_ENABLED.key, "false"),
Long.MaxValue,
Long.MaxValue,
1),
0)
val unsafeProj = UnsafeProjection.create(Seq(BoundReference(0, LongType, false)))
val keys = Range.Long(0, numRows, 1)
val map = new LongToUnsafeRowMap(taskMemoryManager, 1)
keys.foreach { k =>
map.append(k, unsafeProj(InternalRow(k)))
}
map.optimize()
val threads = (0 to 100).map { _ =>
val thread = new Thread {
override def run: Unit = {
val row = unsafeProj(InternalRow(0L)).copy()
keys.foreach { k =>
assert(map.getValue(k, row) eq row)
assert(row.getLong(0) == k)
}
}
}
thread.start()
thread
}
threads.map(_.join())
map.free()
}
benchmark.run()
}
}
override def runBenchmarkSuite(mainArgs: Array[String]): Unit = {
benchmarkLongToUnsafeRowMapMetrics(500000)
}
}
| guoxiaolongzte/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/HashedRelationMetricsBenchmark.scala | Scala | apache-2.0 | 3,236 |
package rescala.fullmv
import rescala.core.{Reactive, Struct}
trait FullMVStruct extends Struct {
override type State[P, S <: Struct] = NodeVersionHistory[P, FullMVTurn, Reactive[FullMVStruct]]
}
| volkc/REScala | Research/Multiversion/src/main/scala/rescala/fullmv/FullMVStruct.scala | Scala | apache-2.0 | 201 |
package org.senkbeil.debugger.akka.messages
import org.senkbeil.debugger.akka.messages.structures.JDIThread
case class VMStartMessage(
thread: Option[JDIThread] = None
) extends MessageLike
| chipsenkbeil/scala-debugger-akka | src/main/scala/org/senkbeil/debugger/akka/messages/VMStartMessage.scala | Scala | apache-2.0 | 194 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.io.File
import java.util.concurrent.atomic.AtomicLong
import scala.collection.JavaConverters._
import org.apache.commons.lang.StringUtils
import org.apache.hadoop.conf.Configuration
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession.Builder
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.execution.QueryExecution
import org.apache.spark.sql.hive.execution.command.CarbonSetCommand
import org.apache.spark.sql.internal.{SessionState, SharedState}
import org.apache.spark.sql.profiler.{Profiler, SQLStart}
import org.apache.spark.util.{CarbonReflectionUtils, Utils}
import org.apache.carbondata.common.annotations.InterfaceAudience
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.{CarbonProperties, CarbonSessionInfo, ThreadLocalSessionInfo}
import org.apache.carbondata.streaming.CarbonStreamingQueryListener
/**
 * Session implementation for {org.apache.spark.sql.SparkSession}.
 * This class is implemented only so that our own SQL DDL commands can be used.
 * Users need to use {CarbonSession.getOrCreateCarbonSession} to create a Carbon session.
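 *
 * A minimal usage sketch (illustrative only; the master, app name and store path below
 * are assumptions, not requirements):
 * {{{
 * import org.apache.spark.sql.SparkSession
 * import org.apache.spark.sql.CarbonSession._
 *
 * val carbon = SparkSession.builder()
 *   .master("local")
 *   .appName("CarbonExample")
 *   .getOrCreateCarbonSession("/tmp/carbon.store")
 *
 * carbon.sql("SHOW TABLES").show()
 * }}}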
*/
class CarbonSession(@transient val sc: SparkContext,
@transient private val existingSharedState: Option[SharedState],
@transient private val useHiveMetaStore: Boolean = true
) extends SparkSession(sc) { self =>
def this(sc: SparkContext) {
this(sc, None)
}
@transient
override lazy val sessionState: SessionState = {
CarbonReflectionUtils.getSessionState(sparkContext, this, useHiveMetaStore)
.asInstanceOf[SessionState]
}
/**
* State shared across sessions, including the `SparkContext`, cached data, listener,
* and a catalog that interacts with external systems.
*/
@transient
override lazy val sharedState: SharedState = {
existingSharedState match {
case Some(_) =>
val ss = existingSharedState.get
if (ss == null) {
new SharedState(sparkContext)
} else {
ss
}
case None =>
new SharedState(sparkContext)
}
}
override def newSession(): SparkSession = {
new CarbonSession(sparkContext, Some(sharedState), useHiveMetaStore)
}
/**
* Run search mode if enabled, otherwise run SparkSQL
*/
override def sql(sqlText: String): DataFrame = {
withProfiler(
sqlText,
(qe, sse) => {
new Dataset[Row](self, qe, RowEncoder(qe.analyzed.schema))
}
)
}
/**
   * Return true if the specified sql statement will hit the datamap.
   * This API is for testing purposes only.
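   *
   * A minimal illustrative call (the table and datamap names are hypothetical):
   * {{{
   * carbonSession.isDataMapHit("SELECT a, SUM(b) FROM t GROUP BY a", "agg_datamap")
   * }}}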
*/
@InterfaceAudience.Developer(Array("DataMap"))
def isDataMapHit(sqlStatement: String, dataMapName: String): Boolean = {
// explain command will output the dataMap information only if enable.query.statistics = true
val message = sql(s"EXPLAIN $sqlStatement").collect()
message(0).getString(0).contains(dataMapName)
}
/**
* Run SparkSQL directly
*/
def sparkSql(sqlText: String): DataFrame = {
withProfiler(
sqlText,
(qe, sse) => new Dataset[Row](self, qe, RowEncoder(qe.analyzed.schema))
)
}
private def withProfiler(
sqlText: String,
generateDF: (QueryExecution, SQLStart) => DataFrame): DataFrame = {
val sse = SQLStart(sqlText, CarbonSession.statementId.getAndIncrement())
CarbonSession.threadStatementId.set(sse.statementId)
sse.startTime = System.currentTimeMillis()
try {
val logicalPlan = sessionState.sqlParser.parsePlan(sqlText)
sse.parseEnd = System.currentTimeMillis()
val qe = sessionState.executePlan(logicalPlan)
qe.assertAnalyzed()
sse.isCommand = qe.analyzed match {
case c: Command => true
case u @ Union(children) if children.forall(_.isInstanceOf[Command]) => true
case _ => false
}
sse.analyzerEnd = System.currentTimeMillis()
generateDF(qe, sse)
} finally {
Profiler.invokeIfEnable {
if (sse.isCommand) {
sse.endTime = System.currentTimeMillis()
Profiler.send(sse)
} else {
Profiler.addStatementMessage(sse.statementId, sse)
}
}
}
}
}
object CarbonSession {
private val statementId = new AtomicLong(0)
private var enableInMemCatlog: Boolean = false
private[sql] val threadStatementId = new ThreadLocal[Long]()
implicit class CarbonBuilder(builder: Builder) {
def enableInMemoryCatalog(): Builder = {
enableInMemCatlog = true
builder
}
def getOrCreateCarbonSession(): SparkSession = {
getOrCreateCarbonSession(null, null)
}
def getOrCreateCarbonSession(storePath: String): SparkSession = {
getOrCreateCarbonSession(
storePath,
new File(CarbonCommonConstants.METASTORE_LOCATION_DEFAULT_VAL).getCanonicalPath)
}
def getOrCreateCarbonSession(storePath: String,
metaStorePath: String): SparkSession = synchronized {
if (!enableInMemCatlog) {
builder.enableHiveSupport()
}
val options =
getValue("options", builder).asInstanceOf[scala.collection.mutable.HashMap[String, String]]
val userSuppliedContext: Option[SparkContext] =
getValue("userSuppliedContext", builder).asInstanceOf[Option[SparkContext]]
CarbonReflectionUtils.updateCarbonSerdeInfo()
if (StringUtils.isNotBlank(metaStorePath)) {
val hadoopConf = new Configuration()
val configFile = Utils.getContextOrSparkClassLoader.getResource("hive-site.xml")
if (configFile != null) {
hadoopConf.addResource(configFile)
}
if (options.get(CarbonCommonConstants.HIVE_CONNECTION_URL).isEmpty &&
hadoopConf.get(CarbonCommonConstants.HIVE_CONNECTION_URL) == null) {
val metaStorePathAbsolute = new File(metaStorePath).getCanonicalPath
val hiveMetaStoreDB = metaStorePathAbsolute + "/metastore_db"
options ++= Map[String, String]((CarbonCommonConstants.HIVE_CONNECTION_URL,
s"jdbc:derby:;databaseName=$hiveMetaStoreDB;create=true"))
}
}
// Get the session from current thread's active session.
var session: SparkSession = SparkSession.getActiveSession match {
case Some(sparkSession: CarbonSession) =>
if ((sparkSession ne null) && !sparkSession.sparkContext.isStopped) {
options.foreach { case (k, v) => sparkSession.sessionState.conf.setConfString(k, v) }
sparkSession
} else {
null
}
case _ => null
}
if (session ne null) {
return session
}
// Global synchronization so we will only set the default session once.
SparkSession.synchronized {
// If the current thread does not have an active session, get it from the global session.
session = SparkSession.getDefaultSession match {
case Some(sparkSession: CarbonSession) =>
if ((sparkSession ne null) && !sparkSession.sparkContext.isStopped) {
options.foreach { case (k, v) => sparkSession.sessionState.conf.setConfString(k, v) }
sparkSession
} else {
null
}
case _ => null
}
if (session ne null) {
return session
}
// No active nor global default session. Create a new one.
val sparkContext = userSuppliedContext.getOrElse {
// set app name if not given
val randomAppName = java.util.UUID.randomUUID().toString
val sparkConf = new SparkConf()
options.foreach { case (k, v) => sparkConf.set(k, v) }
if (!sparkConf.contains("spark.app.name")) {
sparkConf.setAppName(randomAppName)
}
val sc = SparkContext.getOrCreate(sparkConf)
// maybe this is an existing SparkContext, update its SparkConf which maybe used
// by SparkSession
options.foreach { case (k, v) => sc.conf.set(k, v) }
if (!sc.conf.contains("spark.app.name")) {
sc.conf.setAppName(randomAppName)
}
sc
}
session = new CarbonSession(sparkContext, None, !enableInMemCatlog)
val carbonProperties = CarbonProperties.getInstance()
if (StringUtils.isNotBlank(storePath)) {
carbonProperties.addProperty(CarbonCommonConstants.STORE_LOCATION, storePath)
// In case if it is in carbon.properties for backward compatible
} else if (carbonProperties.getProperty(CarbonCommonConstants.STORE_LOCATION) == null) {
carbonProperties.addProperty(CarbonCommonConstants.STORE_LOCATION,
session.sessionState.conf.warehousePath)
}
options.foreach { case (k, v) => session.sessionState.conf.setConfString(k, v) }
SparkSession.setDefaultSession(session)
// Setup monitor end point and register CarbonMonitorListener
Profiler.initialize(sparkContext)
// Register a successfully instantiated context to the singleton. This should be at the
// end of the class definition so that the singleton is updated only if there is no
// exception in the construction of the instance.
CarbonToSparkAdapter.addSparkListener(sparkContext)
session.streams.addListener(new CarbonStreamingQueryListener(session))
}
session
}
/**
* It is a hack to get the private field from class.
*/
def getValue(name: String, builder: Builder): Any = {
val currentMirror = scala.reflect.runtime.currentMirror
val instanceMirror = currentMirror.reflect(builder)
val m = currentMirror.classSymbol(builder.getClass).
toType.members.find { p =>
p.name.toString.equals(name)
}.get.asTerm
instanceMirror.reflectField(m).get
}
}
def threadSet(key: String, value: String): Unit = {
var currentThreadSessionInfo = ThreadLocalSessionInfo.getCarbonSessionInfo
if (currentThreadSessionInfo == null) {
currentThreadSessionInfo = new CarbonSessionInfo()
}
else {
currentThreadSessionInfo = currentThreadSessionInfo.clone()
}
val threadParams = currentThreadSessionInfo.getThreadParams
CarbonSetCommand.validateAndSetValue(threadParams, key, value)
ThreadLocalSessionInfo.setCarbonSessionInfo(currentThreadSessionInfo)
}
def threadSet(key: String, value: Object): Unit = {
var currentThreadSessionInfo = ThreadLocalSessionInfo.getCarbonSessionInfo
if (currentThreadSessionInfo == null) {
currentThreadSessionInfo = new CarbonSessionInfo()
}
else {
currentThreadSessionInfo = currentThreadSessionInfo.clone()
}
currentThreadSessionInfo.getThreadParams.setExtraInfo(key, value)
ThreadLocalSessionInfo.setCarbonSessionInfo(currentThreadSessionInfo)
}
def threadUnset(key: String): Unit = {
val currentThreadSessionInfo = ThreadLocalSessionInfo.getCarbonSessionInfo
if (currentThreadSessionInfo != null) {
val currentThreadSessionInfoClone = currentThreadSessionInfo.clone()
val threadParams = currentThreadSessionInfoClone.getThreadParams
CarbonSetCommand.unsetValue(threadParams, key)
threadParams.removeExtraInfo(key)
ThreadLocalSessionInfo.setCarbonSessionInfo(currentThreadSessionInfoClone)
}
}
def updateSessionInfoToCurrentThread(sparkSession: SparkSession): Unit = {
val carbonSessionInfo = CarbonEnv.getInstance(sparkSession).carbonSessionInfo.clone()
val currentThreadSessionInfoOrig = ThreadLocalSessionInfo.getCarbonSessionInfo
if (currentThreadSessionInfoOrig != null) {
val currentThreadSessionInfo = currentThreadSessionInfoOrig.clone()
// copy all the thread parameters to apply to session parameters
currentThreadSessionInfo.getThreadParams.getAll.asScala
.foreach(entry => carbonSessionInfo.getSessionParams.addProperty(entry._1, entry._2))
carbonSessionInfo.setThreadParams(currentThreadSessionInfo.getThreadParams)
}
// preserve thread parameters across call
ThreadLocalSessionInfo.setCarbonSessionInfo(carbonSessionInfo)
ThreadLocalSessionInfo
.setConfigurationToCurrentThread(sparkSession.sessionState.newHadoopConf())
}
}
| manishgupta88/carbondata | integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala | Scala | apache-2.0 | 13,175 |
package example.repr
case class Resp(data: String)
case class GetResp(url: String, origin: String)
case class GetRespArg(name: String, age: Int)
case class GetRespWithQueryString(url: String, origin: String, args: GetRespArg)
| pepegar/hammock | example/src/main/scala/example/repr/Resp.scala | Scala | mit | 228 |
// This testcase is a snippet that did not compile correctly under
// pre-release 2.10.x. The relevant discussion around it can be
// found at:
// https://groups.google.com/forum/?fromgroups#!topic/scala-internals/qcyTjk8euUI[1-25]
//
// The reason it did not compile is related to the fact that ICode
// ops did not correctly define the stack entries they consumed and
// the dead code elimination phase was unable to correctly reconstruct
// the stack after code elimination.
//
// Originally, this did not compile, but I included it in the run
// tests because this was ASM-dependent and did not happen for GenJVM.
//
// Thus, we run the code and force the loading of class B -- if the
// bytecode is incorrect, it will fail the test.
final class A {
def f1 = true
def f2 = true
inline def f3 = f1 || f2
class B {
def f() = 1 to 10 foreach (_ => f3)
}
def f = (new B).f()
}
object Test {
def main(args: Array[String]): Unit = {
// force the loading of B
(new A).f
}
}
| lampepfl/dotty | tests/run/dead-code-elimination.scala | Scala | apache-2.0 | 1,002 |
package turkey
package tasks
import turkey.util._
import com.amazonaws.services.mturk.model.AssignmentStatus
import com.amazonaws.services.mturk.model.UpdateExpirationForHITRequest
import com.amazonaws.services.mturk.model.DeleteHITRequest
import com.amazonaws.services.mturk.model.ApproveAssignmentRequest
import com.amazonaws.services.mturk.model.RejectAssignmentRequest
import scala.util.{Try, Success, Failure}
import scala.concurrent.duration._
import scala.language.postfixOps
import akka.actor.Actor
import akka.actor.ActorRef
import upickle.default._
import com.typesafe.scalalogging.StrictLogging
/**
 * Manages a particular kind of task; corresponds to a single TaskSpecification / HIT Type.
 * It contains all of the logic for reviewing HITs, doing quality control, keeping track of
 * auxiliary data, scheduling which HITs should be uploaded when, etc.
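 *
 * A minimal illustrative subclass (hypothetical; a real manager would validate responses
 * and schedule uploads in `reviewHITs`):
 * {{{
 * class SimpleHITManager[P, R](helper: HITManager.Helper[P, R]) extends HITManager[P, R](helper) {
 *   // upload a single-assignment HIT as soon as a prompt is requested
 *   def addPrompt(prompt: P): Unit = { helper.createHIT(prompt, numAssignments = 1); () }
 *   // no-op review; a real implementation would fetch assignments and approve/reject them here
 *   def reviewHITs: Unit = ()
 * }
 * }}}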
*/
abstract class HITManager[Prompt, Response](
helper: HITManager.Helper[Prompt, Response]
) extends Actor {
import helper.Message._
final override def receive = receiveHelperMessage orElse receiveAux
// delegates to helper when given a standard message defined in the helper
private[this] final val receiveHelperMessage: PartialFunction[Any, Unit] = {
case ExpireAll => helper.expireAll
case DeleteAll => helper.deleteAll
case ReviewHITs => reviewHITs
case AddPrompt(p) => addPrompt(p)
}
/** Override to add more incoming message types and message-processing logic */
def receiveAux: PartialFunction[Any, Unit] =
PartialFunction.empty[Any, Unit]
/** Queries Turk and refreshes the task state, sending assignments for approval/validation,
* approving/rejecting them, deleting HITs, etc. as necessary */
def reviewHITs: Unit
/** Adds a prompt to the set of prompts that this HITManager should be responsible for sourcing responses for. */
def addPrompt(prompt: Prompt): Unit
}
object HITManager {
/** Manages the ongoing state for a task with a particular HIT type;
* keeps track of HITs and assignments that are active, saved, etc.
* and gives convenience methods for interfacing with Turk. */
class Helper[P, R](
val taskSpec: TaskSpecification { type Prompt = P ; type Response = R })(
implicit val promptReader: Reader[P],
val responseReader: Reader[R],
val responseWriter: Writer[R],
val config: TaskConfig
) extends StrictLogging {
private type Prompt = P
private type Response = R
import scala.collection.mutable
object Message {
sealed trait Message
case object DeleteAll extends Message
case object ExpireAll extends Message
case object ReviewHITs extends Message
case class AddPrompt(prompt: Prompt) extends Message
}
import Message._
import taskSpec.hitTypeId
def expireAll: Unit = {
val currentlyActiveHITs = activeHITs.iterator.toList
currentlyActiveHITs.foreach(expireHIT)
}
def deleteAll: Unit = {
val currentlyActiveHITs = activeHITs.iterator.toList
currentlyActiveHITs.foreach(deleteHIT)
}
// HITs Active stuff
// active HITs are currently up on Turk
// finished means the HIT is not on turk (i.e., all assignments are done)
// actives by prompt includes HITs for which some assignments are done and some are not
private[this] val (activeHITs, finishedHITInfosByPrompt, activeHITInfosByPrompt) = {
val active = mutable.Set.empty[HIT[Prompt]]
for {
mTurkHIT <- config.service.listAllHITs
if mTurkHIT.getHITTypeId.equals(hitTypeId)
hit <- config.hitDataService.getHIT[Prompt](hitTypeId, mTurkHIT.getHITId).toOptionLogging(logger)
} yield (active += hit)
val finishedRes = mutable.Map.empty[Prompt, List[HITInfo[Prompt, Response]]]
val activeRes = mutable.Map.empty[Prompt, List[HITInfo[Prompt, Response]]]
config.hitDataService.getAllHITInfo[Prompt, Response](hitTypeId).get
.groupBy(_.hit.prompt)
.foreach { case (prompt, infos) =>
infos.foreach { hitInfo =>
if(active.contains(hitInfo.hit)) {
activeRes.put(prompt, hitInfo :: activeRes.get(prompt).getOrElse(Nil))
} else {
              finishedRes.put(prompt, hitInfo :: finishedRes.get(prompt).getOrElse(Nil))
}
}
}
(active, finishedRes, activeRes)
}
def finishedHITInfosByPromptIterator: Iterator[(Prompt, List[HITInfo[Prompt, Response]])] =
finishedHITInfosByPrompt.iterator
def finishedHITInfos(p: Prompt): List[HITInfo[Prompt, Response]] =
finishedHITInfosByPrompt.get(p).getOrElse(Nil)
def activeHITInfosByPromptIterator: Iterator[(Prompt, List[HITInfo[Prompt, Response]])] =
activeHITInfosByPrompt.iterator
def activeHITInfos(p: Prompt): List[HITInfo[Prompt, Response]] =
activeHITInfosByPrompt.get(p).getOrElse(Nil)
def allCurrentHITInfosByPromptIterator: Iterator[(Prompt, List[HITInfo[Prompt, Response]])] =
activeHITInfosByPromptIterator ++ finishedHITInfosByPromptIterator
def allCurrentHITInfos(p: Prompt): List[HITInfo[Prompt, Response]] =
activeHITInfos(p) ++ finishedHITInfos(p)
/** Create a HIT with the specific parameters.
* This should be used in order to ensure the helper has a consistent state.
*/
def createHIT(prompt: Prompt, numAssignments: Int): Try[HIT[Prompt]] = {
val attempt = taskSpec.createHIT(prompt, numAssignments)
attempt match {
case Success(hit) =>
activeHITs += hit
val newHITInfo = HITInfo[Prompt, Response](hit, Nil)
activeHITInfosByPrompt.put(prompt, newHITInfo :: activeHITInfos(prompt))
logger.info(s"Created HIT: ${hit.hitId}\n${config.workerUrl}/mturk/preview?groupId=${hit.hitTypeId}")
case Failure(e) =>
logger.error(e.getMessage)
e.printStackTrace
}
attempt
}
def isActive(prompt: Prompt): Boolean = activeHITInfosByPrompt.contains(prompt)
def isActive(hit: HIT[Prompt]): Boolean = activeHITs.contains(hit)
def isActive(hitId: String): Boolean = activeHITs.exists(_.hitId == hitId)
def numActiveHITs = activeHITs.size
def expireHIT(hit: HIT[Prompt]): Unit = {
val cal = java.util.Calendar.getInstance
cal.add(java.util.Calendar.DATE, -1)
val yesterday = cal.getTime
Try(
config.service.updateExpirationForHIT(
(new UpdateExpirationForHITRequest)
.withHITId(hit.hitId)
.withExpireAt(yesterday)
)
) match {
case Success(_) =>
logger.info(s"Expired HIT: ${hit.hitId}\nHIT type for expired HIT: ${hitTypeId}")
case Failure(e) =>
logger.error(s"HIT expiration failed:\n$hit\n$e")
}
}
/** Deletes a HIT (if possible) and takes care of bookkeeping. */
def deleteHIT(hit: HIT[Prompt]): Unit = {
Try(config.service.deleteHIT((new DeleteHITRequest).withHITId(hit.hitId))) match {
case Success(_) =>
logger.info(s"Deleted HIT: ${hit.hitId}\nHIT type for deleted HIT: ${hitTypeId}")
if(!isActive(hit)) {
logger.error(s"Deleted HIT that isn't registered as active: $hit")
} else {
activeHITs -= hit
// add to other appropriate data structures
val finishedData = finishedHITInfos(hit.prompt)
val activeData = activeHITInfos(hit.prompt)
val curInfo = activeData
.find(_.hit.hitId == hit.hitId)
.getOrElse {
logger.error("Could not find active HIT to move to finished");
HITInfo(
hit,
config.hitDataService.getAssignmentsForHIT[Response](hitTypeId, hit.hitId).get)
}
val newActiveData = activeData.filterNot(_.hit.hitId == hit.hitId)
val newFinishedData = curInfo :: finishedData
if(newActiveData.isEmpty) {
activeHITInfosByPrompt.remove(hit.prompt)
} else {
activeHITInfosByPrompt.put(hit.prompt, newActiveData)
}
finishedHITInfosByPrompt.put(hit.prompt, newFinishedData)
}
case Failure(e) =>
logger.error(s"HIT deletion failed:\n$hit\n$e")
}
}
// Assignment reviewing
/** Represents an assignment waiting for a reviewing result. */
class AssignmentInReview protected[Helper] (val assignment: Assignment[Response])
private[this] val assignmentsInReview = mutable.Set.empty[AssignmentInReview]
def getInReview(assignment: Assignment[Response]): Option[AssignmentInReview] =
assignmentsInReview.find(_.assignment == assignment)
def getInReview(assignmentId: String): Option[AssignmentInReview] =
assignmentsInReview.find(_.assignment.assignmentId == assignmentId)
def isInReview(assignment: Assignment[Response]): Boolean =
getInReview(assignment).nonEmpty
def isInReview(assignmentId: String): Boolean =
getInReview(assignmentId).nonEmpty
def numAssignmentsInReview = assignmentsInReview.size
/** Mark an assignment as under review. */
def startReviewing(assignment: Assignment[Response]): AssignmentInReview = {
val aInRev = new AssignmentInReview(assignment)
assignmentsInReview += aInRev
aInRev
}
/** Process and record the result of reviewing an assignment. */
def evaluateAssignment(
hit: HIT[Prompt],
aInRev: AssignmentInReview,
evaluation: AssignmentEvaluation
): Unit = {
import aInRev.assignment
evaluation match {
case Approval(message) => Try {
config.service.approveAssignment(
(new ApproveAssignmentRequest)
.withAssignmentId(assignment.assignmentId)
.withRequesterFeedback(message))
assignmentsInReview -= aInRev
val curData = activeHITInfos(hit.prompt)
val curInfo = curData.find(_.hit.hitId == hit.hitId)
.getOrElse {
logger.error(s"Could not find active data for hit $hit")
activeHITs += hit
HITInfo[Prompt, Response](hit, Nil)
}
val filteredData = curData.filterNot(_.hit.hitId == hit.hitId)
val newInfo = curInfo.copy(assignments = assignment :: curInfo.assignments)
activeHITInfosByPrompt.put(hit.prompt, newInfo :: filteredData)
logger.info(s"Approved assignment for worker ${assignment.workerId}: ${assignment.assignmentId}\n" +
s"HIT for approved assignment: ${assignment.hitId}; $hitTypeId")
config.hitDataService.saveApprovedAssignment(assignment).recover { case e =>
logger.error(s"Failed to save approved assignment; data:\n${write(assignment)}")
}
}
case Rejection(message) => Try {
config.service.rejectAssignment(
(new RejectAssignmentRequest)
.withAssignmentId(assignment.assignmentId)
.withRequesterFeedback(message))
assignmentsInReview -= aInRev
logger.info(s"Rejected assignment: ${assignment.assignmentId}\n" +
s"HIT for rejected assignment: ${assignment.hitId}; ${hitTypeId}\n" +
s"Reason: $message")
config.hitDataService.saveRejectedAssignment(assignment) recover { case e =>
logger.error(s"Failed to save approved assignment; data:\n${write(assignment)}")
}
}
}
}
}
}
| julianmichael/turkey | turkey/jvm/src/main/scala/turkey/tasks/HITManager.scala | Scala | mit | 11,532 |
package models.admin.reports
import scalaz._
import Scalaz._
import scalaz.effect.IO
import scalaz.EitherT._
import scalaz.Validation
import scalaz.Validation.FlatMap._
import scalaz.NonEmptyList._
import models.tosca.KeyValueList
import net.liftweb.json._
import io.megam.util.Time
import org.joda.time.{DateTime, Period}
import org.joda.time.format.DateTimeFormat
import models.Constants.{JSON_CLAZ, REPORTSCLAZ, REPORT_LAUNCHES, REPORT_CATEGORYMAP}
import models.admin.{ReportInput, ReportResult}
class Launches(ri: ReportInput) extends Reporter {
def report: ValidationNel[Throwable, Option[ReportResult]] = {
for {
abt <- build(ri.start_date, ri.end_date) leftMap { err: NonEmptyList[Throwable] ⇒ err }
aal <- aggregate(abt).successNel
fal <- subaggregate(aal).successNel
} yield {
ReportResult(REPORT_LAUNCHES, fal.map(_.map(_.toKeyList)), REPORTSCLAZ, Time.now.toString).some
}
}
def build(startdate: String, enddate: String): ValidationNel[Throwable,Tuple2[Seq[models.tosca.AssembliesResult],
Seq[models.tosca.AssemblyResult]]] = {
for {
al <- (models.tosca.Assemblies.findByDateRange(startdate, enddate) leftMap { err: NonEmptyList[Throwable] ⇒ err })
as <- (models.tosca.Assembly.findByDateRange(startdate, enddate) leftMap { err: NonEmptyList[Throwable] ⇒ err })
} yield (al, as )
}
def reportFor(email: String, org: String): ValidationNel[Throwable, Option[ReportResult]] = {
for {
abt <- buildFor(email, org, ri.start_date, ri.end_date) leftMap { err: NonEmptyList[Throwable] ⇒ err }
aal <- aggregate(abt).successNel
} yield {
ReportResult(REPORT_LAUNCHES, aal.map(_.map(_.toKeyList)), REPORTSCLAZ, Time.now.toString).some
}
}
def buildFor(email: String, org: String, startdate: String, enddate: String): ValidationNel[Throwable,Tuple2[Seq[models.tosca.AssembliesResult],
Seq[models.tosca.AssemblyResult]]] = {
for {
al <- (models.tosca.Assemblies.findByDateRangeFor(email, org, startdate, enddate) leftMap { err: NonEmptyList[Throwable] ⇒ err })
as <- (models.tosca.Assembly.findByDateRangeFor(email, org, startdate, enddate) leftMap { err: NonEmptyList[Throwable] ⇒ err })
} yield (al, as)
}
private def aggregate(abt: Tuple2[Seq[models.tosca.AssembliesResult], Seq[models.tosca.AssemblyResult]]) = {
for {
ba <- (abt._1.map { asms => (asms.assemblies.map {x => (x, asms.id)})}).flatten.toMap.some
la <- LaunchesAggregate(abt._2, ba).some
} yield la.aggregate
}
private def subaggregate(olrt: Option[Seq[LaunchesResult]]) = {
val lrt = olrt.getOrElse(List[LaunchesResult]())
val f: List[String] = REPORT_CATEGORYMAP.get(ri.category).getOrElse(REPORT_CATEGORYMAP.get("all").getOrElse(List()))
val g: List[String] = List(ri.group)
for {
ba <- lrt.filter { a => {
(f.filter(x => a.tosca_type.contains(x)).size > 0) &&
(if (g.size > 2) (g.filter(x => a.status.contains(x)).size > 0) else true)
}
}.some
} yield ba
}
}
case class LaunchesAggregate(als: Seq[models.tosca.AssemblyResult],
tal: Map[String, String]) {
lazy val aggregate: Seq[LaunchesResult] = als.map(al => {
LaunchesResult(al.id, tal.get(al.id).getOrElse(""), al.name, al.account_id, al.state, al.status, al.tosca_type,
KeyValueList.toMap(al.inputs), KeyValueList.toMap(al.outputs), al.created_at)
})
}
case class LaunchesResult(id: String, asms_id: String, name: String, account_id: String, state: String, status: String, tosca_type: String,
inputProps: Map[String, String], outputProps: Map[String, String], created_at: DateTime) {
val X = "x"
val Y = "y"
val ID = "id"
val ASMS_ID = "asms_id"
val NAME = "name"
val ACCOUNT_ID = "account_id"
val STATE = "state"
val STATUS = "status"
val TOSCA_TYPE = "type"
val CREATED_AT = "created_at"
val INPUTPROPS = "inputprops"
val OUTPUTPROPS = "outputprops"
val NUMBER_OF_HOURS = "number_of_hours"
def isEmpty(x: String) = Option(x).forall(_.isEmpty)
def shouldZero = isEmpty(created_at.toString)
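  // Running time in hours (fractional, rendered as a string) from created_at until now; "0" when no creation time is set.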
def calculateHours = if (shouldZero) { "0" }
else {
val runningTime = (new Period(DateTime.parse(created_at.toString), new DateTime())).toStandardDuration.getStandardMinutes
(runningTime.toFloat/60).toString
}
def toKeyList: models.tosca.KeyValueList = models.tosca.KeyValueList(
Map((X -> created_at.toString),
(Y -> "1"),
(ID -> id),
(ASMS_ID -> asms_id),
(NAME -> name),
(ACCOUNT_ID -> account_id),
        (STATE -> state),
        (STATUS -> status),
(TOSCA_TYPE -> tosca_type),
(INPUTPROPS -> inputProps.map(pair => pair._1+"="+pair._2).mkString("",",",",")),
(OUTPUTPROPS -> outputProps.map(pair => pair._1+"="+pair._2).mkString("",",",",")),
(CREATED_AT -> created_at.toString),
(NUMBER_OF_HOURS -> calculateHours)))
}
| indykish/vertice_gateway | app/models/admin/reports/Launches.scala | Scala | mit | 5,136 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import org.geotools.data.Query
import org.geotools.filter.text.ecql.ECQL
import org.joda.time.{DateTime, DateTimeZone}
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.TestWithDataStore
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.index.conf.QueryHints
import org.locationtech.geomesa.index.utils.KryoLazyStatsUtils.decodeStat
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.stats._
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class KryoLazyStatsIteratorTest extends Specification with TestWithDataStore {
sequential
override val spec = "idt:java.lang.Integer:index=full,attr:java.lang.Long:index=join,dtg:Date,*geom:Point:srid=4326"
addFeatures((0 until 150).toArray.map { i =>
val attrs = Array(i.asInstanceOf[AnyRef], (i * 2).asInstanceOf[AnyRef],
new DateTime("2012-01-01T19:00:00", DateTimeZone.UTC).toDate, "POINT(-77 38)")
val sf = new ScalaSimpleFeature(sft, i.toString)
sf.setAttributes(attrs)
sf
})
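  // Builds a query that asks GeoMesa to compute the requested stat server-side via query hints;
  // the result comes back as a single feature whose first attribute holds the encoded stat.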
def getQuery(statString: String, ecql: Option[String] = None): Query = {
val query = new Query(sftName, ECQL.toFilter("dtg DURING 2012-01-01T18:30:00.000Z/2012-01-01T19:30:00.000Z " +
"AND bbox(geom,-80,35,-75,40)" + ecql.map(" AND " + _).getOrElse("")))
query.getHints.put(QueryHints.STATS_STRING, statString)
query.getHints.put(QueryHints.ENCODE_STATS, java.lang.Boolean.TRUE)
query
}
/**
* Not testing too much here stat-wise, as most of the stat testing is in geomesa-utils
*/
"StatsIterator" should {
"work with the MinMax stat" in {
val q = getQuery("MinMax(attr)")
val results = SelfClosingIterator(fs.getFeatures(q).features).toList
val sf = results.head
val minMaxStat = decodeStat(sft)(sf.getAttribute(0).asInstanceOf[String]).asInstanceOf[MinMax[java.lang.Long]]
minMaxStat.bounds mustEqual (0, 298)
}
"work with the IteratorStackCount stat" in {
val q = getQuery("IteratorStackCount()")
val results = SelfClosingIterator(fs.getFeatures(q).features).toList
val sf = results.head
val isc = decodeStat(sft)(sf.getAttribute(0).asInstanceOf[String]).asInstanceOf[IteratorStackCount]
// note: I don't think there is a defined answer here that isn't implementation specific
isc.count must beGreaterThanOrEqualTo(1L)
}
"work with the Enumeration stat" in {
val q = getQuery("Enumeration(idt)")
val results = SelfClosingIterator(fs.getFeatures(q).features).toList
val sf = results.head
val eh = decodeStat(sft)(sf.getAttribute(0).asInstanceOf[String]).asInstanceOf[EnumerationStat[java.lang.Integer]]
eh.size mustEqual 150
eh.frequency(0) mustEqual 1
eh.frequency(149) mustEqual 1
eh.frequency(150) mustEqual 0
}
"work with the Histogram stat" in {
val q = getQuery("Histogram(idt,5,10,14)", Some("idt between 10 and 14"))
val results = SelfClosingIterator(fs.getFeatures(q).features).toList
val sf = results.head
val rh = decodeStat(sft)(sf.getAttribute(0).asInstanceOf[String]).asInstanceOf[Histogram[java.lang.Integer]]
rh.length mustEqual 5
rh.count(rh.indexOf(10)) mustEqual 1
rh.count(rh.indexOf(11)) mustEqual 1
rh.count(rh.indexOf(12)) mustEqual 1
rh.count(rh.indexOf(13)) mustEqual 1
rh.count(rh.indexOf(14)) mustEqual 1
}
"work with multiple stats at once" in {
val q = getQuery("MinMax(attr);IteratorStackCount();Enumeration(idt);Histogram(idt,5,10,14)")
val results = SelfClosingIterator(fs.getFeatures(q).features).toList
val sf = results.head
val seqStat = decodeStat(sft)(sf.getAttribute(0).asInstanceOf[String]).asInstanceOf[SeqStat]
val stats = seqStat.stats
stats.size mustEqual 4
val minMax = stats(0).asInstanceOf[MinMax[java.lang.Long]]
val isc = stats(1).asInstanceOf[IteratorStackCount]
val eh = stats(2).asInstanceOf[EnumerationStat[java.lang.Integer]]
val rh = stats(3).asInstanceOf[Histogram[java.lang.Integer]]
minMax.bounds mustEqual (0, 298)
isc.count must beGreaterThanOrEqualTo(1L)
eh.size mustEqual 150
eh.frequency(0) mustEqual 1
eh.frequency(149) mustEqual 1
eh.frequency(150) mustEqual 0
rh.length mustEqual 5
rh.bounds mustEqual (0, 149)
(0 until 5).map(rh.count).sum mustEqual 150
}
"work with the stidx index" in {
val q = getQuery("MinMax(attr)")
q.setFilter(ECQL.toFilter("bbox(geom,-80,35,-75,40)"))
val results = SelfClosingIterator(fs.getFeatures(q).features).toList
val sf = results.head
val minMaxStat = decodeStat(sft)(sf.getAttribute(0).asInstanceOf[String]).asInstanceOf[MinMax[java.lang.Long]]
minMaxStat.bounds mustEqual (0, 298)
}
"work with the record index" in {
val q = getQuery("MinMax(attr)")
q.setFilter(ECQL.toFilter("IN(0)"))
val results = SelfClosingIterator(fs.getFeatures(q).features).toList
val sf = results.head
val minMaxStat = decodeStat(sft)(sf.getAttribute(0).asInstanceOf[String]).asInstanceOf[MinMax[java.lang.Long]]
minMaxStat.bounds mustEqual (0, 0)
}
"work with the attribute partial index" in {
val q = getQuery("MinMax(attr)")
q.setFilter(ECQL.toFilter("attr > 10"))
val results = SelfClosingIterator(fs.getFeatures(q).features).toList
val sf = results.head
val minMaxStat = decodeStat(sft)(sf.getAttribute(0).asInstanceOf[String]).asInstanceOf[MinMax[java.lang.Long]]
minMaxStat.bounds mustEqual (12, 298)
}
"work with the attribute join index" in {
val q = getQuery("MinMax(idt)")
q.setFilter(ECQL.toFilter("attr > 10"))
val results = SelfClosingIterator(fs.getFeatures(q).features).toList
val sf = results.head
val minMaxStat = decodeStat(sft)(sf.getAttribute(0).asInstanceOf[String]).asInstanceOf[MinMax[java.lang.Integer]]
minMaxStat.bounds mustEqual (6, 149)
}
"work with the attribute full index" in {
val q = getQuery("MinMax(attr)")
q.setFilter(ECQL.toFilter("idt > 10"))
val results = SelfClosingIterator(fs.getFeatures(q).features).toList
val sf = results.head
val minMaxStat = decodeStat(sft)(sf.getAttribute(0).asInstanceOf[String]).asInstanceOf[MinMax[java.lang.Long]]
minMaxStat.bounds mustEqual (22, 298)
}
}
}
| ronq/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/iterators/KryoLazyStatsIteratorTest.scala | Scala | apache-2.0 | 7,076 |
package ch2
import org.learningconcurrency._
import ch2._
object UnsafeUid extends App {
import scala.annotation.tailrec
private val unsafe = scala.concurrent.util.Unsafe.instance
private val uidCountOffset = unsafe.objectFieldOffset(UnsafeUid.getClass.getDeclaredField("uidCount"))
@volatile var uidCount = 0L
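  // Optimistic CAS loop: read the counter, then try to publish the incremented value through the
  // field's memory offset; if another thread won the race, the @tailrec call retries with a fresh read.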
@tailrec def getUniqueId(): Long = {
val oldUid = uidCount
val newUid = uidCount + 1
if (unsafe.compareAndSwapLong(UnsafeUid, uidCountOffset, oldUid, newUid)) newUid
else getUniqueId()
}
def getUniqueIds(n: Int): Unit = {
val uids = for (i <- 0 until n) yield getUniqueId()
log(s"Generated uids: $uids")
}
val t = thread {
getUniqueIds(5)
}
getUniqueIds(5)
t.join()
}
| tophua/spark1.52 | examples/src/main/scala/scalaDemo/threadConcurrency/ch2/Unsafe.scala | Scala | apache-2.0 | 744 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn.internal
import com.intel.analytics.bigdl.dllib.nn.abstractnn._
import com.intel.analytics.bigdl.dllib.nn._
import com.intel.analytics.bigdl.dllib.nn.SpatialMaxPooling
import com.intel.analytics.bigdl.dllib.nn.{Sequential => TSequential}
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.Shape
import scala.reflect.ClassTag
/**
* Applies global max pooling operation for temporal data.
* The input of this layer should be 3D.
*
* When you use this layer as the first layer of a model, you need to provide the argument
* inputShape (a Single Shape, does not include the batch dimension).
*
* @tparam T The numeric type of parameter(e.g. weight, bias). Only support float/double now.
*/
class GlobalMaxPooling1D[T: ClassTag](
inputShape: Shape = null)(implicit ev: TensorNumeric[T])
extends GlobalPooling1D[T](inputShape) {
override def doBuild(inputShape: Shape): AbstractModule[Tensor[T], Tensor[T], T] = {
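    // Temporal global max pooling is expressed by reshaping the (steps, features) input into a
    // (steps, 1, features) NHWC "image", max-pooling over all time steps with SpatialMaxPooling,
    // and squeezing the singleton dimensions away.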
val input = inputShape.toSingle().toArray
val model = TSequential[T]()
model.add(com.intel.analytics.bigdl.dllib.nn.Reshape(Array(input(1), 1, input(2)), Some(true)))
val layer = SpatialMaxPooling(
kW = 1,
kH = input(1),
format = DataFormat.NHWC)
model.add(layer)
model.add(Squeeze(3))
model.add(Squeeze(2))
model.asInstanceOf[AbstractModule[Tensor[T], Tensor[T], T]]
}
}
object GlobalMaxPooling1D {
def apply[@specialized(Float, Double) T: ClassTag](
inputShape: Shape = null)(implicit ev: TensorNumeric[T]) : GlobalMaxPooling1D[T] = {
new GlobalMaxPooling1D[T](inputShape)
}
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/nn/internal/GlobalMaxPooling1D.scala | Scala | apache-2.0 | 2,342 |
package lila.app
package templating
import java.util.Locale
import scala.collection.mutable
import org.joda.time.format._
import org.joda.time.format.ISODateTimeFormat
import org.joda.time.{ Period, PeriodType, DurationFieldType, DateTime }
import play.twirl.api.Html
import lila.api.Context
trait DateHelper { self: I18nHelper =>
private val dateTimeStyle = "MS"
private val dateStyle = "M-"
private val dateTimeFormatters = mutable.Map[String, DateTimeFormatter]()
private val dateFormatters = mutable.Map[String, DateTimeFormatter]()
private val periodFormatters = mutable.Map[String, PeriodFormatter]()
private val periodType = PeriodType forFields Array(
DurationFieldType.days,
DurationFieldType.hours,
DurationFieldType.minutes)
private val isoFormatter = ISODateTimeFormat.dateTime
private val englishDateFormatter = DateTimeFormat forStyle dateStyle
private def dateTimeFormatter(ctx: Context): DateTimeFormatter =
dateTimeFormatters.getOrElseUpdate(
lang(ctx).language,
DateTimeFormat forStyle dateTimeStyle withLocale new Locale(lang(ctx).language))
private def dateFormatter(ctx: Context): DateTimeFormatter =
dateFormatters.getOrElseUpdate(
lang(ctx).language,
DateTimeFormat forStyle dateStyle withLocale new Locale(lang(ctx).language))
private def periodFormatter(ctx: Context): PeriodFormatter =
periodFormatters.getOrElseUpdate(
lang(ctx).language,
PeriodFormat wordBased new Locale(lang(ctx).language))
def showDateTime(date: DateTime)(implicit ctx: Context): String =
dateTimeFormatter(ctx) print date
def showDate(date: DateTime)(implicit ctx: Context): String =
dateFormatter(ctx) print date
def showEnglishDate(date: DateTime): String =
englishDateFormatter print date
def semanticDate(date: DateTime)(implicit ctx: Context) = Html {
s"""<time datetime="${isoFormatter print date}">${showDate(date)}</time>"""
}
def showPeriod(period: Period)(implicit ctx: Context): String =
periodFormatter(ctx) print period.normalizedStandard(periodType)
def showMinutes(minutes: Int)(implicit ctx: Context): String =
showPeriod(new Period(minutes * 60 * 1000l))
def isoDate(date: DateTime): String = isoFormatter print date
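  // The helpers below only emit <time> placeholder tags; the relative/calendar rendering is
  // presumably done client-side (the "moment" class names suggest moment.js).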
def momentFormat(date: DateTime, format: String): Html = Html {
s"""<time class="moment" datetime="${isoFormatter print date}" data-format="$format"></time>"""
}
def momentFormat(date: DateTime): Html = momentFormat(date, "calendar")
def momentFromNow(date: DateTime) = Html {
s"""<time class="moment-from-now" datetime="${isoFormatter print date}"></time>"""
}
def secondsFromNow(seconds: Int) = momentFromNow(DateTime.now plusSeconds seconds)
private val atomDateFormatter = ISODateTimeFormat.dateTime
def atomDate(date: DateTime): String = atomDateFormatter print date
def atomDate(field: String)(doc: io.prismic.Document): Option[String] =
doc getDate field map (_.value.toDateTimeAtStartOfDay) map atomDate
}
| pavelo65/lila | app/templating/DateHelper.scala | Scala | mit | 3,021 |
package drt.client.components
import scalacss.Defaults._
object GlobalStyles extends StyleSheet.Inline {
import dsl._
style(unsafeRoot("body")(
paddingTop(70.px))
)
val bootstrapStyles = new BootstrapStyles
}
| somanythings/drt-scalajs-spa-exploration | client/src/main/scala/spatutorial/client/components/GlobalStyles.scala | Scala | apache-2.0 | 225 |
import java.util.concurrent.{Callable, ExecutorService, Future, TimeUnit}

object Par {
  // A Par is represented as a function from an ExecutorService to a Future,
  // following the chapter 7 exercises this file works through.
  type Par[A] = ExecutorService => Future[A]

  def run[A](es: ExecutorService)(a: Par[A]): Future[A] = a(es)

  def unit[A](a: A): Par[A] = (es: ExecutorService) => UnitFuture(a)
private case class UnitFuture[A](get: A) extends Future[A]{
def isDone = true
def get(timeout: Long, units: TimeUnit) = get
def isCancelled = false
def cancel(evenIfRunning: Boolean): Boolean = false
}
  def map2[A, B, C](a: Par[A], b: Par[B])(f: (A, B) => C): Par[C] =
    (es: ExecutorService) => {
      val af = a(es)
      val bf = b(es)
      UnitFuture(f(af.get, bf.get))
    }
def fork[A](a: => Par[A]): Par[A] =
es => es.submit(new Callable[A] {
def call = a(es).get
})
def asyncF[A, B](f: A => B): A => Par[B] =
a => fork(unit(f(a))) //think of the previous lazyUnit
def sortPar(parList: Par[List[Int]]): Par[List[Int]] =
map2(parList, unit(()))((a, _) => a.sorted)
def map[A, B](pa: Par[A])(f: A => B): Par[B] =
map2(pa, unit(()))((a, _) => f(a))
  // below is difficult; delegate to one of the concrete implementations that follow
  def sequence[A](ps: List[Par[A]]): Par[List[A]] = sequence_simple(ps)
def sequence_simple[A](l: List[Par[A]]): Par[List[A]] =
l.foldRight[Par[List[A]]](unit(List()))((h, t) => map2(h, t)(_ :: _))
def sequenceRight[A](as: List[Par[A]]): Par[List[A]] =
as match {
case Nil => unit(Nil)
case h :: t => map2(h, fork(sequenceRight(t)))(_ :: _)
}
//think more deeply needed
  def parMap[A, B](ps: List[A])(f: A => B): Par[List[B]] = fork {
    val fbs: List[Par[B]] = ps.map(asyncF(f))
sequence(fbs)
}
def parFilter[A](as: List[A])(f: A => Boolean): Par[List[A]] = {
    val fbs: List[Par[List[A]]] = as.map(asyncF((a: A) => if (f(a)) List(a) else List()))
map(sequence(fbs))(_.flatten)
}
  // note: the two maps used above are not the same operation (List.map vs the Par map defined earlier)
  // First direct version of choice, kept for reference (superseded by the choiceN-based one below):
  // def choice[A](cond: Par[Boolean])(t: Par[A], f: Par[A]): Par[A] =
  //   es =>
  //     if (run(es)(cond).get) t(es)
  //     else f(es)
  // A first, incorrect sketch of choiceN tried to index the list with the Par[Int] directly:
  // def choiceN[A](n: Par[Int])(choices: List[Par[A]]): Par[A] =
  //   es => choices(n)(es)
  // Note the problem statement: "Let us say that choiceN runs n, and then uses that to select a ..."
  // so the index has to be computed first.
  def choiceN[A](n: Par[Int])(choices: List[Par[A]]): Par[A] =
    es => {
      val index = run(es)(n).get
      run(es)(choices(index))
      // alternatively: choices(index)(es)
    }
  // choice expressed via choiceN, by mapping the Par[Boolean] to a Par[Int] index
  def choice[A](cond: Par[Boolean])(t: Par[A], f: Par[A]): Par[A] =
    choiceN(map(cond)(b => if (b) 0 else 1))(List(t, f))
  def choiceMap[K, V](key: Par[K])(choices: Map[K, Par[V]]): Par[V] =
    es => {
      val k = run(es)(key).get
      run(es)(choices(k))
    }
  def chooser[A, B](pa: Par[A])(choices: A => Par[B]): Par[B] =
    es => {
      val a = run(es)(pa).get
      run(es)(choices(a))
    }
} | Tomcruseal/FunctionalLearn | fpinscala/src/main/scala/fpinScala/pars/Par.scala | Scala | mit | 2,883 |
package models.analysis
import play.api.libs.json._
object SampleStatuses {
sealed trait SampleStatus {
val key: Int
}
object SampleStatus {
// scalastyle:off
def fromInt(i: Int): Option[SampleStatus] = i match {
case Intact.key => Some(Intact)
case Destroyed.key => Some(Destroyed)
case Contaminated.key => Some(Contaminated)
case Prepared.key => Some(Prepared)
case Discarded.key => Some(Discarded)
case Cancelled.key => Some(Cancelled)
case Consumed.key => Some(Consumed)
case Dessicated.key => Some(Dessicated)
case Degraded.key => Some(Degraded)
case Mounted.key => Some(Mounted)
case _ => None
}
// scalastyle:on
@throws(classOf[IllegalArgumentException])
def unsafeFromInt(i: Int): SampleStatus = {
fromInt(i).getOrElse {
throw new IllegalArgumentException(s"Unknown sample status $i")
}
}
implicit val reads: Reads[SampleStatus] = Reads { jsv =>
jsv.validate[Int] match {
case JsSuccess(Intact.key, _) => JsSuccess(Intact)
case JsSuccess(Destroyed.key, _) => JsSuccess(Destroyed)
case JsSuccess(Contaminated.key, _) => JsSuccess(Contaminated)
case JsSuccess(Prepared.key, _) => JsSuccess(Prepared)
case JsSuccess(Discarded.key, _) => JsSuccess(Discarded)
case JsSuccess(Cancelled.key, _) => JsSuccess(Cancelled)
case JsSuccess(Consumed.key, _) => JsSuccess(Consumed)
case JsSuccess(Dessicated.key, _) => JsSuccess(Dessicated)
case JsSuccess(Degraded.key, _) => JsSuccess(Degraded)
case JsSuccess(Mounted.key, _) => JsSuccess(Mounted)
case JsSuccess(bad, p) => JsError(p, s"Unknown sample status code $bad")
case err: JsError => err
}
}
implicit val writes: Writes[SampleStatus] = Writes(ss => JsNumber(ss.key))
}
case object Intact extends SampleStatus {
override val key = 1
}
case object Destroyed extends SampleStatus {
override val key = 2
}
case object Contaminated extends SampleStatus {
override val key = 3
}
case object Prepared extends SampleStatus {
override val key = 4
}
case object Discarded extends SampleStatus {
override val key = 5
}
case object Cancelled extends SampleStatus {
override val key = 6
}
case object Consumed extends SampleStatus {
override val key = 7
}
case object Dessicated extends SampleStatus {
override val key = 8
}
case object Degraded extends SampleStatus {
override val key = 9
}
case object Mounted extends SampleStatus {
override val key = 10
}
}
| MUSIT-Norway/musit | service_backend/app/models/analysis/SampleStatuses.scala | Scala | gpl-2.0 | 2,759 |
package semverfi
trait Show[T <: SemVersion] {
def show(v: T): String
}
object Show {
implicit object ShowNormal extends Show[NormalVersion] {
def show(v: NormalVersion) = "%d.%d.%d" format(
v.major, v.minor, v.patch
)
}
implicit object ShowPreRelease extends Show[PreReleaseVersion] {
def show(v: PreReleaseVersion) = "%d.%d.%d-%s" format(
v.major, v.minor, v.patch, v.classifier.mkString(".")
)
}
implicit object ShowBuild extends Show[BuildVersion] {
def show(v: BuildVersion) = "%d.%d.%d%s+%s" format(
v.major, v.minor, v.patch,
v.classifier match {
case Nil => ""
case cs => cs.mkString("-", ".", "")
},
v.build.mkString(".")
)
}
implicit object ShowInvalid extends Show[Invalid] {
def show(v: Invalid) = "invalid: %s" format v.raw
}
implicit object ShowSemVersion extends Show[SemVersion] {
def show(v: SemVersion) = v match {
case x: NormalVersion => implicitly[Show[NormalVersion]].show(x)
case x: PreReleaseVersion => implicitly[Show[PreReleaseVersion]].show(x)
case x: BuildVersion => implicitly[Show[BuildVersion]].show(x)
case x: Invalid => implicitly[Show[Invalid]].show(x)
}
}
def apply[T <: SemVersion: Show](v: T) =
implicitly[Show[T]].show(v)
}
| softprops/semverfi | src/main/scala/show.scala | Scala | mit | 1,309 |
package org.scalgebra.generation
import algebra.ring._
import org.scalacheck._
import org.scalgebra.DenseMatrix
import scala.reflect.ClassTag
/**
* Generates DenseMatrices.
*
* @author Daniyar Itegulov
*/
object DenseMatrixGen {
def genRingDenseMatrix[T: Arbitrary : Ring : ClassTag]: Gen[DenseMatrix[T]] =
Gen.sized { rows =>
Gen.sized { cols =>
for {
matrix <- Gen.containerOfN[Array, Array[T]](rows, Gen.containerOfN[Array, T](cols, Arbitrary.arbitrary[T]))
} yield new DenseMatrix(matrix)
}
}
def genUnitDenseMatrix[T: Ring : ClassTag]: Gen[DenseMatrix[T]] =
Gen.sized { size =>
Gen.const(DenseMatrix.unit(size))
}
def genZeroDenseMatrix[T: Ring : ClassTag]: Gen[DenseMatrix[T]] =
Gen.sized { rows =>
Gen.sized { cols =>
Gen.const(DenseMatrix.zeros[T](rows, cols))
}
}
def genOneDenseMatrix[T: Ring : ClassTag]: Gen[DenseMatrix[T]] =
Gen.sized { rows =>
Gen.sized { cols =>
Gen.const(DenseMatrix.ones[T](rows, cols))
}
}
def genOneRowedMatrix[T: Arbitrary : Ring : ClassTag]: Gen[DenseMatrix[T]] =
Gen.sized { size =>
for {
matrix <- Gen.containerOfN[Seq, T](size, Arbitrary.arbitrary[T])
} yield DenseMatrix(matrix)
}
def genOneColumnMatrix[T: Arbitrary : Ring : ClassTag]: Gen[DenseMatrix[T]] =
Gen.sized { size =>
for {
matrix <- Gen.containerOfN[Array, Array[T]](size, Gen.containerOfN[Array, T](1, Arbitrary.arbitrary[T]))
} yield new DenseMatrix(matrix)
}
def genOneElementMatrix[T: Arbitrary : ClassTag]: Gen[DenseMatrix[T]] =
for {
element <- Arbitrary.arbitrary[T]
} yield DenseMatrix(Tuple1(element))
implicit def arbitraryRingDenseMatrix[T: Arbitrary : Ring : ClassTag]: Arbitrary[DenseMatrix[T]] =
Arbitrary {
Gen.frequency[DenseMatrix[T]](
(70, genRingDenseMatrix),
(5, genOneRowedMatrix),
(5, genOneColumnMatrix),
(5, genZeroDenseMatrix),
(5, genUnitDenseMatrix),
(5, genOneDenseMatrix),
(5, genOneElementMatrix)
)
}
def arbitraryOneRowedDenseMatrix[T: Arbitrary : Ring : ClassTag]: Arbitrary[DenseMatrix[T]] =
Arbitrary {
genOneRowedMatrix
}
def arbitraryOneColumnDenseMatrix[T: Arbitrary : Ring : ClassTag]: Arbitrary[DenseMatrix[T]] =
Arbitrary {
genOneColumnMatrix
}
}
| itegulov/scalgebra | src/test/scala/org/scalgebra/generation/DenseMatrixGen.scala | Scala | mit | 2,421 |
package scala.tools.nsc
import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
import org.openjdk.jmh
import scala.tools.nsc.plugins.{Plugin, PluginComponent}
@BenchmarkMode(Array(jmh.annotations.Mode.AverageTime))
@Fork(2)
@Threads(1)
@Warmup(iterations = 5)
@Measurement(iterations = 5)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
class PhaseAssemblyBenchmark {
class Data[G <: Global with Singleton](val global: G, val components: List[SubComponent { val global: G}])
var data: Data[_] = _
@Param(Array("1", "4", "8", "16"))
var size: Int = 16
@Setup
def setup(): Unit = {
val global = new Global(new Settings)
case class component[G <: Global with Singleton](val global: G, val phaseName: String, override val runsRightAfter: Option[String], override val runsAfter: List[String], override val runsBefore: List[String]) extends SubComponent {
override def newPhase(prev: Phase): Phase = ???
}
object component {
def apply(phaseName: String, runsRightAfter: Option[String], runsAfter: List[String], runsBefore: List[String]): component[global.type] = {
new component[global.type](global, phaseName, runsRightAfter, runsAfter, runsBefore)
}
}
val N = size
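    // Synthetic phase graph: phase i runs after phases i-1 and i-2 (where present) and before
    // "terminal"; "parser" anchors the front of the chain.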
val components = List.tabulate(N){ i =>
component(i.toString, None, if (i == 0) List("parser") else List.tabulate(2)(j => i - j - 1).filter(_ >= 0).map(_.toString), List("terminal"))
} ::: List(component("parser", None, Nil, Nil), component("terminal", None, Nil, List(N.toString)))
data = new Data[global.type](global, components )
}
@Benchmark def assemble(): Object = {
val s = data.asInstanceOf[Data[Global with Singleton]]
val g = s.global
val graph = g.phasesSetToDepGraph(s.components.reverse)
graph.removeDanglingNodes()
graph.validateAndEnforceHardlinks()
graph.collapseHardLinksAndLevels(graph.getNodeByPhase("parser"), 1)
graph
}
}
object PhaseAssemblyBenchmark {
def main(args: Array[String]): Unit = {
val bench = new PhaseAssemblyBenchmark
bench.setup()
bench.assemble()
}
} | scala/scala | test/benchmarks/src/main/scala/scala/tools/nsc/PhaseAssemblyBenchmark.scala | Scala | apache-2.0 | 2,125 |
package sangria.execution
import sangria.ast
import sangria.schema._
import sangria.introspection.{isIntrospection, TypeNameMetaField}
import scala.annotation.unchecked.uncheckedVariance
import scala.util.{Failure, Success, Try}
trait QueryReducer[-Ctx, +Out] {
type Acc
def initial: Acc
def reduceAlternatives(alternatives: Seq[Acc]): Acc
def reduceField[Val](
fieldAcc: Acc,
childrenAcc: Acc,
path: ExecutionPath,
ctx: Ctx,
astFields: Vector[ast.Field],
parentType: ObjectType[Out, Val] @uncheckedVariance,
field: Field[Ctx, Val] @uncheckedVariance,
argumentValuesFn: (ExecutionPath, List[Argument[_]], List[ast.Argument]) ⇒ Try[Args]): Acc
def reduceScalar[T](
path: ExecutionPath,
ctx: Ctx,
tpe: ScalarType[T]): Acc
def reduceEnum[T](
path: ExecutionPath,
ctx: Ctx,
tpe: EnumType[T]): Acc
def reduceCtx(acc: Acc, ctx: Ctx): ReduceAction[Out, Out]
}
object QueryReducer {
def measureComplexity[Ctx](fn: (Double, Ctx) ⇒ ReduceAction[Ctx, Ctx]): QueryReducer[Ctx, Ctx] =
new MeasureComplexity[Ctx](fn)
def rejectComplexQueries[Ctx](complexityThreshold: Double, error: (Double, Ctx) ⇒ Throwable): QueryReducer[Ctx, Ctx] =
measureComplexity[Ctx]((c, ctx) ⇒
if (c >= complexityThreshold) throw error(c, ctx) else ctx)
def measureDepth[Ctx](fn: (Int, Ctx) ⇒ ReduceAction[Ctx, Ctx]): QueryReducer[Ctx, Ctx] =
new MeasureQueryDepth[Ctx](fn)
def rejectMaxDepth[Ctx](maxDepth: Int): QueryReducer[Ctx, Ctx] =
measureDepth[Ctx]((depth, ctx) ⇒
if (depth > maxDepth) throw new MaxQueryDepthReachedError(maxDepth) else ctx)
def collectTags[Ctx, T](tagMatcher: PartialFunction[FieldTag, T])(fn: (Seq[T], Ctx) ⇒ ReduceAction[Ctx, Ctx]): QueryReducer[Ctx, Ctx] =
new TagCollector[Ctx, T](tagMatcher, fn)
def rejectIntrospection[Ctx](includeTypeName: Boolean = true): QueryReducer[Ctx, Ctx] =
hasIntrospection((hasIntro, ctx) ⇒ if (hasIntro) throw IntrospectionNotAllowedError else ctx, includeTypeName)
def hasIntrospection[Ctx](fn: (Boolean, Ctx) ⇒ ReduceAction[Ctx, Ctx], includeTypeName: Boolean = true): QueryReducer[Ctx, Ctx] =
new HasIntrospectionReducer[Ctx](includeTypeName, fn)
}
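// Hedged usage sketch (names below are assumptions, not confirmed by this file): reducers are
// typically handed to sangria's Executor, e.g.
//   Executor.execute(schema, query, userContext,
//     queryReducers = QueryReducer.rejectMaxDepth[MyCtx](15) ::
//       QueryReducer.rejectComplexQueries[MyCtx](1000, (c, _) => new Exception(s"too complex: $c")) :: Nil)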
class MeasureComplexity[Ctx](action: (Double, Ctx) ⇒ ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] {
type Acc = Double
import MeasureComplexity.DefaultComplexity
val initial = 0.0D
def reduceAlternatives(alternatives: Seq[Acc]) = alternatives.max
def reduceField[Val](
fieldAcc: Acc,
childrenAcc: Acc,
path: ExecutionPath,
ctx: Ctx,
astFields: Vector[ast.Field],
parentType: ObjectType[Ctx, Val],
field: Field[Ctx, Val],
argumentValuesFn: (ExecutionPath, List[Argument[_]], List[ast.Argument]) ⇒ Try[Args]): Acc = {
val estimate = field.complexity match {
case Some(fn) ⇒
argumentValuesFn(path, field.arguments, astFields.head.arguments) match {
case Success(args) ⇒ fn(ctx, args, childrenAcc)
case Failure(_) ⇒ DefaultComplexity + childrenAcc
}
case None ⇒ DefaultComplexity + childrenAcc
}
fieldAcc + estimate
}
def reduceScalar[T](
path: ExecutionPath,
ctx: Ctx,
tpe: ScalarType[T]): Acc = tpe.complexity
def reduceEnum[T](
path: ExecutionPath,
ctx: Ctx,
tpe: EnumType[T]): Acc = initial
def reduceCtx(acc: Acc, ctx: Ctx) =
action(acc, ctx)
}
class MeasureQueryDepth[Ctx](action: (Int, Ctx) ⇒ ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] {
type Acc = Int
def reduceAlternatives(alternatives: Seq[Acc]) = alternatives.max
def initial: Acc = 0
def reduceField[Val](
fieldAcc: Acc,
childrenAcc: Acc,
path: ExecutionPath,
ctx: Ctx,
astFields: Vector[ast.Field],
parentType: ObjectType[Ctx, Val],
field: Field[Ctx, Val],
argumentValuesFn: (ExecutionPath, List[Argument[_]], List[ast.Argument]) ⇒ Try[Args]): Acc =
childrenAcc
def reduceScalar[T](
path: ExecutionPath,
ctx: Ctx,
tpe: ScalarType[T]): Acc = path.size
def reduceEnum[T](
path: ExecutionPath,
ctx: Ctx,
tpe: EnumType[T]): Acc = path.size
def reduceCtx(acc: Acc, ctx: Ctx) =
action(acc, ctx)
}
object MeasureComplexity {
val DefaultComplexity = 1.0D
}
class TagCollector[Ctx, T](tagMatcher: PartialFunction[FieldTag, T], action: (Seq[T], Ctx) ⇒ ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] {
type Acc = Vector[T]
val initial = Vector.empty
def reduceAlternatives(alternatives: Seq[Acc]) = alternatives.toVector.flatten
def reduceField[Val](
fieldAcc: Acc,
childrenAcc: Acc,
path: ExecutionPath,
ctx: Ctx,
astFields: Vector[ast.Field],
parentType: ObjectType[Ctx, Val],
field: Field[Ctx, Val],
argumentValuesFn: (ExecutionPath, List[Argument[_]], List[ast.Argument]) ⇒ Try[Args]): Acc =
fieldAcc ++ childrenAcc ++ field.tags.collect {case t if tagMatcher.isDefinedAt(t) ⇒ tagMatcher(t)}
def reduceScalar[ST](
path: ExecutionPath,
ctx: Ctx,
tpe: ScalarType[ST]): Acc = initial
def reduceEnum[ET](
path: ExecutionPath,
ctx: Ctx,
tpe: EnumType[ET]): Acc = initial
def reduceCtx(acc: Acc, ctx: Ctx) =
action(acc, ctx)
}
class HasIntrospectionReducer[Ctx](includeTypeName: Boolean, action: (Boolean, Ctx) ⇒ ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] {
type Acc = Boolean
val initial = false
def reduceAlternatives(alternatives: Seq[Acc]) = alternatives.exists(hasIntro ⇒ hasIntro)
def reduceField[Val](
fieldAcc: Acc,
childrenAcc: Acc,
path: ExecutionPath,
ctx: Ctx,
astFields: Vector[ast.Field],
parentType: ObjectType[Ctx, Val],
field: Field[Ctx, Val],
argumentValuesFn: (ExecutionPath, List[Argument[_]], List[ast.Argument]) ⇒ Try[Args]): Acc = {
val self =
if (!includeTypeName && field.name == TypeNameMetaField.name) false
else isIntrospection(parentType, field)
fieldAcc || childrenAcc || self
}
def reduceScalar[ST](
path: ExecutionPath,
ctx: Ctx,
tpe: ScalarType[ST]): Acc = initial
def reduceEnum[ET](
path: ExecutionPath,
ctx: Ctx,
tpe: EnumType[ET]): Acc = initial
def reduceCtx(acc: Acc, ctx: Ctx) =
action(acc, ctx)
} | PhilAndrew/JumpMicro | JMSangriaGraphql/src/main/scala/sangria/execution/QueryReducer.scala | Scala | mit | 6,374 |
/*
* Copyright (c) 2016. Fengguo (Hugo) Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.jc.incremental.jawa
package remote
import java.io.{File, PrintWriter, StringWriter}
import org.argus.jawa.core.io.SourceFile
import org.jetbrains.jps.incremental.messages.BuildMessage.Kind
/**
* @author <a href="mailto:[email protected]">Fengguo Wei</a>
*/
class EventGeneratingClient(listener: Event => Unit, canceled: => Boolean) extends Client {
def message(kind: Kind, text: String, source: SourceFile, line: Option[Long], column: Option[Long]) {
listener(MessageEvent(kind, text, source, line, column))
}
def trace(exception: Throwable) {
val lines = {
val writer = new StringWriter()
exception.printStackTrace(new PrintWriter(writer))
      writer.toString.split("\\n")
}
listener(TraceEvent(exception.getMessage, lines))
}
def progress(text: String, done: Option[Float]) {
listener(ProgressEvent(text, done))
}
def debug(text: String) {
listener(DebugEvent(text))
}
def generated(source: SourceFile, module: File, name: String) {
listener(GeneratedEvent(source, module, name))
}
def deleted(module: File) {
listener(DeletedEvent(module))
}
def isCanceled = canceled
def processed(source: SourceFile) {
listener(SourceProcessedEvent(source))
}
override def compilationEnd() {
listener(CompilationEndEvent())
}
}
| arguslab/argus-cit-intellij | jc-plugin/src/main/scala/org/argus/jc/incremental/jawa/remote/EventGeneratingClient.scala | Scala | epl-1.0 | 1,705 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dnvriend.custom
import com.github.dnvriend.TestSpec
import scala.concurrent.{ ExecutionContext, Future }
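/**
 * Wraps a Future[Option[A]] as a value class and lifts map/flatMap onto the inner A,
 * short-circuiting on None: essentially a hand-rolled OptionT-style transformer over Future.
 */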
case class ABFuture[+A](future: Future[Option[A]]) extends AnyVal {
def flatMap[B](f: A => ABFuture[B])(implicit ec: ExecutionContext): ABFuture[B] = {
val newFuture = future.flatMap {
case Some(a) => f(a).future
case None => Future.successful(None)
}
ABFuture(newFuture)
}
def map[B](f: A => B)(implicit ec: ExecutionContext): ABFuture[B] = {
ABFuture(future.map(option => option.map(f)))
}
}
class CustomFutureFunctorTest extends TestSpec {
}
| dnvriend/study-category-theory | scalaz-test/src/test/com/github/dnvriend/custom/CustomFutureFunctorTest.scala | Scala | apache-2.0 | 1,212 |
/* Copyright 2015 UniCredit S.p.A.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.unicredit.riffel
import Chisel._
object TestRunner extends App {
val argz = Array("--backend", "c", "--compile", "--genHarness", "--test", "--targetDir", "./target")
chiselMainTest(argz, () => Module(RiffelRXComm(64))){
rc => new RiffelRXCommTester(rc)}
chiselMainTest(argz, () => Module(RiffelTXComm(64, 10))){
rc => new RiffelTXCommTester(rc)}
}
| unicredit/riffel | src/test/scala/eu/unicredit/riffel/TestRunner.scala | Scala | apache-2.0 | 966 |
package org.shelmet.heap.parser
/**
* Primitive array type codes as defined by VM specification.
*/
object ArrayTypeCodes {
// Typecodes for array elements.
// Refer to newarray instruction in VM Spec.
final val T_BOOLEAN: Int = 4
final val T_CHAR: Int = 5
final val T_FLOAT: Int = 6
final val T_DOUBLE: Int = 7
final val T_BYTE: Int = 8
final val T_SHORT: Int = 9
final val T_INT: Int = 10
final val T_LONG: Int = 11
} | rorygraves/shelmet | src/main/scala/org/shelmet/heap/parser/ArrayTypeCodes.scala | Scala | gpl-2.0 | 442 |
package com.cds.learn.chapter7
import java.util.concurrent.atomic.{AtomicInteger, AtomicReference}
import java.util.concurrent.{Callable,ExecutorService}
import annotation.tailrec
/*
* Implementation is taken from `scalaz` library, with only minor changes. See:
*
* https://github.com/scalaz/scalaz/blob/scalaz-seven/concurrent/src/main/scala/scalaz/concurrent/Actor.scala
*
* This code is copyright Andriy Plokhotnyuk, Runar Bjarnason, and other contributors,
* and is licensed using 3-clause BSD, see LICENSE file at:
*
* https://github.com/scalaz/scalaz/blob/scalaz-seven/etc/LICENCE
*/
/**
* Processes messages of type `A`, one at a time. Messages are submitted to
* the actor with the method `!`. Processing is typically performed asynchronously,
* this is controlled by the provided `strategy`.
*
* Memory consistency guarantee: when each message is processed by the `handler`, any memory that it
* mutates is guaranteed to be visible by the `handler` when it processes the next message, even if
* the `strategy` runs the invocations of `handler` on separate threads. This is achieved because
* the `Actor` reads a volatile memory location before entering its event loop, and writes to the same
* location before suspending.
*
* Implementation based on non-intrusive MPSC node-based queue, described by Dmitriy Vyukov:
* [[http://www.1024cores.net/home/lock-free-algorithms/queues/non-intrusive-mpsc-node-based-queue]]
*
* @see scalaz.concurrent.Promise for a use case.
*
* @param handler The message handler
* @param onError Exception handler, called if the message handler throws any `Throwable`.
* @param strategy Execution strategy, for example, a strategy that is backed by an `ExecutorService`
* @tparam A The type of messages accepted by this actor.
*/
final class Actor[A](strategy: Strategy)(handler: A => Unit, onError: Throwable => Unit = throw(_)) {
self =>
private val tail = new AtomicReference(new Node[A]())
private val suspended = new AtomicInteger(1)
private val head = new AtomicReference(tail.get)
/** Alias for `apply` */
def !(a: A) {
val n = new Node(a)
head.getAndSet(n).lazySet(n)
trySchedule()
}
/** Pass the message `a` to the mailbox of this actor */
def apply(a: A) {
this ! a
}
def contramap[B](f: B => A): Actor[B] =
new Actor[B](strategy)((b: B) => (this ! f(b)), onError)
private def trySchedule() {
if (suspended.compareAndSet(1, 0)) schedule()
}
private def schedule() {
strategy(act())
}
private def act() {
val t = tail.get
val n = batchHandle(t, 1024)
if (n ne t) {
n.a = null.asInstanceOf[A]
tail.lazySet(n)
schedule()
} else {
suspended.set(1)
if (n.get ne null) trySchedule()
}
}
@tailrec
private def batchHandle(t: Node[A], i: Int): Node[A] = {
val n = t.get
if (n ne null) {
try {
handler(n.a)
} catch {
case ex: Throwable => onError(ex)
}
if (i > 0) batchHandle(n, i - 1) else n
} else t
}
}
private class Node[A](var a: A = null.asInstanceOf[A]) extends AtomicReference[Node[A]]
object Actor {
/** Create an `Actor` backed by the given `ExecutorService`. */
def apply[A](es: ExecutorService)(handler: A => Unit, onError: Throwable => Unit = throw(_)): Actor[A] =
new Actor(Strategy.fromExecutorService(es))(handler, onError)
}
/**
* Provides a function for evaluating expressions, possibly asynchronously.
* The `apply` function should typically begin evaluating its argument
* immediately. The returned thunk can be used to block until the resulting `A`
* is available.
*/
trait Strategy {
def apply[A](a: => A): () => A
}
object Strategy {
/**
* We can create a `Strategy` from any `ExecutorService`. It's a little more
* convenient than submitting `Callable` objects directly.
*/
def fromExecutorService(es: ExecutorService): Strategy = new Strategy {
def apply[A](a: => A): () => A = {
val f = es.submit { new Callable[A] { def call = a} }
() => f.get
}
}
/**
* A `Strategy` which begins executing its argument immediately in the calling thread.
*/
def sequential: Strategy = new Strategy {
def apply[A](a: => A): () => A = {
val r = a
() => r
}
}
}
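// Hedged usage sketch (not part of the original file): drives the Actor with the sequential
// strategy defined above, so every message is handled synchronously in the calling thread.
object ActorExample {
  def main(args: Array[String]): Unit = {
    val sum = new AtomicInteger(0)
    val actor = new Actor[Int](Strategy.sequential)(i => { sum.addAndGet(i); () })
    (1 to 10).foreach(actor ! _)
    println(s"sum = ${sum.get}") // prints 55: all ten messages were processed before this line
  }
}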
| anancds/scala-project | fpis/src/main/scala/com/cds/learn/chapter7/Actor.scala | Scala | mit | 4,342 |
package com.twitter.server
import com.twitter.app.App
import com.twitter.finagle.stats.{StatsReceiver, LoadedStatsReceiver}
trait Stats { app: App =>
/**
* This returns the global [[LoadedStatsReceiver]] instance.
*
* @return a [[StatsReceiver]] instance.
* @see [[com.twitter.finagle.stats.LoadedStatsReceiver]]
*/
def statsReceiver: StatsReceiver = LoadedStatsReceiver
}
| twitter/twitter-server | server/src/main/scala/com/twitter/server/Stats.scala | Scala | apache-2.0 | 395 |
package io.findify.s3mock.route
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Directives._
import com.typesafe.scalalogging.LazyLogging
import io.findify.s3mock.provider.Provider
/**
* Created by shutty on 8/19/16.
*/
case class ListBuckets()(implicit provider:Provider) extends LazyLogging {
def route() = get {
complete {
logger.debug("listing all buckets")
HttpResponse(
StatusCodes.OK,
entity = HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), provider.listBuckets.toXML.toString)
)
}
}
}
| findify/s3mock | src/main/scala/io/findify/s3mock/route/ListBuckets.scala | Scala | mit | 593 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views
import java.util.Locale
import org.scalatestplus.play.PlaySpec
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.Application
import play.api.i18n.{Lang, Messages, MessagesApi}
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.mvc.{Call, PathBindable}
import play.api.test.Helpers._
class LanguageSelectionSpec extends PlaySpec with GuiceOneAppPerSuite {
override implicit lazy val app: Application = new GuiceApplicationBuilder()
.configure(Map("play.i18n.langs" -> Seq("en", "cy", "es")))
.build()
val messagesApi: MessagesApi = app.injector.instanceOf[MessagesApi]
val messagesEnglish: Messages = messagesApi.preferred(Seq(Lang(new Locale("en"))))
val messagesWelsh: Messages = messagesApi.preferred(Seq(Lang(new Locale("cy"))))
val messagesSpanish: Messages = messagesApi.preferred(Seq(Lang(new Locale("es"))))
val EnglishLangCode = "en"
val WelshLangCode = "cy"
val English: Lang = Lang(EnglishLangCode)
val Welsh: Lang = Lang(WelshLangCode)
def languageMap: Map[String, Lang] = Map("english" -> English, "cymraeg" -> Welsh)
def langToUrl(lang: String): Call = Call("GET", "/language/" + implicitly[PathBindable[String]].unbind("lang", lang))
"Language selection template view" should {
"give a link to switch to Welsh when current language is English" in {
val html = views.html.language_selection.render(languageMap, langToUrl, None, None, messagesEnglish)
contentType(html) must be("text/html")
contentAsString(html) must include(messagesEnglish("id=\\"cymraeg-switch\\""))
contentAsString(html) must include("/language/cymraeg")
}
"show correct current language message when current language is English" in {
val html = views.html.language_selection.render(languageMap, langToUrl, None, None, messagesEnglish)
contentType(html) must be("text/html")
contentAsString(html) must include("English")
contentAsString(html) must not include ">English<"
}
"give a link to switch to English when current language is Welsh" in {
val html = views.html.language_selection.render(languageMap, langToUrl, None, None, messagesWelsh)
contentType(html) must be("text/html")
contentAsString(html) must include(messagesEnglish("id=\\"english-switch\\""))
contentAsString(html) must include("/language/english")
}
"show correct current language message when current language is Welsh" in {
val html = views.html.language_selection.render(languageMap, langToUrl, None, None, messagesWelsh)
contentType(html) must be("text/html")
contentAsString(html) must include("Cymraeg")
contentAsString(html) must not include ">Cymraeg<"
}
"show a custom class if it is set" in {
val html = views.html.language_selection.render(languageMap, langToUrl, Some("float--right"), None, messagesWelsh)
contentType(html) must be("text/html")
contentAsString(html) must include("class=\\"float--right\\"")
}
"show a data-journey-click attribute for GA if it is set and language is Welsh" in {
val html = views.html.language_selection.render(languageMap, langToUrl, None, Some("appName"), messagesWelsh)
contentType(html) must be("text/html")
contentAsString(html) must include("data-journey-click=\\"appName:language: en\\"")
}
"show a data-journey-click attribute for GA if it is set and language is English" in {
val html = views.html.language_selection.render(languageMap, langToUrl, None, Some("appName"), messagesEnglish)
contentType(html) must be("text/html")
contentAsString(html) must include("data-journey-click=\\"appName:language: cy\\"")
}
"show correct current language message when current language is Spanish" in {
val Spanish = Lang("es")
val mockLanguageMap = Map("english" -> English, "cymraeg" -> Welsh, "español" -> Spanish)
val html = views.html.language_selection.render(mockLanguageMap, langToUrl, None, None, messagesSpanish)
contentType(html) must be("text/html")
contentAsString(html) must include("Español")
contentAsString(html) must not include ">Español<"
}
}
}
| hmrc/play-language | src/test/scala/views/LanguageSelectionSpec.scala | Scala | apache-2.0 | 4,955 |
package com.monsanto.arch.kamon.prometheus
import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.event.Logging
import kamon.Kamon
import scala.concurrent.duration.{Duration, DurationInt}
import scala.concurrent.{Await, Future, Promise}
import scala.util.control.NonFatal
/** Provides the necessary magic to register the extension with Kamon. */
object Prometheus extends ExtensionId[PrometheusExtension] with ExtensionIdProvider {
/** Promise that will be fulfilled once Kamon starts the Prometheus extension. */
private val kamonInstancePromise = Promise[PrometheusExtension]()
/** Lazily initiates loading of the Kamon Prometheus module by starting
* Kamon and returns a future that will return the instance started by
* Kamon.
*
* @see [[awaitKamonInstance]]
*/
lazy val kamonInstance: Future[PrometheusExtension] = {
Kamon.start()
kamonInstancePromise.future
}
/** Awaits and returns the value of [[kamonInstance]]
*
* @param timeout the amount of time to wait for Kamon to load the
* Prometheus module
*/
def awaitKamonInstance(timeout: Duration = 1.second): PrometheusExtension =
Await.result(kamonInstance, timeout)
override def createExtension(system: ExtendedActorSystem): PrometheusExtension = {
system.name match {
case "kamon" ⇒
try {
val extension = new PrometheusExtension(system)
kamonInstancePromise.success(extension)
extension
} catch {
case NonFatal(e) ⇒
kamonInstancePromise.failure(e)
throw e
}
case other ⇒
val log = Logging(system, this.getClass)
log.warning("Creating a new Prometheus extension for the actor " +
s"system $other, maybe you should just use " +
s"Prometheus.awaitKamonInstance or Prometheus.kamonInstance?")
new PrometheusExtension(system)
}
}
override def lookup(): ExtensionId[_ <: Extension] = Prometheus
}
| MonsantoCo/kamon-prometheus | library/src/main/scala/com/monsanto/arch/kamon/prometheus/Prometheus.scala | Scala | bsd-3-clause | 2,040 |
/* Title: Pure/Admin/build_log.scala
Author: Makarius
Management of build log files and database storage.
*/
package isabelle
import java.io.{File => JFile}
import java.time.format.{DateTimeFormatter, DateTimeParseException}
import java.util.Locale
import scala.collection.immutable.SortedMap
import scala.collection.mutable
import scala.util.matching.Regex
object Build_Log
{
/** content **/
/* properties */
object Prop
{
val build_tags = SQL.Column.string("build_tags") // lines
val build_args = SQL.Column.string("build_args") // lines
val build_group_id = SQL.Column.string("build_group_id")
val build_id = SQL.Column.string("build_id")
val build_engine = SQL.Column.string("build_engine")
val build_host = SQL.Column.string("build_host")
val build_start = SQL.Column.date("build_start")
val build_end = SQL.Column.date("build_end")
val isabelle_version = SQL.Column.string("isabelle_version")
val afp_version = SQL.Column.string("afp_version")
val all_props: List[SQL.Column] =
List(build_tags, build_args, build_group_id, build_id, build_engine,
build_host, build_start, build_end, isabelle_version, afp_version)
}
/* settings */
object Settings
{
val ISABELLE_BUILD_OPTIONS = SQL.Column.string("ISABELLE_BUILD_OPTIONS")
val ML_PLATFORM = SQL.Column.string("ML_PLATFORM")
val ML_HOME = SQL.Column.string("ML_HOME")
val ML_SYSTEM = SQL.Column.string("ML_SYSTEM")
val ML_OPTIONS = SQL.Column.string("ML_OPTIONS")
val ml_settings = List(ML_PLATFORM, ML_HOME, ML_SYSTEM, ML_OPTIONS)
val all_settings = ISABELLE_BUILD_OPTIONS :: ml_settings
type Entry = (String, String)
type T = List[Entry]
object Entry
{
def unapply(s: String): Option[Entry] =
s.indexOf('=') match {
case -1 => None
case i =>
val a = s.substring(0, i)
val b = Library.perhaps_unquote(s.substring(i + 1))
Some((a, b))
}
def apply(a: String, b: String): String = a + "=" + quote(b)
def getenv(a: String): String = apply(a, Isabelle_System.getenv(a))
}
def show(): String =
cat_lines(
List(Entry.getenv(ISABELLE_BUILD_OPTIONS.name), "") :::
ml_settings.map(c => Entry.getenv(c.name)))
}
/* file names */
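  // Log file names embed the build date as yyyy-MM-dd plus the number of seconds since local
  // midnight, zero-padded to five digits.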
def log_date(date: Date): String =
String.format(Locale.ROOT, "%s.%05d",
DateTimeFormatter.ofPattern("yyyy-MM-dd").format(date.rep),
new java.lang.Long((date.time - date.midnight.time).ms / 1000))
def log_subdir(date: Date): Path =
Path.explode("log") + Path.explode(date.rep.getYear.toString)
def log_filename(engine: String, date: Date, more: List[String] = Nil): Path =
Path.explode((engine :: log_date(date) :: more).mkString("", "_", ".log"))
/** log file **/
def print_date(date: Date): String = Log_File.Date_Format(date)
object Log_File
{
/* log file */
def plain_name(name: String): String =
{
List(".log", ".log.gz", ".log.xz", ".gz", ".xz").find(name.endsWith(_)) match {
case Some(s) => Library.try_unsuffix(s, name).get
case None => name
}
}
def apply(name: String, lines: List[String]): Log_File =
new Log_File(plain_name(name), lines)
def apply(name: String, text: String): Log_File =
Log_File(name, Library.trim_split_lines(text))
def apply(file: JFile): Log_File =
{
val name = file.getName
val text =
if (name.endsWith(".gz")) File.read_gzip(file)
else if (name.endsWith(".xz")) File.read_xz(file)
else File.read(file)
apply(name, text)
}
def apply(path: Path): Log_File = apply(path.file)
/* log file collections */
def is_log(file: JFile,
prefixes: List[String] =
List(Build_History.log_prefix, Identify.log_prefix, Identify.log_prefix2,
Isatest.log_prefix, AFP_Test.log_prefix, Jenkins.log_prefix),
suffixes: List[String] = List(".log", ".log.gz", ".log.xz")): Boolean =
{
val name = file.getName
prefixes.exists(name.startsWith(_)) &&
suffixes.exists(name.endsWith(_)) &&
name != "isatest.log" &&
name != "afp-test.log" &&
name != "main.log"
}
/* date format */
val Date_Format =
{
val fmts =
Date.Formatter.variants(
List("EEE MMM d HH:mm:ss O yyyy", "EEE MMM d HH:mm:ss VV yyyy"),
List(Locale.ENGLISH, Locale.GERMAN)) :::
List(
DateTimeFormatter.RFC_1123_DATE_TIME,
Date.Formatter.pattern("EEE MMM d HH:mm:ss yyyy").withZone(Date.timezone_berlin))
def tune_timezone(s: String): String =
s match {
case "CET" | "MET" => "GMT+1"
case "CEST" | "MEST" => "GMT+2"
case "EST" => "Europe/Berlin"
case _ => s
}
def tune_weekday(s: String): String =
s match {
case "Die" => "Di"
case "Mit" => "Mi"
case "Don" => "Do"
case "Fre" => "Fr"
case "Sam" => "Sa"
case "Son" => "So"
case _ => s
}
def tune(s: String): String =
Word.implode(
Word.explode(s) match {
case a :: "M\\uFFFDr" :: bs => tune_weekday(a) :: "Mär" :: bs.map(tune_timezone(_))
case a :: bs => tune_weekday(a) :: bs.map(tune_timezone(_))
case Nil => Nil
}
)
Date.Format.make(fmts, tune)
}
/* inlined content */
def print_props(marker: String, props: Properties.T): String =
marker + YXML.string_of_body(XML.Encode.properties(Properties.encode_lines(props)))
}
class Log_File private(val name: String, val lines: List[String])
{
log_file =>
override def toString: String = name
def text: String = cat_lines(lines)
def err(msg: String): Nothing =
error("Error in log file " + quote(name) + ": " + msg)
/* date format */
object Strict_Date
{
def unapply(s: String): Some[Date] =
try { Some(Log_File.Date_Format.parse(s)) }
catch { case exn: DateTimeParseException => log_file.err(exn.getMessage) }
}
/* inlined content */
def find[A](f: String => Option[A]): Option[A] =
lines.iterator.map(f).find(_.isDefined).map(_.get)
def find_line(marker: String): Option[String] =
find(Library.try_unprefix(marker, _))
def find_match(regexes: List[Regex]): Option[String] =
regexes match {
case Nil => None
case regex :: rest =>
lines.iterator.map(regex.unapplySeq(_)).find(res => res.isDefined && res.get.length == 1).
map(res => res.get.head) orElse find_match(rest)
}
/* settings */
def get_setting(a: String): Option[Settings.Entry] =
lines.find(_.startsWith(a + "=")) match {
case Some(line) => Settings.Entry.unapply(line)
case None => None
}
def get_all_settings: Settings.T =
for { c <- Settings.all_settings; entry <- get_setting(c.name) }
yield entry
/* properties (YXML) */
val xml_cache = XML.make_cache()
def parse_props(text: String): Properties.T =
try {
xml_cache.props(Properties.decode_lines(XML.Decode.properties(YXML.parse_body(text))))
}
catch { case _: XML.Error => log_file.err("malformed properties") }
def filter_lines(marker: String): List[String] =
for (line <- lines; s <- Library.try_unprefix(marker, line)) yield s
def filter_props(marker: String): List[Properties.T] =
for (s <- filter_lines(marker) if YXML.detect(s)) yield parse_props(s)
def find_props(marker: String): Option[Properties.T] =
find_line(marker) match {
case Some(text) if YXML.detect(text) => Some(parse_props(text))
case _ => None
}
/* parse various formats */
def parse_meta_info(): Meta_Info = Build_Log.parse_meta_info(log_file)
def parse_build_info(ml_statistics: Boolean = false): Build_Info =
Build_Log.parse_build_info(log_file, ml_statistics)
def parse_session_info(
command_timings: Boolean = false,
theory_timings: Boolean = false,
ml_statistics: Boolean = false,
task_statistics: Boolean = false): Session_Info =
Build_Log.parse_session_info(
log_file, command_timings, theory_timings, ml_statistics, task_statistics)
}
/** digested meta info: produced by Admin/build_history in log.xz file **/
object Meta_Info
{
val empty: Meta_Info = Meta_Info(Nil, Nil)
}
sealed case class Meta_Info(props: Properties.T, settings: Settings.T)
{
def is_empty: Boolean = props.isEmpty && settings.isEmpty
def get(c: SQL.Column): Option[String] =
Properties.get(props, c.name) orElse
Properties.get(settings, c.name)
def get_date(c: SQL.Column): Option[Date] =
get(c).map(Log_File.Date_Format.parse(_))
}
object Identify
{
val log_prefix = "isabelle_identify_"
val log_prefix2 = "plain_identify_"
def engine(log_file: Log_File): String =
if (log_file.name.startsWith(Jenkins.log_prefix)) "jenkins_identify"
else if (log_file.name.startsWith(log_prefix2)) "plain_identify"
else "identify"
def content(date: Date, isabelle_version: Option[String], afp_version: Option[String]): String =
terminate_lines(
List("isabelle_identify: " + Build_Log.print_date(date), "") :::
isabelle_version.map("Isabelle version: " + _).toList :::
afp_version.map("AFP version: " + _).toList)
val Start = new Regex("""^isabelle_identify: (.+)$""")
val No_End = new Regex("""$.""")
val Isabelle_Version = List(new Regex("""^Isabelle version: (\\S+)$"""))
val AFP_Version = List(new Regex("""^AFP version: (\\S+)$"""))
}
object Isatest
{
val log_prefix = "isatest-makeall-"
val engine = "isatest"
val Start = new Regex("""^------------------- starting test --- (.+) --- (.+)$""")
val End = new Regex("""^------------------- test (?:successful|FAILED) --- (.+) --- .*$""")
    val Isabelle_Version = List(new Regex("""^Isabelle version: (\S+)$"""))
}
object AFP_Test
{
val log_prefix = "afp-test-devel-"
val engine = "afp-test"
val Start = new Regex("""^Start test(?: for .+)? at ([^,]+), (.*)$""")
val Start_Old = new Regex("""^Start test(?: for .+)? at ([^,]+)$""")
val End = new Regex("""^End test on (.+), .+, elapsed time:.*$""")
val Isabelle_Version = List(new Regex("""^Isabelle version: .* -- hg id (\S+)$"""))
val AFP_Version = List(new Regex("""^AFP version: .* -- hg id (\S+)$"""))
val Bad_Init = new Regex("""^cp:.*: Disc quota exceeded$""")
}
object Jenkins
{
val log_prefix = "jenkins_"
val engine = "jenkins"
val Host = new Regex("""^Building remotely on (\S+) \((\S+)\).*$""")
val Start = new Regex("""^(?:Started by an SCM change|Started from command line by admin|).*$""")
val Start_Date = new Regex("""^Build started at (.+)$""")
val No_End = new Regex("""$.""")
val Isabelle_Version =
List(new Regex("""^(?:Build for Isabelle id|Isabelle id) (\w+).*$"""),
new Regex("""^ISABELLE_CI_REPO_ID="(\w+)".*$"""),
new Regex("""^(\w{12}) tip.*$"""))
val AFP_Version =
List(new Regex("""^(?:Build for AFP id|AFP id) (\w+).*$"""),
new Regex("""^ISABELLE_CI_AFP_ID="(\w+)".*$"""))
val CONFIGURATION = "=== CONFIGURATION ==="
val BUILD = "=== BUILD ==="
}
private def parse_meta_info(log_file: Log_File): Meta_Info =
{
def parse(engine: String, host: String, start: Date,
End: Regex, Isabelle_Version: List[Regex], AFP_Version: List[Regex]): Meta_Info =
{
val build_id =
{
val prefix = proper_string(host) orElse proper_string(engine) getOrElse "build"
prefix + ":" + start.time.ms
}
val build_engine = if (engine == "") Nil else List(Prop.build_engine.name -> engine)
val build_host = if (host == "") Nil else List(Prop.build_host.name -> host)
val start_date = List(Prop.build_start.name -> print_date(start))
val end_date =
log_file.lines.last match {
case End(log_file.Strict_Date(end_date)) =>
List(Prop.build_end.name -> print_date(end_date))
case _ => Nil
}
val isabelle_version =
log_file.find_match(Isabelle_Version).map(Prop.isabelle_version.name -> _)
val afp_version =
log_file.find_match(AFP_Version).map(Prop.afp_version.name -> _)
Meta_Info((Prop.build_id.name -> build_id) :: build_engine ::: build_host :::
start_date ::: end_date ::: isabelle_version.toList ::: afp_version.toList,
log_file.get_all_settings)
}
log_file.lines match {
case line :: _ if line.startsWith(Build_History.META_INFO_MARKER) =>
Meta_Info(log_file.find_props(Build_History.META_INFO_MARKER).get,
log_file.get_all_settings)
case Identify.Start(log_file.Strict_Date(start)) :: _ =>
parse(Identify.engine(log_file), "", start, Identify.No_End,
Identify.Isabelle_Version, Identify.AFP_Version)
case Isatest.Start(log_file.Strict_Date(start), host) :: _ =>
parse(Isatest.engine, host, start, Isatest.End,
Isatest.Isabelle_Version, Nil)
case AFP_Test.Start(log_file.Strict_Date(start), host) :: _ =>
parse(AFP_Test.engine, host, start, AFP_Test.End,
AFP_Test.Isabelle_Version, AFP_Test.AFP_Version)
case AFP_Test.Start_Old(log_file.Strict_Date(start)) :: _ =>
parse(AFP_Test.engine, "", start, AFP_Test.End,
AFP_Test.Isabelle_Version, AFP_Test.AFP_Version)
case Jenkins.Start() :: _ =>
log_file.lines.dropWhile(_ != Jenkins.BUILD) match {
case Jenkins.BUILD :: _ :: Jenkins.Start_Date(log_file.Strict_Date(start)) :: _ =>
val host =
log_file.lines.takeWhile(_ != Jenkins.CONFIGURATION).collectFirst({
case Jenkins.Host(a, b) => a + "." + b
}).getOrElse("")
parse(Jenkins.engine, host, start.to(Date.timezone_berlin), Jenkins.No_End,
Jenkins.Isabelle_Version, Jenkins.AFP_Version)
case _ => Meta_Info.empty
}
case line :: _ if line.startsWith("\u0000") => Meta_Info.empty
case List(Isatest.End(_)) => Meta_Info.empty
case _ :: AFP_Test.Bad_Init() :: _ => Meta_Info.empty
case Nil => Meta_Info.empty
case _ => log_file.err("cannot detect log file format")
}
}
/** build info: toplevel output of isabelle build or Admin/build_history **/
val THEORY_TIMING_MARKER = "\ftheory_timing = "
val ML_STATISTICS_MARKER = "\fML_statistics = "
val ERROR_MESSAGE_MARKER = "\ferror_message = "
val SESSION_NAME = "session_name"
object Session_Status extends Enumeration
{
val existing, finished, failed, cancelled = Value
}
sealed case class Session_Entry(
chapter: String = "",
groups: List[String] = Nil,
threads: Option[Int] = None,
timing: Timing = Timing.zero,
ml_timing: Timing = Timing.zero,
sources: Option[String] = None,
heap_size: Option[Long] = None,
status: Option[Session_Status.Value] = None,
errors: List[String] = Nil,
theory_timings: Map[String, Timing] = Map.empty,
ml_statistics: List[Properties.T] = Nil)
{
def proper_groups: Option[String] = if (groups.isEmpty) None else Some(cat_lines(groups))
def finished: Boolean = status == Some(Session_Status.finished)
def failed: Boolean = status == Some(Session_Status.failed)
}
object Build_Info
{
val sessions_dummy: Map[String, Session_Entry] =
Map("" -> Session_Entry(theory_timings = Map("" -> Timing.zero)))
}
sealed case class Build_Info(sessions: Map[String, Session_Entry])
{
def finished_sessions: List[String] = for ((a, b) <- sessions.toList if b.finished) yield a
def failed_sessions: List[String] = for ((a, b) <- sessions.toList if b.failed) yield a
}
private def parse_build_info(log_file: Log_File, parse_ml_statistics: Boolean): Build_Info =
{
object Chapter_Name
{
def unapply(s: String): Some[(String, String)] =
space_explode('/', s) match {
case List(chapter, name) => Some((chapter, name))
case _ => Some(("", s))
}
}
val Session_No_Groups = new Regex("""^Session (\S+)$""")
val Session_Groups = new Regex("""^Session (\S+) \((.*)\)$""")
val Session_Finished1 =
new Regex("""^Finished (\S+) \((\d+):(\d+):(\d+) elapsed time, (\d+):(\d+):(\d+) cpu time.*$""")
val Session_Finished2 =
new Regex("""^Finished (\S+) \((\d+):(\d+):(\d+) elapsed time.*$""")
val Session_Timing =
new Regex("""^Timing (\S+) \((\d+) threads, (\d+\.\d+)s elapsed time, (\d+\.\d+)s cpu time, (\d+\.\d+)s GC time.*$""")
val Session_Started = new Regex("""^(?:Running|Building) (\S+) \.\.\.$""")
val Session_Failed = new Regex("""^(\S+) FAILED""")
val Session_Cancelled = new Regex("""^(\S+) CANCELLED""")
val Sources = new Regex("""^Sources (\S+) (\S{""" + SHA1.digest_length + """})$""")
val Heap = new Regex("""^Heap (\S+) \((\d+) bytes\)$""")
object Theory_Timing
{
def unapply(line: String): Option[(String, (String, Timing))] =
{
val line1 = line.replace('~', '-')
Library.try_unprefix(THEORY_TIMING_MARKER, line1).map(log_file.parse_props(_)) match {
case Some((SESSION_NAME, name) :: props) =>
(props, props) match {
case (Markup.Name(thy), Markup.Timing_Properties(t)) => Some((name, thy -> t))
case _ => None
}
case _ => None
}
}
}
var chapter = Map.empty[String, String]
var groups = Map.empty[String, List[String]]
var threads = Map.empty[String, Int]
var timing = Map.empty[String, Timing]
var ml_timing = Map.empty[String, Timing]
var started = Set.empty[String]
var failed = Set.empty[String]
var cancelled = Set.empty[String]
var sources = Map.empty[String, String]
var heap_sizes = Map.empty[String, Long]
var theory_timings = Map.empty[String, Map[String, Timing]]
var ml_statistics = Map.empty[String, List[Properties.T]]
var errors = Map.empty[String, List[String]]
def all_sessions: Set[String] =
chapter.keySet ++ groups.keySet ++ threads.keySet ++ timing.keySet ++ ml_timing.keySet ++
failed ++ cancelled ++ started ++ sources.keySet ++ heap_sizes.keySet ++
theory_timings.keySet ++ ml_statistics.keySet
for (line <- log_file.lines) {
line match {
case Session_No_Groups(Chapter_Name(chapt, name)) =>
chapter += (name -> chapt)
groups += (name -> Nil)
case Session_Groups(Chapter_Name(chapt, name), grps) =>
chapter += (name -> chapt)
groups += (name -> Word.explode(grps))
case Session_Started(name) =>
started += name
case Session_Finished1(name,
Value.Int(e1), Value.Int(e2), Value.Int(e3),
Value.Int(c1), Value.Int(c2), Value.Int(c3)) =>
val elapsed = Time.hms(e1, e2, e3)
val cpu = Time.hms(c1, c2, c3)
timing += (name -> Timing(elapsed, cpu, Time.zero))
case Session_Finished2(name,
Value.Int(e1), Value.Int(e2), Value.Int(e3)) =>
val elapsed = Time.hms(e1, e2, e3)
timing += (name -> Timing(elapsed, Time.zero, Time.zero))
case Session_Timing(name,
Value.Int(t), Value.Double(e), Value.Double(c), Value.Double(g)) =>
val elapsed = Time.seconds(e)
val cpu = Time.seconds(c)
val gc = Time.seconds(g)
ml_timing += (name -> Timing(elapsed, cpu, gc))
threads += (name -> t)
case Sources(name, s) =>
sources += (name -> s)
case Heap(name, Value.Long(size)) =>
heap_sizes += (name -> size)
case _ if line.startsWith(THEORY_TIMING_MARKER) && YXML.detect(line) =>
line match {
case Theory_Timing(name, theory_timing) =>
theory_timings += (name -> (theory_timings.getOrElse(name, Map.empty) + theory_timing))
case _ => log_file.err("malformed theory_timing " + quote(line))
}
case _ if parse_ml_statistics && line.startsWith(ML_STATISTICS_MARKER) && YXML.detect(line) =>
Library.try_unprefix(ML_STATISTICS_MARKER, line).map(log_file.parse_props(_)) match {
case Some((SESSION_NAME, name) :: props) =>
ml_statistics += (name -> (props :: ml_statistics.getOrElse(name, Nil)))
case _ => log_file.err("malformed ML_statistics " + quote(line))
}
case _ if line.startsWith(ERROR_MESSAGE_MARKER) && YXML.detect(line) =>
Library.try_unprefix(ERROR_MESSAGE_MARKER, line).map(log_file.parse_props(_)) match {
case Some(List((SESSION_NAME, name), (Markup.CONTENT, msg))) =>
errors += (name -> (Library.decode_lines(msg) :: errors.getOrElse(name, Nil)))
case _ => log_file.err("malformed error message " + quote(line))
}
case _ =>
}
}
val sessions =
Map(
(for (name <- all_sessions.toList) yield {
val status =
if (failed(name)) Session_Status.failed
else if (cancelled(name)) Session_Status.cancelled
else if (timing.isDefinedAt(name) || ml_timing.isDefinedAt(name))
Session_Status.finished
else if (started(name)) Session_Status.failed
else Session_Status.existing
val entry =
Session_Entry(
chapter = chapter.getOrElse(name, ""),
groups = groups.getOrElse(name, Nil),
threads = threads.get(name),
timing = timing.getOrElse(name, Timing.zero),
ml_timing = ml_timing.getOrElse(name, Timing.zero),
sources = sources.get(name),
heap_size = heap_sizes.get(name),
status = Some(status),
errors = errors.getOrElse(name, Nil).reverse,
theory_timings = theory_timings.getOrElse(name, Map.empty),
ml_statistics = ml_statistics.getOrElse(name, Nil).reverse)
(name -> entry)
}):_*)
Build_Info(sessions)
}
/** session info: produced by isabelle build as session log.gz file **/
sealed case class Session_Info(
session_timing: Properties.T,
command_timings: List[Properties.T],
theory_timings: List[Properties.T],
ml_statistics: List[Properties.T],
task_statistics: List[Properties.T],
errors: List[String])
{
def error(s: String): Session_Info =
copy(errors = errors ::: List(s))
}
private def parse_session_info(
log_file: Log_File,
command_timings: Boolean,
theory_timings: Boolean,
ml_statistics: Boolean,
task_statistics: Boolean): Session_Info =
{
Session_Info(
session_timing = log_file.find_props("\fTiming = ") getOrElse Nil,
command_timings = if (command_timings) log_file.filter_props("\fcommand_timing = ") else Nil,
theory_timings = if (theory_timings) log_file.filter_props(THEORY_TIMING_MARKER) else Nil,
ml_statistics = if (ml_statistics) log_file.filter_props(ML_STATISTICS_MARKER) else Nil,
task_statistics = if (task_statistics) log_file.filter_props("\ftask_statistics = ") else Nil,
errors = log_file.filter_lines(ERROR_MESSAGE_MARKER).map(Library.decode_lines(_)))
}
def compress_errors(errors: List[String], cache: XZ.Cache = XZ.cache()): Option[Bytes] =
if (errors.isEmpty) None
else {
Some(Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.string)(errors))).
compress(cache = cache))
}
def uncompress_errors(bytes: Bytes, cache: XZ.Cache = XZ.cache()): List[String] =
if (bytes.isEmpty) Nil
else {
XML.Decode.list(YXML.string_of_body(_))(YXML.parse_body(bytes.uncompress(cache = cache).text))
}
/** persistent store **/
/* SQL data model */
object Data
{
def build_log_table(name: String, columns: List[SQL.Column], body: String = ""): SQL.Table =
SQL.Table("isabelle_build_log_" + name, columns, body)
/* main content */
val log_name = SQL.Column.string("log_name").make_primary_key
val session_name = SQL.Column.string("session_name").make_primary_key
val theory_name = SQL.Column.string("theory_name").make_primary_key
val chapter = SQL.Column.string("chapter")
val groups = SQL.Column.string("groups")
val threads = SQL.Column.int("threads")
val timing_elapsed = SQL.Column.long("timing_elapsed")
val timing_cpu = SQL.Column.long("timing_cpu")
val timing_gc = SQL.Column.long("timing_gc")
val timing_factor = SQL.Column.double("timing_factor")
val ml_timing_elapsed = SQL.Column.long("ml_timing_elapsed")
val ml_timing_cpu = SQL.Column.long("ml_timing_cpu")
val ml_timing_gc = SQL.Column.long("ml_timing_gc")
val ml_timing_factor = SQL.Column.double("ml_timing_factor")
val theory_timing_elapsed = SQL.Column.long("theory_timing_elapsed")
val theory_timing_cpu = SQL.Column.long("theory_timing_cpu")
val theory_timing_gc = SQL.Column.long("theory_timing_gc")
val heap_size = SQL.Column.long("heap_size")
val status = SQL.Column.string("status")
val errors = SQL.Column.bytes("errors")
val sources = SQL.Column.string("sources")
val ml_statistics = SQL.Column.bytes("ml_statistics")
val known = SQL.Column.bool("known")
val meta_info_table =
build_log_table("meta_info", log_name :: Prop.all_props ::: Settings.all_settings)
val sessions_table =
build_log_table("sessions",
List(log_name, session_name, chapter, groups, threads, timing_elapsed, timing_cpu,
timing_gc, timing_factor, ml_timing_elapsed, ml_timing_cpu, ml_timing_gc, ml_timing_factor,
heap_size, status, errors, sources))
val theories_table =
build_log_table("theories",
List(log_name, session_name, theory_name, theory_timing_elapsed, theory_timing_cpu,
theory_timing_gc))
val ml_statistics_table =
build_log_table("ml_statistics", List(log_name, session_name, ml_statistics))
/* AFP versions */
val isabelle_afp_versions_table: SQL.Table =
{
val version1 = Prop.isabelle_version
val version2 = Prop.afp_version
build_log_table("isabelle_afp_versions", List(version1.make_primary_key, version2),
SQL.select(List(version1, version2), distinct = true) + meta_info_table +
" WHERE " + version1.defined + " AND " + version2.defined)
}
/* earliest pull date for repository version (PostgreSQL queries) */
def pull_date(afp: Boolean = false) =
if (afp) SQL.Column.date("afp_pull_date")
else SQL.Column.date("pull_date")
def pull_date_table(afp: Boolean = false): SQL.Table =
{
val (name, versions) =
if (afp) ("afp_pull_date", List(Prop.isabelle_version, Prop.afp_version))
else ("pull_date", List(Prop.isabelle_version))
build_log_table(name, versions.map(_.make_primary_key) ::: List(pull_date(afp)),
"SELECT " + versions.mkString(", ") +
", min(" + Prop.build_start + ") AS " + pull_date(afp) +
" FROM " + meta_info_table +
" WHERE " + (versions ::: List(Prop.build_start)).map(_.defined).mkString(" AND ") +
" GROUP BY " + versions.mkString(", "))
}
/* recent entries */
def recent_time(days: Int): SQL.Source =
"now() - INTERVAL '" + days.max(0) + " days'"
def recent_pull_date_table(
days: Int, rev: String = "", afp_rev: Option[String] = None): SQL.Table =
{
val afp = afp_rev.isDefined
val rev2 = afp_rev.getOrElse("")
val table = pull_date_table(afp)
val version1 = Prop.isabelle_version
val version2 = Prop.afp_version
val eq1 = version1(table) + " = " + SQL.string(rev)
val eq2 = version2(table) + " = " + SQL.string(rev2)
SQL.Table("recent_pull_date", table.columns,
table.select(table.columns,
"WHERE " + pull_date(afp)(table) + " > " + recent_time(days) +
(if (rev != "" && rev2 == "") " OR " + eq1
else if (rev == "" && rev2 != "") " OR " + eq2
else if (rev != "" && rev2 != "") " OR (" + eq1 + " AND " + eq2 + ")"
else "")))
}
def select_recent_log_names(days: Int): SQL.Source =
{
val table1 = meta_info_table
val table2 = recent_pull_date_table(days)
table1.select(List(log_name), distinct = true) + SQL.join_inner + table2.query_named +
" ON " + Prop.isabelle_version(table1) + " = " + Prop.isabelle_version(table2)
}
def select_recent_versions(days: Int,
rev: String = "", afp_rev: Option[String] = None, sql: SQL.Source = ""): SQL.Source =
{
val afp = afp_rev.isDefined
val version = Prop.isabelle_version
val table1 = recent_pull_date_table(days, rev = rev, afp_rev = afp_rev)
val table2 = meta_info_table
val aux_table = SQL.Table("aux", table2.columns, table2.select(sql = sql))
val columns =
table1.columns.map(c => c(table1)) :::
List(known.copy(expr = log_name(aux_table).defined))
SQL.select(columns, distinct = true) +
table1.query_named + SQL.join_outer + aux_table.query_named +
" ON " + version(table1) + " = " + version(aux_table) +
" ORDER BY " + pull_date(afp)(table1) + " DESC"
}
/* universal view on main data */
val universal_table: SQL.Table =
{
val afp_pull_date = pull_date(afp = true)
val version1 = Prop.isabelle_version
val version2 = Prop.afp_version
val table1 = meta_info_table
val table2 = pull_date_table(afp = true)
val table3 = pull_date_table()
val a_columns = log_name :: afp_pull_date :: table1.columns.tail
val a_table =
SQL.Table("a", a_columns,
SQL.select(List(log_name, afp_pull_date) ::: table1.columns.tail.map(_.apply(table1))) +
table1 + SQL.join_outer + table2 +
" ON " + version1(table1) + " = " + version1(table2) +
" AND " + version2(table1) + " = " + version2(table2))
val b_columns = log_name :: pull_date() :: a_columns.tail
val b_table =
SQL.Table("b", b_columns,
SQL.select(
List(log_name(a_table), pull_date()(table3)) ::: a_columns.tail.map(_.apply(a_table))) +
a_table.query_named + SQL.join_outer + table3 +
" ON " + version1(a_table) + " = " + version1(table3))
val c_columns = b_columns ::: sessions_table.columns.tail
val c_table =
SQL.Table("c", c_columns,
SQL.select(log_name(b_table) :: c_columns.tail) +
b_table.query_named + SQL.join_inner + sessions_table +
" ON " + log_name(b_table) + " = " + log_name(sessions_table))
SQL.Table("isabelle_build_log", c_columns ::: List(ml_statistics),
{
SQL.select(c_columns.map(_.apply(c_table)) ::: List(ml_statistics)) +
c_table.query_named + SQL.join_outer + ml_statistics_table +
" ON " + log_name(c_table) + " = " + log_name(ml_statistics_table) +
" AND " + session_name(c_table) + " = " + session_name(ml_statistics_table)
})
}
}
/* database access */
def store(options: Options): Store = new Store(options)
class Store private[Build_Log](options: Options)
{
val xml_cache: XML.Cache = XML.make_cache()
val xz_cache: XZ.Cache = XZ.make_cache()
def open_database(
user: String = options.string("build_log_database_user"),
password: String = options.string("build_log_database_password"),
database: String = options.string("build_log_database_name"),
host: String = options.string("build_log_database_host"),
port: Int = options.int("build_log_database_port"),
ssh_host: String = options.string("build_log_ssh_host"),
ssh_user: String = options.string("build_log_ssh_user"),
ssh_port: Int = options.int("build_log_ssh_port")): PostgreSQL.Database =
{
PostgreSQL.open_database(
user = user, password = password, database = database, host = host, port = port,
ssh =
if (ssh_host == "") None
else Some(SSH.open_session(options, host = ssh_host, user = ssh_user, port = ssh_port)),
ssh_close = true)
}
def update_database(db: PostgreSQL.Database, dirs: List[Path], ml_statistics: Boolean = false)
{
val log_files =
dirs.flatMap(dir =>
File.find_files(dir.file, pred = Log_File.is_log(_), follow_links = true))
write_info(db, log_files, ml_statistics = ml_statistics)
db.create_view(Data.pull_date_table())
db.create_view(Data.pull_date_table(afp = true))
db.create_view(Data.universal_table)
}
def snapshot_database(db: PostgreSQL.Database, sqlite_database: Path,
days: Int = 100, ml_statistics: Boolean = false)
{
Isabelle_System.mkdirs(sqlite_database.dir)
sqlite_database.file.delete
using(SQLite.open_database(sqlite_database))(db2 =>
{
db.transaction {
db2.transaction {
// main content
db2.create_table(Data.meta_info_table)
db2.create_table(Data.sessions_table)
db2.create_table(Data.theories_table)
db2.create_table(Data.ml_statistics_table)
val recent_log_names =
db.using_statement(Data.select_recent_log_names(days))(stmt =>
stmt.execute_query().iterator(_.string(Data.log_name)).toList)
for (log_name <- recent_log_names) {
read_meta_info(db, log_name).foreach(meta_info =>
update_meta_info(db2, log_name, meta_info))
update_sessions(db2, log_name, read_build_info(db, log_name))
if (ml_statistics) {
update_ml_statistics(db2, log_name,
read_build_info(db, log_name, ml_statistics = true))
}
}
// pull_date
for (afp <- List(false, true))
{
val afp_rev = if (afp) Some("") else None
val table = Data.pull_date_table(afp)
db2.create_table(table)
db2.using_statement(table.insert())(stmt2 =>
{
db.using_statement(
Data.recent_pull_date_table(days, afp_rev = afp_rev).query)(stmt =>
{
val res = stmt.execute_query()
while (res.next()) {
for ((c, i) <- table.columns.zipWithIndex) {
stmt2.string(i + 1) = res.get_string(c)
}
stmt2.execute()
}
})
})
}
// full view
db2.create_view(Data.universal_table)
}
}
db2.rebuild
})
}
def domain(db: SQL.Database, table: SQL.Table, column: SQL.Column): Set[String] =
db.using_statement(table.select(List(column), distinct = true))(stmt =>
stmt.execute_query().iterator(_.string(column)).toSet)
def update_meta_info(db: SQL.Database, log_name: String, meta_info: Meta_Info)
{
val table = Data.meta_info_table
db.using_statement(db.insert_permissive(table))(stmt =>
{
stmt.string(1) = log_name
for ((c, i) <- table.columns.tail.zipWithIndex) {
if (c.T == SQL.Type.Date)
stmt.date(i + 2) = meta_info.get_date(c)
else
stmt.string(i + 2) = meta_info.get(c)
}
stmt.execute()
})
}
def update_sessions(db: SQL.Database, log_name: String, build_info: Build_Info)
{
val table = Data.sessions_table
db.using_statement(db.insert_permissive(table))(stmt =>
{
val sessions =
if (build_info.sessions.isEmpty) Build_Info.sessions_dummy
else build_info.sessions
for ((session_name, session) <- sessions) {
stmt.string(1) = log_name
stmt.string(2) = session_name
stmt.string(3) = proper_string(session.chapter)
stmt.string(4) = session.proper_groups
stmt.int(5) = session.threads
stmt.long(6) = session.timing.elapsed.proper_ms
stmt.long(7) = session.timing.cpu.proper_ms
stmt.long(8) = session.timing.gc.proper_ms
stmt.double(9) = session.timing.factor
stmt.long(10) = session.ml_timing.elapsed.proper_ms
stmt.long(11) = session.ml_timing.cpu.proper_ms
stmt.long(12) = session.ml_timing.gc.proper_ms
stmt.double(13) = session.ml_timing.factor
stmt.long(14) = session.heap_size
stmt.string(15) = session.status.map(_.toString)
stmt.bytes(16) = compress_errors(session.errors, cache = xz_cache)
stmt.string(17) = session.sources
stmt.execute()
}
})
}
def update_theories(db: SQL.Database, log_name: String, build_info: Build_Info)
{
val table = Data.theories_table
db.using_statement(db.insert_permissive(table))(stmt =>
{
val sessions =
if (build_info.sessions.forall({ case (_, session) => session.theory_timings.isEmpty }))
Build_Info.sessions_dummy
else build_info.sessions
for {
(session_name, session) <- sessions
(theory_name, timing) <- session.theory_timings
} {
stmt.string(1) = log_name
stmt.string(2) = session_name
stmt.string(3) = theory_name
stmt.long(4) = timing.elapsed.ms
stmt.long(5) = timing.cpu.ms
stmt.long(6) = timing.gc.ms
stmt.execute()
}
})
}
def update_ml_statistics(db: SQL.Database, log_name: String, build_info: Build_Info)
{
val table = Data.ml_statistics_table
db.using_statement(db.insert_permissive(table))(stmt =>
{
val ml_stats: List[(String, Option[Bytes])] =
Par_List.map[(String, Session_Entry), (String, Option[Bytes])](
{ case (a, b) => (a, Properties.compress(b.ml_statistics, cache = xz_cache).proper) },
build_info.sessions.iterator.filter(p => p._2.ml_statistics.nonEmpty).toList)
val entries = if (ml_stats.nonEmpty) ml_stats else List("" -> None)
for ((session_name, ml_statistics) <- entries) {
stmt.string(1) = log_name
stmt.string(2) = session_name
stmt.bytes(3) = ml_statistics
stmt.execute()
}
})
}
def write_info(db: SQL.Database, files: List[JFile], ml_statistics: Boolean = false)
{
abstract class Table_Status(table: SQL.Table)
{
db.create_table(table)
private var known: Set[String] = domain(db, table, Data.log_name)
def required(file: JFile): Boolean = !known(Log_File.plain_name(file.getName))
def update_db(db: SQL.Database, log_file: Log_File): Unit
def update(log_file: Log_File)
{
if (!known(log_file.name)) {
update_db(db, log_file)
known += log_file.name
}
}
}
val status =
List(
new Table_Status(Data.meta_info_table) {
override def update_db(db: SQL.Database, log_file: Log_File): Unit =
update_meta_info(db, log_file.name, log_file.parse_meta_info())
},
new Table_Status(Data.sessions_table) {
override def update_db(db: SQL.Database, log_file: Log_File): Unit =
update_sessions(db, log_file.name, log_file.parse_build_info())
},
new Table_Status(Data.theories_table) {
override def update_db(db: SQL.Database, log_file: Log_File): Unit =
update_theories(db, log_file.name, log_file.parse_build_info())
},
new Table_Status(Data.ml_statistics_table) {
override def update_db(db: SQL.Database, log_file: Log_File): Unit =
if (ml_statistics) {
update_ml_statistics(db, log_file.name,
log_file.parse_build_info(ml_statistics = true))
}
})
for (file_group <-
files.filter(file => status.exists(_.required(file))).
grouped(options.int("build_log_transaction_size") max 1))
{
val log_files = Par_List.map[JFile, Log_File](Log_File.apply _, file_group)
db.transaction { log_files.foreach(log_file => status.foreach(_.update(log_file))) }
}
}
def read_meta_info(db: SQL.Database, log_name: String): Option[Meta_Info] =
{
val table = Data.meta_info_table
val columns = table.columns.tail
db.using_statement(table.select(columns, Data.log_name.where_equal(log_name)))(stmt =>
{
val res = stmt.execute_query()
if (!res.next) None
else {
val results =
columns.map(c => c.name ->
(if (c.T == SQL.Type.Date)
res.get_date(c).map(Log_File.Date_Format(_))
else
res.get_string(c)))
val n = Prop.all_props.length
val props = for ((x, Some(y)) <- results.take(n)) yield (x, y)
val settings = for ((x, Some(y)) <- results.drop(n)) yield (x, y)
Some(Meta_Info(props, settings))
}
})
}
def read_build_info(
db: SQL.Database,
log_name: String,
session_names: List[String] = Nil,
ml_statistics: Boolean = false): Build_Info =
{
val table1 = Data.sessions_table
val table2 = Data.ml_statistics_table
val where_log_name =
Data.log_name(table1).where_equal(log_name) + " AND " +
Data.session_name(table1) + " <> ''"
val where =
if (session_names.isEmpty) where_log_name
else where_log_name + " AND " + SQL.member(Data.session_name(table1).ident, session_names)
val columns1 = table1.columns.tail.map(_.apply(table1))
val (columns, from) =
if (ml_statistics) {
val columns = columns1 ::: List(Data.ml_statistics(table2))
val join =
table1 + SQL.join_outer + table2 + " ON " +
Data.log_name(table1) + " = " + Data.log_name(table2) + " AND " +
Data.session_name(table1) + " = " + Data.session_name(table2)
(columns, SQL.enclose(join))
}
else (columns1, table1.ident)
val sessions =
db.using_statement(SQL.select(columns) + from + " " + where)(stmt =>
{
stmt.execute_query().iterator(res =>
{
val session_name = res.string(Data.session_name)
val session_entry =
Session_Entry(
chapter = res.string(Data.chapter),
groups = split_lines(res.string(Data.groups)),
threads = res.get_int(Data.threads),
timing = res.timing(Data.timing_elapsed, Data.timing_cpu, Data.timing_gc),
ml_timing =
res.timing(Data.ml_timing_elapsed, Data.ml_timing_cpu, Data.ml_timing_gc),
sources = res.get_string(Data.sources),
heap_size = res.get_long(Data.heap_size),
status = res.get_string(Data.status).map(Session_Status.withName(_)),
errors = uncompress_errors(res.bytes(Data.errors), cache = xz_cache),
ml_statistics =
if (ml_statistics) {
Properties.uncompress(
res.bytes(Data.ml_statistics), cache = xz_cache, Some(xml_cache))
}
else Nil)
session_name -> session_entry
}).toMap
})
Build_Info(sessions)
}
}
}
| larsrh/libisabelle | modules/pide/2019-RC4/src/main/scala/Admin/build_log.scala | Scala | apache-2.0 | 44,034 |
package com.p3trur0.packtsub
import scala.util.Failure
import scala.util.Success
import scala.util.Try
import com.typesafe.config.ConfigFactory
trait PacktSubConfig
case class PacktConfig(email: String, pwd: String, crawlUrl: String, loginUrl: String) extends PacktSubConfig
case class MailConfig(to: String, subject: String, bodyOK: String, bodyKO: String) extends PacktSubConfig
case class SMTPConfig(host: String, user: String, password: String) extends PacktSubConfig
object PacktSubConfiguration {
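// The keys read below imply an application.conf roughly of this shape
// (a sketch with placeholder values, not the project's actual sample config):
//
//   packt {
//     email = "user@example.com"
//     password = "secret"
//     crawl_address = "https://example.com/free-learning"
//     url = "https://example.com/login"
//   }
//   email {
//     to = "user@example.com"
//     subject = "PacktSub result"
//     body_OK = "Subscription succeeded"
//     body_KO = "Subscription failed"
//     smtp {
//       server = "smtp.example.com"
//       user = "smtp-user"
//       password = "smtp-secret"
//     }
//   }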
val applicationConfig = ConfigFactory.load();
private val packtConfigData: PacktConfig = {
PacktConfig(
applicationConfig.getString("packt.email"),
applicationConfig.getString("packt.password"),
applicationConfig.getString("packt.crawl_address"),
applicationConfig.getString("packt.url"))
}
val mailConfiguration: MailConfig = {
MailConfig(
applicationConfig.getString("email.to"),
applicationConfig.getString("email.subject"),
applicationConfig.getString("email.body_OK"),
applicationConfig.getString("email.body_KO"))
}
val smtpConfiguration: Try[SMTPConfig] = {
Try (SMTPConfig(
applicationConfig.getString("email.smtp.server"),
applicationConfig.getString("email.smtp.user"),
applicationConfig.getString("email.smtp.password")))
}
val packtConfiguration: Option[PacktConfig] =
packtConfigData.email match {
case "[email protected]" => None
case _ => Some(packtConfigData)
}
} | P3trur0/packtsub | src/main/scala/com/p3trur0/packtsub/PacktSubConfiguration.scala | Scala | apache-2.0 | 1,528 |
/* __ *\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ **
** /____/\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\* */
package org.scalajs.testsuite.javalib.util
import java.{util => ju}
import org.scalajs.jasminetest.JasmineTest
import org.scalajs.testsuite.utils.ExpectExceptions
import org.scalajs.testsuite.javalib.util.concurrent.ConcurrentMapFactory
import scala.collection.JavaConversions._
import scala.collection.{mutable => mu}
trait MapTest extends JasmineTest with ExpectExceptions {
def testMapApi(mapFactory: MapFactory): Unit = {
it("should store strings") {
val mp = mapFactory.empty[String, String]
expect(mp.size()).toEqual(0)
mp.put("ONE", "one")
expect(mp.size()).toEqual(1)
expect(mp.get("ONE")).toEqual("one")
mp.put("TWO", "two")
expect(mp.size()).toEqual(2)
expect(mp.get("TWO")).toEqual("two")
}
it("should store integers") {
val mp = mapFactory.empty[Int, Int]
mp.put(100, 12345)
expect(mp.size()).toEqual(1)
val one = mp.get(100)
expect(one).toEqual(12345)
}
it("should store doubles also in corner cases") {
val mp = mapFactory.empty[Double, Double]
mp.put(1.2345, 11111.0)
expect(mp.size()).toEqual(1)
val one = mp.get(1.2345)
expect(one).toEqual(11111.0)
mp.put(Double.NaN, 22222.0)
expect(mp.size()).toEqual(2)
val two = mp.get(Double.NaN)
expect(two).toEqual(22222.0)
mp.put(+0.0, 33333.0)
expect(mp.size()).toEqual(3)
val three = mp.get(+0.0)
expect(three).toEqual(33333.0)
mp.put(-0.0, 44444.0)
expect(mp.size()).toEqual(4)
val four = mp.get(-0.0)
expect(four).toEqual(44444.0)
}
it("should store custom objects") {
case class TestObj(num: Int)
val mp = mapFactory.empty[TestObj, TestObj]
mp.put(TestObj(100), TestObj(12345))
expect(mp.size()).toEqual(1)
val one = mp.get(TestObj(100))
expect(one.num).toEqual(12345)
}
it("should remove stored elements") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
expect(mp.size()).toEqual(1)
expect(mp.remove("ONE")).toEqual("one")
val newOne = mp.get("ONE")
expect(mp.get("ONE")).toBeNull
}
it("should remove stored elements on double corner cases") {
val mp = mapFactory.empty[Double, String]
mp.put(1.2345, "11111.0")
mp.put(Double.NaN, "22222.0")
mp.put(+0.0, "33333.0")
mp.put(-0.0, "44444.0")
expect(mp.get(1.2345)).toEqual("11111.0")
expect(mp.get(Double.NaN)).toEqual("22222.0")
expect(mp.get(+0.0)).toEqual("33333.0")
expect(mp.get(-0.0)).toEqual("44444.0")
expect(mp.remove(-0.0)).toEqual("44444.0")
expect(mp.get(-0.0)).toBeNull
mp.put(-0.0, "55555.0")
expect(mp.remove(+0.0)).toEqual("33333.0")
expect(mp.get(+0.0)).toBeNull
mp.put(+0.0, "66666.0")
expect(mp.remove(Double.NaN)).toEqual("22222.0")
expect(mp.get(Double.NaN)).toBeNull
mp.put(Double.NaN, "77777.0")
mp.clear()
expect(mp.isEmpty).toBeTruthy
}
if (mapFactory.allowsNullKeys) {
it("should put null keys") {
val mp = mapFactory.empty[String, String]
mp.put(null, "one")
expect(mp.get(null)).toEqual("one")
}
} else {
it("should not put null keys") {
val mp = mapFactory.empty[String, String]
expectThrows[NullPointerException](mp.put(null, "one"))
}
}
if (mapFactory.allowsNullValues) {
it("should put null values") {
val mp = mapFactory.empty[String, String]
mp.put("one", null)
expect(mp.get("one")).toEqual(null)
}
} else {
it("should not put null values") {
val mp = mapFactory.empty[String, String]
expectThrows[NullPointerException](mp.put("one", null))
}
}
it("should be cleared with one operation") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
mp.put("TWO", "two")
expect(mp.size()).toEqual(2)
mp.clear()
expect(mp.size()).toEqual(0)
}
it("should check contained key presence") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
expect(mp.containsKey("ONE")).toBeTruthy
expect(mp.containsKey("TWO")).toBeFalsy
expect(mp.containsKey(null)).toBeFalsy
}
it("should check contained value presence") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
expect(mp.containsValue("one")).toBeTruthy
expect(mp.containsValue("two")).toBeFalsy
expect(mp.containsValue(null)).toBeFalsy
}
it("should give proper Collection over values") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
val values = mp.values
expect(values.size).toEqual(1)
val iter = values.iterator
expect(iter.hasNext).toBeTruthy
expect(iter.next).toEqual("one")
expect(iter.hasNext).toBeFalsy
}
it("should give proper EntrySet over key values pairs") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
val entrySet = mp.entrySet
expect(entrySet.size).toEqual(1)
val iter = entrySet.iterator
expect(iter.hasNext).toBeTruthy
val next = iter.next
expect(iter.hasNext).toBeFalsy
expect(next.getKey).toEqual("ONE")
expect(next.getValue).toEqual("one")
}
it("should give proper KeySet over keys") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
val keySet = mp.keySet
expect(keySet.size).toEqual(1)
val iter = keySet.iterator
expect(iter.hasNext).toBeTruthy
expect(iter.next).toEqual("ONE")
expect(iter.hasNext).toBeFalsy
}
it("should put a whole map into") {
val mp = mapFactory.empty[String, String]
val m = mu.Map[String, String](
"X" -> "y")
mp.putAll(mutableMapAsJavaMap(m))
expect(mp.size).toEqual(1)
expect(mp.get("X")).toEqual("y")
val nullMap = mu.Map[String, String](
(null: String) -> "y",
"X" -> "y")
if (mapFactory.allowsNullKeys) {
mp.putAll(mutableMapAsJavaMap(nullMap))
expect(mp.get(null)).toEqual("y")
expect(mp.get("X")).toEqual("y")
} else {
expectThrows[NullPointerException](mp.putAll(mutableMapAsJavaMap(nullMap)))
}
}
class SimpleQueryableMap[K, V](inner: mu.HashMap[K, V])
extends ju.AbstractMap[K, V] {
def entrySet(): java.util.Set[java.util.Map.Entry[K, V]] = {
setAsJavaSet(inner.map {
case (k, v) => new ju.AbstractMap.SimpleImmutableEntry(k, v)
}.toSet)
}
}
it("values should mirror the related map size") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
mp.put("TWO", "two")
val values = mp.values
expect(values.size).toEqual(2)
mp.put("THREE", "three")
expect(values.size).toEqual(3)
mp.remove("ONE")
expect(values.size).toEqual(2)
expect(values.isEmpty).toBeFalsy
mp.clear()
expect(values.size).toEqual(0)
expect(values.isEmpty).toBeTruthy
val hm1 = mu.HashMap(
"ONE" -> "one",
"TWO" -> "two")
val hm2 = mu.HashMap(
"ONE" -> null,
"TWO" -> "two")
val hm3 = mu.HashMap(
(null: String) -> "one",
"TWO" -> "two")
val hm4 = mu.HashMap(
(null: String) -> null,
"TWO" -> "two")
expect(new SimpleQueryableMap(hm1).values.size).toEqual(2)
expect(new SimpleQueryableMap(hm2).values.size).toEqual(2)
expect(new SimpleQueryableMap(hm3).values.size).toEqual(2)
expect(new SimpleQueryableMap(hm4).values.size).toEqual(2)
}
it("values should check single and multiple objects presence") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
mp.put("TWO", "two")
val values = mp.values
expect(values.contains("one")).toBeTruthy
expect(values.contains("two")).toBeTruthy
expect(values.contains("three")).toBeFalsy
expect(values.contains(null)).toBeFalsy
mp.put("THREE", "three")
expect(values.contains("three")).toBeTruthy
val coll1 = asJavaCollection(Set("one", "two", "three"))
expect(values.containsAll(coll1)).toBeTruthy
val coll2 = asJavaCollection(Set("one", "two", "three", "four"))
expect(values.containsAll(coll2)).toBeFalsy
val coll3 = asJavaCollection(Set("one", "two", "three", null))
expect(values.containsAll(coll2)).toBeFalsy
val nummp = mapFactory.empty[Double, Double]
val numValues = nummp.values
nummp.put(1, +0.0)
expect(numValues.contains(+0.0)).toBeTruthy
expect(numValues.contains(-0.0)).toBeFalsy
expect(numValues.contains(Double.NaN)).toBeFalsy
nummp.put(2, -0.0)
expect(numValues.contains(+0.0)).toBeTruthy
expect(numValues.contains(-0.0)).toBeTruthy
expect(numValues.contains(Double.NaN)).toBeFalsy
nummp.put(3, Double.NaN)
expect(numValues.contains(+0.0)).toBeTruthy
expect(numValues.contains(-0.0)).toBeTruthy
expect(numValues.contains(Double.NaN)).toBeTruthy
val hm1 = mu.HashMap(
1.0 -> null,
2.0 -> 2.0)
val hm2 = mu.HashMap(
(null: Any) -> 1.0,
2.0 -> 2.0)
val hm3 = mu.HashMap(
(null: Any) -> null,
2.0 -> 2.0)
expect(new SimpleQueryableMap(hm1).values.contains(1.0)).toBeFalsy
expect(new SimpleQueryableMap(hm2).values.contains(1.0)).toBeTruthy
expect(new SimpleQueryableMap(hm3).values.contains(1.0)).toBeFalsy
expect(new SimpleQueryableMap(hm1).values.contains(null)).toBeTruthy
expect(new SimpleQueryableMap(hm2).values.contains(null)).toBeFalsy
expect(new SimpleQueryableMap(hm3).values.contains(null)).toBeTruthy
}
it("values should side effect clear/remove/retain on the related map") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
mp.put("TWO", "two")
val values = mp.values
expect(values.isEmpty).toBeFalsy
expect(mp.isEmpty).toBeFalsy
values.clear()
expect(values.isEmpty).toBeTruthy
expect(mp.isEmpty).toBeTruthy
mp.put("ONE", "one")
mp.put("TWO", "two")
expect(mp.containsKey("ONE")).toBeTruthy
values.remove("one")
expect(mp.containsKey("ONE")).toBeFalsy
mp.put("ONE", "one")
mp.put("THREE", "three")
expect(mp.containsKey("ONE")).toBeTruthy
expect(mp.containsKey("TWO")).toBeTruthy
expect(mp.containsKey("THREE")).toBeTruthy
values.removeAll(asJavaCollection(List("one", "two")))
expect(mp.containsKey("ONE")).toBeFalsy
expect(mp.containsKey("TWO")).toBeFalsy
expect(mp.containsKey("THREE")).toBeTruthy
mp.put("ONE", "one")
mp.put("TWO", "two")
mp.put("THREE", "three")
expect(mp.containsKey("ONE")).toBeTruthy
expect(mp.containsKey("TWO")).toBeTruthy
expect(mp.containsKey("THREE")).toBeTruthy
values.retainAll(asJavaCollection(List("one", "two")))
expect(mp.containsKey("ONE")).toBeTruthy
expect(mp.containsKey("TWO")).toBeTruthy
expect(mp.containsKey("THREE")).toBeFalsy
}
it("keySet should mirror the related map size") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
mp.put("TWO", "two")
val keySet = mp.keySet
expect(keySet.size).toEqual(2)
mp.put("THREE", "three")
expect(keySet.size).toEqual(3)
mp.remove("ONE")
expect(keySet.size).toEqual(2)
expect(keySet.isEmpty).toBeFalsy
mp.clear()
expect(keySet.size).toEqual(0)
expect(keySet.isEmpty).toBeTruthy
val hm1 = mu.HashMap(
"ONE" -> "one",
"TWO" -> "two")
val hm2 = mu.HashMap(
"ONE" -> null,
"TWO" -> "two")
val hm3 = mu.HashMap(
(null: String) -> "one",
"TWO" -> "two")
val hm4 = mu.HashMap(
(null: String) -> null,
"TWO" -> "two")
expect(new SimpleQueryableMap(hm1).keySet.size).toEqual(2)
expect(new SimpleQueryableMap(hm2).keySet.size).toEqual(2)
expect(new SimpleQueryableMap(hm3).keySet.size).toEqual(2)
expect(new SimpleQueryableMap(hm4).keySet.size).toEqual(2)
}
it("keySet should check single and multiple objects presence") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
mp.put("TWO", "two")
val keySet = mp.keySet
expect(keySet.contains("ONE")).toBeTruthy
expect(keySet.contains("TWO")).toBeTruthy
expect(keySet.contains("THREE")).toBeFalsy
expect(keySet.contains(null)).toBeFalsy
mp.put("THREE", "three")
expect(keySet.contains("THREE")).toBeTruthy
val coll1 =
asJavaCollection(Set("ONE", "TWO", "THREE"))
expect(keySet.containsAll(coll1)).toBeTruthy
val coll2 =
asJavaCollection(Set("ONE", "TWO", "THREE", "FOUR"))
expect(keySet.containsAll(coll2)).toBeFalsy
val coll3 =
asJavaCollection(Set("ONE", "TWO", "THREE", null))
expect(keySet.containsAll(coll2)).toBeFalsy
val nummp = mapFactory.empty[Double, Double]
val numkeySet = nummp.keySet
nummp.put(+0.0, 1)
expect(numkeySet.contains(+0.0)).toBeTruthy
expect(numkeySet.contains(-0.0)).toBeFalsy
expect(numkeySet.contains(Double.NaN)).toBeFalsy
nummp.put(-0.0, 2)
expect(numkeySet.contains(+0.0)).toBeTruthy
expect(numkeySet.contains(-0.0)).toBeTruthy
expect(numkeySet.contains(Double.NaN)).toBeFalsy
nummp.put(Double.NaN, 3)
expect(numkeySet.contains(+0.0)).toBeTruthy
expect(numkeySet.contains(-0.0)).toBeTruthy
expect(numkeySet.contains(Double.NaN)).toBeTruthy
val hm1 = mu.HashMap(
1.0 -> null,
2.0 -> 2.0)
val hm2 = mu.HashMap(
(null: Any) -> 1.0,
2.0 -> 2.0)
val hm3 = mu.HashMap(
(null: Any) -> null,
2.0 -> 2.0)
expect(new SimpleQueryableMap(hm1).keySet.contains(1.0)).toBeTruthy
expect(new SimpleQueryableMap(hm2).keySet.contains(1.0)).toBeFalsy
expect(new SimpleQueryableMap(hm3).keySet.contains(1.0)).toBeFalsy
expect(new SimpleQueryableMap(hm1).keySet.contains(null)).toBeFalsy
expect(new SimpleQueryableMap(hm2).keySet.contains(null)).toBeTruthy
expect(new SimpleQueryableMap(hm3).keySet.contains(null)).toBeTruthy
}
it("keySet should side effect clear/remove/retain on the related map") {
val mp = mapFactory.empty[String, String]
mp.put("ONE", "one")
mp.put("TWO", "two")
val keySet = mp.keySet
expect(keySet.isEmpty).toBeFalsy
expect(mp.isEmpty).toBeFalsy
keySet.clear()
expect(keySet.isEmpty).toBeTruthy
expect(mp.isEmpty).toBeTruthy
mp.put("ONE", "one")
mp.put("TWO", "two")
expect(mp.containsKey("ONE")).toBeTruthy
keySet.remove("ONE")
expect(mp.containsKey("ONE")).toBeFalsy
mp.put("ONE", "one")
mp.put("THREE", "three")
expect(mp.containsKey("ONE")).toBeTruthy
expect(mp.containsKey("TWO")).toBeTruthy
expect(mp.containsKey("THREE")).toBeTruthy
keySet.removeAll(asJavaCollection(List("ONE", "TWO")))
expect(mp.containsKey("ONE")).toBeFalsy
expect(mp.containsKey("TWO")).toBeFalsy
expect(mp.containsKey("THREE")).toBeTruthy
mp.put("ONE", "one")
mp.put("TWO", "two")
mp.put("THREE", "three")
expect(mp.containsKey("ONE")).toBeTruthy
expect(mp.containsKey("TWO")).toBeTruthy
expect(mp.containsKey("THREE")).toBeTruthy
keySet.retainAll(asJavaCollection(List("ONE", "TWO")))
expect(mp.containsKey("ONE")).toBeTruthy
expect(mp.containsKey("TWO")).toBeTruthy
expect(mp.containsKey("THREE")).toBeFalsy
}
}
}
object MapFactory {
def allFactories: Iterator[MapFactory] =
HashMapFactory.allFactories ++ SortedMapFactory.allFactories ++ ConcurrentMapFactory.allFactories
}
trait MapFactory {
def implementationName: String
def empty[K, V]: ju.Map[K, V]
def allowsNullKeys: Boolean
def allowsNullValues: Boolean
}
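// Illustrative only (not part of the original suite): a minimal MapFactory backed by
// java.util.HashMap, which accepts both null keys and null values.
object ExampleHashMapFactory extends MapFactory {
  def implementationName: String = "java.util.HashMap"
  def empty[K, V]: ju.Map[K, V] = new ju.HashMap[K, V]
  def allowsNullKeys: Boolean = true
  def allowsNullValues: Boolean = true
}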
| andreaTP/scala-js | test-suite/src/test/scala/org/scalajs/testsuite/javalib/util/MapTest.scala | Scala | bsd-3-clause | 16,836 |
package org.http4s
package headers
import org.http4s.parser.HttpHeaderParser
import org.http4s.util.Writer
object `Set-Cookie` extends HeaderKey.Internal[`Set-Cookie`] with HeaderKey.Singleton {
override def parse(s: String): ParseResult[`Set-Cookie`] =
HttpHeaderParser.SET_COOKIE(s)
}
final case class `Set-Cookie`(cookie: org.http4s.Cookie) extends Header.Parsed {
override def key: `Set-Cookie`.type = `Set-Cookie`
override def renderValue(writer: Writer): writer.type = cookie.render(writer)
}
| hvesalai/http4s | core/src/main/scala/org/http4s/headers/Set-Cookie.scala | Scala | apache-2.0 | 513 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sumologic.sumobot.plugins
import akka.actor.{ActorSystem, Props}
import com.sumologic.sumobot.core.config.ListOfConfigs
import scala.util.Try
object PluginsFromConfig extends PluginCollection {
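// The `plugins` config block consumed below looks roughly like this (sketch with a
// placeholder class name; entries without a `class` key are simply skipped):
//
//   plugins {
//     my-plugin {
//       class = "com.example.MyPlugin"
//     }
//   }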
override def setup(implicit system: ActorSystem): Unit = {
val plugins: Map[String, Option[Props]] = ListOfConfigs.parse(system.settings.config, "plugins") {
(name, pluginConfig) =>
Try(pluginConfig.getString("class")).toOption.map {
className =>
Props(Class.forName(className))
}
}
plugins.filter(_._2.isDefined).foreach {
tpl =>
addPlugin(tpl._1, tpl._2.get)
}
}
}
| SumoLogic/sumobot | src/main/scala/com/sumologic/sumobot/plugins/PluginsFromConfig.scala | Scala | apache-2.0 | 1,463 |
package chess
package variant
case object Atomic
extends Variant(
id = 7,
key = "atomic",
name = "Atomic",
shortName = "Atom",
title = "Nuke your opponent's king to win.",
standardInitialPosition = true
) {
def pieces = Standard.pieces
override def hasMoveEffects = true
/** Move threatens to explode the opponent's king */
private def explodesOpponentKing(situation: Situation)(move: Move): Boolean =
move.captures && {
situation.board.kingPosOf(!situation.color) exists move.dest.touches
}
/** Move threatens to illegally explode our own king */
private def explodesOwnKing(situation: Situation)(move: Move): Boolean = {
move.captures && (situation.kingPos exists move.dest.touches)
}
private def protectedByOtherKing(board: Board, to: Pos, color: Color): Boolean =
board.kingPosOf(color) exists to.touches
/** In atomic chess, a king cannot be threatened while it is in the perimeter of the other king as were the other player
* to capture it, their own king would explode. This effectively makes a king invincible while connected with another
* king.
*/
override def kingThreatened(
board: Board,
color: Color,
to: Pos,
filter: Piece => Boolean = _ => true
): Boolean = {
board.pieces exists {
case (pos, piece)
if piece.color == color && filter(piece) && piece.eyes(pos, to) && !protectedByOtherKing(
board,
to,
color
) =>
(!piece.role.projection) || piece.role.dir(pos, to).exists {
longRangeThreatens(board, pos, _, to)
}
case _ => false
}
}
// moves exploding opponent king are always playable
override def kingSafety(m: Move, filter: Piece => Boolean, kingPos: Option[Pos]): Boolean = {
!kingPos.exists(kingThreatened(m.after, !m.color, _, filter)) ||
explodesOpponentKing(m.situationBefore)(m)
} && !explodesOwnKing(m.situationBefore)(m)
/** If the move captures, we explode the surrounding pieces. Otherwise, nothing explodes. */
private def explodeSurroundingPieces(move: Move): Move = {
if (move.captures) {
val affectedPos = surroundingPositions(move.dest)
val afterBoard = move.after
val destination = move.dest
val boardPieces = afterBoard.pieces
// Pawns are immune (for some reason), but all pieces surrounding the captured piece and the capturing piece
// itself explode
val piecesToExplode = affectedPos.filter(boardPieces.get(_).fold(false)(_.isNot(Pawn))) + destination
val afterExplosions = boardPieces -- piecesToExplode
val newBoard = afterBoard withPieces afterExplosions
move withAfter newBoard
} else move
}
/** The positions surrounding a given position on the board. Any square at the edge of the board has
* less surrounding positions than the usual eight.
*/
private[chess] def surroundingPositions(pos: Pos): Set[Pos] =
Set(pos.up, pos.down, pos.left, pos.right, pos.upLeft, pos.upRight, pos.downLeft, pos.downRight).flatten
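// For example, a corner square such as a1 has only three surrounding squares (a2, b1, b2),
// while a central square like e4 has the full eight.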
override def addVariantEffect(move: Move): Move = explodeSurroundingPieces(move)
/** Since kings cannot confine each other, if either player has only a king
* then either a queen or multiple pieces are required for checkmate.
*/
private def insufficientAtomicWinningMaterial(board: Board) = {
val kingsAndBishopsOnly = board.pieces forall { p =>
(p._2 is King) || (p._2 is Bishop)
}
lazy val bishopsOnOppositeColors = InsufficientMatingMaterial.bishopsOnOppositeColors(board)
lazy val kingsAndKnightsOnly = board.pieces forall { p =>
(p._2 is King) || (p._2 is Knight)
}
lazy val kingsRooksAndMinorsOnly = board.pieces forall { p =>
(p._2 is King) || (p._2 is Rook) || (p._2 is Bishop) || (p._2 is Knight)
}
// Bishops of opposite color (no other pieces) endgames are dead drawn
// except if either player has multiple bishops so a helpmate is possible
if (board.count(White) >= 2 && board.count(Black) >= 2)
kingsAndBishopsOnly && board.pieces.size <= 4 && bishopsOnOppositeColors
// Queen, rook + any, bishop + any (same piece color), or 3 knights can mate
else if (kingsAndKnightsOnly) board.pieces.size <= 4
else kingsRooksAndMinorsOnly && !bishopsOnOppositeColors && board.pieces.size <= 3
}
/*
* Bishops on opposite coloured squares can never capture each other to cause a king to explode and a traditional
* mate would not be very likely. Additionally, a player can only mate another player with sufficient material.
* We also look out for closed positions (pawns that cannot move and kings which cannot capture them.)
*/
override def isInsufficientMaterial(board: Board) = {
insufficientAtomicWinningMaterial(board) || atomicClosedPosition(board)
}
/** Since a king cannot capture, K + P vs K + P where none of the pawns can move is an automatic draw
*/
private def atomicClosedPosition(board: Board) = {
val closedStructure = board.actors.values.forall(actor =>
(actor.piece.is(Pawn) && actor.moves.isEmpty
&& InsufficientMatingMaterial.pawnBlockedByPawn(actor, board))
|| actor.piece.is(King) || actor.piece.is(Bishop)
)
val randomBishop = board.pieces.find { case (_, piece) => piece.is(Bishop) }
val bishopsAbsentOrPawnitized = randomBishop match {
case Some((pos, piece)) => bishopPawnitized(board, piece.color, pos.isLight)
case None => true
}
closedStructure && bishopsAbsentOrPawnitized
}
private def bishopPawnitized(board: Board, sideWithBishop: Color, bishopLight: Boolean) = {
board.actors.values.forall(actor =>
(actor.piece.is(Pawn) && actor.piece.is(sideWithBishop)) ||
(actor.piece.is(Pawn) && actor.piece.is(!sideWithBishop) && actor.pos.isLight == !bishopLight) ||
(actor.piece.is(Bishop) && actor.piece.is(sideWithBishop) && actor.pos.isLight == bishopLight) ||
actor.piece.is(King)
)
}
/** In atomic chess, it is possible to win with a single knight, bishop, etc, by exploding
* a piece in the opponent's king's proximity. On the other hand, a king alone or a king with
* immobile pawns is not sufficient material to win with.
*/
override def opponentHasInsufficientMaterial(situation: Situation) =
situation.board.rolesOf(!situation.color) == List(King)
/** Atomic chess has a special end where a king has been killed by exploding with an adjacent captured piece */
override def specialEnd(situation: Situation) = situation.board.kingPos.size != 2
}
| ornicar/scalachess | src/main/scala/variant/Atomic.scala | Scala | mit | 6,657 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.protocol.v5.content
import org.scalatest.{FunSpec, Matchers}
import play.api.data.validation.ValidationError
import play.api.libs.json._
class ExecuteInputSpec extends FunSpec with Matchers {
val executeInputJson: JsValue = Json.parse("""
{
"code": "<STRING>",
"execution_count": 42
}
""")
val executeInput: ExecuteInput = ExecuteInput(
"<STRING>", 42
)
describe("ExecuteInput") {
describe("#toTypeString") {
it("should return correct type") {
ExecuteInput.toTypeString should be ("execute_input")
}
}
describe("implicit conversions") {
it("should implicitly convert from valid json to a executeInput instance") {
// This is the least safe way to convert as an error is thrown if it fails
executeInputJson.as[ExecuteInput] should be (executeInput)
}
it("should also work with asOpt") {
// This is safer, but we lose the error information as it returns
// None if the conversion fails
val newExecuteInput = executeInputJson.asOpt[ExecuteInput]
newExecuteInput.get should be (executeInput)
}
it("should also work with validate") {
// This is the safest as it collects all error information (not just first error) and reports it
val executeInputResults = executeInputJson.validate[ExecuteInput]
executeInputResults.fold(
(invalid: Seq[(JsPath, Seq[ValidationError])]) => println("Failed!"),
(valid: ExecuteInput) => valid
) should be (executeInput)
}
it("should implicitly convert from a executeInput instance to valid json") {
Json.toJson(executeInput) should be (executeInputJson)
}
}
}
}
| chipsenkbeil/incubator-toree | protocol/src/test/scala/org/apache/toree/kernel/protocol/v5/content/ExecuteInputSpec.scala | Scala | apache-2.0 | 2,564 |
package com.twitter.scalding.examples
import com.twitter.scalding._
import com.twitter.scalding.mathematics.Matrix
/*
* MatrixTutorial2.scala
*
* Loads a directed graph adjacency matrix where a[i,j] = 1 if there is an edge from node i to node j
* and returns a graph containing only the nodes with outdegree smaller than a given value
*
* ../scripts/scald.rb --local MatrixTutorial2.scala --input data/graph.tsv --maxOutdegree 1000 --output data/graphFiltered.tsv
*
*/
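// data/graph.tsv is expected to contain tab-separated (user1, user2, rel) triples, e.g. (illustrative):
//   1  2  1.0
//   2  3  1.0
//   3  1  1.0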
class FilterOutdegreeJob(args : Args) extends Job(args) {
import Matrix._
val adjacencyMatrix = Tsv( args("input"), ('user1, 'user2, 'rel) )
.read
.toMatrix[Long,Long,Double]('user1, 'user2, 'rel)
// Each row corresponds to the outgoing edges so to compute the outdegree we sum out the columns
val outdegree = adjacencyMatrix.sumColVectors
// We convert the column vector to a matrix object to be able to use the matrix method filterValues
// we make all non zero values into ones and then convert it back to column vector
val outdegreeFiltered = outdegree.toMatrix[Int](1)
.filterValues{ _ < args("maxOutdegree").toDouble }
.binarizeAs[Double].getCol(1)
// We multiply on the left hand side with the diagonal matrix created from the column vector
// to keep only the rows with outdregree smaller than maxOutdegree
(outdegreeFiltered.diag * adjacencyMatrix).write(Tsv( args("output") ) )
}
| wanyifu/scaldingtest | tutorial/MatrixTutorial2.scala | Scala | apache-2.0 | 1,489 |
// Databricks notebook source exported at Wed, 31 Aug 2016 00:07:54 UTC
// MAGIC %md
// MAGIC
// MAGIC
// MAGIC # RDDs, DataFrames and Datasets
// MAGIC
// MAGIC ## Using Wikipedia data
// MAGIC
// MAGIC We're going to use some Wikipedia data: hourly page view counts ("pagecounts") for pages across the various Wikimedia projects. These counts are from March 3rd (yesterday), covering the hour starting at 22:00 UTC.
// MAGIC
// MAGIC Dataset: https://dumps.wikimedia.org/other/pagecounts-raw/
// COMMAND ----------
// MAGIC %fs ls dbfs:/tmp/pagecounts
// COMMAND ----------
// MAGIC %md How big is the file, in megabytes?
// COMMAND ----------
// For some reason, if this is outside a local block, some of the RDD transformation lambdas
// pick up the global dirContents variable, even though they don't reference it. Since the
// type of dirContents isn't serializable, a lambda that picks it up fails. Putting the code
// inside a local block solves the problem.
{
val dirContents = dbutils.fs.ls("dbfs:/tmp/pagecounts")
val size = dirContents.map(_.size).head / (1024 * 1024)
println(s"$size megabytes\n")
}
// COMMAND ----------
val path = dbutils.fs.ls("dbfs:/tmp/pagecounts").sortWith { (a, b) => a.name > b.name }.head.path
// COMMAND ----------
// MAGIC %md
// MAGIC ### RDDs
// MAGIC
// MAGIC RDDs can be created by using the Spark Context object's `textFile()` method.
// COMMAND ----------
// MAGIC %md Create an RDD from the recent pagecounts file:
// COMMAND ----------
// Notice that this returns a RDD of Strings
val pagecountsRDD = sc.textFile(path)
// COMMAND ----------
// MAGIC %md There's one partition, because a gzipped file cannot be uncompressed in parallel.
// COMMAND ----------
pagecountsRDD.partitions.length
// COMMAND ----------
// MAGIC %md Let's increase the parallelism by repartitioning. I'm using 6 partitions, which is twice the number of available threads on Databricks Community Edition.
// COMMAND ----------
val pagecountsRDD2 = pagecountsRDD.repartition(6)
pagecountsRDD2.partitions.length
// COMMAND ----------
// MAGIC %md The `count` action counts how many items (lines) total are in the RDD (this requires a full scan of the file):
// COMMAND ----------
val fmt = new java.text.DecimalFormat
println(fmt.format(pagecountsRDD2.count()))
// COMMAND ----------
// MAGIC %md So the count shows that there are several million lines in the file. Notice that the `count()` action took some time to run because it had to read the entire file remotely from S3.
// COMMAND ----------
// MAGIC %md Let's take a look at some of the data. This will return faster, because `take()` doesn't have to process the whole file.
// COMMAND ----------
pagecountsRDD2.take(10).foreach(println)
// COMMAND ----------
// MAGIC %md Notice that each line in the file actually contains 2 strings and 2 numbers, but our RDD is treating each line as a long string. We'll fix this typing issue shortly by using a custom parsing function.
// COMMAND ----------
// MAGIC %md In the output above, the first column (like `aa`) is the Wikimedia project name. The following abbreviations are used for the first column:
// MAGIC
// MAGIC * wikibooks: `".b"`
// MAGIC * wiktionary: `".d"`
// MAGIC * wikimedia: `".m"`
// MAGIC * wikipedia mobile: `".mw"`
// MAGIC * wikinews: `".n"`
// MAGIC * wikiquote: `".q"`
// MAGIC * wikisource: `".s"`
// MAGIC * wikiversity: `".v"`
// MAGIC * mediawiki: `".w"`
// MAGIC
// MAGIC Projects without a period and a following character are Wikipedia projects. So, any line starting with the column `aa` refers to the Aragones language Wikipedia. Similarly, any line starting with the column `en` refers to the English language Wikipedia. `en.b` refers to English Language Wikibooks.
// MAGIC
// MAGIC The second column is the title of the page retrieved, the third column is the number of requests, and the fourth column is the size of the content returned.
// COMMAND ----------
// MAGIC %md Let's sum up the request counts per page in the English Wikipedia, then pull back the top 100. This is a variation of the code on Slide 4.
// MAGIC
// COMMAND ----------
pagecountsRDD2.flatMap { line =>
line.split("""\\s+""") match {
case Array(project, page, numRequests, contentSize) => Some((project, page, numRequests.toLong))
case _ => None
}
}.
filter { case (project, page, numRequests) => project == "en" }.
map { case (project, page, numRequests) => (page, numRequests) }.
reduceByKey(_ + _).
sortBy({ case (page, numRequests) => numRequests }, ascending = false).
take(100).
foreach { case (page, totalRequests) => println(s"$page: $totalRequests") }
// COMMAND ----------
// MAGIC %md Let's remove the special pages. Pages like "Talk:Topic", "User:username", "Special:Something", and anything starting with a "." are just cluttering things up. Here's a modification of the above, with a slightly different `filter()` call.
// COMMAND ----------
val SkipPages = Array(
"""^Special:""".r,
"""^File:""".r,
"""^Category:""".r,
"""^User:""".r,
"""^Talk:""".r,
"""^Template:""".r,
"""^Help:""".r,
"""^Wikipedia:""".r,
"""^MediaWiki:""".r,
"""^Portal:""".r,
"""^Book:""".r,
"""^Draft:""".r,
"""^Education_Program:""".r,
"""^TimedText:""".r,
"""^Module:""".r,
"""^Topic:""".r,
"""^Images/""".r,
"""^%22//upload.wikimedia.org""".r,
"""^%22//en.wikipedia.org""".r
)
def isSpecialPage(pageTitle: String): Boolean = SkipPages.exists { r => r.findFirstIn(pageTitle).isDefined }
// COMMAND ----------
def keepPage(page: String) = (! page.startsWith(".")) && (! isSpecialPage(page))
val pagecountsRDD3 =
pagecountsRDD2.flatMap { line =>
line.split("""\\s+""") match {
case Array(project, page, numRequests, contentSize) if (project == "en") && keepPage(page) => Some((page, numRequests.toLong))
case _ => None
}
}.
reduceByKey(_ + _).
sortBy({ case (page, numRequests) => numRequests }, ascending = false)
pagecountsRDD3.take(100).foreach { case (page, totalRequests) => println(s"$page: $totalRequests") }
// COMMAND ----------
// MAGIC %md That's completely type-safe, but it's up to us to choose the right implementation. For instance, the code, above, _could_ have been done like this:
// MAGIC
// MAGIC ```
// MAGIC pagecountsRDD2.flatMap { ... }.
// MAGIC filter { ... }.
// MAGIC map { case (project, page, numRequests) => (page, numRequests) }.
// MAGIC   groupByKey().
// MAGIC   mapValues(_.reduce(_ + _)).
// MAGIC ...
// MAGIC ```
// MAGIC
// MAGIC However, `groupByKey() + reduce()` is _far_ more inefficient than `reduceByKey()`. Yet, Spark cannot protect us from choosing the wrong transformations.
// COMMAND ----------
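// A tiny, self-contained illustration of the point above (toy data of our own, not the Wikipedia set):
// both pipelines below compute the same per-key totals, but reduceByKey combines values map-side
// before the shuffle, while groupByKey first ships every individual value across the network.
{
  val toy = sc.parallelize(Seq(("a", 1L), ("b", 2L), ("a", 3L)))
  val viaReduceByKey = toy.reduceByKey(_ + _).collect().toMap
  val viaGroupByKey = toy.groupByKey().mapValues(_.sum).collect().toMap
  println(s"reduceByKey: $viaReduceByKey / groupByKey: $viaGroupByKey")
}
// COMMAND ----------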
// MAGIC %md Before we move on to DataFrames, let's cache this last RDD and see how much memory it uses.
// COMMAND ----------
pagecountsRDD3.setName("pagecountsRDD3").cache()
val totalPagesRDD = pagecountsRDD3.count() // we need to run an action to fill the cache
println(fmt.format(totalPagesRDD))
// COMMAND ----------
// MAGIC %md
// MAGIC ## DataFrames
// MAGIC
// MAGIC Let's try the same thing with DataFrames.
// MAGIC
// MAGIC To make a DataFrame, we can simply convert our RDD into another RDD of a different type, something Spark can use to infer a _schema_. Since we have fewer than 23 columns, we'll use a case class.
// COMMAND ----------
object Parser extends Serializable { // This helps with scoping issues in the notebook
case class EditEntry(project: String, pageTitle: String, numberOfRequests: Long)
def parseLine(line: String) = {
line.split("""\\s+""") match {
case Array(project, page, numRequests, _) =>
Some(EditEntry(project, page, numRequests.toLong))
case _ =>
None
}
}
}
val pagecountsDF = pagecountsRDD2.flatMap(Parser.parseLine).toDF
// COMMAND ----------
pagecountsDF.printSchema()
// COMMAND ----------
pagecountsDF.rdd.partitions.length
// COMMAND ----------
// MAGIC %md Of course, it would be more efficient to read the DataFrame from something other than a gzipped text file. Let's try that, as well. We'll save it to a Parquet file and read it back.
// COMMAND ----------
import org.apache.spark.sql.SaveMode
pagecountsDF.write.mode(SaveMode.Overwrite).parquet("dbfs:/tmp/pagecounts.parquet")
// COMMAND ----------
// MAGIC %fs ls /tmp/pagecounts.parquet
// COMMAND ----------
// Note the use of "spark", not "sqlContext". "spark" is a preinstantiated
// SparkSession, introduced in 2.0.
val pagecountsDFParquet = spark.read.parquet("dbfs:/tmp/pagecounts.parquet")
// COMMAND ----------
pagecountsDFParquet.rdd.partitions.length
// COMMAND ----------
// MAGIC %md Let's get rid of the special pages, as we did with the RDD version.
// COMMAND ----------
import org.apache.spark.sql.functions._
val uKeepPage = sqlContext.udf.register("keepPage", keepPage _)
val pagecountsDF2 = pagecountsDF.filter($"project" === "en").
filter(uKeepPage($"pageTitle")).
filter(substring($"pageTitle", 0, 1) !== ".").
groupBy($"pageTitle").
agg(sum($"numberOfRequests").as("count")).
orderBy($"count".desc)
pagecountsDF2.take(100).foreach { row =>
println(s"${row(0)}: ${row(1)}")
}
// COMMAND ----------
// MAGIC %md Easier to read, but... not type-safe.
// COMMAND ----------
// MAGIC %md
// MAGIC ### A very brief aside, about partitions
// MAGIC
// MAGIC According to Spark documentation, when a DataFrame shuffle occurs, the number of post-shuffle partitions is defined by Spark configuration parameter
// MAGIC `spark.sql.shuffle.partitions`, which defaults to 200. So, we should have that many partitions in `pagecountsDF2`, because the `groupBy` and `agg` calls, above, are likely to produce shuffles.
// MAGIC
// MAGIC But...
// COMMAND ----------
println(s"spark.sql.shuffle.partitions = ${sqlContext.getConf("spark.sql.shuffle.partitions")}")
println(s"pagecountsDF2 partitions = ${pagecountsDF2.rdd.partitions.length}")
// COMMAND ----------
// MAGIC %md Okay, that's not 200.
// MAGIC
// MAGIC If we reran the creation of `pagecountsDF2`, above, we might see 18, 19, 20, 21, 22, or some number in that area for the number of post-shuffle partitions. Why?
// MAGIC
// MAGIC It turns out that the `orderBy`, above, uses _range partitioning_. To determine reasonable upper and lower bounds for the range, Spark randomly samples the data. In this case, we end up with something around 22 partitions; the partition count might differ with different data. It's not always the same from run to run because the random sampling doesn't use the same seed every time.
// COMMAND ----------
// MAGIC %md
// MAGIC ### Back to our DataFrame
// MAGIC
// MAGIC The `pagecountsDF2` DataFrame consists of `Row` objects, which can contain heterogeneous types. If we pull some `Rows` back to the driver, we can extract the columns, but only as type `Any`:
// COMMAND ----------
val first10Rows = pagecountsDF2.take(10)
// COMMAND ----------
val row = first10Rows.head
row(0)
// COMMAND ----------
// MAGIC %md Note that `Row` isn't typed. It can't be, since each row consists of columns of potentially different types. (I suppose Spark could've used Shapeless, but it didn't...) If we want to get back to actual, useful Scala types, we have to do something ugly, like this:
// COMMAND ----------
first10Rows.map { row =>
(row(0).asInstanceOf[String], row(1).asInstanceOf[Long])
}
// COMMAND ----------
// MAGIC %md Before we move on to Datasets, let's:
// MAGIC * verify that the number of items in the DataFrame match the RDD with the special pages filtered out
// MAGIC * cache the DataFrame
// MAGIC * compare the cached size to the cached size of the RDD.
// COMMAND ----------
val totalPagesDF = pagecountsDF2.cache().count()
println(s"RDD total: ${fmt.format(totalPagesRDD)}")
println(s"DF total: ${fmt.format(totalPagesDF)}")
// COMMAND ----------
// MAGIC %md
// MAGIC ## Datasets
// MAGIC The easiest way to create a Dataset from scratch is from a DataFrame. Prior to Spark 2.0, we'd use the `SQLContext` for that:
// MAGIC
// MAGIC ```
// MAGIC val ds = sqlContext.read.text("dbfs:/tmp/pagecounts").as[String]
// MAGIC |------------- makes a DataFrame ----------|
// MAGIC |--------|
// MAGIC |
// MAGIC +- converts to a Dataset
// MAGIC ```
// MAGIC
// MAGIC However, in 2.0, while that approach still works, we should prefer to use the `SparkSession`, available as `spark` in these notebooks and in the `spark-shell` Scala REPL.
// COMMAND ----------
// Notice that this returns a Dataset of Strings
val pagecountsDS = spark.read.text("dbfs:/tmp/pagecounts/").as[String]
pagecountsDS.take(3).foreach(println)
// COMMAND ----------
// MAGIC %md Of course, we could also just convert the existing `pagecountsDF` to a Dataset:
// COMMAND ----------
val pagecountsDS2 = pagecountsDF.as[(String, String, Long)]
pagecountsDS2.take(3).foreach(println)
// COMMAND ----------
// MAGIC %md Even better, though, let's make a Dataset that uses something more convenient than a tuple:
// COMMAND ----------
// Matching is done by DataFrame column name -> case class field name
case class Edit(project: String, pageTitle: String, numberOfRequests: Long)
val pagecountsDS3 = pagecountsDF.as[Edit]
// COMMAND ----------
pagecountsDS3.take(4).foreach(println)
// COMMAND ----------
println(fmt.format(pagecountsDS3.count()))
// COMMAND ----------
// MAGIC %md ### I lied (a little)
// MAGIC
// MAGIC Prior to 2.0, `DataFrame` and `Dataset` were two different types. In 2.0, though, a `DataFrame` is just a type alias for `Dataset[Row]`. Thus, in 2.0,
// MAGIC when you start with a `DataFrame` and "convert" it to a `Dataset`, you're actually converting a `Dataset` of one type to a `Dataset` of another type.
// COMMAND ----------
pagecountsDF.getClass
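// A small sketch of the alias in action (the helper name is ours, not from the original notebook):
// a method declared against Dataset[Row] accepts a DataFrame with no conversion at all.
import org.apache.spark.sql.{Dataset, Row}
def countRows(ds: Dataset[Row]): Long = ds.count()
countRows(pagecountsDF)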
// COMMAND ----------
// MAGIC %md You still have to get the types right on the conversions to Datasets, but once you have the Dataset, you have something that's type safe again.
// MAGIC
// MAGIC Once again, let's filter out the special pages, group by page title, and show to top 100 hits.
// COMMAND ----------
val pagecountsDS4 = pagecountsDS3.filter { e => (e.project == "en") && (! e.pageTitle.startsWith(".")) && (! isSpecialPage(e.pageTitle)) }.
  groupByKey { _.pageTitle }. // KeyValueGroupedDataset[String, Edit]
reduceGroups { (e1, e2) => e1.copy(e1.project, e1.pageTitle, e1.numberOfRequests + e2.numberOfRequests) }.
map(_._2). // skip the key; extract the value
orderBy($"numberOfRequests".desc)
pagecountsDS4.take(100).foreach { e => println(s"${e.pageTitle}: ${e.numberOfRequests}") }
// COMMAND ----------
// MAGIC %md Let's cache this Dataset and, for good measure, compare the number of items with our RDD and DataFrame.
// COMMAND ----------
val totalPagesDS = pagecountsDS4.cache().count()
println(s"DF total: ${fmt.format(totalPagesDF)}")
println(s"RDD total: ${fmt.format(totalPagesRDD)}")
println(s"DS total: ${fmt.format(totalPagesDS)}")
// COMMAND ----------
pagecountsDS4.rdd.partitions.length
// COMMAND ----------
// MAGIC %md
// MAGIC ### The last little bit of caching
// MAGIC
// MAGIC So far, we have cached:
// MAGIC
// MAGIC * A filtered RDD of `(pageTitle, totalRequests)` tuples
// MAGIC * A DataFrame of `Row` objects
// MAGIC * A Dataset of `Edit` objects
// COMMAND ----------
// MAGIC %md Just for completeness, let's cache an RDD with `Edit` objects. Given the small amount of memory available to Community Edition clusters, we'll allow this cache to spill to disk if it has to.
// COMMAND ----------
import org.apache.spark.storage.StorageLevel
val pagecountsEditRDD = pagecountsRDD2.flatMap { line =>
line.split("""\\s+""") match {
case Array(project, pageTitle, requests, _) => Some(Edit(project, pageTitle, requests.toLong))
case _ => None
}
}.
filter { e => (e.project == "en") && (! e.pageTitle.startsWith(".")) && (! isSpecialPage(e.pageTitle)) }
pagecountsEditRDD.setName("pagecountsEditRDD").persist(StorageLevel.MEMORY_AND_DISK).count()
// COMMAND ----------
// MAGIC %md The Spark UI's Storage tab now shows all of these in memory. The Dataset is compressed in memory by default, so it takes up much less space.
// COMMAND ----------
// MAGIC %md ## END OF DEMO
// COMMAND ----------
| bmc/rdds-dataframes-datasets-presentation-2016 | demo/RDDs-DataFrames-and-Datasets.scala | Scala | bsd-3-clause | 16,979 |
package linguistica.stemmer.porter
import org.scalatest._
import org.scalatest.matchers._
import org.scalatest.Assertions._
class RuleSpec extends FunSuite with Logic {
  def checkStem(rule: SimpleRule, word: String, stem: String) = assert(rule.stem(Word(word)) === Some(Word(stem)))
  def checkApply(rule: Rule, word: String, stem: String) = assert(rule(Word(word)) === Some(Word(stem)))
test("test that result of stem and apply is right [step 1a]") {
val sses = Rule("SSES", "SS")
checkStem(sses, "CARESSES", "CARE")
checkApply(sses, "CARESSES", "CARESS")
val ies = Rule("IES", "I")
checkStem(ies, "PONIES", "PON")
checkApply(ies, "PONIES", "PONI")
checkStem(ies, "TIES", "T")
checkApply(ies, "TIES", "TI")
val ss = Rule("SS", "SS")
checkStem(ss, "CARESS", "CARE")
checkApply(ss, "CARESS", "CARESS")
}
test("test that stem or apply return None for unmatched suffix") {
val sses = Rule("SSES", "SS")
assert(sses.stem(Word("CONSES")) === None)
assert(sses.apply(Word("CONSES")) === None)
}
test("test that result of stem and apply is right [step 1b | first part]") {
val eed_m_gt_0 = Rule("EED", "EE", Condition.measureIs(m => m > 0))
checkStem(eed_m_gt_0, "FEED", "F")
assert(eed_m_gt_0(Word("FEED")) === None)
checkStem(eed_m_gt_0, "AGREED", "AGR")
checkApply(eed_m_gt_0, "AGREED", "AGREE")
val ed = Rule("ED", "", Condition.containsVowel)
checkStem(ed, "RED", "R")
assert(ed(Word("RED")) === None)
checkStem(ed, "PLASTERED", "PLASTER")
checkApply(ed, "PLASTERED", "PLASTER")
checkStem(ed, "BLED", "BL")
assert(ed(Word("BLED")) === None)
val ing = Rule("ING", "", Condition.containsVowel)
checkStem(ing, "MOTORING", "MOTOR")
checkApply(ing, "MOTORING", "MOTOR")
checkStem(ing, "SING", "S")
assert(ing(Word("SING")) === None)
}
test("test that result of apply is right for complex rules [step 1b | second part]") {
val doubleNotLsz = Rule.doubleToSingle(Condition.not(Condition.endCharIn("LSZ")))
checkApply(doubleNotLsz, "HOPP", "HOP")
checkApply(doubleNotLsz, "TANN", "TAN")
assert(doubleNotLsz(Word("FALL")) === None)
assert(doubleNotLsz(Word("HISS")) === None)
assert(doubleNotLsz(Word("FIZZ")) === None)
val m_gt_1_o = Rule("", "E", Condition.and(Condition.measureIs(_ == 1), Condition.endsCvcNotWxy))
assert(m_gt_1_o(Word("FAIL")) === None)
checkStem(m_gt_1_o, "FIL", "FIL")
checkApply(m_gt_1_o, "FIL", "FILE")
}
} | vcherkassky/porter-stemmer | src/test/scala/linguistica/stemmer/porter/RuleSpec.scala | Scala | mit | 2,501 |
package com.example.datticker
// Android Imports
import android.os.Bundle
import android.widget.{LinearLayout, RelativeLayout, TextView, Button}
import android.view.ViewGroup.LayoutParams._
import android.view.{Gravity, View}
import android.app.Activity
import android.graphics.Color
// Macroid Imports
import macroid._
import macroid.FullDsl._
import macroid.contrib._
import macroid.contrib.Layouts.RuleRelativeLayout
import macroid.contrib.Layouts.RuleRelativeLayout.Rule
import macroid.Tweak
// Scala Imports
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
// Local Imports
import jsonExtraction.CryptoCoin.fBTCData
import Tweaks.DTT._
/**
* __Author__ = bmcg
* __Email__ = [email protected]
* __VERSION__ = 0.1
*
* __License__ =
* Copyright (C) 2015-2018 b-mcg <[email protected]>
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
/**
* MainActivity for a bitcoin ticker which gathers
* its data from bitcoinaverage.
*
*/
class MainActivity extends Activity with Contexts[Activity] with IdGeneration {
// Slots for BTC data to be held in
var capLast = slot[TextView]
var capAsk = slot[TextView]
var cap24h = slot[TextView]
// Predefined font sizes
lazy val fat = size(22)
lazy val med = size(15)
lazy val fmed = size(18)
/** Short hands for arranging the ask and 24_h textviews below the last one
* so it looks something like this:
* Last
* Ask 24_h
*/
def bLS() = lp[RRL](WRAP_CONTENT, WRAP_CONTENT, Rule(belowOf, Id.btcLast), Rule(leftOf, Id.btcLast), Rule(startOf, Id.btcLast))
def bRE() = lp[RRL](WRAP_CONTENT, WRAP_CONTENT, Rule(belowOf, Id.btcLast), Rule(rightOf, Id.btcLast), Rule(endOf, Id.btcLast))
override def onCreate(savedInstanceState: Bundle) = {
super.onCreate(savedInstanceState)
// Initial BTC ticker data
val btcData = fBTCData()
// Setup the main view
lazy val view = l[RuleRelativeLayout](
// Title TextView
w[TextView] <~
tvC(0, 0, 0) <~ // Set text color to black
text("BTC|USD") <~
fmed <~
tCH(),
// TextView for holding last ticker field
w[TextView] <~
tvC(0, 0, 0) <~ // Set text color to black
btcData.map(p => text(p._1)) <~ // Get the last field
fat <~
tvG(Gravity.CENTER_HORIZONTAL) <~
id(Id.btcLast) <~ // Give the main textview an id for future textview formatting
wire(capLast) <~
cHCV(),
// TextView for holding ask ticker field
w[TextView] <~
tvC(0, 0, 0) <~ // Set text color to black
btcData.map(p => text(p._2)) <~ // Get the ask field
med <~ // Set medium text size
wire(capAsk) <~
bLS(),
// TextView for holding 24_h ticker field
w[TextView] <~
tvC(0, 0, 0) <~ // Set text color to black
btcData.map(p => text(p._3)) <~ // Get the 24_h field
med <~ // Set medium text size
wire(cap24h) <~
bRE(),
// Refresh button
w[Button] <~
text("Refresh") <~
bgC(255, 215, 0) <~ // Set button background color to gold
// Update with new ticker info on click
On.click {
// Fetch current ticker data and reset each textview
val newBTCData = fBTCData()
(capLast <~ newBTCData.map(p => text(p._1)) <~ fat) ~
(capAsk <~ newBTCData.map(p => text(p._2)) <~ med) ~
(cap24h <~ newBTCData.map(p => text(p._3)) <~ med)
} <~ bCH()
) <~ bgC(224, 255, 255) <~ // Set activity background color to light cyan
mP()
setContentView(getUi(view))
}
}
| b-mcg/datticker | src/main/scala/com/example/datticker/MainActivity.scala | Scala | gpl-3.0 | 4,309 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn.tf
import com.intel.analytics.bigdl.utils.tf.TFRecordIterator
import org.scalatest.{FlatSpec, Matchers}
import java.io.{File => JFile}
import java.nio.{ByteBuffer, ByteOrder}
import com.google.protobuf.ByteString
import com.intel.analytics.bigdl.tensor.{FloatType, Tensor}
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import org.tensorflow.example.Example
import org.tensorflow.framework.DataType
import com.intel.analytics.bigdl.utils.tf.TFTensorNumeric.NumericByteString
class DecodeImageSpec extends FlatSpec with Matchers {
"DecodeRaw " should "be able to decode raw bytes" in {
val input = getInputs("raw")
val decoder = new DecodeRaw[Float](DataType.DT_UINT8, true)
val output = decoder.forward(input).asInstanceOf[Tensor[Int]]
output.size() should be (Array(28*28))
}
"DecodeRaw " should "be able to decode float raw bytes" in {
val data = ByteBuffer.allocate(16)
data.order(ByteOrder.LITTLE_ENDIAN)
data.putFloat(1.0f)
data.putFloat(2.0f)
data.putFloat(3.0f)
data.putFloat(4.0f)
val input = Tensor.scalar(ByteString.copyFrom(data.array()))
val decoder = new DecodeRaw[Float](DataType.DT_FLOAT, true)
val output = decoder.forward(input).asInstanceOf[Tensor[Int]]
output should be (Tensor[Float](Array(1.0f, 2.0f, 3.0f, 4.0f), Array(4)))
}
"DecodePng " should "be able to decode png" in {
val input = getInputs("png")
val decoder = new DecodePng[Int](1)
val output = decoder.forward(input)
val expected = getRaw()
output should be (expected)
}
"DecodeJpeg " should "be able to decode jpeg" in {
val input = getInputs("jpeg")
val decoder = new DecodeJpeg[Int](1)
val output = decoder.forward(input)
output.size() should be (Array(28, 28, 1))
}
"DecodeGif " should "be able to decode gif" in {
val input = getInputs("gif")
val decoder = new DecodeGif[Int]()
val output = decoder.forward(input)
output.size() should be (Array(1, 28, 28, 3))
}
private def getRaw(): Tensor[Int] = {
val input = getInputs("raw")
val decoder = new DecodeRaw[Float](DataType.DT_UINT8, true)
val output = decoder.forward(input).asInstanceOf[Tensor[Int]]
output.resize(Array(28, 28, 1))
}
private def getInputs(name: String): Tensor[ByteString] = {
val index = name match {
case "png" => 0
case "jpeg" => 1
case "gif" => 2
case "raw" => 3
}
val resource = getClass.getClassLoader.getResource("tf")
val path = resource.getPath + JFile.separator + "decode_image_test_case.tfrecord"
val file = new JFile(path)
val bytesVector = TFRecordIterator(file).toVector
val pngBytes = bytesVector(index)
val example = Example.parseFrom(pngBytes)
val imageByteString = example.getFeatures.getFeatureMap.get("image/encoded")
.getBytesList.getValueList.get(0)
Tensor[ByteString](Array(imageByteString), Array[Int]())
}
}
class DecodeImageSerialTest extends ModuleSerializationTest {
private def getInputs(name: String): Tensor[ByteString] = {
val index = name match {
case "png" => 0
case "jpeg" => 1
case "gif" => 2
case "raw" => 3
}
val resource = getClass.getClassLoader.getResource("tf")
val path = resource.getPath + JFile.separator + "decode_image_test_case.tfrecord"
val file = new JFile(path)
val bytesVector = TFRecordIterator(file).toVector
val pngBytes = bytesVector(index)
val example = Example.parseFrom(pngBytes)
val imageByteString = example.getFeatures.getFeatureMap.get("image/encoded")
.getBytesList.getValueList.get(0)
Tensor[ByteString](Array(imageByteString), Array[Int]())
}
override def test(): Unit = {
val decodeImage = new DecodeImage[Float](1).setName("decodeImage")
val input = getInputs("png")
runSerializationTest(decodeImage, input)
}
}
| yiheng/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/tf/DecodeImageSpec.scala | Scala | apache-2.0 | 4,556 |
package com.github.mgoeminne.sitar.parser.acm
import com.github.mgoeminne.sitar.parser.{Book, Citation, CitationParser}
/**
* acm style for book citation
*/
private[acm] class ACMBookParser extends CitationParser
{
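  // Shape of citation this grammar is written for (hypothetical example, not taken from any test data):
  //   "Knuth, D. E. The Art of Computer Programming. Addison-Wesley, 1968."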
def lastName: Parser[String] = """[^,]+""".r ^^ { case l => l.split(" ").last}
def firstName: Parser[String] = """([A-Z]\\.\\s?)+""".r
def author: Parser[String] = lastName ~ "," ~ firstName ^^ { case l~","~f => l}
def authors: Parser[Seq[String]] = rep(author ~ ",") ~ "and" ~ author ^^ { case a ~ "and" ~ c => a.map(_._1) :+ c} |
author ~ ", and" ~ author ^^ { case a ~ ", and" ~ b => Seq(a,b)} |
author ^^ { case a => Seq(a) }
def editor: Parser[String] = opt(""", Ed(s)?\\.""".r) ^^ { case e => e.getOrElse("")}
def volume: Parser[String] = """,\\svol(\\.|ume)\\s\\d+\\sof[^\\.]+""".r
def title: Parser[String] = """[^,\\.]+""".r ^^ {case t => println(t) ; t}
def printer: Parser[String] = """[^\\d]+""".r ^^ { case p => println(p) ; p}
def year: Parser[Int] = """\\d{4}""".r ^^ {case y => println(y); y.toInt}
def citation: Parser[Book] = authors~editor~title~opt(volume)~"."~printer~year<~"." ^^ { case a~e~t~v~"."~p~y =>
new Book(t, a, y)
}
} | mgoeminne/sitar | src/main/scala/com/github/mgoeminne/sitar/parser/acm/ACMBookParser.scala | Scala | mit | 1,300 |
package scala.meta.trees
import com.intellij.psi.PsiElement
import scala.meta.internal.{ast => m, semantic => h}
class AbortException(reason: String) extends RuntimeException(reason) {
def this(place: Any, mess: String) = this(mess + s"[$place]")
}
class UnimplementedException(what: Any) extends
AbortException(what, s"This code path is not implemented yet[${Thread.currentThread().getStackTrace.drop(3).head}]")
class ScalaMetaException(message: String) extends Exception(message)
class ScalaMetaResolveError(elem: PsiElement) extends ScalaMetaException(s"Cannot resolve ${elem.getClass} at ${elem.toString}")
class ScalaMetaTypeResultFailure(elem: Option[PsiElement], cause: String) extends ScalaMetaException(s"Cannot calculate type at ${elem.map(_.getText).getOrElse("UNKNOWN")}($cause)")
package object error {
def unreachable = throw new AbortException("This code should be unreachable")
def unreachable(reason: String) = throw new AbortException("This code should be unreachable: " + reason)
def unresolved(cause: String, place: Option[PsiElement]) = throw new AbortException(place, s"""Failed to typecheck "${place.map(_.getText).getOrElse("UNKNOWN")}" - $cause""")
def die(reason: String = "unknown") = throw new AbortException(reason)
}
| ilinum/intellij-scala | src/scala/meta/trees/ScalaMetaException.scala | Scala | apache-2.0 | 1,268 |
package bar
object Bar {
def xxx(s: String): foo.Foo = foo.Foo.create(s)
}
| lrytz/scala | test/files/pos/t10350/Bar.scala | Scala | apache-2.0 | 79 |
package akka.dispatch.verification
import akka.actor.{ActorCell, ActorRef, ActorSystem, Props}
import akka.dispatch.{Envelope}
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.Semaphore,
scala.collection.mutable.HashMap,
scala.collection.mutable.HashSet
// ----------- Internal event types -------------
abstract trait Event
// Metadata events, not actually events.
// MsgEvents appearing between `BeginUnignorableEvents' and `EndUnigorableEvents'
// will never be skipped over during replay.
final case object BeginUnignorableEvents extends Event
final case object EndUnignorableEvents extends Event
// An external thread has just started an `atomic block`, where it will now
// send some number of messages. Upon replay, wait until the end of the
// atomic block before deciding whether those messages are or are not going
// to show up.
final case class BeginExternalAtomicBlock(taskId: Long) extends Event
final case class EndExternalAtomicBlock(taskId: Long) extends Event
// Internal events.
// MsgSend is the initial send, not the delivery
// N.B., if an event trace was serialized, it's possible that msg is of type
// MessageFingerprint rather than a whole message!
// Message delivery -- (not the initial send)
// N.B., if an event trace was serialized, it's possible that msg is of type
// MessageFingerprint rather than a whole message!
case class MsgEvent(
sender: String, receiver: String, msg: Any) extends Event
case class SpawnEvent(
parent: String, props: Props, name: String, actor: ActorRef) extends Event
// (Used by DPOR)
// TODO(cs): consolidate with redundant types below.
case class NetworkPartition(
first: Set[String],
second: Set[String]) extends
UniqueExternalEvent with ExternalEvent with Event
// (Used by DPOR)
case class NetworkUnpartition(
first: Set[String],
second: Set[String]) extends
UniqueExternalEvent with ExternalEvent with Event
// (More general than DPOR)
final case class MsgSend (sender: String,
receiver: String, msg: Any) extends Event
final case class KillEvent (actor: String) extends Event
final case class PartitionEvent (endpoints: (String, String)) extends Event
final case class UnPartitionEvent (endpoints: (String, String)) extends Event
// Marks when WaitQuiescence was first processed.
final case object BeginWaitQuiescence extends Event
// Marks when Quiescence was actually reached.
final case object Quiescence extends Event
final case class ChangeContext (actor: String) extends Event
// Recording/Replaying Akka.FSM.Timer's (which aren't serializable! hence this madness)
// N.B. these aren't explicitly recorded. We use them only when we want to serialize event
// traces.
final case class TimerFingerprint(name: String,
msgFingerprint: MessageFingerprint, repeat: Boolean, generation: Int) extends MessageFingerprint
// Corresponds to MsgEvent.
final case class TimerDelivery(sender: String, receiver: String, fingerprint: TimerFingerprint) extends Event
// Keep this as a static class rather than a trait for backwards-compatibility
object MetaEvents {
def isMetaEvent(e: Event): Boolean = {
e match {
case BeginUnignorableEvents | EndUnignorableEvents | _: BeginExternalAtomicBlock |
_: EndExternalAtomicBlock | _: MsgSend | BeginWaitQuiescence |
Quiescence | _: ChangeContext => return true
case _ => return false
}
}
}
object IDGenerator {
var uniqueId = new AtomicInteger // DPOR root event is assumed to be ID 0, incrementAndGet ensures starting at 1
def get() : Integer = {
val id = uniqueId.incrementAndGet()
if (id == Int.MaxValue) {
throw new RuntimeException("Deal with overflow..")
}
return id
}
}
case class Unique(
val event : Event,
var id : Int = IDGenerator.get()
) extends ExternalEvent {
def label: String = "e"+id
}
case class Uniq[E](
val element : E,
var id : Int = IDGenerator.get()
)
case object RootEvent extends Event
case class WildCardMatch(
// Given:
// - a list of pending messages, sorted from least recently to most recently sent
// - a "backtrack setter" function: given an index of the pending
// messages, sets a backtrack point for that pending message, to be
// replayed in the future.
// return the index of the chosen one, or None
msgSelector: (Seq[Any], (Int) => Unit) => Option[Int],
name:String=""
)
/**
* TellEnqueue is a semaphore that ensures a linearizable execution, and protects
* schedulers' data structures during akka's concurrent processing of `tell`
* (the `!` operator).
*
* Instrumenter()'s control flow is as follows:
* - Invoke scheduler.schedule_new_message to find a new message to deliver
* - Call `dispatcher.dispatch` to deliver the message. Note that
* `dispatcher.dispatch` hands off work to a separate thread and returns
* immediately.
* - The actor `receive()`ing the message now becomes active
* - Every time that actor invokes `tell` to send a message to a known actor,
* a ticket is taken from TellEnqueue via TellEnqueue.tell()
* - Concurrently, akka will process the `tell`s by enqueuing the message in
* the receiver's mailbox.
 *  - Every time akka finishes enqueueing a message to the receiver's mailbox,
 *    we first call scheduler.event_produced, and then replace a ticket to
* TellEnqueue via TellEnqueue.enqueue()
* - When the actor returns from `receive`, we wait for all tickets to be
* returned (via TellEnqueue.await()) before scheduling the next message.
*
* The `known actor` part is crucial. If the receiver is not an actor (e.g.
* the main thread) or we do not interpose on the receiving actor, we will not
* be able to return the ticket via TellEnqueue.enqueue(), and the system will
* block forever on TellEnqueue.await().
*/
trait TellEnqueue {
def tell()
def enqueue()
def reset()
def await ()
}
class TellEnqueueBusyWait extends TellEnqueue {
var enqueue_count = new AtomicInteger
var tell_count = new AtomicInteger
def tell() {
tell_count.incrementAndGet()
}
def enqueue() {
enqueue_count.incrementAndGet()
}
def reset() {
tell_count.set(0)
enqueue_count.set(0)
}
def await () {
while (tell_count.get != enqueue_count.get) {}
}
}
class TellEnqueueSemaphore extends Semaphore(1) with TellEnqueue {
var enqueue_count = new AtomicInteger
var tell_count = new AtomicInteger
def tell() {
tell_count.incrementAndGet()
reducePermits(1)
require(availablePermits() <= 0)
}
def enqueue() {
enqueue_count.incrementAndGet()
require(availablePermits() <= 0)
release()
}
def reset() {
tell_count.set(0)
enqueue_count.set(0)
// Set available permits to 0
drainPermits()
// Add a permit
release()
}
def await() {
acquire
release
}
}
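// Minimal usage sketch (illustration only, not part of the original scheduler code): walks a
// single ticket through the protocol described in the TellEnqueue comment above.
object TellEnqueueUsageSketch {
  def run(): Unit = {
    val tickets = new TellEnqueueSemaphore
    tickets.reset()
    tickets.tell()     // an actor has just called `!` -> take a ticket
    tickets.enqueue()  // akka finished enqueueing the message -> return the ticket
    tickets.await()    // returns immediately, since every ticket has been returned
  }
}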
class ExploredTacker {
var exploredStack = new HashMap[Int, HashSet[(Unique, Unique)] ]
def setExplored(index: Int, pair: (Unique, Unique)) =
exploredStack.get(index) match {
case Some(set) => set += pair
case None =>
val newElem = new HashSet[(Unique, Unique)] + pair
exploredStack(index) = newElem
}
def isExplored(pair: (Unique, Unique)): Boolean = {
for ((index, set) <- exploredStack) set.contains(pair) match {
case true => return true
case false =>
}
return false
}
def trimExplored(index: Int) = {
exploredStack = exploredStack.filter { other => other._1 <= index }
}
def printExplored() = {
for ((index, set) <- exploredStack.toList.sortBy(t => (t._1))) {
println(index + ": " + set.size)
//val content = set.map(x => (x._1.id, x._2.id))
//println(index + ": " + set.size + ": " + content))
}
}
def clear() = {
exploredStack.clear()
}
}
// Shared instance
object ExploredTacker {
var obj:ExploredTacker = null
def apply() = {
if (obj == null) {
obj = new ExploredTacker
}
obj
}
}
| NetSys/demi | src/main/scala/verification/schedulers/AuxilaryTypes.scala | Scala | bsd-2-clause | 8,014 |
/*
* Copyright (c) 2014 Robert Conrad - All Rights Reserved.
* Unauthorized copying of this file, via any medium is strictly prohibited.
* This file is proprietary and confidential.
* Last modified by rconrad, 12/24/14 4:37 PM
*/
package base.common.lib
/**
* Description of "ISO" (not really) currencies that we accept at API level
* @author rconrad
*/
object Currencies extends Enumeration {
type Currency = Value
implicit def asString(c: Currency) = c.toString
implicit def asValue(s: String) = withName(s)
/**
* Render an amount in the provided currency to string with the currency's associated precision
*/
def preciselyString(c: Currency, a: Double): String = s"%.${precision(c)}f".format(a)
def preciselyDouble(c: Currency, a: Double): Double = preciselyString(c, a).toDouble
/**
* The amount within which two doubles can differ and still be considered the same for a given currency
*/
def allowedDelta(c: Currency) = 1d / Math.pow(10d, precision(c))
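  // e.g. allowedDelta(USD) == 0.01 (two decimal places) and allowedDelta(BTC) == 1e-9 (satoshi precision)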
val USD = Value("USD")
val KRW = Value("KRW")
val BTC = Value("BTC")
val fromCurrencies = Set(USD, KRW)
val toCurrencies = Set(BTC)
val paidCurrencies = fromCurrencies
/**
* Defines the expected and allowed precision for each currency. Extremely important for
* security hash calculation because decimal numbers are converted to string then hashed
* via md5 - so any precision differences would manifest as non-matching hashes.
*/
val precision = Map(
USD -> 2,
KRW -> 0,
BTC -> 9 // 100,000,000 satoshi per BTC
)
// ensure precision map includes all currencies
assert(precision.keys.size == values.size &&
!values.map(precision.keys.toList.contains).exists(p => !p))
}
| robconrad/base-api | project-common/src/main/scala/base/common/lib/Currencies.scala | Scala | mit | 1,725 |
/*
* Copyright 2019 Google LLC All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.jzos
class MockRecordReader(lRecl: Int, blkSize: Int, recFm: String) extends RecordReader {
override def read(bytes: Array[Byte]): Int = -1
override def read(bytes: Array[Byte], i: Int, i1: Int): Int = -1
override def close(): Unit = {}
override def getLrecl: Int = lRecl
override def getBlksize: Int = blkSize
override def getRecfmBits: Int = 1
override def getRecfm: String = recFm
override def getDDName: String = "DD"
override def getDsn: String = "//DSN"
}
| CloudVLab/professional-services | tools/bigquery-zos-mainframe-connector/src/test/scala/com/ibm/jzos/MockRecordReader.scala | Scala | apache-2.0 | 1,121 |
package com.komanov.serialization.converters
import com.komanov.serialization.domain.{EventProcessor, Site, SiteEvent}
trait BasePerfTest[Input, Output] {
val N = 100000
def createInput(converter: MyConverter, site: Site): Input
def convert(converter: MyConverter, input: Input): Output
def doTest(): Unit = {
println("Warming up...")
doWarmUp()
println("Testing!")
println("Converter," + TestData.sites.map(_._1).mkString(","))
for ((converterName, converter) <- Converters.all) {
val results = for {
(name, site) <- TestData.sites
input = createInput(converter, site)
} yield doTest(converter, input)
println(converterName + "," + results.map(_._2).mkString(","))
}
}
private def doTest(c: MyConverter, input: Input): (Long, Long) = {
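    // Request a GC before timing so garbage left over from the previous converter/input
    // doesn't get collected in the middle of the measured loop and skew the numbers.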
Runtime.getRuntime.gc()
Runtime.getRuntime.runFinalization()
Runtime.getRuntime.gc()
Runtime.getRuntime.gc()
val start = System.nanoTime()
runXTimes(c, input, N)
val duration = System.nanoTime() - start
val avg = duration / N
duration -> avg
}
private def runXTimes(c: MyConverter, input: Input, x: Int): Unit = {
for (_ <- 0 until x) {
convert(c, input)
}
}
private def doWarmUp() = {
val x = N / 10
for ((converterName, c) <- Converters.all) {
print(s"$converterName... ")
for (data <- TestData.sites) {
val input = createInput(c, data._2)
runXTimes(c, input, x)
}
println("done")
}
}
}
/*
Converter,1k,2k,4k,8k,64k
JSON,4365,8437,16771,35164,270175
ScalaPB,2176,3936,7475,14822,133119
Java PB,3173,6393,10123,21379,209716
Java Thrift,3657,6805,13074,27667,261673
Scrooge,3572,6506,12050,25036,233895
Serializable,13156,21203,36457,79045,652942
Pickles,53991,83601,220440,589888,4162785
Boopickle,5451,10628,17533,29765,225717
Chill,7202,9783,15130,27338,207871
*/
object SiteSerializationPerfTestApp extends App with BasePerfTest[Site, Array[Byte]] {
override def createInput(converter: MyConverter, site: Site): Site = site
override def convert(converter: MyConverter, input: Site): Array[Byte] = converter.toByteArray(input)
doTest()
}
/*
Converter,1k,2k,4k,8k,64k
JSON,7670,12964,24804,51578,384623
ScalaPB,2335,4576,7326,14754,128730
Java PB,3504,6076,10269,19792,168952
Java Thrift,3451,5812,10048,20693,176020
Scrooge,3640,6522,12740,25081,230556
Serializable,61455,84196,102870,126839,575232
Pickles,40337,63840,165109,446043,3201348
Boopickle,2848,5017,8454,15962,97270
Chill,6675,9654,14770,25261,193136
*/
object SiteDeserializationPerfTestApp extends App with BasePerfTest[Array[Byte], Site] {
override def createInput(converter: MyConverter, site: Site): Array[Byte] = converter.toByteArray(site)
override def convert(converter: MyConverter, input: Array[Byte]): Site = converter.fromByteArray(input)
doTest()
}
/*
Converter,1k,2k,4k,8k,64k
JSON,9192,17366,34574,76571,701055
ScalaPB,2194,4542,8618,17011,170413
Java PB,3110,6390,11922,25144,283493
Java Thrift,4357,9180,17560,37924,405784
Scrooge,4842,10226,19922,42428,423060
Serializable,16957,31195,68399,160541,1492595
Pickles,47793,83754,236829,648561,4936980
Boopickle,16867,32278,62663,135614,1379687
Chill,3704,7098,15025,33376,326856
*/
object EventsSerializationPerfTestApp extends App with BasePerfTest[Seq[SiteEvent], Unit] {
override def createInput(converter: MyConverter, site: Site): Seq[SiteEvent] = {
EventProcessor.unapply(site).map(_.event)
}
override def convert(converter: MyConverter, input: Seq[SiteEvent]): Unit = {
for (e <- input) {
converter.toByteArray(e)
}
}
doTest()
}
/*
Converter,1k,2k,4k,8k,64k
JSON,12125,23012,45171,98008,880806
ScalaPB,3394,6644,12681,26589,251012
Java PB,2690,5550,10564,20359,214071
Java Thrift,3556,6974,13436,29135,281339
Scrooge,3911,7678,15867,33832,331989
Serializable,78081,138535,323729,774177,6725015
Pickles,34623,61638,169895,462075,3522794
Boopickle,4828,9941,18158,38296,389896
Chill,4770,9203,18638,38146,382506
*/
object EventsDeserializationPerfTestApp extends App with BasePerfTest[Seq[(Class[_], Array[Byte])], Unit] {
override def createInput(converter: MyConverter, site: Site): Seq[(Class[_], Array[Byte])] = {
EventProcessor.unapply(site).map(_.event).map(e => e.getClass -> converter.toByteArray(e))
}
override def convert(converter: MyConverter, input: Seq[(Class[_], Array[Byte])]): Unit = {
for ((clazz, bytes) <- input) {
converter.siteEventFromByteArray(clazz, bytes)
}
}
doTest()
}
| dkomanov/scala-serialization | scala-serialization/src/test/scala/com/komanov/serialization/converters/BasePerfTest.scala | Scala | mit | 4,551 |
/*
* Copyright (c) 2012, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.queue.util
import org.apache.commons.mail.{MultiPartEmail, EmailAttachment}
import java.io.{FileReader, File}
import javax.mail.internet.InternetAddress
import scala.collection.JavaConversions._
/**
* Encapsulates a message to be sent over email.
*/
class EmailMessage extends Logging {
var from: String = _
var to: Seq[String] = Nil
var cc: Seq[String] = Nil
var bcc: Seq[String] = Nil
var subject: String = _
var body: String = _
var attachments: Seq[File] = Nil
/**
* Sends the email and throws an exception if the email can't be sent.
* @param settings The server settings for the email.
*/
def send(settings: EmailSettings) = {
val email = new MultiPartEmail
email.setHostName(settings.host)
email.setSmtpPort(settings.port)
email.setTLS(settings.tls)
if (settings.ssl) {
email.setSSL(true)
email.setSslSmtpPort(settings.port.toString)
}
if (settings.username != null && (settings.password != null || settings.passwordFile != null)) {
val password = {
if (settings.passwordFile != null) {
val reader = new FileReader(settings.passwordFile)
try {
org.apache.commons.io.IOUtils.toString(reader).replaceAll("\\\\r|\\\\n", "")
} finally {
org.apache.commons.io.IOUtils.closeQuietly(reader)
}
} else {
settings.password
}
}
email.setAuthentication(settings.username, password)
}
email.setFrom(this.from)
if (this.subject != null)
email.setSubject(this.subject)
if (this.body != null)
email.setMsg(this.body)
if (this.to.size > 0)
email.setTo(convert(this.to))
if (this.cc.size > 0)
email.setCc(convert(this.cc))
if (this.bcc.size > 0)
email.setBcc(convert(this.bcc))
for (file <- this.attachments) {
val attachment = new EmailAttachment
attachment.setDisposition(EmailAttachment.ATTACHMENT)
attachment.setPath(file.getAbsolutePath)
attachment.setDescription(file.getAbsolutePath)
attachment.setName(file.getName)
email.attach(attachment)
}
email.send
}
/**
* Tries twice 30 seconds apart to send the email. Then logs the message if it can't be sent.
* @param settings The server settings for the email.
*/
def trySend(settings: EmailSettings) = {
try {
Retry.attempt(() => send(settings), .5)
} catch {
case e => logger.error("Error sending message: %n%s".format(this.toString), e)
}
}
/**
* Converts the email addresses to a collection of InternetAddress which can bypass client side validation,
* specifically that the domain is specified.
* @param addresses Seq of email addresses.
* @return java.util.List of InternetAddress'es
*/
private def convert(addresses: Seq[String]): java.util.List[InternetAddress] = {
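    // strict = false: addresses without a domain part (e.g. a bare local alias) are accepted
    // here instead of being rejected by client-side parsing.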
addresses.map(address => new InternetAddress(address, false))
}
override def toString = {
"""
|From: %s
|To: %s
|Cc: %s
|Bcc: %s
|Subject: %s
|
|%s
|
|Attachments:
|%s
|""".stripMargin.format(
this.from, this.to.mkString(", "),
this.cc.mkString(", "), this.bcc.mkString(", "),
this.subject, this.body,
this.attachments.map(_.getAbsolutePath).mkString("%n".format()))
}
}
| iontorrent/Torrent-Variant-Caller-stable | public/scala/src/org/broadinstitute/sting/queue/util/EmailMessage.scala | Scala | mit | 4,493 |
package dwaspada.thedaam.application
sealed trait BaseCommandBus {
type H <: CommandHandler[Command]
}
object CommandBus extends BaseCommandBus {
def handle(command: Command): Unit = {
val handlerName = getHandlerName(command)
val handlerInstance: H = getClassInstance(handlerName)
handlerInstance.handle(command)
}
def getHandlerName(command: Command): String = {
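    // e.g. a hypothetical "com.example.RegisterUserCommand" resolves to "com.example.RegisterUserHandler";
    // this assumes the command's fully-qualified class name contains "Command" exactly once.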
val commandClass = command.getClass.getName
val commandName: String = commandClass.split("Command")(0)
commandName + "Handler"
}
/**
* Get handler instance of the command
*
* @param handler Name of the handler
* @return
*/
def getClassInstance(handler: String): H = {
Class.forName(handler).newInstance.asInstanceOf[H]
}
}
| dewey92/commuterline-ddd | src/main/scala/dwaspada/thedaam/application/CommandBus.scala | Scala | mit | 758 |
package com.example
import akka.actor.{ActorSystem, Props}
import akka.io.IO
import spray.can.Http
object Boot extends App {
// we need an ActorSystem to host our application in
implicit val system = ActorSystem("on-spray-can")
// create and start our service actor
val service = system.actorOf(Props[MyServiceActor], "demo-service")
// start a new HTTP server on port 8080 with our service actor as the handler
IO(Http) ! Http.Bind(service, interface = "localhost", port = 8080)
} | grinchy/farkle | src/main/scala/com/example/Boot.scala | Scala | apache-2.0 | 498 |
package org.eobjects.analyzer.result.html
import scala.xml.XML
/**
* A body element which wraps several other body elements in a div
*/
class CompositeBodyElement(cssClassName: String, children: Seq[BodyElement]) extends BodyElement {
override def toHtml(context: HtmlRenderingContext): String = {
val innerHtml = children.map(_.toHtml(context)).mkString("");
return "<div class=\\"" + cssClassName + "\\">" + innerHtml + "</div>";
}
} | datacleaner/AnalyzerBeans | components/html-rendering/src/main/scala/org/eobjects/analyzer/result/html/CompositeBodyElement.scala | Scala | lgpl-3.0 | 450 |
package concrete.constraint
import concrete._
import concrete.util.Interval
import scala.annotation.tailrec
trait FixPoint {
@annotation.tailrec
final def fixPoint(ps: ProblemState, shave: ProblemState => Outcome): Outcome = {
shave(ps) match {
case c: Contradiction =>
c
case ns: ProblemState =>
if (ns.sameDomains(ps)) {
ns
} else {
fixPoint(ns, shave)
}
}
}
//
// def fixPointM(ps: ProblemState, shavers: IndexedSeq[ProblemState => Outcome]): Outcome = {
// fixPoint(ps, shavers.indices, (ps, i) => shavers(i)(ps))
// }
//
def fixPoint(ps: ProblemState, range: Range, shave: (ProblemState, Int) => Outcome): Outcome = {
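    // Cycles through `range` repeatedly, shaving one index at a time, and stops once a full
    // pass produces no change (i.e. we arrive back at the last index that modified the state)
    // or the state becomes a Contradiction.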
if (range.isEmpty) ps
else {
val it = Iterator.continually(range).flatten
var i = it.next()
var lastModified = i
var state = shave(ps, i)
i = it.next()
while (i != lastModified && state.isState) {
val ns = shave(state.toState, i)
if (ns ne state) {
lastModified = i
state = ns
}
i = it.next()
}
state
}
}
}
trait OpsFixPoint extends Constraint {
def domOps(doms: Array[Domain], pos: Int): Domain
def fixPoint(ps: ProblemState): Outcome = {
val doms = Array.tabulate(arity)(p => ps.dom(scope(p)))
@tailrec
def fixPoint(i: Int, last: Int): Option[Int] = {
if (i < 0) {
fixPoint(doms.length - 1, last)
} else {
// print(s"${doms.toSeq}, revising $i: ")
val d = domOps(doms, i)
if (d eq doms(i)) {
if (i == last) {
//println("End")
None
} else {
fixPoint(i - 1, last)
}
} else if (d.isEmpty) {
// println("Empty domain")
Some(i)
} else {
doms(i) = d
fixPoint(i - 1, i)
}
}
}
fixPoint(doms.length - 1, 0) match {
case Some(i) => Contradiction(Seq(scope(i)))
case None => ps.fold(0 until arity)((ps, p) => ps.updateDomNonEmpty(scope(p), doms(p)))
}
}
}
trait ItvFixPoint extends OpsFixPoint {
override def domOps(doms: Array[Domain], pos: Int): Domain = {
itvOps(doms, pos).map(doms(pos) & _).getOrElse(EmptyIntDomain)
}
def itvOps(doms: Array[Domain], i: Int): Option[Interval]
}
trait ItvArrayFixPoint extends ItvFixPoint {
def ops: Array[Array[Domain] => Option[Interval]]
def itvOps(doms: Array[Domain], i: Int): Option[Interval] = ops(i)(doms)
} | concrete-cp/concrete | src/main/scala/concrete/constraint/FixPoint.scala | Scala | lgpl-2.1 | 2,516 |
package hu.gansperger.neptunapi.constants
object Request {
val requestType = (typ : String) => ("RequestType", typ)
  val gridId = (id : String) => ("GridID", id)
val pageIndex = (page : Int) => ("pageindex", page.toString)
val pageSize = (pagesize : Int) => ("pagesize", pagesize.toString)
val sort1 = (sort : String) => ("sort1", sort)
val sort2 = (sort : String) => ("sort2", sort)
val fixedHeader = (header : Boolean) => ("fixedheader", header.toString)
val searchCol = (col : String) => ("searchcol", col)
val searchText = (text : String) => ("searchtext", text)
val searchExp = (expanded : Boolean) => ("searchexpanded", expanded.toString)
val allowSubRows = (allow : Boolean) => ("allsubrowsexpanded", allow.toString)
}
| qwe2/neptun-api | src/main/scala/hu/gansperger/neptunapi/constants/Request.scala | Scala | mit | 808 |
// Copyright (c) 2014, Sam Thomson
package edu.cmu.lti.nlp.amr
import scala.annotation.tailrec
object CycleTester {
/**
Takes a directed graph, as a set of nodes and a map from node to its out-adjacent nodes,
and determines whether the graph contains a cycle.
*/
@tailrec
def hasCycle[T](nodes: Traversable[T], outgoingEdges: Map[T, Traversable[T]]): Boolean = {
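        // Strategy: repeatedly strip nodes with no outgoing edges; if no such node can be found
        // while the adjacency map is non-empty, the remaining graph must contain a cycle.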
if (outgoingEdges.isEmpty) {
false
} else {
            val oFirstLeaf: Option[T] = nodes.toIterator.find(v => outgoingEdges.getOrElse(v, Nil).isEmpty)
oFirstLeaf match {
case None => true
case Some(node) => {
val removed = (outgoingEdges - node) map { case (k, v) => (k, v.toSet - node) }
hasCycle(nodes.toSet - node, removed)
}
}
}
}
def main(args: Array[String]) {
val graphA = Map(
1 -> Set(2),
2 -> Set(3),
3 -> Set(1)
)
println(hasCycle(1 to 3, graphA) + " should be true")
val graphB = Map(
1 -> Set(2, 3),
2 -> Set(3)
)
println(hasCycle(1 to 3, graphB) + " should be false")
val graphC = Map(
1 -> Set(2),
2 -> Set(3, 4),
4 -> Set(5, 6),
5 -> Set(6),
6 -> Set(3)
)
println(hasCycle(1 to 6, graphC) + " should be false")
val graphD = Map(
1 -> Set(2),
2 -> Set(3, 4),
4 -> Set(5),
5 -> Set(6),
6 -> Set(3, 4)
)
println(hasCycle(1 to 6, graphD) + " should be true")
}
}
| keenon/jamr | src/CycleTester.scala | Scala | bsd-2-clause | 1,485 |
package com.github.tminglei.slickpg
package utils
import slick.driver.{PostgresDriver, JdbcTypesComponent}
import slick.profile.RelationalProfile.ColumnOption.Length
import scala.reflect.ClassTag
import java.sql.{PreparedStatement, ResultSet}
trait PgCommonJdbcTypes extends JdbcTypesComponent { driver: PostgresDriver =>
class GenericJdbcType[T](val sqlTypeName: String,
fnFromString: (String => T),
fnToString: (T => String) = ((r: T) => r.toString),
val sqlType: Int = java.sql.Types.OTHER,
zero: T = null.asInstanceOf[T],
override val hasLiteralForm: Boolean = false)(
implicit override val classTag: ClassTag[T]) extends DriverJdbcType[T] {
override def sqlTypeName(size: Option[Length]): String = sqlTypeName
override def getValue(r: ResultSet, idx: Int): T = {
val value = r.getString(idx)
if (r.wasNull) zero else fnFromString(value)
}
override def setValue(v: T, p: PreparedStatement, idx: Int): Unit = p.setObject(idx, mkPgObject(v))
override def updateValue(v: T, r: ResultSet, idx: Int): Unit = r.updateObject(idx, mkPgObject(v))
override def valueToSQLLiteral(v: T) = if(v == null) "NULL" else s"'${fnToString(v)}'"
///
private def mkPgObject(v: T) = mkPGobject(sqlTypeName, if(v == null) null else fnToString(v))
}
}
| bearrito/slick-pg | core/src/main/scala/com/github/tminglei/slickpg/utils/PgCommonJdbcTypes.scala | Scala | bsd-2-clause | 1,441 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import org.scalatest.Matchers.the
import org.apache.spark.TestUtils.{assertNotSpilled, assertSpilled}
import org.apache.spark.sql.catalyst.optimizer.TransposeWindow
import org.apache.spark.sql.execution.exchange.Exchange
import org.apache.spark.sql.expressions.{MutableAggregationBuffer, UserDefinedAggregateFunction, Window}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
/**
* Window function testing for DataFrame API.
*/
class DataFrameWindowFunctionsSuite extends QueryTest with SharedSQLContext {
import testImplicits._
test("reuse window partitionBy") {
val df = Seq((1, "1"), (2, "2"), (1, "1"), (2, "2")).toDF("key", "value")
val w = Window.partitionBy("key").orderBy("value")
checkAnswer(
df.select(
lead("key", 1).over(w),
lead("value", 1).over(w)),
Row(1, "1") :: Row(2, "2") :: Row(null, null) :: Row(null, null) :: Nil)
}
test("reuse window orderBy") {
val df = Seq((1, "1"), (2, "2"), (1, "1"), (2, "2")).toDF("key", "value")
val w = Window.orderBy("value").partitionBy("key")
checkAnswer(
df.select(
lead("key", 1).over(w),
lead("value", 1).over(w)),
Row(1, "1") :: Row(2, "2") :: Row(null, null) :: Row(null, null) :: Nil)
}
test("rank functions in unspecific window") {
val df = Seq((1, "1"), (2, "2"), (1, "2"), (2, "2")).toDF("key", "value")
df.createOrReplaceTempView("window_table")
checkAnswer(
df.select(
$"key",
max("key").over(Window.partitionBy("value").orderBy("key")),
min("key").over(Window.partitionBy("value").orderBy("key")),
mean("key").over(Window.partitionBy("value").orderBy("key")),
count("key").over(Window.partitionBy("value").orderBy("key")),
sum("key").over(Window.partitionBy("value").orderBy("key")),
ntile(2).over(Window.partitionBy("value").orderBy("key")),
row_number().over(Window.partitionBy("value").orderBy("key")),
dense_rank().over(Window.partitionBy("value").orderBy("key")),
rank().over(Window.partitionBy("value").orderBy("key")),
cume_dist().over(Window.partitionBy("value").orderBy("key")),
percent_rank().over(Window.partitionBy("value").orderBy("key"))),
Row(1, 1, 1, 1.0d, 1, 1, 1, 1, 1, 1, 1.0d, 0.0d) ::
Row(1, 1, 1, 1.0d, 1, 1, 1, 1, 1, 1, 1.0d / 3.0d, 0.0d) ::
Row(2, 2, 1, 5.0d / 3.0d, 3, 5, 1, 2, 2, 2, 1.0d, 0.5d) ::
Row(2, 2, 1, 5.0d / 3.0d, 3, 5, 2, 3, 2, 2, 1.0d, 0.5d) :: Nil)
}
test("window function should fail if order by clause is not specified") {
val df = Seq((1, "1"), (2, "2"), (1, "2"), (2, "2")).toDF("key", "value")
val e = intercept[AnalysisException](
// Here we missed .orderBy("key")!
df.select(row_number().over(Window.partitionBy("value"))).collect())
assert(e.message.contains("requires window to be ordered"))
}
test("corr, covar_pop, stddev_pop functions in specific window") {
val df = Seq(
("a", "p1", 10.0, 20.0),
("b", "p1", 20.0, 10.0),
("c", "p2", 20.0, 20.0),
("d", "p2", 20.0, 20.0),
("e", "p3", 0.0, 0.0),
("f", "p3", 6.0, 12.0),
("g", "p3", 6.0, 12.0),
("h", "p3", 8.0, 16.0),
("i", "p4", 5.0, 5.0)).toDF("key", "partitionId", "value1", "value2")
checkAnswer(
df.select(
$"key",
corr("value1", "value2").over(Window.partitionBy("partitionId")
.orderBy("key").rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)),
covar_pop("value1", "value2")
.over(Window.partitionBy("partitionId")
.orderBy("key").rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)),
var_pop("value1")
.over(Window.partitionBy("partitionId")
.orderBy("key").rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)),
stddev_pop("value1")
.over(Window.partitionBy("partitionId")
.orderBy("key").rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)),
var_pop("value2")
.over(Window.partitionBy("partitionId")
.orderBy("key").rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)),
stddev_pop("value2")
.over(Window.partitionBy("partitionId")
.orderBy("key").rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing))),
// As stddev_pop(expr) = sqrt(var_pop(expr))
// the "stddev_pop" column can be calculated from the "var_pop" column.
//
// As corr(expr1, expr2) = covar_pop(expr1, expr2) / (stddev_pop(expr1) * stddev_pop(expr2))
// the "corr" column can be calculated from the "covar_pop" and the two "stddev_pop" columns.
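      // Worked check for partition "p1" (added here for clarity, not in the original comment):
      // value1 = {10, 20} and value2 = {20, 10}, so var_pop = 25 for both columns,
      // stddev_pop = sqrt(25) = 5, covar_pop = -25, and corr = -25 / (5 * 5) = -1.0,
      // which matches the first two expected rows below.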
Seq(
Row("a", -1.0, -25.0, 25.0, 5.0, 25.0, 5.0),
Row("b", -1.0, -25.0, 25.0, 5.0, 25.0, 5.0),
Row("c", null, 0.0, 0.0, 0.0, 0.0, 0.0),
Row("d", null, 0.0, 0.0, 0.0, 0.0, 0.0),
Row("e", 1.0, 18.0, 9.0, 3.0, 36.0, 6.0),
Row("f", 1.0, 18.0, 9.0, 3.0, 36.0, 6.0),
Row("g", 1.0, 18.0, 9.0, 3.0, 36.0, 6.0),
Row("h", 1.0, 18.0, 9.0, 3.0, 36.0, 6.0),
Row("i", Double.NaN, 0.0, 0.0, 0.0, 0.0, 0.0)))
}
test("covar_samp, var_samp (variance), stddev_samp (stddev) functions in specific window") {
val df = Seq(
("a", "p1", 10.0, 20.0),
("b", "p1", 20.0, 10.0),
("c", "p2", 20.0, 20.0),
("d", "p2", 20.0, 20.0),
("e", "p3", 0.0, 0.0),
("f", "p3", 6.0, 12.0),
("g", "p3", 6.0, 12.0),
("h", "p3", 8.0, 16.0),
("i", "p4", 5.0, 5.0)).toDF("key", "partitionId", "value1", "value2")
checkAnswer(
df.select(
$"key",
covar_samp("value1", "value2").over(Window.partitionBy("partitionId")
.orderBy("key").rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)),
var_samp("value1").over(Window.partitionBy("partitionId")
.orderBy("key").rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)),
variance("value1").over(Window.partitionBy("partitionId")
.orderBy("key").rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)),
stddev_samp("value1").over(Window.partitionBy("partitionId")
.orderBy("key").rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)),
stddev("value1").over(Window.partitionBy("partitionId")
.orderBy("key").rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing))
),
Seq(
Row("a", -50.0, 50.0, 50.0, 7.0710678118654755, 7.0710678118654755),
Row("b", -50.0, 50.0, 50.0, 7.0710678118654755, 7.0710678118654755),
Row("c", 0.0, 0.0, 0.0, 0.0, 0.0),
Row("d", 0.0, 0.0, 0.0, 0.0, 0.0),
Row("e", 24.0, 12.0, 12.0, 3.4641016151377544, 3.4641016151377544),
Row("f", 24.0, 12.0, 12.0, 3.4641016151377544, 3.4641016151377544),
Row("g", 24.0, 12.0, 12.0, 3.4641016151377544, 3.4641016151377544),
Row("h", 24.0, 12.0, 12.0, 3.4641016151377544, 3.4641016151377544),
Row("i", Double.NaN, Double.NaN, Double.NaN, Double.NaN, Double.NaN)))
}
test("collect_list in ascending ordered window") {
val df = Seq(
("a", "p1", "1"),
("b", "p1", "2"),
("c", "p1", "2"),
("d", "p1", null),
("e", "p1", "3"),
("f", "p2", "10"),
("g", "p2", "11"),
("h", "p3", "20"),
("i", "p4", null)).toDF("key", "partition", "value")
checkAnswer(
df.select(
$"key",
sort_array(
collect_list("value").over(Window.partitionBy($"partition").orderBy($"value")
.rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)))),
Seq(
Row("a", Array("1", "2", "2", "3")),
Row("b", Array("1", "2", "2", "3")),
Row("c", Array("1", "2", "2", "3")),
Row("d", Array("1", "2", "2", "3")),
Row("e", Array("1", "2", "2", "3")),
Row("f", Array("10", "11")),
Row("g", Array("10", "11")),
Row("h", Array("20")),
Row("i", Array())))
}
test("collect_list in descending ordered window") {
val df = Seq(
("a", "p1", "1"),
("b", "p1", "2"),
("c", "p1", "2"),
("d", "p1", null),
("e", "p1", "3"),
("f", "p2", "10"),
("g", "p2", "11"),
("h", "p3", "20"),
("i", "p4", null)).toDF("key", "partition", "value")
checkAnswer(
df.select(
$"key",
sort_array(
collect_list("value").over(Window.partitionBy($"partition").orderBy($"value".desc)
.rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)))),
Seq(
Row("a", Array("1", "2", "2", "3")),
Row("b", Array("1", "2", "2", "3")),
Row("c", Array("1", "2", "2", "3")),
Row("d", Array("1", "2", "2", "3")),
Row("e", Array("1", "2", "2", "3")),
Row("f", Array("10", "11")),
Row("g", Array("10", "11")),
Row("h", Array("20")),
Row("i", Array())))
}
test("collect_set in window") {
val df = Seq(
("a", "p1", "1"),
("b", "p1", "2"),
("c", "p1", "2"),
("d", "p1", "3"),
("e", "p1", "3"),
("f", "p2", "10"),
("g", "p2", "11"),
("h", "p3", "20")).toDF("key", "partition", "value")
checkAnswer(
df.select(
$"key",
sort_array(
collect_set("value").over(Window.partitionBy($"partition").orderBy($"value")
.rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)))),
Seq(
Row("a", Array("1", "2", "3")),
Row("b", Array("1", "2", "3")),
Row("c", Array("1", "2", "3")),
Row("d", Array("1", "2", "3")),
Row("e", Array("1", "2", "3")),
Row("f", Array("10", "11")),
Row("g", Array("10", "11")),
Row("h", Array("20"))))
}
test("skewness and kurtosis functions in window") {
val df = Seq(
("a", "p1", 1.0),
("b", "p1", 1.0),
("c", "p1", 2.0),
("d", "p1", 2.0),
("e", "p1", 3.0),
("f", "p1", 3.0),
("g", "p1", 3.0),
("h", "p2", 1.0),
("i", "p2", 2.0),
("j", "p2", 5.0)).toDF("key", "partition", "value")
checkAnswer(
df.select(
$"key",
skewness("value").over(Window.partitionBy("partition").orderBy($"key")
.rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)),
kurtosis("value").over(Window.partitionBy("partition").orderBy($"key")
.rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing))),
// results are checked by scipy.stats.skew() and scipy.stats.kurtosis()
Seq(
Row("a", -0.27238010581457267, -1.506920415224914),
Row("b", -0.27238010581457267, -1.506920415224914),
Row("c", -0.27238010581457267, -1.506920415224914),
Row("d", -0.27238010581457267, -1.506920415224914),
Row("e", -0.27238010581457267, -1.506920415224914),
Row("f", -0.27238010581457267, -1.506920415224914),
Row("g", -0.27238010581457267, -1.506920415224914),
Row("h", 0.5280049792181881, -1.5000000000000013),
Row("i", 0.5280049792181881, -1.5000000000000013),
Row("j", 0.5280049792181881, -1.5000000000000013)))
}
test("aggregation function on invalid column") {
val df = Seq((1, "1")).toDF("key", "value")
val e = intercept[AnalysisException](
df.select($"key", count("invalid").over()))
assert(e.message.contains("cannot resolve '`invalid`' given input columns: [key, value]"))
}
test("numerical aggregate functions on string column") {
val df = Seq((1, "a", "b")).toDF("key", "value1", "value2")
checkAnswer(
df.select($"key",
var_pop("value1").over(),
variance("value1").over(),
stddev_pop("value1").over(),
stddev("value1").over(),
sum("value1").over(),
mean("value1").over(),
avg("value1").over(),
corr("value1", "value2").over(),
covar_pop("value1", "value2").over(),
covar_samp("value1", "value2").over(),
skewness("value1").over(),
kurtosis("value1").over()),
Seq(Row(1, null, null, null, null, null, null, null, null, null, null, null, null)))
}
test("statistical functions") {
val df = Seq(("a", 1), ("a", 1), ("a", 2), ("a", 2), ("b", 4), ("b", 3), ("b", 2)).
toDF("key", "value")
val window = Window.partitionBy($"key")
checkAnswer(
df.select(
$"key",
var_pop($"value").over(window),
var_samp($"value").over(window),
approx_count_distinct($"value").over(window)),
Seq.fill(4)(Row("a", 1.0d / 4.0d, 1.0d / 3.0d, 2))
++ Seq.fill(3)(Row("b", 2.0d / 3.0d, 1.0d, 3)))
}
test("window function with aggregates") {
val df = Seq(("a", 1), ("a", 1), ("a", 2), ("a", 2), ("b", 4), ("b", 3), ("b", 2)).
toDF("key", "value")
val window = Window.orderBy()
checkAnswer(
df.groupBy($"key")
.agg(
sum($"value"),
sum(sum($"value")).over(window) - sum($"value")),
Seq(Row("a", 6, 9), Row("b", 9, 6)))
}
test("SPARK-16195 empty over spec") {
val df = Seq(("a", 1), ("a", 1), ("a", 2), ("b", 2)).
toDF("key", "value")
df.createOrReplaceTempView("window_table")
checkAnswer(
df.select($"key", $"value", sum($"value").over(), avg($"value").over()),
Seq(Row("a", 1, 6, 1.5), Row("a", 1, 6, 1.5), Row("a", 2, 6, 1.5), Row("b", 2, 6, 1.5)))
checkAnswer(
sql("select key, value, sum(value) over(), avg(value) over() from window_table"),
Seq(Row("a", 1, 6, 1.5), Row("a", 1, 6, 1.5), Row("a", 2, 6, 1.5), Row("b", 2, 6, 1.5)))
}
test("window function with udaf") {
val udaf = new UserDefinedAggregateFunction {
def inputSchema: StructType = new StructType()
.add("a", LongType)
.add("b", LongType)
def bufferSchema: StructType = new StructType()
.add("product", LongType)
def dataType: DataType = LongType
def deterministic: Boolean = true
def initialize(buffer: MutableAggregationBuffer): Unit = {
buffer(0) = 0L
}
def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
if (!(input.isNullAt(0) || input.isNullAt(1))) {
buffer(0) = buffer.getLong(0) + input.getLong(0) * input.getLong(1)
}
}
def merge(buffer1: MutableAggregationBuffer, buffer2: Row): Unit = {
buffer1(0) = buffer1.getLong(0) + buffer2.getLong(0)
}
def evaluate(buffer: Row): Any =
buffer.getLong(0)
}
val df = Seq(
("a", 1, 1),
("a", 1, 5),
("a", 2, 10),
("a", 2, -1),
("b", 4, 7),
("b", 3, 8),
("b", 2, 4))
.toDF("key", "a", "b")
val window = Window.partitionBy($"key").orderBy($"a").rangeBetween(Long.MinValue, 0L)
checkAnswer(
df.select(
$"key",
$"a",
$"b",
udaf($"a", $"b").over(window)),
Seq(
Row("a", 1, 1, 6),
Row("a", 1, 5, 6),
Row("a", 2, 10, 24),
Row("a", 2, -1, 24),
Row("b", 4, 7, 60),
Row("b", 3, 8, 32),
Row("b", 2, 4, 8)))
}
test("null inputs") {
val df = Seq(("a", 1), ("a", 1), ("a", 2), ("a", 2), ("b", 4), ("b", 3), ("b", 2))
.toDF("key", "value")
val window = Window.orderBy()
checkAnswer(
df.select(
$"key",
$"value",
avg(lit(null)).over(window),
sum(lit(null)).over(window)),
Seq(
Row("a", 1, null, null),
Row("a", 1, null, null),
Row("a", 2, null, null),
Row("a", 2, null, null),
Row("b", 4, null, null),
Row("b", 3, null, null),
Row("b", 2, null, null)))
}
test("last/first with ignoreNulls") {
val nullStr: String = null
val df = Seq(
("a", 0, nullStr),
("a", 1, "x"),
("a", 2, "y"),
("a", 3, "z"),
("a", 4, nullStr),
("b", 1, nullStr),
("b", 2, nullStr)).
toDF("key", "order", "value")
val window = Window.partitionBy($"key").orderBy($"order")
checkAnswer(
df.select(
$"key",
$"order",
first($"value").over(window),
first($"value", ignoreNulls = false).over(window),
first($"value", ignoreNulls = true).over(window),
last($"value").over(window),
last($"value", ignoreNulls = false).over(window),
last($"value", ignoreNulls = true).over(window)),
Seq(
Row("a", 0, null, null, null, null, null, null),
Row("a", 1, null, null, "x", "x", "x", "x"),
Row("a", 2, null, null, "x", "y", "y", "y"),
Row("a", 3, null, null, "x", "z", "z", "z"),
Row("a", 4, null, null, "x", null, null, "z"),
Row("b", 1, null, null, null, null, null, null),
Row("b", 2, null, null, null, null, null, null)))
}
test("last/first on descending ordered window") {
val nullStr: String = null
val df = Seq(
("a", 0, nullStr),
("a", 1, "x"),
("a", 2, "y"),
("a", 3, "z"),
("a", 4, "v"),
("b", 1, "k"),
("b", 2, "l"),
("b", 3, nullStr)).
toDF("key", "order", "value")
val window = Window.partitionBy($"key").orderBy($"order".desc)
checkAnswer(
df.select(
$"key",
$"order",
first($"value").over(window),
first($"value", ignoreNulls = false).over(window),
first($"value", ignoreNulls = true).over(window),
last($"value").over(window),
last($"value", ignoreNulls = false).over(window),
last($"value", ignoreNulls = true).over(window)),
Seq(
Row("a", 0, "v", "v", "v", null, null, "x"),
Row("a", 1, "v", "v", "v", "x", "x", "x"),
Row("a", 2, "v", "v", "v", "y", "y", "y"),
Row("a", 3, "v", "v", "v", "z", "z", "z"),
Row("a", 4, "v", "v", "v", "v", "v", "v"),
Row("b", 1, null, null, "l", "k", "k", "k"),
Row("b", 2, null, null, "l", "l", "l", "l"),
Row("b", 3, null, null, null, null, null, null)))
}
test("SPARK-12989 ExtractWindowExpressions treats alias as regular attribute") {
val src = Seq((0, 3, 5)).toDF("a", "b", "c")
.withColumn("Data", struct("a", "b"))
.drop("a")
.drop("b")
val winSpec = Window.partitionBy("Data.a", "Data.b").orderBy($"c".desc)
val df = src.select($"*", max("c").over(winSpec) as "max")
checkAnswer(df, Row(5, Row(0, 3), 5))
}
test("aggregation and rows between with unbounded + predicate pushdown") {
val df = Seq((1, "1"), (2, "2"), (2, "3"), (1, "3"), (3, "2"), (4, "3")).toDF("key", "value")
df.createOrReplaceTempView("window_table")
val selectList = Seq($"key", $"value",
last("key").over(
Window.partitionBy($"value").orderBy($"key").rowsBetween(0, Long.MaxValue)),
last("key").over(
Window.partitionBy($"value").orderBy($"key").rowsBetween(Long.MinValue, 0)),
last("key").over(Window.partitionBy($"value").orderBy($"key").rowsBetween(-1, 1)))
checkAnswer(
df.select(selectList: _*).where($"value" < "3"),
Seq(Row(1, "1", 1, 1, 1), Row(2, "2", 3, 2, 3), Row(3, "2", 3, 3, 3)))
}
test("aggregation and range between with unbounded + predicate pushdown") {
val df = Seq((5, "1"), (5, "2"), (4, "2"), (6, "2"), (3, "1"), (2, "2")).toDF("key", "value")
df.createOrReplaceTempView("window_table")
val selectList = Seq($"key", $"value",
last("value").over(
Window.partitionBy($"value").orderBy($"key").rangeBetween(-2, -1)).equalTo("2")
.as("last_v"),
avg("key").over(Window.partitionBy("value").orderBy("key").rangeBetween(Long.MinValue, 1))
.as("avg_key1"),
avg("key").over(Window.partitionBy("value").orderBy("key").rangeBetween(0, Long.MaxValue))
.as("avg_key2"),
avg("key").over(Window.partitionBy("value").orderBy("key").rangeBetween(-1, 1))
.as("avg_key3"))
checkAnswer(
df.select(selectList: _*).where($"value" < 2),
Seq(Row(3, "1", null, 3.0, 4.0, 3.0), Row(5, "1", false, 4.0, 5.0, 5.0)))
}
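  // Interpretation of the two SQLConf knobs used below (not stated in the original file):
  // the in-memory threshold is the number of rows the window operator buffers in a plain
  // array before handing rows to an external sorter, and the spill threshold bounds how
  // many rows that sorter holds before spilling to disk.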
test("Window spill with less than the inMemoryThreshold") {
val df = Seq((1, "1"), (2, "2"), (1, "3"), (2, "4")).toDF("key", "value")
val window = Window.partitionBy($"key").orderBy($"value")
withSQLConf(SQLConf.WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD.key -> "2",
SQLConf.WINDOW_EXEC_BUFFER_SPILL_THRESHOLD.key -> "2") {
assertNotSpilled(sparkContext, "select") {
df.select($"key", sum("value").over(window)).collect()
}
}
}
test("Window spill with more than the inMemoryThreshold but less than the spillThreshold") {
val df = Seq((1, "1"), (2, "2"), (1, "3"), (2, "4")).toDF("key", "value")
val window = Window.partitionBy($"key").orderBy($"value")
withSQLConf(SQLConf.WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD.key -> "1",
SQLConf.WINDOW_EXEC_BUFFER_SPILL_THRESHOLD.key -> "2") {
assertNotSpilled(sparkContext, "select") {
df.select($"key", sum("value").over(window)).collect()
}
}
}
test("Window spill with more than the inMemoryThreshold and spillThreshold") {
val df = Seq((1, "1"), (2, "2"), (1, "3"), (2, "4")).toDF("key", "value")
val window = Window.partitionBy($"key").orderBy($"value")
withSQLConf(SQLConf.WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD.key -> "1",
SQLConf.WINDOW_EXEC_BUFFER_SPILL_THRESHOLD.key -> "1") {
assertSpilled(sparkContext, "select") {
df.select($"key", sum("value").over(window)).collect()
}
}
}
test("SPARK-21258: complex object in combination with spilling") {
// Make sure we trigger the spilling path.
withSQLConf(SQLConf.WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD.key -> "1",
SQLConf.WINDOW_EXEC_BUFFER_SPILL_THRESHOLD.key -> "17") {
val sampleSchema = new StructType().
add("f0", StringType).
add("f1", LongType).
add("f2", ArrayType(new StructType().
add("f20", StringType))).
add("f3", ArrayType(new StructType().
add("f30", StringType)))
val w0 = Window.partitionBy("f0").orderBy("f1")
val w1 = w0.rowsBetween(Long.MinValue, Long.MaxValue)
val c0 = first(struct($"f2", $"f3")).over(w0) as "c0"
val c1 = last(struct($"f2", $"f3")).over(w1) as "c1"
val input =
"""{"f1":1497820153720,"f2":[{"f20":"x","f21":0}],"f3":[{"f30":"x","f31":0}]}
|{"f1":1497802179638}
|{"f1":1497802189347}
|{"f1":1497802189593}
|{"f1":1497802189597}
|{"f1":1497802189599}
|{"f1":1497802192103}
|{"f1":1497802193414}
|{"f1":1497802193577}
|{"f1":1497802193709}
|{"f1":1497802202883}
|{"f1":1497802203006}
|{"f1":1497802203743}
|{"f1":1497802203834}
|{"f1":1497802203887}
|{"f1":1497802203893}
|{"f1":1497802203976}
|{"f1":1497820168098}
|""".stripMargin.split("\\n").toSeq
import testImplicits._
assertSpilled(sparkContext, "select") {
spark.read.schema(sampleSchema).json(input.toDS()).select(c0, c1).foreach { _ => () }
}
}
}
test("SPARK-24575: Window functions inside WHERE and HAVING clauses") {
def checkAnalysisError(df: => DataFrame): Unit = {
val thrownException = the[AnalysisException] thrownBy {
df.queryExecution.analyzed
}
assert(thrownException.message.contains("window functions inside WHERE and HAVING clauses"))
}
checkAnalysisError(testData2.select('a).where(rank().over(Window.orderBy('b)) === 1))
checkAnalysisError(testData2.where('b === 2 && rank().over(Window.orderBy('b)) === 1))
checkAnalysisError(
testData2.groupBy('a)
.agg(avg('b).as("avgb"))
.where('a > 'avgb && rank().over(Window.orderBy('a)) === 1))
checkAnalysisError(
testData2.groupBy('a)
.agg(max('b).as("maxb"), sum('b).as("sumb"))
.where(rank().over(Window.orderBy('a)) === 1))
checkAnalysisError(
testData2.groupBy('a)
.agg(max('b).as("maxb"), sum('b).as("sumb"))
.where('sumb === 5 && rank().over(Window.orderBy('a)) === 1))
checkAnalysisError(sql("SELECT a FROM testData2 WHERE RANK() OVER(ORDER BY b) = 1"))
checkAnalysisError(sql("SELECT * FROM testData2 WHERE b = 2 AND RANK() OVER(ORDER BY b) = 1"))
checkAnalysisError(
sql("SELECT * FROM testData2 GROUP BY a HAVING a > AVG(b) AND RANK() OVER(ORDER BY a) = 1"))
checkAnalysisError(
sql("SELECT a, MAX(b), SUM(b) FROM testData2 GROUP BY a HAVING RANK() OVER(ORDER BY a) = 1"))
checkAnalysisError(
sql(
s"""SELECT a, MAX(b)
|FROM testData2
|GROUP BY a
|HAVING SUM(b) = 5 AND RANK() OVER(ORDER BY a) = 1""".stripMargin))
}
test("window functions in multiple selects") {
val df = Seq(
("S1", "P1", 100),
("S1", "P1", 700),
("S2", "P1", 200),
("S2", "P2", 300)
).toDF("sno", "pno", "qty")
Seq(true, false).foreach { transposeWindowEnabled =>
val excludedRules = if (transposeWindowEnabled) "" else TransposeWindow.ruleName
withSQLConf(SQLConf.OPTIMIZER_EXCLUDED_RULES.key -> excludedRules) {
val w1 = Window.partitionBy("sno")
val w2 = Window.partitionBy("sno", "pno")
val select = df.select($"sno", $"pno", $"qty", sum($"qty").over(w2).alias("sum_qty_2"))
.select($"sno", $"pno", $"qty", col("sum_qty_2"), sum("qty").over(w1).alias("sum_qty_1"))
val expectedNumExchanges = if (transposeWindowEnabled) 1 else 2
val actualNumExchanges = select.queryExecution.executedPlan.collect {
case e: Exchange => e
}.length
assert(actualNumExchanges == expectedNumExchanges)
checkAnswer(
select,
Seq(
Row("S1", "P1", 100, 800, 800),
Row("S1", "P1", 700, 800, 800),
Row("S2", "P1", 200, 200, 500),
Row("S2", "P2", 300, 300, 500)))
}
}
}
test("NaN and -0.0 in window partition keys") {
val df = Seq(
(Float.NaN, Double.NaN, 1),
(0.0f/0.0f, 0.0/0.0, 1),
(0.0f, 0.0, 1),
(-0.0f, -0.0, 1)).toDF("f", "d", "i")
val result = df.select($"f", count("i").over(Window.partitionBy("f", "d")))
checkAnswer(result, Seq(
Row(Float.NaN, 2),
Row(Float.NaN, 2),
Row(0.0f, 2),
Row(0.0f, 2)))
}
}
| guoxiaolongzte/spark | sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala | Scala | apache-2.0 | 27,470 |
package collins.softlayer
import collins.power._
import collins.power.management._
import models.Asset
import play.api.{Application, Plugin}
import play.api.libs.json._
import com.twitter.finagle.Service
import com.twitter.finagle.builder.ClientBuilder
import com.twitter.finagle.http.{Http, RequestBuilder, Response}
import com.twitter.util.Future
import org.jboss.netty.buffer.ChannelBuffers
import org.jboss.netty.handler.codec.http.{HttpRequest, HttpResponse, QueryStringEncoder}
import org.jboss.netty.util.CharsetUtil.UTF_8
import scala.util.control.Exception.allCatch
class SoftLayerPlugin(app: Application) extends Plugin with SoftLayer {
type ClientSpec = ClientBuilder.Complete[HttpRequest, HttpResponse]
protected[this] val clientSpec: ClientSpec = ClientBuilder()
.tlsWithoutValidation()
.codec(Http())
.hosts(SOFTLAYER_API_HOST)
.hostConnectionLimit(1)
override def enabled: Boolean = {
SoftLayerConfig.pluginInitialize(app.configuration)
SoftLayerConfig.enabled
}
override def onStart() {
SoftLayerConfig.validateConfig()
}
override def onStop() {
}
override def username = SoftLayerConfig.username
override def password = SoftLayerConfig.password
// start plugin API
override def isSoftLayerAsset(asset: Asset): Boolean = {
asset.tag.startsWith("sl-")
}
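  // SoftLayer-managed assets are tagged "sl-<numeric id>"; extract the numeric part as a Long.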
override def softLayerId(asset: Asset): Option[Long] = isSoftLayerAsset(asset) match {
case true => try {
Some(asset.tag.split("-", 2).last.toLong)
} catch {
      case _: NumberFormatException => None
}
case false => None
}
private[this] val TicketExtractor = "^.* ([0-9]+).*$".r
override def cancelServer(id: Long, reason: String = "No longer needed"): Future[Long] = {
val encoder = new QueryStringEncoder(cancelServerPath(id))
encoder.addParam("attachmentId", id.toString)
encoder.addParam("reason", "No longer needed")
encoder.addParam("content", reason)
val url = softLayerUrl(encoder.toString())
val request = RequestBuilder()
.url(url)
.buildGet();
makeRequest(request) map { r =>
val response = Response(r)
val json = Json.parse(response.contentString)
(json \ "error" ) match {
case JsString(value) => allCatch[Long].opt {
val TicketExtractor(number) = value
number.toLong
}.getOrElse(0L)
case _ =>
(json \ "id") match {
case JsNumber(number) => number.longValue()
case _ => 0L
}
}
} handle {
case e => 0L
}
}
/*
override def powerCycle(e: Asset): PowerStatus = {
doPowerOperation(e, "/SoftLayer_Hardware_Server/%d/powerCycle.json")
}
*/
override def powerSoft(e: Asset): PowerStatus = {
// This does not actually exist at SL
doPowerOperation(e, "/SoftLayer_Hardware_Server/%d/powerSoft.json")
}
override def powerOff(e: Asset): PowerStatus = {
doPowerOperation(e, "/SoftLayer_Hardware_Server/%d/powerOff.json")
}
override def powerOn(e: Asset): PowerStatus = {
doPowerOperation(e, "/SoftLayer_Hardware_Server/%d/powerOn.json")
}
override def powerState(e: Asset): PowerStatus = {
doPowerOperation(e, "/SoftLayer_Hardware_Server/%d/getServerPowerState.json", Some({ s =>
s.replace("\"", "")
}))
}
override def rebootHard(e: Asset): PowerStatus = {
doPowerOperation(e, "/SoftLayer_Hardware_Server/%d/rebootHard.json")
}
override def rebootSoft(e: Asset): PowerStatus = {
doPowerOperation(e, "/SoftLayer_Hardware_Server/%d/rebootSoft.json")
}
override def verify(e: Asset): PowerStatus = {
Future.value(Failure("verify not implemented for softlayer"))
}
override def identify(e: Asset): PowerStatus = {
Future.value(Failure("identify not implemented for softlayer"))
}
override def activateServer(id: Long): Future[Boolean] = {
val url = softLayerUrl("/SoftLayer_Hardware_Server/%d/sparePool.json".format(id))
val query = JsObject(Seq("parameters" -> JsArray(List(JsString("activate")))))
val queryString = Json.stringify(query)
val value = ChannelBuffers.copiedBuffer(queryString, UTF_8)
val request = RequestBuilder()
.url(url)
.setHeader("Content-Type", "application/json")
.setHeader("Content-Length", queryString.length.toString)
.buildPost(value)
makeRequest(request) map { r =>
val response = Response(r)
Json.parse(response.contentString) match {
case JsBoolean(v) => v
case o => false
}
} handle {
case e => false
}
}
override def setNote(id: Long, note: String): Future[Boolean] = {
val url = softLayerUrl("/SoftLayer_Hardware_Server/%d/editObject.json".format(id))
val query = JsObject(Seq("parameters" -> JsArray(List(JsObject(Seq("notes" -> JsString(note)))))))
val queryString = Json.stringify(query)
val value = ChannelBuffers.copiedBuffer(queryString, UTF_8)
val request = RequestBuilder()
.url(url)
.setHeader("Content-Type", "application/json")
.setHeader("Content-Length", queryString.length.toString)
.buildPut(value)
makeRequest(request) map { r =>
true
} handle {
case e => false
}
}
protected def makeRequest(request: HttpRequest): Future[HttpResponse] = {
val client: Service[HttpRequest,HttpResponse] = clientSpec.build()
client(request) ensure {
client.release()
}
}
private def doPowerOperation(e: Asset, url: String, captureFn: Option[String => String] = None): PowerStatus = {
softLayerId(e).map { id =>
val request = RequestBuilder()
.url(softLayerUrl(url.format(id)))
.setHeader("Accept", "application/json")
.buildGet();
makeRequest(request).map { r =>
Response(r).contentString.toLowerCase match {
case rl if rl.contains("at this time") => RateLimit
case err if err.contains("error") => Failure()
case responseString => captureFn match {
case None => Success()
case Some(fn) => Success(fn(responseString))
}
}
} handle {
case e => Failure("IPMI may not be enabled, internal error")
}
}.getOrElse(Future(Failure("Asset can not be managed with SoftLayer API")))
}
}
| Shopify/collins | app/collins/softlayer/SoftLayerPlugin.scala | Scala | apache-2.0 | 6,284 |
package kr.ac.kaist.jsaf.shell
import java.io.{BufferedWriter, File, FileWriter}
import java.text.SimpleDateFormat
import java.util.Calendar
import edu.lu.uni.serval.idempotent.comm.ResultSender
import edu.lu.uni.serval.js.exp.safe.AlarmCollectorByPack
import edu.rice.cs.plt.tuple.{Option => JOption}
import kr.ac.kaist.jsaf.analysis.cfg.CFGBuilder
import kr.ac.kaist.jsaf.analysis.typing.models.DOMBuilder
import kr.ac.kaist.jsaf.analysis.typing.{AddressManager, Config, Helper, InitHeap, Typing, TypingInterface}
import kr.ac.kaist.jsaf.bug_detector.{BugDetector, BugEntry2, BugList2, StrictModeChecker}
import kr.ac.kaist.jsaf.compiler.{Parser, Predefined}
import kr.ac.kaist.jsaf.exceptions.UserError
import kr.ac.kaist.jsaf.nodes.{IRRoot, Program}
import kr.ac.kaist.jsaf.nodes_util.{DOMStatistics, JSFromHTML, NodeRelation, NodeUtil}
import kr.ac.kaist.jsaf.scala_src.nodes.{SProgram, STopLevel}
import kr.ac.kaist.jsaf.scala_src.useful.Lists._
import kr.ac.kaist.jsaf.useful.Pair
import kr.ac.kaist.jsaf.{ProjectProperties, Shell, ShellParameters}
import scala.collection.JavaConversions
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent._
import scala.concurrent.duration._
import scala.sys.process._
/**
* Created by darkrsw on 2016/October/22.
*/
object BugDetectorProxy
{
  // default analysis timeout in seconds (3 minutes)
var timeout = 60 * 3
// class path (should be set by other classes)
var classPathString: String = ""
def detectBugsOnJVM(opt: String, pname: String, commitHash: String, filePath: String, relPath: String): (Int, String) =
{
val arguments = "%s %s %s %s %s %s".format(opt, pname, commitHash, filePath, relPath, AlarmCollectorByPack.CONF_PATH)
val cmd = "java -cp " + classPathString + " -Xmx4g " +
"kr.ac.kaist.jsaf.shell.BugDetectorProxy " + arguments
val stdout = new StringBuilder
val stderr = new StringBuilder
val recorder = ProcessLogger(
      (o: String) => Console.println(o), //if( o.startsWith("OUTPUT:") ) stdout.append(o.replaceFirst("OUTPUT:","")+"\n"),
      (e: String) => {Console.println(e); stderr.append(e+"\n")}
)
val proc = cmd.run(recorder)
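    // Wrap the blocking exitValue() call in a Future so the wait below can be bounded by
    // Await.result; on timeout the child JVM is destroyed and exit code 4 is reported.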
val f = Future(blocking(proc.exitValue()))
val exitCode = try {
Await.result(f, timeout+1 second)
} catch {
case e: TimeoutException =>
Console.err.println(getTimeString() + "Timeout from JVM...")
//killProcess(proc) // to send SIGKILL
proc.destroy()
4 // exitCode = 4
}
(exitCode, stderr.toString())
}
def detectJSBugsOnJVM(pname: String, commitHash: String, filePath: String, relPath: String): (Int, String) = detectBugsOnJVM("jss", pname, commitHash, filePath, relPath)
def detectWebAppBugsOnJVM(pname: String, commitHash: String, filePath: String, relPath: String): (Int, String) = detectBugsOnJVM("webapp", pname, commitHash, filePath, relPath)
///////////////////////////////////////////////////////////////////////////////////////////////
//@deprecated
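  // Pipeline: parse the JavaScript file to an AST, concatenate the modeled (built-in/DOM)
  // ASTs, translate to IR, build the CFG, initialize the heap, run the typing analysis,
  // and finally run the bug detector over the analysis results.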
def detectJSBugs(inFile: File): BugList2 = {
val quiet = true
val option1 = Array[String]("bug-detector", "-dev")
val timeout_opt = if (timeout > 0) {
Array[String]("-timeout", timeout.toString)
} else {
Array[String]()
}
val tokens = option1 ++ timeout_opt ++ Array[String](inFile.getCanonicalPath)
Shell.params = new ShellParameters()
val errorMessage = Shell.params.Set(tokens)
val pred = new Predefined(Shell.params)
Config.setQuietMode
val locclone = Shell.params.opt_LocClone
AddressManager.reset()
val fileName: String = Shell.params.FileNames(0)
val fileNames = JavaConversions.seqAsJavaList(Shell.params.FileNames)
Config.setFileName(fileName)
if (Shell.params.opt_loop) Config.setLoopMode
if (Shell.params.opt_Verbose1) Config.setVerbose(1)
if (Shell.params.opt_Verbose2) Config.setVerbose(2)
if (Shell.params.opt_Verbose3) Config.setVerbose(3)
if (Shell.params.opt_Compare) Config.setCompareMode
// Context-sensitivity for main analysis
var context: Int = -1
context = Config.contextSensitivityMode
// Temporary parameter setting for html and bug-detector
if (Shell.params.command == ShellParameters.CMD_BUG_DETECTOR) {
context = Config.Context_OneCallsiteAndObject
}
context = Config.Context_OneCallsiteAndObject
Config.setContextSensitivityMode(context)
Config.setDefaultUnrollingCount(Shell.params.opt_unrollingCount)
Config.setDefaultForinUnrollingCount(Shell.params.opt_forinunrollingCount)
// Initialize AbsString cache
kr.ac.kaist.jsaf.analysis.typing.domain.AbsString.initCache
// Read a JavaScript file and translate to IR
var program: Program = null
program = Parser.fileToAST(fileNames)
// concatenate modeled ASTs
val SEP = File.separator
val base = ProjectProperties.BASEDIR + SEP
val modeledFiles: List[String] =
((List[String](base + "bin/models/builtin/__builtin__.js")) ++
(if (Config.domMode) List(base + "bin/models/dom/__dom__.js") else List[String]())).filter(f => {
val file = new File(f)
file.exists()
})
// input files for webapp bugdetector
val inputFiles: List[String] = List()
Config.setModeledFiles(Config.getModeledFiles ++ modeledFiles ++ inputFiles)
val modeledASTs: Program = Parser.fileToAST(toJavaList(modeledFiles ++ inputFiles))
program = (modeledASTs, program) match {
case (SProgram(info0, STopLevel(fds0, vds0, body0)), SProgram(info1, STopLevel(fds1, vds1, body1))) =>
SProgram(info1, STopLevel(fds0 ++ fds1, vds0 ++ vds1, body0 ++ body1))
}
val irErrors = Shell.ASTtoIR(fileName, program, JOption.none[String], JOption.none[kr.ac.kaist.jsaf.nodes_util.Coverage])
val irOpt: JOption[IRRoot] = irErrors.first
val program2: Program = irErrors.third // Disambiguated and hoisted and with written
val ir: IRRoot = irOpt.unwrap
val builder = new CFGBuilder(ir)
val cfg = builder.build
val errors = builder.getErrors
if (!(errors.isEmpty)) {
Shell.reportErrors(NodeUtil.getFileName(ir), Shell.flattenErrors(errors), JOption.none[Pair[FileWriter, BufferedWriter]])
}
    // Initialize built-in models
val previousBasicBlocks: Int = cfg.getNodes.size
val init = new InitHeap(cfg)
init.initialize
val presentBasicBlocks = cfg.getNodes.size
// Set the initial state with DOM objects
var jshtml: JSFromHTML = null
if (Config.domMode && jshtml != null) new DOMBuilder(cfg, init, jshtml.getDocument).initialize(quiet)
// Create Typing
var typingInterface: TypingInterface = null
if (Shell.params.command == ShellParameters.CMD_ANALYZE ||
Shell.params.command == ShellParameters.CMD_HTML ||
Shell.params.command == ShellParameters.CMD_BUG_DETECTOR ||
Shell.params.command == ShellParameters.CMD_WEBAPP_BUG_DETECTOR) typingInterface = new Typing(cfg, quiet, locclone)
Config.setTypingInterface(typingInterface)
// Check global variables in initial heap against list of predefined variables.
init.checkPredefined
// Analyze
if (Shell.params.command == ShellParameters.CMD_ANALYZE ||
Shell.params.command == ShellParameters.CMD_PREANALYZE ||
Shell.params.command == ShellParameters.CMD_HTML ||
Shell.params.command == ShellParameters.CMD_HTML_PRE ||
Shell.params.command == ShellParameters.CMD_BUG_DETECTOR ||
Shell.params.command == ShellParameters.CMD_WEBAPP_BUG_DETECTOR) {
typingInterface.analyze(init)
}
// Turn off '-max-loc-count' option
Shell.params.opt_MaxLocCount = 0
// Bug Detector
NodeRelation.set(program2, ir, cfg, quiet)
// Execute Bug Detector
    System.out.println("\n* Bug Detector *")
val detector = new BugDetector(program2, cfg, typingInterface, quiet, irErrors.second)
if(!(Shell.params.command == ShellParameters.CMD_WEBAPP_BUG_DETECTOR))
StrictModeChecker.checkAdvanced(program2, cfg, detector.varManager, detector.stateManager)
val bugStorage = detector.detectBug2
val bugList = bugStorage.getBugList()
//bugList.foreach( x => println(stringfy(x)) )
println("Total #bugs: " + bugList.size)
return bugList
}
// Not necessary anymore.
@deprecated
def init() =
{
// init
AddressManager.init()
Helper.init()
AnalyzeMain.isTimeout = false
}
//@deprecated
def detectWebAppBugs(inFile: File): BugList2 = {
val quiet = true
val option1 = Array[String]("webapp-bug-detector", "-dev")
val timeout_opt = if (timeout > 0) {
Array[String]("-timeout", timeout.toString)
} else {
Array[String]()
}
val tokens = option1 ++ timeout_opt ++ Array[String](inFile.getCanonicalPath)
//val tokens = Array[String]("webapp-bug-detector", "-dev", "-timeout", , inFile.getCanonicalPath)
Shell.params = new ShellParameters()
val errorMessage = Shell.params.Set(tokens)
val pred = new Predefined(Shell.params)
Config.setQuietMode
var locclone = Shell.params.opt_LocClone
// Init Global settings and Singleton Object.
AddressManager.reset()
val fileName: String = Shell.params.FileNames(0)
val fileNames = JavaConversions.seqAsJavaList(Shell.params.FileNames)
Config.setFileName(fileName)
if(Shell.params.opt_loop) Config.setLoopMode
if (Shell.params.opt_Verbose1) Config.setVerbose(1)
if (Shell.params.opt_Verbose2) Config.setVerbose(2)
if (Shell.params.opt_Verbose3) Config.setVerbose(3)
if (Shell.params.opt_Compare) Config.setCompareMode
// Context-sensitivity for main analysis
var context: Int = -1
context = Config.contextSensitivityMode
Config.setContextSensitivityMode(context)
Config.setDefaultUnrollingCount(Shell.params.opt_unrollingCount)
Config.setDefaultForinUnrollingCount(Shell.params.opt_forinunrollingCount)
//if (Shell.params.FileNames.length > 1) throw new UserError("Only one HTML file supported at a time.")
val low = fileName.toLowerCase
if (!(low.endsWith(".html") || low.endsWith(".xhtml") || low.endsWith(".htm"))) throw new UserError("Not an HTML file.")
// DOM mode
Config.setDomMode
if(Shell.params.opt_jQuery) Config.setJQueryMode
if(Shell.params.opt_Domprop) Config.setDOMPropMode
if(Shell.params.opt_disEvent) Config.setDisableEventMode
if(Shell.params.opt_loop) Config.setLoopMode
Config.setDefaultForinUnrollingCount(1)
// loop sensitivity
Config.setLoopSensitiveMode(true)
// call context depth : 10
Config.setContextSensitivityMode(Config.Context_Loop)
Config.setContextSensitivityDepth(10)
// location cloning
Shell.params.opt_LocClone = true
locclone = true
// dom property like 'innerHTML' update mode
Shell.params.opt_Domprop
Config.setDOMPropMode
// use set domain with 32 size
Shell.params.opt_MaxStrSetSize = 32
if(Shell.params.opt_disEvent) {
Config.setDisableEventMode
}
// DOM mode
Config.setDomMode
// Initialize AbsString cache
kr.ac.kaist.jsaf.analysis.typing.domain.AbsString.initCache
// Read a JavaScript file and translate to IR
var program: Program = null
var jshtml: JSFromHTML = null
// DOMAPI statistics
if(Shell.params.opt_Domstat){
DOMStatistics.setInputFile(Shell.params.opt_Domstat_in)
DOMStatistics.setOutputFile(Shell.params.opt_Domstat_out)
}
jshtml = new JSFromHTML(fileName)
// Parse JavaScript code in the target html file
program = jshtml.parseScripts
// concatenate modeled ASTs
val SEP = File.separator
val base = ProjectProperties.BASEDIR + SEP
val modeledFiles: List[String] =
((List[String](base + "bin/models/builtin/__builtin__.js")) ++
(if(Config.domMode) List(base + "bin/models/dom/__dom__.js") else List[String]())).filter(f => {
val file = new File(f)
file.exists()
})
// input files for webapp bugdetector
val inputFiles: List[String] =
if(Shell.params.command == ShellParameters.CMD_WEBAPP_BUG_DETECTOR) {
(List[String](base + "bin/inputs/__input__.js")).filter(f => {
val file =new File(f);
file.exists();
})
}
else List()
Config.setModeledFiles(Config.getModeledFiles ++ modeledFiles ++ inputFiles)
val modeledASTs: Program = Parser.fileToAST(toJavaList(modeledFiles++inputFiles))
program = (modeledASTs, program) match {
case (SProgram(info0, STopLevel(fds0, vds0, body0)), SProgram(info1, STopLevel(fds1, vds1, body1))) =>
SProgram(info1, STopLevel(fds0 ++ fds1, vds0 ++ vds1, body0 ++ body1))
}
val irErrors = Shell.ASTtoIR(fileName, program, JOption.none[String], JOption.none[kr.ac.kaist.jsaf.nodes_util.Coverage])
val irOpt: JOption[IRRoot] = irErrors.first
val program2: Program = irErrors.third // Disambiguated and hoisted and with written
// Check the translation result
val ir: IRRoot = irOpt.unwrap
// Build CFG
val builder = new CFGBuilder(ir)
val cfg = builder.build
val errors = builder.getErrors
if (!(errors.isEmpty)) {
Shell.reportErrors(NodeUtil.getFileName(ir), Shell.flattenErrors(errors), JOption.none[Pair[FileWriter, BufferedWriter]])
}
    // Initialize built-in models
val previousBasicBlocks: Int = cfg.getNodes.size
val init = new InitHeap(cfg)
init.initialize
val presentBasicBlocks = cfg.getNodes.size
// Set the initial state with DOM objects
if (Config.domMode && jshtml != null) new DOMBuilder(cfg, init, jshtml.getDocument).initialize(quiet)
// Create Typing
var typingInterface: TypingInterface = null
typingInterface = new Typing(cfg, quiet, locclone)
Config.setTypingInterface(typingInterface)
// Check global variables in initial heap against list of predefined variables.
init.checkPredefined
// Analyze
if (Shell.params.command == ShellParameters.CMD_ANALYZE ||
Shell.params.command == ShellParameters.CMD_PREANALYZE ||
Shell.params.command == ShellParameters.CMD_HTML ||
Shell.params.command == ShellParameters.CMD_HTML_PRE ||
Shell.params.command == ShellParameters.CMD_BUG_DETECTOR ||
Shell.params.command == ShellParameters.CMD_WEBAPP_BUG_DETECTOR) {
typingInterface.analyze(init)
}
// Turn off '-max-loc-count' option
Shell.params.opt_MaxLocCount = 0
// Node relation set
NodeRelation.set(program2, ir, cfg, quiet)
// Execute Bug Detector
    System.out.println("\n* Bug Detector *")
val detector = new BugDetector(program2, cfg, typingInterface, quiet, irErrors.second)
//if(!(Shell.params.command == ShellParameters.CMD_WEBAPP_BUG_DETECTOR))
// StrictModeChecker.checkAdvanced(program2, cfg, detector.varManager, detector.stateManager)
val bugStorage = detector.detectBug2
val bugList = bugStorage.getBugList()
//bugList.foreach( x => println(stringfy(x)) )
println("Total #bugs: " + bugList.size)
return bugList
}
@deprecated
private def stringfy(in: BugEntry2): String =
{
// this is temporary. don't use.
val startLoc = "%d:%d".format(in._4.getLine, in._4.getOffset)
val endLoc = "%d:%d".format(in._5.getLine, in._5.getOffset)
val filePath = in._3
// [
val vector = "%d,%s,%s,%s,%d,%s".format(in._2, filePath, startLoc, endLoc, in._6, in._7)
return vector
}
private def vectorize(in: BugEntry2, path: String): String =
{
// BugEntry2 -> (newBId, bugKind, span.getFileNameOnly, span.getBegin, span.getEnd, bugType, fullBugMsg)
val startLoc = "%d:%d".format(in._4.getLine, in._4.getOffset)
val endLoc = "%d:%d".format(in._5.getLine, in._5.getOffset)
val vector = "%s,%s,%s,%s,%s,%s".format(quote(path), quote(in._2.toString),
quote(startLoc), quote(endLoc), quote(in._6.toString), quote(in._7) )
//val vector = path :: in._2.toString :: startLoc :: endLoc :: in._6.toString :: in._7 :: Nil
return vector // -> (path, bug type, startLoc, endLoc, bug group, full msg)
}
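  // CSV-quotes a field: embedded double quotes become apostrophes and CR/LF runs are stripped.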
  private def quote(in: String): String =
  {
    "\"%s\"".format(in.replaceAll("\\\"", "'").replaceAll("(\\r|\\n)+", ""))
  }
def main(args: Array[String]): Unit =
{
if(args.length != 6)
{
Console.err.println("Invalid # of arguments: " + args.length)
Runtime.getRuntime.exit(33)
}
val mode = args(0)
val pname = args(1)
val commit = args(2)
val path = args(3)
val relPath = args(4)
AlarmCollectorByPack.init(args(5))
val targetFile = new File(path)
if( ! targetFile.exists() || ! targetFile.isFile )
{
Console.err.println(path + " does not exist.")
Runtime.getRuntime.exit(2)
}
try {
val buglist = mode match {
case "jss" => detectJSBugs(targetFile)
case "webapp" => detectWebAppBugs(targetFile)
}
if( ! AnalyzeMain.isTimeout )
{
for( a <- buglist )
{
val csv = vectorize(a, relPath)
//Console.println("OUTPUT:"+csv)
ResultSender.sendAlarm(pname, commit, csv)
}
}
else {
Console.err.println(path + ": timeout (" + timeout + ")")
Runtime.getRuntime.exit(4) // timeout
}
} catch {
case e: Throwable => { Console.println("ERROR: " + e.getMessage); e.printStackTrace(); Runtime.getRuntime.exit(3); }
}
Runtime.getRuntime.exit(0)
}
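  // Reads the private "pid" field of the Process implementation via reflection; this only
  // works on UNIX-style java.lang.Process subclasses that actually expose such a field.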
def getPID(proc: Process): Int =
{
val cl = proc.getClass
val field = cl.getDeclaredField("pid")
field.setAccessible(true)
val pidObject = field.get(proc)
return pidObject.asInstanceOf[Int]
}
def killProcess(proc: Process): Int =
{
val pid = getPID(proc)
return Runtime.getRuntime.exec("kill -9 " + pid).waitFor()
}
def getTimeString(): String =
{
val now = Calendar.getInstance().getTime()
val minuteFormat = new SimpleDateFormat("[hh:mm:ss]:")
minuteFormat.format(now)
}
}
| darkrsw/safe | src/main/scala/kr/ac/kaist/jsaf/shell/BugDetectorProxy.scala | Scala | bsd-3-clause | 18,017 |
package chandu0101.scalajs.react.components
import chandu0101.scalajs.react.components.fascades._
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
import org.scalajs.dom.{Event, document, html}
import scala.scalajs.js
import scala.scalajs.js.Dynamic.{global => g}
object GoogleMap {
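  // Appends query parameters to a URL, e.g. parameterizeUrl("http://x", Map("a" -> 1))
  // yields "http://x?a=1", and subsequent parameters are joined with '&'.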
def parameterizeUrl(url: String, parameters: Map[String, Any]): String = {
require(url != null, "Missing argument 'url'.")
require(parameters != null, "Missing argument 'parameters'.")
parameters.foldLeft(url)((base, kv) =>
base ++ {
if (base.contains("?")) "&" else "?"
} ++ kv._1 ++ "=" + kv._2)
}
case class State(mapObjects: Option[(GMap, GInfoWindow)], markers: List[GMarker])
class Backend(t: BackendScope[Props, State]) {
def loadScript(P: Props): Callback =
if (js.isUndefined(g.google) || js.isUndefined(g.google.maps))
Callback {
val script = document.createElement("script").asInstanceOf[html.Script]
script.`type` = "text/javascript"
script.src = parameterizeUrl(P.url, Map("callback" -> "gmapinit"))
document.body.appendChild(script)
g.gmapinit = initialize(P).toJsFn
} else initialize(P)
def initialize(P: Props): Callback =
t.getDOMNode flatMap (e =>
t.modState(
_.copy(mapObjects =
Some((new GMap(e, MapOptions(P.center, P.zoom).toGMapOptions), new GInfoWindow))),
callback = updateMap(P)
))
def updateMap(P: Props): Callback =
t.modState(
S =>
S.mapObjects.fold(S) {
case (gmap, infoWindow) =>
gmap.setCenter(P.center.toGlatlng)
S.markers.foreach(_.setMap(null))
val newMarkers = P.markers.map(prepareMarker(infoWindow, gmap)).toList
S.copy(markers = newMarkers)
}
)
private def prepareMarker(infowindow: GInfoWindow, map: GMap)(m: Marker) = {
val marker = new GMarker(m.toGMarker(map))
if (!m.content.isEmpty) {
new GAddListener(
marker,
"click",
(e: Event) => {
infowindow.setContent(m.content)
infowindow.open(map, marker)
}
)
}
marker
}
def render(P: Props) = <.div(^.height := P.height, ^.width := P.width)
}
case class Props(width: String,
height: String,
center: LatLng,
zoom: Int,
markers: Seq[Marker],
url: String)
val component = ScalaComponent
.builder[Props]("googleMap")
.initialState(State(None, Nil))
.renderBackend[Backend]
.componentWillReceiveProps { c =>
c.backend.updateMap(c.nextProps)
}
.componentDidMount($ => $.backend.loadScript($.props))
.componentWillUnmount($ => Callback($.state.markers.foreach(new GClearInstanceListeners(_))))
.build
/**
*
   * @param width   width of the map
   * @param height  height of the map
   * @param center  center position (lat, lng) of the map
   * @param zoom    zoom level
   * @param markers markers to place on the map
   * @param url     URL from which to load the Google Maps API; defaults to https://maps.googleapis.com/maps/api/js and can be overridden if needed.
* @return
*/
def apply(width: String = "500px",
height: String = "500px",
center: LatLng,
zoom: Int = 4,
markers: List[Marker] = Nil,
url: String = "https://maps.googleapis.com/maps/api/js") =
component(Props(width, height, center, zoom, markers, url))
}
| rleibman/scalajs-react-components | core/src/main/scala/chandu0101/scalajs/react/components/GoogleMap.scala | Scala | apache-2.0 | 3,609 |
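A minimal usage sketch of the component above (not part of the library; the LatLng and Marker field names are assumptions and may differ from the real fascades API):

// Renders a 600x400 map centred on one point with a single clickable marker.
val londonMap = GoogleMap(
  width = "600px",
  height = "400px",
  center = LatLng(51.5074, -0.1278),
  zoom = 10,
  markers = List(Marker(position = LatLng(51.5074, -0.1278), title = "London", content = "<b>London</b>"))
)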
package io.circe.numbers
import io.circe.numbers.testing.{ IntegralString, JsonNumberString }
import java.math.{ BigDecimal, BigInteger }
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import scala.math.{ BigDecimal => SBigDecimal }
import scala.util.Try
class BiggerDecimalSuite extends AnyFlatSpec with ScalaCheckDrivenPropertyChecks {
implicit override val generatorDrivenConfig: PropertyCheckConfiguration = PropertyCheckConfiguration(
minSuccessful = 1000,
sizeRange = 10000
)
private[this] def doubleEqv(x: Double, y: Double): Boolean = java.lang.Double.compare(x, y) == 0
private[this] def trailingZeros(i: BigInt): Int = i.toString.reverse.takeWhile(_ == '0').size
private[this] def significantDigits(i: BigInt): Int = i.toString.size - trailingZeros(i)
"fromDoubleUnsafe(0)" should "equal fromBigDecimal(ZERO) (#348)" in {
assert(BiggerDecimal.fromDoubleUnsafe(0) === BiggerDecimal.fromBigDecimal(BigDecimal.ZERO))
}
"fromDoubleUnsafe" should "round-trip Double values" in forAll { (value: Double) =>
val d = BiggerDecimal.fromDoubleUnsafe(value)
assert(
doubleEqv(d.toDouble, value) && d.toBigDecimal.exists { roundTripped =>
doubleEqv(roundTripped.doubleValue, value)
}
)
}
it should "round-trip negative zero" in {
val d = BiggerDecimal.fromDoubleUnsafe(-0.0)
assert(doubleEqv(d.toDouble, -0.0))
}
"signum" should "agree with BigInteger" in forAll { (value: BigInt) =>
val d = BiggerDecimal.fromBigInteger(value.underlying)
assert(d.signum == value.signum)
}
it should "agree with BigDecimal" in forAll { (value: SBigDecimal) =>
val d = BiggerDecimal.fromBigDecimal(value.underlying)
assert(d.signum == value.signum)
}
it should "agree with Long" in forAll { (value: Long) =>
val d = BiggerDecimal.fromLong(value)
val expected = value.signum
assert(d.signum == expected)
}
it should "agree with Double" in forAll { (value: Double) =>
val d = BiggerDecimal.fromDoubleUnsafe(value)
val expected = value.signum
assert(d.signum == expected)
}
"fromLong" should "round-trip Long values" in forAll { (value: Long) =>
val d = BiggerDecimal.fromLong(value)
assert(d.toBigDecimal.map(_.longValue) === Some(value))
}
"toLong" should "round-trip Long values" in forAll { (value: Long) =>
val d = BiggerDecimal.fromLong(value)
assert(d.toLong === Some(value))
}
"toBigInteger" should "fail on very large values" in {
val Some(d) = BiggerDecimal.parseBiggerDecimal("1e262144")
assert(d.toBigInteger === None)
}
it should "not count the sign against the digit length" in {
val Some(d) = BiggerDecimal.parseBiggerDecimal("-1e262143")
assert(d.toBigInteger === Some(new BigDecimal("-1e262143").toBigInteger))
}
"toBigIntegerWithMaxDigits" should "fail on values whose representation is too large" in {
val Some(d) = BiggerDecimal.parseBiggerDecimal("123456789")
assert(d.toBigIntegerWithMaxDigits(BigInteger.valueOf(8L)) === None)
}
it should "succeed when the representation is exactly the maximum size" in {
val Some(d) = BiggerDecimal.parseBiggerDecimal("123456789")
assert(d.toBigIntegerWithMaxDigits(BigInteger.valueOf(9L)) === d.toBigInteger)
}
"fromLong and fromDoubleUnsafe" should "agree on Int-sized integral values" in forAll { (value: Int) =>
val dl = BiggerDecimal.fromLong(value.toLong)
val dd = BiggerDecimal.fromDoubleUnsafe(value.toDouble)
assert(dl === dd)
}
"fromBigDecimal" should "round-trip BigDecimal values" in forAll { (value: SBigDecimal) =>
val result = BiggerDecimal.fromBigDecimal(value.underlying)
assert(
Try(new BigDecimal(value.toString)).toOption.forall { parsedValue =>
result.toBigDecimal.exists { roundTripped =>
roundTripped.compareTo(parsedValue) == 0
}
}
)
}
/**
* This is a workaround for a Scala.js bug that causes `BigDecimal` values
* with sufficiently large exponents to be printed with negative exponents.
*
* The filter below will have no effect on JVM tests since the condition is
* clearly nonsense.
*/
private[this] def isBadJsBigDecimal(d: SBigDecimal): Boolean =
d.abs > 1 && d.toString.contains("E-")
it should "agree with parseBiggerDecimalUnsafe" in forAll { (value: SBigDecimal) =>
whenever(!isBadJsBigDecimal(value)) {
val expected = BiggerDecimal.parseBiggerDecimalUnsafe(value.toString)
assert(BiggerDecimal.fromBigDecimal(value.underlying) === expected)
}
}
it should "agree with parseBiggerDecimalUnsafe on 0.000" in {
val value = "0.000"
val expected = BiggerDecimal.parseBiggerDecimalUnsafe(value)
assert(BiggerDecimal.fromBigDecimal(new BigDecimal(value)) === expected)
}
it should "agree with parseBiggerDecimalUnsafe on multiples of ten with trailing zeros" in {
val bigDecimal = new BigDecimal("10.0")
val fromBigDecimal = BiggerDecimal.fromBigDecimal(bigDecimal)
val fromString = BiggerDecimal.parseBiggerDecimalUnsafe(bigDecimal.toString)
assert(fromBigDecimal === fromString)
}
it should "work correctly on values whose string representations have exponents larger than Int.MaxValue" in {
val bigDecimal = new BigDecimal("-17014118346046923173168730371588410572800E+2147483647")
val fromBigDecimal = BiggerDecimal.fromBigDecimal(bigDecimal)
val fromString = BiggerDecimal.parseBiggerDecimalUnsafe(bigDecimal.toString)
assert(fromBigDecimal === fromString)
}
"fromBigInteger" should "round-trip BigInteger values" in forAll { (value: BigInt) =>
assert(BiggerDecimal.fromBigInteger(value.underlying).toBigInteger === Some(value.underlying))
}
"integralIsValidLong" should "agree with toLong" in forAll { (input: IntegralString) =>
assert(BiggerDecimal.integralIsValidLong(input.value) === Try(input.value.toLong).isSuccess)
}
"parseBiggerDecimal" should "parse any BigDecimal string" in forAll { (value: SBigDecimal) =>
val d = BiggerDecimal.parseBiggerDecimal(value.toString)
assert(
d.nonEmpty && Try(new BigDecimal(value.toString)).toOption.forall { parsedValue =>
d.flatMap(_.toBigDecimal).exists { roundTripped =>
roundTripped.compareTo(parsedValue) == 0
}
}
)
}
it should "parse number strings with big exponents" in {
forAll { (integral: BigInt, fractionalDigits: BigInt, exponent: BigInt) =>
val fractional = fractionalDigits.abs
val s = s"$integral.${fractional}e$exponent"
val scale = -exponent + (
(integral == 0, fractional == 0) match {
case (true, true) => 0
case (_, true) => -trailingZeros(integral)
case (_, _) => significantDigits(fractional)
}
)
(BiggerDecimal.parseBiggerDecimal(s), Try(new BigDecimal(s)).toOption) match {
case (Some(parsedBiggerDecimal), Some(parsedBigDecimal)) if scale.isValidInt =>
assert(parsedBiggerDecimal.toBigDecimal.exists(_.compareTo(parsedBigDecimal) == 0))
case (Some(_), None) => assert(true)
case _ => assert(false)
}
}
}
it should "parse JSON numbers" in forAll { (jns: JsonNumberString) =>
assert(BiggerDecimal.parseBiggerDecimal(jns.value).nonEmpty)
}
it should "parse integral JSON numbers" in forAll { (is: IntegralString) =>
assert(BiggerDecimal.parseBiggerDecimal(is.value) === Some(BiggerDecimal.fromBigInteger(new BigInteger(is.value))))
}
it should "fail on bad input" in {
val badNumbers = List("", "x", "01", "1x", "1ex", "1.0x", "1.x", "1e-x", "1e-0x", "1.", "1e", "1e-", "-")
badNumbers.foreach { input =>
assert(BiggerDecimal.parseBiggerDecimal(input) === None)
}
}
}
| travisbrown/circe | modules/numbers/shared/src/test/scala/io/circe/numbers/BiggerDecimalSuite.scala | Scala | apache-2.0 | 7,851 |
package com.cloudray.scalapress.item.tag
import org.scalatest.{OneInstancePerTest, FunSuite}
import org.scalatest.mock.MockitoSugar
import javax.servlet.http.HttpServletRequest
import com.cloudray.scalapress.item.Item
import com.cloudray.scalapress.folder.Folder
import com.cloudray.scalapress.framework.{ScalapressRequest, ScalapressContext}
/** @author Stephen Samuel */
class LinkTagTest extends FunSuite with OneInstancePerTest with MockitoSugar {
val context = new ScalapressContext()
val req = mock[HttpServletRequest]
val item = new Item
item.id = 12
item.name = "meatballs"
val f = new Folder
f.id = 435
f.name = "italian foods"
test("item is used if set") {
val sreq = ScalapressRequest(req, context).withItem(item)
val actual = LinkTag.render(sreq, Map.empty).get
assert("/item-12-meatballs" === actual)
}
test("folder is used if set") {
val sreq = ScalapressRequest(req, context).withFolder(f)
val actual = LinkTag.render(sreq, Map.empty).get
assert("/folder-435-italian-foods" === actual)
}
}
| vidyacraghav/scalapress | src/test/scala/com/cloudray/scalapress/item/tag/LinkTagTest.scala | Scala | apache-2.0 | 1,060 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.integration.torch
import com.intel.analytics.bigdl.nn.Add
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.RandomGenerator._
import spire.syntax.module
@com.intel.analytics.bigdl.tags.Serial
class AddSpec extends TorchSpec {
"A Add Module " should "generate correct output and grad" in {
torchCheck()
val inputN = 5
val seed = 100
RNG.setSeed(seed)
val module = new Add[Double](inputN)
val input = Tensor[Double](1, 5)
input(Array(1, 1)) = 1
input(Array(1, 2)) = 2
input(Array(1, 3)) = 3
input(Array(1, 4)) = 4
input(Array(1, 5)) = 5
val gradOutput = Tensor[Double](5)
gradOutput(Array(1)) = 2
gradOutput(Array(2)) = 5
gradOutput(Array(3)) = 10
gradOutput(Array(4)) = 17
gradOutput(Array(5)) = 26
    val code = "torch.manualSeed(" + seed + ")\n" +
      "module = nn.Add(5)\n" +
      "module:reset()\n" +
      "bias = module.bias\n" +
      "output = module:forward(input)\n" +
      "gradInput = module:backward(input, gradOutput)\n" +
      "ones = module._ones\n"
val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput", "bias", "ones"))
val luaOutput1 = torchResult("output").asInstanceOf[Tensor[Double]]
val luaOutput2 = torchResult("gradInput").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOnes = torchResult("ones").asInstanceOf[Tensor[Double]]
val start = System.nanoTime()
module.reset()
val bias = module.bias
val output = module.forward(input)
val gradInput = module.backward(input, gradOutput)
val end = System.nanoTime()
val scalaTime = end - start
luaOutput1 should be(output)
luaOutput2 should be(gradInput)
luaBias should be(bias)
}
"A Add Module " should "generate correct output and grad with batchsize > 1" in {
torchCheck()
val inputN = 5
val seed = 100
RNG.setSeed(seed)
val module = new Add[Double](inputN)
val input = Tensor[Double](2, 5)
input(Array(1, 1)) = 1
input(Array(1, 2)) = 2
input(Array(1, 3)) = 3
input(Array(1, 4)) = 4
input(Array(1, 5)) = 5
val gradOutput = Tensor[Double](2, 5)
gradOutput(Array(1, 1)) = 1
gradOutput(Array(1, 2)) = 2
gradOutput(Array(1, 3)) = 3
gradOutput(Array(1, 4)) = 4
gradOutput(Array(1, 5)) = 5
val code = "torch.manualSeed(" + seed + ")\\n" +
"module = nn.Add(5)\\n" +
"module:reset()\\n" +
"bias = module.bias\\n" +
"output = module:forward(input)\\n" +
"gradInput = module:backward(input, gradOutput)\\n" +
"ones = module._ones\\n"
val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput", "bias", "ones"))
val luaOutput1 = torchResult("output").asInstanceOf[Tensor[Double]]
val luaOutput2 = torchResult("gradInput").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOnes = torchResult("ones").asInstanceOf[Tensor[Double]]
val start = System.nanoTime()
module.reset()
val bias = module.bias
val output = module.forward(input)
val gradInput = module.backward(input, gradOutput)
val end = System.nanoTime()
val scalaTime = end - start
luaOutput1 should be(output)
luaOutput2 should be(gradInput)
luaBias should be(bias)
}
}
| zhangxiaoli73/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/integration/torch/AddSpec.scala | Scala | apache-2.0 | 4,285 |
package com.kolor.docker.api.entities
import scala.io.BufferedSource
class ConfigReader(config: BufferedSource) {
  // Materialise the config lines once so repeated getProperty calls
  // don't exhaust the underlying iterator returned by getLines().
  private lazy val lines = config.getLines().toList
def getProperty(name: String): String =
lines
.find(_.startsWith(s"$name="))
.map(_.replace(s"$name=", ""))
.getOrElse("")
}
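/**
 * Editor's illustrative sketch, not part of the original library: one way the
 * reader might be exercised against an in-memory, properties-style config.
 * The property names and values below are hypothetical.
 */
object ConfigReaderExample {
  import java.io.ByteArrayInputStream
  import scala.io.Source

  def main(args: Array[String]): Unit = {
    val raw = "host=localhost\nport=2375"
    val source = Source.fromInputStream(new ByteArrayInputStream(raw.getBytes("UTF-8")))
    val reader = new ConfigReader(source)
    println(reader.getProperty("port")) // prints "2375"
    println(reader.getProperty("host")) // prints "localhost" (relies on lines being memoised as a List)
  }
}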
| waveinch/reactive-docker | src/main/scala/com/kolor/docker/api/entities/ConfigReader.scala | Scala | mit | 312 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.spark.geotools
import com.vividsolutions.jts.geom.Coordinate
import org.apache.hadoop.conf.Configuration
import org.apache.spark.{SparkConf, SparkContext}
import org.geotools.data.simple.SimpleFeatureStore
import org.geotools.data.{DataStore, DataStoreFinder, DataUtilities, Query}
import org.geotools.geometry.jts.JTSFactoryFinder
import org.joda.time.format.ISODateTimeFormat
import org.junit.runner.RunWith
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.spark.{GeoMesaSpark, GeoMesaSparkKryoRegistrator}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class GeoToolsSpatialRDDProviderTest extends Specification {
var sc: SparkContext = null
step {
val conf = new SparkConf().setMaster("local[2]").setAppName("testSpark")
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
conf.set("spark.kryo.registrator", classOf[GeoMesaSparkKryoRegistrator].getName)
sc = SparkContext.getOrCreate(conf)
}
val dsParams = Map("cqengine" -> "true", "geotools" -> "true")
"The GeoToolsSpatialRDDProvider" should {
"read from the in-memory database" in {
val ds = DataStoreFinder.getDataStore(dsParams)
ingestChicago(ds)
val rdd = GeoMesaSpark(dsParams).rdd(new Configuration(), sc, dsParams, new Query("chicago"))
      rdd.count() mustEqual 3L
}
}
def ingestChicago(ds: DataStore): Unit = {
val sft = SimpleFeatureTypes.createType("chicago", "arrest:String,case_number:Int,dtg:Date,*geom:Point:srid=4326")
ds.createSchema(sft)
val fs = ds.getFeatureSource("chicago").asInstanceOf[SimpleFeatureStore]
val parseDate = ISODateTimeFormat.basicDateTime().parseDateTime _
val createPoint = JTSFactoryFinder.getGeometryFactory.createPoint(_: Coordinate)
val features = DataUtilities.collection(List(
new ScalaSimpleFeature("1", sft, initialValues = Array("true","1",parseDate("20160101T000000.000Z").toDate, createPoint(new Coordinate(-76.5, 38.5)))),
new ScalaSimpleFeature("2", sft, initialValues = Array("true","2",parseDate("20160102T000000.000Z").toDate, createPoint(new Coordinate(-77.0, 38.0)))),
new ScalaSimpleFeature("3", sft, initialValues = Array("true","3",parseDate("20160103T000000.000Z").toDate, createPoint(new Coordinate(-78.0, 39.0))))
))
fs.addFeatures(features)
}
} | tkunicki/geomesa | geomesa-spark/geomesa-spark-geotools/src/test/scala/org/locationtech/geomesa/spark/geotools/GeoToolsSpatialRDDProviderTest.scala | Scala | apache-2.0 | 3,018 |
package lila.db
package api
import play.api.libs.json._
import reactivemongo.bson._
import reactivemongo.core.commands._
import reactivemongo.api.{ SerializationPack, BSONSerializationPack }
import reactivemongo.api.commands.{
CollectionCommand, CommandWithPack, CommandWithResult, ImplicitCommandHelpers
}
trait MapReduceCommand[P <: SerializationPack]
extends ImplicitCommandHelpers[P] {
case class MapReduce(
mapFunction: JSFunction,
reduceFunction: JSFunction,
query: Option[pack.Document] = None,
sort: Option[pack.Document] = None,
limit: Option[Int] = None,
finalizeFunction: Option[JSFunction] = None,
scope: Option[String] = None,
verbose: Boolean = false)
extends CollectionCommand with CommandWithPack[pack.type] with CommandWithResult[pack.Document]
}
object BSONMapReduceCommand
extends MapReduceCommand[BSONSerializationPack.type] {
val pack = BSONSerializationPack
}
object BSONMapReduceCommandImplicits {
import reactivemongo.api.commands.ResolvedCollectionCommand
import reactivemongo.bson.BSONDocument
import BSONMapReduceCommand._
implicit object MapReduceWriter
extends BSONDocumentWriter[ResolvedCollectionCommand[MapReduce]] {
def write(mapr: ResolvedCollectionCommand[MapReduce]): BSONDocument = {
val cmd = mapr.command
BSONDocument(
"mapReduce" -> BSONString(mapr.collection),
"map" -> BSONString(cmd.mapFunction),
"reduce" -> BSONString(cmd.reduceFunction),
"out" -> BSONDocument("inline" -> true),
"query" -> cmd.query,
"sort" -> cmd.sort,
"limit" -> cmd.limit.map(x => BSONInteger(x)),
"finalize" -> cmd.finalizeFunction.map(x => BSONString(x)),
"scope" -> cmd.scope.map(x => BSONString(x)),
"verbose" -> BSONBoolean(cmd.verbose)
)
}
}
}
| r0k3/lila | modules/db/src/main/api/commands.scala | Scala | mit | 1,847 |
package com.github.diegopacheco.sandbox.scripts.scala.concurrency
import java.util.concurrent.atomic.AtomicReference
class Person(val name: AtomicReference[String]) {
def set(changedName: String) {
name.set(changedName)
}
}
object OldStuff {
def run{
var person = new Person(new AtomicReference("Diego"))
person.set("Diego Pacheco")
println( person.name )
}
def main(args: Array[String]) = run
} | diegopacheco/scala-playground | scala-playground/src/com/github/diegopacheco/sandbox/scripts/scala/concurrency/AtomicReference.scala | Scala | unlicense | 439 |
package demo
package components
package materialui
import chandu0101.macros.tojs.GhPagesMacros
import chandu0101.scalajs.react.components.materialui.MuiSlider
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
object MuiSliderDemo {
val code = GhPagesMacros.exampleSource
// EXAMPLE:START
val component = ScalaComponent
.builder[Unit]("MuiSliderDemo")
.render(P => {
val onChange: (ReactMouseEvent, Double) => Callback =
(e, v) => Callback.info(s"chose value: $v")
<.div(
CodeExample(code, "MuiSlider")(
MuiSlider(name = "slider1", onChange = onChange)(),
MuiSlider(name = "slider2", onChange = onChange, defaultValue = 0.5)(),
MuiSlider(name = "slider1", onChange = onChange, value = 0.3, disabled = true)()
)
)
})
.build
// EXAMPLE:END
def apply() = component()
}
| rleibman/scalajs-react-components | demo/src/main/scala/demo/components/materialui/MuiSliderDemo.scala | Scala | apache-2.0 | 899 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.holdenkarau.spark.testing
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
trait SparkContextProvider {
def sc: SparkContext
def conf: SparkConf
}
| mahmoudhanafy/spark-testing-base | src/main/1.3/scala/com/holdenkarau/spark/testing/SparkContextProvider.scala | Scala | apache-2.0 | 988 |
package learningconcurrency.ch3
import java.util.concurrent.atomic.AtomicLong
import parallelprogramming._
/**
 * Atomic variables let several threads read and update shared state
 * without locks: each compound read-modify-write completes as one step.
 *
 * They support linearizable operations, which are operations
 * that seem to occur instantaneously to the rest of the system.
 * An example is a volatile write (to main memory).
 */
object AtomicPrimitives_2 extends App {
  /**
   * Reimplementation of getUniqueId with an atomic mutation.
   * The incrementAndGet method:
   *
   * - Reads the value of uid
   * - Increments it by one
   * - Returns the new value
   *
   * all in a single atomic operation, as if it were one indivisible transaction.
   */
val uid = new AtomicLong(0L)
def getUniqueId(): Long = uid.incrementAndGet()
execute(log(s"Get Unique Id 1: $getUniqueId"))
execute(log(s"Get Unique Id 2: $getUniqueId"))
Thread.sleep(200)
  /**
   * Atomic variables also provide compareAndSet(), the CAS operation:
   * it writes the new value only if the current value equals the expected
   * value passed as the first argument, and returns whether the write
   * succeeded. A retry-loop sketch follows the assertion below.
   */
assert(uid.compareAndSet(2, 42))
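
  /**
   * Editor's illustrative sketch (not in the original text): the same unique-id
   * counter implemented with an explicit CAS retry loop. If another thread wins
   * the race, compareAndSet returns false and we re-read and retry.
   */
  def getUniqueIdViaCas(): Long = {
    val oldValue = uid.get()
    val newValue = oldValue + 1
    if (uid.compareAndSet(oldValue, newValue)) newValue
    else getUniqueIdViaCas() // lost the race: another thread changed uid first
  }

  log(s"Unique id via CAS: ${getUniqueIdViaCas()}")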
}
| tomduhourq/learning-concurrent-programming | src/main/scala/learningconcurrency/ch3/AtomicPrimitives_2.scala | Scala | apache-2.0 | 1,151 |
package lila
package object memo extends PackageObject {
private[memo] def logger = lila.log("memo")
}
| luanlv/lila | modules/memo/src/main/package.scala | Scala | mit | 107 |
package helpers
/**
* @author [email protected]
*/
abstract class BaseException(val code: String)
extends Exception with Loggable {
override def getMessage = code
}
| lizepeng/app.io | modules/models/app/helpers/BaseException.scala | Scala | apache-2.0 | 175 |
package org.fayalite.aws
import java.util
import com.amazonaws._
import com.amazonaws.internal.StaticCredentialsProvider
import com.amazonaws.regions.{Region, Regions, ServiceAbbreviations}
import com.amazonaws.services.ec2.AmazonEC2Client
import com.amazonaws.services.ec2.model._
import com.amazonaws.services.elasticloadbalancing.AmazonElasticLoadBalancingClient
import com.amazonaws.services.rds.{AmazonRDS, AmazonRDSClient}
import scala.collection.JavaConversions
import scala.collection.JavaConversions._
import com.amazonaws.auth.{DefaultAWSCredentialsProviderChain, EnvironmentVariableCredentialsProvider}
import com.amazonaws.auth.profile.ProfileCredentialsProvider
import com.amazonaws.services.s3.AmazonS3Client
import com.amazonaws.auth._
import com.amazonaws.event.{ProgressEvent, ProgressEventType}
import scala.io.Source
import scala.util.{Random, Try}
import JavaConversions._
/**
  * Main interaction gateway for AWS (EC2, S3, RDS, ELB).
  * Only partially tested; treat as experimental.
  */
object AWS {
  /**
    * Workaround for AWS SDK credential auto-detection failures: reads the
    * root key pair from a local .rootkey.csv file.
    *
    * @return : Access -> Secret
    */
def getKeys = {
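    // Editor's note (assumption, not stated in the original source): this expects
    // the usual AWS root-key CSV layout of two CRLF-terminated lines such as
    //   AWSAccessKeyId=AKIA... (hypothetical value)
    //   AWSSecretKey=wJal... (hypothetical value)
    // and keeps everything after the first '=' on each line.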
Source.fromFile(".rootkey.csv")
.mkString.split("\\r\\n").map{_.split("=").tail.mkString("=")} match {
case Array(x,y) => (x,y)}
}
// Working credentials
val (access, secret) = getKeys
val credentials = new BasicAWSCredentials(access, secret) // new DefaultAWSCredentialsProviderChain()
val cred = new StaticCredentialsProvider(credentials)
  /**
    * Quick sanity check that EC2 is available in the target region.
    * Note: this only checks regional service support; it does not
    * validate the credentials themselves.
    *
    * @return : EC2 should be online
    */
def verifyAuthenticated = {
val supported = Region.getRegion(Regions.US_WEST_1).isServiceSupported(ServiceAbbreviations.EC2)
supported
}
val clientConfig = new ClientConfiguration()
val ec2 = Region.getRegion(Regions.US_WEST_1).createClient(
classOf[AmazonEC2Client], cred, clientConfig)
val s3 = new AmazonS3Client(credentials)
val rds = new AmazonRDSClient(credentials)
val elb = new AmazonElasticLoadBalancingClient(credentials)
def testDescribeInstances() = {
val inst = ec2.describeInstances()
inst.getReservations.foreach{
_.getInstances.foreach{
i => println(i.getLaunchTime)
}
}
ServerManager.requestServerInfo()
}
def instances = ec2.describeInstances().getReservations.toList.flatMap{
_.getInstances
}
def instanceIds = instances.map{_.getInstanceId}
def destroyInstances = {
val tr = new TerminateInstancesRequest(instanceIds)
ec2.terminateInstances(tr)
}
def main(args: Array[String]) {
//testDescribeInstances()
//checkSpotRequests()
// destroyInstances
// getKeys
//spot()
// launchTestServer
// destroyInstances
}
  /**
    * Get all IP addresses associated with the account, in
    * Scala-friendly form.
    * @return : Addresses, e.g. an elastic IP
    */
def getAddresses = ec2.describeAddresses().getAddresses.toSeq
def getRunningInstances = {
ec2.describeInstances()
.getReservations
.flatMap {
_.getInstances
}
.filter {
_.getState.getName == "running"
}.toList
}
}
| ryleg/fayalite | gate/src/main/scala/org/fayalite/aws/AWS.scala | Scala | mit | 3,131 |
package build.unstable.tylog
import org.slf4j.event.Level
object Example extends App with TypedLogging {
type TraceID = String
sealed trait CallType
case object A extends CallType
case object B extends CallType
val traceId = "1"
// this adds callType/variation/traceID to MDC
  log.tylog(Level.TRACE, traceId, A, Variation.Attempt, "let's see..\n...")
// normal log statements between attempt and resolution will have MDC set
log.debug("a message with context")
// won't compile, ERROR is not a valid Level for tylog method
// log.tylog(Level.ERROR, traceId, A, Variation.Success, "yay!")
// logging Success/Failure will clear MDC
log.tylog(Level.INFO, traceId, A, Variation.Failure(new Exception("BOOM")), "yay!")
// placeholders and arguments are checked at compile time
log.debug("this compiles normally {}", "msg")
// log.info("this does not compile because there is a missing arg {}")
// log.warning("this does not compile because there is a missing placeholder", "a")
}
| ernestrc/tylog | examples/src/main/scala/build/unstable/tylog/Example.scala | Scala | mit | 1,022 |