code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5–1M)
---|---|---|---|---|---|
/*
 * Odessa State Environmental University
* Copyright (C) 2014
*/
package ua.edu.odeku.ceem.mapRadar.tools.radar.airspace.factories
import gov.nasa.worldwind.WorldWindow
import gov.nasa.worldwind.render.airspaces.SphereAirspace
import ua.edu.odeku.ceem.mapRadar.tools.radar.airspace.{CeemRadarAirspace, CeemRadarAirspaceEditor, IsolineAirspace, RadarAirspace}
import ua.edu.odeku.ceem.mapRadar.tools.radar.models.Radar
/**
* Created by Aleo on 21.04.2014.
*/
class CeemRadarAirspaceFactory(val radar: Radar, wwd: WorldWindow, fitShapeToViewport: Boolean) {
private val radarAirspaceFactory = new RadarAirspaceFactory(radar, wwd, fitShapeToViewport)
private val isolinesAirspaceFactory = new IsolineAirspaceFactory(radar, wwd, fitShapeToViewport)
radar.latLon = radarAirspaceFactory.airspace.asInstanceOf[SphereAirspace].getLocation
val airspace = new CeemRadarAirspace(
  radar,
  radarAirspaceFactory.airspace.asInstanceOf[RadarAirspace],
  radarAirspaceFactory.editor,
  isolinesAirspaceFactory.airspace.asInstanceOf[IsolineAirspace],
  isolinesAirspaceFactory.editor
)
val editor = new CeemRadarAirspaceEditor(airspace)
}
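// A hedged usage sketch (the `radar` and `wwd` values are illustrative here;
// real instances come from the surrounding CEEM application):
//
//   val factory = new CeemRadarAirspaceFactory(radar, wwd, fitShapeToViewport = true)
//   val airspace = factory.airspace // combined radar + isoline airspace
//   val editor = factory.editor     // CeemRadarAirspaceEditor bound to that airspace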
| aleo72/ww-ceem-radar | src/main/scala/ua/edu/odeku/ceem/mapRadar/tools/radar/airspace/factories/CeemRadarAirspaceFactory.scala | Scala | apache-2.0 | 1,130 |
package com.socrata.querycoordinator
import com.rojoma.json.v3.ast.JValue
import com.rojoma.json.v3.codec.{DecodeError, JsonDecode, JsonEncode}
import com.rojoma.json.v3.matcher.{PObject, Variable}
import com.socrata.querycoordinator.util.SoQLTypeCodec
import com.socrata.soql.types.SoQLType
case class Schema(hash: String, schema: Map[String, SoQLType], pk: String)
object Schema {
object SingleRow extends Schema("", Map.empty, ":id")
implicit object SchemaCodec extends JsonDecode[Schema] with JsonEncode[Schema] {
private implicit val soQLTypeCodec = SoQLTypeCodec
private val hashVar = Variable[String]()
private val schemaVar = Variable[Map[String, SoQLType]]()
private val pkVar = Variable[String]()
private val PSchema = PObject(
"hash" -> hashVar,
"schema" -> schemaVar,
"pk" -> pkVar
)
def encode(schemaObj: Schema): JValue = {
val Schema(hash, schema, pk) = schemaObj
PSchema.generate(hashVar := hash, schemaVar := schema, pkVar := pk)
}
def decode(x: JValue): Either[DecodeError, Schema] = PSchema.matches(x) match {
case Right(results) => Right(Schema(hashVar(results), schemaVar(results), pkVar(results)))
case Left(ex) => Left(ex)
}
}
}
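// A hedged round-trip sketch (SoQLText is assumed to exist in
// com.socrata.soql.types; it is not imported by this file):
//
//   val schema = Schema("abc123", Map("name" -> SoQLText), ":id")
//   val json = Schema.SchemaCodec.encode(schema)
//   Schema.SchemaCodec.decode(json) // Right(schema)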
| socrata-platform/query-coordinator | query-coordinator/src/main/scala/com/socrata/querycoordinator/Schema.scala | Scala | apache-2.0 | 1,247 |
package lila.game
case class Crosstable(
user1: Crosstable.User,
user2: Crosstable.User,
results: List[Crosstable.Result],
nbGames: Int) {
import Crosstable.Result
def nonEmpty = results.nonEmpty option this
def users = List(user2, user1)
def winnerId =
if (user1.score > user2.score) Some(user1.id)
else if (user1.score < user2.score) Some(user2.id)
else None
def user(id: String) = users find (_.id == id)
def showScore(userId: String) = {
val byTen = user(userId) ?? (_.score)
s"${byTen / 10}${(byTen % 10 != 0).??("½")}"
}
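// Worked example of the x10 encoding: a win adds 10 and a draw adds 5 (see
// addWins below), so byTen == 25 (two wins and a draw) renders as "2½".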
def showOpponentScore(userId: String) =
if (userId == user1.id) showScore(user2.id).some
else if (userId == user2.id) showScore(user1.id).some
else none
def addWins(userId: Option[String], wins: Int) = copy(
user1 = user1.copy(
score = user1.score + (userId match {
case None => wins * 5
case Some(u) if user1.id == u => wins * 10
case _ => 0
})),
user2 = user2.copy(
score = user2.score + (userId match {
case None => wins * 5
case Some(u) if user2.id == u => wins * 10
case _ => 0
})))
def fromPov(userId: String) =
if (userId == user2.id) copy(user1 = user2, user2 = user1)
else this
lazy val size = results.size
def fill = (1 to 20 - size)
}
object Crosstable {
case class User(id: String, score: Int) // score is x10
case class Result(gameId: String, winnerId: Option[String])
private[game] def makeKey(u1: String, u2: String): String = List(u1, u2).sorted mkString "/"
import reactivemongo.bson._
import lila.db.BSON
object BSONFields {
val id = "_id"
val score1 = "s1"
val score2 = "s2"
val results = "r"
val nbGames = "n"
}
implicit val crosstableBSONHandler = new BSON[Crosstable] {
import BSONFields._
def reads(r: BSON.Reader): Crosstable = r str id split '/' match {
case Array(u1Id, u2Id) => Crosstable(
user1 = User(u1Id, r intD "s1"),
user2 = User(u2Id, r intD "s2"),
results = r.get[List[String]](results).map { r =>
r drop 8 match {
case "" => Result(r take 8, none)
case "+" => Result(r take 8, Some(u1Id))
case "-" => Result(r take 8, Some(u2Id))
case _ => sys error s"Invalid result string $r"
}
},
nbGames = r int nbGames)
case x => sys error s"Invalid crosstable id $x"
}
def writeResult(result: Result, u1: String): String =
result.gameId + (result.winnerId ?? { w => if (w == u1) "+" else "-" })
def writes(w: BSON.Writer, o: Crosstable) = BSONDocument(
id -> makeKey(o.user1.id, o.user2.id),
score1 -> o.user1.score,
score2 -> o.user2.score,
results -> o.results.map { writeResult(_, o.user1.id) },
nbGames -> w.int(o.nbGames))
}
}
| Happy0/lila | modules/game/src/main/Crosstable.scala | Scala | mit | 2,970 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.knn
import org.geotools.data.{DataStoreFinder, Query}
import org.geotools.factory.Hints
import org.geotools.feature.DefaultFeatureCollection
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.geotools.filter.text.ecql.ECQL
import org.joda.time.DateTime
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.data.{AccumuloDataStore, AccumuloDataStoreParams}
import org.locationtech.geomesa.features.avro.AvroSimpleFeatureFactory
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geohash.VincentyModel
import org.locationtech.geomesa.utils.geotools.Conversions._
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
import scala.util.Random
case class TestEntry(wkt: String, id: String, dt: DateTime = new DateTime())
@RunWith(classOf[JUnitRunner])
class KNearestNeighborSearchProcessTest extends Specification {
sequential
val sftName = "geomesaKNNTestType"
val sft = SimpleFeatureTypes.createType(sftName, "geom:Point:srid=4326,dtg:Date,dtg_end_time:Date")
sft.getUserData.put(SimpleFeatureTypes.Configs.DEFAULT_DATE_KEY, "dtg")
val ds = createStore
ds.createSchema(sft)
val fs = ds.getFeatureSource(sftName)
val featureCollection = new DefaultFeatureCollection(sftName, sft)
val clusterOfPoints = List[TestEntry](
TestEntry("POINT( -78.503547 38.035475 )", "rotunda"),
TestEntry("POINT( -78.503923 38.035536 )", "pavilion I"),
TestEntry("POINT( -78.504059 38.035308 )", "pavilion III"),
TestEntry("POINT( -78.504276 38.034971 )", "pavilion V"),
TestEntry("POINT( -78.504424 38.034628 )", "pavilion VII"),
TestEntry("POINT( -78.504617 38.034208 )", "pavilion IX"),
TestEntry("POINT( -78.503833 38.033938 )", "pavilion X"),
TestEntry("POINT( -78.503601 38.034343 )", "pavilion VIII"),
TestEntry("POINT( -78.503424 38.034721 )", "pavilion VI"),
TestEntry("POINT( -78.503180 38.035039 )", "pavilion IV"),
TestEntry("POINT( -78.503109 38.035278 )", "pavilion II"),
TestEntry("POINT( -78.505152 38.032704 )", "cabell"),
TestEntry("POINT( -78.510295 38.034283 )", "beams"),
TestEntry("POINT( -78.522288 38.032844 )", "mccormick"),
TestEntry("POINT( -78.520019 38.034511 )", "hep")
)
val distributedPoints = generateTestData(1000, 38.149894, -79.073639, 0.30)
// add the test points to the feature collection
addTestData(clusterOfPoints)
addTestData(distributedPoints)
// write the feature to the store
fs.addFeatures(featureCollection)
def createStore: AccumuloDataStore =
// the specific parameter values should not matter, as we
// are requesting a mock data store connection to Accumulo
DataStoreFinder.getDataStore(Map(
AccumuloDataStoreParams.InstanceIdParam.key -> "mycloud",
AccumuloDataStoreParams.ZookeepersParam.key -> "zoo1:2181,zoo2:2181,zoo3:2181",
AccumuloDataStoreParams.UserParam.key -> "myuser",
AccumuloDataStoreParams.PasswordParam.key -> "mypassword",
AccumuloDataStoreParams.AuthsParam.key -> "A,B,C",
AccumuloDataStoreParams.CatalogParam.key -> "testknn",
AccumuloDataStoreParams.MockParam.key -> "true")).asInstanceOf[AccumuloDataStore]
// utility method to generate random points about a central point
// note that these points will be uniform in cartesian space only
def generateTestData(num: Int, centerLat: Double, centerLon: Double, width: Double) = {
val rng = new Random(0)
(1 to num).map(i => {
val wkt = "POINT(" +
(centerLon + width * (rng.nextDouble() - 0.5)).toString + " " +
(centerLat + width * (rng.nextDouble() - 0.5)).toString + " " +
")"
val dt = new DateTime()
TestEntry(wkt, (100000 + i).toString, dt)
}).toList
}
// load data into the featureCollection
def addTestData(points: List[TestEntry]) = {
points.foreach { case e: TestEntry =>
val sf = AvroSimpleFeatureFactory.buildAvroFeature(sft, List(), e.id)
sf.setDefaultGeometry(WKTUtils.read(e.wkt))
sf.getUserData()(Hints.USE_PROVIDED_FID) = java.lang.Boolean.TRUE
featureCollection.add(sf)
}
}
// generates a single SimpleFeature
def queryFeature(label: String, lat: Double, lon: Double) = {
val sf = AvroSimpleFeatureFactory.buildAvroFeature(sft, List(), label)
sf.setDefaultGeometry(WKTUtils.read(f"POINT($lon $lat)"))
sf.getUserData()(Hints.USE_PROVIDED_FID) = java.lang.Boolean.TRUE
sf
}
// generates a very loose query
def wideQuery = {
val lat = 38.0
val lon = -78.50
val siteSize = 5.0
val minLat = lat - siteSize
val maxLat = lat + siteSize
val minLon = lon - siteSize
val maxLon = lon + siteSize
val queryString = s"BBOX(geom,$minLon, $minLat, $maxLon, $maxLat)"
val ecqlFilter = ECQL.toFilter(queryString)
//val fs = getTheFeatureSource(tableName, featureName)
//new Query(featureName, ecqlFilter, transform)
new Query(sftName, ecqlFilter)
}
// begin tests ------------------------------------------------
"GeoMesaKNearestNeighborSearch" should {
"find nothing within 10km of a single query point " in {
val inputFeatures = new DefaultFeatureCollection(sftName, sft)
inputFeatures.add(queryFeature("fan mountain", 37.878219, -78.692649))
val dataFeatures = fs.getFeatures()
val knn = new KNearestNeighborSearchProcess
knn.execute(inputFeatures, dataFeatures, 5, 500.0, 10000.0).size must equalTo(0)
}
"find 11 points within 400m of a point when k is set to 15 " in {
val inputFeatures = new DefaultFeatureCollection(sftName, sft)
inputFeatures.add(queryFeature("madison", 38.036871, -78.502720))
val dataFeatures = fs.getFeatures()
val knn = new KNearestNeighborSearchProcess
knn.execute(inputFeatures, dataFeatures, 15, 50.0, 400.0).size should be equalTo 11
}
"handle three query points, one of which will return nothing" in {
val inputFeatures = new DefaultFeatureCollection(sftName, sft)
inputFeatures.add(queryFeature("madison", 38.036871, -78.502720))
inputFeatures.add(queryFeature("fan mountain", 37.878219, -78.692649))
inputFeatures.add(queryFeature("blackfriars", 38.149185, -79.070569))
val dataFeatures = fs.getFeatures()
val knn = new KNearestNeighborSearchProcess
knn.execute(inputFeatures, dataFeatures, 5, 500.0, 5000.0).size must greaterThan(0)
}
"handle an empty query point collection" in {
val inputFeatures = new DefaultFeatureCollection(sftName, sft)
val dataFeatures = fs.getFeatures()
val knn = new KNearestNeighborSearchProcess
knn.execute(inputFeatures, dataFeatures, 100, 500.0, 5000.0).size must equalTo(0)
}
"handle non-point geometries in inputFeatures by ignoring them" in {
val sft = SimpleFeatureTypes.createType("lineStringKnn", "geom:LineString:srid=4326")
val inputFeatures = new DefaultFeatureCollection("lineStringKnn", sft)
val lineSF = SimpleFeatureBuilder.build(sft, List(), "route 29")
lineSF.setDefaultGeometry(WKTUtils.read(f"LINESTRING(-78.491 38.062, -78.474 38.082)"))
inputFeatures.add(lineSF)
val dataFeatures = fs.getFeatures()
val knn = new KNearestNeighborSearchProcess
val res = knn.execute(inputFeatures, dataFeatures, 100, 500.0, 5000.0)
res.size mustEqual 0
}
}
"runNewKNNQuery" should {
"return a NearestNeighbors object with features around Charlottesville in correct order" in {
val orderedFeatureIDs = List("rotunda",
"pavilion II",
"pavilion I",
"pavilion IV",
"pavilion III",
"pavilion VI",
"pavilion V",
"pavilion VII",
"pavilion VIII",
"pavilion IX",
"pavilion X",
"cabell",
"beams",
"hep",
"mccormick")
val knnResults =
KNNQuery.runNewKNNQuery(fs, wideQuery, 15, 500.0, 2500.0, queryFeature("madison", 38.036871, -78.502720))
// return the ordered neighbors and extract the SimpleFeatures
val knnFeatures = knnResults.getK.map { _.sf }
val knnIDs = knnFeatures.map { _.getID }
knnIDs must equalTo(orderedFeatureIDs)
}
"return a nearestNeighbors object with features around Staunton in correct order" in {
val k = 10
val referenceFeature = queryFeature("blackfriars", 38.149185, -79.070569)
val knnResults =
KNNQuery.runNewKNNQuery(fs, wideQuery, k, 5000.0, 50000.0, referenceFeature)
val knnFeatureIDs = knnResults.getK.map { _.sf.getID }
val directFeatures = SelfClosingIterator(fs.getFeatures().features).toList
val sortedByDist = directFeatures.sortBy (
a => VincentyModel.getDistanceBetweenTwoPoints(referenceFeature.point, a.point).getDistanceInMeters).take(k)
knnFeatureIDs.equals(sortedByDist.map{_.getID}) must beTrue
}
}
}
| ronq/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/process/knn/KNearestNeighborSearchProcessTest.scala | Scala | apache-2.0 | 9,605 |
package com.dero.home.items
import com.dero.home.AbstractModule
import com.dero.home.states.OnOff
import scala.languageFeature.implicitConversions
import scala.reflect.runtime.universe._
class Switch(module: AbstractModule) extends Item(module) {
override def This = typeOf[Switch]
type State = OnOff.Type
type Command = State
}
| derolf/scala4mqtt | src/main/scala/com/dero/home/items/Switch.scala | Scala | mit | 344 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.exceptions
/**
* Trait that encapsulates the information required of an exception thrown by ScalaTest's assertions
* and matchers, which includes a stack depth at which the failing line of test code resides.
*
* <p>
* This trait exists so that it can be mixed into two exception superclasses, <a href="StackDepthException.html"><code>StackDepthException</code></a>,
* from which extend several exceptions that do not depend on JUnit, and <a href="../junit/JUnitTestFailedError.html"><code>JUnitTestFailedError</code></a>, which
* does depend on JUnit. The latter, which requires JUnit be in the classpath, ensures failed ScalaTest assertions are
* reported as "failures," not "errors," by JUnit.
* </p>
*/
trait StackDepth { this: Throwable =>
/**
* An optional detail message for this <code>StackDepth</code> exception.
*/
val message: Option[String]
/**
* An optional cause, the <code>Throwable</code> that caused this <code>StackDepth</code> exception to be thrown.
*/
val cause: Option[Throwable]
/**
* The depth in the stack trace of this exception at which the line of test code that failed resides.
*/
val failedCodeStackDepth: Int
/**
* A string that provides the filename and line number of the line of code that failed, suitable
* for presenting to a user, which is taken from this exception's <code>StackTraceElement</code> at the depth specified
* by <code>failedCodeStackDepth</code>.
*
* <p>
* This is a <code>def</code> instead of a <code>val</code> because exceptions are mutable: their stack trace can
* be changed after the exception is created. This is done, for example, by the <code>SeveredStackTraces</code> trait.
* </p>
*
* @return a user-presentable string containing the filename and line number that caused the failed test
*/
def failedCodeFileNameAndLineNumberString: Option[String] = {
for (fileName <- failedCodeFileName; lineNum <- failedCodeLineNumber) yield
fileName + ":" + lineNum
}
private def stackTraceElement = getStackTrace()(failedCodeStackDepth)
/**
* A string that provides the filename of the line of code that failed, suitable
* for presenting to a user, which is taken from this exception's <code>StackTraceElement</code> at the depth specified
* by <code>failedCodeStackDepth</code>.
*
* <p>
* This is a <code>def</code> instead of a <code>val</code> because exceptions are mutable: their stack trace can
* be changed after the exception is created. This is done, for example, by the <code>SeveredStackTraces</code> trait.
* </p>
*
* @return a string containing the filename that caused the failed test
*/
def failedCodeFileName: Option[String] = {
val fileName = stackTraceElement.getFileName
if (fileName != null) {
Some(fileName)
}
else None
}
/**
* A string that provides the line number of the line of code that failed, suitable
* for presenting to a user, which is taken from this exception's <code>StackTraceElement</code> at the depth specified
* by <code>failedCodeStackDepth</code>.
*
* <p>
* This is a <code>def</code> instead of a <code>val</code> because exceptions are mutable: their stack trace can
* be changed after the exception is created. This is done, for example, by the <code>SeveredStackTraces</code> trait.
* </p>
*
* @return a string containing the line number that caused the failed test
*/
def failedCodeLineNumber: Option[Int] = {
val lineNum = stackTraceElement.getLineNumber
if (lineNum > 0) {
Some(lineNum)
}
else None
}
/**
* Returns an exception of the same class with <code>failedExceptionStackDepth</code> set to 0 and
* all frames above this stack depth severed off. This can be useful when working with tools (such as IDEs) that do not
* directly support ScalaTest. (Tools that directly support ScalaTest can use the stack depth information delivered
* in the StackDepth exceptions.)
*/
def severedAtStackDepth: Throwable with StackDepth
}
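// A hedged sketch of a minimal concrete StackDepth exception (illustrative
// only; ScalaTest's real subclasses extend StackDepthException instead):
//
//   class SimpleFailure(
//       val message: Option[String],
//       val cause: Option[Throwable],
//       val failedCodeStackDepth: Int)
//     extends RuntimeException(message.orNull, cause.orNull) with StackDepth {
//     def severedAtStackDepth: Throwable with StackDepth = this // no severing in this sketch
//   }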
| travisbrown/scalatest | src/main/scala/org/scalatest/exceptions/StackDepth.scala | Scala | apache-2.0 | 4,691 |
package com.scalaAsm.x86
package Instructions
package General
// Description: Interrupt Return
// Category: general/breakstack
trait IRET extends InstructionDefinition {
val mnemonic = "IRET"
}
object IRET extends ZeroOperands[IRET] with IRETImpl
trait IRETImpl extends IRET {
implicit object _0 extends NoOp{
val opcode: OneOpcode = 0xCF
override def hasImplicitOperand = true
}
}
| bdwashbu/scala-x86-inst | src/main/scala/com/scalaAsm/x86/Instructions/General/IRET.scala | Scala | apache-2.0 | 404 |
package io.github.binaryfoo.lagotto
class PivotedIteratorTest extends LagoTest {
import fieldParser.FieldExpr.expressionFor
val rotateOn = fieldParser.DirectExpr.unapply("at").get
val count = expressionFor("n")
val entries = Seq(
SimpleLogEntry("at" -> "15:59", "mti" -> "0200", "n" -> "10"),
SimpleLogEntry("at" -> "15:59", "mti" -> "0210", "n" -> "9"),
SimpleLogEntry("at" -> "16:00", "mti" -> "0200", "n" -> "5")
)
val pivotExpr = PivotExpr("mti", expressionFor("mti"))
entries.foreach(pivotExpr)
"Pivot iterator" should "output one row per (rotateOn, pivotExpr) pair" in {
val iterator = new PivotedIterator(rotateOn, pivotExpr, Seq(count), entries.toIterator)
val result = iterator.toList.map(_.exportAsSeq)
result shouldBe List(
Seq("at" -> "15:59", "0200 - n" -> "10", "0210 - n" -> "9"),
Seq("at" -> "16:00", "0200 - n" -> "5", "0210 - n" -> "0")
)
}
"Aggregate of pivot" should "apply to all columns of a single pivoted row" in {
val sumOfPivotedCounts = expressionFor("sum(pivoted(n))")
val iterator = new PivotedIterator(rotateOn, pivotExpr, Seq(count, sumOfPivotedCounts), entries.toIterator)
val result = iterator.toList.map(_.exportAsSeq)
result shouldBe List(
Seq("at" -> "15:59", "0200 - n" -> "10", "0210 - n" -> "9", "sum(pivoted(n))" -> "19"),
Seq("at" -> "16:00", "0200 - n" -> "5", "0210 - n" -> "0", "sum(pivoted(n))" -> "5")
)
}
"Multiple aggregates of pivot" should "be allowed" in {
val minOfPivot = expressionFor("min(pivoted(n))")
val maxOfPivot = expressionFor("max(pivoted(n))")
val iterator = new PivotedIterator(rotateOn, pivotExpr, Seq(count, minOfPivot, maxOfPivot), entries.toIterator)
val result = iterator.toList.map(_.exportAsSeq)
result(0) should (contain("min(pivoted(n))" -> "9") and contain ("max(pivoted(n))" -> "10"))
result(1) should (contain("min(pivoted(n))" -> "0") and contain ("max(pivoted(n))" -> "5"))
}
"Conditional count of pivot" should "apply to pivot result" in {
val sumOfPivotedCounts = expressionFor("count(pivoted(n)>1)")
val iterator = new PivotedIterator(rotateOn, pivotExpr, Seq(count, sumOfPivotedCounts), entries.toIterator)
val result = iterator.toList.map(_.exportAsSeq)
result shouldBe List(
Seq("at" -> "15:59", "0200 - n" -> "10", "0210 - n" -> "9", "count(pivoted(n)>1)" -> "2"),
Seq("at" -> "16:00", "0200 - n" -> "5", "0210 - n" -> "0", "count(pivoted(n)>1)" -> "1")
)
}
}
| binaryfoo/lagotto | src/test/scala/io/github/binaryfoo/lagotto/PivotedIteratorTest.scala | Scala | mit | 2,514 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hammerlab.guacamole.filters
import org.hammerlab.guacamole.pileup.PileupElement
/**
* Filter to remove pileup elements with low alignment quality
*/
object QualityAlignedReadsFilter {
/**
*
* @param elements sequence of pileup elements to filter
* @param minimumAlignmentQuality Threshold to define whether a read was poorly aligned
   * @return filtered sequence of elements: those whose read has an alignmentQuality of at least minimumAlignmentQuality
*/
def apply(elements: Seq[PileupElement], minimumAlignmentQuality: Int): Seq[PileupElement] = {
elements.filter(_.read.alignmentQuality >= minimumAlignmentQuality)
}
}
/**
* Filter to remove pileup elements close to edge of reads
*/
object EdgeBaseFilter {
/**
*
* @param elements sequence of pileup elements to filter
* @param minimumDistanceFromEndFromRead Threshold of distance from base to edge of read
   * @return filtered sequence of elements: those whose distance from the sequencing end is at least minimumDistanceFromEndFromRead
*/
def apply(elements: Seq[PileupElement], minimumDistanceFromEndFromRead: Int): Seq[PileupElement] = {
elements.filter(_.distanceFromSequencingEnd >= minimumDistanceFromEndFromRead)
}
}
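// A hedged usage sketch (a real Seq[PileupElement] would come from a pileup;
// the threshold values are illustrative):
//
//   val wellAligned = QualityAlignedReadsFilter(elements, minimumAlignmentQuality = 30)
//   val interior = EdgeBaseFilter(wellAligned, minimumDistanceFromEndFromRead = 5)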
| bikash/guacamole | src/main/scala/org/hammerlab/guacamole/filters/PileupElementsFilter.scala | Scala | apache-2.0 | 2,019 |
package com.stulsoft.exercises.bit
/** Playing with bit operations
*
* @see [[https://www.tutorialspoint.com/scala/scala_bitwise_operators.htm Scala - Bitwise Operators]]
* @author Yuriy Stul.
*/
object BitOperations extends App {
val a = 60
/* 60 = 0011 1100 */
val b = 13
/* 13 = 0000 1101 */
var c = 0
c = a & b /* 12 = 0000 1100 */
println("a & b = " + c)
c = a | b /* 61 = 0011 1101 */
println("a | b = " + c)
c = a ^ b /* 49 = 0011 0001 */
println("a ^ b = " + c)
c = ~a /* -61 = 1100 0011 */
println("~a = " + c)
c = a << 2 /* 240 = 1111 0000 */
println("a << 2 = " + c)
c = a >> 2 /* 15 = 0000 1111 */
println("a >> 2 = " + c)
c = a >>> 2 /* 15 = 0000 1111 */
println("a >>> 2 = " + c)
println(c.toBinaryString)
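// Note: >> and >>> only differ for negative operands. For example,
// -60 >> 2 == -15 (sign-extending), while -60 >>> 2 == 1073741809 (zero-filling).
println((-60 >> 2) + " " + (-60 >>> 2))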
}
| ysden123/scala-exercises | src/main/scala/com/stulsoft/exercises/bit/BitOperations.scala | Scala | mit | 777 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution
import java.util.concurrent.Executors
import minitest.SimpleTestSuite
import minitest.laws.Checkers
import monix.execution.schedulers._
import scala.concurrent.ExecutionContext.global
import scala.concurrent.duration.TimeUnit
object FeaturesJVMSuite extends SimpleTestSuite with Checkers {
test("TestScheduler") {
val ref = TestScheduler()
assert(ref.features.contains(Scheduler.BATCHING))
assert(!ref.features.contains(Scheduler.TRACING))
}
test("TracingScheduler") {
val ref = TracingScheduler(global)
assert(ref.features.contains(Scheduler.BATCHING))
assert(ref.features.contains(Scheduler.TRACING))
}
test("TracingSchedulerService") {
val ref = TracingSchedulerService(Scheduler.singleThread("features-test"))
try {
assert(ref.features.contains(Scheduler.BATCHING))
assert(ref.features.contains(Scheduler.TRACING))
} finally {
ref.shutdown()
}
}
test("Scheduler.Implicits.global") {
val ref: Scheduler = Scheduler.global
assert(ref.features.contains(Scheduler.BATCHING))
assert(!ref.features.contains(Scheduler.TRACING))
}
test("AsyncScheduler(global)") {
val ref = AsyncScheduler(Scheduler.DefaultScheduledExecutor, Scheduler.global, ExecutionModel.Default)
assert(ref.features.contains(Scheduler.BATCHING))
assert(!ref.features.contains(Scheduler.TRACING))
}
test("TrampolineScheduler(TracingScheduler(global))") {
val ref = TrampolineScheduler(TracingScheduler(Scheduler.global), ExecutionModel.Default)
assert(ref.features.contains(Scheduler.BATCHING))
assert(ref.features.contains(Scheduler.TRACING))
}
test("ExecutorScheduler(Executor") {
val ref = {
val ec = Executors.newSingleThreadExecutor()
ExecutorScheduler(ec, UncaughtExceptionReporter.default, ExecutionModel.Default, Features.empty)
}
try {
assert(ref.features.contains(Scheduler.BATCHING))
assert(!ref.features.contains(Scheduler.TRACING))
} finally {
ref.shutdown()
}
}
test("ExecutorScheduler(ScheduledExecutor") {
val ref = {
val ec = Executors.newSingleThreadScheduledExecutor()
ExecutorScheduler(ec, UncaughtExceptionReporter.default, ExecutionModel.Default, Features.empty)
}
try {
assert(ref.features.contains(Scheduler.BATCHING))
assert(!ref.features.contains(Scheduler.TRACING))
} finally {
ref.shutdown()
}
}
test("ExecutorScheduler(ScheduledExecutor)") {
val ref = {
val ec = Executors.newSingleThreadScheduledExecutor()
ExecutorScheduler(ec, UncaughtExceptionReporter.default, ExecutionModel.Default, Features.empty)
}
try {
assert(ref.features.contains(Scheduler.BATCHING))
assert(!ref.features.contains(Scheduler.TRACING))
} finally {
ref.shutdown()
}
}
test("ReferenceScheduler(global)") {
val ref = wrapViaReferenceScheduler(Scheduler.global)
assert(ref.features.contains(Scheduler.BATCHING))
assert(!ref.features.contains(Scheduler.TRACING))
}
test("ReferenceScheduler(TracingScheduler(global))") {
val ref = wrapViaReferenceScheduler(TracingScheduler(Scheduler.global))
assert(ref.features.contains(Scheduler.BATCHING))
assert(ref.features.contains(Scheduler.TRACING))
}
def wrapViaReferenceScheduler(ec: Scheduler): Scheduler = {
val ref = new ReferenceScheduler {
override def execute(command: Runnable): Unit =
ec.execute(command)
override def scheduleOnce(initialDelay: Long, unit: TimeUnit, r: Runnable): Cancelable =
ec.scheduleOnce(initialDelay, unit, r)
override def reportFailure(t: Throwable): Unit =
ec.reportFailure(t)
override def executionModel: ExecutionModel =
ec.executionModel
override def features: Features =
ec.features
}
ref.withUncaughtExceptionReporter(UncaughtExceptionReporter.default)
}
}
| alexandru/monifu | monix-execution/jvm/src/test/scala/monix/execution/FeaturesJVMSuite.scala | Scala | apache-2.0 | 4,617 |
/*
* Copyright (c) 2011 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
object tag {
def apply[U] = new Tagger[U]
trait Tagged[U]
type @@[+T, U] = T with Tagged[U]
class Tagger[U] {
def apply[T](t : T) : T @@ U = t.asInstanceOf[T @@ U]
}
}
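// A hedged usage sketch of tagging (the Meters tag is made up here):
//
//   import tag.@@
//   trait Meters
//   val distance: Double @@ Meters = tag[Meters](1.5)
//   // `distance` is still an unboxed Double at runtime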
object newtype {
/**
* Creates a value of the newtype given a value of its representation type.
*/
def apply[Repr, Ops](r : Repr) : Newtype[Repr, Ops] = r.asInstanceOf[Any with Newtype[Repr, Ops]]
/**
* New type with `Repr` as representation type and operations provided by `Ops`.
*
* Values of the newtype will not add any additional boxing beyond what's required for
* values of the representation type to conform to Any. In practice this means that value
* types will receive their standard Scala AnyVal boxing and reference types will be unboxed.
*/
type Newtype[Repr, Ops] = { type Tag = NewtypeTag[Repr, Ops] }
trait NewtypeTag[Repr, Ops]
/**
* Implicit conversion of newtype to `Ops` type for the selection of `Ops` newtype operations.
*
* The implicit conversion `Repr => Ops` would typically be provided by publishing the companion
* object of the `Ops` type as an implicit value.
*/
implicit def newtypeOps[Repr, Ops](t : Newtype[Repr, Ops])(implicit mkOps : Repr => Ops) : Ops = t.asInstanceOf[Repr]
}
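// A hedged usage sketch of `newtype` (MyString and its ops are illustrative):
//
//   case class MyStringOps(s: String) { def mySize: Int = s.size }
//   implicit val mkOps: String => MyStringOps = MyStringOps.apply
//   type MyString = newtype.Newtype[String, MyStringOps]
//   val s: MyString = newtype[String, MyStringOps]("hello")
//   s.mySize // 5; no boxing beyond that of String itself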
/**
* Type class witnessing the least upper bound of a pair of types and providing conversions from each to their common
* supertype.
*
* @author Miles Sabin
*/
trait Lub[-A, -B, +Out] {
def left(a : A): Out
def right(b : B): Out
}
object Lub {
  implicit def lub[T]: Lub[T, T, T] = new Lub[T, T, T] {
def left(a : T): T = a
def right(b : T): T = b
}
}
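// A hedged sketch of how Lub can be used (illustrative only):
//
//   def choose[A, B, Out](cond: Boolean, a: A, b: B)(implicit l: Lub[A, B, Out]): Out =
//     if (cond) l.left(a) else l.right(b)
//   choose(true, 1, 2) // resolves Lub[Int, Int, Int] via lub[T] above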
| rorygraves/shapeless | core/src/main/scala/shapeless/typeoperators.scala | Scala | apache-2.0 | 2,245 |
/*
* Copyright 2015 Heiko Seeberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.heikoseeberger.reactiveflows
package proto
import java.time.Instant.now
import org.scalatest.{ Matchers, WordSpec }
final class FlowSerializerSpec extends WordSpec with Matchers {
import Flow._
private val serializer = new FlowSerializer
"FlowSerializer" should {
"serialize and deserialize GetPosts" in {
val o = GetPosts(0, 1)
val (manifest, binary) = serialize(o)
serializer.fromBinary(binary, manifest) shouldBe o
}
"serialize and deserialize Posts" in {
val o = Posts(Vector(Post(0, "text", now())))
val (manifest, binary) = serialize(o)
serializer.fromBinary(binary, manifest) shouldBe o
}
"serialize and deserialize AddPost" in {
val o = AddPost("text")
val (manifest, binary) = serialize(o)
serializer.fromBinary(binary, manifest) shouldBe o
}
"serialize and deserialize PostAdded" in {
val o = PostAdded("name", Post(0, "text", now()))
val (manifest, binary) = serialize(o)
serializer.fromBinary(binary, manifest) shouldBe o
}
"serialize and deserialize Envelope with GetPosts" in {
val o = CommandEnvelope("name", GetPosts(0, 1))
val (manifest, binary) = serialize(o)
serializer.fromBinary(binary, manifest) shouldBe o
}
"serialize and deserialize Envelope with AddPost" in {
val o = CommandEnvelope("name", AddPost("text"))
val (manifest, binary) = serialize(o)
serializer.fromBinary(binary, manifest) shouldBe o
}
}
private def serialize(o: AnyRef) = (serializer.manifest(o), serializer.toBinary(o))
}
| hseeberger/reactive-flows | src/test/scala/de/heikoseeberger/reactiveflows/proto/FlowSerializerSpec.scala | Scala | apache-2.0 | 2,303 |
package sifters.odesk.apps.rescrape
import sifters.odesk.apps.SimpleSifter
import sifters.odesk.db.ODeskSiftersDBProvider
/**
 * Tool which searches for jobs that have no additional data, removes them, and adds them for rescraping. Note:
 * onenote:///D:\\Progects\\Little%20projects\\!Notes\\Программы.one#%3eFreelance%20analytics&section-id={2EC25AF8-8B10-4445-B54F-6B56D915AC11}&page-id={55C1974A-B9EB-4EF9-973F-DCA773220EBA}&object-id={CE61318B-9E1D-018F-0094-EE79824B7B90}&B
* Created by CAB on 03.11.2014.
*/
object RescrapeJobsWithNoAdditionalData extends SimpleSifter("RescrapeJobsWithNoAdditionalData:") {
  def sift(db: ODeskSiftersDBProvider) = {
    //Get urls
    val urls = db.findJobsWithNoAdditionalData
    println(" In 'odesk_jobs' found " + urls.size + " wrong jobs.")
    //Delete
    deleteJobsByUrl(db, urls)
    //Add to rescrape
    val nAdded = db.addFoundJobsRows(buildFoundJobsByURL(urls))
    println(" Added " + nAdded + " jobs to rescrape.")
  }
}
| AlexCAB/FreelanceAnalytics | src/sifters/odesk/apps/rescrape/RescrapeJobsWithNoAdditionalData.scala | Scala | mit | 941 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic
import org.scalatest._
/*
// trait SeqConstraints
// Need a low priority one
implicit def seqEqualityConstraint[EA, CA[_] <: collection.GenSeq[_], EB, CB[_] <: collection.GenSeq[_]](implicit equalityOfA: Equality[CA[EA]], ev: EA <:< EB): EqualityConstraint[CA[EA], CB[EB]] = new BasicEqualityConstraint[CA[EA], CB[EB]](equalityOfA)
// trait SetConstraints
// Need a low priority one
implicit def setEqualityConstraint[EA, CA[_] <: collection.GenSet[_], EB, CB[_] <: collection.GenSet[_]](implicit equalityOfA: Equality[CA[EA]], ev: EA <:< EB): EqualityConstraint[CA[EA], CB[EB]] = new BasicEqualityConstraint[CA[EA], CB[EB]](equalityOfA)
// trait MapConstraints
// Need a low priority one for the value going in VB <:< VA direction
implicit def mapEqualityConstraint[KA, VA, CA[_, _] <: collection.GenMap[_, _], KB, VB, CB[_, _] <: collection.GenMap[_, _]](implicit equalityOfA: Equality[CA[KA, VA]], kev: KA <:< KB, vev: VA <:< VB): EqualityConstraint[CA[KA, VA], CB[KB, VB]] = new BasicEqualityConstraint[CA[KA, VA], CB[KB, VB]](equalityOfA)
// trait ArrayConstraints
// Need a low priority one for the value going in VB <:< VA direction
implicit def seqArrayEqualityConstraint[EA, CA[_] <: collection.GenSeq[_], EB, CB[_] <: Array[_]](implicit equalityOfA: Equality[CA[EA]], ev: EA <:< EB): EqualityConstraint[CA[EA], CB[EB]] = new BasicEqualityConstraint[CA[EA], CB[EB]](equalityOfA)
// Need a low priority one for the value going in VB <:< VA direction
implicit def arraySeqEqualityConstraint[EA, CA[_] <: Array[_], EB, CB[_] <: collection.GenSeq[_]](implicit equalityOfA: Equality[CA[EA]], ev: EA <:< EB): EqualityConstraint[CA[EA], CB[EB]] = new BasicEqualityConstraint[CA[EA], CB[EB]](equalityOfA)
// Need a low priority one for the value going in VB <:< VA direction
implicit def arrayEqualityConstraint[EA, CA[_] <: Array[_], EB, CB[_] <: Array[_]](implicit equalityOfA: Equality[CA[EA]], ev: EA <:< EB): EqualityConstraint[CA[EA], CB[EB]] = new BasicEqualityConstraint[CA[EA], CB[EB]](equalityOfA)
// These are not in TypeCheckedTripleEquals or ConversionCheckedTripleEquals. Just available in addition.
trait SeqConstraints
trait SetConstraints
trait MapConstraints
trait TraversableConstraints extends SeqConstraints with SetConstraints with MapConstraints
trait ArrayConstraints
trait CollectionConstraints extends TraversableConstraints with ArrayConstraints
// They are all "equality" constraints, so maybe EqualityConstraints should be called something else
// like GeneralContraints.
*/
class TraversableConstraintsSpec extends Spec with NonImplicitAssertions with TypeCheckedTripleEquals {
}
| travisbrown/scalatest | src/test/scala/org/scalactic/TraversableConstraintsSpec.scala | Scala | apache-2.0 | 3,243 |
/*
* Copyright 2014 JHC Systems Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sqlest.sql
import org.scalatest._
import org.scalatest.matchers._
import sqlest._
import sqlest.ast._
class UpdateStatementBuilderSpec extends BaseStatementBuilderSpec {
implicit def statementBuilder = new base.StatementBuilder {}
"update" should "produce the right sql" in {
sql {
update(TableOne)
.set(
TableOne.col1 -> "a",
TableOne.col2 -> "b"
)
.where("c".column === "d".column && "e".column === "f".column)
} should equal(
s"""
|update one
|set col1 = ?, col2 = ?
|where ((? = ?) and (? = ?))
""".formatSql,
List(List("a", "b", "c", "d", "e", "f"))
)
}
it should "produce the right sql for an aliased table" in {
val tableOneAliased = new TableOne(Some("one_alias"))
sql {
update(tableOneAliased)
.set(
tableOneAliased.col1 -> "a",
tableOneAliased.col2 -> "b"
)
.where(tableOneAliased.col1 === "d".column && "e".column === "f".column)
} should equal(
s"""
|update one as one_alias
|set col1 = ?, col2 = ?
|where ((one_alias.col1 = ?) and (? = ?))
""".formatSql,
List(List("a", "b", "d", "e", "f"))
)
}
} | jhc-systems/sqlest | sqlest/src/test/scala/sqlest/sql/UpdateStatementBuilderSpec.scala | Scala | apache-2.0 | 1,849 |
package fpinscala.applicative
import org.scalatest.{FlatSpec, Matchers}
class ApplicativeSpec extends FlatSpec with Matchers {
import Applicative._
// Exercise 1
"An Applicative" should "make a product" in {
streamApplicative.product(Stream(1, 2), Stream(3, 4)) shouldBe Stream(
(1, 3),
(2, 4))
}
it should "make a sequence" in {
streamApplicative.sequence(List(Stream(1, 2), Stream(3, 4))) shouldBe Stream(
List(1, 3),
List(2, 4))
}
it should "traverse a List" in {
streamApplicative.traverse(List(1, 2, 3))(i => Stream(-i)) shouldBe Stream(
List(-1, -2, -3))
}
// Exercise 2
it should "apply a function inside Applicative" in {
streamApplicative.apply(Stream((i: Int) => i + 1, (i: Int) => i - 1))(
Stream(1, 2)) shouldBe Stream(2, 1)
}
it should "map an Applicative" in {
streamApplicative.map(Stream("a", "b", "c"))(_.toUpperCase) shouldBe Stream(
"A",
"B",
"C")
}
it should "map 2 Applicatives" in {
streamApplicative.map2(Stream(1, 2), Stream("one", "two"))(_ -> _) shouldBe Stream(
1 -> "one",
2 -> "two")
}
// Exercise 3
it should "map 3 Applicatives" in {
streamApplicative.map3(Stream("this"), Stream("is"), Stream("Sparta!"))(
_ + " " + _ + " " + _) shouldBe Stream("this is Sparta!")
}
it should "map 4 Applicatives" in {
streamApplicative.map4(
Stream(1, 2),
Stream(10, 20),
Stream(100, 200),
Stream(1000, 2000)
)(_ + _ + _ + _) shouldBe Stream(1111, 2222)
}
// Exercise 6
"the validationApplicative" should "accumulate errors" in {
validationApplicative[String].map3(
Success("ok"),
Failure("invalid username", Vector.empty),
Failure("invalid password", Vector("are you really 400 years old?"))
)((_, _, _) => "signed up") shouldBe Failure(
"invalid username",
Vector("invalid password", "are you really 400 years old?"))
}
it should "map successes" in {
validationApplicative[String].map2(
Success("mr.hacker"),
Success("god")
)((uname, pwd) => s"lame confirmation email: $uname, $pwd") shouldBe Success(
"lame confirmation email: mr.hacker, god")
}
// Exercise 8
"An Applicative" should "make a product of 2 Applicatives" in {
streamApplicative
.product(streamApplicative)
.apply((Stream((i: Int) => i + 1), Stream((i: Int) => i - 1)))(
(Stream(1), Stream(2))
) shouldBe (Stream(2), Stream(1))
}
// Exercise 9
it should "compose" in {
val ap = streamApplicative.compose(streamApplicative)
ap.apply(
Stream(Stream((i: Int) => i + 1), Stream((i: Int) => i - 1))
)(Stream(Stream(1), Stream(10))) shouldBe Stream(Stream(2), Stream(9))
}
// Exercise 12
it should "sequence over a Map" in {
streamApplicative.sequenceMap(Map("x" -> Stream(1, 2), "y" -> Stream(3, 4))) shouldBe Stream(
Map("x" -> 1, "y" -> 3),
Map("x" -> 2, "y" -> 4))
}
}
| goboss/fpinscala | exercises/src/test/scala/fpinscala/applicative/ApplicativeSpec.scala | Scala | mit | 2,999 |
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\\
* @ @ *
* # # # # (c) 2017 CAB *
* # # # # # # *
* # # # # # # # # # # # # *
* # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* @ @ *
\\* * http://github.com/alexcab * * * * * * * * * * * * * * * * * * * * * * * * * */
package manual.sketches
import mathact.core.bricks.plumbing.wiring.fun.FunWiring
import mathact.tools.EmptyBlock
import mathact.tools.workbenches.SimpleWorkbench
/** My second sketch
* Created by CAB on 31.12.2016.
*/
class MySecondSketch extends SimpleWorkbench {
//Blocks
class BlockB extends EmptyBlock with FunWiring {
//Connection points
val in1 = In[Double]
val in2 = In[String]
val out1 = Out[Double]
val out2 = Out[String]
//Wiring
in1.map(_.toString) >> out2
in1.filter(_ != 0) >> out1
in2.map(s ⇒ "Received: " + s) >> out2
}
//Connecting
//TODO
}
| AlexCAB/MathAct | mathact_examples/src/main/scala/manual/sketches/MySecondSketch.scala | Scala | mit | 1,723 |
abstract class Foo {
def identity[T](x : => T) : (=> T)
}
| yusuke2255/dotty | tests/untried/neg/t0351.scala | Scala | bsd-3-clause | 60 |
package epic.parser
import epic.parser.projections.AnchoredPCFGProjector
import breeze.util.SerializableLogging
import epic.constraints.ChartConstraints
import epic.framework.{EPScorer, EPInference}
import epic.trees.TreeInstance
import epic.lexicon.Lexicon
import epic.parser.models.AnnotatedParserInference
/**
* TODO
*
* @author dlwh
**/
case class EPChartFactory[L, W](topology: RuleTopology[L], lexicon: Lexicon[L, W], epInference: EPInference[TreeInstance[L, W], UnrefinedGrammarAnchoring[L, W]]) extends ParseMarginal.Factory[L, W] with SerializableLogging {
def apply(words: IndexedSeq[W], initialCore: ChartConstraints[L]): ParseMarginal[L, W] = {
val scorer = epInference.scorer(TreeInstance("", null, words))
val marg = epInference.marginal(scorer, TreeInstance("", null, words), UnrefinedGrammarAnchoring.identity(topology, lexicon, words, initialCore) )
marg.q.marginal
}
}
object EPChartFactory {
def apply[L, W](grammars: Grammar[L, W]*) = {
val infs = grammars.map(new AnnotatedParserInference(null, null, _, ChartConstraints.Factory.noSparsity))
new EPChartFactory(grammars.head.topology, grammars.head.lexicon, new EPInference(infs.toIndexedSeq, 5))
}
}
| langkilde/epic | src/main/scala/epic/parser/EPChartFactory.scala | Scala | apache-2.0 | 1,208 |
package io.buoyant.namerd.storage.consul
import com.fasterxml.jackson.annotation.JsonIgnore
import com.twitter.finagle.service.Backoff
import com.twitter.finagle.tracing.NullTracer
import com.twitter.finagle.{Http, Path}
import io.buoyant.config.types.Port
import io.buoyant.consul.utils.RichConsulClient
import io.buoyant.consul.v1.{ConsistencyMode, KvApi}
import io.buoyant.namer.BackoffConfig
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.Stack
import com.twitter.finagle.buoyant.TlsClientConfig
import io.buoyant.namerd.{DtabStore, DtabStoreConfig, DtabStoreInitializer}
case class ConsulConfig(
host: Option[String],
port: Option[Port],
pathPrefix: Option[Path],
token: Option[String] = None,
datacenter: Option[String] = None,
readConsistencyMode: Option[ConsistencyMode] = None,
writeConsistencyMode: Option[ConsistencyMode] = None,
failFast: Option[Boolean] = None,
backoff: Option[BackoffConfig] = None,
tls: Option[TlsClientConfig] = None
) extends DtabStoreConfig {
import ConsulConfig._
@JsonIgnore
private[this] val root = pathPrefix.getOrElse(Path.read("/namerd/dtabs"))
@JsonIgnore
override def mkDtabStore(params: Stack.Params): DtabStore = {
val serviceHost = host.getOrElse(DefaultHost)
val servicePort = port.getOrElse(DefaultPort).port
val backoffs = backoff.map(_.mk).getOrElse(DefaultBackoff)
val tlsParams = tls.map(_.params).getOrElse(Stack.Params.empty)
val service = Http.client
.interceptInterrupts
.failFast(failFast)
.setAuthToken(token)
.ensureHost(host, port)
.withTracer(NullTracer)
.withParams(tlsParams)
.newService(s"/$$/inet/$serviceHost/$servicePort")
new ConsulDtabStore(
KvApi(service, backoffs),
root,
datacenter = datacenter,
readConsistency = readConsistencyMode,
writeConsistency = writeConsistencyMode,
handlerUrl = s"storage/${root.show.drop(1)}.json"
)
}
}
object ConsulConfig {
val DefaultHost = "localhost"
val DefaultPort = Port(8500)
val DefaultBackoff = Backoff.decorrelatedJittered(1.millis, 1.minute)
}
class ConsulDtabStoreInitializer extends DtabStoreInitializer {
override def configClass = classOf[ConsulConfig]
override def configId = "io.l5d.consul"
}
object ConsulDtabStoreInitializer extends ConsulDtabStoreInitializer
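// A hedged example of the matching namerd storage config (the YAML shape is
// inferred from the case-class fields above; host/port/token values are
// illustrative):
//
//   storage:
//     kind: io.l5d.consul
//     host: consul.example.com
//     port: 8500
//     pathPrefix: /namerd/dtabs
//     token: my-acl-token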
| BuoyantIO/linkerd | namerd/storage/consul/src/main/scala/io/buoyant/namerd/storage/consul/ConsulDtabStoreInitializer.scala | Scala | apache-2.0 | 2,362 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.util.concurrent._
import scala.concurrent.{ExecutionContext, ExecutionContextExecutor}
import scala.util.control.NonFatal
import com.google.common.util.concurrent.{MoreExecutors, ThreadFactoryBuilder}
private[spark] object ThreadUtils {
private val sameThreadExecutionContext =
ExecutionContext.fromExecutorService(MoreExecutors.sameThreadExecutor())
/**
* An `ExecutionContextExecutor` that runs each task in the thread that invokes `execute/submit`.
* The caller should make sure the tasks running in this `ExecutionContextExecutor` are short and
* never block.
*/
def sameThread: ExecutionContextExecutor = sameThreadExecutionContext
/**
* Create a thread factory that names threads with a prefix and also sets the threads to daemon.
*/
def namedThreadFactory(prefix: String): ThreadFactory = {
new ThreadFactoryBuilder().setDaemon(true).setNameFormat(prefix + "-%d").build()
}
/**
* Wrapper over newCachedThreadPool. Thread names are formatted as prefix-ID, where ID is a
* unique, sequentially assigned integer.
*/
def newDaemonCachedThreadPool(prefix: String): ThreadPoolExecutor = {
val threadFactory = namedThreadFactory(prefix)
Executors.newCachedThreadPool(threadFactory).asInstanceOf[ThreadPoolExecutor]
}
/**
* Create a cached thread pool whose max number of threads is `maxThreadNumber`. Thread names
* are formatted as prefix-ID, where ID is a unique, sequentially assigned integer.
*/
def newDaemonCachedThreadPool(
prefix: String, maxThreadNumber: Int, keepAliveSeconds: Int = 60): ThreadPoolExecutor = {
val threadFactory = namedThreadFactory(prefix)
val threadPool = new ThreadPoolExecutor(
maxThreadNumber, // corePoolSize: the max number of threads to create before queuing the tasks
maxThreadNumber, // maximumPoolSize: because we use LinkedBlockingDeque, this one is not used
keepAliveSeconds,
TimeUnit.SECONDS,
new LinkedBlockingQueue[Runnable],
threadFactory)
threadPool.allowCoreThreadTimeOut(true)
threadPool
}
/**
* Wrapper over newFixedThreadPool. Thread names are formatted as prefix-ID, where ID is a
* unique, sequentially assigned integer.
*/
def newDaemonFixedThreadPool(nThreads: Int, prefix: String): ThreadPoolExecutor = {
val threadFactory = namedThreadFactory(prefix)
Executors.newFixedThreadPool(nThreads, threadFactory).asInstanceOf[ThreadPoolExecutor]
}
/**
* Wrapper over newSingleThreadExecutor.
*/
def newDaemonSingleThreadExecutor(threadName: String): ExecutorService = {
val threadFactory = new ThreadFactoryBuilder().setDaemon(true).setNameFormat(threadName).build()
Executors.newSingleThreadExecutor(threadFactory)
}
/**
* Wrapper over ScheduledThreadPoolExecutor.
*/
def newDaemonSingleThreadScheduledExecutor(threadName: String): ScheduledExecutorService = {
val threadFactory = new ThreadFactoryBuilder().setDaemon(true).setNameFormat(threadName).build()
val executor = new ScheduledThreadPoolExecutor(1, threadFactory)
// By default, a cancelled task is not automatically removed from the work queue until its delay
// elapses. We have to enable it manually.
executor.setRemoveOnCancelPolicy(true)
executor
}
/**
* Run a piece of code in a new thread and return the result. Exception in the new thread is
* thrown in the caller thread with an adjusted stack trace that removes references to this
* method for clarity. The exception stack traces will be like the following
*
* SomeException: exception-message
* at CallerClass.body-method (sourcefile.scala)
* at ... run in separate thread using org.apache.spark.util.ThreadUtils ... ()
* at CallerClass.caller-method (sourcefile.scala)
* ...
*/
def runInNewThread[T](
threadName: String,
isDaemon: Boolean = true)(body: => T): T = {
@volatile var exception: Option[Throwable] = None
@volatile var result: T = null.asInstanceOf[T]
val thread = new Thread(threadName) {
override def run(): Unit = {
try {
result = body
} catch {
case NonFatal(e) =>
exception = Some(e)
}
}
}
thread.setDaemon(isDaemon)
thread.start()
thread.join()
exception match {
case Some(realException) =>
// Remove the part of the stack that shows method calls into this helper method
// This means drop everything from the top until the stack element
// ThreadUtils.runInNewThread(), and then drop that as well (hence the `drop(1)`).
val baseStackTrace = Thread.currentThread().getStackTrace().dropWhile(
! _.getClassName.contains(this.getClass.getSimpleName)).drop(1)
// Remove the part of the new thread stack that shows methods call from this helper method
val extraStackTrace = realException.getStackTrace.takeWhile(
! _.getClassName.contains(this.getClass.getSimpleName))
// Combine the two stack traces, with a place holder just specifying that there
// was a helper method used, without any further details of the helper
val placeHolderStackElem = new StackTraceElement(
s"... run in separate thread using ${ThreadUtils.getClass.getName.stripSuffix("$")} ..",
" ", "", -1)
val finalStackTrace = extraStackTrace ++ Seq(placeHolderStackElem) ++ baseStackTrace
// Update the stack trace and rethrow the exception in the caller thread
realException.setStackTrace(finalStackTrace)
throw realException
case None =>
result
}
}
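// A hedged usage sketch of runInNewThread (illustrative only):
//
//   val answer = ThreadUtils.runInNewThread("init-thread") { 21 * 2 }
//   // answer == 42; an exception thrown by the body would be rethrown here,
//   // with the adjusted stack trace described above.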
}
| chenc10/Spark-PAF | core/src/main/scala/org/apache/spark/util/ThreadUtils.scala | Scala | apache-2.0 | 6,501 |
import es.weso.reconciliator.CountryReconciliator
object Main {
def main(args: Array[String]): Unit = { }
} | weso/CountryReconciliator | src/main/scala/Main.scala | Scala | apache-2.0 | 113 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail.internal
import cats.effect.Sync
import cats.syntax.all._
import monix.tail.Iterant
import monix.tail.Iterant.{Concat, Halt, Last, Next, NextBatch, NextCursor, Scope, Suspend}
import monix.tail.batches.BatchCursor
import scala.util.control.NonFatal
private[tail] object IterantConcat {
/**
* Implementation for `Iterant#flatMap`
*/
def flatMap[F[_], A, B](source: Iterant[F, A], f: A => Iterant[F, B])(implicit F: Sync[F]): Iterant[F, B] = {
source match {
case Halt(_) =>
// Fast-path
source.asInstanceOf[Iterant[F, B]]
case Suspend(rest) =>
// Fast-path
Suspend(rest.map(new UnsafeFlatMapLoop[F, A, B](f)))
case _ =>
// Suspending execution in order to preserve laziness and
// referential transparency, since the provided function can
// be side effecting and because processing NextBatch and
// NextCursor states can have side effects
Suspend(F.delay(new UnsafeFlatMapLoop(f).apply(source)))
}
}
/**
* Implementation for `Iterant#unsafeFlatMap`
*/
def unsafeFlatMap[F[_], A, B](source: Iterant[F, A])(f: A => Iterant[F, B])(implicit F: Sync[F]): Iterant[F, B] = {
source match {
case Last(item) =>
try f(item)
catch { case e if NonFatal(e) => Iterant.raiseError(e) }
case empty @ Halt(_) =>
empty.asInstanceOf[Iterant[F, B]]
case _ =>
new UnsafeFlatMapLoop(f).apply(source)
}
}
private final class UnsafeFlatMapLoop[F[_], A, B](f: A => Iterant[F, B])(implicit F: Sync[F])
extends Iterant.Visitor[F, A, Iterant[F, B]] {
loop =>
def visit(ref: Next[F, A]): Iterant[F, B] =
generate(ref.item, ref.rest.map(loop))
def visit(ref: NextCursor[F, A]): Iterant[F, B] =
evalNextCursor(ref, ref.cursor, ref.rest)
def visit(ref: NextBatch[F, A]): Iterant[F, B] = {
val cursor = ref.batch.cursor()
val rest = ref.rest
val nextRef = NextCursor(cursor, rest)
evalNextCursor(nextRef, cursor, rest)
}
def visit(ref: Suspend[F, A]): Iterant[F, B] =
Suspend(ref.rest.map(loop))
def visit(ref: Concat[F, A]): Iterant[F, B] =
ref.runMap(loop)
def visit[S](ref: Scope[F, S, A]): Iterant[F, B] =
ref.runMap(loop)
def visit(ref: Last[F, A]): Iterant[F, B] =
f(ref.item)
def visit(ref: Halt[F, A]): Iterant[F, B] =
ref.asInstanceOf[Iterant[F, B]]
def fail(e: Throwable): Iterant[F, B] =
Iterant.raiseError(e)
private def generate(item: A, rest: F[Iterant[F, B]]): Iterant[F, B] =
f(item) match {
case Last(value) =>
Next(value, rest)
case h @ Halt(e) =>
e match {
case None => Suspend(rest)
case _ => h.asInstanceOf[Iterant[F, B]]
}
case next =>
concat(next, rest)
}
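    // Processes one element of a cursor-backed node: if the cursor is already
    // exhausted we suspend on `rest`; otherwise we consume one item and, while
    // the cursor still has elements, re-process the same NextCursor reference
    // as the tail, so the batch is drained without re-evaluating `rest`.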
private def evalNextCursor(ref: NextCursor[F, A], cursor: BatchCursor[A], rest: F[Iterant[F, A]]) = {
if (!cursor.hasNext()) {
Suspend(rest.map(loop))
} else {
val item = cursor.next()
// If iterator is empty then we can skip a beat
val tail =
if (cursor.hasNext()) F.pure(ref).map(loop)
else rest.map(loop)
generate(item, tail)
}
}
}
/**
* Implementation for `Iterant#++`
*/
def concat[F[_], A](lhs: Iterant[F, A], rhs: F[Iterant[F, A]])(implicit F: Sync[F]): Iterant[F, A] = {
lhs match {
case Last(item) =>
Next(item, rhs)
case Halt(e) =>
e match {
case None => Suspend(rhs)
case _ => lhs
}
case _ =>
Concat(F.pure(lhs), rhs)
}
}
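  // Note on the cases above: a `Last` left-hand side is fused directly into a
  // `Next` node instead of allocating a `Concat`; a normally completed `Halt`
  // (error == None) contributes nothing, so the result is just `rhs` suspended;
  // and a failed `Halt` is propagated as-is, meaning `rhs` is never evaluated.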
/**
* Implementation for `Iterant.tailRecM`
*/
def tailRecM[F[_], A, B](a: A)(f: A => Iterant[F, Either[A, B]])(implicit F: Sync[F]): Iterant[F, B] = {
def loop(a: A): Iterant[F, B] =
unsafeFlatMap(f(a)) {
case Right(b) =>
Last(b)
case Left(nextA) =>
Suspend(F.delay(loop(nextA)))
}
// Function `f` may be side-effecting, or it might trigger
// side-effects, so we must suspend it
Suspend(F.delay(loop(a)))
}
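  // Illustrative sketch, not part of the original source (assumes `cats.effect.IO`
  // and the usual `Iterant.pure` constructor): because every `Left` step is
  // re-suspended via F.delay, the recursion stays stack-safe even for long chains:
  //
  //   val done: Iterant[IO, Int] =
  //     IterantConcat.tailRecM(100000) { n =>
  //       if (n == 0) Iterant.pure[IO, Either[Int, Int]](Right(0))
  //       else Iterant.pure[IO, Either[Int, Int]](Left(n - 1))
  //     }
  //   // Evaluating `done` performs 100000 iterations without overflowing the stack.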
}
| monix/monix | monix-tail/shared/src/main/scala/monix/tail/internal/IterantConcat.scala | Scala | apache-2.0 | 4,869 |
package org.kokho.scheduling.rts.multicritical
import org.kokho.scheduling.ScheduledJob
import org.slf4j.LoggerFactory
import scala.collection.mutable
/**
* Created with IntelliJ IDEA on 6/3/15.
 * @author Mikhail Kokho
*/
final class SwapSchedule(partition: Seq[Seq[MulticriticalTask]])
extends MulticriticalSchedule(partition) {
private val logger = LoggerFactory.getLogger(classOf[SwapSchedule])
private var absoluteTime = 0
private val swapPoints: mutable.Queue[SwapPoint] = mutable.Queue()
private val schedulesPermutation: Array[LocalSchedule] = localSchedules.toArray
private def releaseGlobally(task: LoCriticalTask, destinationSchedule: LocalSchedule) = {
val hostSchedule = taskToLocalSchedule(task).get
val job = hostSchedule.releaseEarlyJob(task)
destinationSchedule.insertJob(job)
}
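  /**
   * Searches for a point in time, before the given task's deadline, at which an
   * early-released job can be split between the two given schedules: the slack
   * accumulated behind the candidate point on one processor plus the slack
   * available ahead of it on the other must cover the task's execution time.
   * Returns None if no such point exists.
   */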
private def findSwapPoint(startSchedule: LocalSchedule,
endSchedule: LocalSchedule,
task: LoCriticalTask): Option[SwapPoint] = {
val states = List(startSchedule, endSchedule)
.map(_.slackForecast(task.deadline).toList)
.map(new SlackAnalyzer(_, absoluteTime, absoluteTime + task.deadline))
assert(states.count(_.totalSlack >= task.execution) == 0,
"Swap is redundant. There is enough slack on one processor")
def findHelper(states: List[SlackAnalyzer], t: Int): Option[SwapPoint] = {
if (t - absoluteTime >= task.deadline) None
else {
states foreach (_.advanceTime())
if (!states.forall(_.isSwapAvailable))
findHelper(states, t + 1)
else states match {
case a :: b :: _ =>
if (a.slackBehind + b.slackAhead >= task.execution)
Some(SwapPoint(t, a.slackUnitsBehind(t) ++ b.slackUnitsAhead(t), startSchedule, endSchedule))
else if (b.slackBehind + a.slackAhead >= task.execution)
Some(SwapPoint(t, b.slackUnitsBehind(t) ++ a.slackUnitsAhead(t), endSchedule, startSchedule))
else findHelper(states, t + 1)
          case _ => throw new IllegalArgumentException(s"Unexpected variable: $states")
}
}
}
if (states.map(_.totalSlack).sum < task.execution)
None
else
findHelper(states, absoluteTime + 1)
}
def planSwap(task: LoCriticalTask, swapPoint: SwapPoint): Unit = {
assert(swapPoint.executionPlan.forall(_ >= absoluteTime), "Cannot back-schedule")
logger.info("Planning swap at " + swapPoint.t)
swapPoints += swapPoint
val taskSchedule = taskToLocalSchedule(task).get
val job = taskSchedule.releaseEarlyJob(task).job
    // We create two swap jobs and insert them into the corresponding schedules.
val planStart = swapPoint.executionPlan.takeWhile(_ < swapPoint.t).toList
val planEnd = swapPoint.executionPlan.dropWhile(_ < swapPoint.t).take(job.length - planStart.size).toList
swapPoint.startSchedule.insertSwapJob(SwapJob(job, swapPoint.t, planStart, true))
swapPoint.endSchedule.insertSwapJob(SwapJob(job, swapPoint.t, planEnd, false))
}
private def releaseSwap(task: LoCriticalTask) = {
def releaseSwapHelper(pairs: List[Seq[LocalSchedule]]): Unit = pairs match {
case Nil =>
case head :: tail =>
findSwapPoint(head(0), head(1), task) match {
case None => releaseSwapHelper(tail)
case Some(sp) => planSwap(task, sp)
}
}
val allSchedulePairs = localSchedules.filter(!_.isSwapActive()).combinations(2)
releaseSwapHelper(allSchedulePairs.toList)
}
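  /**
   * When the head of the swap-point queue is reached, exchanges the positions of
   * the two participating schedules in `schedulesPermutation`, so that the job
   * being swapped continues executing on the other processor from that point on.
   */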
def swapSchedules(): Unit = if (swapPoints.nonEmpty) {
val swapPoint = swapPoints.head
if (swapPoint.t == absoluteTime) {
val idxOfStartSchedule = schedulesPermutation.indexOf(swapPoint.startSchedule)
val idxOfEndSchedule = schedulesPermutation.indexOf(swapPoint.endSchedule)
schedulesPermutation(idxOfStartSchedule) = swapPoint.endSchedule
schedulesPermutation(idxOfEndSchedule) = swapPoint.startSchedule
swapPoints.dequeue()
}
}
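  /**
   * Reclaims slack for tasks that are eligible for early release: each such task
   * is released on a schedule that has enough slack for it; if no single schedule
   * does, a cross-processor swap is attempted via `releaseSwap`.
   */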
private def slackReclamation() = {
val tasksForER = localSchedules
      .flatMap(_.tasksForEarlyRelease)
.filter(task => localSchedules.forall(!_.isSwapActive(task)))
for (task <- tasksForER) {
localSchedules find (_.hasSlackForTask(task)) match {
case Some(sch) => releaseGlobally(task, sch)
case None => releaseSwap(task)
}
}
}
private def debug(from: Int, to: Int) = {
if (absoluteTime >= from && absoluteTime < to) {
println(s"Debug info at time $absoluteTime")
val len = to - from
localSchedules foreach (sch => {
val forecast: Seq[SlackPeriod] = sch.slackForecast(len)
val totalSlack = SlackPeriod.totalSlack(forecast, to)
println(s"Total slack $totalSlack in $forecast")
})
println(s"Swap points: $swapPoints")
println(" - - - - - ")
}
}
override def next(): Seq[ScheduledJob] = {
// debug(396, 416)
swapSchedules()
slackReclamation()
absoluteTime += 1
// schedulesPermutation.map(localSchedules(_)).map(itr => itr.next()).toList
schedulesPermutation.map(itr => itr.next()).toList
}
private case class SwapPoint(t: Int, executionPlan: Seq[Int], startSchedule: LocalSchedule, endSchedule: LocalSchedule) {
def slackBehind = executionPlan.count(_ < t)
    def slackAhead = executionPlan.count(_ >= t)
}
}
| mkokho/dynoslack | old_sources/main/scala/kokho/scheduling/rts/multicritical/SwapSchedule.scala | Scala | apache-2.0 | 5,414 |
package biz.k11i.xgboost.spark.demo
import biz.k11i.xgboost.TemporaryFileResource
import biz.k11i.xgboost.spark.model.XGBoostRegression
import org.apache.spark.SparkConf
import org.apache.spark.ml.evaluation.RegressionEvaluator
import org.apache.spark.sql.SparkSession
object RegressionExample {
def main(args: Array[String]): Unit = {
val sparkConf = new SparkConf().setAppName("RegressionExample")
.setMaster("local")
val sparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
val tempFileResource = new TemporaryFileResource
try {
run(sparkSession, tempFileResource)
} finally {
tempFileResource.close()
}
}
def run(sparkSession: SparkSession, tempFileResource: TemporaryFileResource) {
val modelPath = tempFileResource.getAsPath("model/gbtree/spark/housing.model.spark").toString
val testDataPath = tempFileResource.getAsPath("data/housing.test").toString
val regressor = XGBoostRegression.load(modelPath)
val df = sparkSession.sqlContext.read
.format("libsvm")
.option("vectorType", "dense")
.load(testDataPath)
// Predict prices
val predDF = regressor.transform(df)
predDF.select("prediction", "label")
.show()
// Evaluate
val rmse = new RegressionEvaluator()
.setMetricName("rmse")
.evaluate(predDF)
println(s"RMSE: $rmse")
}
}
| komiya-atsushi/xgboost-predictor-java | xgboost-predictor-examples/src/main/scala/biz/k11i/xgboost/spark/demo/RegressionExample.scala | Scala | apache-2.0 | 1,384 |
/*
* Copyright © 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi.e2e.v1
import java.io.ByteArrayInputStream
import java.net.URLEncoder
import java.util.zip.{ZipEntry, ZipInputStream}
import akka.actor.ActorSystem
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.BasicHttpCredentials
import akka.http.scaladsl.testkit.RouteTestTimeout
import akka.pattern._
import org.knora.webapi.SharedOntologyTestDataADM._
import org.knora.webapi.SharedTestDataADM._
import org.knora.webapi._
import org.knora.webapi.messages.store.triplestoremessages._
import org.knora.webapi.messages.v1.responder.resourcemessages.PropsGetForRegionV1
import org.knora.webapi.messages.v1.responder.resourcemessages.ResourceV1JsonProtocol._
import org.knora.webapi.routing.v1.{ResourcesRouteV1, ValuesRouteV1}
import org.knora.webapi.routing.v2.ResourcesRouteV2
import org.knora.webapi.testing.tags.E2ETest
import org.knora.webapi.util.{AkkaHttpUtils, MutableTestIri}
import org.scalatest.Assertion
import org.xmlunit.builder.{DiffBuilder, Input}
import org.xmlunit.diff.Diff
import resource._
import spray.json._
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContextExecutor, Future}
import scala.util.Random
import scala.xml.{Node, NodeSeq, XML}
/**
 * End-to-end test specification for the resources endpoint. This specification uses the Akka HTTP Route TestKit,
 * the successor of the Spray Testkit documented here: http://spray.io/documentation/1.2.2/spray-testkit/
*/
@E2ETest
class ResourcesV1R2RSpec extends R2RSpec {
override def testConfigSource: String =
"""
|# akka.loglevel = "DEBUG"
|# akka.stdout-loglevel = "DEBUG"
""".stripMargin
private val resourcesPathV1 = new ResourcesRouteV1(routeData).knoraApiPath
private val resourcesPathV2 = new ResourcesRouteV2(routeData).knoraApiPath
private val valuesPathV1 = new ValuesRouteV1(routeData).knoraApiPath
private val superUser = SharedTestDataADM.superUser
private val superUserEmail = superUser.email
private val imagesUser = SharedTestDataADM.imagesUser01
private val imagesUserEmail = imagesUser.email
private val incunabulaUser = SharedTestDataADM.incunabulaProjectAdminUser
private val incunabulaUserEmail = incunabulaUser.email
private val incunabulaUser2 = SharedTestDataADM.incunabulaCreatorUser
private val incunabulaUserEmail2 = incunabulaUser2.email
private val anythingUser = SharedTestDataADM.anythingUser1
private val anythingUserEmail = anythingUser.email
private val anythingAdmin = SharedTestDataADM.anythingAdminUser
private val anythingAdminEmail = anythingAdmin.email
private val beolUser = SharedTestDataADM.beolUser
private val beolUserEmail = beolUser.email
private val password = "test"
implicit def default(implicit system: ActorSystem): RouteTestTimeout = RouteTestTimeout(settings.defaultTimeout * 2)
implicit val ec: ExecutionContextExecutor = system.dispatcher
override lazy val rdfDataObjects = List(
RdfDataObject(path = "_test_data/ontologies/example-box.ttl", name = "http://www.knora.org/ontology/shared/example-box"),
RdfDataObject(path = "_test_data/ontologies/example-ibox.ttl", name = "http://www.knora.org/ontology/shared/example-ibox"),
RdfDataObject(path = "_test_data/ontologies/empty-thing-onto.ttl", name = "http://www.knora.org/ontology/0001/empty-thing"),
RdfDataObject(path = "_test_data/all_data/anything-data.ttl", name = "http://www.knora.org/data/0001/anything"),
RdfDataObject(path = "_test_data/demo_data/images-demo-data.ttl", name = "http://www.knora.org/data/00FF/images"),
RdfDataObject(path = "_test_data/all_data/incunabula-data.ttl", name = "http://www.knora.org/data/0803/incunabula")
)
private val firstThingIri = new MutableTestIri
private val firstTextValueIRI = new MutableTestIri
private val secondThingIri = new MutableTestIri
private val thirdThingIri = new MutableTestIri
private val fourthThingIri = new MutableTestIri
private val fifthThingIri = new MutableTestIri
private val sixthThingIri = new MutableTestIri
private val seventhThingIri = new MutableTestIri
private val eighthThingIri = new MutableTestIri
private val abelAuthorIri = new MutableTestIri
private val mathIntelligencerIri = new MutableTestIri
private val deutschesDingIri = new MutableTestIri
private val standoffLangDingIri = new MutableTestIri
private val thingWithString = new MutableTestIri
private val thingWithCreationDate = new MutableTestIri
// incunabula book with title "Eyn biechlin ..."
private val incunabulaBookBiechlin = "http://rdfh.ch/0803/9935159f67"
// incunabula book with title Quadragesimale
private val incunabulaBookQuadra = "http://rdfh.ch/0803/861b5644b302"
private val notTheMostBoringComment = "This is not the most boring comment I have seen."
private val mappingIri = OntologyConstants.KnoraBase.StandardMapping
private val xml1 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<text><strong>Test</strong><br/>text</text>
""".stripMargin
private val xml2 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<text>a <strong>new</strong> value</text>
""".stripMargin
private val xml3 =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<text>
| This text links to <a href="http://www.google.ch">Google</a> and a Knora <a class="salsah-link" href="$incunabulaBookBiechlin">resource</a>.
|</text>
""".stripMargin
private val xml4 =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<text>
| This text links to <a href="http://www.google.ch">Google</a> and a Knora <a class="salsah-link" href="$incunabulaBookBiechlin">resource</a> and another Knora resource <a class="salsah-link" href="$incunabulaBookQuadra">resource</a>.
|</text>
""".stripMargin
/**
* Gets the field `res_id` from a JSON response to resource creation.
*
* @param response the response sent back from the API.
* @return the value of `res_id`.
*/
private def getResIriFromJsonResponse(response: HttpResponse) = {
AkkaHttpUtils.httpResponseToJson(response).fields.get("res_id") match {
case Some(JsString(resourceId)) => resourceId
case None => throw InvalidApiJsonException(s"The response does not contain a field called 'res_id'")
      case other => throw InvalidApiJsonException(s"The response does not contain a res_id of type JsString, but $other")
}
}
/**
* Gets the field `id` from a JSON response to value creation (new value).
*
* @param response the response sent back from the API.
   * @return the value of `id`.
*/
private def getNewValueIriFromJsonResponse(response: HttpResponse) = {
AkkaHttpUtils.httpResponseToJson(response).fields.get("id") match {
case Some(JsString(resourceId)) => resourceId
      case None => throw InvalidApiJsonException(s"The response does not contain a field called 'id'")
      case other => throw InvalidApiJsonException(s"The response does not contain an 'id' of type JsString, but $other")
}
}
/**
* Gets the given property's values from a resource full response.
*
* @param response the response to a resource full request.
* @param prop the given property IRI.
* @return the property's values.
*/
private def getValuesForProp(response: HttpResponse, prop: IRI): JsValue = {
AkkaHttpUtils.httpResponseToJson(response).fields("props").asJsObject.fields(prop).asJsObject.fields("values")
}
/**
* Gets the given property's comments from a resource full response.
*
* @param response the response to a resource full request.
* @param prop the given property IRI.
* @return the property's comments.
*/
private def getCommentsForProp(response: HttpResponse, prop: IRI): JsValue = {
AkkaHttpUtils.httpResponseToJson(response).fields("props").asJsObject.fields(prop).asJsObject.fields("comments")
}
/**
* Creates a SPARQL query string to get the standoff links (direct links) for a given resource.
*
* @param resIri the resource whose standoff links are to be queried.
* @return SPARQL query string.
*/
private def getDirectLinksSPARQL(resIri: IRI): String = {
s"""
|PREFIX knora-base: <http://www.knora.org/ontology/knora-base#>
|
|SELECT ?referredResourceIRI WHERE {
| BIND(IRI("$resIri") as ?resIRI)
| ?resIRI knora-base:hasStandoffLinkTo ?referredResourceIRI .
|}
""".stripMargin
}
/**
   * Creates a SPARQL query to get the standoff link reifications of a given resource, in order to check their target resources and reference counts.
   *
   * @param resIri the resource whose standoff link reifications are to be queried.
* @return SPARQL query string.
*/
private def getRefCountsSPARQL(resIri: IRI): String = {
s"""
|PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
|PREFIX knora-base: <http://www.knora.org/ontology/knora-base#>
|
|SELECT DISTINCT ?reificationIRI ?object ?refCnt WHERE {
| BIND(IRI("$resIri") as ?resIRI)
| ?resIRI knora-base:hasStandoffLinkToValue ?reificationIRI .
| ?reificationIRI rdf:object ?object .
| ?reificationIRI knora-base:valueHasRefCount ?refCnt .
|}
""".stripMargin
}
private val search = "/v1/resources?restype_id=http%3A%2F%2Fwww.knora.org%2Fontology%2F0001%2Fanything%23Thing"
private val filter = "&searchstr=***"
/**
   * Tests the results of two subsequent, nearly identical requests
   * (used here for requesting different numbers of properties to be displayed).
   *
   * @param search the search query as a string.
   * @return nothing; assertions are made within this function.
   */
private def checkSearchWithDifferentNumberOfProperties(search: String): Assertion = {
Get(search) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val responseJson: JsObject = AkkaHttpUtils.httpResponseToJson(response)
val resources = responseJson.fields("resources")
.asInstanceOf[JsArray].elements
val labels: Set[String] = resources.map {
element => element.asJsObject.fields("value").asInstanceOf[JsArray].elements.head.asInstanceOf[JsString].value
}.toSet
val expectedLabels = Set("Sierra", "Hotel", "Delta", "Victor", "testding")
assert(expectedLabels.subsetOf(labels))
}
}
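  // Illustrative usage sketch (an assumption, not taken from the original tests):
  // the helper can be driven with the query vals defined above, e.g.
  //
  //   checkSearchWithDifferentNumberOfProperties(search + filter)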
"The Resources Endpoint" should {
"provide a HTML representation of the resource properties " in {
      /* Incunabula resources */
/* A Book without a preview image */
Get("/v1/resources.html/http%3A%2F%2Frdfh.ch%2F0803%2Fc5058f3a?noresedit=true&reqtype=properties") ~> resourcesPathV1 ~> check {
//log.debug("==>> " + responseAs[String])
assert(status === StatusCodes.OK)
assert(responseAs[String] contains "Physical description")
assert(responseAs[String] contains "Location")
assert(responseAs[String] contains "Publication location")
assert(responseAs[String] contains "URI")
assert(responseAs[String] contains "Title")
assert(responseAs[String] contains "Datum der Herausgabe")
assert(responseAs[String] contains "Citation/reference")
assert(responseAs[String] contains "Publisher")
}
/* A Page with a preview image */
Get("/v1/resources.html/http%3A%2F%2Frdfh.ch%2F0803%2Fde6c38ce3401?noresedit=true&reqtype=properties") ~> resourcesPathV1 ~> check {
//log.debug("==>> " + responseAs[String])
assert(status === StatusCodes.OK)
assert(responseAs[String] contains "preview")
assert(responseAs[String] contains "Original filename")
assert(responseAs[String] contains "Page identifier")
}
}
"get the regions of a page when doing a context query with resinfo set to true" in {
Get("/v1/resources/http%3A%2F%2Frdfh.ch%2F0803%2F9d626dc76c03?resinfo=true&reqtype=context") ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val responseJson: Map[String, JsValue] = responseAs[String].parseJson.asJsObject.fields
val resourceContext: Map[String, JsValue] = responseJson("resource_context").asJsObject.fields
val resinfo: Map[String, JsValue] = resourceContext("resinfo").asJsObject.fields
resinfo.get("regions") match {
case Some(JsArray(regionsVector)) =>
val regions: Vector[PropsGetForRegionV1] = regionsVector.map(_.convertTo[PropsGetForRegionV1])
val region1 = regions.filter {
region => region.res_id == "http://rdfh.ch/0803/021ec18f1735"
}
val region2 = regions.filter {
region => region.res_id == "http://rdfh.ch/0803/b6b64a62b006"
}
assert(region1.length == 1, "No region found with Iri 'http://rdfh.ch/0803/021ec18f1735'")
assert(region2.length == 1, "No region found with Iri 'http://rdfh.ch/0803/b6b64a62b006'")
case None => assert(false, "No regions given, but 2 were expected")
case _ => assert(false, "No valid regions given")
}
}
}
"create a resource of type 'images:person' in 'images' project" in {
val params =
s"""
|{
| "restype_id": "$IMAGES_ONTOLOGY_IRI#person",
| "label": "Testperson",
| "project_id": "$IMAGES_PROJECT_IRI",
| "properties": {
| "$IMAGES_ONTOLOGY_IRI#lastname": [{"richtext_value":{"utf8str":"Testname"}}],
| "$IMAGES_ONTOLOGY_IRI#firstname": [{"richtext_value":{"utf8str":"Name"}}]
| }
|}
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(imagesUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
}
}
"get a resource of type 'knora-base:Resource' with text with standoff" in {
val expectedXML =
"""<?xml version="1.0" encoding="UTF-8"?>
|<text><p>Derselbe Holzschnitt wird auf Seite <a href="http://rdfh.ch/0803/c9824353ae06" class="salsah-link">c7r</a> der lateinischen Ausgabe des Narrenschiffs verwendet.</p></text>
""".stripMargin
Get("/v1/resources/http%3A%2F%2Frdfh.ch%2F0803%2F047db418ae06") ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val text: JsValue = getValuesForProp(response, "http://www.knora.org/ontology/knora-base#hasComment")
val xml: String = text match {
case vals: JsArray =>
vals.elements.head.asJsObject.fields("xml") match {
case JsString(xml: String) => xml
case _ => throw new InvalidApiJsonException("member 'xml' not given")
}
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
// Compare the original XML with the regenerated XML.
val xmlDiff: Diff = DiffBuilder.compare(Input.fromString(expectedXML)).withTest(Input.fromString(xml)).build()
xmlDiff.hasDifferences should be(false)
}
}
"get a resource of type 'anything:thing' with two text with standoff" in {
val expectedXML1 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<text>Na ja, die <a href="http://rdfh.ch/0001/a-thing" class="salsah-link">Dinge</a> sind OK.</text>
""".stripMargin
val expectedXML2 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<text>Ich liebe die <a href="http://rdfh.ch/0001/a-thing" class="salsah-link">Dinge</a>, sie sind alles für mich.</text>
""".stripMargin
Get("/v1/resources/http%3A%2F%2Frdfh.ch%2F0001%2Fa-thing-with-text-values") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val text: JsValue = getValuesForProp(response, "http://www.knora.org/ontology/0001/anything#hasText")
val textValues: Seq[JsValue] = text match {
case vals: JsArray =>
vals.elements
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
val xmlStrings: Seq[String] = textValues.map {
(textVal: JsValue) =>
textVal.asJsObject.fields("xml") match {
case JsString(xml: String) => xml
case _ => throw new InvalidApiJsonException("member 'xml' not given")
}
}
assert(xmlStrings.length == 2)
// determine the index of the first and the second expected text value
val (dingeOk: Int, allesFuerMich: Int) = if (xmlStrings.head.contains("sind OK")) {
// expectedXML1 comes first, expectedXML2 comes second
(0, 1)
} else {
// expectedXML1 comes second, expectedXML2 comes first
(1, 0)
}
// Compare the original XML with the regenerated XML.
val xmlDiff1: Diff = DiffBuilder.compare(Input.fromString(expectedXML1)).withTest(Input.fromString(xmlStrings(dingeOk))).build()
val xmlDiff2: Diff = DiffBuilder.compare(Input.fromString(expectedXML2)).withTest(Input.fromString(xmlStrings(allesFuerMich))).build()
xmlDiff1.hasDifferences should be(false)
xmlDiff2.hasDifferences should be(false)
}
}
"create a first resource of type anything:Thing" in {
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "A thing",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasText": [{"richtext_value":{"xml": ${xml1.toJson.compactPrint}, "mapping_id": "$mappingIri"}}],
| "http://www.knora.org/ontology/0001/anything#hasInteger": [{"int_value":12345}],
| "http://www.knora.org/ontology/0001/anything#hasDecimal": [{"decimal_value":5.6}],
| "http://www.knora.org/ontology/0001/anything#hasUri": [{"uri_value":"http://dhlab.unibas.ch"}],
| "http://www.knora.org/ontology/0001/anything#hasDate": [{"date_value":"JULIAN:1291-08-01:1291-08-01"}],
| "http://www.knora.org/ontology/0001/anything#hasColor": [{"color_value":"#4169E1"}],
| "http://www.knora.org/ontology/0001/anything#hasListItem": [{"hlist_value":"http://rdfh.ch/lists/0001/treeList10"}],
| "http://www.knora.org/ontology/0001/anything#hasInterval": [{"interval_value": [1000000000000000.0000000000000001, 1000000000000000.0000000000000002]}],
| "http://www.knora.org/ontology/0001/anything#hasBoolean": [{"boolean_value":true}]
| }
|}
""".stripMargin
// TODO: these properties have been commented out in the thing test ontology because of compatibility with the GUI
// "http://www.knora.org/ontology/0001/anything#hasGeoname": [{"geoname_value": "2661602"}]
// "http://www.knora.org/ontology/0001/anything#hasGeometry": [{"geom_value":"{\\"status\\":\\"active\\",\\"lineColor\\":\\"#ff3333\\",\\"lineWidth\\":2,\\"points\\":[{\\"x\\":0.5516074450084602,\\"y\\":0.4444444444444444},{\\"x\\":0.2791878172588832,\\"y\\":0.5}],\\"type\\":\\"rectangle\\",\\"original_index\\":0}"}],
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val resId = getResIriFromJsonResponse(response)
firstThingIri.set(resId)
}
}
"get the created resource and check its standoff in the response" in {
Get("/v1/resources/" + URLEncoder.encode(firstThingIri.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val text: JsValue = getValuesForProp(response, "http://www.knora.org/ontology/0001/anything#hasText")
val xml: String = text match {
case vals: JsArray =>
vals.elements.head.asJsObject.fields("xml") match {
case JsString(xml: String) => xml
case _ => throw new InvalidApiJsonException("member 'xml' not given")
}
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
// Compare the original XML with the regenerated XML.
val xmlDiff: Diff = DiffBuilder.compare(Input.fromString(xml1)).withTest(Input.fromString(xml)).build()
xmlDiff.hasDifferences should be(false)
}
}
"create a new text value for the first thing resource" in {
val newValueParams =
s"""
|{
| "project_id": "http://rdfh.ch/projects/0001",
| "res_id": "${firstThingIri.get}",
| "prop": "http://www.knora.org/ontology/0001/anything#hasText",
| "richtext_value": {
| "xml": ${xml2.toJson.compactPrint},
| "mapping_id": "$mappingIri"
| }
|}
""".stripMargin
Post("/v1/values", HttpEntity(ContentTypes.`application/json`, newValueParams)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> valuesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val xml = AkkaHttpUtils.httpResponseToJson(response).fields.get("value") match {
case Some(value: JsObject) => value.fields.get("xml") match {
case Some(JsString(xml: String)) => xml
case _ => throw new InvalidApiJsonException("member 'xml' not given")
}
case _ => throw new InvalidApiJsonException("member 'value' not given")
}
// Compare the original XML with the regenerated XML.
val xmlDiff: Diff = DiffBuilder.compare(Input.fromString(xml2)).withTest(Input.fromString(xml)).build()
xmlDiff.hasDifferences should be(false)
val resId = getNewValueIriFromJsonResponse(response)
firstTextValueIRI.set(resId)
}
}
"change the created text value above for the first thing resource so it has a standoff link to incunabulaBookBiechlin" in {
val xml =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<text>a <u>new</u> value with a standoff <a class="salsah-link" href="$incunabulaBookBiechlin">link</a></text>
""".stripMargin
val newValueParams =
s"""
|{
| "project_id": "http://rdfh.ch/projects/0001",
| "richtext_value": {
| "xml": ${xml.toJson.compactPrint},
| "mapping_id": "$mappingIri"
| }
|}
""".stripMargin
Put("/v1/values/" + URLEncoder.encode(firstTextValueIRI.get, "UTF-8"), HttpEntity(ContentTypes.`application/json`, newValueParams)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> valuesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val resId = getNewValueIriFromJsonResponse(response)
firstTextValueIRI.set(resId)
}
}
"make sure that the first thing resource contains a direct standoff link to incunabulaBookBiechlin now" in {
val sparqlQuery = getDirectLinksSPARQL(firstThingIri.get)
Await.result(storeManager ? SparqlSelectRequest(sparqlQuery), 30.seconds) match {
case response: SparqlSelectResponse =>
val ref: Boolean = response.results.bindings.exists {
row: VariableResultsRow =>
row.rowMap("referredResourceIRI") == incunabulaBookBiechlin
}
assert(ref, s"No direct link to '$incunabulaBookBiechlin' found")
case _ => throw TriplestoreResponseException("Expected a SparqlSelectResponse")
}
}
"check that the first thing resource's standoff link reification has the correct reference count" in {
val sparqlQuery = getRefCountsSPARQL(firstThingIri.get)
Await.result(storeManager ? SparqlSelectRequest(sparqlQuery), 30.seconds) match {
case response: SparqlSelectResponse =>
val refCnt: Boolean = response.results.bindings.exists {
row: VariableResultsRow =>
row.rowMap("object") == incunabulaBookBiechlin &&
row.rowMap("refCnt").toInt == 1
}
assert(refCnt, s"Ref count for '$incunabulaBookBiechlin' should be 1")
case _ => throw TriplestoreResponseException("Expected a SparqlSelectResponse")
}
}
"create a second resource of type anything:Thing linking to the first thing via standoff" in {
val xml =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<text>This text <a class="salsah-link" href="${firstThingIri.get}">links</a> to a thing</text>
""".stripMargin
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "A second thing",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasText": [{"richtext_value":{"xml":${xml.toJson.compactPrint},"mapping_id" :"$mappingIri"}}],
| "http://www.knora.org/ontology/0001/anything#hasInteger": [{"int_value":12345}],
| "http://www.knora.org/ontology/0001/anything#hasDecimal": [{"decimal_value":5.6}],
| "http://www.knora.org/ontology/0001/anything#hasUri": [{"uri_value":"http://dhlab.unibas.ch"}],
| "http://www.knora.org/ontology/0001/anything#hasDate": [{"date_value":"JULIAN:1291-08-01:1291-08-01"}],
| "http://www.knora.org/ontology/0001/anything#hasColor": [{"color_value":"#4169E1"}],
| "http://www.knora.org/ontology/0001/anything#hasListItem": [{"hlist_value":"http://rdfh.ch/lists/0001/treeList10"}],
| "http://www.knora.org/ontology/0001/anything#hasInterval": [{"interval_value": [1000000000000000.0000000000000001, 1000000000000000.0000000000000002]}]
| }
|}
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val resId = getResIriFromJsonResponse(response)
secondThingIri.set(resId)
}
}
"get the second resource of type anything:Thing, containing the correct standoff link" in {
Get("/v1/resources/" + URLEncoder.encode(secondThingIri.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val text: JsValue = getValuesForProp(response, "http://www.knora.org/ontology/0001/anything#hasText")
val xmlString: String = text match {
case vals: JsArray =>
vals.elements.head.asJsObject.fields("xml") match {
case JsString(xml: String) => xml
case _ => throw new InvalidApiJsonException("member 'xml' not given")
}
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
// make sure that the xml contains a link to "firstThingIri"
val xml = XML.loadString(xmlString)
val link: NodeSeq = xml \\ "a"
assert(link.nonEmpty)
val target: Seq[Node] = link.head.attributes("href")
assert(target.nonEmpty && target.head.text == firstThingIri.get)
}
}
"get the first thing resource that is referred to by the second thing resource" in {
Get("/v1/resources/" + URLEncoder.encode(firstThingIri.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
// check if this resource is referred to by the second thing resource
val incoming = AkkaHttpUtils.httpResponseToJson(response).fields.get("incoming") match {
case Some(incomingRefs: JsArray) => incomingRefs
case None => throw InvalidApiJsonException(s"The response does not contain a field called 'incoming'")
case other => throw InvalidApiJsonException(s"The response does not contain a res_id of type JsObject, but $other")
}
val firstElement = incoming.elements.headOption match {
case Some(incomingRef: JsObject) => incomingRef
case None => throw NotFoundException("Field 'incoming' is empty, but one incoming reference is expected")
case other => throw InvalidApiJsonException("First element in 'incoming' is not a JsObject")
}
firstElement.fields.get("ext_res_id") match {
case Some(extResObj: JsObject) =>
// get the Iri of the referring resource
val idJsString = extResObj.fields.getOrElse("id", throw InvalidApiJsonException("No member 'id' given"))
// get the Iri of the property pointing to this resource
val propIriJsString = extResObj.fields.getOrElse("pid", throw InvalidApiJsonException("No member 'pid' given"))
idJsString match {
case JsString(id) =>
assert(id == secondThingIri.get, "This resource should be referred to by the second thing resource")
case other => throw InvalidApiJsonException("Id is not a JsString")
}
propIriJsString match {
case JsString(pid) =>
assert(pid == OntologyConstants.KnoraBase.HasStandoffLinkTo, s"This resource should be referred to by ${OntologyConstants.KnoraBase.HasStandoffLinkTo}")
case other => throw InvalidApiJsonException("pid is not a JsString")
}
case None => throw InvalidApiJsonException("Element in 'incoming' does not have a member 'ext_res_id'")
case other => throw InvalidApiJsonException("Element in 'incoming' is not a JsObject")
}
}
}
"not create a resource of type thing with an invalid standoff tag name" in {
// use a tag name that is not defined in the standard mapping ("trong" instead of "strong")
val xml =
"""<?xml version="1.0" encoding="UTF-8"?>
|<text>This <trong>text</trong></text>
""".stripMargin
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "A second thing",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasText": [{"richtext_value":{"xml":${xml.toJson.compactPrint}, "mapping_id": "$mappingIri"}}],
| "http://www.knora.org/ontology/0001/anything#hasInteger": [{"int_value":12345}],
| "http://www.knora.org/ontology/0001/anything#hasDecimal": [{"decimal_value":5.6}],
| "http://www.knora.org/ontology/0001/anything#hasUri": [{"uri_value":"http://dhlab.unibas.ch"}],
| "http://www.knora.org/ontology/0001/anything#hasDate": [{"date_value":"JULIAN:1291-08-01:1291-08-01"}],
| "http://www.knora.org/ontology/0001/anything#hasColor": [{"color_value":"#4169E1"}],
| "http://www.knora.org/ontology/0001/anything#hasListItem": [{"hlist_value":"http://rdfh.ch/lists/0001/treeList10"}],
| "http://www.knora.org/ontology/0001/anything#hasInterval": [{"interval_value": [1000000000000000.0000000000000001, 1000000000000000.0000000000000002]}]
| }
|}
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
// the route should reject the request because `trong` is not a tag name supported by the standard mapping
assert(status == StatusCodes.BadRequest, response.toString)
}
}
"not create a resource of type thing submitting a wrong standoff link" in {
val xml =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<text><u><strong>This</strong></u> <u>text</u> <a class="salsah-link" href="$incunabulaBookQuadra">links</a> to <a class="salsah-link" href="http://rdfh.ch/0803/9935159f">two</a> things</text>
""".stripMargin
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "A second thing",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasText": [{"richtext_value":{"xml":${xml.toJson.compactPrint},"mapping_id": "$mappingIri"}}],
| "http://www.knora.org/ontology/0001/anything#hasInteger": [{"int_value":12345}],
| "http://www.knora.org/ontology/0001/anything#hasDecimal": [{"decimal_value":5.6}],
| "http://www.knora.org/ontology/0001/anything#hasUri": [{"uri_value":"http://dhlab.unibas.ch"}],
| "http://www.knora.org/ontology/0001/anything#hasDate": [{"date_value":"JULIAN:1291-08-01:1291-08-01"}],
| "http://www.knora.org/ontology/0001/anything#hasColor": [{"color_value":"#4169E1"}],
| "http://www.knora.org/ontology/0001/anything#hasListItem": [{"hlist_value":"http://rdfh.ch/lists/0001/treeList10"}],
| "http://www.knora.org/ontology/0001/anything#hasInterval": [{"interval_value": [1000000000000000.0000000000000001, 1000000000000000.0000000000000002]}]
| }
|}
|
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
//println(response)
        // the route should reject the request because one of the target IRIs does not exist (although it is syntactically valid)
assert(status == StatusCodes.NotFound, response.toString)
}
}
"create a third resource of type thing with two standoff links to the same resource and a standoff link to another one" in {
val firstXML =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<text><u><strong>This</strong></u> <u>text</u> <a class="salsah-link" href="$incunabulaBookQuadra">links</a> to a thing</text>
""".stripMargin
val secondXML =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<text><u><strong>This</strong></u> <u>text</u> <a class="salsah-link" href="$incunabulaBookBiechlin">links</a> to the same thing <a class="salsah-link" href="$incunabulaBookBiechlin">twice</a> and to another <a class="salsah-link" href="$incunabulaBookQuadra">thing</a></text>
""".stripMargin
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "A second thing",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasText": [{"richtext_value":{"xml":${firstXML.toJson.compactPrint},"mapping_id": "$mappingIri"}}, {"richtext_value":{"xml":${secondXML.toJson.compactPrint},"mapping_id": "$mappingIri"}}],
| "http://www.knora.org/ontology/0001/anything#hasInteger": [{"int_value":12345}],
| "http://www.knora.org/ontology/0001/anything#hasDecimal": [{"decimal_value":5.6}],
| "http://www.knora.org/ontology/0001/anything#hasUri": [{"uri_value":"http://dhlab.unibas.ch"}],
| "http://www.knora.org/ontology/0001/anything#hasDate": [{"date_value":"JULIAN:1291-08-01:1291-08-01"}],
| "http://www.knora.org/ontology/0001/anything#hasColor": [{"color_value":"#4169E1"}],
| "http://www.knora.org/ontology/0001/anything#hasListItem": [{"hlist_value":"http://rdfh.ch/lists/0001/treeList10"}],
| "http://www.knora.org/ontology/0001/anything#hasInterval": [{"interval_value": [1000000000000000.0000000000000001, 1000000000000000.0000000000000002]}]
| }
|}
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val resId = getResIriFromJsonResponse(response)
thirdThingIri.set(resId)
}
}
"check that the third thing resource has two direct standoff links" in {
val sparqlQuery = getDirectLinksSPARQL(thirdThingIri.get)
Await.result(storeManager ? SparqlSelectRequest(sparqlQuery), 30.seconds) match {
case response: SparqlSelectResponse =>
val ref1: Boolean = response.results.bindings.exists {
row: VariableResultsRow =>
row.rowMap("referredResourceIRI") == incunabulaBookQuadra
}
val ref2: Boolean = response.results.bindings.exists {
row: VariableResultsRow =>
row.rowMap("referredResourceIRI") == incunabulaBookBiechlin
}
assert(ref1, s"No direct link to '$incunabulaBookQuadra' found")
assert(ref2, s"No direct link to '$incunabulaBookBiechlin' found")
case _ => throw TriplestoreResponseException("Expected a SparqlSelectResponse")
}
}
"check that the third thing resource's standoff link reifications have the correct reference counts" in {
val sparqlQuery = getRefCountsSPARQL(thirdThingIri.get)
Await.result(storeManager ? SparqlSelectRequest(sparqlQuery), 30.seconds) match {
case response: SparqlSelectResponse =>
val refCnt1: Boolean = response.results.bindings.exists {
row: VariableResultsRow =>
row.rowMap("object") == incunabulaBookQuadra &&
row.rowMap("refCnt").toInt == 2
}
val refCnt2: Boolean = response.results.bindings.exists {
row: VariableResultsRow =>
row.rowMap("object") == incunabulaBookBiechlin &&
row.rowMap("refCnt").toInt == 1
}
assert(refCnt1, s"Ref count for '$incunabulaBookQuadra' should be 2")
assert(refCnt2, s"Ref count for '$incunabulaBookBiechlin' should be 1")
case _ => throw TriplestoreResponseException("Expected a SparqlSelectResponse")
}
}
"mark a resource as deleted" in {
Delete("/v1/resources/http%3A%2F%2Frdfh.ch%2F0803%2F9d626dc76c03?deleteComment=deleted%20for%20testing") ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail2, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
}
}
"create a fourth resource of type anything:Thing with a hyperlink in standoff" in {
val xml =
"""<?xml version="1.0" encoding="UTF-8"?>
|<text>This text links to <a href="http://www.google.ch">Google</a>.</text>
""".stripMargin
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "A second thing",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasText": [{"richtext_value":{"xml":${xml.toJson.compactPrint},"mapping_id":"$mappingIri"}}],
| "http://www.knora.org/ontology/0001/anything#hasInteger": [{"int_value":12345}],
| "http://www.knora.org/ontology/0001/anything#hasDecimal": [{"decimal_value":5.6}],
| "http://www.knora.org/ontology/0001/anything#hasUri": [{"uri_value":"http://dhlab.unibas.ch"}],
| "http://www.knora.org/ontology/0001/anything#hasDate": [{"date_value":"JULIAN:1291-08-01:1291-08-01"}],
| "http://www.knora.org/ontology/0001/anything#hasColor": [{"color_value":"#4169E1"}],
| "http://www.knora.org/ontology/0001/anything#hasListItem": [{"hlist_value":"http://rdfh.ch/lists/0001/treeList10"}],
| "http://www.knora.org/ontology/0001/anything#hasInterval": [{"interval_value": [1000000000000000.0000000000000001, 1000000000000000.0000000000000002]}]
| }
|}
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val resId = getResIriFromJsonResponse(response)
fourthThingIri.set(resId)
}
}
"get the fourth resource of type anything:Thing, containing the hyperlink in standoff" in {
Get("/v1/resources/" + URLEncoder.encode(fourthThingIri.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val text: JsValue = getValuesForProp(response, "http://www.knora.org/ontology/0001/anything#hasText")
val xmlString: String = text match {
case vals: JsArray =>
vals.elements.head.asJsObject.fields("xml") match {
case JsString(xml: String) => xml
case _ => throw new InvalidApiJsonException("member 'xml' not given")
}
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
// make sure that the xml contains a link to http://www.google.ch
val xml = XML.loadString(xmlString)
val link: NodeSeq = xml \\ "a"
assert(link.nonEmpty)
val target: Seq[Node] = link.head.attributes("href")
assert(target.nonEmpty && target.head.text == "http://www.google.ch")
}
}
"create a fifth resource of type anything:Thing with various standoff markup including internal links and hyperlinks" in {
// xml3 contains a link to google.ch and to incunabulaBookBiechlin
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "A second thing",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasText": [{"richtext_value":{"xml":${xml3.toJson.compactPrint}, "mapping_id": "$mappingIri"}}],
| "http://www.knora.org/ontology/0001/anything#hasInteger": [{"int_value":12345}],
| "http://www.knora.org/ontology/0001/anything#hasDecimal": [{"decimal_value":5.6}],
| "http://www.knora.org/ontology/0001/anything#hasUri": [{"uri_value":"http://dhlab.unibas.ch"}],
| "http://www.knora.org/ontology/0001/anything#hasDate": [{"date_value":"JULIAN:1291-08-01:1291-08-01"}],
| "http://www.knora.org/ontology/0001/anything#hasColor": [{"color_value":"#4169E1"}],
| "http://www.knora.org/ontology/0001/anything#hasListItem": [{"hlist_value":"http://rdfh.ch/lists/0001/treeList10"}],
| "http://www.knora.org/ontology/0001/anything#hasInterval": [{"interval_value": [1000000000000000.0000000000000001, 1000000000000000.0000000000000002]}]
| }
|}
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val resId = getResIriFromJsonResponse(response)
fifthThingIri.set(resId)
}
}
"get the fifth resource of type anything:Thing, containing various standoff markup" in {
Get("/v1/resources/" + URLEncoder.encode(fifthThingIri.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val text: JsValue = getValuesForProp(response, "http://www.knora.org/ontology/0001/anything#hasText")
val xmlString: String = text match {
case vals: JsArray =>
vals.elements.head.asJsObject.fields("xml") match {
case JsString(xml: String) => xml
case _ => throw new InvalidApiJsonException("member 'xml' not given")
}
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
        // make sure that the correct standoff links and references are present:
        // xml3 contains a link to google.ch and to incunabulaBookBiechlin
val xml = XML.loadString(xmlString)
val links: NodeSeq = xml \\ "a"
// there should be two links
assert(links.length == 2)
assert(links.head.attributes.asAttrMap.keySet.size == 1) // The URL has no class attribute
val linkToGoogle: Seq[Node] = links.head.attributes("href")
assert(linkToGoogle.nonEmpty && linkToGoogle.head.text == "http://www.google.ch")
assert(links(1).attributes.asAttrMap("class") == "salsah-link") // The link to a resource IRI has class="salsah-link"
val linkKnoraResource: Seq[Node] = links(1).attributes("href")
assert(linkKnoraResource.nonEmpty && linkKnoraResource.head.text == incunabulaBookBiechlin)
// Compare the original XML with the regenerated XML.
val xmlDiff: Diff = DiffBuilder.compare(Input.fromString(xmlString)).withTest(Input.fromString(xml3)).build()
xmlDiff.hasDifferences should be(false)
}
}
"create a sixth resource of type anything:Thing with internal links to two different resources" in {
// xml4 contains a link to google.ch, to incunabulaBookBiechlin and to incunabulaBookQuadra
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "A second thing",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasText": [{"richtext_value":{"xml": ${xml4.toJson.compactPrint},"mapping_id": "$mappingIri"}}],
| "http://www.knora.org/ontology/0001/anything#hasInteger": [{"int_value":12345}],
| "http://www.knora.org/ontology/0001/anything#hasDecimal": [{"decimal_value":5.6}],
| "http://www.knora.org/ontology/0001/anything#hasUri": [{"uri_value":"http://dhlab.unibas.ch"}],
| "http://www.knora.org/ontology/0001/anything#hasDate": [{"date_value":"JULIAN:1291-08-01:1291-08-01"}],
| "http://www.knora.org/ontology/0001/anything#hasColor": [{"color_value":"#4169E1"}],
| "http://www.knora.org/ontology/0001/anything#hasListItem": [{"hlist_value":"http://rdfh.ch/lists/0001/treeList10"}],
| "http://www.knora.org/ontology/0001/anything#hasInterval": [{"interval_value": [1000000000000000.0000000000000001, 1000000000000000.0000000000000002]}]
| }
|}
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val resId = getResIriFromJsonResponse(response)
sixthThingIri.set(resId)
}
}
"get the sixth resource of type anything:Thing with internal links to two different resources" in {
Get("/v1/resources/" + URLEncoder.encode(sixthThingIri.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val text: JsValue = getValuesForProp(response, "http://www.knora.org/ontology/0001/anything#hasText")
val xmlString: String = text match {
case vals: JsArray =>
vals.elements.head.asJsObject.fields("xml") match {
case JsString(xml: String) => xml
case _ => throw new InvalidApiJsonException("member 'xml' not given")
}
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
        // make sure that the correct standoff links and references are present:
        // xml4 contains a link to google.ch, to incunabulaBookBiechlin and to incunabulaBookQuadra
val xml = XML.loadString(xmlString)
val links: NodeSeq = xml \\ "a"
// there should be three links
assert(links.length == 3)
val linkToGoogle: Seq[Node] = links.head.attributes("href")
assert(linkToGoogle.nonEmpty && linkToGoogle.head.text == "http://www.google.ch")
val linkKnoraResource: Seq[Node] = links(1).attributes("href")
assert(linkKnoraResource.nonEmpty && linkKnoraResource.head.text == incunabulaBookBiechlin)
val linkKnoraResource2: Seq[Node] = links(2).attributes("href")
assert(linkKnoraResource2.nonEmpty && linkKnoraResource2.head.text == incunabulaBookQuadra)
// Compare the original XML with the regenerated XML.
val xmlDiff: Diff = DiffBuilder.compare(Input.fromString(xmlString)).withTest(Input.fromString(xml4)).build()
xmlDiff.hasDifferences should be(false)
}
}
"change a resource's label" in {
val newLabel = "my new label"
val params =
s"""
|{
| "label": "$newLabel"
|}
""".stripMargin
Put("/v1/resources/label/" + URLEncoder.encode("http://rdfh.ch/0803/c5058f3a", "UTF-8"), HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val label = AkkaHttpUtils.httpResponseToJson(response).fields.get("label") match {
case Some(JsString(str)) => str
case None => throw InvalidApiJsonException(s"The response does not contain a field called 'label'")
case other => throw InvalidApiJsonException(s"The response does not contain a label of type JsString, but $other")
}
assert(label == newLabel, "label has not been updated correctly")
}
}
"create a resource of type anything:Thing with a link (containing a comment) to another resource" in {
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "A thing with a link value that has a comment",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasText": [{"richtext_value": {"utf8str": "simple text"}}],
| "http://www.knora.org/ontology/0001/anything#hasOtherThing": [{"link_value":"${sixthThingIri.get}", "comment":"$notTheMostBoringComment"}]
| }
      |}
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val resId = getResIriFromJsonResponse(response)
seventhThingIri.set(resId)
}
}
"get the created resource and check the comment on the link value" in {
Get("/v1/resources/" + URLEncoder.encode(seventhThingIri.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val targetResourceIri: String = getValuesForProp(response, "http://www.knora.org/ontology/0001/anything#hasOtherThing") match {
case vals: JsArray =>
vals.elements.head.asInstanceOf[JsString].value
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
assert(targetResourceIri == sixthThingIri.get)
val linkValueComment: String = getCommentsForProp(response, "http://www.knora.org/ontology/0001/anything#hasOtherThing") match {
case vals: JsArray =>
vals.elements.head.asInstanceOf[JsString].value
case _ =>
throw new InvalidApiJsonException("comments is not an array")
}
assert(linkValueComment == notTheMostBoringComment)
}
}
"add a simple TextValue to the seventh resource" in {
val newValueParams =
s"""
|{
| "project_id": "http://rdfh.ch/projects/0001",
| "res_id": "${seventhThingIri.get}",
| "prop": "http://www.knora.org/ontology/0001/anything#hasText",
| "richtext_value": {
| "utf8str": "another simple text"
| }
|}
""".stripMargin
Post("/v1/values", HttpEntity(ContentTypes.`application/json`, newValueParams)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> valuesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val utf8str = AkkaHttpUtils.httpResponseToJson(response).fields.get("value") match {
case Some(value: JsObject) => value.fields.get("utf8str") match {
case Some(JsString(xml: String)) => xml
case _ => throw new InvalidApiJsonException("member 'utf8str' not given")
}
case _ => throw new InvalidApiJsonException("member 'value' not given")
}
assert(utf8str == "another simple text")
}
}
"create eighth resource of type anything:Thing with the date of the murder of Caesar" in {
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "A thing with a BCE date of the murder of Caesar",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasDate": [{"date_value": "JULIAN:44-03-15 BCE"}]
| }
      |}
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val resId = getResIriFromJsonResponse(response)
eighthThingIri.set(resId)
}
}
"get the eighth resource and check its date" in {
Get("/v1/resources/" + URLEncoder.encode(eighthThingIri.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val dateObj: JsObject = getValuesForProp(response, "http://www.knora.org/ontology/0001/anything#hasDate") match {
case vals: JsArray =>
vals.elements.head.asInstanceOf[JsObject]
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
// expected result:
// {"dateval1":"0044-03-15","calendar":"JULIAN","era1":"BCE","dateval2":"0044-03-15","era2":"BCE"}
dateObj.fields.get("dateval1") match {
case Some(JsString(dateval1)) => assert(dateval1 == "0044-03-15")
case None => throw InvalidApiJsonException("No member 'dateval1' given for date value")
case _ => throw InvalidApiJsonException("'dateval1' is not a JsString")
}
dateObj.fields.get("era1") match {
case Some(JsString(era1)) => assert(era1 == "BCE")
case None => throw InvalidApiJsonException("No member 'era1' given for date value")
case _ => throw InvalidApiJsonException("'era1' is not a JsString")
}
        dateObj.fields.get("dateval2") match {
          case Some(JsString(dateval2)) => assert(dateval2 == "0044-03-15")
          case None => throw InvalidApiJsonException("No member 'dateval2' given for date value")
          case _ => throw InvalidApiJsonException("'dateval2' is not a JsString")
        }
dateObj.fields.get("era2") match {
case Some(JsString(era2)) => assert(era2 == "BCE")
case None => throw InvalidApiJsonException("No member 'era2' given for date value")
case _ => throw InvalidApiJsonException("'era2' is not a JsString")
}
}
}
"create resources from an XML import" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/0801/biblio/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/0801/biblio/xml-import/v1# p0801-biblio.xsd"
| xmlns:p0801-biblio="http://api.knora.org/ontology/0801/biblio/xml-import/v1#"
| xmlns:p0801-beol="http://api.knora.org/ontology/0801/beol/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0801-beol:person id="abel">
| <knoraXmlImport:label>Niels Henrik Abel</knoraXmlImport:label>
| <p0801-beol:hasFamilyName knoraType="richtext_value">Abel</p0801-beol:hasFamilyName>
| <p0801-beol:hasGivenName knoraType="richtext_value">Niels Henrik</p0801-beol:hasGivenName>
| <p0801-beol:personHasTitle knoraType="richtext_value" lang="en">Sir</p0801-beol:personHasTitle>
| </p0801-beol:person>
| <p0801-beol:person id="holmes">
| <knoraXmlImport:label>Sherlock Holmes</knoraXmlImport:label>
| <p0801-beol:hasFamilyName knoraType="richtext_value">Holmes</p0801-beol:hasFamilyName>
| <p0801-beol:hasGivenName knoraType="richtext_value">Sherlock</p0801-beol:hasGivenName>
| </p0801-beol:person>
| <p0801-biblio:Journal id="math_intelligencer">
| <knoraXmlImport:label>Math Intelligencer</knoraXmlImport:label>
| <p0801-biblio:hasName knoraType="richtext_value">Math Intelligencer</p0801-biblio:hasName>
| </p0801-biblio:Journal>
| <p0801-biblio:JournalArticle id="strings_in_the_16th_and_17th_centuries">
| <knoraXmlImport:label>Strings in the 16th and 17th Centuries</knoraXmlImport:label>
| <p0801-biblio:p0801-beol__comment knoraType="richtext_value" mapping_id="$mappingIri">
| <text xmlns="">The most <strong>interesting</strong> article in <a class="salsah-link" href="ref:math_intelligencer">Math Intelligencer</a>.</text>
| </p0801-biblio:p0801-beol__comment>
| <p0801-biblio:endPage knoraType="richtext_value">73</p0801-biblio:endPage>
| <p0801-biblio:isPartOfJournal>
| <p0801-biblio:Journal knoraType="link_value" target="math_intelligencer" linkType="ref"/>
| </p0801-biblio:isPartOfJournal>
| <p0801-biblio:journalVolume knoraType="richtext_value">27</p0801-biblio:journalVolume>
| <p0801-biblio:publicationHasAuthor>
| <p0801-beol:person knoraType="link_value" linkType="ref" target="abel"/>
| </p0801-biblio:publicationHasAuthor>
| <p0801-biblio:publicationHasAuthor>
| <p0801-beol:person knoraType="link_value" linkType="ref" target="holmes"/>
| </p0801-biblio:publicationHasAuthor>
| <p0801-biblio:publicationHasDate knoraType="date_value">GREGORIAN:500 BC:400 BC</p0801-biblio:publicationHasDate>
| <p0801-biblio:publicationHasTitle knoraType="richtext_value">Strings in the 16th and 17th Centuries</p0801-biblio:publicationHasTitle>
| <p0801-biblio:publicationHasTitle knoraType="richtext_value">An alternate title</p0801-biblio:publicationHasTitle>
| <p0801-biblio:startPage knoraType="richtext_value">48</p0801-biblio:startPage>
| </p0801-biblio:JournalArticle>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/yTerZGyxjZVqFMNNKXCDPF", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(beolUserEmail, password)) ~> resourcesPathV1 ~> check {
val responseStr: String = responseAs[String]
assert(status == StatusCodes.OK, responseStr)
responseStr should include("createdResources")
val responseJson: JsObject = AkkaHttpUtils.httpResponseToJson(response)
val createdResources: Seq[JsValue] = responseJson.fields("createdResources").asInstanceOf[JsArray].elements
abelAuthorIri.set(createdResources.head.asJsObject.fields("resourceIri").asInstanceOf[JsString].value)
mathIntelligencerIri.set(createdResources(2).asJsObject.fields("resourceIri").asInstanceOf[JsString].value)
}
}
"reject XML import data that fails schema validation" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/0801/biblio/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/0801/biblio/xml-import/v1# p0801-biblio.xsd"
| xmlns:p0801-biblio="http://api.knora.org/ontology/0801/biblio/xml-import/v1#"
| xmlns:p0801-beol="http://api.knora.org/ontology/0801/beol/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0801-beol:person id="abel">
| <knoraXmlImport:label>Niels Henrik Abel</knoraXmlImport:label>
| <p0801-beol:hasFamilyName knoraType="richtext_value">Abel</p0801-beol:hasFamilyName>
| <p0801-beol:hasGivenName knoraType="richtext_value">Niels Henrik</p0801-beol:hasGivenName>
| </p0801-beol:person>
| <p0801-beol:person id="holmes">
| <knoraXmlImport:label>Sherlock Holmes</knoraXmlImport:label>
| <p0801-beol:hasFamilyName knoraType="richtext_value">Holmes</p0801-beol:hasFamilyName>
| <p0801-beol:hasGivenName knoraType="richtext_value">Sherlock</p0801-beol:hasGivenName>
| </p0801-beol:person>
| <p0801-biblio:Journal id="math_intelligencer">
| <knoraXmlImport:label>Math Intelligencer</knoraXmlImport:label>
| <p0801-biblio:hasName knoraType="richtext_value">Math Intelligencer</p0801-biblio:hasName>
| </p0801-biblio:Journal>
| <p0801-biblio:JournalArticle id="strings_in_the_16th_and_17th_centuries">
| <knoraXmlImport:label>Strings in the 16th and 17th Centuries</knoraXmlImport:label>
| <p0801-biblio:p0801-beol__comment knoraType="richtext_value" mapping_id="$mappingIri">
| <text xmlns="">The most <strong>interesting</strong> article in <a class="salsah-link" href="ref:math_intelligencer">Math Intelligencer</a>.</text>
| </p0801-biblio:p0801-beol__comment>
| <p0801-biblio:endPage knoraType="richtext_value">73</p0801-biblio:endPage>
| <p0801-biblio:isPartOfJournal>
| <p0801-biblio:Journal knoraType="link_value" target="math_intelligencer" linkType="ref"/>
| </p0801-biblio:isPartOfJournal>
| <p0801-biblio:journalVolume knoraType="richtext_value">27</p0801-biblio:journalVolume>
| <p0801-biblio:publicationHasAuthor>
| <p0801-beol:person knoraType="link_value" linkType="ref" target="abel"/>
| </p0801-biblio:publicationHasAuthor>
| <p0801-biblio:publicationHasAuthor>
| <p0801-beol:person knoraType="link_value" linkType="ref" target="holmes"/>
| </p0801-biblio:publicationHasAuthor>
| <p0801-biblio:publicationHasDate knoraType="date_value">GREGORIAN:19foo76</p0801-biblio:publicationHasDate>
| <p0801-biblio:publicationHasTitle knoraType="richtext_value" lang="en">Strings in the 16th and 17th Centuries</p0801-biblio:publicationHasTitle>
| <p0801-biblio:publicationHasTitle knoraType="richtext_value">An alternate title</p0801-biblio:publicationHasTitle>
| <p0801-biblio:startPage knoraType="richtext_value">48</p0801-biblio:startPage>
| </p0801-biblio:JournalArticle>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/yTerZGyxjZVqFMNNKXCDPF", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(beolUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.BadRequest, response.toString)
val responseStr = responseAs[String]
responseStr should include("org.xml.sax.SAXParseException")
responseStr should include("cvc-pattern-valid")
}
}
"refer to existing resources in an XML import" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/0801/biblio/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/0801/biblio/xml-import/v1# p0801-biblio.xsd"
| xmlns:p0801-biblio="http://api.knora.org/ontology/0801/biblio/xml-import/v1#"
| xmlns:p0801-beol="http://api.knora.org/ontology/0801/beol/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0801-biblio:JournalArticle id="strings_in_the_18th_century">
| <knoraXmlImport:label>Strings in the 18th Century</knoraXmlImport:label>
| <p0801-biblio:p0801-beol__comment knoraType="richtext_value" mapping_id="$mappingIri">
| <text xmlns="">The most <strong>boring</strong> article in <a class="salsah-link" href="${mathIntelligencerIri.get}">Math Intelligencer</a>.</text>
| </p0801-biblio:p0801-beol__comment>
| <p0801-biblio:endPage knoraType="richtext_value">76</p0801-biblio:endPage>
| <p0801-biblio:isPartOfJournal>
| <p0801-biblio:Journal knoraType="link_value" linkType="iri" target="${mathIntelligencerIri.get}"/>
| </p0801-biblio:isPartOfJournal>
| <p0801-biblio:journalVolume knoraType="richtext_value">27</p0801-biblio:journalVolume>
| <p0801-biblio:publicationHasAuthor>
| <p0801-beol:person knoraType="link_value" linkType="iri" target="${abelAuthorIri.get}"/>
| </p0801-biblio:publicationHasAuthor>
| <p0801-biblio:publicationHasDate knoraType="date_value">GREGORIAN:1977</p0801-biblio:publicationHasDate>
| <p0801-biblio:publicationHasTitle knoraType="richtext_value">Strings in the 18th Century</p0801-biblio:publicationHasTitle>
| <p0801-biblio:startPage knoraType="richtext_value">52</p0801-biblio:startPage>
| </p0801-biblio:JournalArticle>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/yTerZGyxjZVqFMNNKXCDPF", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(beolUserEmail, password)) ~> resourcesPathV1 ~> check {
val responseStr = responseAs[String]
assert(status == StatusCodes.OK, responseStr)
responseStr should include("createdResources")
}
}
"create an anything:Thing with all data types from an XML import" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/0001/anything/xml-import/v1# p0001-anything.xsd"
| xmlns:p0001-anything="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0001-anything:Thing id="test_thing">
| <knoraXmlImport:label>These are a few of my favorite things</knoraXmlImport:label>
| <p0001-anything:hasBoolean knoraType="boolean_value">true</p0001-anything:hasBoolean>
| <p0001-anything:hasColor knoraType="color_value">#4169E1</p0001-anything:hasColor>
| <p0001-anything:hasDate knoraType="date_value">JULIAN:1291-08-01:1291-08-01</p0001-anything:hasDate>
| <p0001-anything:hasDecimal knoraType="decimal_value">5.6</p0001-anything:hasDecimal>
| <p0001-anything:hasInteger knoraType="int_value">12345</p0001-anything:hasInteger>
| <p0001-anything:hasInterval knoraType="interval_value">1000000000000000.0000000000000001,1000000000000000.0000000000000002</p0001-anything:hasInterval>
| <p0001-anything:hasListItem knoraType="hlist_value">http://rdfh.ch/lists/0001/treeList10</p0001-anything:hasListItem>
| <p0001-anything:hasOtherThing>
| <p0001-anything:Thing knoraType="link_value" linkType="iri" target="${sixthThingIri.get}"/>
| </p0001-anything:hasOtherThing>
| <p0001-anything:hasText knoraType="richtext_value">This is a test.</p0001-anything:hasText>
| <p0001-anything:hasUri knoraType="uri_value">http://dhlab.unibas.ch</p0001-anything:hasUri>
| </p0001-anything:Thing>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/0001", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(anythingAdminEmail, password)) ~> resourcesPathV1 ~> check {
val responseStr = responseAs[String]
assert(status == StatusCodes.OK, responseStr)
responseStr should include("createdResources")
}
}
"not create an anything:Thing in the incunabula project in a bulk import" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/0001/anything/xml-import/v1# p0001-anything.xsd"
| xmlns:p0001-anything="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0001-anything:Thing id="test_thing">
| <knoraXmlImport:label>These are a few of my favorite things</knoraXmlImport:label>
| <p0001-anything:hasText knoraType="richtext_value">This is a test.</p0001-anything:hasText>
| </p0001-anything:Thing>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/0803", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.BadRequest, response.toString)
val responseStr = responseAs[String]
responseStr should include("not shared")
}
}
"not create a resource in a shared ontologies project in a bulk import" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/shared/example-box/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/shared/example-box/xml-import/v1# p0000-example-box.xsd"
| xmlns:p0000-example-box="http://api.knora.org/ontology/shared/example-box/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0000-example-box:Box id="test_box">
| <knoraXmlImport:label>test box</knoraXmlImport:label>
| <p0000-example-box:hasName knoraType="richtext_value">This is a test.</p0000-example-box:hasName>
| </p0000-example-box:Box>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://www.knora.org/ontology/knora-admin#DefaultSharedOntologiesProject", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(superUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.BadRequest, response.toString)
val responseStr = responseAs[String]
responseStr should include("Resources cannot be created in project")
}
}
"create a resource in the incunabula project using a class from the default shared ontologies project" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/shared/example-box/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/shared/example-box/xml-import/v1# p0000-example-box.xsd"
| xmlns:p0000-example-box="http://api.knora.org/ontology/shared/example-box/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0000-example-box:Box id="test_box">
| <knoraXmlImport:label>test box</knoraXmlImport:label>
| <p0000-example-box:hasName knoraType="richtext_value">This is a test.</p0000-example-box:hasName>
| </p0000-example-box:Box>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/0803", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> resourcesPathV1 ~> check {
val responseStr = responseAs[String]
assert(status == StatusCodes.OK, responseStr)
responseStr should include("createdResources")
}
}
"use a knora-base property directly in a bulk import" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/0001/anything/xml-import/v1# p0001-anything.xsd"
| xmlns:p0001-anything="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0001-anything:ThingWithSeqnum id="thing_with_seqnum">
| <knoraXmlImport:label>Thing with seqnum</knoraXmlImport:label>
| <p0001-anything:knoraXmlImport__seqnum knoraType="int_value">3</p0001-anything:knoraXmlImport__seqnum>
| </p0001-anything:ThingWithSeqnum>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/0001", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(anythingAdminEmail, password)) ~> resourcesPathV1 ~> check {
val responseStr = responseAs[String]
assert(status == StatusCodes.OK, responseStr)
responseStr should include("createdResources")
}
}
"serve a Zip file containing XML schemas for validating an XML import" in {
val ontologyIri = URLEncoder.encode("http://www.knora.org/ontology/0801/biblio", "UTF-8")
Get(s"/v1/resources/xmlimportschemas/$ontologyIri") ~> addCredentials(BasicHttpCredentials(beolUserEmail, password)) ~> resourcesPathV1 ~> check {
val responseBodyFuture: Future[Array[Byte]] = response.entity.toStrict(5.seconds).map(_.data.toArray)
val responseBytes: Array[Byte] = Await.result(responseBodyFuture, 5.seconds)
val zippedFilenames = collection.mutable.Set.empty[String]
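        // Drain the Zip stream, collecting the name of every entry it contains.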
for (zipInputStream <- managed(new ZipInputStream(new ByteArrayInputStream(responseBytes)))) {
var zipEntry: ZipEntry = null
while ( {
zipEntry = zipInputStream.getNextEntry
zipEntry != null
}) {
zippedFilenames.add(zipEntry.getName)
}
}
assert(zippedFilenames == Set("p0801-beol.xsd", "p0801-biblio.xsd", "knoraXmlImport.xsd"))
}
}
"consider inherited cardinalities when generating XML schemas for referenced ontologies in an XML import" in {
val ontologyIri = URLEncoder.encode("http://www.knora.org/ontology/0001/something", "UTF-8")
Get(s"/v1/resources/xmlimportschemas/$ontologyIri") ~> addCredentials(BasicHttpCredentials(beolUserEmail, password)) ~> resourcesPathV1 ~> check {
val responseBodyFuture: Future[Array[Byte]] = response.entity.toStrict(5.seconds).map(_.data.toArray)
val responseBytes: Array[Byte] = Await.result(responseBodyFuture, 5.seconds)
val zippedFilenames = collection.mutable.Set.empty[String]
for (zipInputStream <- managed(new ZipInputStream(new ByteArrayInputStream(responseBytes)))) {
var zipEntry: ZipEntry = null
while ( {
zipEntry = zipInputStream.getNextEntry
zipEntry != null
}) {
zippedFilenames.add(zipEntry.getName)
}
}
assert(zippedFilenames == Set("p0001-something.xsd", "knoraXmlImport.xsd", "p0001-anything.xsd"))
}
}
"follow rdfs:subClassOf when generating XML schemas for referenced ontologies in an XML import" in {
val ontologyIri = URLEncoder.encode("http://www.knora.org/ontology/0001/empty-thing", "UTF-8")
Get(s"/v1/resources/xmlimportschemas/$ontologyIri") ~> addCredentials(BasicHttpCredentials(beolUserEmail, password)) ~> resourcesPathV1 ~> check {
val responseBodyFuture: Future[Array[Byte]] = response.entity.toStrict(5.seconds).map(_.data.toArray)
val responseBytes: Array[Byte] = Await.result(responseBodyFuture, 5.seconds)
val zippedFilenames = collection.mutable.Set.empty[String]
for (zipInputStream <- managed(new ZipInputStream(new ByteArrayInputStream(responseBytes)))) {
var zipEntry: ZipEntry = null
while ( {
zipEntry = zipInputStream.getNextEntry
zipEntry != null
}) {
zippedFilenames.add(zipEntry.getName)
}
}
assert(zippedFilenames == Set("p0001-empty-thing.xsd", "knoraXmlImport.xsd", "p0001-anything.xsd"))
}
}
"create 10,000 anything:Thing resources with random contents" in {
def maybeAppendValue(random: Random, xmlStringBuilder: StringBuilder, value: String): Unit = {
if (random.nextBoolean) {
xmlStringBuilder.append(value)
}
}
val xmlStringBuilder = new StringBuilder
val random = new Random
xmlStringBuilder.append(
"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/0001/anything/xml-import/v1# anything.xsd"
| xmlns:p0001-anything="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
|
""".stripMargin)
for (i <- 1 to 10000) {
xmlStringBuilder.append(
s"""
|<p0001-anything:Thing id="test_thing_$i">
|<knoraXmlImport:label>This is thing $i</knoraXmlImport:label>
""".stripMargin)
maybeAppendValue(random = random,
xmlStringBuilder = xmlStringBuilder,
value =
"""
|<p0001-anything:hasBoolean knoraType="boolean_value">true</p0001-anything:hasBoolean>
""".stripMargin)
maybeAppendValue(random = random,
xmlStringBuilder = xmlStringBuilder,
value =
"""
|<p0001-anything:hasColor knoraType="color_value">#4169E1</p0001-anything:hasColor>
""".stripMargin)
maybeAppendValue(random = random,
xmlStringBuilder = xmlStringBuilder,
value =
"""
|<p0001-anything:hasDate knoraType="date_value">JULIAN:1291-08-01:1291-08-01</p0001-anything:hasDate>
""".stripMargin)
maybeAppendValue(random = random,
xmlStringBuilder = xmlStringBuilder,
value =
s"""
|<p0001-anything:hasDecimal knoraType="decimal_value">$i.$i</p0001-anything:hasDecimal>
""".stripMargin)
maybeAppendValue(random = random,
xmlStringBuilder = xmlStringBuilder,
value =
s"""
|<p0001-anything:hasInteger knoraType="int_value">$i</p0001-anything:hasInteger>
""".stripMargin)
maybeAppendValue(random = random,
xmlStringBuilder = xmlStringBuilder,
value =
"""
|<p0001-anything:hasInterval knoraType="interval_value">1000000000000000.0000000000000001,1000000000000000.0000000000000002</p0001-anything:hasInterval>
""".stripMargin)
maybeAppendValue(random = random,
xmlStringBuilder = xmlStringBuilder,
value =
"""
|<p0001-anything:hasListItem knoraType="hlist_value">http://rdfh.ch/lists/0001/treeList10</p0001-anything:hasListItem>
""".stripMargin)
maybeAppendValue(random = random,
xmlStringBuilder = xmlStringBuilder,
value =
s"""
|<p0001-anything:hasText knoraType="richtext_value">This is a test in thing $i.</p0001-anything:hasText>
""".stripMargin)
maybeAppendValue(random = random,
xmlStringBuilder = xmlStringBuilder,
value =
"""
|<p0001-anything:hasUri knoraType="uri_value">http://dhlab.unibas.ch</p0001-anything:hasUri>
""".stripMargin)
xmlStringBuilder.append(
"""
|</p0001-anything:Thing>
""".stripMargin)
}
xmlStringBuilder.append(
"""
|</knoraXmlImport:resources>
""".stripMargin)
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/0001", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlStringBuilder.toString)) ~> addCredentials(BasicHttpCredentials(anythingAdminEmail, password)) ~> resourcesPathV1 ~> check {
val responseStr = responseAs[String]
assert(status == StatusCodes.OK, responseStr)
responseStr should include("createdResources")
}
}
"create a resource of type anything:Thing with textValue which has language" in {
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "Ein Ding auf deutsch",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasText": [{"richtext_value": {"utf8str": "Ein deutscher Text", "language": "de"}}]
| }
          |}
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val resId = getResIriFromJsonResponse(response)
deutschesDingIri.set(resId)
}
}
"get the deutschesDing Resource and check its textValue" in {
Get("/v1/resources/" + URLEncoder.encode(deutschesDingIri.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val textObj: JsObject = getValuesForProp(response, "http://www.knora.org/ontology/0001/anything#hasText") match {
case vals: JsArray =>
vals.elements.head.asInstanceOf[JsObject]
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
textObj.fields.get("utf8str") match {
case Some(JsString(textVal)) => assert(textVal == "Ein deutscher Text")
case _ => throw InvalidApiJsonException("'utf8str' is not a JsString")
}
textObj.fields.get("language") match {
case Some(JsString(lang)) => assert(lang == "de")
          case _ => throw InvalidApiJsonException("'language' is not a JsString")
}
}
}
"get the resource created by bulk import and check language of its textValue" in {
Get("/v1/resources/" + URLEncoder.encode(abelAuthorIri.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val textObj: JsObject = getValuesForProp(response, "http://www.knora.org/ontology/0801/beol#personHasTitle") match {
case vals: JsArray =>
vals.elements.head.asInstanceOf[JsObject]
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
textObj.fields.get("utf8str") match {
case Some(JsString(textVal)) => assert(textVal == "Sir")
case _ => throw InvalidApiJsonException("'utf8str' is not a JsString")
}
textObj.fields.get("language") match {
case Some(JsString(lang)) => assert(lang == "en")
          case _ => throw InvalidApiJsonException("'language' is not a JsString")
}
}
}
"create a resource of type anything:Thing with textValueWithStandoff which has language" in {
val xml =
"""<?xml version="1.0" encoding="UTF-8"?>
|<text>This text links to <a href="http://www.google.ch">Google</a>.</text>
""".stripMargin
val params =
s"""
|{
| "restype_id": "http://www.knora.org/ontology/0001/anything#Thing",
| "label": "A second thing",
| "project_id": "http://rdfh.ch/projects/0001",
| "properties": {
| "http://www.knora.org/ontology/0001/anything#hasText": [{"richtext_value":{"xml":${xml.toJson.compactPrint},"mapping_id":"$mappingIri", "language": "en"}}]
| }
|}
""".stripMargin
Post("/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val resId = getResIriFromJsonResponse(response)
standoffLangDingIri.set(resId)
}
}
"get the Resource with standoff and language and check its textValue" in {
Get("/v1/resources/" + URLEncoder.encode(standoffLangDingIri.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val textObj: JsObject = getValuesForProp(response, "http://www.knora.org/ontology/0001/anything#hasText") match {
case vals: JsArray =>
vals.elements.head.asInstanceOf[JsObject]
case _ =>
throw new InvalidApiJsonException("values is not an array")
}
textObj.fields.get("language") match {
case Some(JsString(lang)) => assert(lang == "en")
          case None => throw InvalidApiJsonException("'language' is not specified but expected")
          case _ => throw InvalidApiJsonException("'language' is not a JsString")
}
}
}
"create a string value with chars encoded as entities but without markup in a bulk import" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/0001/anything/xml-import/v1# p0001-anything.xsd"
| xmlns:p0001-anything="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0001-anything:Thing id="thing_with_string">
| <knoraXmlImport:label>Thing with string</knoraXmlImport:label>
| <p0001-anything:hasText knoraType="richtext_value">test & ' > < test</p0001-anything:hasText>
| </p0001-anything:Thing>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/0001", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(anythingAdminEmail, password)) ~> resourcesPathV1 ~> check {
val responseStr = responseAs[String]
assert(status == StatusCodes.OK, responseStr)
responseStr should include("createdResources")
val responseJson: JsObject = AkkaHttpUtils.httpResponseToJson(response)
val createdResources: Seq[JsValue] = responseJson.fields("createdResources").asInstanceOf[JsArray].elements
thingWithString.set(createdResources.head.asJsObject.fields("resourceIri").asInstanceOf[JsString].value)
}
}
"get the resource created by bulk import and check for entities in string value" in {
Get("/v1/resources/" + URLEncoder.encode(thingWithString.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val responseJson: JsObject = AkkaHttpUtils.httpResponseToJson(response)
val stringVal = responseJson.fields("props")
.asInstanceOf[JsObject].fields("http://www.knora.org/ontology/0001/anything#hasText")
.asInstanceOf[JsObject].fields("values")
.asInstanceOf[JsArray].elements.head
.asInstanceOf[JsObject].fields("utf8str").asInstanceOf[JsString].value
assert(!(stringVal contains "&"))
assert(stringVal contains "&")
assert(!(stringVal contains "<"))
assert(stringVal contains "<")
assert(!(stringVal contains ">"))
assert(stringVal contains ">")
assert(!(stringVal contains "'"))
assert(stringVal contains "'")
}
}
"create a string value with a newline in a bulk import" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/0001/anything/xml-import/v1# p0001-anything.xsd"
| xmlns:p0001-anything="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0001-anything:Thing id="thing_with_string">
| <knoraXmlImport:label>Thing with string</knoraXmlImport:label>
| <p0001-anything:hasText knoraType="richtext_value">test
| test</p0001-anything:hasText>
| </p0001-anything:Thing>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/0001", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(anythingAdminEmail, password)) ~> resourcesPathV1 ~> check {
val responseStr = responseAs[String]
assert(status == StatusCodes.OK, responseStr)
responseStr should include("createdResources")
val responseJson: JsObject = AkkaHttpUtils.httpResponseToJson(response)
val createdResources: Seq[JsValue] = responseJson.fields("createdResources").asInstanceOf[JsArray].elements
thingWithString.set(createdResources.head.asJsObject.fields("resourceIri").asInstanceOf[JsString].value)
}
}
"get the resource created by bulk import and check for the newline in the string value" in {
Get("/v1/resources/" + URLEncoder.encode(thingWithString.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val responseJson: JsObject = AkkaHttpUtils.httpResponseToJson(response)
val stringVal = responseJson.fields("props")
.asInstanceOf[JsObject].fields("http://www.knora.org/ontology/0001/anything#hasText")
.asInstanceOf[JsObject].fields("values")
.asInstanceOf[JsArray].elements.head
.asInstanceOf[JsObject].fields("utf8str").asInstanceOf[JsString].value
assert(!(stringVal contains "\\\\n"))
assert(stringVal contains "\\n")
}
}
"create a resource whose label ends in a double quote" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/0001/anything/xml-import/v1# p0001-anything.xsd"
| xmlns:p0001-anything="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0001-anything:Thing id="thing_with_string">
| <knoraXmlImport:label>Thing with "label"</knoraXmlImport:label>
| </p0001-anything:Thing>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/0001", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(anythingAdminEmail, password)) ~> resourcesPathV1 ~> check {
val responseStr = responseAs[String]
assert(status == StatusCodes.OK, responseStr)
responseStr should include("createdResources")
val responseJson: JsObject = AkkaHttpUtils.httpResponseToJson(response)
val createdResources: Seq[JsValue] = responseJson.fields("createdResources").asInstanceOf[JsArray].elements
thingWithString.set(createdResources.head.asJsObject.fields("resourceIri").asInstanceOf[JsString].value)
}
}
"create a resource with a custom creation date in a bulk import" in {
val creationDateStr = "2019-01-09T15:45:54.502951Z"
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<knoraXmlImport:resources xmlns="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/0001/anything/xml-import/v1# p0001-anything.xsd"
| xmlns:p0001-anything="http://api.knora.org/ontology/0001/anything/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0001-anything:Thing id="thing_with_creation_date" creationDate="$creationDateStr">
| <knoraXmlImport:label>Thing with creation date</knoraXmlImport:label>
| <p0001-anything:hasText knoraType="richtext_value">test</p0001-anything:hasText>
| </p0001-anything:Thing>
|</knoraXmlImport:resources>""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/0001", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(anythingAdminEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val responseStr = responseAs[String]
responseStr should include("createdResources")
val responseJson: JsObject = AkkaHttpUtils.httpResponseToJson(response)
val createdResources: Seq[JsValue] = responseJson.fields("createdResources").asInstanceOf[JsArray].elements
thingWithCreationDate.set(createdResources.head.asJsObject.fields("resourceIri").asInstanceOf[JsString].value)
}
Get("/v2/resourcespreview/" + URLEncoder.encode(thingWithCreationDate.get, "UTF-8")) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcesPathV2 ~> check {
assert(status == StatusCodes.OK, response.toString)
val responseStr = responseAs[String]
responseStr should include(creationDateStr)
}
}
"create a resource belonging to a class in a shared ontology that refers to a property in another shared ontology" in {
val xmlImport =
s"""<?xml version="1.0" encoding="UTF-8"?>
| <knoraXmlImport:resources xmlns="http://api.knora.org/ontology/shared/example-ibox/xml-import/v1#"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://api.knora.org/ontology/004D/kuno-raeber/xml-import/v1#"
| xmlns:p0000-example-box="http://api.knora.org/ontology/shared/example-box/xml-import/v1#"
| xmlns:p0000-example-ibox="http://api.knora.org/ontology/shared/example-ibox/xml-import/v1#"
| xmlns:knoraXmlImport="http://api.knora.org/ontology/knoraXmlImport/v1#">
| <p0000-example-ibox:iBox id="test_box">
| <knoraXmlImport:label>test box 2</knoraXmlImport:label>
| <p0000-example-box__hasName knoraType="richtext_value">This is a test.</p0000-example-box__hasName>
| </p0000-example-ibox:iBox>
|</knoraXmlImport:resources>
""".stripMargin
val projectIri = URLEncoder.encode("http://rdfh.ch/projects/0001", "UTF-8")
Post(s"/v1/resources/xmlimport/$projectIri", HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), xmlImport)) ~> addCredentials(BasicHttpCredentials(anythingAdminEmail, password)) ~> resourcesPathV1 ~> check {
assert(status == StatusCodes.OK, response.toString)
val responseStr = responseAs[String]
responseStr should include("createdResources")
}
}
"perform a search for an anything:Thing matching a '***'" in {
checkSearchWithDifferentNumberOfProperties(search + filter)
}
"perform a search for an anything:Thing matching a '***' with 2 numprops displayed" in {
checkSearchWithDifferentNumberOfProperties(search + filter + "&numprops=2")
}
}
}
| musicEnfanthen/Knora | webapi/src/test/scala/org/knora/webapi/e2e/v1/ResourcesV1R2RSpec.scala | Scala | agpl-3.0 | 114,786 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2018, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package xml
package transform
import scala.collection.Seq
/**
* A class for XML transformations.
*
* @author Burak Emir
*/
abstract class BasicTransformer extends Function1[Node, Node] {
protected def unchanged(n: Node, ns: Seq[Node]) =
ns.length == 1 && (ns.head == n)
/**
* Call transform(Node) for each node in ns, append results
* to NodeBuffer.
*/
def transform(it: Iterator[Node], nb: NodeBuffer): Seq[Node] =
it.foldLeft(nb)(_ ++= transform(_)).toSeq
/**
* Call transform(Node) to each node in ns, yield ns if nothing changes,
* otherwise a new sequence of concatenated results.
*/
def transform(ns: Seq[Node]): Seq[Node] = {
val changed = ns flatMap transform
if (changed.length != ns.length || (changed, ns).zipped.exists(_ != _)) changed
else ns
}
def transform(n: Node): Seq[Node] = {
if (n.doTransform) n match {
case Group(xs) => Group(transform(xs)) // un-group the hack Group tag
case _ =>
val ch = n.child
val nch = transform(ch)
if (ch eq nch) n
else Elem(n.prefix, n.label, n.attributes, n.scope, nch.isEmpty, nch: _*)
}
else n
}
def apply(n: Node): Node = {
val seq = transform(n)
if (seq.length > 1)
throw new UnsupportedOperationException("transform must return single node for root")
else seq.head
}
}
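
/**
 * Illustrative sketch (not part of the original file): a minimal concrete
 * transformer that renames every <foo> element to <bar>, recursing into the
 * children through the inherited transform(Seq[Node]).
 */
object RenameFooSketch extends BasicTransformer {
  override def transform(n: Node): Seq[Node] = n match {
    case e: Elem if e.label == "foo" =>
      e.copy(label = "bar", child = transform(e.child))
    case _ => super.transform(n)
  }
}
// RenameFooSketch(<doc><foo/></doc>) yields <doc><bar/></doc>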
| ashawley/scala-xml | shared/src/main/scala/scala/xml/transform/BasicTransformer.scala | Scala | bsd-3-clause | 1,909 |
package nlp_serde.writers
import nlp_serde.readers.PerLineJsonReader
import nlp_serde.{JsonUtil, FileUtil, Document}
import java.io.{File, PrintWriter}
import play.api.libs.json.Json
import scala.collection.mutable
/**
* Json writer that writes a single json per file
* @author sameer
* @since 9/1/14.
*/
class JsonWriter(gzip: Boolean = true) extends Writer with DocPerFile {
override def writeDoc(name: String, doc: Document) {
val writer = FileUtil.writer(name + ".json" + (if (gzip) ".gz" else ""), gzip)
writer.println(Json.prettyPrint(JsonUtil.fromDoc(doc.toCase)))
writer.flush
writer.close
}
}
/**
* Json writer that writes a single json per line
* @author sameer
* @since 9/1/14.
*/
class PerLineJsonWriter(gzip: Boolean = true) extends Writer {
override def write(name: String, docs: Iterator[Document]) {
val writer = FileUtil.writer(name, gzip)
var idx = 0
for (doc <- docs) {
writer.println(Json.stringify(JsonUtil.fromDoc(doc.toCase)))
idx += 1
if(idx % 10 == 0) print(".")
if(idx % 1000 == 0) println(": " + idx)
}
writer.flush
writer.close
}
}
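
/**
 * Usage sketch (illustrative, not part of the original file): round-trips a
 * gzipped JSON-per-line corpus through PerLineJsonReader and PerLineJsonWriter.
 * The input path in args(0) is a placeholder.
 */
object PerLineJsonRoundTrip {
  def main(args: Array[String]): Unit = {
    val input = args(0) // e.g. "corpus.json.gz"
    val docs: Iterator[Document] = new PerLineJsonReader(true).read(input)
    new PerLineJsonWriter(gzip = true).write(input + ".copy.gz", docs)
  }
}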
/**
* Write out multiple files, splitting the input collection of documents in multiple ones
*/
class SplitPerLineJsonWriter(val fname: (File, Document) => String, val gzip: Boolean = true) extends Writer {
override def write(name: String, docs: Iterator[Document]): Unit = {
val writers = new mutable.HashMap[String, PrintWriter]()
for (d <- docs) {
val newName = fname(new File(name), d)
val writer = writers.getOrElseUpdate(newName, FileUtil.writer(newName, gzip))
writer.println(Json.stringify(JsonUtil.fromDoc(d.toCase)))
}
writers.values.foreach(w => {
w.flush()
w.close()
})
}
}
object SplitPerLineJsonWriter {
def main(args: Array[String]): Unit = {
assert(args.length == 1)
val inputFile = args(0)
def newName(file: File, d: Document): String = {
val name = file.getName
val parent = file.getParent
val year = d.attrs.get("date").getOrElse("NONE").take(4)
val month = d.attrs.get("date").getOrElse("NONE").drop(5).take(2)
"%s/%s-%s.%s".format(parent, year, month, name)
}
val docs = new PerLineJsonReader(true).read(inputFile)
// println(docs.size)
val writer = new SplitPerLineJsonWriter(newName, true)
writer.write(inputFile, docs)
}
} | sameersingh/nlp_serde | src/main/scala/nlp_serde/writers/JsonWriter.scala | Scala | bsd-2-clause | 2,440 |
package org.jetbrains.plugins.scala.lang.psi.light
import java.util
import javax.swing._
import com.intellij.navigation.ItemPresentation
import com.intellij.openapi.util.text.StringUtil
import com.intellij.openapi.util.{Pair, TextRange}
import com.intellij.pom.java.LanguageLevel
import com.intellij.psi._
import com.intellij.psi.impl.PsiClassImplUtil.MemberType
import com.intellij.psi.impl.light.LightElement
import com.intellij.psi.impl.{PsiClassImplUtil, PsiSuperMethodImplUtil}
import com.intellij.psi.javadoc.PsiDocComment
import com.intellij.psi.scope.PsiScopeProcessor
import com.intellij.psi.scope.processor.MethodsProcessor
import com.intellij.psi.search.{GlobalSearchScope, SearchScope}
import com.intellij.psi.util.PsiUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.adapters.PsiClassAdapter
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypedDefinition
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScObject, ScTemplateDefinition, ScTrait}
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.typedef.TypeDefinitionMembers
import org.jetbrains.plugins.scala.lang.psi.light.PsiTypedDefinitionWrapper.DefinitionRole._
import org.jetbrains.plugins.scala.lang.resolve.processor.BaseProcessor
import org.jetbrains.plugins.scala.macroAnnotations.{Cached, ModCount}
import _root_.scala.collection.mutable.ArrayBuffer
/**
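 * Presents a Scala ScTemplateDefinition (an object or a trait) to Java PSI clients
 * as a synthetic PsiClass with the supplied name and qualified name (e.g. the
 * JVM-level class that backs a Scala object).
 *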
* @author Alefas
* @since 10.02.12
*/
class PsiClassWrapper(val definition: ScTemplateDefinition,
private var qualName: String,
private var name: String) extends LightElement(definition.getManager, definition.getLanguage) with PsiClassAdapter /*with SyntheticElement*/ {
override def hashCode(): Int = definition.hashCode()
override def equals(obj: Any): Boolean = {
obj match {
case wrapper: PsiClassWrapper =>
definition.equals(wrapper.definition) && qualName == wrapper.qualName && name == wrapper.name
case _ => false
}
}
def getQualifiedName: String = qualName
def isInterface: Boolean = false
def isAnnotationType: Boolean = false
def isEnum: Boolean = false
def getExtendsList: PsiReferenceList = null //todo:
def getImplementsList: PsiReferenceList = null //todo: ?
def getExtendsListTypes: Array[PsiClassType] = Array.empty
def getImplementsListTypes: Array[PsiClassType] = Array.empty
def getSuperClass: PsiClass = null
def getInterfaces: Array[PsiClass] = Array.empty
def getSupers: Array[PsiClass] = Array.empty
def getSuperTypes: Array[PsiClassType] = Array.empty
def psiFields: Array[PsiField] = {
definition match {
case _: ScObject => Array.empty
case _ => definition.getFields //todo:
}
}
def psiMethods: Array[PsiMethod] = {
definition match {
case obj: ScObject =>
val res = new ArrayBuffer[PsiMethod]()
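        // expose every signature of the object as a static Java method wrapper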
TypeDefinitionMembers.SignatureNodes.forAllSignatureNodes(obj) { node =>
this.processPsiMethodsForNode(node, isStatic = true, isInterface = false)(res += _)
}
res.toArray
case t: ScTrait =>
val res = new ArrayBuffer[PsiMethod]()
def addGettersAndSetters(holder: ScAnnotationsHolder, declaredElements: Seq[ScTypedDefinition]): Unit = {
val beanProperty = ScalaPsiUtil.isBeanProperty(holder)
val booleanBeanProperty = ScalaPsiUtil.isBooleanBeanProperty(holder)
if (beanProperty || booleanBeanProperty) {
for (t <- declaredElements) {
if (beanProperty) {
res += t.getStaticTypedDefinitionWrapper(GETTER, this)
if (t.isVar) {
res += t.getStaticTypedDefinitionWrapper(SETTER, this)
}
} else if (booleanBeanProperty) {
res += t.getStaticTypedDefinitionWrapper(IS_GETTER, this)
if (t.isVar) {
res += t.getStaticTypedDefinitionWrapper(SETTER, this)
}
}
}
}
}
val members = t.members
members foreach {
case fun: ScFunctionDefinition => res += fun.getStaticTraitFunctionWrapper(this)
case definition: ScPatternDefinition => //only getters and setters should be added
addGettersAndSetters(definition, definition.declaredElements)
case definition: ScVariableDefinition => //only getters and setters should be added
addGettersAndSetters(definition, definition.declaredElements)
case _ =>
}
res.toArray
}
}
@Cached(ModCount.getBlockModificationCount, this)
private def getEmptyConstructor: PsiMethod = new EmptyPrivateConstructor(this)
def getConstructors: Array[PsiMethod] = {
Array(getEmptyConstructor)
}
override def psiInnerClasses: Array[PsiClass] = {
definition match {
case o: ScObject =>
o.members.flatMap {
case o: ScObject => o.fakeCompanionClass match {
case Some(clazz) => Seq(o, clazz)
case None => Seq(o)
}
case t: ScTrait => Seq(t, t.fakeCompanionClass)
case c: ScClass => Seq(c)
case _ => Seq.empty
}.toArray
case _ => definition.getInnerClasses //todo:
}
}
def getInitializers: Array[PsiClassInitializer] = Array.empty
def getAllFields: Array[PsiField] = {
PsiClassImplUtil.getAllFields(this)
}
def getAllMethods: Array[PsiMethod] = {
PsiClassImplUtil.getAllMethods(this)
}
def getAllInnerClasses: Array[PsiClass] = {
PsiClassImplUtil.getAllInnerClasses(this)
}
def findFieldByName(name: String, checkBases: Boolean): PsiField = {
PsiClassImplUtil.findFieldByName(this, name, checkBases)
}
def findMethodBySignature(patternMethod: PsiMethod, checkBases: Boolean): PsiMethod = {
PsiClassImplUtil.findMethodBySignature(this, patternMethod, checkBases)
}
def findMethodsBySignature(patternMethod: PsiMethod, checkBases: Boolean): Array[PsiMethod] = {
PsiClassImplUtil.findMethodsBySignature(this, patternMethod, checkBases)
}
def findMethodsByName(name: String, checkBases: Boolean): Array[PsiMethod] = {
PsiClassImplUtil.findMethodsByName(this, name, checkBases)
}
def findMethodsAndTheirSubstitutorsByName(name: String, checkBases: Boolean): util.List[Pair[PsiMethod, PsiSubstitutor]] = {
PsiClassImplUtil.findMethodsAndTheirSubstitutorsByName(this, name, checkBases)
}
def getAllMethodsAndTheirSubstitutors: util.List[Pair[PsiMethod, PsiSubstitutor]] = {
PsiClassImplUtil.getAllWithSubstitutorsByMap(this, MemberType.METHOD)
}
def findInnerClassByName(name: String, checkBases: Boolean): PsiClass = {
PsiClassImplUtil.findInnerByName(this, name, checkBases)
}
def getLBrace: PsiElement = {
definition.getLBrace
}
def getRBrace: PsiElement = {
definition.getRBrace
}
def getNameIdentifier: PsiIdentifier = {
definition.getNameIdentifier
}
def getScope: PsiElement = {
definition.getScope
}
def isInheritor(baseClass: PsiClass, checkDeep: Boolean): Boolean = {
definition match {
case _: ScObject =>
baseClass.getQualifiedName == "java.lang.Object" ||
(baseClass.getQualifiedName == "scala.ScalaObject" && !baseClass.isDeprecated)
case _ => false
}
}
def isInheritorDeep(baseClass: PsiClass, classToByPass: PsiClass): Boolean = {
definition match {
case _: ScObject =>
baseClass.getQualifiedName == "java.lang.Object" ||
(baseClass.getQualifiedName == "scala.ScalaObject" && !baseClass.isDeprecated)
case _ => false
}
}
def getContainingClass: PsiClass = {
definition.getContainingClass
}
def getVisibleSignatures: util.Collection[HierarchicalMethodSignature] = {
PsiSuperMethodImplUtil.getVisibleSignatures(this)
}
def setName(name: String): PsiElement = {
this.name = name
val packageName = StringUtil.getPackageName(this.qualName)
this.qualName = if (packageName.isEmpty) name else packageName + "." + name
this
}
override def getName: String = name
override def copy: PsiElement = {
new PsiClassWrapper(definition.copy.asInstanceOf[ScTemplateDefinition], qualName, name)
}
override def processDeclarations(processor: PsiScopeProcessor, state: ResolveState, lastParent: PsiElement, place: PsiElement): Boolean = {
if (!processor.isInstanceOf[BaseProcessor]) {
val languageLevel: LanguageLevel =
processor match {
case methodProcessor: MethodsProcessor => methodProcessor.getLanguageLevel
case _ => PsiUtil.getLanguageLevel(place)
}
return PsiClassImplUtil.processDeclarationsInClass(this, processor, state, null, lastParent, place, languageLevel, false)
}
true
}
override def getContainingFile: PsiFile = {
definition.getContainingFile
}
override def isValid: Boolean = definition.isValid
override def getNextSibling: PsiElement = definition.getNextSibling
override def getPrevSibling: PsiElement = definition.getPrevSibling
override def getContext: PsiElement = {
definition.getContext
}
override def getParent: PsiElement = definition.getParent
override def getResolveScope: GlobalSearchScope = {
definition.resolveScope
}
override def getUseScope: SearchScope = {
definition.getUseScope
}
override def toString: String = {
"PsiClassWrapper(" + definition.toString + ")"
}
override def getIcon(flags: Int): Icon = {
definition.getIcon(flags)
}
def getModifierList: PsiModifierList = {
definition.getModifierList
}
def hasModifierProperty(name: String): Boolean = {
definition.hasModifierProperty(name)
}
def getDocComment: PsiDocComment = {
definition.getDocComment
}
def isDeprecated: Boolean = {
definition.isDeprecated
}
override def getPresentation: ItemPresentation = {
definition.getPresentation //todo: ?
}
override def navigate(requestFocus: Boolean) {
definition.navigate(requestFocus)
}
override def canNavigate: Boolean = {
definition.canNavigate
}
override def canNavigateToSource: Boolean = {
definition.canNavigateToSource
}
override def getTextRange: TextRange = definition.getTextRange
override def getTextOffset: Int = definition.getTextOffset
def hasTypeParameters: Boolean = false
def getTypeParameterList: PsiTypeParameterList = null
def psiTypeParameters: Array[PsiTypeParameter] = Array.empty
override def isEquivalentTo(another: PsiElement): Boolean = {
PsiClassImplUtil.isClassEquivalentTo(this, another)
}
}
| gtache/intellij-lsp | intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/psi/light/PsiClassWrapper.scala | Scala | apache-2.0 | 10,811 |
package com.themillhousegroup.l7
import org.specs2.mutable.Specification
class AutomergeSpec extends Specification {
"Automerge" should {
"Reject file handles that don't exist" in {
val result = Automerge("src/test/resources/kjfh", "src/test/resources/one").dryRun
result must beAFailedTry
}
"Reject file handles that aren't dirs" in {
val result = Automerge("src/test/resources/one/level1.xml", "src/test/resources/one").dryRun
result must beAFailedTry
}
"Reject empty dirs" in {
val result = Automerge("src/test/resources/empty", "src/test/resources/empty").dryRun
result must beAFailedTry
}
"Work, but do nothing if applied to the same dir" in {
val result = Automerge("src/test/resources/two", "src/test/resources/two").dryRun
result must beASuccessfulTry
result.get.added must beEmpty
result.get.removed must beEmpty
result.get.modified must beEmpty
}
}
}
| themillhousegroup/l7-merge | src/test/scala/com/themillhousegroup/l7/AutomergeSpec.scala | Scala | mit | 975 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.serving.serialization
import java.io.ByteArrayInputStream
import java.util.Base64
import org.apache.arrow.memory.RootAllocator
import org.apache.arrow.vector.{Float4Vector, IntVector, VectorSchemaRoot}
import org.apache.arrow.vector.holders.NullableIntHolder
import org.apache.arrow.vector.ipc.ArrowStreamReader
/**
 * Arrow deserializer that decodes an Arrow buffer for the Scala/Java front end.
 * The front end receives an array of data values and an array of shape values,
 * then builds a string representation from them.
*/
class ArrowDeserializer {
/**
* Get data and shape Array from VectorSchemaRoot
* @param vectorSchemaRoot VectorSchemaRoot read from Arrow buffer
* @return Tuple, (dataArray, shapeArray)
*/
def getFromSchemaRoot(vectorSchemaRoot: VectorSchemaRoot): (Array[Float], Array[Int]) = {
val dataVector = vectorSchemaRoot.getVector("data").asInstanceOf[Float4Vector]
val shapeVector = vectorSchemaRoot.getVector("shape").asInstanceOf[IntVector]
val dataArray = new Array[Float](dataVector.getValueCount)
(0 until dataArray.size).foreach(i => dataArray(i) = dataVector.get(i))
var shapeArray = Array[Int]()
val nullableHolder = new NullableIntHolder()
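    // All vectors in a VectorSchemaRoot share one row count, so the shape vector is
    // scanned over the data vector's length; only slots whose validity bit is set
    // carry real shape dimensions, the remaining slots are null padding.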
(0 until dataArray.size).foreach(i => {
shapeVector.get(i, nullableHolder)
if (nullableHolder.isSet == 1) {
shapeArray = shapeArray :+ nullableHolder.value
}
})
(dataArray, shapeArray)
}
/**
* Deserialize base64 string to Array of (dataArray, shapeArray)
 * Some models produce multiple outputs (i.e. a Table),
 * so an Array is used to represent the multiple outputs.
* @param b64string base64 string to decode
 * @return Array of (dataArray, shapeArray) tuples, one per record batch
*/
def create(b64string: String): Array[(Array[Float], Array[Int])] = {
var result = Array[(Array[Float], Array[Int])]()
val readAllocator = new RootAllocator(Int.MaxValue)
val byteArr = Base64.getDecoder.decode(b64string)
val reader = new ArrowStreamReader(new ByteArrayInputStream(byteArr), readAllocator)
val schema = reader.getVectorSchemaRoot.getSchema
while (reader.loadNextBatch()) {
val vectorSchemaRoot = reader.getVectorSchemaRoot
result = result :+ getFromSchemaRoot(vectorSchemaRoot)
}
// readAllocator.close()
reader.close()
result
}
def getJsonString(arr: Array[(Array[Float], Array[Int])]): String = {
val strArr = arr.map(dataAndShape => {
val dataStr = dataAndShape._1.mkString("[", ",", "]")
val shapeStr = dataAndShape._2.mkString("[", ",", "]")
s"""{"data":$dataStr,"shape":$shapeStr}"""
})
var str = ""
strArr.foreach(s => str += s)
str
}
}
/**
* Wrap class ArrowDeserializer to deserialize base64 string
* to Array of (dataArray, shapeArray)
*/
object ArrowDeserializer {
def apply(b64string: String): String = {
val deserializer = new ArrowDeserializer()
val arr = deserializer.create(b64string)
deserializer.getJsonString(arr)
}
def getArray(b64string: String): Array[(Array[Float], Array[Int])] = {
val deserializer = new ArrowDeserializer()
val arr = deserializer.create(b64string)
arr
}
}
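
/**
 * Illustrative round-trip sketch (not part of the original file): builds a tiny
 * Arrow stream with the "data"/"shape" vectors expected above, base64-encodes it,
 * and decodes it with ArrowDeserializer. Only the Arrow Java API already used in
 * this file is assumed.
 */
object ArrowDeserializerDemo {
  import java.io.ByteArrayOutputStream
  import org.apache.arrow.vector.ipc.ArrowStreamWriter
  import org.apache.arrow.vector.types.FloatingPointPrecision
  import org.apache.arrow.vector.types.pojo.{ArrowType, Field, FieldType, Schema}
  import scala.collection.JavaConverters._

  def main(args: Array[String]): Unit = {
    val allocator = new RootAllocator(Int.MaxValue)
    val schema = new Schema(List(
      new Field("data", FieldType.nullable(new ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)), null),
      new Field("shape", FieldType.nullable(new ArrowType.Int(32, true)), null)
    ).asJava)
    val root = VectorSchemaRoot.create(schema, allocator)
    val data = root.getVector("data").asInstanceOf[Float4Vector]
    val shape = root.getVector("shape").asInstanceOf[IntVector]
    data.allocateNew(4)
    shape.allocateNew(4)
    (0 until 4).foreach(i => data.setSafe(i, i + 0.5f))
    shape.setSafe(0, 2) // a 2x2 tensor: shape slots 2 and 3 stay null
    shape.setSafe(1, 2)
    root.setRowCount(4)
    val out = new ByteArrayOutputStream()
    val writer = new ArrowStreamWriter(root, null, out)
    writer.start(); writer.writeBatch(); writer.end(); writer.close()
    val b64 = Base64.getEncoder.encodeToString(out.toByteArray)
    println(ArrowDeserializer(b64)) // {"data":[0.5,1.5,2.5,3.5],"shape":[2,2]}
  }
}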
| intel-analytics/BigDL | scala/serving/src/main/scala/com/intel/analytics/bigdl/serving/serialization/ArrowDeserializer.scala | Scala | apache-2.0 | 3,739 |
package sample.hello
import akka.actor._
import collection.JavaConversions._
import collection.mutable.{HashMap,MultiMap,Map}
import scala.collection.mutable
/**
* Created by root on 3/3/15.
*/
class FragmentationFeatures extends Actor{
val relevance= context.actorOf(Props[Relevance], name= "relevance_features")
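  // Mutable per-actor state:
  //  - clearing_map        de-duplicates (sequence, position@sourceId,plagId) pairs already counted
  //  - old_fi_frg          total matched word counts keyed by "matchLength@sourceId,plagId"
  //  - old_seq_conc        occurrence counts keyed by "sequence@sourceId,plagId"
  //  - wk_Arr_Dr/wk_Arr_Ds per-key occurrence counts in the source / suspicious document
  //  - document_count_ids  terminated-routee counts per document pair; feature maps are
  //    flushed once a pair reaches 16 (see FR_Calcs_Routees_Terminated below)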
var old_fi_frg : Map[String,Int]= mutable.Map()
var old_seq_conc :Map[String,Int]=mutable.Map()
var clearing_map =new HashMap[String, scala.collection.mutable.Set[String]] with scala.collection.mutable.MultiMap [String,String]
//var clearing_map2 =new HashMap[String, scala.collection.mutable.Set[String]] with scala.collection.mutable.MultiMap [String,String]
var wk_Arr_Dr :Map[String,Int]= Map()
var wk_Arr_Ds :Map[String,Int]= Map()
var new_source_file_matches =new HashMap[String, scala.collection.mutable.Set[Int]] with scala.collection.mutable.MultiMap [String,Int]
var new_plag_file_matches =new HashMap[String, scala.collection.mutable.Set[Int]] with scala.collection.mutable.MultiMap [String,Int]
var document_count_ids :Map [String,Int]= Map()
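  // Message protocol (case classes defined elsewhere in this project):
  //  - frag_calculate accumulates per-document-pair fragmentation counters
  //  - FR_Calcs_Routees_Terminated, once a document pair has been seen 16 times,
  //    flushes those counters and forwards them to the Relevance actor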
def receive ={
case frag_calculate(seq_str,start_end_fltr) =>
val condition :String=(start_end_fltr(0)+start_end_fltr(3))+"."+start_end_fltr(4)+"."+(start_end_fltr(4)+start_end_fltr(3))+"@"+start_end_fltr(6)+","+start_end_fltr(7)
      if(!clearing_map.entryExists(seq_str,_ == (condition)) ){ //(seq_str -> plag_string_start_point) && the source-file point differs from the one already stored under the same key
clearing_map=clearing_map.addBinding(seq_str , condition)
        val frg_condition=start_end_fltr(3)+"@"+start_end_fltr(6)+","+start_end_fltr(7) //word count of the common string + "@" + ids of the source and plagiarised texts
//if(start_end_fltr(3) > 100)
//println(seq_str+" and "+start_end_fltr(0)+" "+start_end_fltr(1))
        if(old_fi_frg.contains(frg_condition) ){ //if/else computing the fragmentation-features map
val new_value :Int=old_fi_frg.apply(frg_condition)+start_end_fltr(3)
//common_sequences.put(temp_str,new_value)
old_fi_frg = old_fi_frg.+(frg_condition -> new_value)
}
else {
if(start_end_fltr(3) != 0) {
old_fi_frg = old_fi_frg.+(frg_condition -> start_end_fltr(3))
}
}
val seq_condition=seq_str+"@"+start_end_fltr(6)+","+start_end_fltr(7)
      if(old_seq_conc.contains(seq_condition)){
val new_value2 :Int=old_seq_conc.apply(seq_condition)+1
old_seq_conc=old_seq_conc.+(seq_condition -> new_value2)
}
else{
if(!seq_condition.isEmpty()) {
old_seq_conc = old_seq_conc.+(seq_condition -> 1)
}
}
}
case FR_Calcs_Routees_Terminated(source_file_matches,plag_file_matches,normalised_term_frequency,normalised_source_term_freq,term_files_occ,id_total,compared_tuple_w_ids) =>
val plagfile_id :String=compared_tuple_w_ids._3+","+compared_tuple_w_ids._4 //id source file+","+id plag file
if(document_count_ids.contains(plagfile_id)){
document_count_ids=document_count_ids.+(plagfile_id -> (document_count_ids.apply(plagfile_id)+1))
}
else
document_count_ids=document_count_ids.+(plagfile_id -> 1)
      if(document_count_ids.exists(_._2 == 16)){
        val x = document_count_ids.find(x => x._2 == 16).get //remove the (document_id -> 16) entry from the document_count_ids map
document_count_ids=document_count_ids.-(x._1)
for(keyvalue <- source_file_matches.iterator){
          wk_Arr_Dr=wk_Arr_Dr.+(keyvalue._1 -> keyvalue._2.size ) //wk_Arr_Dr maps each matched key of the source file to its number of occurrences in the (source) text
}
for(keyvalue2 <- plag_file_matches.iterator){
          wk_Arr_Ds=wk_Arr_Ds.+(keyvalue2._1 -> keyvalue2._2.size ) // I previously had wk_Arr_Ds=wk_Arr_Dr.+(keyvalue2._1 -> keyvalue2._2.size ) ????
}
var fi_frg :Map[Int,Int]= Map()
for(kv <- old_fi_frg.iterator) {
if (kv._1.substring(kv._1.lastIndexOf("@") + 1, kv._1.length()).==(x._1)) {
fi_frg = fi_frg.+(kv._1.substring(0, kv._1.lastIndexOf("@")).toInt -> kv._2)
old_fi_frg = old_fi_frg.-(kv._1)
}
}
//println("fi_frg:"+fi_frg)
var seq_conc :Map[String,Int]= Map()
for(kv <- old_seq_conc.iterator) {
if (kv._1.substring(kv._1.lastIndexOf("@") + 1, kv._1.length()).==(x._1)) {
seq_conc = seq_conc.+(kv._1.substring(0, kv._1.lastIndexOf("@")) -> kv._2)
old_seq_conc = old_seq_conc.-(kv._1)
}
}
////////
//if(compared_tuple_w_ids._3 ==1 && compared_tuple_w_ids._4==1){
// println(source_file_matches+" and "+plag_file_matches)
//}
////////
//println("seq_conq: "+seq_conc+"\\t fi_frg: "+fi_frg+"\\t wk_Arr_Ds: "+wk_Arr_Ds+"\\t wk_Arr_Dr: "+wk_Arr_Dr)
relevance.!(calculate_features(wk_Arr_Dr,wk_Arr_Ds,fi_frg,seq_conc,normalised_term_frequency,normalised_source_term_freq,term_files_occ,id_total,compared_tuple_w_ids) )
wk_Arr_Ds= Map()
wk_Arr_Dr=Map()
}
case _ =>
println("There was a problem fetching the data for the calculation of the fragmentation features")
}
}
| SteliosKats/Plagiarism_Detection_System_Using_Akka | src/main/scala/sample/hello/FragmentationFeatures.scala | Scala | cc0-1.0 | 5,221 |
// Copyright 2014 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package au.com.cba.omnia.maestro.schema
import au.com.cba.omnia.maestro.schema.hive._
import au.com.cba.omnia.maestro.schema.pretty._
/** Column specification.
*
* @param name column name
* @param hivetype hive storage type (eg String)
* @param format data format / semantic type for the column
* @param histogram histogram of how many values match each classifier
* @param comment comment associated with the column
*/
case class ColumnSpec(
name: String,
hivetype: HiveType.HiveType,
format: Option[Format],
histogram: Histogram,
comment: String) {
/** Check if this value matches the column format. */
def matches(s: String): Boolean =
format match {
case None => true
case Some(f) => f.matches(s)
}
/** Pretty print the ColumnSpec as a String. */
def pretty: String =
f"$name%-25s" + " | " +
HiveType.pretty(hivetype) + " | " +
prettyOptionFormat(format) + "\\n" +
" " * 25 + " | " +
histogram.pretty + ";\\n"
/** Pretty print a Format as String, or '-' for a missing format. */
def prettyOptionFormat(of: Option[Format]): String =
of match {
case None => "-"
case Some(f) => f.pretty
}
/** Convert the ColumnSpec to JSON. */
def toJson: JsonDoc = {
val fields = List(
Some(("name", JsonString(name))),
Some(("storage", JsonString(HiveType.pretty(hivetype)))),
Some(("format", JsonString(prettyOptionFormat(format)))),
Some(("histogram", histogram.toJson)),
(if (comment.size > 0)
Some (("comment", JsonString(comment)))
else None)).flatten
JsonMap(fields, true)
}
}
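// Usage sketch (hivetype/histogram stand for values built elsewhere in this package):
//   val spec = ColumnSpec("customer_id", hivetype, None, histogram, "")
//   spec.pretty // aligned "name | storage | format" plus a histogram line
//   spec.toJson // JsonMap with name/storage/format/histogram (and comment if non-empty)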
| CommBank/maestro | maestro-schema/src/main/scala/au/com/cba/omnia/maestro/schema/schema/ColumnSpec.scala | Scala | apache-2.0 | 2,352 |
/**
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.actor.dungeon
import scala.collection.immutable
import akka.actor.{ InvalidActorNameException, ChildStats, ChildRestartStats, ChildNameReserved, ActorRef }
import akka.dispatch.sysmsg.{ EarliestFirstSystemMessageList, SystemMessageList, LatestFirstSystemMessageList, SystemMessage }
import akka.util.Collections.{ EmptyImmutableSeq, PartialImmutableValuesIterable }
/**
* INTERNAL API
*/
private[akka] trait ChildrenContainer {
def add(name: String, stats: ChildRestartStats): ChildrenContainer
def remove(child: ActorRef): ChildrenContainer
def getByName(name: String): Option[ChildStats]
def getByRef(actor: ActorRef): Option[ChildRestartStats]
def children: immutable.Iterable[ActorRef]
def stats: immutable.Iterable[ChildRestartStats]
def shallDie(actor: ActorRef): ChildrenContainer
// reserve that name or throw an exception
def reserve(name: String): ChildrenContainer
// cancel a reservation
def unreserve(name: String): ChildrenContainer
def isTerminating: Boolean = false
def isNormal: Boolean = true
}
/**
* INTERNAL API
*
* This object holds the classes performing the logic of managing the children
* of an actor, hence they are intimately tied to ActorCell.
*/
private[akka] object ChildrenContainer {
sealed trait SuspendReason
case object UserRequest extends SuspendReason
  // careful with those system messages, all handling takes place in ActorCell.scala!
case class Recreation(cause: Throwable) extends SuspendReason with WaitingForChildren
case class Creation() extends SuspendReason with WaitingForChildren
case object Termination extends SuspendReason
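  // Informal state sketch: EmptyChildrenContainer --add/reserve--> NormalChildrenContainer
  // --shallDie--> TerminatingChildrenContainer --last dying child removed-->
  // NormalChildrenContainer (other reasons) or TerminatedChildrenContainer (Termination).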
class ChildRestartsIterable(stats: immutable.MapLike[_, ChildStats, _]) extends PartialImmutableValuesIterable[ChildStats, ChildRestartStats] {
override final def apply(c: ChildStats) = c.asInstanceOf[ChildRestartStats]
override final def isDefinedAt(c: ChildStats) = c.isInstanceOf[ChildRestartStats]
override final def valuesIterator = stats.valuesIterator
}
class ChildrenIterable(stats: immutable.MapLike[_, ChildStats, _]) extends PartialImmutableValuesIterable[ChildStats, ActorRef] {
override final def apply(c: ChildStats) = c.asInstanceOf[ChildRestartStats].child
override final def isDefinedAt(c: ChildStats) = c.isInstanceOf[ChildRestartStats]
override final def valuesIterator = stats.valuesIterator
}
trait WaitingForChildren
trait EmptyChildrenContainer extends ChildrenContainer {
val emptyStats = immutable.TreeMap.empty[String, ChildStats]
override def add(name: String, stats: ChildRestartStats): ChildrenContainer = new NormalChildrenContainer(emptyStats.updated(name, stats))
override def remove(child: ActorRef): ChildrenContainer = this
override def getByName(name: String): Option[ChildRestartStats] = None
override def getByRef(actor: ActorRef): Option[ChildRestartStats] = None
override def children: immutable.Iterable[ActorRef] = EmptyImmutableSeq
override def stats: immutable.Iterable[ChildRestartStats] = EmptyImmutableSeq
override def shallDie(actor: ActorRef): ChildrenContainer = this
override def reserve(name: String): ChildrenContainer = new NormalChildrenContainer(emptyStats.updated(name, ChildNameReserved))
override def unreserve(name: String): ChildrenContainer = this
}
/**
* This is the empty container, shared among all leaf actors.
*/
object EmptyChildrenContainer extends EmptyChildrenContainer {
override def toString = "no children"
}
/**
* This is the empty container which is installed after the last child has
* terminated while stopping; it is necessary to distinguish from the normal
* empty state while calling handleChildTerminated() for the last time.
*/
object TerminatedChildrenContainer extends EmptyChildrenContainer {
override def add(name: String, stats: ChildRestartStats): ChildrenContainer = this
override def reserve(name: String): ChildrenContainer =
throw new IllegalStateException("cannot reserve actor name '" + name + "': already terminated")
override def isTerminating: Boolean = true
override def isNormal: Boolean = false
override def toString = "terminated"
}
/**
* Normal children container: we do have at least one child, but none of our
* children are currently terminating (which is the time period between
* calling context.stop(child) and processing the ChildTerminated() system
* message).
*/
class NormalChildrenContainer(val c: immutable.TreeMap[String, ChildStats]) extends ChildrenContainer {
override def add(name: String, stats: ChildRestartStats): ChildrenContainer = new NormalChildrenContainer(c.updated(name, stats))
override def remove(child: ActorRef): ChildrenContainer = NormalChildrenContainer(c - child.path.name)
override def getByName(name: String): Option[ChildStats] = c.get(name)
override def getByRef(actor: ActorRef): Option[ChildRestartStats] = c.get(actor.path.name) match {
case c @ Some(crs: ChildRestartStats) if (crs.child == actor) ⇒ c.asInstanceOf[Option[ChildRestartStats]]
case _ ⇒ None
}
override def children: immutable.Iterable[ActorRef] =
if (c.isEmpty) EmptyImmutableSeq else new ChildrenIterable(c)
override def stats: immutable.Iterable[ChildRestartStats] =
if (c.isEmpty) EmptyImmutableSeq else new ChildRestartsIterable(c)
override def shallDie(actor: ActorRef): ChildrenContainer = TerminatingChildrenContainer(c, Set(actor), UserRequest)
override def reserve(name: String): ChildrenContainer =
if (c contains name)
throw new InvalidActorNameException(s"actor name [$name] is not unique!")
else new NormalChildrenContainer(c.updated(name, ChildNameReserved))
override def unreserve(name: String): ChildrenContainer = c.get(name) match {
case Some(ChildNameReserved) ⇒ NormalChildrenContainer(c - name)
case _ ⇒ this
}
override def toString =
if (c.size > 20) c.size + " children"
else c.mkString("children:\\n ", "\\n ", "")
}
object NormalChildrenContainer {
def apply(c: immutable.TreeMap[String, ChildStats]): ChildrenContainer =
if (c.isEmpty) EmptyChildrenContainer
else new NormalChildrenContainer(c)
}
/**
* Waiting state: there are outstanding termination requests (i.e. context.stop(child)
* was called but the corresponding ChildTerminated() system message has not yet been
 * processed). There could be no specific reason (UserRequest), or we could be in
 * Recreation or Termination.
*
* Removing the last child which was supposed to be terminating will return a different
* type of container, depending on whether or not children are left and whether or not
 * the reason was Termination.
*/
case class TerminatingChildrenContainer(c: immutable.TreeMap[String, ChildStats], toDie: Set[ActorRef], reason: SuspendReason)
extends ChildrenContainer {
override def add(name: String, stats: ChildRestartStats): ChildrenContainer = copy(c.updated(name, stats))
override def remove(child: ActorRef): ChildrenContainer = {
val t = toDie - child
if (t.isEmpty) reason match {
case Termination ⇒ TerminatedChildrenContainer
case _ ⇒ NormalChildrenContainer(c - child.path.name)
}
else copy(c - child.path.name, t)
}
override def getByName(name: String): Option[ChildStats] = c.get(name)
override def getByRef(actor: ActorRef): Option[ChildRestartStats] = c.get(actor.path.name) match {
case c @ Some(crs: ChildRestartStats) if (crs.child == actor) ⇒ c.asInstanceOf[Option[ChildRestartStats]]
case _ ⇒ None
}
override def children: immutable.Iterable[ActorRef] =
if (c.isEmpty) EmptyImmutableSeq else new ChildrenIterable(c)
override def stats: immutable.Iterable[ChildRestartStats] =
if (c.isEmpty) EmptyImmutableSeq else new ChildRestartsIterable(c)
override def shallDie(actor: ActorRef): ChildrenContainer = copy(toDie = toDie + actor)
override def reserve(name: String): ChildrenContainer = reason match {
case Termination ⇒ throw new IllegalStateException("cannot reserve actor name '" + name + "': terminating")
case _ ⇒
if (c contains name)
throw new InvalidActorNameException(s"actor name [$name] is not unique!")
else copy(c = c.updated(name, ChildNameReserved))
}
override def unreserve(name: String): ChildrenContainer = c.get(name) match {
case Some(ChildNameReserved) ⇒ copy(c = c - name)
case _ ⇒ this
}
override def isTerminating: Boolean = reason == Termination
override def isNormal: Boolean = reason == UserRequest
override def toString =
if (c.size > 20) c.size + " children"
else c.mkString("children (" + toDie.size + " terminating):\\n ", "\\n ", "\\n") + toDie
}
}
| jmnarloch/akka.js | akka-js-actor/js/src/main/scala/akka/actor/dungeon/ChildrenContainer.scala | Scala | bsd-3-clause | 9,077 |
package com.rbmhtechnology.eventuate.chaos
import akka.actor.ActorRef
import akka.util.Timeout
import com.rbmhtechnology.eventuate.crdt.ORSetService
import scala.concurrent.duration._
class ChaosSetInterface(service: ORSetService[Int]) extends ChaosInterface {
val setId = "test"
implicit val timeout = Timeout(1.seconds)
private def writeSet(set: Set[Int], receiver: ActorRef) = {
reply(s"[${set.mkString(",")}]", receiver)
}
def handleCommand = {
case ("add", Some(v), recv) =>
service.add(setId, v).map(x => writeSet(x, recv))
case ("remove", Some(v), recv) =>
service.remove(setId, v).map(x => writeSet(x, recv))
case ("get", None, recv) =>
service.value(setId).map(x => writeSet(x, recv))
}
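  // Wire sketch (assuming the text framing provided by ChaosInterface):
  //   "add 1"    -> adds 1 to the ORSet and replies "[1]"
  //   "remove 1" -> removes 1 and replies with the remaining elements
  //   "get"      -> replies with the current set, e.g. "[1,2]"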
}
| RBMHTechnology/eventuate-chaos | src/main/scala/com/rbmhtechnology/eventuate/chaos/ChaosSetInterface.scala | Scala | apache-2.0 | 752 |
import dit4c.common.KryoSerializable
import dit4c.common.ProtobufSerializable
import java.time.Instant
import com.google.protobuf.timestamp.Timestamp
package object domain {
object BaseDomainEvent {
import scala.language.implicitConversions
implicit def instant2timestamp(instant: Instant): Timestamp =
Timestamp(instant.getEpochSecond, instant.getNano)
implicit def timestamp2instant(ts: Timestamp): Instant =
Instant.ofEpochSecond(ts.seconds, ts.nanos.toLong)
implicit def optTimestamp2optInstant(oi: Option[Instant]): Option[Timestamp] =
oi.map(instant2timestamp)
implicit def optInstant2optTimestamp(ots: Option[Timestamp]): Option[Instant] =
ots.map(timestamp2instant)
def now: Some[Timestamp] = Some(instant2timestamp(Instant.now))
}
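  // With these implicits in scope, Instant and protobuf Timestamp interconvert:
  //   val ts: Timestamp = Instant.now; val back: Instant = ts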
trait BaseDomainEvent extends ProtobufSerializable
trait BaseCommand extends KryoSerializable
trait BaseResponse extends KryoSerializable
}
| dit4c/dit4c | dit4c-portal/app/domain/package.scala | Scala | mit | 946 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp.
package scala
object Product7 {
def unapply[T1, T2, T3, T4, T5, T6, T7](x: Product7[T1, T2, T3, T4, T5, T6, T7]): Option[Product7[T1, T2, T3, T4, T5, T6, T7]] =
Some(x)
}
/** Product7 is a Cartesian product of 7 components.
* @since 2.3
*/
trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Any with Product {
/** The arity of this product.
* @return 7
*/
override def productArity = 7
/** Returns the n-th projection of this product if 0 <= n < productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
* @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
case 3 => _4
case 4 => _5
case 5 => _6
case 6 => _7
case _ => throw new IndexOutOfBoundsException(n.toString())
}
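  // e.g. Tuple7(1, 2, 3, 4, 5, 6, 7).productElement(6) == 7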
/** A projection of element 1 of this Product.
* @return A projection of element 1.
*/
def _1: T1
/** A projection of element 2 of this Product.
* @return A projection of element 2.
*/
def _2: T2
/** A projection of element 3 of this Product.
* @return A projection of element 3.
*/
def _3: T3
/** A projection of element 4 of this Product.
* @return A projection of element 4.
*/
def _4: T4
/** A projection of element 5 of this Product.
* @return A projection of element 5.
*/
def _5: T5
/** A projection of element 6 of this Product.
* @return A projection of element 6.
*/
def _6: T6
/** A projection of element 7 of this Product.
* @return A projection of element 7.
*/
def _7: T7
}
| felixmulder/scala | src/library/scala/Product7.scala | Scala | bsd-3-clause | 2,404 |
package com.programmaticallyspeaking.nashornmonads.nashorn
import java.io._
import java.nio.file.{FileSystems, StandardCopyOption, Files, Path}
import java.util.concurrent.Executors
import java.util.{UUID, Scanner}
import javax.script.{ScriptContext, ScriptEngine, Bindings}
import jdk.nashorn.api.scripting.{JSObject, ScriptObjectMirror, NashornScriptEngineFactory}
object Bridge {
def apply(): Bridge = apply(classOf[Bridge])
def apply(resourceReferenceClass: Class[_]): Bridge = {
val engine = new NashornScriptEngineFactory().getScriptEngine
new Bridge(engine, engine.getBindings(ScriptContext.ENGINE_SCOPE), resourceReferenceClass)
}
}
class Bridge(engine: ScriptEngine, bindings: Bindings, resourceReferenceClass: Class[_]) {
//TODO: When to shut this one down?
  private val scheduledExecutorService = Executors.newSingleThreadScheduledExecutor()
// Patch the global load function so that it supports resource loading
patchLoad()
// Install setTimeout and friends
installTimerFunctions()
// Install Q for promise support
installPromiseSupport()
private def getResourceAsStream(resourcePath: String): Option[InputStream] =
Option(resourceReferenceClass.getResourceAsStream(resourcePath))
private def writeResourceToTempFile(is: InputStream, resourcePath: String): File = {
val tempFile = File.createTempFile("bridge", resourcePath.replace("/", "_"))
Files.copy(is, tempFile.toPath, StandardCopyOption.REPLACE_EXISTING)
tempFile.deleteOnExit() // the file gets deleted when the JVM exits
tempFile
}
private def getResourceAsTempFile(resourcePath: String): Option[File] =
getResourceAsStream(resourcePath).map(stream => writeResourceToTempFile(stream, resourcePath))
private def patchLoad(): Unit = {
val loader = new Loader
bindings.put("__loader", loader)
eval(
"""
|var noResourceFoundEx = Java.type('com.programmaticallyspeaking.nashornmonads.nashorn.NoResourceFoundException');
|var global = this;
|var originalLoad = global.load;
|this.load = function () {
| var args = Array.prototype.slice.call(arguments);
| try {
| originalLoad.apply(this, args);
| } catch (e) {
| try {
| global.__loader.load(e, args[0]);
| } catch (fromLoader) {
| if (fromLoader instanceof noResourceFoundEx)
| throw e; // the loader couldn't load the resource, re-throw the original error
| throw fromLoader;
| }
| }
|};
""".stripMargin)
}
private def installPromiseSupport(): Unit = {
eval(
"""
|load('/lib/generated/jvm-npm.js');
|this.Q = require('Q');
""".stripMargin)
}
private def installTimerFunctions(): Unit = {
    val timerFunctions = new WindowTimers(scheduledExecutorService)
val installer = eval(
"""
|var global = this;
|var installer = function (WindowTimers) {
| ["setTimeout", "setInterval", "clearTimeout", "clearInterval"].forEach(function (name) {
| global[name] = WindowTimers[name];
| });
|};
|installer;
""".stripMargin)
installer match {
case mirror: JSObject if mirror.isFunction =>
mirror.call(null, timerFunctions)
case _ => throw new UnsupportedOperationException("Expected a JSObject from Nashorn when installing timer functions")
}
}
def eval(script: String): AnyRef = engine.eval(script, bindings)
class Loader {
def load(originalException: AnyRef, src: AnyRef): AnyRef = {
// Maybe the source refers to a resource. Try to obtain the resource as a temp file.
getResourceAsTempFile(src.toString) match {
case Some(tempFile) =>
val filePath = tempFile.getAbsolutePath.replace("\\\\", "\\\\\\\\")
eval(s"""load('$filePath');""")
case None =>
// No resource found, throw the original exception so that the ultimate caller of `load` gets a proper
// JavaScript exception.
throw new NoResourceFoundException() // caught by the load wrapper above
}
}
}
}
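// Usage sketch (assumes the JS resources referenced above are on the classpath):
//   val bridge = Bridge()
//   bridge.eval("setTimeout(function () { print('later'); }, 100);")
//   bridge.eval("Q.fcall(function () { return 42; }).then(function (v) { print(v); });")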
/**
* A marker exception thrown by the resource loader inside [[Bridge]], so that the load wrapper can determine why
* resource loading failed. This is a top-level class so that it has an "easy" class name.
*/
class NoResourceFoundException extends RuntimeException
| provegard/NashornMonads | src/main/scala/nashorn/Bridge.scala | Scala | mit | 4,450 |
package jp.hotbrain.makecsv
import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import javax.crypto._
import javax.crypto.spec._
/**
* Created by hideki takada on 2016/09/10.
*/
case class AesParam(
serial: String,
keyStr: String,
ivStr: String
) {
lazy val key: Array[Byte] = AesParam.getArray(keyStr)
lazy val iv: Array[Byte] = AesParam.getArray(ivStr)
val longSerial: Long = java.lang.Long.parseLong(serial)
def getCipherOf(mode: Int): Cipher = {
val keySpec = new SecretKeySpec(key, "AES")
val cipher = Cipher.getInstance("AES/PCBC/PKCS5Padding")
val ivspec = new IvParameterSpec(iv)
cipher.init(mode, keySpec, ivspec)
cipher
}
override def toString: String = {
s"""serial:$serial,key:"$keyStr",iv:"$ivStr""""
}
}
/**
 * Only 128-bit (16-byte) byte arrays are supported
*/
object AesParam {
private[this] lazy val regex16 = "([0-9a-fA-F]{32})".r
def getArray(str: String): Array[Byte] = {
str match {
case regex16(base16) => Base16.toByteArray(base16)
      case _ => MessageDigest.getInstance("MD5").digest(str.getBytes(StandardCharsets.US_ASCII))
}
}
}
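// Usage sketch (hypothetical key material, not real secrets):
//   val aes = AesParam("1", "0123456789abcdef0123456789abcdef", "some-iv-seed")
//   val ct = aes.getCipherOf(Cipher.ENCRYPT_MODE).doFinal("hello".getBytes("UTF-8"))
//   val pt = aes.getCipherOf(Cipher.DECRYPT_MODE).doFinal(ct)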
| HidekiTak/make_csv | src/main/scala/jp/hotbrain/makecsv/AesParam.scala | Scala | apache-2.0 | 1,235 |
package dotty.tools
package dotc
package util
import scala.collection.mutable.ArrayBuffer
import dotty.tools.io._
import annotation.tailrec
import java.util.regex.Pattern
import java.io.IOException
import Chars._
import ScriptSourceFile._
import Positions._
object ScriptSourceFile {
private val headerPattern = Pattern.compile("""^(::)?!#.*(\\r|\\n|\\r\\n)""", Pattern.MULTILINE)
private val headerStarts = List("#!", "::#!")
def apply(file: AbstractFile, content: Array[Char]) = {
/** Length of the script header from the given content, if there is one.
* The header begins with "#!" or "::#!" and ends with a line starting
* with "!#" or "::!#".
*/
val headerLength =
if (headerStarts exists (content startsWith _)) {
val matcher = headerPattern matcher content.mkString
if (matcher.find) matcher.end
else throw new IOException("script file does not close its header with !# or ::!#")
} else 0
new SourceFile(file, content drop headerLength) {
override val underlying = new SourceFile(file, content)
}
}
}
case class SourceFile(file: AbstractFile, content: Array[Char]) {
def this(_file: AbstractFile) = this(_file, _file.toCharArray)
def this(sourceName: String, cs: Seq[Char]) = this(new VirtualFile(sourceName), cs.toArray)
def this(file: AbstractFile, cs: Seq[Char]) = this(file, cs.toArray)
/** Tab increment; can be overridden */
def tabInc = 8
override def equals(that : Any) = that match {
case that : SourceFile => file.path == that.file.path && start == that.start
case _ => false
}
override def hashCode = file.path.## + start.##
def apply(idx: Int) = content.apply(idx)
val length = content.length
/** true for all source files except `NoSource` */
def exists: Boolean = true
/** The underlying source file */
def underlying: SourceFile = this
/** The start of this file in the underlying source file */
def start = 0
def atPos(pos: Position): SourcePosition =
if (pos.exists) SourcePosition(underlying, pos)
else NoSourcePosition
def isSelfContained = underlying eq this
/** Map a position to a position in the underlying source file.
* For regular source files, simply return the argument.
*/
def positionInUltimateSource(position: SourcePosition): SourcePosition =
SourcePosition(underlying, position.pos shift start)
def isLineBreak(idx: Int) =
if (idx >= length) false else {
val ch = content(idx)
// don't identify the CR in CR LF as a line break, since LF will do.
if (ch == CR) (idx + 1 == length) || (content(idx + 1) != LF)
else isLineBreakChar(ch)
}
def calculateLineIndices(cs: Array[Char]) = {
val buf = new ArrayBuffer[Int]
buf += 0
for (i <- 0 until cs.length) if (isLineBreak(i)) buf += i + 1
    buf += cs.length // sentinel, so that offsetToLine below works more smoothly
buf.toArray
}
private lazy val lineIndices: Array[Int] = calculateLineIndices(content)
/** Map line to offset of first character in line */
def lineToOffset(index : Int): Int = lineIndices(index)
/** A cache to speed up offsetToLine searches to similar lines */
private var lastLine = 0
/** Convert offset to line in this source file
* Lines are numbered from 0
*/
def offsetToLine(offset: Int): Int = {
lastLine = Util.bestFit(lineIndices, offset, lastLine)
lastLine
}
def startOfLine(offset: Int): Int = lineToOffset(offsetToLine(offset))
def nextLine(offset: Int): Int =
lineToOffset(offsetToLine(offset) + 1 min lineIndices.length - 1)
def lineContents(offset: Int): String =
content.slice(startOfLine(offset), nextLine(offset)).mkString
def column(offset: Int): Int = {
var idx = startOfLine(offset)
var col = 0
while (idx != offset) {
col += (if (content(idx) == '\\t') tabInc - col % tabInc else 1)
idx += 1
}
col + 1
}
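  // e.g. for content "ab\tc" with the default tabInc = 8, column(3) is 9:
  // the tab advances the column to the next multiple of tabInc.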
override def toString = file.toString
}
object NoSource extends SourceFile("<no source>", Nil) {
override def exists = false
}
| AlexSikia/dotty | src/dotty/tools/dotc/util/SourceFile.scala | Scala | bsd-3-clause | 4,082 |
package com.codacy.client.bitbucket.v2.service
import java.net.URLEncoder
import com.codacy.client.bitbucket.client.{BitbucketClient, Request, RequestResponse}
import com.codacy.client.bitbucket.v2.{PullRequest, PullRequestComment, PullRequestReviewers, SimpleCommit}
import play.api.libs.json._
class PullRequestServices(client: BitbucketClient) {
/*
   * Gets the list of a repository's pull requests
*
* States: OPEN | MERGED | DECLINED
*
*/
def getPullRequests(
owner: String,
repository: String,
states: Seq[String] = Seq("OPEN"),
size: Int = 50
): RequestResponse[Seq[PullRequest]] = {
val pullRequestsUrl = generatePullRequestsUrl(owner, repository)
val encodedStates = states.map(state => URLEncoder.encode(state, "UTF-8"))
val url = s"$pullRequestsUrl?pagelen=$size&state=${encodedStates.mkString("&state=")}"
client.executePaginated(Request(url, classOf[Seq[PullRequest]]))
}
/*
* Gets the list of commits of a pull request
*
*/
def getPullRequestCommits(
owner: String,
repository: String,
prId: Long,
size: Int = 100
): RequestResponse[Seq[SimpleCommit]] = {
val pullRequestsUrl = generatePullRequestsUrl(owner, repository)
val url = s"$pullRequestsUrl/$prId/commits?pagelen=$size"
client.executePaginated(Request(url, classOf[Seq[SimpleCommit]]))
}
private[this] def postNewComment(
owner: String,
repository: String,
prId: Int,
values: JsObject
): RequestResponse[PullRequestComment] = {
val pullRequestsUrl = generatePullRequestsUrl(owner, repository)
val url = s"$pullRequestsUrl/$prId/comments"
client.postJson(Request(url, classOf[PullRequestComment]), values)
}
def create(
owner: String,
repository: String,
title: String,
sourceBranch: String,
destinationBranch: String
): RequestResponse[JsObject] = {
val pullRequestsUrl = generatePullRequestsUrl(owner, repository)
val payload = Json.obj(
"title" -> title,
"source" -> Json.obj("branch" -> Json.obj("name" -> sourceBranch)),
"destination" -> Json.obj("branch" -> Json.obj("name" -> destinationBranch))
)
client.postJson(Request(pullRequestsUrl, classOf[JsObject]), payload)
}
def postApprove(owner: String, repository: String, prId: Long): RequestResponse[JsObject] = {
val pullRequestsUrl = generatePullRequestsUrl(owner, repository)
val url = s"$pullRequestsUrl/$prId/approve"
client.postJson(Request(url, classOf[JsObject]), JsNull)
}
def deleteApprove(owner: String, repository: String, prId: Long): RequestResponse[Boolean] = {
val pullRequestsUrl = generatePullRequestsUrl(owner, repository)
val url = s"$pullRequestsUrl/$prId/approve"
client.delete(url)
}
def merge(owner: String, repository: String, prId: Long): RequestResponse[JsObject] = {
val pullRequestsUrl = generatePullRequestsUrl(owner, repository)
val url = s"$pullRequestsUrl/$prId/merge"
client.postJson(Request(url, classOf[JsObject]), JsNull)
}
def decline(owner: String, repository: String, prId: Long): RequestResponse[JsObject] = {
val pullRequestsUrl = generatePullRequestsUrl(owner, repository)
val url = s"$pullRequestsUrl/$prId/decline"
client.postJson(Request(url, classOf[JsObject]), JsNull)
}
def createLineComment(
author: String,
repo: String,
prId: Int,
body: String,
file: Option[String],
line: Option[Int]
): RequestResponse[PullRequestComment] = {
val params = for {
filename <- file
lineTo <- line
} yield {
"inline" -> Json.obj("path" -> JsString(filename), "to" -> JsNumber(lineTo))
}
val values = JsObject(params.toSeq :+ "content" -> Json.obj("raw" -> JsString(body)))
postNewComment(author, repo, prId, values)
}
def createPullRequestComment(
author: String,
repo: String,
prId: Int,
content: String
): RequestResponse[PullRequestComment] = {
val values = Json.obj("content" -> Json.obj("raw" -> JsString(content)))
postNewComment(author, repo, prId, values)
}
def deleteComment(
author: String,
repository: String,
pullRequestId: Int,
commentId: Long
): RequestResponse[Boolean] = {
val pullRequestsUrl = generatePullRequestsUrl(author, repository)
val url = s"$pullRequestsUrl/$pullRequestId/comments/$commentId"
client.delete(url)
}
def listComments(author: String, repository: String, pullRequestId: Int): RequestResponse[Seq[PullRequestComment]] = {
val pullRequestsUrl = generatePullRequestsUrl(author, repository)
val url = s"$pullRequestsUrl/$pullRequestId/comments"
client
.executePaginated(Request(url, classOf[Seq[PullRequestComment]]))
.map(_.filterNot(_.deleted))
}
def getPullRequestsReviewers(owner: String, repository: String, prId: Long): RequestResponse[PullRequestReviewers] = {
val pullRequestsUrl = generatePullRequestsUrl(owner, repository)
val url = s"$pullRequestsUrl/$prId"
client.execute(Request(url, classOf[PullRequestReviewers]))
}
private def generatePullRequestsUrl(owner: String, repo: String): String = {
val encodedOwner = URLEncoder.encode(owner, "UTF-8")
val encodedRepo = URLEncoder.encode(repo, "UTF-8")
s"${client.repositoriesBaseUrl}/$encodedOwner/$encodedRepo/pullrequests"
}
}
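// Usage sketch (owner/repo values are placeholders; client construction is project-specific):
//   val prs = new PullRequestServices(client)
//   prs.getPullRequests("some-owner", "some-repo", states = Seq("OPEN", "MERGED"))
//   prs.createPullRequestComment("some-owner", "some-repo", prId = 1, "LGTM")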
| codacy/bitbucket-scala-client | src/main/scala/com/codacy/client/bitbucket/v2/service/PullRequestServices.scala | Scala | apache-2.0 | 5,420 |
package org.json4s
import org.json4s.reflect.ScalaType
trait RichSerializer[A] {
def deserialize(implicit format: Formats): PartialFunction[(ScalaType, JValue), A]
def serialize(implicit format: Formats): PartialFunction[Any, JValue]
}
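// Implementation sketch (IntWrapper is a hypothetical case class, shown for shape only):
//   class IntWrapperSerializer extends RichSerializer[IntWrapper] {
//     def deserialize(implicit format: Formats) = {
//       case (t, JInt(n)) if t.erasure == classOf[IntWrapper] => IntWrapper(n.toInt)
//     }
//     def serialize(implicit format: Formats) = { case IntWrapper(n) => JInt(n) }
//   }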
| json4s/json4s | core/src/main/scala/org/json4s/RichSerializer.scala | Scala | apache-2.0 | 242 |
package sk.scalagine.math.transformation
import sk.scalagine.math.{Matrix4x4, Vector3}
/**
* Created with IntelliJ IDEA.
* User: zladovan
* Date: 13.7.2014
* Time: 12:33
*/
object Rotation {
def apply(axis: Vector3, angleInRadians: Float): Matrix4x4 = rotation(axis, angleInRadians)
def x(angleInRadians: Float): Matrix4x4 = rotationX(angleInRadians)
def y(angleInRadians: Float): Matrix4x4 = rotationY(angleInRadians)
def z(angleInRadians: Float): Matrix4x4 = rotationZ(angleInRadians)
private def rotation(axis: Vector3, angleInRadians: Float): Matrix4x4 = {
val axisNormalized = axis.normalize
    val c = Math.cos(angleInRadians).toFloat
    val omc = 1 - c
    val s = Math.sin(angleInRadians).toFloat
    val x = axisNormalized.x; val xPow2 = Math.pow(x, 2).toFloat
    val y = axisNormalized.y; val yPow2 = Math.pow(y, 2).toFloat
    val z = axisNormalized.z; val zPow2 = Math.pow(z, 2).toFloat
Matrix4x4(
c + omc * xPow2, omc * x * y - s * z, omc * x * z + s * y, 0,
omc * x * y + s * z, c + omc * yPow2, omc * y * z - s * x, 0,
omc * x * z - s * y, omc * y * z + s * x, c + omc * zPow2, 0,
0, 0, 0, 1)
}
  private def rotateAroundAxis(angleInRadians: Float, createMatrix: (Float, Float) => Matrix4x4): Matrix4x4 =
    createMatrix(Math.cos(angleInRadians).toFloat, Math.sin(angleInRadians).toFloat)
private def rotationX(angleInRadians: Float): Matrix4x4 =
rotateAroundAxis(angleInRadians, (cos, sin) =>
Matrix4x4(
1, 0, 0, 0,
0, cos, -sin, 0,
0, sin, cos, 0,
0, 0, 0, 1))
private def rotationY(angleInRadians: Float): Matrix4x4 =
rotateAroundAxis(angleInRadians, (cos, sin) =>
Matrix4x4(
cos, 0, sin, 0,
0, 1, 0, 0,
-sin, 0, cos, 0,
0, 0, 0, 1))
private def rotationZ(angleInRadians: Float): Matrix4x4 =
rotateAroundAxis(angleInRadians, (cos, sin) =>
Matrix4x4(
cos, -sin, 0, 0,
sin, cos, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1))
}
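// Usage sketch (assuming a Vector3(x, y, z) factory exists in this package):
//   val m = Rotation(Vector3(0, 0, 1), math.Pi.toFloat / 2) // 90 degrees about Z
//   // equivalently: Rotation.z(math.Pi.toFloat / 2)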
| zladovan/scalagine | engine/math/src/main/scala/sk/scalagine/math/transformation/Rotation.scala | Scala | mit | 2,405 |
package org.bitcoins.wallet
import org.bitcoins.asyncutil.AsyncUtil
import org.bitcoins.core.api.wallet.db.SpendingInfoDb
import org.bitcoins.core.currency.Satoshis
import org.bitcoins.core.protocol.BitcoinAddress
import org.bitcoins.core.protocol.blockchain.{Block, RegTestNetChainParams}
import org.bitcoins.core.protocol.transaction.{EmptyTransaction, Transaction}
import org.bitcoins.testkit.wallet.BitcoinSWalletTest
import org.bitcoins.testkit.wallet.FundWalletUtil.FundedWallet
import org.scalatest.FutureOutcome
import scala.concurrent.duration.DurationInt
import scala.concurrent.{Future, Promise}
class WalletCallbackTest extends BitcoinSWalletTest {
type FixtureParam = FundedWallet
override def withFixture(test: OneArgAsyncTest): FutureOutcome = {
withFundedWallet(test, getBIP39PasswordOpt())(getFreshWalletAppConfig)
}
behavior of "WalletCallbacks"
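  // Common pattern in the tests below: register a callback that completes a
  // Promise, trigger the wallet action, then await the Promise to assert the
  // callback fired with the expected payload.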
it must "verify OnNewAddressGenerated callbacks are executed" in {
fundedWallet: FundedWallet =>
val resultP: Promise[BitcoinAddress] = Promise()
val callback: OnNewAddressGenerated = (addr: BitcoinAddress) => {
Future {
resultP.success(addr)
()
}
}
val callbacks = WalletCallbacks.onNewAddressGenerated(callback)
fundedWallet.wallet.walletConfig.addCallbacks(callbacks)
val wallet = fundedWallet.wallet
for {
address <- wallet.getNewAddress()
exists <- wallet.contains(address, None)
_ = assert(exists, "Wallet must contain address after generating it")
result <- resultP.future
} yield assert(result == address)
}
it must "verify OnTransactionProcessed callbacks are executed" in {
fundedWallet: FundedWallet =>
val resultP: Promise[Transaction] = Promise()
val callback: OnTransactionProcessed = (tx: Transaction) => {
Future {
resultP.success(tx)
()
}
}
val callbacks = WalletCallbacks.onTransactionProcessed(callback)
fundedWallet.wallet.walletConfig.addCallbacks(callbacks)
val wallet = fundedWallet.wallet
for {
address <- wallet.getNewAddress()
tx <- wallet.sendToAddress(address, Satoshis(1000), None)
_ <- wallet.processTransaction(tx, None)
result <- resultP.future
} yield assert(result == tx)
}
it must "verify OnTransactionProcessed callbacks are not executed for a transaction unrelated to the wallet" in {
fundedWallet: FundedWallet =>
val resultP: Promise[Transaction] = Promise()
val callback: OnTransactionProcessed = (tx: Transaction) => {
Future {
resultP.success(tx)
()
}
}
val callbacks = WalletCallbacks.onTransactionProcessed(callback)
fundedWallet.wallet.walletConfig.addCallbacks(callbacks)
val wallet = fundedWallet.wallet
// a random testnet transaction
val tx = Transaction.fromHex(
"""02000000000101c2cb8b4d16d2a111cfd2f44e674a89327cfb2dcad5828ec9ad12edb3972b2c
|b20100000000feffffff023ef929e900000000160014a5f44222c5859b388f513f07c96bdf69
|8a5a6bfd87c71d00000000001600145543e613b22f2393e76510cede73952405a5c9b9024730
|440220348dc443d9a0cc6b5365d7ef8d62e1ca4d890c6f4d817a0fb0f48ff36b97e08702201d
|77554641889932523e7d103385d99834cb9f29328ce11282ccbe218acf56440121028bb78dbe
|0ea469c97061b8dcc870ec25d5abcd938f19ec17e32422f8f318fa251b992000""".stripMargin)
for {
txno <- wallet.listTransactions().map(_.size)
_ <- wallet.processTransaction(tx, None)
_ <- AsyncUtil.nonBlockingSleep(50.millis)
txs <- wallet.listTransactions()
} yield {
assert(txs.size == txno)
assert(!resultP.isCompleted)
}
}
it must "verify OnTransactionBroadcast callbacks are executed" in {
fundedWallet: FundedWallet =>
val resultP: Promise[Transaction] = Promise()
val callback: OnTransactionBroadcast = (tx: Transaction) => {
Future {
resultP.success(tx)
()
}
}
val callbacks = WalletCallbacks.onTransactionBroadcast(callback)
fundedWallet.wallet.walletConfig.addCallbacks(callbacks)
val wallet = fundedWallet.wallet
for {
_ <- wallet.broadcastTransaction(EmptyTransaction)
result <- resultP.future
} yield assert(result == EmptyTransaction)
}
it must "verify OnReservedUtxos callbacks are executed when reserving" in {
fundedWallet: FundedWallet =>
val resultP: Promise[Vector[SpendingInfoDb]] = Promise()
val callback: OnReservedUtxos = (infos: Vector[SpendingInfoDb]) => {
Future {
resultP.success(infos)
()
}
}
val callbacks = WalletCallbacks.onReservedUtxos(callback)
fundedWallet.wallet.walletConfig.addCallbacks(callbacks)
val wallet = fundedWallet.wallet
for {
utxos <- wallet.listUtxos()
_ <- wallet.markUTXOsAsReserved(Vector(utxos.head))
result <- resultP.future
} yield assert(
// just compare outPoints because states will be changed so they won't be equal
result.map(_.outPoint) == Vector(utxos.head).map(_.outPoint))
}
it must "verify OnReservedUtxos callbacks are executed when un-reserving" in {
fundedWallet: FundedWallet =>
val resultP: Promise[Vector[SpendingInfoDb]] = Promise()
val callback: OnReservedUtxos = (infos: Vector[SpendingInfoDb]) => {
Future {
resultP.success(infos)
()
}
}
val callbacks = WalletCallbacks.onReservedUtxos(callback)
val wallet = fundedWallet.wallet
for {
utxos <- wallet.listUtxos()
reserved <- wallet.markUTXOsAsReserved(Vector(utxos.head))
_ = fundedWallet.wallet.walletConfig.addCallbacks(callbacks)
_ <- wallet.unmarkUTXOsAsReserved(reserved)
result <- resultP.future
// just compare outPoints because states will be changed so they won't be equal
} yield assert(result.map(_.outPoint) == reserved.map(_.outPoint))
}
it must "verify OnBlockProcessed callbacks are executed" in {
fundedWallet: FundedWallet =>
val resultP: Promise[Block] = Promise()
val block = RegTestNetChainParams.genesisBlock
val callback: OnBlockProcessed = (b: Block) => {
Future {
resultP.success(b)
()
}
}
val callbacks = WalletCallbacks.onBlockProcessed(callback)
fundedWallet.wallet.walletConfig.addCallbacks(callbacks)
val wallet = fundedWallet.wallet
for {
_ <- wallet.processBlock(block)
result <- resultP.future
} yield assert(result == block)
}
}
| bitcoin-s/bitcoin-s | wallet-test/src/test/scala/org/bitcoins/wallet/WalletCallbackTest.scala | Scala | mit | 6,761 |
package xiatian.knowledge.models
import java.text.SimpleDateFormat
import reactivemongo.api.collections.bson.BSONCollection
import reactivemongo.bson.{BSONDocument, BSONDocumentReader, BSONDocumentWriter, Macros}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
/**
 * Journal entity
*
* @author Tian Xia
* Jun 15, 2017 17:20
*/
case class Journal(name: String, rank: Int)
object Journal extends MongoDocument {
val collectionName = "journal"
val collection: Future[BSONCollection] = db.map(_.collection(collectionName))
val dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
implicit def journalWriter: BSONDocumentWriter[Journal] =
Macros.writer[Journal]
implicit def journalReader: BSONDocumentReader[Journal] =
Macros.reader[Journal]
/**
   * Add a journal to the database
   *
   * @return (true, journal name) on success, otherwise (false, error message)
*/
def add(name: String, rank: Int): Future[(Boolean, String)] = collection
.flatMap {
c =>
c.count(Some(BSONDocument("name" -> name))).flatMap(
count =>
if (count > 0) {
Future.successful((false, s"$name has already exists."))
} else {
c.insert(Journal(name, rank)).flatMap(
r =>
Future.successful((r.ok, if (r.ok) name else r.toString))
)
}
)
}
/**
* 初始化时,注入期刊名称
*/
override def init(): Future[(Boolean, String)] = {
    // Print out the journal names under the data/papers directory
// import better.files.File
// val dir = File("./data/papers")
// dir.list.zipWithIndex
// .foreach {
// case (x: File, idx: Int) =>
// println(s"""add("${x.name}", ${idx + 1}),""")
// }
val addingFutures = Future.sequence(Seq(
add("情报资料工作", 1),
add("图书馆建设", 2),
add("图书馆", 3),
add("图书馆杂志", 4),
add("档案学通讯", 5),
add("情报杂志", 6),
add("图书情报知识", 7),
add("国家图书馆学刊", 8),
add("情报科学", 9),
add("情报学报", 10),
add("图书馆论坛", 11),
add("中国图书馆学报", 12),
add("大学图书馆学报", 13),
add("图书馆工作与研究", 14),
add("图书情报工作", 15),
add("现代图书情报技术", 16),
add("图书馆学研究", 17),
add("图书与情报", 18),
add("情报理论与实践", 19),
add("档案学研究", 20)
)).flatMap(
results => Future.successful((results.forall(_._1), "插入期刊名称"))
)
    // Drop the existing collection, then re-insert
collection.flatMap(c=>
c.drop(false).flatMap(_ => addingFutures)
)
}
}
| iamxiatian/knowledge | src/main/scala/xiatian/knowledge/models/Journal.scala | Scala | gpl-3.0 | 2,855 |
/*
* Copyright 2018 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.action
import java.util.UUID
import org.scalatest.BeforeAndAfterAll
import play.api.test.Helpers._
import play.api.test.FakeRequest
import play.api.Play
import play.api.test._
import play.api.mvc._
import play.api.mvc.Results._
import service._
import scala.concurrent.Future
class ReadKeystoreActionSpec extends test.BaseSpec {
"ReadKeystore" should {
"convert should read keystore" in new MockKeystoreFixture {
// set up
val request = FakeRequest("GET", "/abc")
MockKeystore.map = Map[String,String]("msg"->"hello worlds")
val readKeystore = new ReadKeystore with test.NullMetrics {
def keystore: KeystoreService = MockKeystore
def test[T](r: Request[T]): Future[DataRequest[T]] = super.convert(r)
def invokeBlock[A](r: Request[A], block: DataRequest[A] => Future[Result]): Future[Result] = Future.successful(Ok("hi"))
}
// test
val result = await(readKeystore.test(request))
// check
result.data.get("msg") shouldBe Some("hello worlds")
}
"updateSession" should {
"update session with request keystore data" in new MockKeystoreFixture {
// set up
implicit val request = FakeRequest("GET", "/abc").withSession("msg"->"abc")
val dataRequest = new DataRequest(Map[String,String]("msg"->"hello worlds"),request)
val readKeystore = new ReadKeystore with test.NullMetrics {
def keystore: KeystoreService = MockKeystore
def test[T](): Future[Result] = super.updateSession(dataRequest, Future.successful(Ok("hi")))
def invokeBlock[A](r: Request[A], block: DataRequest[A] => Future[Result]): Future[Result] = Future.successful(Ok("hi"))
}
// test
val result = await(readKeystore.test())
// check
result.session.get("msg") shouldBe Some("hello worlds")
}
"does not update session when no keystore data" in new MockKeystoreFixture {
// set up
implicit val request = FakeRequest("GET", "/abc").withSession("msg"->"abc")
val dataRequest = new DataRequest(Map[String,String](),request)
val readKeystore = new ReadKeystore with test.NullMetrics {
def keystore: KeystoreService = MockKeystore
def test[T](): Future[Result] = super.updateSession(dataRequest, Future.successful(Ok("hi")))
def invokeBlock[A](r: Request[A], block: DataRequest[A] => Future[Result]): Future[Result] = Future.successful(Ok("hi"))
}
// test
val result = await(readKeystore.test())
// check
result.session.get("msg") shouldBe Some("abc")
}
}
"ReadKeystoreAction" should {
"read keystore and update result sesssion" in new MockKeystoreFixture {
// set up
implicit val request = FakeRequest("GET", "/abc").withSession("msg"->"abc")
MockKeystore.map = Map[String,String]("msg"->"hello worlds")
val readKeystore = ReadKeystoreAction(MockKeystore)
// test
val futureResult = readKeystore.invokeBlock(request, {
(request: DataRequest[_]) =>
Future.successful(Ok("hi"))
})
val result = await(futureResult)
// check
result.session.get("msg") shouldBe Some("hello worlds")
}
}
}
}
| hmrc/paac-frontend | test/controllers/action/ReadKeystoreActionSpec.scala | Scala | apache-2.0 | 3,922 |
package org.ndc.ndc
import scala.io.Source
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.testng.TestNGSuite
import org.testng.Assert._
import org.testng.annotations.Test
import org.testng.annotations.DataProvider
case class XmlResource(resourcePath: String, expectedContent: String) {
def getSource: Source = Source.fromURL(getClass.getResource(resourcePath))
}
class ExtractContentTest extends TestNGSuite with ShouldMatchers {
@DataProvider
def xmlSources() = {
Array(Array[Object](XmlResource("/empty_document.xml", "")),
Array[Object](XmlResource("/empty_text.xml", "")),
Array[Object](XmlResource("/empty.xml", "")),
Array[Object](XmlResource("/text.xml", "word secondWord 00. .? ! third")))
}
@Test(dataProvider = "xmlSources")
def returnOnlyTextTagFromXmlDocument(resource: XmlResource) {
val extract = new ExtractContent
val result = extract.textFromSource(resource.getSource)
result should equal(resource.expectedContent)
}
@DataProvider
def invalidXml() = {
Array(Array[Object]("/invalid_xml/empty.xml"), Array[Object]("/invalid_xml/missing_tag.xml"))
}
@Test(dataProvider = "invalidXml")
def shouldThrowForNonXmlInput(url: String) {
val extract = new ExtractContent
val source = Source.fromURL(getClass.getResource(url))
evaluating { extract.textFromSource(source) } should produce[IllegalStateException]
}
}
| NikolajLeischner/near_duplicates | src/test/scala/org/ndc/ndc/ExtractContentTest.scala | Scala | mit | 1,425 |
package play.api.libs.functional
import scala.language.higherKinds
trait Applicative[M[_]] {
def pure[A](a: A): M[A]
def map[A, B](m: M[A], f: A => B): M[B]
def apply[A, B](mf: M[A => B], ma: M[A]): M[B]
}
class ApplicativeOps[M[_], A](ma: M[A])(implicit a: Applicative[M]) {
def ~>[B](mb: M[B]): M[B] = a(a(a.pure((_: A) => (b: B) => b), ma), mb)
def andKeep[B](mb: M[B]): M[B] = ~>(mb)
def <~[B](mb: M[B]): M[A] = a(a(a.pure((a: A) => (_: B) => a), ma), mb)
def keepAnd[B](mb: M[B]): M[A] = <~(mb)
def <~>[B, C](mb: M[B])(implicit witness: <:<[A, B => C]): M[C] = apply(mb)
def apply[B, C](mb: M[B])(implicit witness: <:<[A, B => C]): M[C] = a(a.map(ma, witness), mb)
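  // `ma ~> mb` sequences both effects and keeps mb's result; `ma <~ mb` keeps ma's.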
}
| michaelahlers/team-awesome-wedding | vendor/play-2.2.1/framework/src/play-functional/src/main/scala/play/api/libs/functional/Applicative.scala | Scala | mit | 697 |
package fi.pyppe.ircbot.slave
import org.specs2.mutable._
import scala.concurrent.Future
class ImgurIntegrationTest extends Specification {
val url = "http://imgur.com/gallery/ONusBWD"
def await[T](f: Future[T]) = scala.concurrent.Await.result(f, scala.concurrent.duration.Duration("10s"))
"Imgur" should {
s"yield a response for $url" in {
val response = await(Imgur.publicGet(url))
println(response)
response must contain("likes imgur more than tumblr after a few weeks of having shown her imgur")
response must contain("posted: 13.7.2015")
}
}
}
| Pyppe/akka-ircbot | slave/src/test/scala/fi/pyppe/ircbot/slave/ImgurIntegrationTest.scala | Scala | mit | 596 |
package com.saikocat.spark.streaming
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
abstract class AbstractWriteTask[T] {
def parameters: Map[String, String]
def expressionEncoder: ExpressionEncoder[T]
def setup: () => Unit
def writeRecordFunc: (T) => Unit
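  // Lifecycle sketch: the caller is expected to invoke execute() once per
  // partition (setup, then writeRecordFunc per row) and close() after the
  // iterator is drained.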
def execute(iterator: Iterator[InternalRow]): Unit = {
setup()
while (iterator.hasNext) {
val currentRow: InternalRow = iterator.next()
val record: T = expressionEncoder.fromRow(currentRow)
writeRecordFunc(record)
}
}
def close(): Unit
}
| saikocat/spark-foreachiterator-sink | core/src/main/scala/com/idyllic/spark/streaming/AbstractWriterTask.scala | Scala | mit | 610 |
/*
* Copyright (c) 2012-2022 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.iglu.schemaddl.redshift
/**
* column_attributes are:
* [ DEFAULT default_expr ]
* [ IDENTITY ( seed, step ) ]
* [ ENCODE encoding ]
* [ DISTKEY ]
* [ SORTKEY ]
*/
sealed trait ColumnAttribute extends Ddl
case class Default(value: String) extends ColumnAttribute {
def toDdl = s"DEFAULT $value"
}
case class Identity(seed: Int, step: Int) extends ColumnAttribute {
def toDdl = s"IDENTITY ($seed, $step)"
}
case object DistKey extends ColumnAttribute {
def toDdl = "DISTKEY"
}
case object SortKey extends ColumnAttribute {
def toDdl = "SORTKEY"
}
/**
* Compression encodings
* http://docs.aws.amazon.com/redshift/latest/dg/c_Compression_encodings.html
*/
case class CompressionEncoding(value: CompressionEncodingValue) extends ColumnAttribute {
def toDdl = s"ENCODE ${value.toDdl}"
}
sealed trait CompressionEncodingValue extends Ddl
case object RawEncoding extends CompressionEncodingValue { def toDdl = "RAW" }
case object ByteDictEncoding extends CompressionEncodingValue { def toDdl = "BYTEDICT" }
case object DeltaEncoding extends CompressionEncodingValue { def toDdl = "DELTA" }
case object Delta32kEncoding extends CompressionEncodingValue { def toDdl = "DELTA32K" }
case object LzoEncoding extends CompressionEncodingValue { def toDdl = "LZO" }
case object Mostly8Encoding extends CompressionEncodingValue { def toDdl = "MOSTLY8ENCODING" }
case object Mostly16Encoding extends CompressionEncodingValue { def toDdl = "MOSTLY16ENCODING" }
case object Mostly32Encoding extends CompressionEncodingValue { def toDdl = "MOSTLY32ENCODING" }
case object RunLengthEncoding extends CompressionEncodingValue { def toDdl = "RUNLENGTH" }
case object Text255Encoding extends CompressionEncodingValue { def toDdl = "TEXT255" }
case object Text32KEncoding extends CompressionEncodingValue { def toDdl = "TEXT32K" }
case object ZstdEncoding extends CompressionEncodingValue { def toDdl = "ZSTD"}
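// e.g. CompressionEncoding(ZstdEncoding).toDdl == "ENCODE ZSTD"
// and Identity(0, 1).toDdl == "IDENTITY (0, 1)"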
| snowplow/schema-ddl | modules/core/src/main/scala/com.snowplowanalytics/iglu.schemaddl/redshift/ColumnAttribute.scala | Scala | apache-2.0 | 2,659 |
/* sbt -- Simple Build Tool
* Copyright 2009 Mark Harrah
*/
package sbt.wrap
// This file exists for compatibility between Scala 2.7.x and 2.8.0
import java.util.{Map => JMap, Set => JSet}
private[sbt] object Wrappers
{
def javaMap[K,V](pairs: (K,V)*) =
{
val basic = basicMap[K,V]
for( (k,v) <- pairs)
basic(k) = v
basic.underlying
}
def basicMap[K,V] = new MutableMapWrapper(new java.util.HashMap[K,V])
def identityMap[K,V] = new MutableMapWrapper(new java.util.IdentityHashMap[K,V])
def weakMap[K,V] = new MutableMapWrapper(new java.util.WeakHashMap[K,V])
def toList[K,V](s: java.util.Map[K,V]): List[(K,V)] = toList(s.entrySet).map(e => (e.getKey, e.getValue))
def toList[T](s: java.util.Collection[T]): List[T] = toList(s.iterator)
def toList[T](s: java.util.Iterator[T]): List[T] =
{
def add(l: List[T]): List[T] =
if(s.hasNext)
add(s.next() :: l)
else
l
add(Nil).reverse
}
def toList[T](s: java.util.Enumeration[T]): List[T] =
{
def add(l: List[T]): List[T] =
if(s.hasMoreElements)
add(s.nextElement() :: l)
else
l
add(Nil).reverse
}
def readOnly[K,V](map: scala.collection.mutable.Map[K,V]): scala.collection.Map[K,V] = map//.readOnly
def readOnly[T](set: scala.collection.mutable.Set[T]): scala.collection.Set[T] = set//.readOnly
def readOnly[T](buffer: scala.collection.mutable.Buffer[T]): Seq[T] = buffer//.readOnly
}
private[sbt] sealed abstract class Iterable[T] extends NotNull
{
def foreach(f: T => Unit) = toList.foreach(f)
def toList: List[T]
}
private[sbt] sealed trait Removable[T] extends NotNull
{
def -=(t: T) : Unit
def --=(all: Iterable[T]) { all.foreach(-=) }
def --=(all: scala.Iterable[T]) { all.foreach(-=) }
}
private[sbt] sealed trait Addable[T] extends NotNull
{
def +=(t: T) : Unit
def ++=(all: Iterable[T]) { all.foreach(+=) }
def ++=(all: scala.Iterable[T]) { all.foreach(+=) }
}
private[sbt] sealed abstract class Set[T] extends Iterable[T]
{
def contains(t: T): Boolean
}
private[sbt] sealed class SetWrapper[T](val underlying: JSet[T]) extends Set[T]
{
def contains(t: T) = underlying.contains(t)
	def toList = Wrappers.toList(underlying.iterator)
}
private[sbt] final class MutableSetWrapper[T](wrapped: JSet[T]) extends SetWrapper[T](wrapped) with Addable[T] with Removable[T]
{
def +=(t: T) { underlying.add(t) }
def -=(t: T) { underlying.remove(t) }
def readOnly: Set[T] = this
}
private[sbt] sealed abstract class Map[K,V] extends Iterable[(K,V)]
{
def apply(key: K): V
def get(key: K): Option[V]
def containsKey(key: K): Boolean
final def getOrElse[V2 >: V](key: K, default: => V2): V2 =
get(key) match
{
case Some(value) => value
case None => default
}
}
private[sbt] sealed abstract class MapWrapper[K,V](val underlying: JMap[K,V]) extends Map[K,V]
{
final def apply(key: K) = underlying.get(key)
final def get(key: K) =
{
val value = underlying.get(key)
if(value == null)
None
else
Some(value)
}
final def containsKey(key: K) = underlying.containsKey(key)
final def toList = Wrappers.toList(underlying)
final def values = toList.map(_._2)
}
private[sbt] sealed class MutableMapWrapper[K,V](wrapped: JMap[K,V]) extends MapWrapper[K,V](wrapped) with Removable[K] with Addable[(K,V)]
{
final def getOrElseUpdate(key: K, default: => V): V =
get(key) match
{
case Some(value) => value
case None =>
val newValue = default
underlying.put(key, newValue)
newValue
}
final def clear() = underlying.clear()
final def update(key: K, value: V) { underlying.put(key, value) }
final def +=(pair: (K, V) ) { update(pair._1, pair._2) }
final def -=(key: K) { underlying.remove(key) }
final def remove(key: K) = underlying.remove(key)
final def readOnly: Map[K,V] = this
}
| matheshar/simple-build-tool | src/main/scala/sbt/wrap/Wrappers.scala | Scala | bsd-3-clause | 3,747 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.examples
import java.util.Properties
import kafka.producer._
import org.apache.spark.streaming._
import org.apache.spark.streaming.StreamingContext._
import org.apache.spark.streaming.kafka._
import org.apache.spark.streaming.util.RawTextHelper._
// scalastyle:off
/**
* Consumes messages from one or more topics in Kafka and does wordcount.
* Usage: KafkaWordCount <master> <zkQuorum> <group> <topics> <numThreads>
* <master> is the Spark master URL. In local mode, <master> should be 'local[n]' with n > 1.
* <zkQuorum> is a list of one or more zookeeper servers that make quorum
* <group> is the name of kafka consumer group
* <topics> is a list of one or more kafka topics to consume from
* <numThreads> is the number of threads the kafka consumer should use
*
* Example:
* `./bin/run-example org.apache.spark.streaming.examples.KafkaWordCount local[2] zoo01,zoo02,zoo03 my-consumer-group topic1,topic2 1`
*/
// scalastyle:on
object KafkaWordCount {
def main(args: Array[String]) {
if (args.length < 5) {
System.err.println("Usage: KafkaWordCount <master> <zkQuorum> <group> <topics> <numThreads>")
System.exit(1)
}
StreamingExamples.setStreamingLogLevels()
val Array(master, zkQuorum, group, topics, numThreads) = args
val ssc = new StreamingContext(master, "KafkaWordCount", Seconds(2),
System.getenv("SPARK_HOME"), StreamingContext.jarOfClass(this.getClass))
ssc.checkpoint("checkpoint")
val topicpMap = topics.split(",").map((_,numThreads.toInt)).toMap
val lines = KafkaUtils.createStream(ssc, zkQuorum, group, topicpMap).map(_._2)
val words = lines.flatMap(_.split(" "))
val wordCounts = words.map(x => (x, 1L))
.reduceByKeyAndWindow(add _, subtract _, Minutes(10), Seconds(2), 2)
wordCounts.print()
ssc.start()
ssc.awaitTermination()
}
}
// Produces messages containing random "words" (single digits between 0 and 9).
object KafkaWordCountProducer {
def main(args: Array[String]) {
if (args.length < 2) {
System.err.println("Usage: KafkaWordCountProducer <metadataBrokerList> <topic> " +
"<messagesPerSec> <wordsPerMessage>")
System.exit(1)
}
val Array(brokers, topic, messagesPerSec, wordsPerMessage) = args
    // Kafka producer connection properties
val props = new Properties()
props.put("metadata.broker.list", brokers)
props.put("serializer.class", "kafka.serializer.StringEncoder")
val config = new ProducerConfig(props)
val producer = new Producer[String, String](config)
// Send some messages
while(true) {
val messages = (1 to messagesPerSec.toInt).map { messageNum =>
val str = (1 to wordsPerMessage.toInt).map(x => scala.util.Random.nextInt(10).toString)
.mkString(" ")
new KeyedMessage[String, String](topic, str)
}.toArray
producer.send(messages: _*)
Thread.sleep(100)
}
}
}
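// Editor's note: the producer can be exercised against a local broker, e.g.
//   ./bin/run-example org.apache.spark.streaming.examples.KafkaWordCountProducer localhost:9092 topic1 3 5
// (arguments: <metadataBrokerList> <topic> <messagesPerSec> <wordsPerMessage>)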
| sryza/spark | examples/src/main/scala/org/apache/spark/streaming/examples/KafkaWordCount.scala | Scala | apache-2.0 | 3,749 |
package Chapter20
import scala.util.matching.Regex
import scala.util.parsing.combinator.lexical.StdLexical
import scala.util.{Failure, Success, Try}
import scala.util.parsing.combinator.syntactical.{StandardTokenParsers, StdTokenParsers}
import scala.util.parsing.combinator.{PackratParsers, RegexParsers}
import scala.util.parsing.input.CharSequenceReader
object Parsing {
// topics:
// grammars
// combining parser operations
// transforming parsing results
// discarding tokens
// generating parse trees
// avoiding left recursion
// more combinators
// avoiding backtracking
// packrat parsers
// what exactly are parsers
// regex parsers
// token-based parsers
// error handling
// https://www.scala-lang.org/files/archive/api/current/scala-parser-combinators/index.html
// https://www.scala-lang.org/files/archive/api/current/scala-parser-combinators/scala/util/parsing/combinator/RegexParsers.html
// parser combinators library helps you analyze texts with fixed structure (json, for example);
// basic concepts of grammars and parsers;
// good example of Domain-Specific Language;
// RegexParser, literal strings and regular expressions match tokens;
// 'repsep' combinator, repeated items with a separator;
// token-based parser for languages with reserved words;
// parsers are functions reader => result;
// you need a robust error reporting;
// parser combinator library useful with a context-free grammars;
// grammars
def grammars = {
// a few concepts from the theory of formal languages;
// a grammar: set of rules for producing all strings in a particular format;
// e.g. arithmetic expression rules:
// - whole number is an expression;
// - '+', '-', '*' are operators;
// - 'left op right' is an expression if left, right are expressions and op is an operator;
// - '( expr )' is an expression;
// BNF
// a grammar is written in a notation: Backus-Naur Form, arithmetic expression rules:
// op ::= "+" | "-" | "*"
// expr ::= number | expr op expr | "(" expr ")"
        // number is left undefined here, although it could be defined as a sequence of digits;
// but in practice, is more efficient to collect numbers (and other tokens) before
// actual parsing, in a step called 'lexical analysis'.
// 'op', 'expr' are not tokens, they are structural elements of a grammar;
// aka 'nonterminal symbols'.
        // to produce parsed text, you start with the 'start symbol' (one of the nonterminals),
// and apply grammar rules until only tokens remain.
// EBNF
// Extended BNF: allows optional elements and repetition;
// ?: 0..1
// *: 0..n
// +: 1..n
// e.g. numberList ::= number ( "," numberList )?
// or numberList ::= number ( "," number )*
// arithmetic expr. rules with operator precedence support:
// expr ::= term ( ( "+" | "-" ) expr )? // term with optional (op expr)
// term ::= factor ( "*" factor )* // factor with optional many (* factor) // n.b. forgot "/"
// factor ::= number | "(" expr ")" // number or (expr) // n.b. loop: expr defined through expr
}
// combining parser operations
def combiningParserOperations = {
// extend Parsers trait and
// define parsing operations, combined from given primitives:
// - match a token
// - ops alternative (|) // first op or second op or Failure/Error
// - sequence of ops (~) // ~ case class, similar to a pair
// - repeat an op (rep) // List // postfix '*'
// - optional op (opt) // Option class // postfix '?'
// e.g. simple arithmetic expressions parser
class ExprParser extends RegexParsers {
val number = "[0-9]+".r // regex for number tokens
def expr: Parser[Any] = term ~ opt(("+" | "-") ~ expr) // expr ::= term ( ( "+" | "-" ) expr )?
def term: Parser[Any] = factor ~ rep("*" ~ factor) // term ::= factor ( "*" factor )*
def factor: Parser[Any] = number | "(" ~ expr ~ ")" // factor ::= number | "(" expr ")"
}
val parser = new ExprParser
val result = parser.parseAll(parser.expr, "3-4*5")
println(result.get)
// ParseResult[Any] = [1.6] parsed: ((3~List())~Some((-~((4~List((*~5)))~None))))
}
// transforming parsing results
def transformingParsingResults = {
// you should transform intermediate outputs to a useful form, value or tree.
// ^^ combinator precedence lower than ~ and higher than |
// compute the value of an expression
class ExprParser extends RegexParsers {
val number = "[0-9]+".r
def factor: Parser[Int] = // number or (expr)
number ^^ { _.toInt } | // apply toInt to number.result
"(" ~ expr ~ ")" ^^ { case _ ~ e ~ _ => e } // drop parenthesis
def expr: Parser[Int] = // term with optional (op expr)
term ~ opt(("+" | "-") ~ expr) ^^ {
case t ~ None => t
case t ~ Some("+" ~ e) => t + e
case t ~ Some("-" ~ e) => t - e
}
def term: Parser[Int] = // factor with optional many (* factor)
factor ~ rep("*" ~ factor) ^^ {
case f ~ lst => f * lst.map(_._2).product
}
}
val parser = new ExprParser
val result = parser.parseAll(parser.expr, "3-4*5")
println(result.get)
// ParseResult[Int] = [1.6] parsed: -17
// compute the parsing tree: see below
}
// discarding tokens
def discardingTokens = {
// to do the match and then discard the token, use '~>', '<~' operators;
// <~ has a lower precedence than ~ and ~>
// e.g.: "if" ~> "(" ~> expr <~ ")" ~ expr
// this discards subexpr (")" ~ expr), not just ")"
// correct version would be
// "if" ~> "(" ~> (expr <~ ")") ~ expr
class ExprParser extends RegexParsers {
val number = "[0-9]+".r
def term: Parser[Int] = // factor with optional many (* factor)
factor ~ rep("*" ~> factor) ^^ {
case f ~ lst => f * lst.product // rep("*" ~> factor) become a list of factors, no "*" tokens
}
def factor: Parser[Int] = // number or (expr)
number ^^ { _.toInt } | // apply toInt to number.result
"(" ~> expr <~ ")" // drop parenthesis before forming op.result
def expr: Parser[Int] = // term with optional (op expr)
term ~ opt(("+" | "-") ~ expr) ^^ {
case t ~ None => t
case t ~ Some("+" ~ e) => t + e
case t ~ Some("-" ~ e) => t - e
}
}
val parser = new ExprParser
val result = parser.parseAll(parser.expr, "3-4*5")
println(result.get)
// ParseResult[Int] = [1.6] parsed: -17
}
// generating parse trees
def generatingParseTrees = {
// to build a compiler or interpreter you want to build up a parse tree
// done with case classes usually
class Expr
case class Number(value: Int) extends Expr
case class Operator(op: String, left: Expr, right: Expr) extends Expr
// parser should transform "3+4*5" into
// Operator("+", Number(3), Operator("*", Number(4), Number(5)))
class ExprParser extends RegexParsers {
val number = "[0-9]+".r
def term: Parser[Expr] = // replaced rep/list with option. why?
factor ~ opt("*" ~> factor) ^^ {
case a ~ None => a
case a ~ Some(b) => Operator("*", a, b)
}
def factor: Parser[Expr] = // number or (expr)
number ^^ { n => Number(n.toInt) } |
"(" ~> expr <~ ")"
            def expr: Parser[Expr] = // term with optional (op expr); this is the completion referenced in exercise 6
                term ~ opt(("+" | "-") ~ expr) ^^ {
                    case a ~ None => a
                    case a ~ Some(op ~ b) => Operator(op, a, b)
                }
}
val parser = new ExprParser
val result = parser.parseAll(parser.expr, "3-4*5")
println(result.get)
// ParseResult[Expr] = [1.6] parsed: Operator(-,Number(3),Operator(*,Number(4),Number(5)))
}
// avoiding left recursion
def avoidingLeftRecursion = {
// if parser func calls itself without consuming some input first: endless loop:
// def ones: Parser[Any] = ones ~ "1" | "1"
// left-recursive function
// two alternatives (three: use Packrat parser)
// def ones: Parser[Any] = "1" ~ ones | "1"
// def ones: Parser[Any] = rep1("1")
// it's a common problem.
// consider: 3-4-5
// the result should be ((3-4) - 5) = -6
// but using rule
// def expr: Parser[Expr] = term ~ opt(("+" | "-") ~ expr)
// we got (3) - (4-5) = 4
// turning rule around we would get the correct parse tree,
// but with left-recursive function
// def expr: Parser[Expr] = expr ~ opt(("+" | "-") ~ term)
// you need to collect the intermediate results and then combine them in the correct order
// easier with lists: collect -t or +t and then coll.sum
// def expr: Parser[Int] = term ~ rep(("+" | "-") ~ term)
class ExprParser extends RegexParsers {
val number = "[0-9]+".r
def term: Parser[Int] = // factor with optional many (* factor)
factor ~ rep("*" ~> factor) ^^ {
case f ~ lst => f * lst.product
}
def factor: Parser[Int] = // number or (expr)
number ^^ { _.toInt } |
"(" ~> expr <~ ")"
def expr: Parser[Int] = term ~ rep( // term with list(+/- term)
("+" | "-") ~ term ^^ {
case "+" ~ t => t
case "-" ~ t => -t
}) ^^ { case t ~ lst => t + lst.sum }
// def expr: Parser[Int] =
// term ~ opt(("+" | "-") ~ expr) ^^ {
// case t ~ None => t
// case t ~ Some("+" ~ e) => t + e
// case t ~ Some("-" ~ e) => t - e
// }
}
val parser = new ExprParser
val result = parser.parseAll(parser.expr, "3-4-5")
println(result.get)
// ParseResult[Int] = [1.6] parsed: -6
}
// more combinators
def moreCombinators = {
// https://www.scala-lang.org/files/archive/api/current/scala-parser-combinators/scala/util/parsing/combinator/Parsers.html#rep[T](p:=%3EParsers.this.Parser[T]):Parsers.this.Parser[List[T]]
// rep: 0..n repetitions
// repsep
// rep1
// repN
// chainl1
// def numberList = number ~ rep("," ~> number)
// def numberList = repsep(number, ",")
// into or >> : store to a variable
// p ^^^ v: for parsing literals "true" ^^^ true
// p ^? f : partial function f
// log(p)(str)
// guard(p) : useful for looking ahead
// def term = factor ~ rep("*" ~> factor)
        // def term = factor into { first => rep("*" ~> factor) ^^ { first * _.product } }
// def factor = log(number)("number") ^^ { _.toInt } ...
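        // Editor's sketch (standard scala-parser-combinators API assumed): a few of
        // the combinators listed above in runnable form.
        class DemoParser extends RegexParsers {
            val number: Parser[Int] = log("[0-9]+".r ^^ { _.toInt })("number")
            // repsep: numbers separated by commas, separators discarded
            def numberList: Parser[List[Int]] = repsep(number, ",")
            // ^^^ maps a matched literal to a fixed value
            def bool: Parser[Boolean] = "true" ^^^ true | "false" ^^^ false
            // guard parses without consuming input -- useful for lookahead
            def lookahead: Parser[Int] = guard(number)
            // chainl1 builds a left-associative chain: ((t0 ± t1) ± t2) ...
            def sum: Parser[Int] = chainl1(number, ("+" | "-") ^^ {
                case "+" => (a: Int, b: Int) => a + b
                case "-" => (a: Int, b: Int) => a - b
            })
        }
        val demo = new DemoParser
        demo.parseAll(demo.numberList, "1, 2, 3") // parsed: List(1, 2, 3)
        demo.parseAll(demo.sum, "3-4-5")          // parsed: -6 (left-associative)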
}
// avoiding backtracking
def avoidingBacktracking = {
// backtracking: processing (p | q) if p fails, the parser tries q on the same input.
// also happens when there is a failure in an 'opt' or 'rep';
// backtracking can be inefficient
// consider (3+4)*5 parsing with rules:
// def expr = term ~ ("+" | "-") ~ expr | term
// def term = factor ~ "*" ~ term | factor
// def factor = "(" ~ expr ~ ")" | number
// while parsing expr ( term ~ ("+" | "-") ~ expr | term )
// term matches the entire input, then match for +/- fails and the
// compiler backtracks to the second alternative, parsing term again
// it is often possible to rearrange the grammar rules to avoid backtracking
// def expr = term ~ opt(("+" | "-") ~ expr)
// def term = factor ~ rep("*" ~ factor)
// def factor = "(" ~ expr ~ ")" | number
// then you can use the '~!' operator to express that there is no need to backtrack
class ExprParser extends RegexParsers {
val number = "[0-9]+".r
def expr: Parser[Any] = term ~ opt(("+" | "-") ~! expr) // always should be expr after +/-
def term: Parser[Any] = factor ~ rep("*" ~! factor) // always a factor after *
def factor: Parser[Any] = "(" ~! expr ~! ")" | number // always an expr inside parenthesis
}
val parser = new ExprParser
val result = parser.parseAll(parser.expr, "(3+4)-5")
println(result.get)
// ParseResult[Any] = [1.8] parsed: (((((~((3~List())~Some((+~((4~List())~None)))))~))~List())~Some((-~((5~List())~None))))
}
// packrat parsers
def packratParsers = {
// parsing algorithm that caches previous parse results.
// advantages:
// - parse time is guaranteed to be proportional to the length of the input;
// - the parser can accept left-recursive grammars;
// to use packrat parser:
// mix in PackratParsers into your parser;
// use val/lazy val for parser functions;
// parser functions return : PackratParser[T];
// use a PackratReader and supply a parseAll method;
class OnesPackratParser extends RegexParsers with PackratParsers {
lazy val ones: PackratParser[Any] = ones ~ "1" | "1"
def parseAll[T](p: Parser[T], in: String): ParseResult[T] =
phrase(p)(new PackratReader(new CharSequenceReader(in)))
}
val parser = new OnesPackratParser
val result = parser.parseAll(parser.ones, "111")
println(result.get)
// ParseResult[Any] = [1.4] parsed: ((1~1)~1)
}
// what exactly are parsers
def whatExactlyAreParsers = {
// Parser[T] is a function(r: Reader[Elem]): ParseResult[T]
// Elem is an abstract type inside Parsers trait;
// Elem is a Char in RegexParsers trait; is a Token in StdTokenParsers;
// Reader[Elem] reads a sequence of elems;
// parser returns one of three options: Success[T], Failure, Error;
// Error terminates the parser;
// error happens when: p ~! q fails to match q (after p), commit(p) fails, err(msg) invoked;
// Failure is a failure to math, normally triggers alternatives in an enclosing |;
// Sucess[T] has a 'result: T'; has a 'next: Reader[Elem]';
// consider
class ExprParser extends RegexParsers {
val number = "[0-9]+".r // implicit conversion to Parser[String] function
def expr: Parser[Any] = number | "(" ~ expr ~ ")" // (p | q) is a combined function
}
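        // Editor's sketch: since Parser[T] really is a function Reader[Elem] => ParseResult[T],
        // it can be applied to a reader directly (for RegexParsers, Elem = Char).
        val parser = new ExprParser
        val result = parser.expr(new CharSequenceReader("(42)"))
        // result is a Success whose 'next' points at the unconsumed rest of the input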
}
// regex parsers
def regexParsers = {
// provides two implicit conversions
// Parser[String] from a literal "foo";
// Parser[String] from a regexp "bar".r
// by default, regex parsers skip whitespaces;
// you may override val whiteSpace = ...;
// JavaTokenParsers trait extends RegexParsers and specifies five tokens:
// ident, wholeNumber, decimalNumber, stringLiteral, floatingPointNumber;
// none of them correspond exactly to their Java forms.
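        // Editor's sketch: JavaTokenParsers in use, plus an overridden whiteSpace that
        // (as an assumption, not from the book) also skips '#' line comments.
        import scala.util.parsing.combinator.JavaTokenParsers
        class NumberParser extends JavaTokenParsers {
            override val whiteSpace: Regex = """(\s|#.*)+""".r
            def num: Parser[Double] = floatingPointNumber ^^ { _.toDouble }
        }
        val np = new NumberParser
        np.parseAll(np.num, "# approximately pi\n3.14") // parsed: 3.14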
}
// token-based parsers
def tokenBasedParsers = {
// use a Reader[Token] instead of a Reader[Char];
import scala.util.parsing.combinator.token.Tokens
// subtype
import scala.util.parsing.combinator.token.StdTokens
// defines four types of tokens:
// Identifier, Keyword, NumericLit, StringLit
new StandardTokenParsers
// provides parser for these tokens
// extending this parser you can add any reserved words and tokens
class MLP extends StandardTokenParsers {
lexical.reserved += ("break", "case") // becomes a Keyword, not Identifier
lexical.delimiters += ("=", "!=")
}
// ident function parses an identifier;
// numericLit, stringLit parse literals;
class ExprParser extends StandardTokenParsers {
lexical.delimiters += ("+", "-", "*", "(", ")")
def expr: Parser[Any] = term ~ rep(("+" | "-") ~ term)
def term: Parser[Any] = factor ~ rep("*" ~> factor)
def factor: Parser[Any] = numericLit | "(" ~> expr <~ ")"
def parseAll[T](p: Parser[T], in: String): ParseResult[T] =
phrase(p)(new lexical.Scanner(in))
}
// to process languages with different tokens, adapt the token parser.
// extend StdLexical and override the 'token' method;
// extend StdTokenParsers and override lexical
// e.g. overriding 'token' method using regexp
class MyLexical extends StdLexical {
def regex(r: Regex): Parser[String] = new Parser[String] {
def apply(in: Input) = r.findPrefixMatchOf(in.source.subSequence(in.offset, in.source.length)
) match {
case Some(matched) =>
Success(in.source.subSequence(in.offset, in.offset + matched.end).toString, in.drop(matched.end))
case None =>
Failure("string matching regex '$r' expected but ${in.first} found", in)
}}
override def token: Parser[Token] = {
regex("[a-z][a-zA-Z0-9]*".r) ^^ { processIdent(_) } |
regex("0|[1-9][0-9]*".r) ^^ { NumericLit(_) } |
???
}
}
}
// error handling
def errorHandling = {
// you want a message indicating where the failure occured;
// the parser reported the last visited failure point;
// e.g. in rule
// def value = numericLit | "true" | "false"
// input failed to match "false" will be reported, if none is matched;
// you can add explicit failure
// def value = numericLit | "true" | "false" | failure("not a valid value")
// or
// def value = opt(sign) ~ digits withFailureMessage "not a valid number"
// when the parser fails, parseAll returns Failure result with 'msg' property;
// 'next' property: Reader pointer to unconsumed input;
// next.pos.line, next.pos.column: failed position;
// next.first : lexical element of failure, Char for RegexParsers;
// you can add 'positioned' combinator to save positions to a parse result
// def vardecl = "var" ~ positioned(ident ^^ { Ident(_)}) ~ "=" ~ value
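        // Editor's sketch: surfacing the failure information described above.
        class ValueParser extends RegexParsers {
            def value: Parser[Any] = "[0-9]+".r | "true" | "false" | failure("not a valid value")
        }
        val vp = new ValueParser
        vp.parseAll(vp.value, "maybe") match {
            case vp.Success(result, _) => println(result)
            case noSuccess: vp.NoSuccess =>
                println(s"${noSuccess.msg} at line ${noSuccess.next.pos.line}, column ${noSuccess.next.pos.column}")
        }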
}
}
object Parsing_Exercises {
// 1. Add / and % operations to the arithmetic expression evaluator.
def ex1 = {
class ExprParser extends RegexParsers {
val number = "[0-9]+".r
// factor with list(*/% factor)
def term: Parser[Int] = factor ~ rep(("*" | "/" | "%") ~ factor) ^^ {
case f ~ lst => (f /: lst)((acc, pair) => pair._1 match {
case "*" => acc * pair._2
case "/" => acc / pair._2
case "%" => acc % pair._2
})
}
// number or (expr)
def factor: Parser[Int] = number ^^ { _.toInt } | "(" ~> expr <~ ")"
// term with list(+/- term)
def expr: Parser[Int] = term ~ rep(
("+" | "-") ~ term ^^ {
case "+" ~ t => t
case "-" ~ t => -t
}) ^^ { case t ~ lst => t + lst.sum }
}
class ExprParser2 extends ExprParser {
val mul: (Int, Int) => Int = (x, y) => x * y
val div: (Int, Int) => Int = (x, y) => x / y
val mod: (Int, Int) => Int = (x, y) => x % y
// factor with list(*/% factor)
override def term: Parser[Int] = factor ~ rep(
("*" | "/" | "%") ~ factor ^^ {
case "*" ~ n => (mul, n)
case "/" ~ n => (div, n)
case "%" ~ n => (mod, n)
}) ^^ { case x ~ lst => (x /: lst)((acc, elem) => elem._1(acc, elem._2)) }
}
// test
val parser = new ExprParser
val result = parser.parseAll(parser.expr, "2*3-4/2-5%2") // 6 - 2 - 1
assert(result.get == 3)
result // ParseResult[Int] = [1.12] parsed: 3
}
// 2. Add a ^ operator to the arithmetic expression evaluator.
// As in mathematics, ^ should have a higher precedence than multiplication,
// and it should be right-associative. That is, 4^2^3 should be 4^(2^3), or 65 536.
def ex2 = {
class ExprParser extends RegexParsers {
val number = "[0-9]+".r
// expr = term with list(+/- term)
def expr: Parser[Int] = term ~ rep(
("+" | "-") ~ term ^^ {
case "+" ~ t => t
case "-" ~ t => -t
}) ^^ { case t ~ lst => t + lst.sum }
// term = exponent with list(*/% exponent)
def term : Parser[Int] = exponent ~ rep(("*" | "/" | "%") ~ exponent) ^^ {
case f ~ lst => (f /: lst)((acc, pair) => pair._1 match {
case "*" => acc * pair._2
case "/" => acc / pair._2
case "%" => acc % pair._2
})
}
// exponent = factor with list(^ factor)
def exponent: Parser[Int] = factor ~ rep("^" ~> factor) ^^ {
case n ~ lst => (n :: lst).reduceRight((a, b) => math.pow(a.toDouble, b.toDouble).toInt)
}
// factor = number or (expr)
def factor: Parser[Int] = number ^^ { _.toInt } | "(" ~> expr <~ ")"
}
// test
def eval(e: String) = {
val parser = new ExprParser
parser.parseAll(parser.expr, e)
}
assert(eval("2*3^4").get == 162)
assert(eval("4^2^3 - (2*3-4/2-5%2)").get == 65533)
}
// 3. Write a parser that parses a list of integers (such as (1, 23, -79)) into a List[Int].
def ex3 = {
class ListParser extends RegexParsers {
def expr: Parser[List[Int]] = "(" ~> repsep(number, ",") <~ ")"
def number: Parser[Int] = numberRegex ^^ { _.toInt }
            val numberRegex: Regex = """-?\d+""".r
}
// test
def eval(e: String) = {
val parser = new ListParser
parser.parseAll(parser.expr, e)
}
assert(eval("(1, 23, -79)").get == List(1, 23, -79))
}
// 4. Write a parser that can parse date and time expressions in ISO 8601.
// Your parser should return a java.time.LocalDateTime object.
def ex4 = {
import java.time.{LocalDateTime, OffsetDateTime}
// Date and time in UTC
// 2019-03-22T09:19:51+00:00
// 2019-03-22T09:19:51Z
// case class DateTime(year: String, month: String, day: String,
// hour: String, minutes: String, seconds: String,
// offset: String)
//
// class DateTimeParser extends RegexParsers {
        // val year: Regex = """\d{4}""".r
        // val month: Regex = """\d{2}""".r
        // val day: Regex = """\d{2}""".r
        // val hour: Regex = """\d{2}""".r
        // val minutes: Regex = """\d{2}""".r
        // val seconds: Regex = """\d{2}""".r
        // val offset: Regex = """(\+|\-)\d{2}:\d{2}""".r
//
// def expr: Parser[DateTime] = ???
// }
// TODO: implement regex parser for DateTime ISO 8601
class DateTimeParser extends RegexParsers {
def expr: Parser[LocalDateTime] = ".+".r ^^ { OffsetDateTime.parse(_).toLocalDateTime }
}
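        // Editor's sketch for the TODO above: a field-by-field regex parser built from
        // the commented-out skeleton; the offset is parsed but discarded, which is what
        // the tests below expect.
        class FieldWiseParser extends RegexParsers {
            def num(digits: Int): Parser[Int] = s"\\d{$digits}".r ^^ { _.toInt }
            def expr: Parser[LocalDateTime] =
                (num(4) <~ "-") ~ (num(2) <~ "-") ~ (num(2) <~ "T") ~
                    (num(2) <~ ":") ~ (num(2) <~ ":") ~ num(2) <~ ("Z" | """[+-]\d{2}:\d{2}""".r) ^^ {
                    case y ~ mo ~ d ~ h ~ mi ~ s => LocalDateTime.of(y, mo, d, h, mi, s)
                }
        }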
// test
def eval(e: String) = {
val parser = new DateTimeParser
val res = parser.parseAll(parser.expr, e)
println(s"input: '$e', parsed: '${res.get.toString}'")
res
}
assert(eval("2019-03-22T09:19:51+00:00").get.toString == "2019-03-22T09:19:51")
assert(eval("2019-03-22T09:19:51Z").get.toString == "2019-03-22T09:19:51")
}
// 5. Write a parser that parses a subset of XML.
// Handle tags of the form
// <ident>...</ident> or <ident/>
// Tags can be nested.
// Handle attributes inside tags.
// Attribute values can be delimited by single or double quotes.
// You don’t need to deal with character data (that is, text inside tags or CDATA sections).
// Your parser should return a Scala XML Elem value.
// The challenge is to reject mismatched tags. Hint: into, accept.
def ex5 = {
import scala.xml.{XML, Elem => xmlElem}
// TODO: implement regex parser for XML subset
class XMLParser extends RegexParsers {
def expr: Parser[xmlElem] = """.+""".r ^^ { XML.loadString }
}
// test
def eval(e: String) = {
val parser = new XMLParser
val res = parser.parseAll(parser.expr, e)
println(s"input: '$e', parsed: '${res.get.toString}'")
res
}
eval("""<ident><a href="foo">bar</a><br/></ident>""") // parsed: '<ident><a href="foo">bar</a><br/></ident>'
Try(eval("""<ident><a href="foo">bar</b><br/></ident>"""))
}
// 6. Assume that the parser in
// Section 20.5, “Generating Parse Trees,” on page 309
// is completed with
// class ExprParser extends RegexParsers {
// def expr: Parser[Expr] = (term ~ opt(("+" | "-") ~ expr)) ^^ {
// case a ~ None => a
// case a ~ Some(op ~ b) => Operator(op, a, b)
// }
// ...
// }
// Unfortunately, this parser computes an incorrect expression tree —
// operators with the same precedence are evaluated right-to-left.
// Modify the parser so that the expression tree is correct.
// For example, 3-4-5 should yield an
// Operator("-", Operator("-", 3, 4), 5)
def ex6 = {
class Expr
case class Operator(op: String, left: Expr, right: Expr) extends Expr
case class Number(value: Int) extends Expr
implicit def intToExpr(n: Int): Number = Number(n)
class ExprParserWrong extends RegexParsers {
val number: Regex = "[0-9]+".r
// input: '3-4-5', parsed: 'Operator(-,Number(3),Operator(-,Number(4),Number(5)))'
def expr: Parser[Expr] = term ~ opt(("+" | "-") ~ expr) ^^ {
case a ~ None => a
case a ~ Some(op ~ b) => Operator(op, a, b)
}
def term: Parser[Expr] =
factor ~ opt("*" ~> factor) ^^ {
case a ~ None => a
case a ~ Some(b) => Operator("*", a, b)
}
def factor: Parser[Expr] =
number ^^ { n => Number(n.toInt) } |
"(" ~> expr <~ ")"
}
class ExprParser extends ExprParserWrong {
override def expr: Parser[Expr] = term ~ rep(sumterm) ^^ {
case t ~ lst => (t /: lst)((acc, elem) => elem.asInstanceOf[Operator].copy(left = acc))
}
def sumterm: Parser[Expr] = ("+" | "-") ~ term ^^ {
case op ~ term => Operator(op, 0, term)
}
}
// test
def eval(e: String) = {
val parser = new ExprParser
val res = parser.parseAll(parser.expr, e)
println(s"input: '$e', parsed: '${res.get.toString}'")
res
}
assert(eval("3-4-5").get == Operator("-", Operator("-", 3, 4), 5))
}
// 7. Suppose in
// Section 20.6, “Avoiding Left Recursion,” on page 310,
// we first parse an expr into a list of ~ with operations and values:
// def expr: Parser[Int] = term ~ rep(("+" | "-") ~ term) ^^ {...}
// To evaluate the result, we need to compute
// ((t0 ± t1) ± t2) ± ...
// Implement this computation as a fold (see Chapter 13).
def ex7 = {
class ExprParser extends RegexParsers {
val number = "[0-9]+".r
// presumably, 'foldLeft' should be used, but he said 'fold':
// operations order will be nondeterministic
def expr: Parser[Int] = term ~ rep(("+" | "-") ~ term) ^^ {
case t ~ lst => lst.fold( this.~("", t) )((a, b) => add(a, b))._2
}
private def add(a: ~[String, Int], b: ~[String, Int]): String ~ Int = this.~(a._1, b match {
case "-" ~ n => a._2 - n
case _ ~ n => a._2 + n
})
def term: Parser[Int] =
factor ~ rep("*" ~> factor) ^^ {
case f ~ lst => f * lst.product
}
def factor: Parser[Int] =
number ^^ { _.toInt } |
"(" ~> expr <~ ")"
}
// test
def eval(e: String) = {
val parser = new ExprParser
val res = parser.parseAll(parser.expr, e)
println(s"input: '$e', parsed: '${res.get.toString}'")
res
}
assert(eval("3-4-5").get == -6)
}
// 8. Add variables and assignment to the calculator program.
// Variables are created when they are first used.
// Uninitialized variables are zero.
// To print a value, assign it to the special variable 'out'
def ex8 = {
class ExprParser extends RegexParsers {
type Name = String
// script = list(assignment)
def script: Parser[Int] = repsep(assignment, ";") ^^ { lst => lst.lastOption.getOrElse(0) }
// assignment = name=expr
def assignment: Parser[Int] = (name <~ "=") ~ expr ^^ {
case n ~ e => updateVar(n, e)
}
// expr = term with list(+/-term)
def expr: Parser[Int] = term ~ rep(
("+" | "-") ~ term ^^ { case "-" ~ n => -n; case _ ~ n => n }
) ^^ { case t ~ lst => (t /: lst)(_ + _) }
// term = factor with list(*/% factor)
def term: Parser[Int] = factor ~ rep("*" ~> factor) ^^ { case f ~ lst => f * lst.product }
// factor = name or number or (expr)
def factor: Parser[Int] = number | name ^^ { getVar } | "(" ~> expr <~ ")"
def name: Parser[Name] = """[a-z]+""".r
            def number: Parser[Int] = """\d+""".r ^^ { _.toInt }
private var env: Map[String, Int] = Map.empty.withDefaultValue(0)
def getVar(n: Name): Int = env(n)
def updateVar(n: Name, v: Int): Int = {
env = env.updated(n, v);
if (n == "out") println(s"out: $v")
v
}
}
// test
def eval(e: String) = {
val parser = new ExprParser
val res = parser.parseAll(parser.script, e)
println(s"input: '$e', parsed: '${res.get.toString}'")
res
}
assert(eval("b = (a + 3 - 1) * 4; out = b").get == 8)
}
// 9. Extend the preceding exercise into a parser for a programming language that has
// variable assignments, Boolean expressions, and if/else and while statements.
def ex9 = {
object ScriptParser extends StandardTokenParsers {
lexical.reserved ++= "while if else".split(" ")
lexical.delimiters ++= "; = < > == ( ) { } + - * %".split(" ")
def apply(in: String): Int = parseAll(block, in) match {
case Success(res, inp) => {
println(s"input: '$in', parsed: '${res}")
res.value
}
case fail: NoSuccess => sys.error(s"msg: ${fail.msg}; next: ${fail.next.pos}")
}
def parseAll(p: Parser[Expression], in: String): ParseResult[Expression] =
phrase(p)(new lexical.Scanner(in))
def block: Parser[Expression] = repsep(statement | assignment, ";") ^^ { blocks }
def assignment: Parser[Expression] = (ident <~ "=") ~ expr ^^ {
case n ~ e => Assignment(n, e)
}
def statement: Parser[Expression] = (
(("while" | "if") <~ "(") ~ (condition <~ ")") ~
("{" ~> block <~ "}") ~
opt("else" ~> "{" ~> block <~ "}" )
) ^^ {
case op ~ cond ~ tblock ~ fblock => ConditionalOp(op, cond, tblock, fblock)
}
def condition: Parser[Expression] = expr ~ ("<" | ">" | "==") ~ expr ^^ {
case left ~ op ~ right => Operator(op, left, right)
}
def expr: Parser[Expression] = term ~ rep(("+" | "-") ~ term) ^^ { operators }
def term: Parser[Expression] = factor ~ rep(("*" | "%") ~ factor) ^^ { operators }
def factor: Parser[Expression] = numericLit ^^ { x => Number(x.toInt) } |
ident ^^ { Variable } |
"(" ~> expr <~ ")"
private def blocks(xs: Seq[Expression]) = xs match {
case Nil => Number(0)
                case head :: Nil => Operator(";", Number(0), head)
case _ => xs.reduceLeft((a, b) => Operator(";", a, b))
}
private def operators(x: ~[Expression, Seq[~[String, Expression]]]) = x match {
case t ~ lst => (t /: lst)((left, elem) => elem match {
case op ~ right => Operator(op, left, right)
})
}
object Environment {
private var env: Map[String, Int] = Map.empty.withDefaultValue(0)
def apply(name: String): Int = env(name)
def unapply(name: String): Option[Int] = Some(env(name))
def update(name: String, value: Int): Unit = {
env = env.updated(name, value)
if (name == "out") println(s"out = '$value'")
}
}
abstract class Expression { def value: Int }
case class Number(value: Int) extends Expression {
override def toString: String = value.toString
}
case class Variable(name: String) extends Expression {
def value: Int = Environment(name)
override def toString: String = s"(name: $name, value: ${Environment(name)})"
}
case class Assignment(name: String, e: Expression) extends Expression {
override def value: Int = {
val res = e.value
Environment(name) = res
res
}
}
case class Operator(op: String, left: Expression, right: Expression) extends Expression {
override def value: Int = op match {
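                    // ";" evaluates the left expression for its side effects, then returns the right value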
case ";" => left.value; right.value
case "*" => left.value * right.value
case "%" => left.value % right.value
case "+" => left.value + right.value
case "-" => left.value - right.value
case "==" => if (left.value == right.value) 1 else 0
case "<" => if (left.value < right.value) 1 else 0
case ">" => if (left.value > right.value) 1 else 0
}
}
case class ConditionalOp(op: String, condition: Expression, tblock: Expression,
fblock: Option[Expression]) extends Expression {
override def value: Int = op match {
case "while" => {
var res = 0
while(condition.value != 0) res = tblock.value
res
}
case "if" => if(condition.value != 0) tblock.value else fblock.fold(0)(_.value)
}
}
}
// test
def eval(s: String) = ScriptParser(s)
val script =
"""
|x = 11;
|while (x > 0) {
| x = x - 1;
| if (x % 2 == 0) { even = even + x }
| else { odd = odd + x };
| out = odd; out = even
|}
""".stripMargin.trim
assert(eval(script) == 30)
}
// 10. Add function definitions to the programming language of the preceding exercise.
def ex10 = {
object ScriptParser extends StandardTokenParsers {
lexical.reserved ++= "def while if else".split(" ")
lexical.delimiters ++= "=> ; = < > == ( ) { } + - * %".split(" ")
def apply(in: String): Int = parseAll(block, in) match {
case Success(res, inp) => {
println(s"input: '$in', parsed: '${res}")
res.value
}
case fail: NoSuccess => sys.error(s"msg: ${fail.msg}; next: ${fail.next.pos}")
}
def parseAll(p: Parser[Expression], in: String): ParseResult[Expression] =
phrase(p)(new lexical.Scanner(in))
def block: Parser[Expression] = repsep(funcdef | statement | assignment, ";") ^^ { blocks }
// def oddAndEvenSum (x) => { ... }
def funcdef: Parser[Expression] = (("def" ~> ident) ~
("(" ~> repsep(ident, ",") <~ ")") ~
"=>" ~
("{" ~> block <~ "}") ) ^^ {
case fname ~ params ~ "=>" ~ block => FunctionDef(fname, params, block)
}
def assignment: Parser[Expression] = (ident <~ "=") ~ expr ^^ {
case n ~ e => Assignment(n, e)
}
def statement: Parser[Expression] = (
(("while" | "if") <~ "(") ~ (condition <~ ")") ~
("{" ~> block <~ "}") ~
opt("else" ~> "{" ~> block <~ "}" )
) ^^ {
case op ~ cond ~ tblock ~ fblock => ConditionalOp(op, cond, tblock, fblock)
}
def condition: Parser[Expression] = expr ~ ("<" | ">" | "==") ~ expr ^^ {
case left ~ op ~ right => Operator(op, left, right)
}
def expr: Parser[Expression] = term ~ rep(("+" | "-") ~ term) ^^ { operators }
def term: Parser[Expression] = factor ~ rep(("*" | "%") ~ factor) ^^ { operators }
def factor: Parser[Expression] = numericLit ^^ { x => Number(x.toInt) } |
funcapp |
ident ^^ { Variable } |
"(" ~> expr <~ ")"
// oddAndEvenSum(11)
def funcapp: Parser[Expression] = ident ~ ("(" ~> repsep(numericLit, ",") <~ ")") ^^ {
case fname ~ params => FunctionApp(fname, params.map(_.toInt))
}
private def blocks(xs: Seq[Expression]) = xs match {
case Nil => Number(0)
                case head :: Nil => Operator(";", Number(0), head)
case _ => xs.reduceLeft((a, b) => Operator(";", a, b))
}
private def operators(x: ~[Expression, Seq[~[String, Expression]]]) = x match {
case t ~ lst => (t /: lst)((left, elem) => elem match {
case op ~ right => Operator(op, left, right)
})
}
object Environment {
private var env: Map[String, Expression] = Map.empty[String, Expression].withDefaultValue(Number(0))
def apply(name: String): Expression = env(name)
def unapply(name: String): Option[Expression] = Some(env(name))
def update(name: String, value: Expression): Unit = {
env = env.updated(name, value)
if (name == "out") println(s"out = '$value'")
}
}
abstract class Expression { def value: Int }
case class Number(value: Int) extends Expression {
override def toString: String = value.toString
}
case class Variable(name: String) extends Expression {
def value: Int = Environment(name).value
override def toString: String = s"(name: $name, value: ${Environment(name)})"
}
case class Assignment(name: String, e: Expression) extends Expression {
override def value: Int = {
val res = e.value
Environment(name) = Number(res)
res
}
}
case class Operator(op: String, left: Expression, right: Expression) extends Expression {
override def value: Int = op match {
case ";" => left.value; right.value
case "*" => left.value * right.value
case "%" => left.value % right.value
case "+" => left.value + right.value
case "-" => left.value - right.value
case "==" => if (left.value == right.value) 1 else 0
case "<" => if (left.value < right.value) 1 else 0
case ">" => if (left.value > right.value) 1 else 0
}
}
case class ConditionalOp(op: String, condition: Expression, tblock: Expression,
fblock: Option[Expression]) extends Expression {
override def value: Int = op match {
case "while" => {
var res = 0
while(condition.value != 0) res = tblock.value
res
}
case "if" => if(condition.value != 0) tblock.value else fblock.fold(0)(_.value)
}
}
case class FunctionDef(name: String, params: Seq[String], block: Expression) extends Expression {
override def value: Int = {
Environment(name) = this
0
}
}
case class FunctionApp(name: String, params: Seq[Int]) extends Expression {
override def value: Int = {
val f = Environment(name).asInstanceOf[FunctionDef]
// TODO: add stack frames
f.params.zip(params).foreach { case (k, v) => Environment(k) = Number(v) }
f.block.value
}
}
}
// test
def eval(s: String) = ScriptParser(s)
val script =
"""
|def oddAndEvenSum (x) => {
| while (x > 0) {
| x = x - 1;
| if (x % 2 == 0) { even = even + x }
| else { odd = odd + x };
| out = odd; out = even
| }
|};
|out = oddAndEvenSum(11)
""".stripMargin.trim
assert(eval(script) == 30)
}
}
| vasnake/scala-for-the-impatient | src/main/scala/Chapter20/Parsing.scala | Scala | gpl-3.0 | 43,962 |
/*
* MUSIT is a museum database to archive natural and cultural history data.
* Copyright (C) 2016 MUSIT Norway, part of www.uio.no (University of Oslo)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License,
* or any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package repositories.shared.dao
import java.util.UUID
import java.sql.{Timestamp => JSqlTimestamp}
import models.storage.event.EventTypeId
import models.storage.nodes.StorageType
import no.uio.musit.models.ObjectTypes.ObjectType
import no.uio.musit.models._
import no.uio.musit.time.DefaultTimezone
import org.joda.time.DateTime
import play.api.db.slick.HasDatabaseConfig
import play.api.libs.json.{JsValue, Json}
import slick.jdbc.JdbcProfile
/**
* Working with some of the DAOs require implicit mappers to/from strongly
* typed value types/classes.
*/
trait ColumnTypeMappers { self: HasDatabaseConfig[JdbcProfile] =>
import profile.api._
implicit val storageNodeDbIdMapper: BaseColumnType[StorageNodeDatabaseId] =
MappedColumnType.base[StorageNodeDatabaseId, Long](
snid => snid.underlying,
longId => StorageNodeDatabaseId(longId)
)
implicit val storageNodeIdMapper: BaseColumnType[StorageNodeId] =
MappedColumnType.base[StorageNodeId, String](
sid => sid.asString,
strId => StorageNodeId(UUID.fromString(strId))
)
implicit val objectIdMapper: BaseColumnType[ObjectId] =
MappedColumnType.base[ObjectId, Long](
oid => oid.underlying,
longId => ObjectId(longId)
)
implicit val objectUuidMapper: BaseColumnType[ObjectUUID] =
MappedColumnType.base[ObjectUUID, String](
oid => oid.asString,
strId => ObjectUUID.unsafeFromString(strId)
)
implicit val eventIdMapper: BaseColumnType[EventId] =
MappedColumnType.base[EventId, Long](
eid => eid.underlying,
longId => EventId(longId)
)
implicit val actorIdMapper: BaseColumnType[ActorId] =
MappedColumnType.base[ActorId, String](
aid => aid.asString,
strId => ActorId(UUID.fromString(strId))
)
  implicit val storageTypeMapper: BaseColumnType[StorageType] =
MappedColumnType.base[StorageType, String](
storageType => storageType.entryName,
string => StorageType.withName(string)
)
implicit val eventTypeIdMapper: BaseColumnType[EventTypeId] =
MappedColumnType.base[EventTypeId, Int](
eventTypeId => eventTypeId.underlying,
id => EventTypeId(id)
)
implicit val objTypeMapper: BaseColumnType[ObjectType] =
MappedColumnType.base[ObjectType, String](
tpe => tpe.name,
str => ObjectType.unsafeFromString(str)
)
implicit val museumIdMapper: BaseColumnType[MuseumId] =
MappedColumnType.base[MuseumId, Int](
museumId => museumId.underlying,
id => MuseumId(id)
)
implicit val nodePathMapper: BaseColumnType[NodePath] =
MappedColumnType.base[NodePath, String](
nodePath => nodePath.path,
pathStr => NodePath(pathStr)
)
implicit val dateTimeMapper: BaseColumnType[DateTime] =
MappedColumnType.base[DateTime, JSqlTimestamp](
dt => new JSqlTimestamp(dt.getMillis),
jt => new DateTime(jt, DefaultTimezone)
)
implicit val jsonMapper: BaseColumnType[JsValue] =
MappedColumnType.base[JsValue, String](
jsv => Json.prettyPrint(jsv),
str => Json.parse(str)
)
}
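// Editor's sketch (hypothetical table, not part of MUSIT): with ColumnTypeMappers
// mixed into a Slick DAO (and profile.api._ imported), the strongly typed ids above
// can be used directly as column types:
//
//   class StorageNodeTable(tag: Tag) extends Table[(StorageNodeDatabaseId, MuseumId, DateTime)](tag, "STORAGE_NODE") {
//     val id       = column[StorageNodeDatabaseId]("STORAGE_NODE_ID", O.PrimaryKey, O.AutoInc)
//     val museumId = column[MuseumId]("MUSEUM_ID")
//     val updated  = column[DateTime]("UPDATED_DATE")
//     def * = (id, museumId, updated)
//   }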
| kpmeen/musit | service_storagefacility/app/repositories/shared/dao/ColumnTypeMappers.scala | Scala | gpl-2.0 | 3,925 |
/*
* Copyright © 2014 spray-session
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spray
package routing
package session
import http.DateTime
private case class Session[T](map: Map[String, T], expires: Option[DateTime])
| gnieh/spray-session | src/main/scala/spray/routing/session/Session.scala | Scala | apache-2.0 | 730 |
package com.github.havarunner
import org.junit.runner.{Description, Runner}
import org.junit.runner.notification.RunNotifier
import scala.collection.JavaConversions._
import Validations._
import java.lang.reflect.{Field, InvocationTargetException}
import org.junit.runner.manipulation.{Filter, Filterable}
import com.github.havarunner.HavaRunner._
import com.github.havarunner.exception.TestDidNotRiseExpectedException
import com.github.havarunner.Parser._
import com.github.havarunner.Reflections._
import org.junit.internal.AssumptionViolatedException
import com.github.havarunner.TestInstanceCache._
import com.github.havarunner.ConcurrencyControl._
import com.github.havarunner.ExceptionHelper._
import com.github.havarunner.RunnerHelper._
import org.junit.runners.model.Statement
import org.junit.rules.TestRule
import org.junit.runner.notification.Failure
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent._
import scala.concurrent.duration._
/**
* Usage: @org.junit.runner.RunWith(HavaRunner.class)
*
* @author Lauri Lehmijoki
*/
class HavaRunner(startingPoint: Class[_]) extends Runner with Filterable {
private var filterOption: Option[Filter] = None // The Filterable API requires us to use a var
def getDescription = {
val description = Description.createSuiteDescription(startingPoint)
tests.iterator() foreach (test => description.addChild(describeTest(test)))
description
}
def run(notifier: RunNotifier) {
reportIfSuite(tests).foreach(println)
val afterAllFutures = tests
.groupBy(_.groupCriterion)
.map {
case (_, testsAndParameters) => runTestsOfSameGroup(testsAndParameters, notifier)
}
waitAndHandleRestOfErrors(afterAllFutures)
}
def filter(filter: Filter) {
this.filterOption = Some(filter)
}
private[havarunner] val classesToTest = findDeclaredClasses(startingPoint)
private[havarunner] lazy val tests: java.lang.Iterable[TestAndParameters] =
parseTestsAndParameters(classesToTest).filter(acceptTest(_, filterOption))
}
/**
* Place here code that is directly related to running the tests.
*/
private object HavaRunner {
def runTestsOfSameGroup(testsAndParameters: Iterable[TestAndParameters], notifier: RunNotifier): Future[Any] = {
val runnableTests = handleIgnoredAndInvalid(testsAndParameters, notifier)
val resultsOfSameGroup: Iterable[Future[Either[FailedConstructor, InstantiatedTest]]] = runnableTests.map(implicit tp => schedule(tp, notifier, describeTest))
Future.sequence(resultsOfSameGroup) map {
testResults => runAfterAlls(testResults)(runnableTests)
}
}
def runAfterAlls(result: Iterable[Either[FailedConstructor, InstantiatedTest]])(implicit testsAndParameters: Iterable[TestAndParameters]) {
result
.map(_.right)
.flatMap(_.toOption)
.headOption
.foreach(instantiatedTest => {
implicit val testAndParams: TestAndParameters = testsAndParameters.head
withThrottle {
// It suffices to run the @AfterAlls against any instance of the group
testAndParams.afterAll.foreach(invoke(_)(instantiatedTest.testInstance))
}
})
}
def waitAndHandleRestOfErrors(afterAllFutures: Iterable[Future[Any]]) {
val allTests = Future.sequence(afterAllFutures)
var failure: Option[Throwable] = None
allTests onFailure {
case t: Throwable => failure = Some(t) // Unlift the exception from the Future container, so that we can handle it in the main thread
}
Await.result(allTests, 2 hours)
failure.foreach(throw _) // If @AfterAll methods throw exceptions, re-throw them here
}
def handleIgnoredAndInvalid(testsAndParameters: Iterable[TestAndParameters], notifier: RunNotifier) = {
val ignoredTests = testsAndParameters.filter(_.ignored)
ignoredTests.foreach(ignoredTest =>
notifier fireTestIgnored describeTest(ignoredTest)
)
val invalidTests = testsAndParameters.filterNot(reportInvalidations(_).isEmpty)
invalidTests.foreach(implicit invalidTest =>
reportFailure(reportInvalidations)(describeTest, notifier)
)
testsAndParameters
.filterNot(ignoredTests.contains(_))
.filterNot(invalidTests.contains(_))
}
def schedule(implicit testAndParameters: TestAndParameters, notifier: RunNotifier, description: Description):
Future[Either[FailedConstructor, InstantiatedTest]] =
future {
withThrottle {
implicit val instance = testInstance
notifier fireTestStarted description
try {
runWithRules {
runTest
}
} catch {
case error: Throwable =>
handleException(error)
} finally {
notifier fireTestFinished description
}
Right(InstantiatedTest(testInstance))
}
} recover {
case errorFromConstructor: Throwable =>
handleException(errorFromConstructor) // We come here when instantiating the test object failed
Left(FailedConstructor())
}
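  // JUnit @Rule fields are applied by folding: each rule wraps the Statement built so
  // far, so the last rule in the list ends up as the outermost wrapper.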
def runWithRules(test: => Any)(implicit testAndParameters: TestAndParameters, testInstance: TestInstance) {
val inner = new Statement {
def evaluate() {
test
}
}
def applyRuleAndHandleException(rule: Field, accumulator: Statement) = {
val testRule: TestRule = ensureAccessible(rule).get(testInstance.instance).asInstanceOf[TestRule]
testRule.apply(accumulator, describeTest)
}
val foldedRules =
testAndParameters
.rules
.foldLeft(inner) {
(accumulator: Statement, rule: Field) =>
applyRuleAndHandleException(rule, accumulator)
}
foldedRules.evaluate()
}
def runTest(implicit testAndParameters: TestAndParameters, notifier: RunNotifier, description: Description, testInstance: TestInstance) {
try {
testAndParameters.before.foreach(invoke)
      maybeTimeouting { ensureAccessible(testAndParameters.testMethod).invoke(testInstance.instance) }
failIfExpectedExceptionNotThrown
} finally {
testAndParameters.after.foreach(invoke)
}
}
def handleException(e: Throwable)(implicit testAndParameters: TestAndParameters, notifier: RunNotifier, description: Description) {
Option(e) match {
case Some(exception) if exception.isInstanceOf[AssumptionViolatedException] =>
val msg = s"[HavaRunner] Ignored $testAndParameters because ${exception.getMessage}"
notifier fireTestAssumptionFailed new Failure(description, new AssumptionViolatedException(msg))
case Some(exception) if testAndParameters.expectedException.isDefined =>
val cause = exception match {
case e: InvocationTargetException => e.getCause
case e: Throwable => e
}
if (cause.getClass == testAndParameters.expectedException.get) {
// Expected exception. All ok.
} else {
notifier fireTestFailure new Failure(description, new TestDidNotRiseExpectedException(testAndParameters.expectedException.get, testAndParameters))
}
case Some(exception) if exception.isInstanceOf[InvocationTargetException] =>
handleException(exception.asInstanceOf[InvocationTargetException].getTargetException)
case Some(exception) =>
notifier fireTestFailure new Failure(description, exception)
}
}
def maybeTimeouting(op: => Any)(implicit testAndParameters: TestAndParameters) {
testAndParameters.timeout.map(timeout => {
val start = System.currentTimeMillis()
op
val duration = System.currentTimeMillis() - start
if (duration >= timeout) {
throw new RuntimeException(s"Test timed out after $duration milliseconds")
}
}).getOrElse(op)
}
def failIfExpectedExceptionNotThrown(implicit testAndParameters: TestAndParameters, notifier: RunNotifier, description: Description) {
testAndParameters.expectedException.foreach(expected =>
notifier fireTestFailure new Failure(description, new TestDidNotRiseExpectedException(testAndParameters.expectedException.get, testAndParameters))
)
}
case class FailedConstructor()
case class InstantiatedTest(testInstance: TestInstance)
}
| havarunner/havarunner | src/main/scala/com/github/havarunner/HavaRunner.scala | Scala | mit | 8,209 |
package com.github.duongdang.wikipedia.io
import org.apache.hadoop.io.{DataOutputBuffer, LongWritable, Text}
import org.apache.hadoop.io.compress.{CompressionCodecFactory, SplittableCompressionCodec}
import org.apache.hadoop.fs.Path
import scala.xml.XML
import org.apache.hadoop.mapreduce.lib.input.{FileSplit, FileInputFormat}
import org.apache.hadoop.mapreduce.{JobContext, RecordReader, InputSplit, TaskAttemptContext}
import org.apache.commons.logging.LogFactory
/**
* Hadoop InputFormat that splits a Wikipedia dump file into pages.
*
* The WikiPageRecordReader class inside outputs a Text as value and the starting position (byte) as key.
*
*/
class WikiInputFormat extends FileInputFormat[LongWritable, Text]
{
private val logger = LogFactory.getLog(getClass.getName)
protected override def isSplitable(context: JobContext, file: Path): Boolean =
{
val codec = new CompressionCodecFactory(context.getConfiguration).getCodec(file)
if (null == codec) true else codec.isInstanceOf[SplittableCompressionCodec]
}
override def createRecordReader(genericSplit: InputSplit, context: TaskAttemptContext): RecordReader[LongWritable, Text] =
{
val split = genericSplit.asInstanceOf[FileSplit]
logger.info("getRecordReader start.....split=" + split)
context.setStatus(split.toString)
new WikiPageRecordReader(split, context)
}
private class WikiPageRecordReader(split: FileSplit, context: TaskAttemptContext) extends RecordReader[LongWritable, Text]
{
private var key: LongWritable = null
private var value: Text = null
private val conf = context.getConfiguration
private val page = new DataOutputBuffer()
private val inputStream = SeekableInputStream(split,
split.getPath.getFileSystem(conf),
new CompressionCodecFactory(conf))
private val matcher = new ByteMatcher(inputStream)
private val (start, end) =
{
inputStream match
{
case SeekableSplitCompressedInputStream(sin) =>
(sin.getAdjustedStart, sin.getAdjustedEnd + 1)
case _ =>
(split.getStart, split.getStart + split.getLength)
}
}
private val pageBeginPattern = "<page>".getBytes("UTF-8")
private val pageEndPattern = "</page>".getBytes("UTF-8")
override def close() = inputStream.close()
override def getProgress: Float =
{
      if (end == start) 1.0f else (getPos - start).toFloat / (end - start).toFloat
}
def getPos: Long = matcher.getPos
override def initialize(genericInputSplit: InputSplit, context: TaskAttemptContext) = ()
override def nextKeyValue(): Boolean =
{
// Initialize key and value
if (key == null) key = new LongWritable()
if (value == null) value = new Text()
if (matcher.getPos < end && matcher.readUntilMatch(pageBeginPattern, end))
{
try
{
page.write(pageBeginPattern)
if (matcher.readUntilMatch(pageEndPattern, end, Some(page)))
{
key.set(matcher.getPos)
value.set(page.getData.take(page.getLength))
return true
}
}
finally
{
page.reset()
}
}
false
}
override def getCurrentKey: LongWritable = key
override def getCurrentValue: Text = value
}
}
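// Editor's sketch (hypothetical Spark driver, not part of this project): each
// record produced by this InputFormat is one raw <page>...</page> element.
//
//   val pages = sc.newAPIHadoopFile(
//     "dumps/enwiki-latest-pages-articles.xml.bz2",
//     classOf[WikiInputFormat], classOf[LongWritable], classOf[Text])
//   val titles = pages.map { case (_, page) => (XML.loadString(page.toString) \ "title").text }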
| duongdang/wiki-extraction | src/main/scala/com/github/duongdang/wikipedia/io/WikiInputFormat.scala | Scala | apache-2.0 | 3,428 |
/*
* Copyright 2009-2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.norbert
package jmx
import management.ManagementFactory
import javax.management.{ObjectInstance, ObjectName, StandardMBean}
import logging.Logging
object JMX extends Logging {
private val mbeanServer = ManagementFactory.getPlatformMBeanServer
def register(mbean: AnyRef, name: String): Option[ObjectInstance] = if (System.getProperty("com.linkedin.norbert.disableJMX") == null) try {
Some(mbeanServer.registerMBean(mbean, new ObjectName(getUniqueName(name))))
} catch {
case ex: Exception =>
log.error(ex, "Error when registering mbean: %s".format(mbean))
None
} else {
None
}
val map = collection.mutable.Map.empty[String, Int]
def getUniqueName(name: String): String = synchronized {
//TODO switch this to concurrent hash map but that might entail a lot more changes
//which might not play nice with scala.
val id = map.getOrElse(name, -1)
val unique = if (id == -1) name else name + "-" + id
map += (name -> (id + 1))
unique
}
def register(mbean: MBean): Option[ObjectInstance] = register(mbean, mbean.name)
def register(mbean: Option[MBean]): Option[ObjectInstance] = mbean.flatMap(m => register(m, m.name))
def unregister(mbean: ObjectInstance) = try {
mbeanServer.unregisterMBean(mbean.getObjectName)
//synchronized { map.remove(mbean.getObjectName.getCanonicalName) } //We treat the map as a mapping
//from JMX value to a sequence number.
//Overflow will result in negative values being used which should be fine.
} catch {
case ex: Exception => log.error(ex, "Error while unregistering mbean: %s".format(mbean.getObjectName))
}
def name(clientName: Option[String], serviceName: String) =
if (clientName.isDefined)
"client=%s,service=%s".format(clientName.get, serviceName)
else
"service=%s".format(serviceName)
  class MBean(klass: Class[_], nameProperties: String) extends StandardMBean(klass) {
    def this(klass: Class[_]) = this(klass, null)
    def name: String = {
      val simpleName = klass.getSimpleName
      val mbeanIndex = simpleName.lastIndexOf("MBean")
      val base = "com.linkedin.norbert:type=%s".format(if (mbeanIndex == -1) simpleName else simpleName.substring(0, mbeanIndex))
      if (nameProperties != null) "%s,%s".format(base, nameProperties) else base
    }
  }
}
}
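// Editor's sketch (hypothetical MBean, not part of norbert): defining and
// registering a bean through the helpers above.
//
//   trait RequestCountMBean { def getRequestCount: Int }
//   class RequestCount extends JMX.MBean(classOf[RequestCountMBean], JMX.name(Some("client"), "svc"))
//       with RequestCountMBean {
//     def getRequestCount = 42
//   }
//   val registered = JMX.register(new RequestCount)   // Option[ObjectInstance]
//   registered.foreach(JMX.unregister)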
| linkedin/norbert | cluster/src/main/scala/com/linkedin/norbert/jmx/JMX.scala | Scala | apache-2.0 | 2,946 |
package io.skysail.server.app.resources
import io.skysail.domain.{AsyncResponseEvent, RequestEvent, ResponseEvent}
import io.skysail.domain.resources.EntityResource
import io.skysail.server.app.RootApplication
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}
case class RootInfo(title: String, description: String, info: String = "you are seeing this as no applications have been deployed yet.")
class RootResource extends EntityResource[RootApplication,RootInfo] {
override def getAsync(requestEvent: RequestEvent) {
val appService = getApplication().appService
val apps = appService.getAllApplications(this.actorContext.system)
    apps.onComplete {
      case Success(s) => {
        val skysailServerBundle = bundleContext.getBundles.filter(_.getSymbolicName == "skysail.server").head
        val desc = "powered by skysail " + skysailServerBundle.getVersion
        val msg = if (s.size <= 1)
          RootInfo("skysail server", desc)
        else
          RootInfo("skysail server", desc, s"${s.size} app(s) deployed: " + s.map(_.name).mkString(", "))
        requestEvent.controllerActor ! ResponseEvent(requestEvent, msg)
      }
      case Failure(t) =>
        // without a Failure case this partial function throws a MatchError when the lookup fails
        requestEvent.controllerActor ! ResponseEvent(requestEvent,
          RootInfo("skysail server", "application lookup failed", t.getMessage))
    }
}
override def get(requestEvent: RequestEvent):AsyncResponseEvent = {
getAsync(requestEvent)
AsyncResponseEvent(requestEvent)
}
} | evandor/skysail-server | skysail.server/src/io/skysail/server/app/resources/RootResource.scala | Scala | apache-2.0 | 1,343 |
package com.kodekutters.czml
import java.io.{IOException, File, PrintWriter}
import com.kodekutters.czml.czmlProperties._
import play.api.libs.json.Json
/**
* basic supporting utilities
*/
object Util {
// constants
val COMPLETE = "COMPLETE"
val BELOW_ELLIPSOID_HORIZON = "BELOW_ELLIPSOID_HORIZON"
val ABOVE_ELLIPSOID_HORIZON = "ABOVE_ELLIPSOID_HORIZON"
val LEFT = "LEFT"
val CENTER = "CENTER"
val RIGHT = "RIGHT"
val BOTTOM = "BOTTOM"
val TOP = "TOP"
val FILL = "FILL"
val OUTLINE = "OUTLINE"
val FILL_AND_OUTLINE = "FILL_AND_OUTLINE"
val UNBOUNDED = "UNBOUNDED"
val CLAMPED = "CLAMPED"
val LOOP_STOP = "LOOP_STOP"
val SYSTEM_CLOCK = "SYSTEM_CLOCK"
val SYSTEM_CLOCK_MULTIPLIER = "SYSTEM_CLOCK_MULTIPLIER"
val TICK_DEPENDENT = "TICK_DEPENDENT"
// interpolators
val HERMITE = "HERMITE"
val LAGRANGE = "LAGRANGE"
val LINEAR = "LINEAR"
  def isEmpty(x: String) = x == null || x.trim.isEmpty
def isAllDigits(x: String) = x forall Character.isDigit
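  // e.g. isEmpty(null) == true, isEmpty("  ") == true, isEmpty(" x ") == false;
  //      isAllDigits("123") == true, isAllDigits("12a") == false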
/**
* write the czml document to a file.
* @param outFile the file name to write to, if empty or missing output will be to System.out
* @param czml the CZML document, i.e. the list of CZML packets
*/
def writeCzmlToFile(czml: CZML[CZMLPacket], outFile: Option[String] = None) = {
val writer = if (outFile.isEmpty) new PrintWriter(System.out) else new PrintWriter(new File(outFile.get))
try {
writer.write(Json.prettyPrint(Json.toJson(czml)))
} catch {
case e: IOException => e.printStackTrace()
}
finally {
writer.flush()
// close files, not System.out
if (outFile.nonEmpty) writer.close()
}
}
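  // Minimal usage sketch (assumes the CZML companion in czmlProperties offers an
  // empty-document apply(); "test.czml" is an example path):
  //   val doc = CZML[CZMLPacket]()
  //   writeCzmlToFile(doc, Some("test.czml")) // pretty-printed JSON to the file
  //   writeCzmlToFile(doc)                    // same output to System.out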
/**
* write the (json) string representation of a czml document to a file.
* @param outFile the file name to write to, if empty or missing output will be to System.out
* @param czmljs the CZML document as a (json) string
*/
def writeJsToFile(czmljs: String, outFile: Option[String] = None) = {
val writer = if (outFile.isEmpty) new PrintWriter(System.out) else new PrintWriter(new File(outFile.get))
try {
writer.write(czmljs)
} catch {
case e: IOException => e.printStackTrace()
}
finally {
writer.flush()
// close files, not System.out
if (outFile.nonEmpty) writer.close()
}
}
}
| workingDog/scalaczml | src/main/scala/com/kodekutters/czml/Util.scala | Scala | bsd-3-clause | 2,336 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.scheduler
import java.util.concurrent.{TimeUnit, CountDownLatch}
import scala.collection.mutable.HashMap
import scala.concurrent.ExecutionContext
import scala.language.existentials
import scala.util.{Failure, Success}
import org.apache.spark.streaming.util.WriteAheadLogUtils
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.rpc._
import org.apache.spark.streaming.{StreamingContext, Time}
import org.apache.spark.streaming.receiver._
import org.apache.spark.util.{Utils, ThreadUtils, SerializableConfiguration}
/**
 * Enumeration to identify the current state of a Receiver.
 */
private[streaming] object ReceiverState extends Enumeration {
  // Three fields are declared and then initialized by calling Value; each call to Value
  // returns a new instance of the inner class (which is itself named Value). An ID, a
  // name, or both can also be passed to the Value method.
  type ReceiverState = Value
  val INACTIVE, SCHEDULED, ACTIVE = Value
}
/**
* Messages used by the NetworkReceiver and the ReceiverTracker to communicate
* with each other.
*/
private[streaming] sealed trait ReceiverTrackerMessage
private[streaming] case class RegisterReceiver(
streamId: Int,
typ: String,
hostPort: String,
receiverEndpoint: RpcEndpointRef
) extends ReceiverTrackerMessage
private[streaming] case class AddBlock(receivedBlockInfo: ReceivedBlockInfo)
extends ReceiverTrackerMessage
private[streaming] case class ReportError(streamId: Int, message: String, error: String)
private[streaming] case class DeregisterReceiver(streamId: Int, msg: String, error: String)
extends ReceiverTrackerMessage
/**
* Messages used by the driver and ReceiverTrackerEndpoint to communicate locally.
*/
private[streaming] sealed trait ReceiverTrackerLocalMessage
/**
* This message will trigger ReceiverTrackerEndpoint to restart a Spark job for the receiver.
*/
private[streaming] case class RestartReceiver(receiver: Receiver[_])
extends ReceiverTrackerLocalMessage
/**
* This message is sent to ReceiverTrackerEndpoint when we start to launch Spark jobs for receivers
* at the first time.
*/
private[streaming] case class StartAllReceivers(receiver: Seq[Receiver[_]])
extends ReceiverTrackerLocalMessage
/**
* This message will trigger ReceiverTrackerEndpoint to send stop signals to all registered
* receivers.
*/
private[streaming] case object StopAllReceivers extends ReceiverTrackerLocalMessage
/**
* A message used by ReceiverTracker to ask all receiver's ids still stored in
* ReceiverTrackerEndpoint.
*/
private[streaming] case object AllReceiverIds extends ReceiverTrackerLocalMessage
private[streaming] case class UpdateReceiverRateLimit(streamUID: Int, newRate: Long)
extends ReceiverTrackerLocalMessage
/**
* This class manages the execution of the receivers of ReceiverInputDStreams. Instance of
* this class must be created after all input streams have been added and StreamingContext.start()
* has been called because it needs the final set of input streams at the time of instantiation.
*
* @param skipReceiverLaunch Do not launch the receiver. This is useful for testing.
*/
private[streaming]
// JobScheduler delegates the tracking of source input data to ReceiverTracker, and delegates
// the generation of each batch's RDD DAG to JobGenerator.
class ReceiverTracker(ssc: StreamingContext, skipReceiverLaunch: Boolean = false) extends Logging {
private val receiverInputStreams = ssc.graph.getReceiverInputStreams()
private val receiverInputStreamIds = receiverInputStreams.map { _.id }
private val receivedBlockTracker = new ReceivedBlockTracker(
ssc.sparkContext.conf,
ssc.sparkContext.hadoopConfiguration,
receiverInputStreamIds,
ssc.scheduler.clock,
ssc.isCheckpointPresent,
Option(ssc.checkpointDir)
)
private val listenerBus = ssc.scheduler.listenerBus
/** Enumeration to identify current state of the ReceiverTracker */
object TrackerState extends Enumeration {
type TrackerState = Value
val Initialized, Started, Stopping, Stopped = Value
}
import TrackerState._
  /** State of the tracker. Protected by "trackerStateLock". */
@volatile private var trackerState = Initialized
// endpoint is created when generator starts.
// This not being null means the tracker has been started and not stopped
private var endpoint: RpcEndpointRef = null
private val schedulingPolicy = new ReceiverSchedulingPolicy()
// Track the active receiver job number. When a receiver job exits ultimately, countDown will
// be called.
private val receiverJobExitLatch = new CountDownLatch(receiverInputStreams.size)
/**
* Track all receivers' information. The key is the receiver id, the value is the receiver info.
* It's only accessed in ReceiverTrackerEndpoint.
*/
private val receiverTrackingInfos = new HashMap[Int, ReceiverTrackingInfo]
/**
* Store all preferred locations for all receivers. We need this information to schedule
* receivers. It's only accessed in ReceiverTrackerEndpoint.
*/
private val receiverPreferredLocations = new HashMap[Int, Option[String]]
  /** Start the endpoint and receiver execution thread. */
def start(): Unit = synchronized {
if (isTrackerStarted) {
throw new SparkException("ReceiverTracker already started")
}
if (!receiverInputStreams.isEmpty) {
endpoint = ssc.env.rpcEnv.setupEndpoint(
"ReceiverTracker", new ReceiverTrackerEndpoint(ssc.env.rpcEnv))
if (!skipReceiverLaunch) launchReceivers()
logInfo("ReceiverTracker started")
trackerState = Started
}
}
  /** Stop the receiver execution thread. */
def stop(graceful: Boolean): Unit = synchronized {
if (isTrackerStarted) {
// First, stop the receivers
trackerState = Stopping
if (!skipReceiverLaunch) {
// Send the stop signal to all the receivers
endpoint.askWithRetry[Boolean](StopAllReceivers)
// Wait for the Spark job that runs the receivers to be over
// That is, for the receivers to quit gracefully.
receiverJobExitLatch.await(10, TimeUnit.SECONDS)
if (graceful) {
logInfo("Waiting for receiver job to terminate gracefully")
receiverJobExitLatch.await()
logInfo("Waited for receiver job to terminate gracefully")
}
// Check if all the receivers have been deregistered or not
val receivers = endpoint.askWithRetry[Seq[Int]](AllReceiverIds)
if (receivers.nonEmpty) {
logWarning("Not all of the receivers have deregistered, " + receivers)
} else {
logInfo("All of the receivers have deregistered successfully")
}
}
// Finally, stop the endpoint
ssc.env.rpcEnv.stop(endpoint)
endpoint = null
receivedBlockTracker.stop()
logInfo("ReceiverTracker stopped")
trackerState = Stopped
}
}
  /** Allocate all unallocated blocks to the given batch. */
def allocateBlocksToBatch(batchTime: Time): Unit = {
if (receiverInputStreams.nonEmpty) {
receivedBlockTracker.allocateBlocksToBatch(batchTime)
}
}
  /** Get the blocks for the given batch and all input streams. */
def getBlocksOfBatch(batchTime: Time): Map[Int, Seq[ReceivedBlockInfo]] = {
receivedBlockTracker.getBlocksOfBatch(batchTime)
}
  /** Get the blocks allocated to the given batch and stream. */
def getBlocksOfBatchAndStream(batchTime: Time, streamId: Int): Seq[ReceivedBlockInfo] = {
receivedBlockTracker.getBlocksOfBatchAndStream(batchTime, streamId)
}
/**
* Clean up the data and metadata of blocks and batches that are strictly
   * older than the threshold time.
*/
def cleanupOldBlocksAndBatches(cleanupThreshTime: Time) {
// Clean up old block and batch metadata
receivedBlockTracker.cleanupOldBatches(cleanupThreshTime, waitForCompletion = false)
// Signal the receivers to delete old block data
if (WriteAheadLogUtils.enableReceiverLog(ssc.conf)) {
logInfo(s"Cleanup old received batch data: $cleanupThreshTime")
synchronized {
if (isTrackerStarted) {
endpoint.send(CleanupOldBlocks(cleanupThreshTime))
}
}
}
}
  /** Register a receiver. */
private def registerReceiver(
streamId: Int,
typ: String,
hostPort: String,
receiverEndpoint: RpcEndpointRef,
senderAddress: RpcAddress
): Boolean = {
if (!receiverInputStreamIds.contains(streamId)) {
throw new SparkException("Register received for unexpected id " + streamId)
}
if (isTrackerStopping || isTrackerStopped) {
return false
}
val scheduledExecutors = receiverTrackingInfos(streamId).scheduledExecutors
    val acceptableExecutors = if (scheduledExecutors.nonEmpty) {
      // This receiver is registering and it's scheduled by
      // ReceiverSchedulingPolicy.scheduleReceivers. So use "scheduledExecutors" to check it.
      scheduledExecutors.get
} else {
// This receiver is scheduled by "ReceiverSchedulingPolicy.rescheduleReceiver", so calling
// "ReceiverSchedulingPolicy.rescheduleReceiver" again to check it.
scheduleReceiver(streamId)
}
    if (!acceptableExecutors.contains(hostPort)) {
      // Refuse it since it's scheduled to a wrong executor
false
} else {
val name = s"${typ}-${streamId}"
val receiverTrackingInfo = ReceiverTrackingInfo(
streamId,
ReceiverState.ACTIVE,
scheduledExecutors = None,
runningExecutor = Some(hostPort),
name = Some(name),
endpoint = Some(receiverEndpoint))
receiverTrackingInfos.put(streamId, receiverTrackingInfo)
listenerBus.post(StreamingListenerReceiverStarted(receiverTrackingInfo.toReceiverInfo))
logInfo("Registered receiver for stream " + streamId + " from " + senderAddress)
true
}
}
  /** Deregister a receiver. */
private def deregisterReceiver(streamId: Int, message: String, error: String) {
val lastErrorTime =
if (error == null || error == "") -1 else ssc.scheduler.clock.getTimeMillis()
val errorInfo = ReceiverErrorInfo(
lastErrorMessage = message, lastError = error, lastErrorTime = lastErrorTime)
val newReceiverTrackingInfo = receiverTrackingInfos.get(streamId) match {
case Some(oldInfo) =>
oldInfo.copy(state = ReceiverState.INACTIVE, errorInfo = Some(errorInfo))
case None =>
logWarning("No prior receiver info")
ReceiverTrackingInfo(
streamId, ReceiverState.INACTIVE, None, None, None, None, Some(errorInfo))
}
receiverTrackingInfos(streamId) = newReceiverTrackingInfo
listenerBus.post(StreamingListenerReceiverStopped(newReceiverTrackingInfo.toReceiverInfo))
val messageWithError = if (error != null && !error.isEmpty) {
s"$message - $error"
} else {
s"$message"
}
logError(s"Deregistered receiver for stream $streamId: $messageWithError")
}
  /** Update a receiver's maximum ingestion rate. */
def sendRateUpdate(streamUID: Int, newRate: Long): Unit = synchronized {
if (isTrackerStarted) {
endpoint.send(UpdateReceiverRateLimit(streamUID, newRate))
}
}
  /** Add new blocks for the given stream. */
private def addBlock(receivedBlockInfo: ReceivedBlockInfo): Boolean = {
receivedBlockTracker.addBlock(receivedBlockInfo)
}
  /** Report an error sent by a receiver. */
private def reportError(streamId: Int, message: String, error: String) {
val newReceiverTrackingInfo = receiverTrackingInfos.get(streamId) match {
case Some(oldInfo) =>
val errorInfo = ReceiverErrorInfo(lastErrorMessage = message, lastError = error,
lastErrorTime = oldInfo.errorInfo.map(_.lastErrorTime).getOrElse(-1L))
oldInfo.copy(errorInfo = Some(errorInfo))
case None =>
logWarning("No prior receiver info")
val errorInfo = ReceiverErrorInfo(lastErrorMessage = message, lastError = error,
lastErrorTime = ssc.scheduler.clock.getTimeMillis())
ReceiverTrackingInfo(
streamId, ReceiverState.INACTIVE, None, None, None, None, Some(errorInfo))
}
receiverTrackingInfos(streamId) = newReceiverTrackingInfo
listenerBus.post(StreamingListenerReceiverError(newReceiverTrackingInfo.toReceiverInfo))
val messageWithError = if (error != null && !error.isEmpty) {
s"$message - $error"
} else {
s"$message"
}
logWarning(s"Error reported by receiver for stream $streamId: $messageWithError")
}
private def scheduleReceiver(receiverId: Int): Seq[String] = {
val preferredLocation = receiverPreferredLocations.getOrElse(receiverId, None)
val scheduledExecutors = schedulingPolicy.rescheduleReceiver(
receiverId, preferredLocation, receiverTrackingInfos, getExecutors)
updateReceiverScheduledExecutors(receiverId, scheduledExecutors)
scheduledExecutors
}
private def updateReceiverScheduledExecutors(
receiverId: Int, scheduledExecutors: Seq[String]): Unit = {
val newReceiverTrackingInfo = receiverTrackingInfos.get(receiverId) match {
case Some(oldInfo) =>
oldInfo.copy(state = ReceiverState.SCHEDULED,
scheduledExecutors = Some(scheduledExecutors))
case None =>
ReceiverTrackingInfo(
receiverId,
ReceiverState.SCHEDULED,
Some(scheduledExecutors),
runningExecutor = None)
}
receiverTrackingInfos.put(receiverId, newReceiverTrackingInfo)
}
  /** Check if any blocks are left to be processed. */
def hasUnallocatedBlocks: Boolean = {
receivedBlockTracker.hasUnallocatedReceivedBlocks
}
  /** Get the list of executors, excluding the driver. */
private def getExecutors: Seq[String] = {
if (ssc.sc.isLocal) {
Seq(ssc.sparkContext.env.blockManager.blockManagerId.hostPort)
} else {
ssc.sparkContext.env.blockManager.master.getMemoryStatus.filter { case (blockManagerId, _) =>
blockManagerId.executorId != SparkContext.DRIVER_IDENTIFIER // Ignore the driver location
}.map { case (blockManagerId, _) => blockManagerId.hostPort }.toSeq
}
}
/**
   * Run a dummy Spark job to ensure that all slaves have registered. This avoids all the
   * receivers being scheduled on the same node.
   *
   * TODO Should poll the executor number and wait for executors according to
   * "spark.scheduler.minRegisteredResourcesRatio" and
   * "spark.scheduler.maxRegisteredResourcesWaitingTime" rather than running a dummy job.
*/
private def runDummySparkJob(): Unit = {
if (!ssc.sparkContext.isLocal) {
ssc.sparkContext.makeRDD(1 to 50, 50).map(x => (x, 1)).reduceByKey(_ + _, 20).collect()
}
assert(getExecutors.nonEmpty)
}
/**
* Get the receivers from the ReceiverInputDStreams, distributes them to the
* worker nodes as a parallel collection, and runs them.
*/
private def launchReceivers(): Unit = {
val receivers = receiverInputStreams.map(nis => {
val rcvr = nis.getReceiver()
rcvr.setReceiverId(nis.id)
rcvr
})
runDummySparkJob()
logInfo("Starting " + receivers.length + " receivers")
endpoint.send(StartAllReceivers(receivers))
}
  /** Check if the tracker has been marked as started. */
private def isTrackerStarted: Boolean = trackerState == Started
  /** Check if the tracker has been marked as stopping. */
private def isTrackerStopping: Boolean = trackerState == Stopping
  /** Check if the tracker has been marked as stopped. */
private def isTrackerStopped: Boolean = trackerState == Stopped
  /** RpcEndpoint to receive messages from the receivers. */
private class ReceiverTrackerEndpoint(override val rpcEnv: RpcEnv) extends ThreadSafeRpcEndpoint {
// TODO Remove this thread pool after https://github.com/apache/spark/issues/7385 is merged
private val submitJobThreadPool = ExecutionContext.fromExecutorService(
      ThreadUtils.newDaemonCachedThreadPool("submit-job-thread-pool"))
override def receive: PartialFunction[Any, Unit] = {
// Local messages
case StartAllReceivers(receivers) =>
val scheduledExecutors = schedulingPolicy.scheduleReceivers(receivers, getExecutors)
for (receiver <- receivers) {
val executors = scheduledExecutors(receiver.streamId)
updateReceiverScheduledExecutors(receiver.streamId, executors)
receiverPreferredLocations(receiver.streamId) = receiver.preferredLocation
startReceiver(receiver, executors)
}
case RestartReceiver(receiver) =>
// Old scheduled executors minus the ones that are not active any more
val oldScheduledExecutors = getStoredScheduledExecutors(receiver.streamId)
val scheduledExecutors = if (oldScheduledExecutors.nonEmpty) {
// Try global scheduling again
oldScheduledExecutors
} else {
val oldReceiverInfo = receiverTrackingInfos(receiver.streamId)
// Clear "scheduledExecutors" to indicate we are going to do local scheduling
val newReceiverInfo = oldReceiverInfo.copy(
state = ReceiverState.INACTIVE, scheduledExecutors = None)
receiverTrackingInfos(receiver.streamId) = newReceiverInfo
schedulingPolicy.rescheduleReceiver(
receiver.streamId,
receiver.preferredLocation,
receiverTrackingInfos,
getExecutors)
}
// Assume there is one receiver restarting at one time, so we don't need to update
// receiverTrackingInfos
startReceiver(receiver, scheduledExecutors)
case c: CleanupOldBlocks =>
receiverTrackingInfos.values.flatMap(_.endpoint).foreach(_.send(c))
case UpdateReceiverRateLimit(streamUID, newRate) =>
for (info <- receiverTrackingInfos.get(streamUID); eP <- info.endpoint) {
eP.send(UpdateRateLimit(newRate))
}
// Remote messages
case ReportError(streamId, message, error) =>
reportError(streamId, message, error)
}
override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
// Remote messages
case RegisterReceiver(streamId, typ, hostPort, receiverEndpoint) =>
val successful =
registerReceiver(streamId, typ, hostPort, receiverEndpoint, context.sender.address)
context.reply(successful)
case AddBlock(receivedBlockInfo) =>
context.reply(addBlock(receivedBlockInfo))
case DeregisterReceiver(streamId, message, error) =>
deregisterReceiver(streamId, message, error)
context.reply(true)
// Local messages
case AllReceiverIds =>
context.reply(receiverTrackingInfos.filter(_._2.state != ReceiverState.INACTIVE).keys.toSeq)
case StopAllReceivers =>
assert(isTrackerStopping || isTrackerStopped)
stopReceivers()
context.reply(true)
}
    /** Return the stored scheduled executors that are still alive. */
private def getStoredScheduledExecutors(receiverId: Int): Seq[String] = {
if (receiverTrackingInfos.contains(receiverId)) {
val scheduledExecutors = receiverTrackingInfos(receiverId).scheduledExecutors
if (scheduledExecutors.nonEmpty) {
val executors = getExecutors.toSet
// Only return the alive executors
scheduledExecutors.get.filter(executors)
} else {
Nil
}
} else {
Nil
}
}
    /**
     * Start a receiver along with its scheduled executors.
     * ReceiverTracker forwards the meta information of received block data directly to its
     * member ReceivedBlockTracker, which is dedicated to managing that meta information.
     */
private def startReceiver(receiver: Receiver[_], scheduledExecutors: Seq[String]): Unit = {
def shouldStartReceiver: Boolean = {
// It's okay to start when trackerState is Initialized or Started
!(isTrackerStopping || isTrackerStopped)
}
val receiverId = receiver.streamId
if (!shouldStartReceiver) {
onReceiverJobFinish(receiverId)
return
}
val checkpointDirOption = Option(ssc.checkpointDir)
val serializableHadoopConf =
new SerializableConfiguration(ssc.sparkContext.hadoopConfiguration)
// Function to start the receiver on the worker node
val startReceiverFunc: Iterator[Receiver[_]] => Unit =
(iterator: Iterator[Receiver[_]]) => {
if (!iterator.hasNext) {
throw new SparkException(
"Could not start receiver as object not found.")
}
if (TaskContext.get().attemptNumber() == 0) {
val receiver = iterator.next()
            assert(!iterator.hasNext)
val supervisor = new ReceiverSupervisorImpl(
receiver, SparkEnv.get, serializableHadoopConf.value, checkpointDirOption)
supervisor.start()
supervisor.awaitTermination()
} else {
// It's restarted by TaskScheduler, but we want to reschedule it again. So exit it.
}
}
// Create the RDD using the scheduledExecutors to run the receiver in a Spark job
val receiverRDD: RDD[Receiver[_]] =
if (scheduledExecutors.isEmpty) {
ssc.sc.makeRDD(Seq(receiver), 1)
} else {
val preferredLocations =
scheduledExecutors.map(hostPort => Utils.parseHostPort(hostPort)._1).distinct
ssc.sc.makeRDD(Seq(receiver -> preferredLocations))
}
receiverRDD.setName(s"Receiver $receiverId")
ssc.sparkContext.setJobDescription(s"Streaming job running receiver $receiverId")
ssc.sparkContext.setCallSite(Option(ssc.getStartSite()).getOrElse(Utils.getCallSite()))
val future = ssc.sparkContext.submitJob[Receiver[_], Unit, Unit](
receiverRDD, startReceiverFunc, Seq(0), (_, _) => Unit, ())
// We will keep restarting the receiver job until ReceiverTracker is stopped
future.onComplete {
case Success(_) =>
if (!shouldStartReceiver) {
onReceiverJobFinish(receiverId)
} else {
logInfo(s"Restarting Receiver $receiverId")
self.send(RestartReceiver(receiver))
}
case Failure(e) =>
if (!shouldStartReceiver) {
onReceiverJobFinish(receiverId)
} else {
logError("Receiver has been stopped. Try to restart it.", e)
logInfo(s"Restarting Receiver $receiverId")
self.send(RestartReceiver(receiver))
}
}(submitJobThreadPool)
logInfo(s"Receiver ${receiver.streamId} started")
}
override def onStop(): Unit = {
submitJobThreadPool.shutdownNow()
}
    /**
     * Called when a receiver is terminated. It means we won't restart its Spark job.
     */
private def onReceiverJobFinish(receiverId: Int): Unit = {
receiverJobExitLatch.countDown()
receiverTrackingInfos.remove(receiverId).foreach { receiverTrackingInfo =>
if (receiverTrackingInfo.state == ReceiverState.ACTIVE) {
logWarning(s"Receiver $receiverId exited but didn't deregister")
}
}
}
    /** Send the stop signal to the receivers. */
private def stopReceivers() {
receiverTrackingInfos.values.flatMap(_.endpoint).foreach { _.send(StopReceiver) }
logInfo("Sent stop signal to all " + receiverTrackingInfos.size + " receivers")
}
}
}
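// Rough lifecycle sketch (an illustrative addition; `ssc` is an initialized StreamingContext
// and `batchTime` a batch Time). JobScheduler drives this class roughly as follows:
//   val tracker = new ReceiverTracker(ssc)
//   tracker.start()                          // launch receivers as long-running Spark jobs
//   tracker.allocateBlocksToBatch(batchTime) // once per batch interval
//   val blocks = tracker.getBlocksOfBatch(batchTime)
//   tracker.stop(graceful = true)            // on shutdown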
| tophua/spark1.52 | streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala | Scala | apache-2.0 | 26,969 |
package com.alzindiq.example
import com.alzindiq.Plumber
import com.alzindiq.cluster.Clusterer
import com.alzindiq.indexer.FieldIndexer
import com.hp.hpl.plumb.bucketer.SimpleWordBucketer
import com.hp.hpl.plumb.indexer.InvertedIndex
import com.rockymadden.stringmetric.similarity.JaccardMetric
import org.scalatest.{Matchers, FlatSpec}
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.io.Source
object CoraTest {
val fileName = "cora-all-id.txt"
//val fileName = "cora-all-subset.txt"
val coraPlumbers : Set[Plumber] = Source.fromFile(fileName).getLines().map(CoraRecord.apply(_,4)).
filter(!_.isEmpty).map(_.get).toSet
val trimmingFunction = (w:Any) => w.toString.replaceAll("[^a-zA-Z0-9 ]", "").trim
val indexName1 = "authorsIndex"
val indexName2 = "titleIndex"
val keyExtractor1 = (p:Plumber) => p.asInstanceOf[CoraRecord].getAuthorNGrams().map(_.map(CoraTest.trimmingFunction.apply(_))).toList
val keyExtractor2 = (p:Plumber) => p.asInstanceOf[CoraRecord].getTitleNGrams().map(_.map(CoraTest.trimmingFunction.apply(_))).toList
val stopValues : Option[List[Any]] = Some(List("a", "about", "above", "after", "again", "against", "all", "am", "an", "and", "any", "are", "aren't",
"as", "at", "be", "because", "been", "before", "being", "below", "between", "both", "but",
"by", "can't", "cannot", "could", "couldn't", "did", "didn't", "do", "does", "doesn't",
"doing", "don't", "down", "during", "each", "few", "for", "from", "further", "had", "hadn't",
"has", "hasn't", "have", "haven't", "having", "he", "he'd", "he'll", "he's", "her", "here",
"here's", "hers", "herself", "him", "himself", "his", "how", "how's", "i", "i'd", "i'll",
"i'm", "i've", "if", "in", "into", "is", "isn't", "it", "it's", "its", "itself", "learning", "let's", "me",
"more", "most", "mustn't", "my", "myself", "no", "nor", "not", "of", "off", "on", "once",
"only", "or", "other", "ought", "our", "ours", "ourselves", "out", "over", "own", "same",
"shan't", "she", "she'd", "she'll", "she's", "should", "shouldn't", "so", "some", "such",
"than", "that", "that's", "the", "their", "theirs", "them", "themselves", "then", "there",
"there's", "these", "they", "they'd", "they'll", "they're", "they've", "this", "those",
"through", "to", "too", "under", "until", "up", "very", "was", "wasn't", "we", "we'd", "we'll",
"we're", "we've", "were", "weren't", "what", "what's", "when", "when's", "where", "where's",
"which", "while", "who", "who's", "whom", "why", "why's", "with", "won't", "would", "wouldn't",
"you", "you'd", "you'll", "you're", "you've", "your", "yours", "yourself", "yourselves"))
def similarityFunction (threshold : Double) = (p : Plumber, p2 : Plumber) => {
val coraP = p.asInstanceOf[CoraRecord]
val coraP2= p2.asInstanceOf[CoraRecord]
    // Keep the weighted sum in a single expression: a leading "+" on its own line is parsed
    // as a unary plus, which silently dropped the author term from the score.
    val jaccard = 0.8 * jaccardSimilarity(coraP.getTitleNGrams().flatten, coraP2.getTitleNGrams().flatten) +
      0.2 * jaccardSimilarity(coraP.getAuthorNGrams().flatten, coraP2.getAuthorNGrams().flatten)
    if (jaccard >= threshold) jaccard
    else 0d
}
  def jaccardSimilarity(s1: Set[String], s2: Set[String]): Double = {
    if (s1.isEmpty || s2.isEmpty) {
      0d
    } else {
      // use a Double numerator: integer division would truncate the ratio to 0 or 1
      s1.intersect(s2).size.toDouble / s1.union(s2).size
    }
  }
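  // e.g. jaccardSimilarity(Set("a", "b"), Set("b", "c")) == 1.0/3: one shared token out of
  // three distinct tokens. The .toDouble above is what keeps this from truncating to 0.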
}
class CoraTest extends FlatSpec with Matchers {
"Cora parser" should "create right number of records" in {
val t1 = System.currentTimeMillis
CoraTest.coraPlumbers.size shouldBe 1878
val t2 = System.currentTimeMillis
println("Test finished in " + (t2 - t1) + " msecs")
}
"Bucketer for cora" should "result in decently separated buckets" in {
val indexer = new FieldIndexer(CoraTest.stopValues,CoraTest.trimmingFunction)
val tMinus1 = System.currentTimeMillis
indexer.createIndices(Map (CoraTest.indexName1 -> CoraTest.keyExtractor1, CoraTest.indexName2 -> CoraTest.keyExtractor2),CoraTest.coraPlumbers)
val t0 = System.currentTimeMillis
println("Indexing finished in " + (t0 - tMinus1) + " msecs")
val bucketiser : (Set[Plumber]) => Map [Any,Set[Plumber]]= (in : Set[Plumber]) => {
val outMap = mutable.Map.empty[Any,mutable.Set[Plumber]]
in.foreach( p => {
val cora= p.asInstanceOf[CoraRecord]
val sameTwoAuth = cora.getAuthorNGrams()
val sameTwoTitle = cora.getTitleNGrams()
sameTwoAuth.map(auth => {
val sameAuth = indexer.indices.get(CoraTest.indexName1).get.getOrElse(auth, ListBuffer.empty).toSet
sameTwoTitle.map(title => {
val sameTitle = indexer.indices.get(CoraTest.indexName2).get.getOrElse(title, ListBuffer.empty).toSet
var bucket = outMap.getOrElse(auth ++ title, mutable.Set.empty)
outMap.put(auth ++ title, bucket.union(sameTitle.union(sameAuth)))
})
})
})
val out = Map(outMap.toList : _ *) // make the map immutable
out.map(p => (p._1,Set(p._2.toList : _ *))).filter(p => p._2.size >1) // make the sets immutable too
}
val t1 = System.currentTimeMillis
val buckets = bucketiser.apply(CoraTest.coraPlumbers)
val t2 = System.currentTimeMillis
println("Bucketing finished in " + (t2 - t1) + " msecs. Size "+buckets.size)
//buckets.foreach(p => println(p._1, p._2.map(_.getId.get.toString)))
val t3 = System.currentTimeMillis
    val coalesced = buckets.values.toList.distinct // remove duplicates
    val t4 = System.currentTimeMillis
    println("Coalescing finished in " + (t4 - t2) + " msecs. Size " + coalesced.size)
    //println(coalesced.map(s => s.map( p => p.getId.get.toString)))
    var i = -1
    val duplicateFreeMap : Map[List[Any],Set[Plumber]] = coalesced.map(p=>{
i = i+1
List(i) -> p
}).toMap
val quickie = duplicateFreeMap.get(List(273)).get
val testMap : Map[List[Any],Set[Plumber]] = Map(List(1)-> quickie)
val clusterer = new Clusterer(CoraTest.similarityFunction(0.6))
val t5 = System.currentTimeMillis
//val results = clusterer.initClusters4AllBuckets(duplicateFreeMap)
val results = clusterer.initClusters4AllBuckets(testMap)
val t6 = System.currentTimeMillis
println("Clustering finished in " + (t6 - t5) + " msecs")
println(results.size)
println(results.get(List(1)).size)
}
}
| alzindiq/plumb | src/test/scala/com/alzindiq/example/CoraTest.scala | Scala | apache-2.0 | 6,347 |
/**
* Copyright (c) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trustedanalytics.sparktk.models.clustering.lda
import org.apache.commons.lang3.StringUtils
import org.trustedanalytics.sparktk.frame.Frame
/**
* Arguments to the LDA train plugin - see user docs for more on the parameters
*
* @param frame Input frame data
* @param documentColumnName Column Name for documents. Column should contain a str value.
* @param wordColumnName Column name for words. Column should contain a str value.
* @param wordCountColumnName Column name for word count. Column should contain an int32 or int64 value.
* @param maxIterations The maximum number of iterations that the algorithm will execute.
* The valid value range is all positive int. Default is 20.
* @param alpha The :term:`hyperparameter` for document-specific distribution over topics.
* Mainly used as a smoothing parameter in :term:`Bayesian inference`.
* If set to a singleton list List(-1d), then docConcentration is set automatically.
* If set to singleton list List(t) where t != -1, then t is replicated to a vector of length k during
* LDAOptimizer.initialize(). Otherwise, the alpha must be length k.
* Currently the EM optimizer only supports symmetric distributions, so all values in the vector should be the same.
* Values should be greater than 1.0. Default value is -1.0 indicating automatic setting.
* @param beta The :term:`hyperparameter` for word-specific distribution over topics.
* Mainly used as a smoothing parameter in :term:`Bayesian inference`.
* Larger value implies that topics contain all words more uniformly and
* smaller value implies that topics are more concentrated on a small
* subset of words.
* Valid value range is all positive float greater than or equal to 1.
 *             Default is 1.1.
* @param numTopics The number of topics to identify in the LDA model.
* Using fewer topics will speed up the computation, but the extracted topics
* might be more abstract or less specific; using more topics will
* result in more computation but lead to more specific topics.
* Valid value range is all positive int.
* Default is 10.
* @param seed An optional random seed.
* The random seed is used to initialize the pseudorandom number generator
* used in the LDA model. Setting the random seed to the same value every
* time the model is trained, allows LDA to generate the same topic distribution
* if the corpus and LDA parameters are unchanged.
* @param checkPointInterval Period (in iterations) between checkpoints (default = 10).
* Checkpointing helps with recovery (when nodes fail). It also helps with eliminating
* temporary shuffle files on disk, which can be important when LDA is run for many
* iterations. If the checkpoint directory is not set, this setting is ignored.
*
*/
case class LdaTrainArgs(frame: Frame,
documentColumnName: String,
wordColumnName: String,
wordCountColumnName: String,
maxIterations: Int = 20,
alpha: Option[List[Double]] = None,
beta: Float = 1.1f,
numTopics: Int = 10,
seed: Option[Long] = None,
checkPointInterval: Int = 10) {
require(frame != null, "frame is required")
require(StringUtils.isNotBlank(documentColumnName), "document column name is required")
require(StringUtils.isNotBlank(wordColumnName), "word column name is required")
require(maxIterations > 0, "Max iterations should be greater than 0")
if (alpha.isDefined) {
if (alpha.get.size == 1) {
      require(alpha.get.head == -1d || alpha.get.head > 1d, "Alpha should be greater than 1.0, or -1.0 to indicate the default setting")
}
else {
      require(alpha.get.forall(a => a > 1d), "All values of alpha should be greater than 1.0")
}
}
require(beta > 0, "Beta should be greater than 0")
require(numTopics > 0, "Number of topics (K) should be greater than 0")
def columnNames: List[String] = {
List(documentColumnName, wordColumnName, wordCountColumnName)
}
def getAlpha: List[Double] = {
alpha.getOrElse(List(-1d))
}
}
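// Hypothetical construction sketch (the frame and column names are examples only):
//   val args = LdaTrainArgs(frame,
//     documentColumnName = "doc_id",
//     wordColumnName = "word",
//     wordCountColumnName = "count",
//     numTopics = 5)
//   args.getAlpha // List(-1.0), i.e. automatic docConcentration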
| trustedanalytics/spark-tk | sparktk-core/src/main/scala/org/trustedanalytics/sparktk/models/clustering/lda/LdaTrainArgs.scala | Scala | apache-2.0 | 5,270 |
import edu.uta.diql._
import scala.io.Source
import scala.collection.parallel.ParIterable
object Test {
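  // Micro-benchmark comparing plain sequential collections against DIQL queries over a
  // parallel collection; args(0) is the element count to generate (e.g. "1000000").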
def main ( args: Array[String] ) {
println("Number of cores: "+Runtime.getRuntime().availableProcessors())
println("Memory: "+(Runtime.getRuntime.maxMemory / 1024))
var t: Long = System.currentTimeMillis()
val a = (1 to args(0).toInt).toList
println("**** construct sequential: "+(System.currentTimeMillis()-t)/1000.0+" secs")
t = System.currentTimeMillis()
a.map(_+1)
println("**** sequential: "+(System.currentTimeMillis()-t)/1000.0+" secs")
t = System.currentTimeMillis()
    // sequential equivalent of the DIQL group-by query below: group by k = v+1, sum each group
    a.groupBy(_ + 1).map { case (k, vs) => (k, vs.sum) }
println("**** sequential group by: "+(System.currentTimeMillis()-t)/1000.0+" secs")
t = System.currentTimeMillis()
val c = (1 to args(0).toInt).toList.par
println("**** construct parallel: "+(System.currentTimeMillis()-t)/1000.0+" secs")
explain(true)
t = System.currentTimeMillis()
q("""
select v+1 from v <- c
""")
println("**** parallel: "+(System.currentTimeMillis()-t)/1000.0+" secs")
t = System.currentTimeMillis()
q("""
select (k, +/v) from v <- c group by k: v+1
""")
println("**** parallel group by: "+(System.currentTimeMillis()-t)/1000.0+" secs")
}
}
| fegaras/DIQL | tests/parallel/map.scala | Scala | apache-2.0 | 1,308 |
package chapter8
object firstclassFunction {
  val inc: Int => Int = (x: Int) => x + 1
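  // e.g. inc(10) == 11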
def main(args: Array[String]): Unit = {
println(inc(10))
val someNumbers = List(10, 2, 4, 1, 5, -9)
someNumbers.foreach(x => print("\\t" + x))
someNumbers.foreach(println(_))
println("\\n" + someNumbers.filter(x => x > 5))
println("\\n" + someNumbers.filter(_ > 2))
}
}
}
| NikhilJose/ScalaPractice | src/main/scala/chapter8/firstclassFunction.scala | Scala | apache-2.0 | 398 |
package com.github.probe.backend.message
sealed trait ServerToClientMessage
| khernyo/freezing-ninja | common/src/main/scala/com/github/probe/backend/message/ServerToClientMessage.scala | Scala | apache-2.0 | 77 |
package im.actor.server.persist
import java.time.{ ZoneOffset, LocalDateTime }
import im.actor.server.db.ActorPostgresDriver.api._
import im.actor.server.models
class UserTable(tag: Tag) extends Table[models.User](tag, "users") {
import SexColumnType._
import UserStateColumnType._
def id = column[Int]("id", O.PrimaryKey)
def accessSalt = column[String]("access_salt")
def name = column[String]("name")
def countryCode = column[String]("country_code")
def sex = column[models.Sex]("sex")
def state = column[models.UserState]("state")
def createdAt = column[LocalDateTime]("created_at")
def nickname = column[Option[String]]("nickname")
def about = column[Option[String]]("about")
def deletedAt = column[Option[LocalDateTime]]("deleted_at")
def isBot = column[Boolean]("is_bot")
def * = (id, accessSalt, name, countryCode, sex, state, createdAt, nickname, about, deletedAt, isBot) <> (models.User.tupled, models.User.unapply)
}
object User {
val users = TableQuery[UserTable]
def byId(id: Rep[Int]) = users filter (_.id === id)
def nameById(id: Rep[Int]) = byId(id) map (_.name)
val byIdC = Compiled(byId _)
val nameByIdC = Compiled(nameById _)
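  // The Compiled wrappers above let Slick cache the generated SQL instead of recompiling the
  // query tree on every call; e.g. db.run(byIdC(42).result) (42 is an illustrative id).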
val activeHumanUsers =
users.filter(u ⇒ u.deletedAt.isEmpty && !u.isBot)
def create(user: models.User) =
users += user
def setCountryCode(userId: Int, countryCode: String) =
users.filter(_.id === userId).map(_.countryCode).update(countryCode)
def setDeletedAt(userId: Int) =
users.filter(_.id === userId).
map(_.deletedAt).
update(Some(LocalDateTime.now(ZoneOffset.UTC)))
def setName(userId: Int, name: String) =
users.filter(_.id === userId).map(_.name).update(name)
def allIds = users.map(_.id).result
def find(id: Int) =
byIdC(id).result
def findName(id: Int) =
nameById(id).result.headOption
  // TODO: #perf will this create a separate prepared statement for each size of `ids`?
def findSalts(ids: Set[Int]) =
users.filter(_.id inSet ids).map(u ⇒ (u.id, u.accessSalt)).result
def setNickname(userId: Int, nickname: Option[String]) =
byId(userId).map(_.nickname).update(nickname)
def setAbout(userId: Int, about: Option[String]) =
byId(userId).map(_.about).update(about)
def nicknameExists(nickname: String) =
users.filter(_.nickname.toLowerCase === nickname.toLowerCase).exists.result
def findByIds(ids: Set[Int]) =
users.filter(_.id inSet ids).result
def findByIdsPaged(ids: Set[Int], number: Int, size: Int) = {
val offset = (number - 1) * size
users.
filter(_.id inSet ids).
sortBy(_.name).
drop(offset).
take(size).
result
}
def activeUsersIds = activeHumanUsers.map(_.id).result
def page(number: Int, size: Int) = {
val offset = (number - 1) * size
activeHumanUsers.
sortBy(_.name).
drop(offset).
take(size)
}
}
| v2tmobile/actor-platform | actor-server/actor-persist/src/main/scala/im/actor/server/persist/User.scala | Scala | mit | 2,878 |
package registry
import com.yetu.oauth2resource.settings.OAuth2ProviderSettings
object MyOAuth2ProviderSettings extends OAuth2ProviderSettings {
  // Change this URL as needed.
val Oauth2providerBaseUrl = "https://auth.yetudev.com"
}
| yetu/oauth2-resource-server | sampleOAuth2ResourceServer/app/registry/MyOAuth2ProviderSettings.scala | Scala | mit | 242 |
package com.cloudray.scalapress.widgets
import javax.persistence.{Table, Entity}
import scala.beans.BeanProperty
import com.cloudray.scalapress.framework.ScalapressRequest
import com.cloudray.scalapress.media.ImageResolver
/** @author Stephen Samuel */
@Entity
@Table(name = "boxes_custom")
class HtmlWidget extends Widget {
@BeanProperty
var simpleEditor: Boolean = false
override def backoffice = "/backoffice/widget/html/" + id
override def render(req: ScalapressRequest): Option[String] = {
Option(content).map(new ImageResolver(req.context).resolve)
}
}
| vidyacraghav/scalapress | src/main/scala/com/cloudray/scalapress/widgets/HtmlWidget.scala | Scala | apache-2.0 | 578 |
package org.scalaide.extensions
package autoedits
import org.eclipse.jdt.ui.text.IJavaPartitions
import org.eclipse.jface.text.IDocument
import org.scalaide.core.lexical.ScalaPartitions
import org.scalaide.core.text.TextChange
object CloseCharSetting extends AutoEditSetting(
id = ExtensionSetting.fullyQualifiedName[CloseChar],
name = "Close char literals",
description = "Closes a typed opening char literal if necessary.",
partitions = Set(
IDocument.DEFAULT_CONTENT_TYPE,
ScalaPartitions.SCALADOC_CODE_BLOCK
)
)
trait CloseChar extends AutoEdit {
override def setting = CloseCharSetting
override def perform() = {
check(textChange) {
case TextChange(start, end, "'") =>
if (autoClosingRequired(end))
Some(TextChange(start, end, "''") withLinkedModel (start+2, singleLinkedPos(start+1)))
else
None
}
}
def ch(i: Int, c: Char) = {
val o = textChange.start + i
o >= 0 && o < document.length && document(o) == c
}
def singleLinkedPos(pos: Int): Seq[Seq[(Int, Int)]] =
Seq(Seq((pos, 0)))
def isNested(offset: Int) =
document.textRangeOpt(offset-1, offset+1) exists (Set("{}", "[]", "()", "<>", "\"\"")(_))
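  // e.g. isNested is true when the caret sits exactly between a matching pair such as "()"
  // or "{}", so a quote typed there should still be auto-closed.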
def autoClosingRequired(offset: Int): Boolean =
if (offset < document.length)
      !ch(-1, '\'') && (Character.isWhitespace(document(offset)) || isNested(offset))
else
      !ch(-1, '\'')
}
| romanowski/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/extensions/autoedits/CloseChar.scala | Scala | bsd-3-clause | 1,412 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import java.util.Locale
import scala.collection.mutable
import org.apache.spark.sql.catalyst.expressions.{Ascending, Expression, IntegerLiteral, SortOrder}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.catalyst.trees.CurrentOrigin
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.internal.SQLConf
/**
 * Collection of rules related to hints. The hints currently supported are join strategy
 * hints and coalesce/repartition hints.
 *
 * Note that this is separated into multiple rules because in the future we might introduce new
 * hint rules that have different ordering requirements from join strategies.
*/
object ResolveHints {
/**
* The list of allowed join strategy hints is defined in [[JoinStrategyHint.strategies]], and a
* sequence of relation aliases can be specified with a join strategy hint, e.g., "MERGE(a, c)",
* "BROADCAST(a)". A join strategy hint plan node will be inserted on top of any relation (that
* is not aliased differently), subquery, or common table expression that match the specified
* name.
*
* The hint resolution works by recursively traversing down the query plan to find a relation or
* subquery that matches one of the specified relation aliases. The traversal does not go past
* beyond any view reference, with clause or subquery alias.
*
* This rule must happen before common table expressions.
*/
object ResolveJoinStrategyHints extends Rule[LogicalPlan] {
private val STRATEGY_HINT_NAMES = JoinStrategyHint.strategies.flatMap(_.hintAliases)
private def hintErrorHandler = conf.hintErrorHandler
def resolver: Resolver = conf.resolver
private def createHintInfo(hintName: String): HintInfo = {
HintInfo(strategy =
JoinStrategyHint.strategies.find(
_.hintAliases.map(
_.toUpperCase(Locale.ROOT)).contains(hintName.toUpperCase(Locale.ROOT))))
}
// This method checks if given multi-part identifiers are matched with each other.
// The [[ResolveJoinStrategyHints]] rule is applied before the resolution batch
// in the analyzer and we cannot semantically compare them at this stage.
// Therefore, we follow a simple rule; they match if an identifier in a hint
// is a tail of an identifier in a relation. This process is independent of a session
// catalog (`currentDb` in [[SessionCatalog]]) and it just compares them literally.
//
// For example,
// * in a query `SELECT /*+ BROADCAST(t) */ * FROM db1.t JOIN t`,
// the broadcast hint will match both tables, `db1.t` and `t`,
// even when the current db is `db2`.
// * in a query `SELECT /*+ BROADCAST(default.t) */ * FROM default.t JOIN t`,
// the broadcast hint will match the left-side table only, `default.t`.
private def matchedIdentifier(identInHint: Seq[String], identInQuery: Seq[String]): Boolean = {
if (identInHint.length <= identInQuery.length) {
identInHint.zip(identInQuery.takeRight(identInHint.length))
.forall { case (i1, i2) => resolver(i1, i2) }
} else {
false
}
}
private def extractIdentifier(r: SubqueryAlias): Seq[String] = {
r.identifier.qualifier :+ r.identifier.name
}
private def applyJoinStrategyHint(
plan: LogicalPlan,
relationsInHint: Set[Seq[String]],
relationsInHintWithMatch: mutable.HashSet[Seq[String]],
hintName: String): LogicalPlan = {
// Whether to continue recursing down the tree
var recurse = true
def matchedIdentifierInHint(identInQuery: Seq[String]): Boolean = {
relationsInHint.find(matchedIdentifier(_, identInQuery))
.map(relationsInHintWithMatch.add).nonEmpty
}
val newNode = CurrentOrigin.withOrigin(plan.origin) {
plan match {
case ResolvedHint(u @ UnresolvedRelation(ident, _, _), hint)
if matchedIdentifierInHint(ident) =>
ResolvedHint(u, createHintInfo(hintName).merge(hint, hintErrorHandler))
case ResolvedHint(r: SubqueryAlias, hint)
if matchedIdentifierInHint(extractIdentifier(r)) =>
ResolvedHint(r, createHintInfo(hintName).merge(hint, hintErrorHandler))
case UnresolvedRelation(ident, _, _) if matchedIdentifierInHint(ident) =>
ResolvedHint(plan, createHintInfo(hintName))
case r: SubqueryAlias if matchedIdentifierInHint(extractIdentifier(r)) =>
ResolvedHint(plan, createHintInfo(hintName))
case _: ResolvedHint | _: View | _: With | _: SubqueryAlias =>
// Don't traverse down these nodes.
// For an existing strategy hint, there is no chance for a match from this point down.
// The rest (view, with, subquery) indicates different scopes that we shouldn't traverse
// down. Note that technically when this rule is executed, we haven't completed view
// resolution yet and as a result the view part should be deadcode. I'm leaving it here
// to be more future proof in case we change the view we do view resolution.
recurse = false
plan
case _ =>
plan
}
}
if ((plan fastEquals newNode) && recurse) {
newNode.mapChildren { child =>
applyJoinStrategyHint(child, relationsInHint, relationsInHintWithMatch, hintName)
}
} else {
newNode
}
}
def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperatorsUp {
case h: UnresolvedHint if STRATEGY_HINT_NAMES.contains(h.name.toUpperCase(Locale.ROOT)) =>
if (h.parameters.isEmpty) {
// If there is no table alias specified, apply the hint on the entire subtree.
ResolvedHint(h.child, createHintInfo(h.name))
} else {
// Otherwise, find within the subtree query plans to apply the hint.
val relationNamesInHint = h.parameters.map {
case tableName: String => UnresolvedAttribute.parseAttributeName(tableName)
case tableId: UnresolvedAttribute => tableId.nameParts
case unsupported =>
throw QueryCompilationErrors.joinStrategyHintParameterNotSupportedError(unsupported)
}.toSet
val relationsInHintWithMatch = new mutable.HashSet[Seq[String]]
val applied = applyJoinStrategyHint(
h.child, relationNamesInHint, relationsInHintWithMatch, h.name)
// Filters unmatched relation identifiers in the hint
val unmatchedIdents = relationNamesInHint -- relationsInHintWithMatch
hintErrorHandler.hintRelationsNotFound(h.name, h.parameters, unmatchedIdents)
applied
}
}
}
/**
* COALESCE Hint accepts names "COALESCE", "REPARTITION", and "REPARTITION_BY_RANGE".
*/
object ResolveCoalesceHints extends Rule[LogicalPlan] {
val COALESCE_HINT_NAMES: Set[String] = Set("COALESCE", "REPARTITION", "REPARTITION_BY_RANGE")
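    // Example SQL usage of these hints (illustrative queries):
    //   SELECT /*+ COALESCE(3) */ * FROM t
    //   SELECT /*+ REPARTITION(3) */ * FROM t
    //   SELECT /*+ REPARTITION(c) */ * FROM t
    //   SELECT /*+ REPARTITION_BY_RANGE(3, c) */ * FROM t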
/**
* This function handles hints for "COALESCE" and "REPARTITION".
* The "COALESCE" hint only has a partition number as a parameter. The "REPARTITION" hint
* has a partition number, columns, or both of them as parameters.
*/
private def createRepartition(
shuffle: Boolean, hint: UnresolvedHint): LogicalPlan = {
val hintName = hint.name.toUpperCase(Locale.ROOT)
def createRepartitionByExpression(
numPartitions: Option[Int], partitionExprs: Seq[Any]): RepartitionByExpression = {
val sortOrders = partitionExprs.filter(_.isInstanceOf[SortOrder])
if (sortOrders.nonEmpty) {
throw QueryCompilationErrors.invalidRepartitionExpressionsError(sortOrders)
}
val invalidParams = partitionExprs.filter(!_.isInstanceOf[UnresolvedAttribute])
if (invalidParams.nonEmpty) {
throw QueryCompilationErrors.invalidHintParameterError(hintName, invalidParams)
}
RepartitionByExpression(
partitionExprs.map(_.asInstanceOf[Expression]), hint.child, numPartitions)
}
hint.parameters match {
case Seq(IntegerLiteral(numPartitions)) =>
Repartition(numPartitions, shuffle, hint.child)
case Seq(numPartitions: Int) =>
Repartition(numPartitions, shuffle, hint.child)
// The "COALESCE" hint (shuffle = false) must have a partition number only
case _ if !shuffle =>
throw QueryCompilationErrors.invalidCoalesceHintParameterError(hintName)
case param @ Seq(IntegerLiteral(numPartitions), _*) if shuffle =>
createRepartitionByExpression(Some(numPartitions), param.tail)
case param @ Seq(numPartitions: Int, _*) if shuffle =>
createRepartitionByExpression(Some(numPartitions), param.tail)
case param @ Seq(_*) if shuffle =>
createRepartitionByExpression(None, param)
}
}
/**
* This function handles hints for "REPARTITION_BY_RANGE".
* The "REPARTITION_BY_RANGE" hint must have column names and a partition number is optional.
*/
private def createRepartitionByRange(hint: UnresolvedHint): RepartitionByExpression = {
val hintName = hint.name.toUpperCase(Locale.ROOT)
def createRepartitionByExpression(
numPartitions: Option[Int], partitionExprs: Seq[Any]): RepartitionByExpression = {
val invalidParams = partitionExprs.filter(!_.isInstanceOf[UnresolvedAttribute])
if (invalidParams.nonEmpty) {
throw QueryCompilationErrors.invalidHintParameterError(hintName, invalidParams)
}
val sortOrder = partitionExprs.map {
case expr: SortOrder => expr
case expr: Expression => SortOrder(expr, Ascending)
}
RepartitionByExpression(sortOrder, hint.child, numPartitions)
}
hint.parameters match {
case param @ Seq(IntegerLiteral(numPartitions), _*) =>
createRepartitionByExpression(Some(numPartitions), param.tail)
case param @ Seq(numPartitions: Int, _*) =>
createRepartitionByExpression(Some(numPartitions), param.tail)
case param @ Seq(_*) =>
createRepartitionByExpression(None, param)
}
}
def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperators {
case hint @ UnresolvedHint(hintName, _, _) => hintName.toUpperCase(Locale.ROOT) match {
case "REPARTITION" =>
createRepartition(shuffle = true, hint)
case "COALESCE" =>
createRepartition(shuffle = false, hint)
case "REPARTITION_BY_RANGE" =>
createRepartitionByRange(hint)
case _ => hint
}
}
}
/**
* Removes all the hints, used to remove invalid hints provided by the user.
* This must be executed after all the other hint rules are executed.
*/
class RemoveAllHints extends Rule[LogicalPlan] {
private def hintErrorHandler = conf.hintErrorHandler
def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperatorsUp {
case h: UnresolvedHint =>
hintErrorHandler.hintNotRecognized(h.name, h.parameters)
h.child
}
}
/**
* Removes all the hints when `spark.sql.optimizer.disableHints` is set.
* This is executed at the very beginning of the Analyzer to disable
* the hint functionality.
*/
class DisableHints extends RemoveAllHints {
override def apply(plan: LogicalPlan): LogicalPlan = {
if (conf.getConf(SQLConf.DISABLE_HINTS)) super.apply(plan) else plan
}
}
}
| BryanCutler/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveHints.scala | Scala | apache-2.0 | 12,458 |
/*
* Artificial Intelligence for Humans
* Volume 2: Nature Inspired Algorithms
* Java Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
*
* Copyright 2014 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh.evolutionary.sort
import java.io.Serializable
/**
* Provides base functionality for comparing genomes. Specifically the ability
* to add bonuses and penalties.
*/
@SerialVersionUID(1L)
abstract class AbstractGenomeComparator extends GenomeComparator with Serializable {
override def applyBonus(value: Double, bonus: Double): Double = {
val amount: Double = value * bonus
if (shouldMinimize)
value - amount
else
value + amount
}
override def applyPenalty(value: Double, bonus: Double): Double = {
val amount: Double = value * bonus
if (!shouldMinimize)
value - amount
else
value + amount
}
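  // Worked example (illustrative numbers): with value = 10.0 and bonus = 0.1, applyBonus
  // returns 9.0 when minimizing (a 10% improvement) and 11.0 otherwise; applyPenalty is
  // the mirror image.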
override def isBetterThan(d1: Double, d2: Double): Boolean = {
if (shouldMinimize)
d1 < d2
else
d1 > d2
}
}
| PeterLauris/aifh | vol2/vol2-scala-examples/src/main/scala/com/heatonresearch/aifh/evolutionary/sort/AbstractGenomeComparator.scala | Scala | apache-2.0 | 1758 |
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3
package com.google.protobuf.wrappers
/** Wrapper message for `bytes`.
*
* The JSON representation for `BytesValue` is JSON string.
*
* @param value
* The bytes value.
*/
@SerialVersionUID(0L)
final case class BytesValue(
value: _root_.com.google.protobuf.ByteString = _root_.com.google.protobuf.ByteString.EMPTY,
unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[BytesValue] {
@transient
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
{
val __value = value
if (!__value.isEmpty) {
__size += _root_.com.google.protobuf.CodedOutputStream.computeBytesSize(1, __value)
}
};
__size += unknownFields.serializedSize
__size
}
override def serializedSize: _root_.scala.Int = {
var __size = __serializedSizeMemoized
if (__size == 0) {
__size = __computeSerializedSize() + 1
__serializedSizeMemoized = __size
}
__size - 1
}
def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
{
val __v = value
if (!__v.isEmpty) {
_output__.writeBytes(1, __v)
}
};
unknownFields.writeTo(_output__)
}
def withValue(__v: _root_.com.google.protobuf.ByteString): BytesValue = copy(value = __v)
def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
(__fieldNumber: @_root_.scala.unchecked) match {
case 1 => {
val __t = value
if (__t != _root_.com.google.protobuf.ByteString.EMPTY) __t else null
}
}
}
def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
_root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
(__field.number: @_root_.scala.unchecked) match {
case 1 => _root_.scalapb.descriptors.PByteString(value)
}
}
def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
def companion: com.google.protobuf.wrappers.BytesValue.type = com.google.protobuf.wrappers.BytesValue
// @@protoc_insertion_point(GeneratedMessage[google.protobuf.BytesValue])
}
object BytesValue extends scalapb.GeneratedMessageCompanion[com.google.protobuf.wrappers.BytesValue] {
implicit def messageCompanion: scalapb.GeneratedMessageCompanion[com.google.protobuf.wrappers.BytesValue] = this
def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): com.google.protobuf.wrappers.BytesValue = {
var __value: _root_.com.google.protobuf.ByteString = _root_.com.google.protobuf.ByteString.EMPTY
var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
var _done__ = false
while (!_done__) {
val _tag__ = _input__.readTag()
_tag__ match {
case 0 => _done__ = true
case 10 =>
__value = _input__.readBytes()
case tag =>
if (_unknownFields__ == null) {
_unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
}
_unknownFields__.parseField(tag, _input__)
}
}
com.google.protobuf.wrappers.BytesValue(
value = __value,
unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
)
}
implicit def messageReads: _root_.scalapb.descriptors.Reads[com.google.protobuf.wrappers.BytesValue] = _root_.scalapb.descriptors.Reads{
case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
_root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
com.google.protobuf.wrappers.BytesValue(
value = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.com.google.protobuf.ByteString]).getOrElse(_root_.com.google.protobuf.ByteString.EMPTY)
)
case _ => throw new RuntimeException("Expected PMessage")
}
def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = WrappersProto.javaDescriptor.getMessageTypes().get(8)
def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = WrappersProto.scalaDescriptor.messages(8)
def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = throw new MatchError(__number)
lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
lazy val defaultInstance = com.google.protobuf.wrappers.BytesValue(
value = _root_.com.google.protobuf.ByteString.EMPTY
)
implicit class BytesValueLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, com.google.protobuf.wrappers.BytesValue]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, com.google.protobuf.wrappers.BytesValue](_l) {
def value: _root_.scalapb.lenses.Lens[UpperPB, _root_.com.google.protobuf.ByteString] = field(_.value)((c_, f_) => c_.copy(value = f_))
}
final val VALUE_FIELD_NUMBER = 1
def of(
value: _root_.com.google.protobuf.ByteString
): _root_.com.google.protobuf.wrappers.BytesValue = _root_.com.google.protobuf.wrappers.BytesValue(
value
)
// @@protoc_insertion_point(GeneratedMessageCompanion[google.protobuf.BytesValue])
}
| scalapb/ScalaPB | scalapb-runtime/src/main/js-native/com/google/protobuf/wrappers/BytesValue.scala | Scala | apache-2.0 | 5,964 |
package com.twitter.finagle.exp.zookeeper.connection
import com.twitter.finagle.Status
import com.twitter.finagle.exp.zookeeper.{RepPacket, ReqPacket}
import com.twitter.finagle.{Service, ServiceFactory}
import com.twitter.util.{Duration, Future, Time}
import java.util.concurrent.atomic.AtomicBoolean
/**
 * Connection manages a ServiceFactory in charge of serving requests to the server.
 *
 * @param serviceFactory the current connection to the server
*/
class Connection(serviceFactory: ServiceFactory[ReqPacket, RepPacket]) {
val isValid = new AtomicBoolean(true)
private[this] var service: Future[Service[ReqPacket, RepPacket]] =
serviceFactory.apply()
/**
* Close current service and ServiceFactory
*
* @return Future.Done
*/
def close(): Future[Unit] = {
service flatMap { svc =>
isValid.set(false)
if (svc.isAvailable && serviceFactory.isAvailable) {
svc.close() before serviceFactory.close()
} else if (svc.isAvailable && !serviceFactory.isAvailable) {
svc.close()
} else if (!svc.isAvailable && serviceFactory.isAvailable) {
serviceFactory.close()
} else {
Future.Done
}
}
}
def close(time: Time): Future[Unit] = {
service flatMap { svc =>
isValid.set(false)
if (svc.isAvailable && serviceFactory.isAvailable) {
svc.close(time) before serviceFactory.close(time)
} else if (svc.isAvailable && !serviceFactory.isAvailable) {
svc.close(time)
} else if (!svc.isAvailable && serviceFactory.isAvailable) {
serviceFactory.close(time)
} else {
Future.Done
}
}
}
def close(duration: Duration): Future[Unit] = {
service flatMap { svc =>
isValid.set(false)
if (svc.isAvailable && serviceFactory.isAvailable) {
svc.close(duration) before serviceFactory.close(duration)
} else if (svc.isAvailable && !serviceFactory.isAvailable) {
svc.close(duration)
} else if (!svc.isAvailable && serviceFactory.isAvailable) {
serviceFactory.close(duration)
} else {
Future.Done
}
}
}
def serviceFactoryStatus: Status = serviceFactory.status
def isServiceFactoryAvailable: Boolean = serviceFactory.isAvailable
def isServiceAvailable: Future[Boolean] = service flatMap
(svc => Future(svc.isAvailable))
def newService(): Future[Unit] = this.synchronized {
serviceFactory.apply() flatMap { serv =>
service = Future(serv)
Future.Done
}
}
def serve(req: ReqPacket): Future[RepPacket] = service flatMap (_(req))
}
private[finagle] object Connection {
class NoConnectionAvailable(msg: String) extends RuntimeException(msg)
}
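/*
 * Usage sketch (hypothetical identifiers; assumes a ServiceFactory obtained
 * from a configured finagle-zookeeper client):
 *
 *   val conn = new Connection(factory)
 *   val rep: Future[RepPacket] = conn.serve(reqPacket)
 *   rep.ensure { conn.close() }
 */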
| finagle/finagle-zookeeper | core/src/main/scala/com/twitter/finagle/exp/zookeeper/connection/Connection.scala | Scala | apache-2.0 | 2,697 |
package org.jetbrains.plugins.scala.lang.typeInference
package generated
class TypeInferenceExpectedPlaceholderTest extends TypeInferenceTestBase {
//This class was generated by build script, please don't change this
override def folderPath: String = super.folderPath + "expected/placeholder/"
def testAmbiguousFunctions() {doTest()}
def testFooPlusFoo() {doTest()}
def testGenericFunction() {doTest()}
def testGenericTypedPlaceholder() {doTest()}
def testIfStatementComplex() {doTest()}
def testIfStatementSimple() {doTest()}
def testInnerImplicits() {doTest()}
def testIntSum() {doTest()}
def testListOfSeqs() {doTest()}
def testLongCalls() {doTest()}
def testLongInfix() {doTest()}
def testMethodCall() {doTest()}
def testMethodCallParams() {doTest()}
def testNamedParams() {doTest()}
def testPrefixedReference() {doTest()}
def testSCL1734() {doTest()}
def testTypeConstructor() {doTest()}
def testTypeConstructor2() {doTest()}
def testTypeConstructor3() {doTest()}
def testTypedPlaceholder() {doTest()}
def testUppercase() {doTest()}
} | katejim/intellij-scala | test/org/jetbrains/plugins/scala/lang/typeInference/generated/TypeInferenceExpectedPlaceholderTest.scala | Scala | apache-2.0 | 1,107 |
package com.blinkbox.books.marvin.magrathea.api
import java.net.URL
import java.util.UUID
import com.blinkbox.books.config.ApiConfig
import com.blinkbox.books.marvin.magrathea.message.{DocumentDao, Revision}
import com.blinkbox.books.marvin.magrathea.{History, SchemaConfig, TestHelper}
import com.blinkbox.books.spray.v2.Error
import com.blinkbox.books.test.MockitoSyrup
import org.json4s.JsonAST.{JNothing, JValue}
import org.json4s.JsonDSL._
import org.junit.runner.RunWith
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpecLike, Matchers}
import spray.http.AllOrigins
import spray.http.HttpHeaders.`Access-Control-Allow-Origin`
import spray.http.StatusCodes._
import spray.routing.HttpService
import spray.testkit.ScalatestRouteTest
import scala.concurrent.Future
import scala.concurrent.duration._
@RunWith(classOf[JUnitRunner])
class RestApiTest extends FlatSpecLike with ScalatestRouteTest with HttpService
with MockitoSyrup with Matchers with TestHelper {
implicit val actorRefFactory = system
implicit val routeTestTimeout = RouteTestTimeout(5.seconds)
val config = mock[ApiConfig]
when(config.localUrl).thenReturn(new URL("http://localhost"))
when(config.externalUrl).thenReturn(new URL("http://localhost"))
when(config.timeout).thenReturn(5.seconds)
val schemas = mock[SchemaConfig]
when(schemas.book).thenReturn("ingestion.book.metadata.v2")
when(schemas.contributor).thenReturn("ingestion.contributor.metadata.v2")
val indexService = mock[IndexService]
val documentDao = mock[DocumentDao]
val routes = new RestApi(config, schemas, documentDao, indexService).routes
behavior of "The API"
it should "return 200 with the requested book, if it exists" in {
val book = sampleBook()
when(documentDao.getCurrentDocumentById(any[UUID], any[Option[String]])).thenReturn(
Future.successful(Some(current(book))))
Get(s"/books/$generateId") ~> routes ~> check {
status shouldEqual OK
body.asString shouldEqual compact(render(book))
}
}
it should "return 400 if the book id is not a UUID" in {
Get("/books/xxx") ~> routes ~> check {
status shouldEqual BadRequest
}
}
it should "return 404 if the book does not exist" in {
when(documentDao.getCurrentDocumentById(any[UUID], any[Option[String]])).thenReturn(
Future.successful(None))
Get(s"/books/$generateId") ~> routes ~> check {
status shouldEqual NotFound
}
}
it should "return 200 and list the book's revision list" in {
when(documentDao.getDocumentHistory(any[UUID], any[String])).thenReturn(Future.successful(List(
history(sampleBook("fieldA" -> "valueA")),
history(sampleBook("fieldB" -> "valueB")),
history(sampleBook("fieldA" -> "test"))
)))
val changed0: JValue = "fieldA" -> "test"
val added1: JValue = "fieldB" -> "valueB"
val added2: JValue = "fieldA" -> "valueA"
Get(s"/books/$generateId/history") ~> routes ~> check {
status shouldEqual OK
val resp = responseAs[List[Revision]]
resp.size shouldEqual 3
List(resp(0).added, resp(0).changed, resp(0).deleted) shouldEqual List(JNothing, changed0, JNothing)
List(resp(1).added, resp(1).changed, resp(1).deleted) shouldEqual List(added1, JNothing, JNothing)
List(resp(2).added, resp(2).changed, resp(2).deleted) shouldEqual List(added2, JNothing, JNothing)
}
}
it should "return 400 getting the history of a book with an invalid id" in {
Get("/books/xxx/history") ~> routes ~> check {
status shouldEqual BadRequest
}
}
it should "return 404 getting the history of a book that does not exist" in {
when(documentDao.getDocumentHistory(any[UUID], any[String])).thenReturn(Future.successful(List.empty[History]))
Get(s"/books/$generateId/history") ~> routes ~> check {
status shouldEqual NotFound
}
}
it should "return 200 and list the contributor's revision list" in {
when(documentDao.getDocumentHistory(any[UUID], any[String])).thenReturn(Future.successful(List(
history(sampleContributor("fieldA" -> "valueA")),
history(sampleContributor("fieldB" -> "valueB")),
history(sampleContributor("fieldA" -> "test"))
)))
val changed0: JValue = "fieldA" -> "test"
val added1: JValue = "fieldB" -> "valueB"
val added2: JValue = "fieldA" -> "valueA"
Get(s"/contributors/$generateId/history") ~> routes ~> check {
status shouldEqual OK
val resp = responseAs[List[Revision]]
resp.size shouldEqual 3
List(resp(0).added, resp(0).changed, resp(0).deleted) shouldEqual List(JNothing, changed0, JNothing)
List(resp(1).added, resp(1).changed, resp(1).deleted) shouldEqual List(added1, JNothing, JNothing)
List(resp(2).added, resp(2).changed, resp(2).deleted) shouldEqual List(added2, JNothing, JNothing)
}
}
it should "return 400 getting the history of a contributor with an invalid id" in {
Get("/contributors/xxx/history") ~> routes ~> check {
status shouldEqual BadRequest
}
}
it should "return 404 getting the history of a contributor that does not exist" in {
when(documentDao.getDocumentHistory(any[UUID], any[String])).thenReturn(Future.successful(List.empty[History]))
Get(s"/contributors/$generateId/history") ~> routes ~> check {
status shouldEqual NotFound
}
}
it should "return 200 and re-index a book, if it exists" in {
when(indexService.reIndexCurrentDocument(any[UUID], any[String])).thenReturn(Future.successful(true))
Put(s"/books/$generateId/reindex") ~> routes ~> check {
status shouldEqual OK
}
}
it should "return 400 if the requested book's id to re-index is not a UUID" in {
Put("/books/xxx/reindex") ~> routes ~> check {
status shouldEqual BadRequest
}
}
it should "return 404 if the requested book to re-index does not exist" in {
when(indexService.reIndexCurrentDocument(any[UUID], any[String])).thenReturn(Future.successful(false))
Put(s"/books/$generateId/reindex") ~> routes ~> check {
status shouldEqual NotFound
}
}
it should "return 200 with the requested contributor, if it exists" in {
val contributor = sampleContributor()
when(documentDao.getCurrentDocumentById(any[UUID], any[Option[String]])).thenReturn(
Future.successful(Some(current(contributor))))
Get(s"/contributors/$generateId") ~> routes ~> check {
status shouldEqual OK
body.asString shouldEqual compact(render(contributor))
}
}
it should "return 400 if the contributor id is not a UUID" in {
Get("/contributors/xxx") ~> routes ~> check {
status shouldEqual BadRequest
}
}
it should "return 404 if the contributor does not exist" in {
when(documentDao.getCurrentDocumentById(any[UUID], any[Option[String]])).thenReturn(
Future.successful(None))
Get(s"/contributors/$generateId") ~> routes ~> check {
status shouldEqual NotFound
}
}
it should "return 200 and re-index a contributor, if it exists" in {
when(indexService.reIndexCurrentDocument(any[UUID], any[String])).thenReturn(Future.successful(true))
Put(s"/contributors/$generateId/reindex") ~> routes ~> check {
status shouldEqual OK
}
}
it should "return 400 if the requested contributor's id to re-index is not a UUID" in {
Put("/contributors/xxx/reindex") ~> routes ~> check {
status shouldEqual BadRequest
}
}
it should "return 404 if the requested contributor to re-index does not exist" in {
when(indexService.reIndexCurrentDocument(any[UUID], any[String])).thenReturn(Future.successful(false))
Put(s"/contributors/$generateId/reindex") ~> routes ~> check {
status shouldEqual NotFound
}
}
it should "include CORS headers" in {
Get("/books/xxx") ~> routes ~> check {
header("Access-Control-Allow-Origin") shouldEqual Some(`Access-Control-Allow-Origin`(AllOrigins))
}
}
it should "throw a json exception for a bad request" in {
Get("/search?q=foo&count=string") ~> routes ~> check {
status shouldEqual BadRequest
responseAs[Error].code shouldEqual "BadRequest"
}
}
}
| blinkboxbooks/magrathea | src/test/scala/com/blinkbox/books/marvin/magrathea/api/RestApiTest.scala | Scala | mit | 8,232 |
// Copyright 2017 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Decoupled frontend implementation with efficient misaligned 32-bit fetch
package bottlerocket
import chisel3._
import chisel3.core.withClock
import chisel3.util.{Decoupled,Valid,Cat}
import freechips.rocketchip.amba.axi4.{AXI4Parameters}
import Params._
class FrontendReq extends Bundle {
val pc = Input(UInt(xBitwidth))
val redirect = Input(Bool())
val enter_U_mode = Input(Bool())
val exit_U_mode = Input(Bool())
}
class FrontendResp extends Bundle {
val pc = Output(UInt(xBitwidth))
val inst = Output(UInt(instBitwidth))
val error = Output(Bool())
}
class FrontendBuffer(options: BROptions) extends Module {
val io = IO(new Bundle{
val outstanding = Output(Bool())
val sleeping = Input(Bool())
val req = new FrontendReq
val resp = Decoupled(new FrontendResp)
val bus = AXI4LiteBundle(axiParams)
})
def wordAddress(x: UInt) = x & ~UInt(3, width = xBitwidth)
def isRVC(i: UInt) = i(1,0) =/= UInt(3)
def isWordAligned(a: UInt) = a(1,0) === UInt(0)
def min(a: UInt, b: UInt) = Mux(a < b, a, b)
// Hold reqvalid low during sleep
val reqvalid_ungated = Wire(Bool())
io.bus.ar.valid := reqvalid_ungated && !io.sleeping
// These two registers track issued bus requests -> must be gated with bus clock, if different
val n_pending = Reg(UInt(3.W), init = UInt(0))
val last_req_addr = Reg(UInt(xBitwidth))
io.outstanding := n_pending =/= UInt(0)
val n_to_drop = Reg(UInt(3.W), init = UInt(0))
val true_pending = n_pending - n_to_drop
val buf_vec = Reg(Vec(4, UInt(16.W)))
val err_vec = Reg(Vec(4, Bool()))
val buf_base = Reg(UInt(xBitwidth), init = UInt(options.resetVec))
val buf_size = Reg(UInt(4.W), init = UInt(0)) // SIZE IS IN BYTES
val buf_head = Reg(UInt(2.W), init = UInt(0)) // PTRS ARE HALFWORD INDICES
val buf_tail = Reg(UInt(2.W), init = UInt(0)) // PTRS ARE HALFWORD INDICES
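  // The instruction buffer holds at most 8 bytes. Treat it as full whenever the
  // bytes already buffered plus 4 bytes per genuinely pending fetch could leave
  // less than a full word free, or the pending counter is about to saturate.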
val buffer_full = (buf_size + (true_pending << 2)) > UInt(4) || n_pending === UInt(7)
val clear_buffer = Wire(Bool())
val drop_outstanding = Wire(Bool())
val expected_bus_fetch_valid = io.bus.r.valid && n_to_drop === UInt(0)
val head_halfword = buf_vec(buf_head)
val next_halfword = buf_vec(buf_head + UInt(1))
val jumped_to_halfword_aligned = Reg(init = Bool(false))
val n_useful_bus_bytes = Mux(jumped_to_halfword_aligned, UInt(2), UInt(4))
val bus_first_halfword = Mux(jumped_to_halfword_aligned, io.bus.r.bits.data(31,16), io.bus.r.bits.data(15,0))
val bus_second_halfword = Mux(jumped_to_halfword_aligned, io.bus.r.bits.data(15,0), io.bus.r.bits.data(31,16))
val hold_reset = Reg(init = Bool(true))
hold_reset := Bool(false)
// constant AXI4Lite fields
io.bus.ar.bits.cache := AXI4Parameters.CACHE_BUFFERABLE
io.bus.r.ready := Bool(true)
// zero write channel signals
io.bus.aw.valid := false.B
io.bus.aw.bits.addr := 0.U
io.bus.aw.bits.cache := 0.U
io.bus.aw.bits.prot := 0.U
io.bus.w.valid := false.B
io.bus.w.bits.data := 0.U
io.bus.w.bits.strb := 0.U
io.bus.b.ready := true.B
// Sometimes redirects go to halfword-aligned addresses
when (io.req.redirect) {
jumped_to_halfword_aligned := !isWordAligned(io.req.pc)
} .elsewhen (expected_bus_fetch_valid) {
jumped_to_halfword_aligned := Bool(false)
}
// Record last requested address
when (io.bus.ar.ready && io.bus.ar.valid) {
last_req_addr := io.bus.ar.bits.addr
}
// Privilege modes
val bus_prot = Reg(init = AXI4Parameters.PROT_PRIVILEDGED)
when (io.req.enter_U_mode) {
bus_prot := AXI4Parameters.PROT_INSTRUCTION
} .elsewhen (io.req.exit_U_mode) {
bus_prot := AXI4Parameters.PROT_PRIVILEDGED
}
io.bus.ar.bits.prot := bus_prot
// two main behaviors: branch/jump/exception/etc or sequential code
// ALL privilege level changes are also redirects, so this handles flushing
when (hold_reset) {
reqvalid_ungated := Bool(false)
io.bus.ar.bits.addr := wordAddress(io.req.pc)
clear_buffer := Bool(false)
drop_outstanding := Bool(false)
} .elsewhen (io.req.redirect) {
reqvalid_ungated := Bool(true)
io.bus.ar.bits.addr := wordAddress(io.req.pc)
clear_buffer := Bool(true)
drop_outstanding := Bool(true)
} .otherwise {
reqvalid_ungated := !buffer_full
io.bus.ar.bits.addr := Mux(true_pending > UInt(0), last_req_addr + UInt(4), wordAddress(buf_base + buf_size))
clear_buffer := Bool(false)
drop_outstanding := Bool(false)
}
// outstanding / to-drop transaction counters
// Never more than one USEFUL outstanding transaction!
val n_pending_next = n_pending +
Mux(io.bus.ar.ready && io.bus.ar.valid, UInt(1), UInt(0)) -
Mux(io.bus.r.valid, UInt(1), UInt(0))
n_pending := n_pending_next
when (drop_outstanding) {
n_to_drop := n_pending - Mux(io.bus.r.valid, UInt(1), UInt(0))
} .elsewhen (n_to_drop =/= UInt(0) && io.bus.r.valid) {
n_to_drop := n_to_drop - UInt(1)
}
// buffer control path
when (clear_buffer) {
buf_size := UInt(0)
buf_base := io.req.pc
buf_tail := UInt(0)
buf_head := UInt(0)
} .otherwise {
val resp_inst_size = Mux(isRVC(io.resp.bits.inst), UInt(2), UInt(4))
val base_diff_bytes = Mux(io.resp.fire, resp_inst_size, UInt(0))
val end_diff_bytes = Mux(expected_bus_fetch_valid, n_useful_bus_bytes, UInt(0))
val head_diff = base_diff_bytes >> 1
val tail_diff = end_diff_bytes >> 1
buf_head := buf_head + head_diff
buf_base := buf_base + base_diff_bytes
buf_tail := buf_tail + tail_diff
buf_size := min(buf_size + end_diff_bytes - base_diff_bytes, UInt(8))
}
val busHasError = io.bus.r.valid && (io.bus.r.bits.resp === AXI4Parameters.RESP_SLVERR || io.bus.r.bits.resp === AXI4Parameters.RESP_DECERR)
// buffer refill writes:
// All replies are already filtered with 'expected_bus_fetch_valid'
// Therefore, when bus reply appears, take it!
when (expected_bus_fetch_valid) {
buf_vec(buf_tail) := bus_first_halfword
// Second half is only undesired after a branch
// In this case, all buffer contents are garbage, so writing wastefully is fine
buf_vec(buf_tail + UInt(1)) := bus_second_halfword
err_vec(buf_tail) := busHasError
err_vec(buf_tail + UInt(1)) := busHasError
}
// reply management
io.resp.bits.pc := buf_base
when (buf_size === UInt(0)) {
io.resp.valid := expected_bus_fetch_valid && (!jumped_to_halfword_aligned || isRVC(bus_first_halfword))
io.resp.bits.inst := Cat(bus_second_halfword, bus_first_halfword)
io.resp.bits.error := busHasError
} .elsewhen (buf_size === UInt(2) && !isRVC(buf_vec(buf_head))) {
io.resp.valid := expected_bus_fetch_valid
io.resp.bits.inst := Cat(bus_first_halfword, head_halfword)
io.resp.bits.error := busHasError || err_vec(buf_head)
} .otherwise {
io.resp.valid := Bool(true)
io.resp.bits.inst := Cat(next_halfword, head_halfword)
io.resp.bits.error := err_vec(buf_head) || err_vec(buf_head + UInt(1))
}
}
| google/bottlerocket | src/main/scala/bottlerocket/FrontendBuffer.scala | Scala | apache-2.0 | 7,514 |
/*
* Copyright (c) 2015 Robert Conrad - All Rights Reserved.
* Unauthorized copying of this file, via any medium is strictly prohibited.
* This file is proprietary and confidential.
* Last modified by rconrad, 1/3/15 6:43 PM
*/
package base.socket.json
import base.socket.message.CommandSerializer
import org.json4s.DefaultFormats
/**
 * Collects the json4s Formats used for socket message (de)serialization:
 * the plain json4s defaults, plus a variant extended with a CommandSerializer
 * so that command messages can be read and written as JSON.
* @author rconrad
*/
object JsonFormats {
val default = DefaultFormats
val defaultWithCommands = DefaultFormats + new CommandSerializer
}
| robconrad/base-api | project-socket/src/main/scala/base/socket/json/JsonFormats.scala | Scala | mit | 631 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.tree.impl
import org.apache.spark.ml.tree.{ContinuousSplit, Split}
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.rdd.RDD
/**
* Internal representation of LabeledPoint for DecisionTree.
 * This bins feature values based on a subsample of the data as follows:
* (a) Continuous features are binned into ranges.
* (b) Unordered categorical features are binned based on subsets of feature values.
* "Unordered categorical features" are categorical features with low arity used in
* multiclass classification.
* (c) Ordered categorical features are binned based on feature values.
* "Ordered categorical features" are categorical features with high arity,
* or any categorical feature used in regression or binary classification.
*
* @param label Label from LabeledPoint
* @param binnedFeatures Binned feature values.
* Same length as LabeledPoint.features, but values are bin indices.
*/
private[spark] class TreePoint(val label: Double, val binnedFeatures: Array[Int])
extends Serializable {
}
private[spark] object TreePoint {
/**
* Convert an input dataset into its TreePoint representation,
* binning feature values in preparation for DecisionTree training.
* @param input Input dataset.
* @param splits Splits for features, of size (numFeatures, numSplits).
* @param metadata Learning and dataset metadata
* @return TreePoint dataset representation
*/
def convertToTreeRDD(
input: RDD[LabeledPoint],
splits: Array[Array[Split]],
metadata: DecisionTreeMetadata): RDD[TreePoint] = {
// Construct arrays for featureArity for efficiency in the inner loop.
val featureArity: Array[Int] = new Array[Int](metadata.numFeatures)
var featureIndex = 0
while (featureIndex < metadata.numFeatures) {
featureArity(featureIndex) = metadata.featureArity.getOrElse(featureIndex, 0)
featureIndex += 1
}
val thresholds: Array[Array[Double]] = featureArity.zipWithIndex.map { case (arity, idx) =>
if (arity == 0) {
splits(idx).map(_.asInstanceOf[ContinuousSplit].threshold)
} else {
Array.empty[Double]
}
}
input.map { x =>
TreePoint.labeledPointToTreePoint(x, thresholds, featureArity)
}
}
/**
* Convert one LabeledPoint into its TreePoint representation.
* @param thresholds For each feature, split thresholds for continuous features,
* empty for categorical features.
* @param featureArity Array indexed by feature, with value 0 for continuous and numCategories
* for categorical features.
*/
private def labeledPointToTreePoint(
labeledPoint: LabeledPoint,
thresholds: Array[Array[Double]],
featureArity: Array[Int]): TreePoint = {
val numFeatures = labeledPoint.features.size
val arr = new Array[Int](numFeatures)
var featureIndex = 0
while (featureIndex < numFeatures) {
arr(featureIndex) =
findBin(featureIndex, labeledPoint, featureArity(featureIndex), thresholds(featureIndex))
featureIndex += 1
}
new TreePoint(labeledPoint.label, arr)
}
/**
* Find discretized value for one (labeledPoint, feature).
*
* NOTE: We cannot use Bucketizer since it handles split thresholds differently than the old
* (mllib) tree API. We want to maintain the same behavior as the old tree API.
*
* @param featureArity 0 for continuous features; number of categories for categorical features.
*/
private def findBin(
featureIndex: Int,
labeledPoint: LabeledPoint,
featureArity: Int,
thresholds: Array[Double]): Int = {
val featureValue = labeledPoint.features(featureIndex)
if (featureArity == 0) {
val idx = java.util.Arrays.binarySearch(thresholds, featureValue)
if (idx >= 0) {
idx
} else {
-idx - 1
}
} else {
// Categorical feature bins are indexed by feature values.
if (featureValue < 0 || featureValue >= featureArity) {
throw new IllegalArgumentException(
s"DecisionTree given invalid data:" +
s" Feature $featureIndex is categorical with values in {0,...,${featureArity - 1}," +
s" but a data point gives it value $featureValue.\\n" +
" Bad data point: " + labeledPoint.toString)
}
featureValue.toInt
}
}
}
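/**
 * A standalone sketch (not part of the original file) of the binarySearch
 * arithmetic used by findBin for continuous features: when the value is not an
 * exact threshold, binarySearch returns -(insertionPoint) - 1, so -idx - 1
 * recovers the index of the first threshold greater than the value, which is
 * exactly the bin index.
 */
private[spark] object FindBinSketch {
  def continuousBin(thresholds: Array[Double], featureValue: Double): Int = {
    val idx = java.util.Arrays.binarySearch(thresholds, featureValue)
    if (idx >= 0) idx else -idx - 1
  }

  def main(args: Array[String]): Unit = {
    val thresholds = Array(1.0, 2.0, 3.0)
    assert(continuousBin(thresholds, 2.5) == 2) // falls in the range (2.0, 3.0]
    assert(continuousBin(thresholds, 2.0) == 1) // an exact threshold keeps its own index
    assert(continuousBin(thresholds, 0.5) == 0) // below all thresholds -> first bin
  }
}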
| xieguobin/Spark_2.0.0_cn1 | ml/tree/impl/TreePoint.scala | Scala | apache-2.0 | 5,278 |
package io.iohk.ethereum.faucet.jsonrpc
import akka.actor.ActorSystem
import akka.http.scaladsl.model.Uri
import io.circe.syntax._
import akka.util.ByteString
import io.iohk.ethereum.domain.Address
import io.iohk.ethereum.jsonrpc.client.RpcClient
import io.iohk.ethereum.jsonrpc.client.RpcClient.RpcError
import io.iohk.ethereum.security.SSLError
import io.iohk.ethereum.utils.Logger
import javax.net.ssl.SSLContext
import monix.eval.Task
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.Duration
class WalletRpcClient(node: Uri, timeout: Duration, getSSLContext: () => Either[SSLError, SSLContext])(implicit
system: ActorSystem,
ec: ExecutionContext
) extends RpcClient(node, timeout, getSSLContext)
with Logger {
import io.iohk.ethereum.jsonrpc.client.CommonJsonCodecs._
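  // "latest" below is the block tag argument of eth_getTransactionCount: the
  // nonce is reported as of the newest block known to the node.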
def getNonce(address: Address): Task[Either[RpcError, BigInt]] =
doRequest[BigInt]("eth_getTransactionCount", List(address.asJson, "latest".asJson))
def sendTransaction(rawTx: ByteString): Task[Either[RpcError, ByteString]] =
doRequest[ByteString]("eth_sendRawTransaction", List(rawTx.asJson))
}
| input-output-hk/etc-client | src/main/scala/io/iohk/ethereum/faucet/jsonrpc/WalletRpcClient.scala | Scala | mit | 1,125 |
/*
mcmc-stream.scala
*/
import breeze.linalg._
import breeze.plot._
import breeze.stats.distributions._
import breeze.stats.meanAndVariance
import annotation.tailrec
object MCMC {
def mcmcSummary(dv: DenseVector[Double]): Figure = {
val len = dv.length
val mav = meanAndVariance(dv)
val mean = mav.mean
val variance = mav.variance
println(s"Iters=$len, Mean=$mean, variance=$variance")
val f = Figure("MCMC Summary")
f.height = 1000
f.width = 1200
val p0 = f.subplot(1, 2, 0)
p0 += plot(linspace(1, len, len), dv)
p0.xlabel = "Iteration"
p0.ylabel = "Value"
p0.title = "Trace plot"
val p1 = f.subplot(1, 2, 1)
p1 += hist(dv, 100)
p1.xlabel = "Value"
p1.title = "Marginal density"
f
}
def time[A](f: => A) = {
val s = System.nanoTime
val ret = f
println("time: " + (System.nanoTime - s) / 1e6 + "ms")
ret
}
def metrop1(n: Int = 1000, eps: Double = 0.5): DenseVector[Double] = {
val vec = DenseVector.fill(n)(0.0)
var x = 0.0
var oldll = Gaussian(0.0, 1.0).logPdf(x)
vec(0) = x
(1 until n).foreach { i =>
val can = x + Uniform(-eps, eps).draw
val loglik = Gaussian(0.0, 1.0).logPdf(can)
val loga = loglik - oldll
if (math.log(Uniform(0.0, 1.0).draw) < loga) {
x = can
oldll = loglik
}
vec(i) = x
}
vec
}
def metrop2(n: Int = 1000, eps: Double = 0.5): Unit = {
var x = 0.0
var oldll = Gaussian(0.0, 1.0).logPdf(x)
(1 to n).foreach { i =>
val can = x + Uniform(-eps, eps).draw
val loglik = Gaussian(0.0, 1.0).logPdf(can)
val loga = loglik - oldll
if (math.log(Uniform(0.0, 1.0).draw) < loga) {
x = can
oldll = loglik
}
println(x)
}
}
@tailrec
def metrop3(n: Int = 1000, eps: Double = 0.5, x: Double = 0.0, oldll: Double = Double.MinValue): Unit = {
if (n > 0) {
println(x)
val can = x + Uniform(-eps, eps).draw
val loglik = Gaussian(0.0, 1.0).logPdf(can)
val loga = loglik - oldll
if (math.log(Uniform(0.0, 1.0).draw) < loga)
metrop3(n - 1, eps, can, loglik)
else
metrop3(n - 1, eps, x, oldll)
}
}
@tailrec
def metrop4(n: Int = 1000, eps: Double = 0.5, x: Double = 0.0, oldll: Double = Double.MinValue, acc: List[Double] = Nil): DenseVector[Double] = {
if (n == 0)
DenseVector(acc.reverse.toArray)
else {
val can = x + Uniform(-eps, eps).draw
val loglik = Gaussian(0.0, 1.0).logPdf(can)
val loga = loglik - oldll
if (math.log(Uniform(0.0, 1.0).draw) < loga)
metrop4(n - 1, eps, can, loglik, can :: acc)
else
metrop4(n - 1, eps, x, oldll, x :: acc)
}
}
def newState(x: Double, oldll: Double, eps: Double): (Double, Double) = {
val can = x + Uniform(-eps, eps).draw
val loglik = Gaussian(0.0, 1.0).logPdf(can)
val loga = loglik - oldll
if (math.log(Uniform(0.0, 1.0).draw) < loga) (can, loglik) else (x, oldll)
}
@tailrec
def metrop5(n: Int = 1000, eps: Double = 0.5, x: Double = 0.0, oldll: Double = Double.MinValue): Unit = {
if (n > 0) {
println(x)
val ns = newState(x, oldll, eps)
metrop5(n - 1, eps, ns._1, ns._2)
}
}
@tailrec
def metrop6(n: Int = 1000, eps: Double = 0.5, x: Double = 0.0, oldll: Double = Double.MinValue, acc: List[Double] = Nil): DenseVector[Double] = {
if (n == 0) DenseVector(acc.reverse.toArray) else {
val ns = newState(x, oldll, eps)
metrop6(n - 1, eps, ns._1, ns._2, ns._1 :: acc)
}
}
def nextState(eps: Double)(state: (Double, Double)): (Double, Double) = {
val x = state._1
val oldll = state._2
val can = x + Uniform(-eps, eps).draw
val loglik = Gaussian(0.0, 1.0).logPdf(can)
val loga = loglik - oldll
if (math.log(Uniform(0.0, 1.0).draw) < loga) (can, loglik) else (x, oldll)
}
def metrop7(eps: Double = 0.5, x: Double = 0.0, oldll: Double = Double.MinValue): Stream[Double] =
Stream.iterate((x, oldll))(nextState(eps)) map (_._1)
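  // Thinning reduces autocorrelation by keeping only every th-th draw,
  // e.g. thin(Stream.from(1), 3) yields 3, 6, 9, ...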
def thin[T](s: Stream[T], th: Int): Stream[T] = {
val ss = s.drop(th - 1)
if (ss.isEmpty) Stream.empty else
ss.head #:: thin(ss.tail, th)
}
def kernel(x: Double): Rand[Double] = for {
innov <- Uniform(-0.5, 0.5)
can = x + innov
oldll = Gaussian(0.0, 1.0).logPdf(x)
loglik = Gaussian(0.0, 1.0).logPdf(can)
loga = loglik - oldll
u <- Uniform(0.0, 1.0)
} yield if (math.log(u) < loga) can else x
def main(arg: Array[String]): Unit = {
println("Hi")
metrop1(10).foreach(println)
//mcmcSummary(metrop1(100))
metrop2(10)
metrop3(10)
//mcmcSummary(metrop4(1000))
metrop4(10).foreach(println)
metrop5(10)
metrop6(10).foreach(println)
//mcmcSummary(metrop6(100000))
metrop7().take(10).foreach(println)
//mcmcSummary(DenseVector(metrop7().take(100000).toArray))
//mcmcSummary(DenseVector(thin(metrop7().drop(1000),100).take(10000).toArray))
MarkovChain(0.0)(kernel).steps.take(10).foreach(println)
//mcmcSummary(DenseVector(MarkovChain(0.0)(kernel).steps.take(100000).toArray))
MarkovChain.metropolisHastings(0.0, (x: Double) => Uniform(x - 0.5, x + 0.5))(x => Gaussian(0.0, 1.0).logPdf(x)).steps.take(10).toArray.foreach(println)
mcmcSummary(DenseVector(MarkovChain.metropolisHastings(0.0,(x: Double)=>Uniform(x-0.5,x+0.5))(x=>Gaussian(0.0,1.0).logPdf(x)).steps.take(100000).toArray))
// timings...
val N=1000000
time(metrop1(N))
time(metrop4(N))
time(metrop6(N))
time(metrop7().take(N).toArray)
time(MarkovChain(0.0)(kernel).steps.take(N).toArray)
time(MarkovChain.metropolisHastings(0.0, (x: Double) => Uniform(x - 0.5, x + 0.5))(x => Gaussian(0.0, 1.0).logPdf(x)).steps.take(N).toArray)
println("Bye")
}
}
// eof
| darrenjw/blog | mcmc-stream/src/main/scala/mcmc-stream/mcmc-stream.scala | Scala | apache-2.0 | 5,843 |
package pw.ian.sysadmincraft.tasks
import org.bukkit.scheduler.BukkitRunnable
import pw.ian.sysadmincraft.SysAdmincraft
case class PermaDayTask(plugin: SysAdmincraft) extends BukkitRunnable {
override def run(): Unit = {
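    // 6000 ticks is noon in Minecraft's day/night cycle, so re-running this
    // task keeps the world pinned at permanent midday.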
plugin.world.setTime(6000L)
}
}
| simplyianm/sysadmincraft | src/main/scala/pw/ian/sysadmincraft/tasks/PermaDayTask.scala | Scala | isc | 265 |
package edu.mit.csail.sdg.ormolu
import edu.mit.csail.sdg.alloy4compiler.ast.Func
import scala.collection.JavaConversions._
import edu.mit.csail.sdg.alloy4.translator.AlloyToOrmolu
import edu.mit.csail.sdg.ormolu.rel.{Relation, Variable}
import edu.mit.csail.sdg.hsqldb.syntax.value.{BoolValueExpr, ValueExpr}
import edu.mit.csail.sdg.hsqldb.syntax.literal.{FunctionLiteral}
/**
 * An Alloy predicate lowered to an SQL boolean function: `name` is the function
 * name, `variables` its relational parameters, and `body` the expression
 * evaluated over them.
 *
 * User: Dwayne
 * Date: 7/16/11
 */
case class Predicate(name: String, variables: Seq[Variable], body: Expression) {
def this(func: Func) = this(func.label.replaceFirst("this/", ""),
for (param <- func.params(); Relation(rel) <- AlloyToOrmolu.visitThis(param.`type`.toExpr)) yield Variable(param.label, rel),
AlloyToOrmolu.visitThis(func.getBody).head)
val parameters = for (variable <- variables; parameter <- variable.parameters) yield parameter
def call(args: Seq[ValueExpr]): BoolValueExpr = new FunctionLiteral(name + (args.map(_.toSql).mkString("(", ", ", ")"))) with BoolValueExpr
def definition =
"CREATE FUNCTION " + name + parameters.mkString("(", " INTEGER, ", " INTEGER)") +
"\\n\\tRETURNS BOOLEAN"+
"\\n\\tREADS SQL DATA"+
"\\n\\tBEGIN ATOMIC"+
variables.map("\\n\\t\\t" +_.createTable).mkString +
variables.map("\\n\\t\\t" +_.insertValues).mkString +
"\\n\\t\\tRETURN " + body.sqlExpr.toSql + ";" +
"\\n\\tEND"
}
| dlreeves/ormolu | src/edu/mit/csail/sdg/ormolu/Predicate.scala | Scala | mit | 1,490 |
package com.arcusys.valamis.persistence.impl.slide
import com.arcusys.valamis.persistence.common.{DatabaseLayer, SlickProfile}
import com.arcusys.valamis.persistence.impl.slide.schema.SlideTableComponent
import com.arcusys.valamis.slide.model.Device
import com.arcusys.valamis.slide.storage.DeviceRepository
import slick.driver.JdbcProfile
import slick.jdbc.JdbcBackend
/**
* Created by Igor Borisov on 02.11.15.
*/
class DeviceRepositoryImpl(val db: JdbcBackend#DatabaseDef,
val driver: JdbcProfile)
extends DeviceRepository
with SlickProfile
with DatabaseLayer
with SlideTableComponent {
import driver.api._
override def getAll: Seq[Device] = execSync {
devices.result
}
}
| arcusys/Valamis | valamis-slick-persistence/src/main/scala/com/arcusys/valamis/persistence/impl/slide/DeviceRepositoryImpl.scala | Scala | gpl-3.0 | 732 |
package net.mentalarray.doozie.Internal
/**
* Created by kdivincenzo on 2/6/15.
*/
trait Server {
/**
* The port that the server is listening on.
* @return The TCP port
*/
def port: Int
/**
* The delegate type of the OnClientConnected event callback (handler).
*/
type OnClientConnected = IpcChannel => Unit
/**
* Starts the server listening for client connections.
*/
def startListening() : Unit
/**
* Initiates the shutdown of the server.
*/
def shutdown() : Unit
/**
* Gets the current OnClientConnection callback.
*/
def onClientConnected : OnClientConnected
/**
* Sets the current OnClientConnected callback.
* @param value The callback to invoke when a new client is connected.
*/
def onClientConnected_=(value: OnClientConnected) : Unit
}
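/**
 * A minimal in-memory stub (not part of the original file) satisfying the
 * trait; handy for unit-testing client-connection wiring. No socket is opened,
 * and startListening/shutdown are deliberately no-ops.
 */
object StubServer extends Server {
  private var handler: OnClientConnected = _ => ()

  def port: Int = 0
  def startListening(): Unit = () // a real implementation would accept connections here
  def shutdown(): Unit = ()

  def onClientConnected: OnClientConnected = handler
  def onClientConnected_=(value: OnClientConnected): Unit = handler = value
}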
| antagonist112358/tomahawk | workflow-engine/src/net/mentalarray/doozie/Internal/Server.scala | Scala | apache-2.0 | 822 |
/*
* Copyright 2013 David Savage
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.chronologicalthought.modula
import java.util.Comparator
/**
* @author David Savage
*/
// TODO use this in ordering service references returned by findReferences
// ServiceComparators should be looked up by findServices which in turn allows them to be ordered
// ServiceComparators should only be visible in context that registers them
// may need new method on ModuleContext to register with filter that hides from
// other contexts?
// TODO should move this to hooks package?
trait ServiceComparator extends Comparator[ServiceReference[_]] {
// TODO move these scopes to Hooks super trait
/**
* Service attribute to specify where the comparator is used
*/
val Scope = "scope"
/**
* The service comparator is global
*/
val Global = "global"
/**
* The service comparator is only used in the context that registers it
*/
val Context = "context"
/**
* The default context of a comparator if non is specified on
* registration
*/
// TODO probably can't move default to Hooks super trait?
val Default = Context
}
| davemssavage/modula | api/src/main/scala/org/chronologicalthought/modula/ServiceComparator.scala | Scala | apache-2.0 | 1,672 |
/*
* Copyright 2017-2020 Aleksey Fomkin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package korolev.state
import java.io._
object javaSerialization {
implicit def serializer[T]: StateSerializer[T] = new StateSerializer[T] {
def serialize(value: T): Array[Byte] = {
val byteStream = new ByteArrayOutputStream()
val objectStream = new ObjectOutputStream(byteStream)
try {
objectStream.writeObject(value)
byteStream.toByteArray
}
finally {
objectStream.close()
byteStream.close()
}
}
}
implicit def deserializer[T]: StateDeserializer[T] = new StateDeserializer[T] {
def deserialize(data: Array[Byte]): Option[T] = {
val byteStream = new ByteArrayInputStream(data)
val objectStream = new ObjectInputStream(byteStream)
try {
val value = objectStream.readObject()
val typed = value.asInstanceOf[T]
Some(typed)
}
catch {
case _:InvalidClassException =>
// That means state type was changed
None
} finally {
objectStream.close()
byteStream.close()
}
}
}
}
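/**
 * A round-trip sketch (not part of the original file): serializing and then
 * deserializing any Serializable value recovers it; a String stands in for
 * application state here.
 */
object javaSerializationRoundTrip {
  import javaSerialization._

  def main(args: Array[String]): Unit = {
    val bytes = serializer[String].serialize("state")
    assert(deserializer[String].deserialize(bytes).contains("state"))
  }
}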
| fomkin/korolev | modules/korolev/src/main/scala/korolev/state/javaSerialization.scala | Scala | apache-2.0 | 1,676 |
package com.fsist.safepickle.joda
import com.fsist.safepickle._
import org.joda.time._
/** Picklers for joda-time types which, by default, pickle them as Long values counting milliseconds. */
object JodaTimePicklers {
implicit object InstantPickler extends ConvertPickler[Instant, Long] {
override def convertFrom(other: Long): Instant = new Instant(other)
override def convertTo(t: Instant): Long = t.getMillis
}
implicit object DateTimePickler extends ConvertPickler[DateTime, Long] {
override def convertFrom(other: Long): DateTime = new DateTime(other)
override def convertTo(t: DateTime): Long = t.getMillis
}
implicit object DurationPickler extends ConvertPickler[Duration, Long] {
override def convertFrom(other: Long): Duration = new Duration(other)
override def convertTo(t: Duration): Long = t.getMillis
}
  /** Pickles an interval as a tuple of start and end instants in milliseconds. */
implicit object IntervalPickler extends ConvertPickler[Interval, (Long, Long)] with TuplePicklers {
override def convertFrom(other: (Long, Long)): Interval = new Interval(other._1, other._2)
override def convertTo(t: Interval): (Long, Long) = (t.getStartMillis, t.getEndMillis)
}
}
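/**
 * A round-trip sketch (not part of the original file): the converters above are
 * pure functions, so the millisecond round trip can be checked without the
 * pickling machinery itself. Assumes joda-time's Instant.now() is available.
 */
object JodaTimePicklersRoundTrip {
  def main(args: Array[String]): Unit = {
    val now = Instant.now()
    val millis = JodaTimePicklers.InstantPickler.convertTo(now)
    assert(JodaTimePicklers.InstantPickler.convertFrom(millis) == now)
  }
}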
| fsist/safepickle | safepickle-joda-time/src/main/scala/com/fsist/safepickle/joda/JodaTimePicklers.scala | Scala | apache-2.0 | 1,216 |
/**
* The MIT License (MIT)
*
* Copyright (c) 2018 Israel Freitas([email protected])
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
package ifreitas.scalaaiml.elements
case class PatternSideBotProperty(propertyName: String) extends PatternExpression {
def toXml = <bot name={ propertyName }/>
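  // toXml renders as <bot name="..."/>, the AIML element that substitutes a bot
  // property inside a pattern.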
} | ifreitas/AimlToXml | src/main/scala/ifreitas/scalaaiml/elements/PatternSideBotProperty.scala | Scala | mit | 1,348 |
package com.twitter.server.view
import com.twitter.finagle.param.Label
import com.twitter.finagle.util.StackRegistry
import com.twitter.finagle.{Stack, StackBuilder, Stackable}
import org.scalatest.funsuite.AnyFunSuite
private[server] object StackRegistryViewTest {
case class Incr(incrementBy: Int)
implicit object Incr extends Stack.Param[Incr] {
val default = Incr(50)
}
object Foo {
def module: Stackable[Int => Int] =
new Stack.Module1[Incr, Int => Int] {
val role = Stack.Role("foo")
val description = "adds incr to every value."
def make(incr: Incr, next: Int => Int) = { i =>
val Incr(j) = incr
next(i + j)
}
}
}
object Bar {
def module: Stackable[Int => Int] =
new Stack.Module0[Int => Int] {
val role = Stack.Role("bar")
val description = "adds 2 to every value."
def make(next: Int => Int) = { i => next(i + 2) }
}
}
// Test class name parameters, which were previously mangled
case class ClassNames(classNames: Seq[String])
implicit object ClassNames extends Stack.Param[ClassNames] {
val default = ClassNames(Seq("com.twitter.com.twitter.finagle.exception.ExceptionReporter"))
}
object Baz {
def module: Stackable[Int => Int] =
new Stack.Module1[ClassNames, Int => Int] {
val role = Stack.Role("baz")
val description = "adds 3 to every value."
def make(classNames: ClassNames, next: Int => Int) = { i =>
val ClassNames(j) = classNames
next(i + 3)
}
}
}
val sb = new StackBuilder(Stack.leaf[Int => Int](Stack.Role("identity"), identity[Int] _))
sb.push(Baz.module)
sb.push(Bar.module)
sb.push(Foo.module)
val stk = sb.result
val prms = Stack.Params.empty + Label("test")
}
class StackRegistryViewTest extends AnyFunSuite {
import StackRegistryViewTest._
test("render stack") {
val entry0 = StackRegistry.Entry("", stk, prms)
val res0 = StackRegistryView.render(entry0, None)
assert(res0.contains("foo"))
assert(res0.contains("bar"))
assert(res0.contains("baz"))
assert(res0.contains("List(" + ClassNames.default.classNames(0) + ")"))
val entry1 = StackRegistry.Entry("", stk.remove(Stack.Role("baz")), prms)
val res1 = StackRegistryView.render(entry1, None)
assert(res1.contains("foo"))
assert(res1.contains("bar"))
assert(!res1.contains("baz"))
}
test("render params") {
val entry0 = StackRegistry.Entry("", stk, prms)
val res0 = StackRegistryView.render(entry0, None)
assert(res0.contains("incrementBy"))
assert(res0.contains("50"))
val entry1 = StackRegistry.Entry("", stk, prms + Incr(10))
val res1 = StackRegistryView.render(entry1, None)
assert(res1.contains("incrementBy"))
assert(res1.contains("10"))
}
}
| twitter/twitter-server | server/src/test/scala/com/twitter/server/view/StackRegistryViewTest.scala | Scala | apache-2.0 | 2,845 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.nasa.jpl.analytics.tools.dump
import java.io._
import java.util
import gov.nasa.jpl.analytics.base.{Loggable, CliTool}
import gov.nasa.jpl.analytics.model.CdrDumpParam
import gov.nasa.jpl.analytics.nutch.SegmentReader
import gov.nasa.jpl.analytics.util.{CommonUtil, Constants}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.nutch.crawl.{Inlink, LinkDb, Inlinks}
import org.apache.nutch.metadata.Metadata
import org.apache.nutch.protocol.Content
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.json.simple.JSONObject
import org.kohsuke.args4j.Option
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
/**
* Created by karanjeetsingh on 8/31/16.
*/
class Dedup extends CliTool {
import Dedup._
@Option(name = "-m", aliases = Array("--master"))
var sparkMaster: String = "local[*]"
@Option(name = "-d", aliases = Array("--dumpDir"))
var segmentDir: String = ""
@Option(name = "-f", aliases = Array("--dumpFile"))
var segmentFile: String = ""
@Option(name = "-h", aliases = Array("--hash"))
var hash: String = ""
@Option(name = "-o", aliases = Array("--outputDir"))
var outputDir: String = ""
var sc: SparkContext = _
def init(): Unit = {
val conf = new SparkConf()
conf.setAppName("UniqueUrls")
.setMaster(sparkMaster)
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.set("spark.kryo.classesToRegister", "java.util.HashSet,java.util.HashMap")
.set("spark.kryoserializer.buffer.max", "2040m")
conf.registerKryoClasses(Array(classOf[Content], classOf[Inlinks], classOf[Inlink], classOf[Metadata]))
sc = new SparkContext(conf)
}
override def run(): Unit = {
// Initialize SparkContext
init()
val config: Configuration = sc.hadoopConfiguration
val dumpParam: CdrDumpParam = new CdrDumpParam()
// Check & Create Output Directory
val fs: FileSystem = FileSystem.get(config)
val outPath: Path = new Path(outputDir)
if (fs.exists(outPath)) {
println("Please provide a non existing directory path")
System.exit(1)
}
//CommonUtil.makeSafeDir(outputDir)
// Generate a list of segment parts
var parts: List[Path] = List()
if (!segmentDir.isEmpty) {
parts = SegmentReader.listDumpDir(segmentDir, config)
} else if (!segmentFile.isEmpty) {
parts = SegmentReader.listFromFile(segmentFile)
} else {
println("Please provide Segment Path")
System.exit(1)
}
val hashRDD: RDD[Tuple2[String, String]] = sc.sequenceFile(hash.toString + File.separator + "part*")
val hashes = hashRDD.collectAsMap()
val jmap = new util.HashMap[String, String]()
for ((k, v) <- hashes) {
jmap.put(k, v)
}
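    // The (id -> hash) pairs were collected to the driver and copied into a
    // plain java HashMap so the filter closure below can ship them to executors.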
// Converting all Segment parts to RDDs
var rdds: Seq[RDD[Tuple2[String, String]]] = Seq()
for (part <- parts) {
dumpParam.part = part.toString
val docRDD = sc.textFile(part.toString).map(doc => CommonUtil.toJson(doc.toString))
.filter(doc => SegmentReader.filterDocs(dumpParam.part + "-" + doc.get(Constants.key.CDR_ID).toString, jmap))
.map(doc => doc.toJSONString)
docRDD.saveAsTextFile(outputDir + File.separator +
dumpParam.part.substring(dumpParam.part.lastIndexOf(File.separator) + 1))
//rdds :+= sc.sequenceFile[String, Content](part.toString)
}
sc.stop()
}
}
object Dedup extends Loggable with Serializable {
/**
* Used for reading/writing to database, files, etc.
   * Code from the book "Beginning Scala"
* http://www.amazon.com/Beginning-Scala-David-Pollak/dp/1430219890
*/
def using[A <: {def close(): Unit}, B](param: A)(f: A => B): B =
try { f(param) } finally { param.close() }
def appendJson(filename: String, data: String) =
using(new FileWriter(filename, true)) {
fileWriter => using(new PrintWriter(fileWriter)) {
printWriter => printWriter.println(data)
}
}
def printJson(map: Map[String, Any]): Unit = {
    val obj: JSONObject = new JSONObject(map)
println(obj.toJSONString)
}
def main(args: Array[String]) {
new Dedup().run(args)
}
} | USCDataScience/nutch-analytics | src/main/scala/gov/nasa/jpl/analytics/tools/dump/Dedup.scala | Scala | apache-2.0 | 5,059 |
package mesosphere.marathon
package raml
import mesosphere.marathon.core.condition
import mesosphere.marathon.raml.LocalVolumeConversion.localVolumeIdWrites
object TaskConversion extends HealthConversion with DefaultConversions {
implicit val enrichedTaskRamlWrite: Writes[core.appinfo.EnrichedTask, Task] = Writes { enrichedTask =>
val task: core.task.Task = enrichedTask.task
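    // Runtime details (timestamps, host ports, run spec version) only make
    // sense while the task is active; terminal tasks report empty values.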
val (startedAt, stagedAt, ports, version) =
if (task.isActive) {
(task.status.startedAt, Some(task.status.stagedAt), task.status.networkInfo.hostPorts, Some(task.runSpecVersion))
} else {
(None, None, Nil, None)
}
val ipAddresses = task.status.networkInfo.ipAddresses.toRaml
val localVolumes = enrichedTask.reservation.fold(Seq.empty[LocalVolumeId]) { reservation =>
reservation.volumeIds.toRaml
}
Task(
appId = enrichedTask.appId.toRaml,
healthCheckResults = enrichedTask.healthCheckResults.toRaml,
host = enrichedTask.agentInfo.host,
id = task.taskId.idString,
ipAddresses = ipAddresses,
ports = ports,
servicePorts = enrichedTask.servicePorts,
slaveId = enrichedTask.agentInfo.agentId,
state = condition.Condition.toMesosTaskStateOrStaging(task.status.condition).toRaml,
stagedAt = stagedAt.toRaml,
startedAt = startedAt.toRaml,
version = version.toRaml,
localVolumes = localVolumes,
region = enrichedTask.agentInfo.region,
zone = enrichedTask.agentInfo.zone
)
}
}
| guenter/marathon | src/main/scala/mesosphere/marathon/raml/TaskConversion.scala | Scala | apache-2.0 | 1,514 |
package com.arcusys.valamis.lesson.service.impl
import com.arcusys.valamis.lesson.model.LessonLimit
import com.arcusys.valamis.lesson.service.LessonLimitService
import com.arcusys.valamis.lesson.storage.LessonTableComponent
import com.arcusys.valamis.lesson.storage.query.LessonLimitQueries
import com.arcusys.valamis.model.PeriodTypes
import com.arcusys.valamis.persistence.common.SlickProfile
import scala.slick.driver.JdbcProfile
import scala.slick.jdbc.JdbcBackend
/**
* Created by mminin on 03.03.16.
*/
class LessonLimitServiceImpl(val db: JdbcBackend#DatabaseDef, val driver: JdbcProfile)
extends LessonLimitService
with LessonTableComponent
with LessonLimitQueries
with SlickProfile {
import driver.simple._
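  // setLimit has upsert semantics: any existing limit row is deleted first, and
  // a new row is inserted only when the limit actually constrains something.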
override def setLimit(limit: LessonLimit): Unit = {
db.withTransaction { implicit s =>
lessonLimits.filterByLessonId(limit.lessonId).delete
if (isNotEmpty(limit)) lessonLimits += limit
}
}
override def getLimit(lessonId: Long): Option[LessonLimit] = {
db.withSession { implicit s =>
lessonLimits.filterByLessonId(lessonId).firstOption
}
}
private def isNotEmpty(limit: LessonLimit): Boolean = {
val hasPassingLimit = limit.passingLimit.exists(_ > 0)
val hasPeriodLimit = limit.rerunInterval.exists(_ > 0) &&
limit.rerunIntervalType != PeriodTypes.UNLIMITED
hasPassingLimit || hasPeriodLimit
}
}
| igor-borisov/valamis | valamis-lesson/src/main/scala/com/arcusys/valamis/lesson/service/impl/LessonLimitServiceImpl.scala | Scala | gpl-3.0 | 1,404 |
/**
* Intersection Types: http://dotty.epfl.ch/docs/reference/intersection-types.html
*/
object IntersectionTypes {
sealed trait X {
def x: Double
def tpe: X
}
sealed trait Y {
def y: Double
def tpe: Y
}
type P = Y & X
type PP = X & Y
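  // Intersection types are commutative: P (Y & X) and PP (X & Y) are the same
  // type, so values of either flow into the X & Y parameters below.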
final case class Point(x: Double, y: Double) extends X with Y {
override def tpe: X & Y = ???
}
def test: Unit = {
def euclideanDistance(p1: X & Y, p2: X & Y) = {
Math.sqrt(Math.pow(p2.y - p1.y, 2) + Math.pow(p2.x - p1.x, 2))
}
val p1: P = Point(3, 4)
val p2: PP = Point(6, 8)
println(euclideanDistance(p1, p2))
}
}
| smarter/dotty-example-project | src/main/scala/IntersectionTypes.scala | Scala | bsd-3-clause | 629 |
import java.util.Date
given Conversion[String, Int] = _.length
given Conversion[Int, String] = _.toString
given Conversion[Int, Date] = new Date(_)
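// The .convert calls below invoke the matching given Conversion explicitly via
// the convert extension method on scala.Conversion's companion, selected by
// each method's declared result type.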
def f(x: String): Int = x.convert
def g(x: Int): String = x.convert
def h(x: Int): Date = x.convert
| lampepfl/dotty | tests/pos/convert.scala | Scala | apache-2.0 | 250 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package statements
import com.intellij.lang.ASTNode
import com.intellij.openapi.progress.ProgressManager
import com.intellij.psi._
import com.intellij.psi.scope.PsiScopeProcessor
import org.jetbrains.plugins.scala.lang.lexer._
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypeParametersOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScMember
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createIdentifier
import org.jetbrains.plugins.scala.lang.psi.stubs.ScFunctionStub
import org.jetbrains.plugins.scala.lang.psi.stubs.elements.ScFunctionElementType
import org.jetbrains.plugins.scala.macroAnnotations.{Cached, ModCount}
/**
* @author ilyas
*/
abstract class ScFunctionImpl protected (stub: ScFunctionStub, nodeType: ScFunctionElementType, node: ASTNode)
extends ScalaStubBasedElementImpl(stub, nodeType, node) with ScMember
with ScFunction with ScTypeParametersOwner {
override def isStable = false
def nameId: PsiElement = {
val n = getNode.findChildByType(ScalaTokenTypes.tIDENTIFIER) match {
case null => getNode.findChildByType(ScalaTokenTypes.kTHIS)
case notNull => notNull
}
if (n == null) {
val stub = getGreenStub
if (stub == null) {
val message = s"Both stub and name identifier node are null for ${getClass.getSimpleName} \\n$getText"
throw new NullPointerException(message)
}
return createIdentifier(getGreenStub.getName).getPsi
}
n.getPsi
}
@Cached(ModCount.anyScalaPsiModificationCount, this)
def paramClauses: ScParameters = getStubOrPsiChild(ScalaElementTypes.PARAM_CLAUSES)
override def processDeclarations(processor: PsiScopeProcessor, state: ResolveState,
lastParent: PsiElement, place: PsiElement): Boolean = {
if (lastParent == null) return true
// process function's type parameters
if (!super[ScTypeParametersOwner].processDeclarations(processor, state, lastParent, place)) return false
processParameters(processor, state, lastParent)
}
private def processParameters(processor: PsiScopeProcessor,
state: ResolveState,
lastParent: PsiElement): Boolean = {
if (lastParent != null && shouldProcessParameters(lastParent)) {
for {
clause <- effectiveParameterClauses
param <- clause.effectiveParameters
} {
ProgressManager.checkCanceled()
if (!processor.execute(param, state)) return false
}
}
true
}
// to resolve parameters in return type, type parameter context bounds and body;
// references in default parameters are processed in ScParametersImpl
protected def shouldProcessParameters(lastParent: PsiElement): Boolean = {
def isSynthetic = lastParent.getContext != lastParent.getParent
def isFromTypeParams = lastParent.isInstanceOf[ScTypeParamClause]
//don't compare returnTypeElement with lastParent, they may be different instances due to caches/stubs
def isReturnTypeElement = lastParent.isInstanceOf[ScTypeElement] && lastParent.getContext == this
isSynthetic || isFromTypeParams || isReturnTypeElement
}
@Cached(ModCount.anyScalaPsiModificationCount, this)
def returnTypeElement: Option[ScTypeElement] = byPsiOrStub(findChild(classOf[ScTypeElement]))(_.typeElement)
} | jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScFunctionImpl.scala | Scala | apache-2.0 | 3,724 |
package models.daos
import java.util.UUID
import javax.inject.Inject
import models._
import models.daos.tables._
import play.api.db.slick.{DatabaseConfigProvider, HasDatabaseConfigProvider}
import slick.jdbc.JdbcProfile
import slick.jdbc.PostgresProfile.api._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.language.implicitConversions
class RegistrationDAO @Inject()(protected val dbConfigProvider: DatabaseConfigProvider)
extends HasDatabaseConfigProvider[JdbcProfile] {
private val registrations = TableQuery[RegistrationTable]
private val persons = TableQuery[PersonTable]
private val groups = TableQuery[GroupTable]
private val organisations = TableQuery[OrganisationTable]
private val categories = TableQuery[CategoryTable]
def all: Future[Seq[Registration]] = {
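    // Left-join categories so registrations without a category are still included (c is an Option).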
val query = for {
(r, c) <- registrations joinLeft categories on (_.category_id === _.id)
p <- persons if p.id === r.person_id
g <- groups if g.id === p.group_id
o <- organisations if o.id === g.organisation_id
} yield (r, p, g, o, c)
db.run(query.result).map(rows => rows.map {
case (r, p, g, o, c) =>
val group = Group(g.id, g.name, o)
val person = Person(p.id, p.name, p.email, p.age, group)
Registration(r.id, person, r.friday, r.saturday, r.sorting, c, r.team_leader)
})
}
def category(category_id: UUID): Future[Seq[Registration]] = {
val query = for {
(r, c) <- registrations joinLeft categories on (_.category_id === _.id)
p <- persons if p.id === r.person_id && r.category_id === category_id
g <- groups if g.id === p.group_id
o <- organisations if o.id === g.organisation_id
} yield (r, p, g, o, c)
db.run(query.result).map(rows => rows.map {
case (r, p, g, o, c) =>
val group = Group(g.id, g.name, o)
val person = Person(p.id, p.name, p.email, p.age, group)
Registration(r.id, person, r.friday, r.saturday, r.sorting, c, r.team_leader)
})
}
def group(group_id: UUID): Future[Seq[Registration]] = {
val query = for {
(r, c) <- registrations joinLeft categories on (_.category_id === _.id)
p <- persons if p.id === r.person_id && p.group_id === group_id
g <- groups if g.id === group_id
o <- organisations if o.id === g.organisation_id
} yield (r, p, g, o, c)
db.run(query.result).map(rows => rows.map {
case (r, p, g, o, c) =>
val group = Group(g.id, g.name, o)
val person = Person(p.id, p.name, p.email, p.age, group)
Registration(r.id, person, r.friday, r.saturday, r.sorting, c, r.team_leader)
})
}
def get(id: UUID): Future[Option[Registration]] = {
val query = for {
(r, c) <- registrations joinLeft categories on (_.category_id === _.id) if r.id === id
p <- persons if p.id === r.person_id
g <- groups if g.id === p.group_id
o <- organisations if o.id === g.organisation_id
} yield (r, p, g, o, c)
db.run(query.result.headOption).map(rows => rows.map {
case (r, p, g, o, c) =>
val group = Group(g.id, g.name, o)
val person = Person(p.id, p.name, p.email, p.age, group)
Registration(r.id, person, r.friday, r.saturday, r.sorting, c, r.team_leader)
})
}
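  // Upsert: insert a new registration or update the row with the same primary key.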
def save(registration: Registration): Future[Registration] =
db.run(registrations.insertOrUpdate(toDBRegistration(registration))).map(_ => registration)
def delete(id: UUID): Future[Int] = {
val query = registrations.filter(_.id === id).delete
db.run(query)
}
implicit private def toDBRegistration(registration: Registration): DBRegistration =
DBRegistration(
registration.id,
registration.person.id,
registration.friday,
registration.saturday,
registration.sorting,
registration.category.map(cat => cat.id),
registration.teamLeader
)
} | wjglerum/bamboesmanager | app/models/daos/RegistrationDAO.scala | Scala | mit | 3,922 |
/*
* Copyright (C) 2015 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.task.tools
import org.openmole.core.highlight.HighLight
import org.openmole.core.pluginregistry.PluginRegistry
import org.osgi.framework.{ BundleActivator, BundleContext }
class Activator extends BundleActivator {
override def stop(context: BundleContext): Unit =
PluginRegistry.unregister(this)
override def start(context: BundleContext): Unit = {
import org.openmole.core.highlight.HighLight._
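    // Register these task names so the OpenMOLE console highlights them as DSL keywords.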
val keyWords: Vector[HighLight] =
Vector(
TaskHighLight(objectName(AssignTask)),
TaskHighLight(objectName(FlattenTask)),
TaskHighLight(objectName(MergeTask))
)
PluginRegistry.register(this, Vector(this.getClass.getPackage), highLight = keyWords)
}
} | openmole/openmole | openmole/plugins/org.openmole.plugin.task.tools/src/main/scala/org/openmole/plugin/task/tools/Activator.scala | Scala | agpl-3.0 | 1,454 |